//===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CGCXXABI.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/CodeGen/ConstantInitBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Format.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <algorithm>
#include <cstdio>

using namespace clang;
using namespace CodeGen;

CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
    : CGM(CGM), VTContext(CGM.getContext().getVTableContext()) {}

llvm::Constant *CodeGenModule::GetAddrOfThunk(StringRef Name, llvm::Type *FnTy,
                                              GlobalDecl GD) {
  return GetOrCreateLLVMFunction(Name, FnTy, GD, /*ForVTable=*/true,
                                 /*DontDefer=*/true, /*IsThunk=*/true);
}

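/// Apply the linkage, visibility, DLL storage class, and comdat settings that
/// an emitted thunk for \p GD should carry.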
static void setThunkProperties(CodeGenModule &CGM, const ThunkInfo &Thunk,
                               llvm::Function *ThunkFn, bool ForVTable,
                               GlobalDecl GD) {
  CGM.setFunctionLinkage(GD, ThunkFn);
  CGM.getCXXABI().setThunkLinkage(ThunkFn, ForVTable, GD,
                                  !Thunk.Return.isEmpty());

  // Set the right visibility.
  CGM.setGVProperties(ThunkFn, GD);

  if (!CGM.getCXXABI().exportThunk()) {
    ThunkFn->setDLLStorageClass(llvm::GlobalValue::DefaultStorageClass);
    ThunkFn->setDSOLocal(true);
  }

  if (CGM.supportsCOMDAT() && ThunkFn->isWeakForLinker())
    ThunkFn->setComdat(CGM.getModule().getOrInsertComdat(ThunkFn->getName()));
}

#ifndef NDEBUG
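// Debug-only helper for the thunk signature assertions below: two
// (ABIArgInfo, type) pairs are considered similar if they have the same ABI
// kind and either identical types or types that are both pointers or both
// references.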
static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
                    const ABIArgInfo &infoR, CanQualType typeR) {
  return (infoL.getKind() == infoR.getKind() &&
          (typeL == typeR ||
           (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
           (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
}
#endif

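/// Apply the return adjustment described by \p Thunk to the callee's return
/// value. Pointer results are null-checked first (a null pointer must stay
/// null), and the adjusted and null paths are merged with a PHI; reference
/// results are adjusted unconditionally.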
static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
                                      QualType ResultType, RValue RV,
                                      const ThunkInfo &Thunk) {
  // Emit the return adjustment.
  bool NullCheckValue = !ResultType->isReferenceType();

  llvm::BasicBlock *AdjustNull = nullptr;
  llvm::BasicBlock *AdjustNotNull = nullptr;
  llvm::BasicBlock *AdjustEnd = nullptr;

  llvm::Value *ReturnValue = RV.getScalarVal();

  if (NullCheckValue) {
    AdjustNull = CGF.createBasicBlock("adjust.null");
    AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
    AdjustEnd = CGF.createBasicBlock("adjust.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
    CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
    CGF.EmitBlock(AdjustNotNull);
  }

  auto ClassDecl = ResultType->getPointeeType()->getAsCXXRecordDecl();
  auto ClassAlign = CGF.CGM.getClassPointerAlignment(ClassDecl);
  ReturnValue = CGF.CGM.getCXXABI().performReturnAdjustment(CGF,
                                            Address(ReturnValue, ClassAlign),
                                            Thunk.Return);

  if (NullCheckValue) {
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustNull);
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustEnd);

    llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
    PHI->addIncoming(ReturnValue, AdjustNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
                     AdjustNull);
    ReturnValue = PHI;
  }

  return RValue::get(ReturnValue);
}

/// This function clones a function's DISubprogram node and enters it into
/// a value map with the intent that the map can be utilized by the cloner
/// to short-circuit Metadata node mapping.
/// Furthermore, the function resolves any DILocalVariable nodes referenced
/// by debug variable intrinsics so they can be properly mapped during cloning.
static void resolveTopLevelMetadata(llvm::Function *Fn,
                                    llvm::ValueToValueMapTy &VMap) {
  // Clone the DISubprogram node and put it into the Value map.
  auto *DIS = Fn->getSubprogram();
  if (!DIS)
    return;
  auto *NewDIS = DIS->replaceWithDistinct(DIS->clone());
  VMap.MD()[DIS].reset(NewDIS);

  // Find all debug variable intrinsics and resolve the DILocalVariable nodes
  // they are referencing.
  for (auto &BB : Fn->getBasicBlockList()) {
    for (auto &I : BB) {
      if (auto *DII = dyn_cast<llvm::DbgVariableIntrinsic>(&I)) {
        auto *DILocal = DII->getVariable();
        if (!DILocal->isResolved())
          DILocal->resolve();
      }
    }
  }
}

// This function does roughly the same thing as GenerateThunk, but in a
// very different way, so that va_start and va_end work correctly.
// FIXME: This function assumes "this" is the first non-sret LLVM argument of
//        a function, and that there is an alloca built in the entry block
//        for all accesses to "this".
// FIXME: This function assumes there is only one "ret" statement per function.
// FIXME: Cloning isn't correct in the presence of indirect goto!
// FIXME: This implementation of thunks bloats codesize by duplicating the
//        function definition.  There are alternatives:
//        1. Add some sort of stub support to LLVM for cases where we can
//           do a this adjustment, then a sibcall.
//        2. We could transform the definition to take a va_list instead of an
//           actual variable argument list, then have the thunks (including a
//           no-op thunk for the regular definition) call va_start/va_end.
//           There's a bit of per-call overhead for this solution, but it's
//           better for codesize if the definition is long.
llvm::Function *
CodeGenFunction::GenerateVarArgsThunk(llvm::Function *Fn,
                                      const CGFunctionInfo &FnInfo,
                                      GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  QualType ResultType = FPT->getReturnType();

  // Get the original function
  assert(FnInfo.isVariadic());
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(FnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
  llvm::Function *BaseFn = cast<llvm::Function>(Callee);

  // Cloning can't work if we don't have a definition. The Microsoft ABI may
  // require thunks when a definition is not available. Emit an error in these
  // cases.
  if (!MD->isDefined()) {
    CGM.ErrorUnsupported(MD, "return-adjusting thunk with variadic arguments");
    return Fn;
  }
  assert(!BaseFn->isDeclaration() && "cannot clone undefined variadic method");

  // Clone to thunk.
  llvm::ValueToValueMapTy VMap;

  // We are cloning a function while some Metadata nodes are still unresolved.
  // Ensure that the value mapper does not encounter any of them.
  resolveTopLevelMetadata(BaseFn, VMap);
  llvm::Function *NewFn = llvm::CloneFunction(BaseFn, VMap);
  Fn->replaceAllUsesWith(NewFn);
  NewFn->takeName(Fn);
  Fn->eraseFromParent();
  Fn = NewFn;

  // "Initialize" CGF (minimally).
  CurFn = Fn;

  // Get the "this" value
  llvm::Function::arg_iterator AI = Fn->arg_begin();
  if (CGM.ReturnTypeUsesSRet(FnInfo))
    ++AI;

  // Find the first store of "this", which will be to the alloca associated
  // with "this".
  Address ThisPtr(&*AI, CGM.getClassPointerAlignment(MD->getParent()));
  llvm::BasicBlock *EntryBB = &Fn->front();
  llvm::BasicBlock::iterator ThisStore =
      std::find_if(EntryBB->begin(), EntryBB->end(), [&](llvm::Instruction &I) {
        return isa<llvm::StoreInst>(I) &&
               I.getOperand(0) == ThisPtr.getPointer();
      });
  assert(ThisStore != EntryBB->end() &&
         "Store of this should be in entry block?");
  // Adjust "this", if necessary.
  Builder.SetInsertPoint(&*ThisStore);
  llvm::Value *AdjustedThisPtr =
      CGM.getCXXABI().performThisAdjustment(*this, ThisPtr, Thunk.This);
  AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr,
                                          ThisStore->getOperand(0)->getType());
  ThisStore->setOperand(0, AdjustedThisPtr);

  if (!Thunk.Return.isEmpty()) {
    // Fix up the returned value, if necessary.
    for (llvm::BasicBlock &BB : *Fn) {
      llvm::Instruction *T = BB.getTerminator();
      if (isa<llvm::ReturnInst>(T)) {
        RValue RV = RValue::get(T->getOperand(0));
        T->eraseFromParent();
        Builder.SetInsertPoint(&BB);
        RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);
        Builder.CreateRet(RV.getScalarVal());
        break;
      }
    }
  }

  return Fn;
}

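/// Set up this CodeGenFunction for emitting a thunk body: record the target
/// GlobalDecl, build the implicit 'this' parameter (plus the prototype
/// parameters when one is available), and start the function with an
/// artificial debug location.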
void CodeGenFunction::StartThunk(llvm::Function *Fn, GlobalDecl GD,
                                 const CGFunctionInfo &FnInfo,
                                 bool IsUnprototyped) {
  assert(!CurGD.getDecl() && "CurGD was already set!");
  CurGD = GD;
  CurFuncIsThunk = true;

  // Build FunctionArgs.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  QualType ThisType = MD->getThisType();
  QualType ResultType;
  if (IsUnprototyped)
    ResultType = CGM.getContext().VoidTy;
  else if (CGM.getCXXABI().HasThisReturn(GD))
    ResultType = ThisType;
  else if (CGM.getCXXABI().hasMostDerivedReturn(GD))
    ResultType = CGM.getContext().VoidPtrTy;
  else
    ResultType = MD->getType()->castAs<FunctionProtoType>()->getReturnType();
  FunctionArgList FunctionArgs;

  // Create the implicit 'this' parameter declaration.
  CGM.getCXXABI().buildThisParam(*this, FunctionArgs);

  // Add the rest of the parameters, if we have a prototype to work with.
  if (!IsUnprototyped) {
    FunctionArgs.append(MD->param_begin(), MD->param_end());

    if (isa<CXXDestructorDecl>(MD))
      CGM.getCXXABI().addImplicitStructorParams(*this, ResultType,
                                                FunctionArgs);
  }

  // Start defining the function.
  auto NL = ApplyDebugLocation::CreateEmpty(*this);
  StartFunction(GlobalDecl(), ResultType, Fn, FnInfo, FunctionArgs,
                MD->getLocation());
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  // Since we didn't pass a GlobalDecl to StartFunction, do this ourselves.
  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;
  CurCodeDecl = MD;
  CurFuncDecl = MD;
}

void CodeGenFunction::FinishThunk() {
  // Clear these to restore the invariants expected by
  // StartFunction/FinishFunction.
  CurCodeDecl = nullptr;
  CurFuncDecl = nullptr;

  FinishFunction();
}

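/// Emit the forwarding call inside a thunk: adjust 'this', forward the
/// remaining arguments, call the target, apply any return adjustment, and
/// return. Falls back to a musttail call when the arguments cannot be
/// re-emitted (variadic, inalloca, or unprototyped thunks).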
void CodeGenFunction::EmitCallAndReturnForThunk(llvm::FunctionCallee Callee,
                                                const ThunkInfo *Thunk,
                                                bool IsUnprototyped) {
  assert(isa<CXXMethodDecl>(CurGD.getDecl()) &&
         "Please use a new CGF for this thunk");
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(CurGD.getDecl());

  // Adjust the 'this' pointer if necessary
  llvm::Value *AdjustedThisPtr =
    Thunk ? CGM.getCXXABI().performThisAdjustment(
                          *this, LoadCXXThisAddress(), Thunk->This)
          : LoadCXXThis();

  // If perfect forwarding is required for a variadic method, a method using
  // inalloca, or an unprototyped thunk, use musttail. Emit an error if this
  // thunk requires a return adjustment, since that is impossible with musttail.
  if (CurFnInfo->usesInAlloca() || CurFnInfo->isVariadic() || IsUnprototyped) {
    if (Thunk && !Thunk->Return.isEmpty()) {
      if (IsUnprototyped)
        CGM.ErrorUnsupported(
            MD, "return-adjusting thunk with incomplete parameter type");
      else if (CurFnInfo->isVariadic())
        llvm_unreachable("shouldn't try to emit musttail return-adjusting "
                         "thunks for variadic functions");
      else
        CGM.ErrorUnsupported(
            MD, "non-trivial argument copy for return-adjusting thunk");
    }
    EmitMustTailThunk(CurGD, AdjustedThisPtr, Callee);
    return;
  }

  // Start building CallArgs.
  CallArgList CallArgs;
  QualType ThisType = MD->getThisType();
  CallArgs.add(RValue::get(AdjustedThisPtr), ThisType);

  if (isa<CXXDestructorDecl>(MD))
    CGM.getCXXABI().adjustCallArgsForDestructorThunk(*this, CurGD, CallArgs);

#ifndef NDEBUG
  unsigned PrefixArgs = CallArgs.size() - 1;
#endif
  // Add the rest of the arguments.
  for (const ParmVarDecl *PD : MD->parameters())
    EmitDelegateCallArg(CallArgs, PD, SourceLocation());

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();

#ifndef NDEBUG
  const CGFunctionInfo &CallFnInfo = CGM.getTypes().arrangeCXXMethodCall(
      CallArgs, FPT, RequiredArgs::forPrototypePlus(FPT, 1), PrefixArgs);
  assert(CallFnInfo.getRegParm() == CurFnInfo->getRegParm() &&
         CallFnInfo.isNoReturn() == CurFnInfo->isNoReturn() &&
         CallFnInfo.getCallingConvention() == CurFnInfo->getCallingConvention());
  assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
         similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
                 CurFnInfo->getReturnInfo(), CurFnInfo->getReturnType()));
  assert(CallFnInfo.arg_size() == CurFnInfo->arg_size());
  for (unsigned i = 0, e = CurFnInfo->arg_size(); i != e; ++i)
    assert(similar(CallFnInfo.arg_begin()[i].info,
                   CallFnInfo.arg_begin()[i].type,
                   CurFnInfo->arg_begin()[i].info,
                   CurFnInfo->arg_begin()[i].type));
#endif

  // Determine whether we have a return value slot to use.
  QualType ResultType = CGM.getCXXABI().HasThisReturn(CurGD)
                            ? ThisType
                            : CGM.getCXXABI().hasMostDerivedReturn(CurGD)
                                  ? CGM.getContext().VoidPtrTy
                                  : FPT->getReturnType();
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect)
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified(),
                           /*IsUnused=*/false, /*IsExternallyDestructed=*/true);

  // Now emit our call.
  llvm::CallBase *CallOrInvoke;
  RValue RV = EmitCall(*CurFnInfo, CGCallee::forDirect(Callee, CurGD), Slot,
                       CallArgs, &CallOrInvoke);

  // Consider return adjustment if we have ThunkInfo.
  if (Thunk && !Thunk->Return.isEmpty())
    RV = PerformReturnAdjustment(*this, ResultType, RV, *Thunk);
  else if (llvm::CallInst* Call = dyn_cast<llvm::CallInst>(CallOrInvoke))
    Call->setTailCallKind(llvm::CallInst::TCK_Tail);

  // Emit return.
  if (!ResultType->isVoidType() && Slot.isNull())
    CGM.getCXXABI().EmitReturnFromThunk(*this, RV, ResultType);

  // Disable the final ARC autorelease.
  AutoreleaseResult = false;

  FinishThunk();
}

void CodeGenFunction::EmitMustTailThunk(GlobalDecl GD,
                                        llvm::Value *AdjustedThisPtr,
                                        llvm::FunctionCallee Callee) {
  // Emitting a musttail call thunk doesn't use any of the CGCall.cpp machinery
  // to translate AST arguments into LLVM IR arguments.  For thunks, we know
  // that the caller prototype more or less matches the callee prototype with
  // the exception of 'this'.
  SmallVector<llvm::Value *, 8> Args;
  for (llvm::Argument &A : CurFn->args())
    Args.push_back(&A);

  // Set the adjusted 'this' pointer.
  const ABIArgInfo &ThisAI = CurFnInfo->arg_begin()->info;
  if (ThisAI.isDirect()) {
    const ABIArgInfo &RetAI = CurFnInfo->getReturnInfo();
    int ThisArgNo = RetAI.isIndirect() && !RetAI.isSRetAfterThis() ? 1 : 0;
    llvm::Type *ThisType = Args[ThisArgNo]->getType();
    if (ThisType != AdjustedThisPtr->getType())
      AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
    Args[ThisArgNo] = AdjustedThisPtr;
  } else {
    assert(ThisAI.isInAlloca() && "this is passed directly or inalloca");
    Address ThisAddr = GetAddrOfLocalVar(CXXABIThisDecl);
    llvm::Type *ThisType = ThisAddr.getElementType();
    if (ThisType != AdjustedThisPtr->getType())
      AdjustedThisPtr = Builder.CreateBitCast(AdjustedThisPtr, ThisType);
    Builder.CreateStore(AdjustedThisPtr, ThisAddr);
  }

  // Emit the musttail call manually.  Even if the prologue pushed cleanups, we
  // don't actually want to run them.
  llvm::CallInst *Call = Builder.CreateCall(Callee, Args);
  Call->setTailCallKind(llvm::CallInst::TCK_MustTail);

  // Apply the standard set of call attributes.
  unsigned CallingConv;
  llvm::AttributeList Attrs;
  CGM.ConstructAttributeList(Callee.getCallee()->getName(), *CurFnInfo, GD,
                             Attrs, CallingConv, /*AttrOnCallSite=*/true);
  Call->setAttributes(Attrs);
  Call->setCallingConv(static_cast<llvm::CallingConv::ID>(CallingConv));

  if (Call->getType()->isVoidTy())
    Builder.CreateRetVoid();
  else
    Builder.CreateRet(Call);

  // Finish the function to maintain CodeGenFunction invariants.
  // FIXME: Don't emit unreachable code.
  EmitBlock(createBasicBlock());

  FinishThunk();
}

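/// Emit the body of an ordinary (non-variadic) thunk: start the thunk,
/// compute the address of the callee, and delegate to
/// EmitCallAndReturnForThunk.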
void CodeGenFunction::generateThunk(llvm::Function *Fn,
                                    const CGFunctionInfo &FnInfo, GlobalDecl GD,
                                    const ThunkInfo &Thunk,
                                    bool IsUnprototyped) {
  StartThunk(Fn, GD, FnInfo, IsUnprototyped);
  // Create a scope with an artificial location for the body of this function.
  auto AL = ApplyDebugLocation::CreateArtificial(*this);

  // Get our callee. Use a placeholder type if this method is unprototyped so
  // that CodeGenModule doesn't try to set attributes.
  llvm::Type *Ty;
  if (IsUnprototyped)
    Ty = llvm::StructType::get(getLLVMContext());
  else
    Ty = CGM.getTypes().GetFunctionType(FnInfo);

  llvm::Constant *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);

  // Fix up the function type for an unprototyped musttail call.
  if (IsUnprototyped)
    Callee = llvm::ConstantExpr::getBitCast(Callee, Fn->getType());

  // Make the call and return the result.
  EmitCallAndReturnForThunk(llvm::FunctionCallee(Fn->getFunctionType(), Callee),
                            &Thunk, IsUnprototyped);
}

static bool shouldEmitVTableThunk(CodeGenModule &CGM, const CXXMethodDecl *MD,
                                  bool IsUnprototyped, bool ForVTable) {
  // Always emit thunks in the MS C++ ABI. We cannot rely on other TUs to
  // provide thunks for us.
  if (CGM.getTarget().getCXXABI().isMicrosoft())
    return true;

  // In the Itanium C++ ABI, vtable thunks are provided by TUs that provide
  // definitions of the main method. Therefore, emitting thunks with the vtable
  // is purely an optimization. Emit the thunk if optimizations are enabled and
  // all of the parameter types are complete.
  if (ForVTable)
    return CGM.getCodeGenOpts().OptimizationLevel && !IsUnprototyped;

  // Always emit thunks along with the method definition.
  return true;
}

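/// Return the address of the thunk for \p GD described by \p TI, emitting a
/// definition when shouldEmitVTableThunk says one is needed here; otherwise a
/// bare declaration is enough to fill the vtable slot.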
llvm::Constant *CodeGenVTables::maybeEmitThunk(GlobalDecl GD,
                                               const ThunkInfo &TI,
                                               bool ForVTable) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());

  // First, get a declaration. Compute the mangled name. Don't worry about
  // getting the function prototype right, since we may only need this
  // declaration to fill in a vtable slot.
  SmallString<256> Name;
  MangleContext &MCtx = CGM.getCXXABI().getMangleContext();
  llvm::raw_svector_ostream Out(Name);
  if (const CXXDestructorDecl *DD = dyn_cast<CXXDestructorDecl>(MD))
    MCtx.mangleCXXDtorThunk(DD, GD.getDtorType(), TI.This, Out);
  else
    MCtx.mangleThunk(MD, TI, Out);
  llvm::Type *ThunkVTableTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
  llvm::Constant *Thunk = CGM.GetAddrOfThunk(Name, ThunkVTableTy, GD);

  // If we don't need to emit a definition, return this declaration as is.
  bool IsUnprototyped = !CGM.getTypes().isFuncTypeConvertible(
      MD->getType()->castAs<FunctionType>());
  if (!shouldEmitVTableThunk(CGM, MD, IsUnprototyped, ForVTable))
    return Thunk;

  // Arrange a function prototype appropriate for a function definition. In some
  // cases in the MS ABI, we may need to build an unprototyped musttail thunk.
  const CGFunctionInfo &FnInfo =
      IsUnprototyped ? CGM.getTypes().arrangeUnprototypedMustTailThunk(MD)
                     : CGM.getTypes().arrangeGlobalDeclaration(GD);
  llvm::FunctionType *ThunkFnTy = CGM.getTypes().GetFunctionType(FnInfo);

  // If the type of the underlying GlobalValue is wrong, we'll have to replace
  // it. It should be a declaration.
  llvm::Function *ThunkFn = cast<llvm::Function>(Thunk->stripPointerCasts());
  if (ThunkFn->getFunctionType() != ThunkFnTy) {
    llvm::GlobalValue *OldThunkFn = ThunkFn;

    assert(OldThunkFn->isDeclaration() && "Shouldn't replace non-declaration");

    // Remove the name from the old thunk function and get a new thunk.
    OldThunkFn->setName(StringRef());
    ThunkFn = llvm::Function::Create(ThunkFnTy, llvm::Function::ExternalLinkage,
                                     Name.str(), &CGM.getModule());
    CGM.SetLLVMFunctionAttributes(MD, FnInfo, ThunkFn);

    // If needed, replace the old thunk with a bitcast.
    if (!OldThunkFn->use_empty()) {
      llvm::Constant *NewPtrForOldDecl =
          llvm::ConstantExpr::getBitCast(ThunkFn, OldThunkFn->getType());
      OldThunkFn->replaceAllUsesWith(NewPtrForOldDecl);
    }

    // Remove the old thunk.
    OldThunkFn->eraseFromParent();
  }

  bool ABIHasKeyFunctions = CGM.getTarget().getCXXABI().hasKeyFunctions();
  bool UseAvailableExternallyLinkage = ForVTable && ABIHasKeyFunctions;

  if (!ThunkFn->isDeclaration()) {
    if (!ABIHasKeyFunctions || UseAvailableExternallyLinkage) {
      // There is already a thunk emitted for this function, do nothing.
      return ThunkFn;
    }

    setThunkProperties(CGM, TI, ThunkFn, ForVTable, GD);
    return ThunkFn;
  }

  // If this will be unprototyped, add the "thunk" attribute so that LLVM knows
  // that the return type is meaningless. These thunks can be used to call
  // functions with differing return types, and the caller is required to cast
  // the prototype appropriately to extract the correct value.
  if (IsUnprototyped)
    ThunkFn->addFnAttr("thunk");

  CGM.SetLLVMFunctionAttributesForDefinition(GD.getDecl(), ThunkFn);

  // Thunks for variadic methods are special because in general variadic
  // arguments cannot be perfectly forwarded. In the general case, clang
  // implements such thunks by cloning the original function body. However, for
  // thunks with no return adjustment on targets that support musttail, we can
  // use musttail to perfectly forward the variadic arguments.
  bool ShouldCloneVarArgs = false;
  if (!IsUnprototyped && ThunkFn->isVarArg()) {
    ShouldCloneVarArgs = true;
    if (TI.Return.isEmpty()) {
      switch (CGM.getTriple().getArch()) {
      case llvm::Triple::x86_64:
      case llvm::Triple::x86:
      case llvm::Triple::aarch64:
        ShouldCloneVarArgs = false;
        break;
      default:
        break;
      }
    }
  }

  if (ShouldCloneVarArgs) {
    if (UseAvailableExternallyLinkage)
      return ThunkFn;
    ThunkFn =
        CodeGenFunction(CGM).GenerateVarArgsThunk(ThunkFn, FnInfo, GD, TI);
  } else {
    // Normal thunk body generation.
    CodeGenFunction(CGM).generateThunk(ThunkFn, FnInfo, GD, TI, IsUnprototyped);
  }

  setThunkProperties(CGM, TI, ThunkFn, ForVTable, GD);
  return ThunkFn;
}

void CodeGenVTables::EmitThunks(GlobalDecl GD) {
  const CXXMethodDecl *MD =
    cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();

  // We don't need to generate thunks for the base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return;

  const VTableContextBase::ThunkInfoVectorTy *ThunkInfoVector =
      VTContext->getThunkInfo(GD);

  if (!ThunkInfoVector)
    return;

  for (const ThunkInfo& Thunk : *ThunkInfoVector)
    maybeEmitThunk(GD, Thunk, /*ForVTable=*/false);
}

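/// Add a component for the relative vtable layout: a 32-bit offset from the
/// vtable's address point to the target. Function pointers may be routed
/// through dso_local stubs and the RTTI pointer through a dso_local proxy so
/// that the offsets can be resolved without dynamic relocations.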
void CodeGenVTables::addRelativeComponent(ConstantArrayBuilder &builder,
                                          llvm::Constant *component,
                                          unsigned vtableAddressPoint,
                                          bool vtableHasLocalLinkage,
                                          bool isCompleteDtor) const {
  // No need to get the offset of a nullptr.
  if (component->isNullValue())
    return builder.add(llvm::ConstantInt::get(CGM.Int32Ty, 0));

  auto *globalVal =
      cast<llvm::GlobalValue>(component->stripPointerCastsAndAliases());
  llvm::Module &module = CGM.getModule();

  // We don't want to copy the linkage of the vtable exactly because we still
  // want the stub/proxy to be emitted for properly calculating the offset.
  // Examples where there would be no symbol emitted are available_externally
  // and private linkages.
  auto stubLinkage = vtableHasLocalLinkage ? llvm::GlobalValue::InternalLinkage
                                           : llvm::GlobalValue::ExternalLinkage;

  llvm::Constant *target;
  if (auto *func = dyn_cast<llvm::Function>(globalVal)) {
    target = getOrCreateRelativeStub(func, stubLinkage, isCompleteDtor);
  } else {
    llvm::SmallString<16> rttiProxyName(globalVal->getName());
    rttiProxyName.append(".rtti_proxy");

    // The RTTI component may not always be emitted in the same linkage unit as
    // the vtable. As a general case, we can make a dso_local proxy to the RTTI
    // that points to the actual RTTI struct somewhere. This will result in a
    // GOTPCREL relocation when taking the relative offset to the proxy.
    llvm::GlobalVariable *proxy = module.getNamedGlobal(rttiProxyName);
    if (!proxy) {
      proxy = new llvm::GlobalVariable(module, globalVal->getType(),
                                       /*isConstant=*/true, stubLinkage,
                                       globalVal, rttiProxyName);
      proxy->setDSOLocal(true);
      proxy->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
      if (!proxy->hasLocalLinkage()) {
        proxy->setVisibility(llvm::GlobalValue::HiddenVisibility);
        proxy->setComdat(module.getOrInsertComdat(rttiProxyName));
      }
    }
    target = proxy;
  }

  builder.addRelativeOffsetToPosition(CGM.Int32Ty, target,
                                      /*position=*/vtableAddressPoint);
}

llvm::Function *CodeGenVTables::getOrCreateRelativeStub(
    llvm::Function *func, llvm::GlobalValue::LinkageTypes stubLinkage,
    bool isCompleteDtor) const {
  // A complete object destructor can later be substituted in the vtable for an
  // appropriate base object destructor when optimizations are enabled. This can
  // happen for child classes that don't have their own destructor. In the case
  // where a parent virtual destructor is not guaranteed to be in the same
  // linkage unit as the child vtable, it's possible for an external reference
  // for this destructor to be substituted into the child vtable, preventing it
  // from being in rodata. If this function is a complete virtual destructor, we
  // can just force a stub to be emitted for it.
  if (func->isDSOLocal() && !isCompleteDtor)
    return func;

  llvm::SmallString<16> stubName(func->getName());
  stubName.append(".stub");

  // Instead of taking the offset between the vtable and virtual function
  // directly, we emit a dso_local stub that just contains a tail call to the
  // original virtual function and take the offset between that and the
  // vtable. We do this because there are some cases where the original
  // function that would've been inserted into the vtable is not dso_local
  // which may require some kind of dynamic relocation which prevents the
  // vtable from being readonly. On x86_64, taking the offset between the
  // function and the vtable gets lowered to the offset between the PLT entry
  // for the function and the vtable which gives us a PLT32 reloc. On AArch64,
  // right now only CALL26 and JUMP26 instructions generate PLT relocations,
  // so we manifest them with stubs that are just jumps to the original
  // function.
  auto &module = CGM.getModule();
  llvm::Function *stub = module.getFunction(stubName);
  if (stub) {
    assert(stub->isDSOLocal() &&
           "The previous definition of this stub should've been dso_local.");
    return stub;
  }

  stub = llvm::Function::Create(func->getFunctionType(), stubLinkage, stubName,
                                module);

  // Propagate function attributes.
  stub->setAttributes(func->getAttributes());

  stub->setDSOLocal(true);
  stub->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  if (!stub->hasLocalLinkage()) {
    stub->setVisibility(llvm::GlobalValue::HiddenVisibility);
    stub->setComdat(module.getOrInsertComdat(stubName));
  }

  // Fill the stub with a tail call that will be optimized.
  llvm::BasicBlock *block =
      llvm::BasicBlock::Create(module.getContext(), "entry", stub);
  llvm::IRBuilder<> block_builder(block);
  llvm::SmallVector<llvm::Value *, 8> args;
  for (auto &arg : stub->args())
    args.push_back(&arg);
  llvm::CallInst *call = block_builder.CreateCall(func, args);
  call->setAttributes(func->getAttributes());
  call->setTailCall();
  if (call->getType()->isVoidTy())
    block_builder.CreateRetVoid();
  else
    block_builder.CreateRet(call);

  return stub;
}

bool CodeGenVTables::useRelativeLayout() const {
  return CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGM.getItaniumVTableContext().isRelativeLayout();
}

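// In the relative vtable layout, components are 32-bit offsets; in the
// classic layout, they are pointers.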
llvm::Type *CodeGenVTables::getVTableComponentType() const {
  if (useRelativeLayout())
    return CGM.Int32Ty;
  return CGM.Int8PtrTy;
}

static void AddPointerLayoutOffset(const CodeGenModule &CGM,
                                   ConstantArrayBuilder &builder,
                                   CharUnits offset) {
  builder.add(llvm::ConstantExpr::getIntToPtr(
      llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity()),
      CGM.Int8PtrTy));
}

static void AddRelativeLayoutOffset(const CodeGenModule &CGM,
                                    ConstantArrayBuilder &builder,
                                    CharUnits offset) {
  builder.add(llvm::ConstantInt::get(CGM.Int32Ty, offset.getQuantity()));
}

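/// Emit a single vtable component (vcall/vbase offset, offset-to-top, RTTI
/// pointer, or virtual function pointer) into \p builder, using the
/// representation required by the active vtable layout.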
void CodeGenVTables::addVTableComponent(ConstantArrayBuilder &builder,
                                        const VTableLayout &layout,
                                        unsigned componentIndex,
                                        llvm::Constant *rtti,
                                        unsigned &nextVTableThunkIndex,
                                        unsigned vtableAddressPoint,
                                        bool vtableHasLocalLinkage) {
  auto &component = layout.vtable_components()[componentIndex];

  auto addOffsetConstant =
      useRelativeLayout() ? AddRelativeLayoutOffset : AddPointerLayoutOffset;

  switch (component.getKind()) {
  case VTableComponent::CK_VCallOffset:
    return addOffsetConstant(CGM, builder, component.getVCallOffset());

  case VTableComponent::CK_VBaseOffset:
    return addOffsetConstant(CGM, builder, component.getVBaseOffset());

  case VTableComponent::CK_OffsetToTop:
    return addOffsetConstant(CGM, builder, component.getOffsetToTop());

  case VTableComponent::CK_RTTI:
    if (useRelativeLayout())
      return addRelativeComponent(builder, rtti, vtableAddressPoint,
                                  vtableHasLocalLinkage,
                                  /*isCompleteDtor=*/false);
    else
      return builder.add(llvm::ConstantExpr::getBitCast(rtti, CGM.Int8PtrTy));

  case VTableComponent::CK_FunctionPointer:
  case VTableComponent::CK_CompleteDtorPointer:
  case VTableComponent::CK_DeletingDtorPointer: {
    GlobalDecl GD;

    // Get the right global decl.
    switch (component.getKind()) {
    default:
      llvm_unreachable("Unexpected vtable component kind");
    case VTableComponent::CK_FunctionPointer:
      GD = component.getFunctionDecl();
      break;
    case VTableComponent::CK_CompleteDtorPointer:
      GD = GlobalDecl(component.getDestructorDecl(), Dtor_Complete);
      break;
    case VTableComponent::CK_DeletingDtorPointer:
      GD = GlobalDecl(component.getDestructorDecl(), Dtor_Deleting);
      break;
    }

    if (CGM.getLangOpts().CUDA) {
      // Emit NULL for methods we can't codegen on this
      // side. Otherwise we'd end up with a vtable with unresolved
      // references.
      const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
      // OK on device side: functions w/ __device__ attribute
      // OK on host side: anything except __device__-only functions.
      bool CanEmitMethod =
          CGM.getLangOpts().CUDAIsDevice
              ? MD->hasAttr<CUDADeviceAttr>()
              : (MD->hasAttr<CUDAHostAttr>() || !MD->hasAttr<CUDADeviceAttr>());
      if (!CanEmitMethod)
        return builder.add(llvm::ConstantExpr::getNullValue(CGM.Int8PtrTy));
      // Method is acceptable, continue processing as usual.
    }

    auto getSpecialVirtualFn = [&](StringRef name) -> llvm::Constant * {
      // FIXME(PR43094): When merging comdat groups, lld can select a local
      // symbol as the signature symbol even though it cannot be accessed
      // outside that symbol's TU. The relative vtables ABI would make
      // __cxa_pure_virtual and __cxa_deleted_virtual local symbols, and
      // depending on link order, the comdat groups could resolve to the one
      // with the local symbol. As a temporary solution, fill these components
      // with zero. We shouldn't be calling these in the first place anyway.
      if (useRelativeLayout())
        return llvm::ConstantPointerNull::get(CGM.Int8PtrTy);

      // For NVPTX devices in OpenMP emit special functions as null pointers,
      // otherwise linking ends up with unresolved references.
      if (CGM.getLangOpts().OpenMP && CGM.getLangOpts().OpenMPIsDevice &&
          CGM.getTriple().isNVPTX())
        return llvm::ConstantPointerNull::get(CGM.Int8PtrTy);
      llvm::FunctionType *fnTy =
          llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
      llvm::Constant *fn = cast<llvm::Constant>(
          CGM.CreateRuntimeFunction(fnTy, name).getCallee());
      if (auto f = dyn_cast<llvm::Function>(fn))
        f->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
      return llvm::ConstantExpr::getBitCast(fn, CGM.Int8PtrTy);
    };

    llvm::Constant *fnPtr;

    // Pure virtual member functions.
    if (cast<CXXMethodDecl>(GD.getDecl())->isPure()) {
      if (!PureVirtualFn)
        PureVirtualFn =
            getSpecialVirtualFn(CGM.getCXXABI().GetPureVirtualCallName());
      fnPtr = PureVirtualFn;

    // Deleted virtual member functions.
    } else if (cast<CXXMethodDecl>(GD.getDecl())->isDeleted()) {
      if (!DeletedVirtualFn)
        DeletedVirtualFn =
            getSpecialVirtualFn(CGM.getCXXABI().GetDeletedVirtualCallName());
      fnPtr = DeletedVirtualFn;

    // Thunks.
    } else if (nextVTableThunkIndex < layout.vtable_thunks().size() &&
               layout.vtable_thunks()[nextVTableThunkIndex].first ==
                   componentIndex) {
      auto &thunkInfo = layout.vtable_thunks()[nextVTableThunkIndex].second;

      nextVTableThunkIndex++;
      fnPtr = maybeEmitThunk(GD, thunkInfo, /*ForVTable=*/true);

    // Otherwise we can use the method definition directly.
    } else {
      llvm::Type *fnTy = CGM.getTypes().GetFunctionTypeForVTable(GD);
      fnPtr = CGM.GetAddrOfFunction(GD, fnTy, /*ForVTable=*/true);
    }

    if (useRelativeLayout()) {
      return addRelativeComponent(
          builder, fnPtr, vtableAddressPoint, vtableHasLocalLinkage,
          component.getKind() == VTableComponent::CK_CompleteDtorPointer);
    } else
      return builder.add(llvm::ConstantExpr::getBitCast(fnPtr, CGM.Int8PtrTy));
  }

  case VTableComponent::CK_UnusedFunctionPointer:
    if (useRelativeLayout())
      return builder.add(llvm::ConstantExpr::getNullValue(CGM.Int32Ty));
    else
      return builder.addNullPointer(CGM.Int8PtrTy);
  }

  llvm_unreachable("Unexpected vtable component kind");
}

llvm::Type *CodeGenVTables::getVTableType(const VTableLayout &layout) {
  SmallVector<llvm::Type *, 4> tys;
  llvm::Type *componentType = getVTableComponentType();
  for (unsigned i = 0, e = layout.getNumVTables(); i != e; ++i)
    tys.push_back(llvm::ArrayType::get(componentType, layout.getVTableSize(i)));

  return llvm::StructType::get(CGM.getLLVMContext(), tys);
}

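/// Build the initializer for a vtable group: one array of components per
/// vtable in the layout, emitted in layout order.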
void CodeGenVTables::createVTableInitializer(ConstantStructBuilder &builder,
                                             const VTableLayout &layout,
                                             llvm::Constant *rtti,
                                             bool vtableHasLocalLinkage) {
  llvm::Type *componentType = getVTableComponentType();

  const auto &addressPoints = layout.getAddressPointIndices();
  unsigned nextVTableThunkIndex = 0;
  for (unsigned vtableIndex = 0, endIndex = layout.getNumVTables();
       vtableIndex != endIndex; ++vtableIndex) {
    auto vtableElem = builder.beginArray(componentType);

    size_t vtableStart = layout.getVTableOffset(vtableIndex);
    size_t vtableEnd = vtableStart + layout.getVTableSize(vtableIndex);
    for (size_t componentIndex = vtableStart; componentIndex < vtableEnd;
         ++componentIndex) {
      addVTableComponent(vtableElem, layout, componentIndex, rtti,
                         nextVTableThunkIndex, addressPoints[vtableIndex],
                         vtableHasLocalLinkage);
    }
    vtableElem.finishAndAddTo(builder);
  }
}

llvm::GlobalVariable *CodeGenVTables::GenerateConstructionVTable(
    const CXXRecordDecl *RD, const BaseSubobject &Base, bool BaseIsVirtual,
    llvm::GlobalVariable::LinkageTypes Linkage,
    VTableAddressPointsMapTy &AddressPoints) {
  if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
    DI->completeClassData(Base.getBase());

  std::unique_ptr<VTableLayout> VTLayout(
      getItaniumVTableContext().createConstructionVTableLayout(
          Base.getBase(), Base.getBaseOffset(), BaseIsVirtual, RD));

  // Add the address points.
  AddressPoints = VTLayout->getAddressPoints();

  // Get the mangled construction vtable name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  cast<ItaniumMangleContext>(CGM.getCXXABI().getMangleContext())
      .mangleCXXCtorVTable(RD, Base.getBaseOffset().getQuantity(),
                           Base.getBase(), Out);
  SmallString<256> Name(OutName);

  bool UsingRelativeLayout = getItaniumVTableContext().isRelativeLayout();
  bool VTableAliasExists =
      UsingRelativeLayout && CGM.getModule().getNamedAlias(Name);
  if (VTableAliasExists) {
    // We previously made the vtable hidden and changed its name.
    Name.append(".local");
  }

  llvm::Type *VTType = getVTableType(*VTLayout);

  // Construction vtable symbols are not part of the Itanium ABI, so we cannot
  // guarantee that they actually will be available externally. Instead, when
  // emitting an available_externally VTT, we provide references to an internal
  // linkage construction vtable. The ABI only requires complete-object vtables
  // to be the same for all instances of a type, not construction vtables.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    Linkage = llvm::GlobalVariable::InternalLinkage;

  unsigned Align = CGM.getDataLayout().getABITypeAlignment(VTType);

  // Create the variable that will hold the construction vtable.
  llvm::GlobalVariable *VTable =
      CGM.CreateOrReplaceCXXRuntimeVariable(Name, VTType, Linkage, Align);

  // V-tables are always unnamed_addr.
  VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(
      CGM.getContext().getTagDeclType(Base.getBase()));

  // Create and set the initializer.
  ConstantInitBuilder builder(CGM);
  auto components = builder.beginStruct();
  createVTableInitializer(components, *VTLayout, RTTI,
                          VTable->hasLocalLinkage());
  components.finishAndSetAsInitializer(VTable);

  // Set properties only after the initializer has been set to ensure that the
  // GV is treated as definition and not declaration.
  assert(!VTable->isDeclaration() && "Shouldn't set properties on declaration");
  CGM.setGVProperties(VTable, RD);

  CGM.EmitVTableTypeMetadata(RD, VTable, *VTLayout.get());

  if (UsingRelativeLayout && !VTable->isDSOLocal())
    GenerateRelativeVTableAlias(VTable, OutName);

  return VTable;
}

// If the VTable is not dso_local, then we will not be able to indicate that
// the VTable does not need a relocation and can be moved into rodata. A
// frequent case where this occurs is for classes that should be made public
// from a DSO (like in libc++). For cases like these, we can make the vtable
// hidden or private and create a public alias with the same visibility and
// linkage as the original vtable type.
void CodeGenVTables::GenerateRelativeVTableAlias(llvm::GlobalVariable *VTable,
                                                 llvm::StringRef AliasName) {
  assert(getItaniumVTableContext().isRelativeLayout() &&
         "Can only use this if the relative vtable ABI is used");
  assert(!VTable->isDSOLocal() && "This should be called only if the vtable is "
                                  "not guaranteed to be dso_local");

  // If the vtable is available_externally, we shouldn't (and don't need to)
  // generate an alias for it in the first place, since the vtable won't
  // actually be emitted in this compilation unit.
  if (VTable->hasAvailableExternallyLinkage())
    return;

  VTable->setName(AliasName + ".local");

  auto Linkage = VTable->getLinkage();
  assert(llvm::GlobalAlias::isValidLinkage(Linkage) &&
         "Invalid vtable alias linkage");

  llvm::GlobalAlias *VTableAlias = CGM.getModule().getNamedAlias(AliasName);
  if (!VTableAlias) {
    VTableAlias = llvm::GlobalAlias::create(
        VTable->getValueType(), VTable->getAddressSpace(), Linkage, AliasName,
        &CGM.getModule());
  } else {
    assert(VTableAlias->getValueType() == VTable->getValueType());
    assert(VTableAlias->getLinkage() == Linkage);
  }
  VTableAlias->setVisibility(VTable->getVisibility());
  VTableAlias->setUnnamedAddr(VTable->getUnnamedAddr());

  // Both of these imply dso_local for the vtable.
  if (!VTable->hasComdat()) {
    // If this is in a comdat, then we shouldn't make the linkage private due to
    // an issue in lld where private symbols can be used as the key symbol when
    // choosing the prevalent group. This leads to "relocation refers to a
    // symbol in a discarded section".
    VTable->setLinkage(llvm::GlobalValue::PrivateLinkage);
  } else {
    // We should at least make this hidden since we don't want to expose it.
    VTable->setVisibility(llvm::GlobalValue::HiddenVisibility);
  }

  VTableAlias->setAliasee(VTable);
}

static bool shouldEmitAvailableExternallyVTable(const CodeGenModule &CGM,
                                                const CXXRecordDecl *RD) {
  return CGM.getCodeGenOpts().OptimizationLevel > 0 &&
         CGM.getCXXABI().canSpeculativelyEmitVTable(RD);
}

/// Compute the required linkage of the vtable for the given class.
///
/// Note that we only call this at the end of the translation unit.
llvm::GlobalVariable::LinkageTypes
CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
  if (!RD->isExternallyVisible())
    return llvm::GlobalVariable::InternalLinkage;

  // We're at the end of the translation unit, so the current key
  // function is fully correct.
  const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD);
  if (keyFunction && !RD->hasAttr<DLLImportAttr>()) {
    // If this class has a key function, use that to determine the
    // linkage of the vtable.
    const FunctionDecl *def = nullptr;
    if (keyFunction->hasBody(def))
      keyFunction = cast<CXXMethodDecl>(def);

    switch (keyFunction->getTemplateSpecializationKind()) {
      case TSK_Undeclared:
      case TSK_ExplicitSpecialization:
        assert((def || CodeGenOpts.OptimizationLevel > 0 ||
                CodeGenOpts.getDebugInfo() != codegenoptions::NoDebugInfo) &&
               "Shouldn't query vtable linkage without key function, "
               "optimizations, or debug info");
        if (!def && CodeGenOpts.OptimizationLevel > 0)
          return llvm::GlobalVariable::AvailableExternallyLinkage;

        if (keyFunction->isInlined())
          return !Context.getLangOpts().AppleKext ?
                   llvm::GlobalVariable::LinkOnceODRLinkage :
                   llvm::Function::InternalLinkage;

        return llvm::GlobalVariable::ExternalLinkage;

      case TSK_ImplicitInstantiation:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::LinkOnceODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDefinition:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::WeakODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDeclaration:
        llvm_unreachable("Should not have been asked to emit this");
    }
  }

  // -fapple-kext mode does not support weak linkage, so we must use
  // internal linkage.
  if (Context.getLangOpts().AppleKext)
    return llvm::Function::InternalLinkage;

  llvm::GlobalVariable::LinkageTypes DiscardableODRLinkage =
      llvm::GlobalValue::LinkOnceODRLinkage;
  llvm::GlobalVariable::LinkageTypes NonDiscardableODRLinkage =
      llvm::GlobalValue::WeakODRLinkage;
  if (RD->hasAttr<DLLExportAttr>()) {
    // Cannot discard exported vtables.
    DiscardableODRLinkage = NonDiscardableODRLinkage;
  } else if (RD->hasAttr<DLLImportAttr>()) {
    // Imported vtables are available externally.
    DiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
    NonDiscardableODRLinkage = llvm::GlobalVariable::AvailableExternallyLinkage;
  }

  switch (RD->getTemplateSpecializationKind()) {
    case TSK_Undeclared:
    case TSK_ExplicitSpecialization:
    case TSK_ImplicitInstantiation:
      return DiscardableODRLinkage;

    case TSK_ExplicitInstantiationDeclaration:
      // Explicit instantiations in MSVC do not provide vtables, so we must emit
      // our own.
      if (getTarget().getCXXABI().isMicrosoft())
        return DiscardableODRLinkage;
      return shouldEmitAvailableExternallyVTable(*this, RD)
                 ? llvm::GlobalVariable::AvailableExternallyLinkage
                 : llvm::GlobalVariable::ExternalLinkage;

    case TSK_ExplicitInstantiationDefinition:
      return NonDiscardableODRLinkage;
  }

  llvm_unreachable("Invalid TemplateSpecializationKind!");
}

/// This is a callback from Sema to tell us that a particular vtable is
/// required to be emitted in this translation unit.
///
/// This is only called for vtables that _must_ be emitted (mainly due to key
/// functions).  For weak vtables, CodeGen tracks when they are needed and
/// emits them as-needed.
void CodeGenModule::EmitVTable(CXXRecordDecl *theClass) {
  VTables.GenerateClassData(theClass);
}

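/// Emit the class data required for \p RD's vtable: the ABI-specific virtual
/// inheritance tables when there are virtual bases, and the vtable
/// definitions themselves.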
void
CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
  if (CGDebugInfo *DI = CGM.getModuleDebugInfo())
    DI->completeClassData(RD);

  if (RD->getNumVBases())
    CGM.getCXXABI().emitVirtualInheritanceTables(RD);

  CGM.getCXXABI().emitVTableDefinitions(*this, RD);
}

/// At this point in the translation unit, does it appear that we can
/// rely on the vtable being defined elsewhere in the program?
///
/// The response is really only definitive when called at the end of
/// the translation unit.
///
/// The only semantic restriction here is that the object file should
/// not contain a vtable definition when that vtable is defined
/// strongly elsewhere.  Otherwise, we'd just like to avoid emitting
/// vtables when unnecessary.
bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
  assert(RD->isDynamicClass() && "Non-dynamic classes have no VTable.");

  // We always synthesize vtables if they are needed in the MS ABI. MSVC doesn't
  // emit them even if there is an explicit template instantiation.
  if (CGM.getTarget().getCXXABI().isMicrosoft())
    return false;

  // If we have an explicit instantiation declaration (and not a
  // definition), the vtable is defined elsewhere.
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
  if (TSK == TSK_ExplicitInstantiationDeclaration)
    return true;

  // Otherwise, if the class is an instantiated template, the
  // vtable must be defined here.
  if (TSK == TSK_ImplicitInstantiation ||
      TSK == TSK_ExplicitInstantiationDefinition)
    return false;

  // Otherwise, if the class doesn't have a key function (possibly
  // anymore), the vtable must be defined here.
  const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
  if (!keyFunction)
    return false;

  // Otherwise, if we don't have a definition of the key function, the
  // vtable must be defined somewhere else.
  return !keyFunction->hasBody();
}

/// Given that we're currently at the end of the translation unit, and
/// we've emitted a reference to the vtable for this class, should
/// we define that vtable?
static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
                                                   const CXXRecordDecl *RD) {
  // If the vtable is internal then it has to be emitted here.
  if (!CGM.getVTables().isVTableExternal(RD))
    return true;

  // If it's external then maybe we will need it as available_externally.
  return shouldEmitAvailableExternallyVTable(CGM, RD);
}

/// Given that at some point we emitted a reference to one or more
/// vtables, and that we are now at the end of the translation unit,
/// decide whether we should emit them.
void CodeGenModule::EmitDeferredVTables() {
#ifndef NDEBUG
  // Remember the size of DeferredVTables, because we're going to assume
  // that this entire operation doesn't modify it.
  size_t savedSize = DeferredVTables.size();
#endif

  for (const CXXRecordDecl *RD : DeferredVTables)
    if (shouldEmitVTableAtEndOfTranslationUnit(*this, RD))
      VTables.GenerateClassData(RD);
    else if (shouldOpportunisticallyEmitVTables())
      OpportunisticVTables.push_back(RD);

  assert(savedSize == DeferredVTables.size() &&
         "deferred extra vtables during vtable emission?");
  DeferredVTables.clear();
}

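/// Returns true, under the LTOVisibilityPublicStd codegen option, for classes
/// declared within a top-level 'std' or 'stdext' namespace; such classes keep
/// public LTO visibility.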
bool CodeGenModule::HasLTOVisibilityPublicStd(const CXXRecordDecl *RD) {
  if (!getCodeGenOpts().LTOVisibilityPublicStd)
    return false;

  const DeclContext *DC = RD;
  while (1) {
    auto *D = cast<Decl>(DC);
    DC = DC->getParent();
    if (isa<TranslationUnitDecl>(DC->getRedeclContext())) {
      if (auto *ND = dyn_cast<NamespaceDecl>(D))
        if (const IdentifierInfo *II = ND->getIdentifier())
          if (II->isStr("std") || II->isStr("stdext"))
            return true;
      break;
    }
  }

  return false;
}

bool CodeGenModule::HasHiddenLTOVisibility(const CXXRecordDecl *RD) {
  LinkageInfo LV = RD->getLinkageAndVisibility();
  if (!isExternallyVisible(LV.getLinkage()))
    return true;

  if (RD->hasAttr<LTOVisibilityPublicAttr>() || RD->hasAttr<UuidAttr>())
    return false;

  if (getTriple().isOSBinFormatCOFF()) {
    if (RD->hasAttr<DLLExportAttr>() || RD->hasAttr<DLLImportAttr>())
      return false;
  } else {
    if (LV.getVisibility() != HiddenVisibility)
      return false;
  }

  return !HasLTOVisibilityPublicStd(RD);
}

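/// Compute the vcall visibility level to attach to \p RD's vtable: the
/// class's own level (derived from its linkage and LTO visibility), relaxed
/// via std::min to the most permissive level required by any of its dynamic
/// base classes.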
llvm::GlobalObject::VCallVisibility
CodeGenModule::GetVCallVisibilityLevel(const CXXRecordDecl *RD) {
  LinkageInfo LV = RD->getLinkageAndVisibility();
  llvm::GlobalObject::VCallVisibility TypeVis;
  if (!isExternallyVisible(LV.getLinkage()))
    TypeVis = llvm::GlobalObject::VCallVisibilityTranslationUnit;
  else if (HasHiddenLTOVisibility(RD))
    TypeVis = llvm::GlobalObject::VCallVisibilityLinkageUnit;
  else
    TypeVis = llvm::GlobalObject::VCallVisibilityPublic;

  for (auto B : RD->bases())
    if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
      TypeVis = std::min(TypeVis,
                    GetVCallVisibilityLevel(B.getType()->getAsCXXRecordDecl()));

  for (auto B : RD->vbases())
    if (B.getType()->getAsCXXRecordDecl()->isDynamicClass())
      TypeVis = std::min(TypeVis,
                    GetVCallVisibilityLevel(B.getType()->getAsCXXRecordDecl()));

  return TypeVis;
}

void CodeGenModule::EmitVTableTypeMetadata(const CXXRecordDecl *RD,
                                           llvm::GlobalVariable *VTable,
                                           const VTableLayout &VTLayout) {
  if (!getCodeGenOpts().LTOUnit)
    return;

  CharUnits PointerWidth =
      Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));

  typedef std::pair<const CXXRecordDecl *, unsigned> AddressPoint;
  std::vector<AddressPoint> AddressPoints;
  for (auto &&AP : VTLayout.getAddressPoints())
    AddressPoints.push_back(std::make_pair(
        AP.first.getBase(), VTLayout.getVTableOffset(AP.second.VTableIndex) +
                                AP.second.AddressPointIndex));

  // Sort the address points for determinism.
  llvm::sort(AddressPoints, [this](const AddressPoint &AP1,
                                   const AddressPoint &AP2) {
    if (&AP1 == &AP2)
      return false;

    std::string S1;
    llvm::raw_string_ostream O1(S1);
    getCXXABI().getMangleContext().mangleTypeName(
        QualType(AP1.first->getTypeForDecl(), 0), O1);
    O1.flush();

    std::string S2;
    llvm::raw_string_ostream O2(S2);
    getCXXABI().getMangleContext().mangleTypeName(
        QualType(AP2.first->getTypeForDecl(), 0), O2);
    O2.flush();

    if (S1 < S2)
      return true;
    if (S1 != S2)
      return false;

    return AP1.second < AP2.second;
  });

  ArrayRef<VTableComponent> Comps = VTLayout.vtable_components();
  for (auto AP : AddressPoints) {
    // Create type metadata for the address point.
    AddVTableTypeMetadata(VTable, PointerWidth * AP.second, AP.first);

    // The class associated with each address point could also potentially be
    // used for indirect calls via a member function pointer, so we need to
    // annotate the address of each function pointer with the appropriate member
    // function pointer type.
    for (unsigned I = 0; I != Comps.size(); ++I) {
      if (Comps[I].getKind() != VTableComponent::CK_FunctionPointer)
        continue;
      llvm::Metadata *MD = CreateMetadataIdentifierForVirtualMemPtrType(
          Context.getMemberPointerType(
              Comps[I].getFunctionDecl()->getType(),
              Context.getRecordType(AP.first).getTypePtr()));
      VTable->addTypeMetadata((PointerWidth * I).getQuantity(), MD);
    }
  }

  if (getCodeGenOpts().VirtualFunctionElimination ||
      getCodeGenOpts().WholeProgramVTables) {
    llvm::GlobalObject::VCallVisibility TypeVis = GetVCallVisibilityLevel(RD);
    if (TypeVis != llvm::GlobalObject::VCallVisibilityPublic)
      VTable->setVCallVisibilityMetadata(TypeVis);
  }
}