1 //===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
// This contains code dealing with code generation of C++ classes
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #include "CGDebugInfo.h"
15 #include "CodeGenFunction.h"
16 #include "clang/AST/CXXInheritance.h"
17 #include "clang/AST/EvaluatedExprVisitor.h"
18 #include "clang/AST/RecordLayout.h"
19 #include "clang/AST/StmtCXX.h"
20 
21 using namespace clang;
22 using namespace CodeGen;
23 
24 static uint64_t
25 ComputeNonVirtualBaseClassOffset(ASTContext &Context,
26                                  const CXXRecordDecl *DerivedClass,
27                                  CastExpr::path_const_iterator Start,
28                                  CastExpr::path_const_iterator End) {
29   uint64_t Offset = 0;
30 
31   const CXXRecordDecl *RD = DerivedClass;
32 
33   for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
34     const CXXBaseSpecifier *Base = *I;
35     assert(!Base->isVirtual() && "Should not see virtual bases here!");
36 
37     // Get the layout.
38     const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
39 
40     const CXXRecordDecl *BaseDecl =
41       cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
42 
43     // Add the offset.
44     Offset += Layout.getBaseClassOffset(BaseDecl);
45 
46     RD = BaseDecl;
47   }
48 
49   // FIXME: We should not use / 8 here.
50   return Offset / 8;
51 }
52 
53 llvm::Constant *
54 CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
55                                    CastExpr::path_const_iterator PathBegin,
56                                    CastExpr::path_const_iterator PathEnd) {
57   assert(PathBegin != PathEnd && "Base path should not be empty!");
58 
59   uint64_t Offset =
60     ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
61                                      PathBegin, PathEnd);
62   if (!Offset)
63     return 0;
64 
  const llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());
67 
68   return llvm::ConstantInt::get(PtrDiffTy, Offset);
69 }
70 
71 /// Gets the address of a direct base class within a complete object.
72 /// This should only be used for (1) non-virtual bases or (2) virtual bases
73 /// when the type is known to be complete (e.g. in complete destructors).
74 ///
75 /// The object pointed to by 'This' is assumed to be non-null.
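///
/// For illustration only (hypothetical types, not from this file):
///
///   struct A { int a; };
///   struct B : virtual A { int b; };
///
/// Inside B's complete constructor or destructor the object is known to be
/// a most-derived B, so the A subobject can be located directly from B's
/// layout without going through the vtable.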
76 llvm::Value *
77 CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
78                                                    const CXXRecordDecl *Derived,
79                                                    const CXXRecordDecl *Base,
80                                                    bool BaseIsVirtual) {
81   // 'this' must be a pointer (in some address space) to Derived.
82   assert(This->getType()->isPointerTy() &&
83          cast<llvm::PointerType>(This->getType())->getElementType()
84            == ConvertType(Derived));
85 
86   // Compute the offset of the virtual base.
87   uint64_t Offset;
88   const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
89   if (BaseIsVirtual)
90     Offset = Layout.getVBaseClassOffset(Base);
91   else
92     Offset = Layout.getBaseClassOffset(Base);
93 
94   // Shift and cast down to the base type.
95   // TODO: for complete types, this should be possible with a GEP.
96   llvm::Value *V = This;
97   if (Offset) {
98     const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
99     V = Builder.CreateBitCast(V, Int8PtrTy);
100     V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
101   }
102   V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());
103 
104   return V;
105 }
106 
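/// Apply a constant non-virtual offset and/or a dynamically computed
/// virtual-base offset (both in bytes) to 'ThisPtr', returning the adjusted
/// pointer as an i8*.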
107 static llvm::Value *
108 ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
109                                 uint64_t NonVirtual, llvm::Value *Virtual) {
110   const llvm::Type *PtrDiffTy =
111     CGF.ConvertType(CGF.getContext().getPointerDiffType());
112 
113   llvm::Value *NonVirtualOffset = 0;
114   if (NonVirtual)
115     NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);
116 
117   llvm::Value *BaseOffset;
118   if (Virtual) {
119     if (NonVirtualOffset)
120       BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
121     else
122       BaseOffset = Virtual;
123   } else
124     BaseOffset = NonVirtualOffset;
125 
126   // Apply the base offset.
127   const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
128   ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
129   ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");
130 
131   return ThisPtr;
132 }
133 
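/// GetAddressOfBaseClass - Convert a pointer to an object of type 'Derived'
/// into a pointer to the base class subobject described by the path
/// [PathBegin, PathEnd).  If NullCheckValue is true, a null pointer is
/// propagated unchanged instead of being adjusted.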
134 llvm::Value *
135 CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
136                                        const CXXRecordDecl *Derived,
137                                        CastExpr::path_const_iterator PathBegin,
138                                        CastExpr::path_const_iterator PathEnd,
139                                        bool NullCheckValue) {
140   assert(PathBegin != PathEnd && "Base path should not be empty!");
141 
142   CastExpr::path_const_iterator Start = PathBegin;
143   const CXXRecordDecl *VBase = 0;
144 
145   // Get the virtual base.
146   if ((*Start)->isVirtual()) {
147     VBase =
148       cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
149     ++Start;
150   }
151 
152   uint64_t NonVirtualOffset =
153     ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
154                                      Start, PathEnd);
155 
156   // Get the base pointer type.
157   const llvm::Type *BasePtrTy =
158     ConvertType((PathEnd[-1])->getType())->getPointerTo();
159 
160   if (!NonVirtualOffset && !VBase) {
161     // Just cast back.
162     return Builder.CreateBitCast(Value, BasePtrTy);
163   }
164 
165   llvm::BasicBlock *CastNull = 0;
166   llvm::BasicBlock *CastNotNull = 0;
167   llvm::BasicBlock *CastEnd = 0;
168 
169   if (NullCheckValue) {
170     CastNull = createBasicBlock("cast.null");
171     CastNotNull = createBasicBlock("cast.notnull");
172     CastEnd = createBasicBlock("cast.end");
173 
174     llvm::Value *IsNull =
175       Builder.CreateICmpEQ(Value,
176                            llvm::Constant::getNullValue(Value->getType()));
177     Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
178     EmitBlock(CastNotNull);
179   }
180 
181   llvm::Value *VirtualOffset = 0;
182 
183   if (VBase)
184     VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
185 
186   // Apply the offsets.
187   Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
188                                           VirtualOffset);
189 
190   // Cast back.
191   Value = Builder.CreateBitCast(Value, BasePtrTy);
192 
193   if (NullCheckValue) {
194     Builder.CreateBr(CastEnd);
195     EmitBlock(CastNull);
196     Builder.CreateBr(CastEnd);
197     EmitBlock(CastEnd);
198 
199     llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
200     PHI->reserveOperandSpace(2);
201     PHI->addIncoming(Value, CastNotNull);
202     PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
203                      CastNull);
204     Value = PHI;
205   }
206 
207   return Value;
208 }
209 
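/// GetAddressOfDerivedClass - Convert a pointer to a base class subobject
/// into a pointer to the derived class 'Derived' by subtracting the
/// non-virtual offset described by the path [PathBegin, PathEnd).  If
/// NullCheckValue is true, a null pointer is propagated unchanged.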
210 llvm::Value *
211 CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
212                                           const CXXRecordDecl *Derived,
213                                         CastExpr::path_const_iterator PathBegin,
214                                           CastExpr::path_const_iterator PathEnd,
215                                           bool NullCheckValue) {
216   assert(PathBegin != PathEnd && "Base path should not be empty!");
217 
218   QualType DerivedTy =
219     getContext().getCanonicalType(getContext().getTagDeclType(Derived));
220   const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();
221 
222   llvm::Value *NonVirtualOffset =
223     CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);
224 
225   if (!NonVirtualOffset) {
226     // No offset, we can just cast back.
227     return Builder.CreateBitCast(Value, DerivedPtrTy);
228   }
229 
230   llvm::BasicBlock *CastNull = 0;
231   llvm::BasicBlock *CastNotNull = 0;
232   llvm::BasicBlock *CastEnd = 0;
233 
234   if (NullCheckValue) {
235     CastNull = createBasicBlock("cast.null");
236     CastNotNull = createBasicBlock("cast.notnull");
237     CastEnd = createBasicBlock("cast.end");
238 
    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
242     Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
243     EmitBlock(CastNotNull);
244   }
245 
246   // Apply the offset.
247   Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
248   Value = Builder.CreateSub(Value, NonVirtualOffset);
  Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);
253 
254   if (NullCheckValue) {
255     Builder.CreateBr(CastEnd);
256     EmitBlock(CastNull);
257     Builder.CreateBr(CastEnd);
258     EmitBlock(CastEnd);
259 
260     llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
261     PHI->reserveOperandSpace(2);
262     PHI->addIncoming(Value, CastNotNull);
263     PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
264                      CastNull);
265     Value = PHI;
266   }
267 
268   return Value;
269 }
270 
271 /// GetVTTParameter - Return the VTT parameter that should be passed to a
272 /// base constructor/destructor with virtual bases.
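///
/// For illustration only (hypothetical hierarchy, not from this file):
///
///   struct A { virtual void f(); };
///   struct B : virtual A { B(); };
///   struct C : B { C(); };
///
/// C's complete constructor passes the address of B's sub-VTT within C's
/// VTT to B's base constructor, so that B can install vtable pointers
/// appropriate for a B subobject of a C.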
273 static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
274                                     bool ForVirtualBase) {
275   if (!CodeGenVTables::needsVTTParameter(GD)) {
276     // This constructor/destructor does not need a VTT parameter.
277     return 0;
278   }
279 
280   const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
281   const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();
282 
283   llvm::Value *VTT;
284 
285   uint64_t SubVTTIndex;
286 
287   // If the record matches the base, this is the complete ctor/dtor
288   // variant calling the base variant in a class with virtual bases.
289   if (RD == Base) {
290     assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
291            "doing no-op VTT offset in base dtor/ctor?");
292     assert(!ForVirtualBase && "Can't have same class as virtual base!");
293     SubVTTIndex = 0;
294   } else {
295     const ASTRecordLayout &Layout =
296       CGF.getContext().getASTRecordLayout(RD);
297     uint64_t BaseOffset = ForVirtualBase ?
298       Layout.getVBaseClassOffset(Base) : Layout.getBaseClassOffset(Base);
299 
300     SubVTTIndex =
301       CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
302     assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
303   }
304 
305   if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
306     // A VTT parameter was passed to the constructor, use it.
307     VTT = CGF.LoadCXXVTT();
308     VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
309   } else {
310     // We're the complete constructor, so get the VTT by name.
311     VTT = CGF.CGM.getVTables().getVTT(RD);
312     VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
313   }
314 
315   return VTT;
316 }
317 
318 namespace {
319   /// Call the destructor for a direct base class.
320   struct CallBaseDtor : EHScopeStack::Cleanup {
321     const CXXRecordDecl *BaseClass;
322     bool BaseIsVirtual;
323     CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
324       : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}
325 
326     void Emit(CodeGenFunction &CGF, bool IsForEH) {
327       const CXXRecordDecl *DerivedClass =
328         cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();
329 
330       const CXXDestructorDecl *D = BaseClass->getDestructor();
331       llvm::Value *Addr =
332         CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
333                                                   DerivedClass, BaseClass,
334                                                   BaseIsVirtual);
335       CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
336     }
337   };
338 
339   /// A visitor which checks whether an initializer uses 'this' in a
340   /// way which requires the vtable to be properly set.
341   struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
342     typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;
343 
344     bool UsesThis;
345 
346     DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}
347 
348     // Black-list all explicit and implicit references to 'this'.
349     //
350     // Do we need to worry about external references to 'this' derived
351     // from arbitrary code?  If so, then anything which runs arbitrary
352     // external code might potentially access the vtable.
353     void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
354   };
355 }
356 
357 static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
358   DynamicThisUseChecker Checker(C);
359   Checker.Visit(const_cast<Expr*>(Init));
360   return Checker.UsesThis;
361 }
362 
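/// EmitBaseInitializer - Emit a single base-class initializer from a
/// constructor's initializer list.  For illustration only (hypothetical
/// code, not from this file): in
///
///   struct A { A(int); };
///   struct B : A { virtual int f(); B() : A(f()) {} };
///
/// the base initializer A(f()) refers to 'this' through the implicit member
/// call, so B's vtable pointers are initialized before A is constructed.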
363 static void EmitBaseInitializer(CodeGenFunction &CGF,
364                                 const CXXRecordDecl *ClassDecl,
365                                 CXXBaseOrMemberInitializer *BaseInit,
366                                 CXXCtorType CtorType) {
367   assert(BaseInit->isBaseInitializer() &&
368          "Must have base initializer!");
369 
370   llvm::Value *ThisPtr = CGF.LoadCXXThis();
371 
372   const Type *BaseType = BaseInit->getBaseClass();
373   CXXRecordDecl *BaseClassDecl =
374     cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());
375 
376   bool isBaseVirtual = BaseInit->isBaseVirtual();
377 
378   // The base constructor doesn't construct virtual bases.
379   if (CtorType == Ctor_Base && isBaseVirtual)
380     return;
381 
382   // If the initializer for the base (other than the constructor
383   // itself) accesses 'this' in any way, we need to initialize the
384   // vtables.
385   if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
386     CGF.InitializeVTablePointers(ClassDecl);
387 
388   // We can pretend to be a complete class because it only matters for
389   // virtual bases, and we only do virtual bases for complete ctors.
390   llvm::Value *V =
391     CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
392                                               BaseClassDecl,
393                                               isBaseVirtual);
394 
395   AggValueSlot AggSlot = AggValueSlot::forAddr(V, false, /*Lifetime*/ true);
396 
397   CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);
398 
399   if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
400     CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
401                                           isBaseVirtual);
402 }
403 
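/// EmitAggMemberInitializer - Emit the (possibly nested) loops needed to
/// initialize an array member from its initializer.  Each recursion level
/// handles one array dimension; once all of the array index variables have
/// been consumed, the element itself is initialized.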
404 static void EmitAggMemberInitializer(CodeGenFunction &CGF,
405                                      LValue LHS,
406                                      llvm::Value *ArrayIndexVar,
407                                      CXXBaseOrMemberInitializer *MemberInit,
408                                      QualType T,
409                                      unsigned Index) {
410   if (Index == MemberInit->getNumArrayIndices()) {
411     CodeGenFunction::RunCleanupsScope Cleanups(CGF);
412 
413     llvm::Value *Dest = LHS.getAddress();
414     if (ArrayIndexVar) {
415       // If we have an array index variable, load it and use it as an offset.
416       // Then, increment the value.
417       llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
418       Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
419       llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
420       Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
421       CGF.Builder.CreateStore(Next, ArrayIndexVar);
422     }
423 
424     AggValueSlot Slot = AggValueSlot::forAddr(Dest, LHS.isVolatileQualified(),
425                                               /*Lifetime*/ true);
426 
427     CGF.EmitAggExpr(MemberInit->getInit(), Slot);
428 
429     return;
430   }
431 
432   const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
433   assert(Array && "Array initialization without the array type?");
434   llvm::Value *IndexVar
435     = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
436   assert(IndexVar && "Array index variable not loaded");
437 
438   // Initialize this index variable to zero.
439   llvm::Value* Zero
440     = llvm::Constant::getNullValue(
441                               CGF.ConvertType(CGF.getContext().getSizeType()));
442   CGF.Builder.CreateStore(Zero, IndexVar);
443 
444   // Start the loop with a block that tests the condition.
445   llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
446   llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");
447 
448   CGF.EmitBlock(CondBlock);
449 
450   llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
451   // Generate: if (loop-index < number-of-elements) fall to the loop body,
452   // otherwise, go to the block after the for-loop.
453   uint64_t NumElements = Array->getSize().getZExtValue();
454   llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
455   llvm::Value *NumElementsPtr =
456     llvm::ConstantInt::get(Counter->getType(), NumElements);
457   llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
458                                                   "isless");
459 
460   // If the condition is true, execute the body.
461   CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);
462 
463   CGF.EmitBlock(ForBody);
464   llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");
465 
466   {
467     CodeGenFunction::RunCleanupsScope Cleanups(CGF);
468 
469     // Inside the loop body recurse to emit the inner loop or, eventually, the
470     // constructor call.
471     EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
472                              Array->getElementType(), Index + 1);
473   }
474 
475   CGF.EmitBlock(ContinueBlock);
476 
477   // Emit the increment of the loop counter.
478   llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
479   Counter = CGF.Builder.CreateLoad(IndexVar);
480   NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
481   CGF.Builder.CreateStore(NextVal, IndexVar);
482 
483   // Finally, branch back up to the condition for the next iteration.
484   CGF.EmitBranch(CondBlock);
485 
486   // Emit the fall-through block.
487   CGF.EmitBlock(AfterFor, true);
488 }
489 
490 namespace {
491   struct CallMemberDtor : EHScopeStack::Cleanup {
492     FieldDecl *Field;
493     CXXDestructorDecl *Dtor;
494 
495     CallMemberDtor(FieldDecl *Field, CXXDestructorDecl *Dtor)
496       : Field(Field), Dtor(Dtor) {}
497 
498     void Emit(CodeGenFunction &CGF, bool IsForEH) {
499       // FIXME: Is this OK for C++0x delegating constructors?
500       llvm::Value *ThisPtr = CGF.LoadCXXThis();
501       LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);
502 
503       CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
504                                 LHS.getAddress());
505     }
506   };
507 }
508 
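/// EmitMemberInitializer - Emit the initialization of a single non-static
/// data member (or anonymous union member) from the constructor's
/// initializer list.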
509 static void EmitMemberInitializer(CodeGenFunction &CGF,
510                                   const CXXRecordDecl *ClassDecl,
511                                   CXXBaseOrMemberInitializer *MemberInit,
512                                   const CXXConstructorDecl *Constructor,
513                                   FunctionArgList &Args) {
514   assert(MemberInit->isMemberInitializer() &&
515          "Must have member initializer!");
516 
517   // non-static data member initializers.
518   FieldDecl *Field = MemberInit->getMember();
519   QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());
520 
521   llvm::Value *ThisPtr = CGF.LoadCXXThis();
522   LValue LHS;
523 
524   // If we are initializing an anonymous union field, drill down to the field.
525   if (MemberInit->getAnonUnionMember()) {
526     Field = MemberInit->getAnonUnionMember();
527     LHS = CGF.EmitLValueForAnonRecordField(ThisPtr, Field, 0);
528     FieldType = Field->getType();
529   } else {
530     LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
531   }
532 
533   // FIXME: If there's no initializer and the CXXBaseOrMemberInitializer
534   // was implicitly generated, we shouldn't be zeroing memory.
535   RValue RHS;
536   if (FieldType->isReferenceType()) {
537     RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(), Field);
538     CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
539   } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
540     CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
541   } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
542     RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
543     CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
544   } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
545     CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
546                                 LHS.isVolatileQualified());
547   } else {
548     llvm::Value *ArrayIndexVar = 0;
549     const ConstantArrayType *Array
550       = CGF.getContext().getAsConstantArrayType(FieldType);
551     if (Array && Constructor->isImplicit() &&
552         Constructor->isCopyConstructor()) {
553       const llvm::Type *SizeTy
554         = CGF.ConvertType(CGF.getContext().getSizeType());
555 
556       // The LHS is a pointer to the first object we'll be constructing, as
557       // a flat array.
558       QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
559       const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
560       BasePtr = llvm::PointerType::getUnqual(BasePtr);
561       llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
562                                                            BasePtr);
563       LHS = CGF.MakeAddrLValue(BaseAddrPtr, BaseElementTy);
564 
565       // Create an array index that will be used to walk over all of the
566       // objects we're constructing.
567       ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
568       llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
569       CGF.Builder.CreateStore(Zero, ArrayIndexVar);
570 
571       // If we are copying an array of scalars or classes with trivial copy
572       // constructors, perform a single aggregate copy.
573       const RecordType *Record = BaseElementTy->getAs<RecordType>();
574       if (!Record ||
575           cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
        // Find the source pointer. We know it's the last argument because
        // we know we're in a copy constructor.
578         unsigned SrcArgIndex = Args.size() - 1;
579         llvm::Value *SrcPtr
580           = CGF.Builder.CreateLoad(
581                                CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
582         LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);
583 
584         // Copy the aggregate.
585         CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
586                               LHS.isVolatileQualified());
587         return;
588       }
589 
590       // Emit the block variables for the array indices, if any.
591       for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
592         CGF.EmitAutoVarDecl(*MemberInit->getArrayIndex(I));
593     }
594 
595     EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);
596 
597     if (!CGF.Exceptions)
598       return;
599 
600     // FIXME: If we have an array of classes w/ non-trivial destructors,
601     // we need to destroy in reverse order of construction along the exception
602     // path.
603     const RecordType *RT = FieldType->getAs<RecordType>();
604     if (!RT)
605       return;
606 
607     CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
608     if (!RD->hasTrivialDestructor())
609       CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
610                                               RD->getDestructor());
611   }
612 }
613 
614 /// Checks whether the given constructor is a valid subject for the
615 /// complete-to-base constructor delegation optimization, i.e.
616 /// emitting the complete constructor as a simple call to the base
617 /// constructor.
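///
/// For illustration only (hypothetical type, not from this file): for
///
///   struct T { T(); };
///
/// the complete constructor can be emitted as a single call to the base
/// constructor, since T has no virtual bases and T() is not variadic.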
618 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {
619 
620   // Currently we disable the optimization for classes with virtual
621   // bases because (1) the addresses of parameter variables need to be
622   // consistent across all initializers but (2) the delegate function
623   // call necessarily creates a second copy of the parameter variable.
624   //
625   // The limiting example (purely theoretical AFAIK):
626   //   struct A { A(int &c) { c++; } };
627   //   struct B : virtual A {
628   //     B(int count) : A(count) { printf("%d\n", count); }
629   //   };
630   // ...although even this example could in principle be emitted as a
631   // delegation since the address of the parameter doesn't escape.
632   if (Ctor->getParent()->getNumVBases()) {
633     // TODO: white-list trivial vbase initializers.  This case wouldn't
634     // be subject to the restrictions below.
635 
636     // TODO: white-list cases where:
637     //  - there are no non-reference parameters to the constructor
638     //  - the initializers don't access any non-reference parameters
639     //  - the initializers don't take the address of non-reference
640     //    parameters
641     //  - etc.
642     // If we ever add any of the above cases, remember that:
643     //  - function-try-blocks will always blacklist this optimization
644     //  - we need to perform the constructor prologue and cleanup in
645     //    EmitConstructorBody.
646 
647     return false;
648   }
649 
650   // We also disable the optimization for variadic functions because
651   // it's impossible to "re-pass" varargs.
652   if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
653     return false;
654 
655   return true;
656 }
657 
658 /// EmitConstructorBody - Emits the body of the current constructor.
659 void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
660   const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
661   CXXCtorType CtorType = CurGD.getCtorType();
662 
663   // Before we go any further, try the complete->base constructor
664   // delegation optimization.
665   if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
666     if (CGDebugInfo *DI = getDebugInfo())
667       DI->EmitStopPoint(Builder);
668     EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
669     return;
670   }
671 
672   Stmt *Body = Ctor->getBody();
673 
674   // Enter the function-try-block before the constructor prologue if
675   // applicable.
676   bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
677   if (IsTryBody)
678     EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
679 
680   EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();
681 
682   // Emit the constructor prologue, i.e. the base and member
683   // initializers.
684   EmitCtorPrologue(Ctor, CtorType, Args);
685 
  // Emit the constructor body.
687   if (IsTryBody)
688     EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
689   else if (Body)
690     EmitStmt(Body);
691 
692   // Emit any cleanup blocks associated with the member or base
693   // initializers, which includes (along the exceptional path) the
694   // destructors for those members and bases that were fully
695   // constructed.
696   PopCleanupBlocks(CleanupDepth);
697 
698   if (IsTryBody)
699     ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
700 }
701 
/// EmitCtorPrologue - This routine generates the code needed to initialize
/// the base classes and non-static data members of the constructor's class.
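///
/// For illustration only (hypothetical class, not from this file): for
///
///   struct D : Base, virtual VBase { Member M; D(); };
///
/// the complete-object constructor prologue constructs VBase and Base,
/// installs D's vtable pointers, and then initializes M.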
704 void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
705                                        CXXCtorType CtorType,
706                                        FunctionArgList &Args) {
707   const CXXRecordDecl *ClassDecl = CD->getParent();
708 
709   llvm::SmallVector<CXXBaseOrMemberInitializer *, 8> MemberInitializers;
710 
711   for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
712        E = CD->init_end();
713        B != E; ++B) {
714     CXXBaseOrMemberInitializer *Member = (*B);
715 
716     if (Member->isBaseInitializer())
717       EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
718     else
719       MemberInitializers.push_back(Member);
720   }
721 
722   InitializeVTablePointers(ClassDecl);
723 
724   for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
725     EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
726 }
727 
728 /// EmitDestructorBody - Emits the body of the current destructor.
729 void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
730   const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
731   CXXDtorType DtorType = CurGD.getDtorType();
732 
733   // The call to operator delete in a deleting destructor happens
734   // outside of the function-try-block, which means it's always
735   // possible to delegate the destructor body to the complete
736   // destructor.  Do so.
737   if (DtorType == Dtor_Deleting) {
738     EnterDtorCleanups(Dtor, Dtor_Deleting);
739     EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
740                           LoadCXXThis());
741     PopCleanupBlock();
742     return;
743   }
744 
745   Stmt *Body = Dtor->getBody();
746 
747   // If the body is a function-try-block, enter the try before
748   // anything else.
749   bool isTryBody = (Body && isa<CXXTryStmt>(Body));
750   if (isTryBody)
751     EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
752 
753   // Enter the epilogue cleanups.
754   RunCleanupsScope DtorEpilogue(*this);
755 
756   // If this is the complete variant, just invoke the base variant;
757   // the epilogue will destruct the virtual bases.  But we can't do
758   // this optimization if the body is a function-try-block, because
759   // we'd introduce *two* handler blocks.
760   switch (DtorType) {
761   case Dtor_Deleting: llvm_unreachable("already handled deleting case");
762 
763   case Dtor_Complete:
764     // Enter the cleanup scopes for virtual bases.
765     EnterDtorCleanups(Dtor, Dtor_Complete);
766 
767     if (!isTryBody) {
768       EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
769                             LoadCXXThis());
770       break;
771     }
772     // Fallthrough: act like we're in the base variant.
773 
774   case Dtor_Base:
775     // Enter the cleanup scopes for fields and non-virtual bases.
776     EnterDtorCleanups(Dtor, Dtor_Base);
777 
778     // Initialize the vtable pointers before entering the body.
779     InitializeVTablePointers(Dtor->getParent());
780 
781     if (isTryBody)
782       EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
783     else if (Body)
784       EmitStmt(Body);
785     else {
786       assert(Dtor->isImplicit() && "bodyless dtor not implicit");
787       // nothing to do besides what's in the epilogue
788     }
789     break;
790   }
791 
792   // Jump out through the epilogue cleanups.
793   DtorEpilogue.ForceCleanup();
794 
795   // Exit the try if applicable.
796   if (isTryBody)
797     ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
798 }
799 
800 namespace {
801   /// Call the operator delete associated with the current destructor.
802   struct CallDtorDelete : EHScopeStack::Cleanup {
803     CallDtorDelete() {}
804 
805     void Emit(CodeGenFunction &CGF, bool IsForEH) {
806       const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
807       const CXXRecordDecl *ClassDecl = Dtor->getParent();
808       CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
809                          CGF.getContext().getTagDeclType(ClassDecl));
810     }
811   };
812 
813   struct CallArrayFieldDtor : EHScopeStack::Cleanup {
814     const FieldDecl *Field;
815     CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
816 
817     void Emit(CodeGenFunction &CGF, bool IsForEH) {
818       QualType FieldType = Field->getType();
819       const ConstantArrayType *Array =
820         CGF.getContext().getAsConstantArrayType(FieldType);
821 
822       QualType BaseType =
823         CGF.getContext().getBaseElementType(Array->getElementType());
824       const CXXRecordDecl *FieldClassDecl = BaseType->getAsCXXRecordDecl();
825 
826       llvm::Value *ThisPtr = CGF.LoadCXXThis();
827       LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
828                                           // FIXME: Qualifiers?
829                                           /*CVRQualifiers=*/0);
830 
831       const llvm::Type *BasePtr = CGF.ConvertType(BaseType)->getPointerTo();
832       llvm::Value *BaseAddrPtr =
833         CGF.Builder.CreateBitCast(LHS.getAddress(), BasePtr);
834       CGF.EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(),
835                                     Array, BaseAddrPtr);
836     }
837   };
838 
839   struct CallFieldDtor : EHScopeStack::Cleanup {
840     const FieldDecl *Field;
841     CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
842 
843     void Emit(CodeGenFunction &CGF, bool IsForEH) {
844       const CXXRecordDecl *FieldClassDecl =
845         Field->getType()->getAsCXXRecordDecl();
846 
847       llvm::Value *ThisPtr = CGF.LoadCXXThis();
848       LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
849                                           // FIXME: Qualifiers?
850                                           /*CVRQualifiers=*/0);
851 
852       CGF.EmitCXXDestructorCall(FieldClassDecl->getDestructor(),
853                                 Dtor_Complete, /*ForVirtualBase=*/false,
854                                 LHS.getAddress());
855     }
856   };
857 }
858 
/// EnterDtorCleanups - Enter the cleanups needed at the end of a class's
/// destructor: the call to operator delete for the deleting variant, and
/// destructor calls for members and base classes, in reverse order of their
/// construction, for the other variants.
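///
/// For illustration only (hypothetical class, not from this file): for
///
///   struct D : B, virtual V { M m; ~D(); };
///
/// the base-object destructor destroys 'm' and then the B subobject; the
/// complete-object destructor additionally destroys the V subobject after
/// the base-object variant has run.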
862 void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
863                                         CXXDtorType DtorType) {
864   assert(!DD->isTrivial() &&
865          "Should not emit dtor epilogue for trivial dtor!");
866 
867   // The deleting-destructor phase just needs to call the appropriate
868   // operator delete that Sema picked up.
869   if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
872     EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
873     return;
874   }
875 
876   const CXXRecordDecl *ClassDecl = DD->getParent();
877 
878   // The complete-destructor phase just destructs all the virtual bases.
879   if (DtorType == Dtor_Complete) {
880 
881     // We push them in the forward order so that they'll be popped in
882     // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
           ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
           I != E; ++I) {
886       const CXXBaseSpecifier &Base = *I;
887       CXXRecordDecl *BaseClassDecl
888         = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());
889 
890       // Ignore trivial destructors.
891       if (BaseClassDecl->hasTrivialDestructor())
892         continue;
893 
894       EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
895                                         BaseClassDecl,
896                                         /*BaseIsVirtual*/ true);
897     }
898 
899     return;
900   }
901 
902   assert(DtorType == Dtor_Base);
903 
904   // Destroy non-virtual bases.
905   for (CXXRecordDecl::base_class_const_iterator I =
906         ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
907     const CXXBaseSpecifier &Base = *I;
908 
909     // Ignore virtual bases.
910     if (Base.isVirtual())
911       continue;
912 
913     CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();
914 
915     // Ignore trivial destructors.
916     if (BaseClassDecl->hasTrivialDestructor())
917       continue;
918 
919     EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
920                                       BaseClassDecl,
921                                       /*BaseIsVirtual*/ false);
922   }
923 
924   // Destroy direct fields.
925   llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
926   for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
927        E = ClassDecl->field_end(); I != E; ++I) {
928     const FieldDecl *Field = *I;
929 
930     QualType FieldType = getContext().getCanonicalType(Field->getType());
931     const ConstantArrayType *Array =
932       getContext().getAsConstantArrayType(FieldType);
933     if (Array)
934       FieldType = getContext().getBaseElementType(Array->getElementType());
935 
936     const RecordType *RT = FieldType->getAs<RecordType>();
937     if (!RT)
938       continue;
939 
940     CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (FieldClassDecl->hasTrivialDestructor())
      continue;
943 
944     if (Array)
945       EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
946     else
947       EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
948   }
949 }
950 
/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the constructor 'D' on each element of the array.
/// 'ArrayTy' is the array type and 'ArrayPtr' points to the beginning of the
/// array.  It is assumed that all relevant checks have been made by the
/// caller.
957 ///
958 /// \param ZeroInitialization True if each element should be zero-initialized
959 /// before it is constructed.
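///
/// For illustration only (hypothetical, not from this file): for 'A a[3]'
/// this emits the equivalent of
///
///   for (i = 0; i < 3; ++i)
///     construct the element at &a[i];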
960 void
961 CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
962                                             const ConstantArrayType *ArrayTy,
963                                             llvm::Value *ArrayPtr,
964                                             CallExpr::const_arg_iterator ArgBeg,
965                                             CallExpr::const_arg_iterator ArgEnd,
966                                             bool ZeroInitialization) {
967 
968   const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
969   llvm::Value * NumElements =
970     llvm::ConstantInt::get(SizeTy,
971                            getContext().getConstantArrayElementCount(ArrayTy));
972 
973   EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd,
974                              ZeroInitialization);
975 }
976 
977 void
978 CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
979                                           llvm::Value *NumElements,
980                                           llvm::Value *ArrayPtr,
981                                           CallExpr::const_arg_iterator ArgBeg,
982                                           CallExpr::const_arg_iterator ArgEnd,
                                          bool ZeroInitialization) {
984   const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
985 
986   // Create a temporary for the loop index and initialize it with 0.
987   llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
988   llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
989   Builder.CreateStore(Zero, IndexPtr);
990 
991   // Start the loop with a block that tests the condition.
992   llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
993   llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
994 
995   EmitBlock(CondBlock);
996 
997   llvm::BasicBlock *ForBody = createBasicBlock("for.body");
998 
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
1001   llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
1002   llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
1003   // If the condition is true, execute the body.
1004   Builder.CreateCondBr(IsLess, ForBody, AfterFor);
1005 
1006   EmitBlock(ForBody);
1007 
1008   llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
1009   // Inside the loop body, emit the constructor call on the array element.
1010   Counter = Builder.CreateLoad(IndexPtr);
1011   llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
1012                                                    "arrayidx");
1013 
1014   // Zero initialize the storage, if requested.
1015   if (ZeroInitialization)
1016     EmitNullInitialization(Address,
1017                            getContext().getTypeDeclType(D->getParent()));
1018 
1019   // C++ [class.temporary]p4:
1020   // There are two contexts in which temporaries are destroyed at a different
1021   // point than the end of the full-expression. The first context is when a
1022   // default constructor is called to initialize an element of an array.
1023   // If the constructor has one or more default arguments, the destruction of
1024   // every temporary created in a default argument expression is sequenced
1025   // before the construction of the next array element, if any.
1026 
  // Enter a cleanup scope so that temporaries created in default argument
  // expressions are destroyed before the next element is constructed.
1028   {
1029     RunCleanupsScope Scope(*this);
1030 
1031     EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
1032                            ArgBeg, ArgEnd);
1033   }
1034 
1035   EmitBlock(ContinueBlock);
1036 
1037   // Emit the increment of the loop counter.
1038   llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
1039   Counter = Builder.CreateLoad(IndexPtr);
1040   NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
1041   Builder.CreateStore(NextVal, IndexPtr);
1042 
1043   // Finally, branch back up to the condition for the next iteration.
1044   EmitBranch(CondBlock);
1045 
1046   // Emit the fall-through block.
1047   EmitBlock(AfterFor, true);
1048 }
1049 
/// EmitCXXAggrDestructorCall - calls the destructor 'D' on the array
/// elements in reverse order of construction.
1052 void
1053 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1054                                            const ArrayType *Array,
1055                                            llvm::Value *This) {
1056   const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for destruction?");
1058   uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);
1059 
1060   const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
1061   llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
1062   EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
1063 }
1064 
/// EmitCXXAggrDestructorCall - calls the destructor 'D' on the array
/// elements in reverse order of construction.
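///
/// For illustration only (hypothetical, not from this file): for an array
/// of N elements this emits the equivalent of
///
///   for (i = N; i != 0; --i)
///     destroy the element at index i - 1;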
1067 void
1068 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1069                                            llvm::Value *UpperCount,
1070                                            llvm::Value *This) {
1071   const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
1072   llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);
1073 
  // Create a temporary for the loop index and initialize it with the number
  // of array elements.
1076   llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");
1077 
1078   // Store the number of elements in the index pointer.
1079   Builder.CreateStore(UpperCount, IndexPtr);
1080 
1081   // Start the loop with a block that tests the condition.
1082   llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
1083   llvm::BasicBlock *AfterFor = createBasicBlock("for.end");
1084 
1085   EmitBlock(CondBlock);
1086 
1087   llvm::BasicBlock *ForBody = createBasicBlock("for.body");
1088 
  // Generate: if (loop-index != 0) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeLTy);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, Zero, "isne");
1096   // If the condition is true, execute the body.
1097   Builder.CreateCondBr(IsNE, ForBody, AfterFor);
1098 
1099   EmitBlock(ForBody);
1100 
1101   llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the destructor call on the array element.
1103   Counter = Builder.CreateLoad(IndexPtr);
1104   Counter = Builder.CreateSub(Counter, One);
1105   llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
1106   EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);
1107 
1108   EmitBlock(ContinueBlock);
1109 
1110   // Emit the decrement of the loop counter.
1111   Counter = Builder.CreateLoad(IndexPtr);
1112   Counter = Builder.CreateSub(Counter, One, "dec");
1113   Builder.CreateStore(Counter, IndexPtr);
1114 
1115   // Finally, branch back up to the condition for the next iteration.
1116   EmitBranch(CondBlock);
1117 
1118   // Emit the fall-through block.
1119   EmitBlock(AfterFor, true);
1120 }
1121 
1122 void
1123 CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
1124                                         CXXCtorType Type, bool ForVirtualBase,
1125                                         llvm::Value *This,
1126                                         CallExpr::const_arg_iterator ArgBeg,
1127                                         CallExpr::const_arg_iterator ArgEnd) {
1128   if (D->isTrivial()) {
1129     if (ArgBeg == ArgEnd) {
1130       // Trivial default constructor, no codegen required.
1131       assert(D->isDefaultConstructor() &&
1132              "trivial 0-arg ctor not a default ctor");
1133       return;
1134     }
1135 
1136     assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
1137     assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");
1138 
1139     const Expr *E = (*ArgBeg);
1140     QualType Ty = E->getType();
1141     llvm::Value *Src = EmitLValue(E).getAddress();
1142     EmitAggregateCopy(This, Src, Ty);
1143     return;
1144   }
1145 
1146   llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
1147   llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);
1148 
1149   EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
1150 }
1151 
1152 void
1153 CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
1154                                                 CXXCtorType CtorType,
1155                                                 const FunctionArgList &Args) {
1156   CallArgList DelegateArgs;
1157 
1158   FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
1159   assert(I != E && "no parameters to constructor");
1160 
1161   // this
1162   DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
1163                                         I->second));
1164   ++I;
1165 
1166   // vtt
1167   if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
1168                                          /*ForVirtualBase=*/false)) {
1169     QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
1170     DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));
1171 
1172     if (CodeGenVTables::needsVTTParameter(CurGD)) {
1173       assert(I != E && "cannot skip vtt parameter, already done with args");
1174       assert(I->second == VoidPP && "skipping parameter not of vtt type");
1175       ++I;
1176     }
1177   }
1178 
1179   // Explicit arguments.
1180   for (; I != E; ++I) {
1181     const VarDecl *Param = I->first;
1182     QualType ArgType = Param->getType(); // because we're passing it to itself
1183     RValue Arg = EmitDelegateCallArg(Param);
1184 
1185     DelegateArgs.push_back(std::make_pair(Arg, ArgType));
1186   }
1187 
1188   EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
1189            CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
1190            ReturnValueSlot(), DelegateArgs, Ctor);
1191 }
1192 
1193 void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
1194                                             CXXDtorType Type,
1195                                             bool ForVirtualBase,
1196                                             llvm::Value *This) {
1197   llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
1198                                      ForVirtualBase);
1199   llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);
1200 
1201   EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
1202 }
1203 
1204 namespace {
1205   struct CallLocalDtor : EHScopeStack::Cleanup {
1206     const CXXDestructorDecl *Dtor;
1207     llvm::Value *Addr;
1208 
1209     CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
1210       : Dtor(D), Addr(Addr) {}
1211 
1212     void Emit(CodeGenFunction &CGF, bool IsForEH) {
1213       CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
1214                                 /*ForVirtualBase=*/false, Addr);
1215     }
1216   };
1217 }
1218 
1219 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
1220                                             llvm::Value *Addr) {
1221   EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
1222 }
1223 
1224 void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
1225   CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
1226   if (!ClassDecl) return;
1227   if (ClassDecl->hasTrivialDestructor()) return;
1228 
1229   const CXXDestructorDecl *D = ClassDecl->getDestructor();
1230   PushDestructorCleanup(D, Addr);
1231 }
1232 
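/// GetVirtualBaseClassOffset - Return the offset, in bytes, from 'This' to
/// its virtual base 'BaseClassDecl'.  The offset is loaded at run time from
/// the vbase-offset slot of the object's vtable.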
1233 llvm::Value *
1234 CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
1235                                            const CXXRecordDecl *ClassDecl,
1236                                            const CXXRecordDecl *BaseClassDecl) {
1237   const llvm::Type *Int8PtrTy =
1238     llvm::Type::getInt8Ty(VMContext)->getPointerTo();
1239 
1240   llvm::Value *VTablePtr = Builder.CreateBitCast(This,
1241                                                  Int8PtrTy->getPointerTo());
1242   VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");
1243 
1244   int64_t VBaseOffsetOffset =
1245     CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);
1246 
1247   llvm::Value *VBaseOffsetPtr =
1248     Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
1249   const llvm::Type *PtrDiffTy =
1250     ConvertType(getContext().getPointerDiffType());
1251 
1252   VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
1253                                          PtrDiffTy->getPointerTo());
1254 
1255   llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");
1256 
1257   return VBaseOffset;
1258 }
1259 
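/// InitializeVTablePointer - Store the correct vtable address point into the
/// vptr slot of the given base subobject.  If the current constructor or
/// destructor was passed a VTT and this base needs a construction vtable,
/// the address point is loaded from the VTT; otherwise it is taken directly
/// from the class's vtable.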
1260 void
1261 CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
1262                                          const CXXRecordDecl *NearestVBase,
1263                                          uint64_t OffsetFromNearestVBase,
1264                                          llvm::Constant *VTable,
1265                                          const CXXRecordDecl *VTableClass) {
1266   const CXXRecordDecl *RD = Base.getBase();
1267 
1268   // Compute the address point.
1269   llvm::Value *VTableAddressPoint;
1270 
1271   // Check if we need to use a vtable from the VTT.
1272   if (CodeGenVTables::needsVTTParameter(CurGD) &&
1273       (RD->getNumVBases() || NearestVBase)) {
1274     // Get the secondary vpointer index.
1275     uint64_t VirtualPointerIndex =
1276      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);
1277 
    // Load the VTT.
1279     llvm::Value *VTT = LoadCXXVTT();
1280     if (VirtualPointerIndex)
1281       VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);
1282 
1283     // And load the address point from the VTT.
1284     VTableAddressPoint = Builder.CreateLoad(VTT);
1285   } else {
1286     uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
1287     VTableAddressPoint =
1288       Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
1289   }
1290 
1291   // Compute where to store the address point.
1292   llvm::Value *VirtualOffset = 0;
1293   uint64_t NonVirtualOffset = 0;
1294 
1295   if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
1296     // We need to use the virtual base offset offset because the virtual base
1297     // might have a different offset in the most derived class.
1298     VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
1299                                               NearestVBase);
1300     NonVirtualOffset = OffsetFromNearestVBase / 8;
1301   } else {
1302     // We can just use the base offset in the complete class.
1303     NonVirtualOffset = Base.getBaseOffset() / 8;
1304   }
1305 
1306   // Apply the offsets.
1307   llvm::Value *VTableField = LoadCXXThis();
1308 
1309   if (NonVirtualOffset || VirtualOffset)
1310     VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
1311                                                   NonVirtualOffset,
1312                                                   VirtualOffset);
1313 
1314   // Finally, store the address point.
1315   const llvm::Type *AddressPointPtrTy =
1316     VTableAddressPoint->getType()->getPointerTo();
1317   VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
1318   Builder.CreateStore(VTableAddressPoint, VTableField);
1319 }
1320 
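/// InitializeVTablePointers - Initialize the vtable pointer of the given
/// base subobject and then recurse into its dynamic bases, visiting each
/// virtual base only once.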
1321 void
1322 CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
1323                                           const CXXRecordDecl *NearestVBase,
1324                                           uint64_t OffsetFromNearestVBase,
1325                                           bool BaseIsNonVirtualPrimaryBase,
1326                                           llvm::Constant *VTable,
1327                                           const CXXRecordDecl *VTableClass,
1328                                           VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base, the address point has
  // already been set.
1331   if (!BaseIsNonVirtualPrimaryBase) {
1332     // Initialize the vtable pointer for this base.
1333     InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
1334                             VTable, VTableClass);
1335   }
1336 
1337   const CXXRecordDecl *RD = Base.getBase();
1338 
1339   // Traverse bases.
1340   for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
1341        E = RD->bases_end(); I != E; ++I) {
1342     CXXRecordDecl *BaseDecl
1343       = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());
1344 
1345     // Ignore classes without a vtable.
1346     if (!BaseDecl->isDynamicClass())
1347       continue;
1348 
1349     uint64_t BaseOffset;
1350     uint64_t BaseOffsetFromNearestVBase;
1351     bool BaseDeclIsNonVirtualPrimaryBase;
1352 
1353     if (I->isVirtual()) {
1354       // Check if we've visited this virtual base before.
1355       if (!VBases.insert(BaseDecl))
1356         continue;
1357 
1358       const ASTRecordLayout &Layout =
1359         getContext().getASTRecordLayout(VTableClass);
1360 
1361       BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
1362       BaseOffsetFromNearestVBase = 0;
1363       BaseDeclIsNonVirtualPrimaryBase = false;
1364     } else {
1365       const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
1366 
1367       BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
1368       BaseOffsetFromNearestVBase =
1369         OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
1370       BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
1371     }
1372 
1373     InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
1374                              I->isVirtual() ? BaseDecl : NearestVBase,
1375                              BaseOffsetFromNearestVBase,
1376                              BaseDeclIsNonVirtualPrimaryBase,
1377                              VTable, VTableClass, VBases);
1378   }
1379 }
1380 
1381 void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
1382   // Ignore classes without a vtable.
1383   if (!RD->isDynamicClass())
1384     return;
1385 
1386   // Get the VTable.
1387   llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);
1388 
1389   // Initialize the vtable pointers for this class and all of its bases.
1390   VisitedVirtualBasesSetTy VBases;
1391   InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
1392                            /*OffsetFromNearestVBase=*/0,
1393                            /*BaseIsNonVirtualPrimaryBase=*/false,
1394                            VTable, RD, VBases);
1395 }
1396