//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"

using namespace clang;
using namespace CodeGen;

/// ComputeNonVirtualBaseClassOffset - Walk the base-specifier path
/// [Start, End) starting at DerivedClass, accumulating each step's
/// non-virtual base offset (in bits) from the AST record layout, and
/// return the total converted to bytes.  The path must not contain any
/// virtual bases (asserted below).
static uint64_t
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  uint64_t Offset = 0;

  // Current class whose layout we are stepping through; updated as we
  // descend one base at a time.
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset (accumulated in bits; converted to bytes on return).
    Offset += Layout.getBaseClassOffsetInBits(BaseDecl);

    RD = BaseDecl;
  }

  // FIXME: We should not use / 8 here.
  return Offset / 8;
}

/// GetNonVirtualBaseClassOffset - Return the constant byte offset of the
/// non-virtual base subobject named by the path [PathBegin, PathEnd)
/// within ClassDecl, as a ptrdiff_t-typed constant.  Returns null when the
/// offset is zero so callers can skip the adjustment entirely.
llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  uint64_t Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (!Offset)
    return 0;

  const llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset);
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.  Because Base is a *direct*
  // base, a single layout lookup suffices (no path walk needed).
  uint64_t Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffsetInBits(Base);
  else
    Offset = Layout.getBaseClassOffsetInBits(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset) {
    // Go through i8* so the GEP advances by bytes (Offset is in bits,
    // hence the / 8).
    const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

/// ApplyNonVirtualAndVirtualOffset - Advance ThisPtr by the sum of a
/// constant non-virtual byte offset and a runtime virtual-base offset
/// (either may be absent).  The addition is performed on an i8* so the
/// offsets are interpreted in bytes; the result is returned as i8*.
static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                uint64_t NonVirtual, llvm::Value *Virtual) {
  const llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (NonVirtual)
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);

  // Fold the two components into a single offset value.
  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

/// GetAddressOfBaseClass - Convert a derived-class pointer to a pointer to
/// the base subobject named by [PathBegin, PathEnd).  The path may begin
/// with (at most) one virtual base, whose offset must be loaded from the
/// vtable at runtime; the remainder contributes a constant offset.  When
/// NullCheckValue is set, a null input yields a null result via a
/// branch-and-phi rather than being adjusted.
llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // The non-virtual portion of the path is measured from the virtual base
  // (if any), since the virtual-base adjustment lands there first.
  uint64_t NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // Get the base pointer type.
  const llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  if (!NonVirtualOffset && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  if (VBase)
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    // Merge: adjusted pointer from the not-null path, null from the
    // null path.
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetAddressOfDerivedClass - Convert a base-class pointer back to a
/// pointer to Derived by *subtracting* the non-virtual offset of the path
/// [PathBegin, PathEnd).  Virtual bases cannot appear on this path (there
/// is no constant offset for them), so only a constant adjustment is
/// needed.  NullCheckValue behaves as in GetAddressOfBaseClass.
llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset: done in the integer domain because we subtract.
  Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
  Value = Builder.CreateSub(Value, NonVirtualOffset);
  Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);

  // Just cast.
  // NOTE(review): Value already has type DerivedPtrTy after the IntToPtr
  // above, so this bitcast is a no-op; harmless but removable.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases, or null if no VTT is
/// needed.  Inside a constructor that itself received a VTT, the sub-VTT
/// is addressed off that parameter; otherwise the class's VTT global is
/// used directly.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  // RD: class whose ctor/dtor is currently being emitted.
  // Base: class whose ctor/dtor we are about to call.
  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    uint64_t BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffsetInBits(Base) :
      Layout.getBaseClassOffsetInBits(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().getVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // The derived class is recovered from the function being emitted
      // when the cleanup fires, not captured at push time.
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

/// BaseInitializerUsesThis - True if the given base-initializer expression
/// refers to 'this' (explicitly or implicitly) anywhere in its evaluated
/// subexpressions.
static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

/// EmitBaseInitializer - Emit a single base-class member initializer from a
/// constructor's init list, constructing the base subobject in place and
/// (when exceptions are enabled) pushing an EH cleanup to destroy it if a
/// later initializer throws.
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);

  AggValueSlot AggSlot = AggValueSlot::forAddr(V, false, /*Lifetime*/ true);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

/// EmitAggMemberInitializer - Recursively emit initialization of a
/// (possibly multi-dimensional) array member.  Each recursion level peels
/// one array dimension, emitting a for-loop over that dimension's index
/// variable; at the innermost level (Index == number of array indices) the
/// element initializer itself is emitted, offset by ArrayIndexVar if one
/// is in use.
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     llvm::Value *ArrayIndexVar,
                                     CXXCtorInitializer *MemberInit,
                                     QualType T,
                                     unsigned Index) {
  if (Index == MemberInit->getNumArrayIndices()) {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    llvm::Value *Dest = LHS.getAddress();
    if (ArrayIndexVar) {
      // If we have an array index variable, load it and use it as an offset.
      // Then, increment the value.
      llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
      Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
      llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
      Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
      CGF.Builder.CreateStore(Next, ArrayIndexVar);
    }

    AggValueSlot Slot = AggValueSlot::forAddr(Dest, LHS.isVolatileQualified(),
                                              /*Lifetime*/ true);

    CGF.EmitAggExpr(MemberInit->getInit(), Slot);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
                              CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
                             Array->getElementType(), Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

namespace {
  /// Call the destructor of a non-static data member, used as an EH
  /// cleanup after the member has been constructed.
  struct CallMemberDtor : EHScopeStack::Cleanup {
    FieldDecl *Field;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(FieldDecl *Field, CXXDestructorDecl *Dtor)
      : Field(Field), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // FIXME: Is this OK for C++0x delegating constructors?
      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);

      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                LHS.getAddress());
    }
  };
}

/// EmitMemberInitializer - Emit a single non-static data member initializer
/// from a constructor's init list.  Dispatches on the member's type:
/// references are bound, scalars stored, complex values written directly,
/// and aggregates (including constant-size arrays, with a fast memcpy path
/// for implicit copy constructors of trivially-copyable elements) are
/// emitted via EmitAggMemberInitializer.  An EH cleanup destroying the
/// member is pushed when needed.
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->isIndirectMemberInitializer()) {
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr,
                                           MemberInit->getIndirectMember(), 0);
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
  }

  // FIXME: If there's no initializer and the CXXCtorInitializer
  // was implicitly generated, we shouldn't be zeroing memory.
  RValue RHS;
  if (FieldType->isReferenceType()) {
    RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(), Field);
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
    CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
  } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
    RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
    CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
                                LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    const ConstantArrayType *Array
      = CGF.getContext().getAsConstantArrayType(FieldType);
    if (Array && Constructor->isImplicit() &&
        Constructor->isCopyConstructor()) {
      const llvm::Type *SizeTy
        = CGF.ConvertType(CGF.getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
                                                           BasePtr);
      LHS = CGF.MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      CGF.Builder.CreateStore(Zero, ArrayIndexVar);

      // If we are copying an array of scalars or classes with trivial copy
      // constructors, perform a single aggregate copy.
      const RecordType *Record = BaseElementTy->getAs<RecordType>();
      if (!Record ||
          cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
        // Find the source pointer.  We know it's the last argument because
        // we know we're in a copy constructor.
        unsigned SrcArgIndex = Args.size() - 1;
        llvm::Value *SrcPtr
          = CGF.Builder.CreateLoad(
                                 CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
        LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

        // Copy the aggregate.
        CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                              LHS.isVolatileQualified());
        return;
      }

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
        CGF.EmitAutoVarDecl(*MemberInit->getArrayIndex(I));
    }

    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);

    if (!CGF.Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
                                              RD->getDestructor());
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
619 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) { 620 621 // Currently we disable the optimization for classes with virtual 622 // bases because (1) the addresses of parameter variables need to be 623 // consistent across all initializers but (2) the delegate function 624 // call necessarily creates a second copy of the parameter variable. 625 // 626 // The limiting example (purely theoretical AFAIK): 627 // struct A { A(int &c) { c++; } }; 628 // struct B : virtual A { 629 // B(int count) : A(count) { printf("%d\n", count); } 630 // }; 631 // ...although even this example could in principle be emitted as a 632 // delegation since the address of the parameter doesn't escape. 633 if (Ctor->getParent()->getNumVBases()) { 634 // TODO: white-list trivial vbase initializers. This case wouldn't 635 // be subject to the restrictions below. 636 637 // TODO: white-list cases where: 638 // - there are no non-reference parameters to the constructor 639 // - the initializers don't access any non-reference parameters 640 // - the initializers don't take the address of non-reference 641 // parameters 642 // - etc. 643 // If we ever add any of the above cases, remember that: 644 // - function-try-blocks will always blacklist this optimization 645 // - we need to perform the constructor prologue and cleanup in 646 // EmitConstructorBody. 647 648 return false; 649 } 650 651 // We also disable the optimization for variadic functions because 652 // it's impossible to "re-pass" varargs. 653 if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic()) 654 return false; 655 656 return true; 657 } 658 659 /// EmitConstructorBody - Emits the body of the current constructor. 
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitStopPoint(Builder);
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable, so the handler also covers exceptions thrown by base
  // and member initializers.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Remember the cleanup depth so we can pop exactly the cleanups pushed
  // by the prologue's initializers below.
  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  const CXXRecordDecl *ClassDecl = CD->getParent();

  llvm::SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  // Emit base initializers first; member initializers are deferred until
  // after the vtable pointers are installed below.
  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer())
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    else
      MemberInitializers.push_back(Member);
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  /// Destroy a constant-size array member element-by-element; used as a
  /// destructor-epilogue cleanup.
  struct CallArrayFieldDtor : EHScopeStack::Cleanup {
    const FieldDecl *Field;
    CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      QualType FieldType = Field->getType();
      const ConstantArrayType *Array =
        CGF.getContext().getAsConstantArrayType(FieldType);

      // Strip any nested array dimensions to get the element record type.
      QualType BaseType =
        CGF.getContext().getBaseElementType(Array->getElementType());
      const CXXRecordDecl *FieldClassDecl = BaseType->getAsCXXRecordDecl();

      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
                                          // FIXME: Qualifiers?
                                          /*CVRQualifiers=*/0);

      const llvm::Type *BasePtr = CGF.ConvertType(BaseType)->getPointerTo();
      llvm::Value *BaseAddrPtr =
        CGF.Builder.CreateBitCast(LHS.getAddress(), BasePtr);
      CGF.EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(),
                                    Array, BaseAddrPtr);
    }
  };

  /// Destroy a single (non-array) class-type member; used as a
  /// destructor-epilogue cleanup.
  struct CallFieldDtor : EHScopeStack::Cleanup {
    const FieldDecl *Field;
    CallFieldDtor(const FieldDecl *Field) : Field(Field) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      const CXXRecordDecl *FieldClassDecl =
        Field->getType()->getAsCXXRecordDecl();

      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field,
                                          // FIXME: Qualifiers?
                                          /*CVRQualifiers=*/0);

      CGF.EmitCXXDestructorCall(FieldClassDecl->getDestructor(),
                                Dtor_Complete, /*ForVirtualBase=*/false,
                                LHS.getAddress());
    }
  };
}

/// EmitDtorEpilogue - Emit all code that comes at the end of class's
/// destructor.  This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
           ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
           I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
        ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    // Look through constant arrays to the element type; array fields use
    // the aggregate-destroy cleanup below.
    QualType FieldType = getContext().getCanonicalType(Field->getType());
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(Array->getElementType());

    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      continue;

    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (FieldClassDecl->hasTrivialDestructor())
      continue;

    if (Array)
      EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
    else
      EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
  }
}

/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the default constructor on individual members of the
/// array.
/// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
/// array type and 'ArrayPtr' points to the beginning of the array.
/// It is assumed that all relevant checks have been made by the caller.
///
/// \param ZeroInitialization True if each element should be zero-initialized
/// before it is constructed.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            const ConstantArrayType *ArrayTy,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd,
                                            bool ZeroInitialization) {

  // Turn the constant array bound into an llvm size_t constant and delegate
  // to the variable-count overload below.
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  llvm::Value * NumElements =
    llvm::ConstantInt::get(SizeTy,
                           getContext().getConstantArrayElementCount(ArrayTy));

  EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd,
                             ZeroInitialization);
}

// Emits a counted loop that constructs NumElements array elements in
// ascending order, calling constructor 'D' on each one.  ArrayPtr must point
// at the first element.  The emitted control flow is:
//   for.cond -> for.body -> for.inc -> for.cond ... -> for.end
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            llvm::Value *NumElements,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd,
                                            bool ZeroInitialization) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
  Builder.CreateStore(Zero, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
                                                   "arrayidx");

  // Zero initialize the storage, if requested.
  if (ZeroInitialization)
    EmitNullInitialization(Address,
                           getContext().getTypeDeclType(D->getParent()));

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  // Scope any per-element temporaries so they are cleaned up before the
  // next iteration's construction.
  {
    RunCleanupsScope Scope(*this);

    EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
                           ArgBeg, ArgEnd);
  }

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           const ArrayType *Array,
                                           llvm::Value *This) {
  // Only constant-size arrays are supported here; the element count becomes
  // a size_t constant fed to the counted-loop overload below.
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for destruction ?");
  uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);

  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
  EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.  The loop counter starts at
/// UpperCount and is decremented to zero, so element [UpperCount-1] is
/// destroyed first and element [0] last.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           llvm::Value *UpperCount,
                                           llvm::Value *This) {
  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);

  // Create a temporary for the loop index and initialize it with count of
  // array elements.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");

  // Store the number of elements in the index pointer.
  Builder.CreateStore(UpperCount, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index != 0) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value* zeroConstant =
    llvm::Constant::getNullValue(SizeLTy);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
                                           "isne");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsNE, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the destructor call on the array element at
  // index (counter - 1); the counter itself is decremented in for.inc.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);

  EmitBlock(ContinueBlock);

  // Emit the decrement of the loop counter.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One, "dec");
  Builder.CreateStore(Counter, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXConstructorCall - Emit a call to constructor 'D' of kind 'Type' on
/// the object at 'This', with the arguments [ArgBeg, ArgEnd).  Trivial
/// constructors are lowered without a call: a trivial default constructor
/// emits nothing, and a trivial copy constructor becomes an aggregate copy.
void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");

    // Trivial copy: just copy the source object's bytes.
    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  // Non-trivial: emit a real call, passing the VTT parameter if this
  // constructor variant needs one.
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

/// EmitSynthesizedCXXCopyCtorCall - Emit a call to the copy constructor 'D'
/// copying 'Src' into 'This'.  Unlike EmitCXXConstructorCall, the source
/// address is supplied directly ('Src') rather than as the first argument
/// expression; [ArgBeg, ArgEnd) still describes the full argument list and
/// its first entry is skipped.
void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.push_back(std::make_pair(RValue::get(This),
                                D->getThisType(getContext())));


  // Push the src ptr, cast to the parameter type the prototype expects.
  QualType QT = *(FPT->arg_type_begin());
  const llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.push_back(std::make_pair(RValue::get(Src), QT));

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    QualType ArgType = *I;
    Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
                                  ArgType));
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
                                  ArgType));
  }

  QualType ResultType = FPT->getResultType();
  EmitCall(CGM.getTypes().getFunctionInfo(ResultType, Args,
                                          FPT->getExtInfo()),
           Callee, ReturnValueSlot(), Args, D);
}

/// EmitDelegateCXXConstructorCall - Emit a call from one constructor variant
/// to another (e.g. complete -> base), forwarding this function's own
/// parameters ('Args') straight through as the delegated call's arguments.
void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
                                        I->second));
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));

    // If the current function also takes a VTT parameter, skip over it in
    // the forwarded parameter list so the explicit arguments line up.
    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert(I->second == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *Param = I->first;
    QualType ArgType = Param->getType(); // because we're passing it to itself
    RValue Arg = EmitDelegateCallArg(Param);

    DelegateArgs.push_back(std::make_pair(Arg, ArgType));
  }

  EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

/// EmitCXXDestructorCall - Emit a call to destructor 'DD' of kind 'Type' on
/// the object at 'This', passing the VTT parameter if that variant needs one.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

namespace {
  // Cleanup: run the complete destructor on a local object at a fixed
  // address when its scope is exited (normally or via EH).
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}

/// PushDestructorCleanup - Push a cleanup that calls destructor 'D' on the
/// object at 'Addr' when the enclosing scope is exited.
void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

/// PushDestructorCleanup - Convenience overload: push a destructor cleanup
/// for type 'T' if (and only if) it is a class type with a non-trivial
/// destructor.
void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  PushDestructorCleanup(D, Addr);
}

/// GetVirtualBaseClassOffset - Emit code that loads, from the vtable of the
/// object at 'This', the dynamic offset of virtual base 'BaseClassDecl'
/// within 'ClassDecl'.  Returns the loaded offset as a ptrdiff_t value.
llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                          const CXXRecordDecl *BaseClassDecl) {
  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  // The vbase offset is stored at a statically known (negative) offset from
  // the vtable address point.
  int64_t VBaseOffsetOffset =
    CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
  const llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

/// InitializeVTablePointer - Emit the store of one vtable address point into
/// the subobject 'Base' of the object under construction.
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         uint64_t OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT (i.e. we are emitting a
  // base-object constructor/destructor and this subobject involves virtual
  // bases).
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  uint64_t NonVirtualOffset = 0;

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    // FIXME: offsets are tracked in bits; / 8 converts to bytes here.
    NonVirtualOffset = OffsetFromNearestVBase / 8;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset() / 8;
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (NonVirtualOffset || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  const llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}

/// InitializeVTablePointers - Recursively initialize the vtable pointer of
/// the subobject 'Base' and of all its dynamic bases, visiting each virtual
/// base only once (tracked in 'VBases').
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          uint64_t OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    uint64_t BaseOffset;
    uint64_t BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      // Virtual bases are laid out relative to the most derived class.
      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffsetInBits(BaseDecl);
      BaseOffsetFromNearestVBase = 0;
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset =
        Base.getBaseOffset() + Layout.getBaseClassOffsetInBits(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffsetInBits(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

/// InitializeVTablePointers - Initialize all the vtable pointers of an
/// object of class 'RD' that is under construction at 'this'.
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/0,
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

/// GetVTablePtr - Load the vtable pointer stored at the start of the object
/// 'This', viewed through pointer type 'Ty'.
llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           const llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  return Builder.CreateLoad(VTablePtrSrc, "vtable");
}