//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"

using namespace clang;
using namespace CodeGen;

static uint64_t
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CXXBaseSpecifierArray::iterator Start,
                                 CXXBaseSpecifierArray::iterator End) {
  uint64_t Offset = 0;

  const CXXRecordDecl *RD = DerivedClass;

  for (CXXBaseSpecifierArray::iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  // FIXME: We should not use / 8 here.
  return Offset / 8;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                       const CXXBaseSpecifierArray &BasePath) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  uint64_t Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     BasePath.begin(), BasePath.end());
  if (!Offset)
    return 0;

  const llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset);
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  uint64_t Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
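  // The layout offset computed above appears to be in bits (note the "/ 8"
  // FIXME in ComputeNonVirtualBaseClassOffset); the i8 GEP below indexes
  // bytes, hence the division by 8.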
  llvm::Value *V = This;
  if (Offset) {
    const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                uint64_t NonVirtual, llvm::Value *Virtual) {
  const llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (NonVirtual)
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);

  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       const CXXBaseSpecifierArray &BasePath,
                                       bool NullCheckValue) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  CXXBaseSpecifierArray::iterator Start = BasePath.begin();
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  uint64_t NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, BasePath.end());

  // Get the base pointer type.
  const llvm::Type *BasePtrTy =
    ConvertType((BasePath.end()[-1])->getType())->getPointerTo();

  if (!NonVirtualOffset && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  if (VBase)
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                          const CXXBaseSpecifierArray &BasePath,
                                          bool NullCheckValue) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, BasePath);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
  Value = Builder.CreateSub(Value, NonVirtualOffset);
  Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    uint64_t BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) : Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().getVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXBaseOrMemberInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              BaseInit->isBaseVirtual());

  CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);

  if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) {
    // FIXME: Is this OK for C++0x delegating constructors?
    CodeGenFunction::EHCleanupBlock Cleanup(CGF);

    CXXDestructorDecl *DD = BaseClassDecl->getDestructor(CGF.getContext());
    CGF.EmitCXXDestructorCall(DD, Dtor_Base, isBaseVirtual, V);
  }
}

static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     llvm::Value *ArrayIndexVar,
                                     CXXBaseOrMemberInitializer *MemberInit,
                                     QualType T,
                                     unsigned Index) {
  if (Index == MemberInit->getNumArrayIndices()) {
    CodeGenFunction::CleanupScope Cleanups(CGF);

    llvm::Value *Dest = LHS.getAddress();
    if (ArrayIndexVar) {
      // If we have an array index variable, load it and use it as an offset.
      // Then, increment the value.
      llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
      Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
      llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
      Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
      CGF.Builder.CreateStore(Next, ArrayIndexVar);
    }

    CGF.EmitAggExpr(MemberInit->getInit(), Dest,
                    LHS.isVolatileQualified(),
                    /*IgnoreResult*/ false,
                    /*IsInitializer*/ true);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::CleanupScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
                             Array->getElementType(), Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXBaseOrMemberInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isMemberInitializer() &&
         "Must have member initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getMember();
  QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->getAnonUnionMember()) {
    Field = MemberInit->getAnonUnionMember();
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr, Field, 0);
    FieldType = Field->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
  }

  // FIXME: If there's no initializer and the CXXBaseOrMemberInitializer
  // was implicitly generated, we shouldn't be zeroing memory.
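  // Dispatch on the kind of member being initialized: bind references,
  // null-initialize arrays that have no initializer, store scalar and complex
  // values directly, and send everything else (aggregates, including arrays
  // of them) through EmitAggMemberInitializer below.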
  RValue RHS;
  if (FieldType->isReferenceType()) {
    RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(),
                                         /*IsInitializer=*/true);
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
    CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
  } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
    RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
    CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
                                LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    const ConstantArrayType *Array
      = CGF.getContext().getAsConstantArrayType(FieldType);
    if (Array && Constructor->isImplicit() &&
        Constructor->isCopyConstructor()) {
      const llvm::Type *SizeTy
        = CGF.ConvertType(CGF.getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
                                                           BasePtr);
      LHS = LValue::MakeAddr(BaseAddrPtr, CGF.MakeQualifiers(BaseElementTy));

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      CGF.Builder.CreateStore(Zero, ArrayIndexVar);

      // If we are copying an array of scalars or classes with trivial copy
      // constructors, perform a single aggregate copy.
      const RecordType *Record = BaseElementTy->getAs<RecordType>();
      if (!Record ||
          cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
        // Find the source pointer. We know it's the last argument because
        // we know we're in a copy constructor.
        unsigned SrcArgIndex = Args.size() - 1;
        llvm::Value *SrcPtr
          = CGF.Builder.CreateLoad(
              CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
        LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

        // Copy the aggregate.
        CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                              LHS.isVolatileQualified());
        return;
      }

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
        CGF.EmitLocalBlockVarDecl(*MemberInit->getArrayIndex(I));
    }

    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);

    if (!CGF.Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor()) {
      // FIXME: Is this OK for C++0x delegating constructors?
      CodeGenFunction::EHCleanupBlock Cleanup(CGF);

      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);

      CXXDestructorDecl *DD = RD->getDestructor(CGF.getContext());
      CGF.EmitCXXDestructorCall(DD, Dtor_Complete, /*ForVirtualBase=*/false,
                                LHS.getAddress());
    }
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  return true;
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  CXXTryStmtInfo TryInfo;
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));

  if (IsTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  unsigned CleanupStackSize = CleanupEntries.size();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
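  // For a function-try-block the statements live in the try block; an
  // implicitly-defined constructor may have no body at all.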
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  EmitCleanupBlocks(CleanupStackSize);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  const CXXRecordDecl *ClassDecl = CD->getParent();

  llvm::SmallVector<CXXBaseOrMemberInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXBaseOrMemberInitializer *Member = (*B);

    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    if (Member->isBaseInitializer())
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    else
      MemberInitializers.push_back(Member);
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I) {
    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
  }
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else --- unless we're in a deleting destructor, in which
  // case we're just going to call the complete destructor and then
  // call operator delete() on the way out.
  CXXTryStmtInfo TryInfo;
  bool isTryBody = (DtorType != Dtor_Deleting &&
                    Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  llvm::BasicBlock *DtorEpilogue = createBasicBlock("dtor.epilogue");
  PushCleanupBlock(DtorEpilogue);

  bool SkipBody = false; // should get jump-threaded

  // If this is the deleting variant, just invoke the complete
  // variant, then call the appropriate operator delete() on the way
  // out.
  if (DtorType == Dtor_Deleting) {
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    SkipBody = true;

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  } else if (!isTryBody && DtorType == Dtor_Complete) {
    EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    SkipBody = true;

  // Otherwise, we're in the base variant, so we need to ensure the
  // vtable ptrs are right before emitting the body.
  } else {
    InitializeVTablePointers(Dtor->getParent());
  }

  // Emit the body of the statement.
  if (SkipBody)
    (void) 0;
  else if (isTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);
  else {
    assert(Dtor->isImplicit() && "bodyless dtor not implicit");
    // nothing to do besides what's in the epilogue
  }

  // Jump to the cleanup block.
  CleanupBlockInfo Info = PopCleanupBlock();
  assert(Info.CleanupBlock == DtorEpilogue && "Block mismatch!");
  EmitBlock(DtorEpilogue);

  // Emit the destructor epilogue now.  If this is a complete
  // destructor with a function-try-block, perform the base epilogue
  // as well.
  if (isTryBody && DtorType == Dtor_Complete)
    EmitDtorEpilogue(Dtor, Dtor_Base);
  EmitDtorEpilogue(Dtor, DtorType);

  // Link up the cleanup information.
  if (Info.SwitchBlock)
    EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    EmitBlock(Info.EndBlock);

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);
}

/// EmitDtorEpilogue - Emit all code that comes at the end of a class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EmitDtorEpilogue(const CXXDestructorDecl *DD,
                                       CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // In a deleting destructor, we've already called the complete
  // destructor as a subroutine, so we just have to delete the
  // appropriate value.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EmitDeleteCall(DD->getOperatorDelete(), LoadCXXThis(),
                   getContext().getTagDeclType(ClassDecl));
    return;
  }

  // For complete destructors, we've already called the base
  // destructor (in GenerateBody), so we just need to destruct all the
  // virtual bases.
  if (DtorType == Dtor_Complete) {
    // Handle virtual bases.
    for (CXXRecordDecl::reverse_base_class_const_iterator I =
           ClassDecl->vbases_rbegin(), E = ClassDecl->vbases_rend();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;
      const CXXDestructorDecl *D = BaseClassDecl->getDestructor(getContext());
      llvm::Value *V =
        GetAddressOfDirectBaseInCompleteClass(LoadCXXThis(),
                                              ClassDecl, BaseClassDecl,
                                              /*BaseIsVirtual=*/true);
      EmitCXXDestructorCall(D, Dtor_Base, /*ForVirtualBase=*/true, V);
    }
    return;
  }

  assert(DtorType == Dtor_Base);

  // Collect the fields.
  llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    QualType FieldType = getContext().getCanonicalType(Field->getType());
    FieldType = getContext().getBaseElementType(FieldType);

    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      continue;

    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (FieldClassDecl->hasTrivialDestructor())
      continue;

    FieldDecls.push_back(Field);
  }

  // Now destroy the fields.
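  // Walk the collected fields in reverse declaration order; arrays of class
  // type are destroyed element by element via EmitCXXAggrDestructorCall.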
  for (size_t i = FieldDecls.size(); i > 0; --i) {
    const FieldDecl *Field = FieldDecls[i - 1];

    QualType FieldType = Field->getType();
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(FieldType);

    const RecordType *RT = FieldType->getAs<RecordType>();
    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

    llvm::Value *ThisPtr = LoadCXXThis();

    LValue LHS = EmitLValueForField(ThisPtr, Field,
                                    // FIXME: Qualifiers?
                                    /*CVRQualifiers=*/0);
    if (Array) {
      const llvm::Type *BasePtr = ConvertType(FieldType);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr =
        Builder.CreateBitCast(LHS.getAddress(), BasePtr);
      EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                Array, BaseAddrPtr);
    } else
      EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                            Dtor_Complete, /*ForVirtualBase=*/false,
                            LHS.getAddress());
  }

  // Destroy non-virtual bases.
  for (CXXRecordDecl::reverse_base_class_const_iterator I =
        ClassDecl->bases_rbegin(), E = ClassDecl->bases_rend(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    const CXXDestructorDecl *D = BaseClassDecl->getDestructor(getContext());
    llvm::Value *V =
      GetAddressOfDirectBaseInCompleteClass(LoadCXXThis(), ClassDecl,
                                            BaseClassDecl,
                                            /*BaseIsVirtual=*/false);

    EmitCXXDestructorCall(D, Dtor_Base, /*ForVirtualBase=*/false, V);
  }
}

/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the default constructor on individual members of the
/// array.
/// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
/// array type and 'ArrayPtr' points to the beginning of the array.
/// It is assumed that all relevant checks have been made by the caller.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            const ConstantArrayType *ArrayTy,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd) {

  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  llvm::Value * NumElements =
    llvm::ConstantInt::get(SizeTy,
                           getContext().getConstantArrayElementCount(ArrayTy));

  EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            llvm::Value *NumElements,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
  Builder.CreateStore(Zero, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
                                                   "arrayidx");

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  // Keep track of the current number of live temporaries.
  {
    CXXTemporariesCleanupScope Scope(*this);

    EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
                           ArgBeg, ArgEnd);
  }

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           const ArrayType *Array,
                                           llvm::Value *This) {
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for destruction?");
  uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);

  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
  EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           llvm::Value *UpperCount,
                                           llvm::Value *This) {
  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);

  // Create a temporary for the loop index and initialize it with count of
  // array elements.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");

  // Store the number of elements in the index pointer.
  Builder.CreateStore(UpperCount, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index != 0) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value* zeroConstant =
    llvm::Constant::getNullValue(SizeLTy);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
                                           "isne");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsNE, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the destructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);

  EmitBlock(ContinueBlock);

  // Emit the decrement of the loop counter.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One, "dec");
  Builder.CreateStore(Counter, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// GenerateCXXAggrDestructorHelper - Generates a helper function which, when
/// invoked, calls the default destructor on array elements in reverse order of
/// construction.
llvm::Constant *
CodeGenFunction::GenerateCXXAggrDestructorHelper(const CXXDestructorDecl *D,
                                                 const ArrayType *Array,
                                                 llvm::Value *This) {
  FunctionArgList Args;
  ImplicitParamDecl *Dst =
    ImplicitParamDecl::Create(getContext(), 0,
                              SourceLocation(), 0,
                              getContext().getPointerType(getContext().VoidTy));
  Args.push_back(std::make_pair(Dst, Dst->getType()));

  llvm::SmallString<16> Name;
  llvm::raw_svector_ostream(Name) << "__tcf_" << (++UniqueAggrDestructorCount);
  QualType R = getContext().VoidTy;
  const CGFunctionInfo &FI
    = CGM.getTypes().getFunctionInfo(R, Args, FunctionType::ExtInfo());
  const llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FI, false);
  llvm::Function *Fn =
    llvm::Function::Create(FTy, llvm::GlobalValue::InternalLinkage,
                           Name.str(),
                           &CGM.getModule());
  IdentifierInfo *II = &CGM.getContext().Idents.get(Name.str());
  FunctionDecl *FD = FunctionDecl::Create(getContext(),
                                          getContext().getTranslationUnitDecl(),
                                          SourceLocation(), II, R, 0,
                                          FunctionDecl::Static,
                                          FunctionDecl::None,
                                          false, true);
  StartFunction(FD, R, Fn, Args, SourceLocation());
  QualType BaseElementTy = getContext().getBaseElementType(Array);
  const llvm::Type *BasePtr = ConvertType(BaseElementTy);
  BasePtr = llvm::PointerType::getUnqual(BasePtr);
  llvm::Value *BaseAddrPtr = Builder.CreateBitCast(This, BasePtr);
  EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
  FinishFunction();
  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),
                                              0);
  llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty);
  return m;
}


void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
                                        I->second));
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert(I->second == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
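  // Forward the remaining constructor parameters to the base variant,
  // re-emitting each one with EmitDelegateCallArg.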
  for (; I != E; ++I) {
    const VarDecl *Param = I->first;
    QualType ArgType = Param->getType(); // because we're passing it to itself
    RValue Arg = EmitDelegateCallArg(Param);

    DelegateArgs.push_back(std::make_pair(Arg, ArgType));
  }

  EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  llvm::Value *VTablePtr = Builder.CreateBitCast(This,
                                                 Int8PtrTy->getPointerTo());
  VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");

  int64_t VBaseOffsetOffset =
    CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
  const llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         uint64_t OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  uint64_t NonVirtualOffset = 0;

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase / 8;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset() / 8;
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (NonVirtualOffset || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  const llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          uint64_t OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    uint64_t BaseOffset;
    uint64_t BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = 0;
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/0,
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}