//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"

using namespace clang;
using namespace CodeGen;

/// ComputeNonVirtualBaseClassOffset - Walk the [Start, End) portion of a
/// base-specifier path, starting at DerivedClass, and accumulate the
/// statically-known offset of each (non-virtual) step.  Returns the total
/// offset of the final base subobject, converted from bits to bytes.
static uint64_t
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CXXBaseSpecifierArray::iterator Start,
                                 CXXBaseSpecifierArray::iterator End) {
  // Accumulated in the units returned by getBaseClassOffset (bits);
  // converted on return.
  uint64_t Offset = 0;

  const CXXRecordDecl *RD = DerivedClass;

  for (CXXBaseSpecifierArray::iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset of this base within the current class.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    // Descend: the next base specifier is relative to BaseDecl.
    RD = BaseDecl;
  }

  // FIXME: We should not use / 8 here.
  return Offset / 8;
}

/// GetNonVirtualBaseClassOffset - Return the constant byte offset (of
/// pointer-difference type) of the base described by BasePath within
/// ClassDecl, or a null Constant* if the offset is zero.
llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                       const CXXBaseSpecifierArray &BasePath) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  uint64_t Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     BasePath.begin(), BasePath.end());
  // A zero offset is reported as null so callers can cheaply test for
  // "no adjustment needed".
  if (!Offset)
    return 0;

  const llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset);
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
         == ConvertType(Derived));

  // Compute the offset of the virtual base.
  uint64_t Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset) {
    // The layout offset is in bits; do the adjustment through an i8* so
    // the GEP is a raw byte offset (Offset / 8).
    const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

/// ApplyNonVirtualAndVirtualOffset - Add the combination of a constant
/// non-virtual offset and a runtime virtual offset (either may be absent)
/// to ThisPtr.  The returned pointer has i8* type; callers cast it back.
static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                uint64_t NonVirtual, llvm::Value *Virtual) {
  const llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (NonVirtual)
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);

  // Fold the two offsets into a single value.  NOTE(review): if both
  // NonVirtual and Virtual are absent, BaseOffset is null here and the GEP
  // below would be malformed — callers are expected to pass at least one.
  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

/// GetAddressOfBaseClass - Convert 'Value', a pointer to Derived, into a
/// pointer to the base subobject named by BasePath.  A leading virtual base
/// in the path is resolved at runtime via its vtable offset; the remainder
/// of the path is a constant adjustment.  If NullCheckValue is set, a null
/// input produces a null result (emitted as a branch-and-phi diamond).
llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       const CXXBaseSpecifierArray &BasePath,
                                       bool NullCheckValue) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  CXXBaseSpecifierArray::iterator Start = BasePath.begin();
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // The static part of the offset starts from the virtual base, if any;
  // otherwise from Derived itself.
  uint64_t NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, BasePath.end());

  // Get the base pointer type.
  const llvm::Type *BasePtrTy =
    ConvertType((BasePath.end()[-1])->getType())->getPointerTo();

  if (!NonVirtualOffset && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  // The virtual-base displacement is only known at runtime.
  if (VBase)
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    // Merge the adjusted pointer with a null value from the null path.
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetAddressOfDerivedClass - Convert 'Value', a pointer to the base named
/// by BasePath, back into a pointer to Derived by subtracting the constant
/// non-virtual base offset.  If NullCheckValue is set, a null input produces
/// a null result.
llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                          const CXXBaseSpecifierArray &BasePath,
                                          bool NullCheckValue) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, BasePath);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset: derived = (Derived*)((intptr)base - offset).
  Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
  Value = Builder.CreateSub(Value, NonVirtualOffset);
  Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);

  // Just cast.
  // NOTE(review): Value already has type DerivedPtrTy after the IntToPtr
  // above, so this bitcast appears to be a no-op.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    // Merge the adjusted pointer with a null value from the null path.
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  // RD is the class whose ctor/dtor is currently being emitted; Base is the
  // class whose ctor/dtor is being called.
  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    // Locate the sub-VTT for the base subobject at its layout offset.
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    uint64_t BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) : Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().getVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

/// EmitBaseInitializer - Emit one base-class initializer from a constructor's
/// initializer list and, when exceptions are enabled and the base has a
/// non-trivial destructor, an EH cleanup that destroys the base if a later
/// initializer throws.
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXBaseOrMemberInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              BaseInit->isBaseVirtual());

  CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);

  if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) {
    // FIXME: Is this OK for C++0x delegating constructors?
    CodeGenFunction::EHCleanupBlock Cleanup(CGF);

    CXXDestructorDecl *DD = BaseClassDecl->getDestructor(CGF.getContext());
    CGF.EmitCXXDestructorCall(DD, Dtor_Base, isBaseVirtual, V);
  }
}

/// EmitAggMemberInitializer - Recursively emit initialization of a (possibly
/// multi-dimensional) aggregate member.  At recursion depth 'Index', either
/// emit the element initialization itself (when all of the initializer's
/// array-index variables have been consumed) or emit one more level of
/// for-loop over the array dimension described by 'T'.
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     llvm::Value *ArrayIndexVar,
                                     CXXBaseOrMemberInitializer *MemberInit,
                                     QualType T,
                                     unsigned Index) {
  if (Index == MemberInit->getNumArrayIndices()) {
    CodeGenFunction::CleanupScope Cleanups(CGF);

    llvm::Value *Dest = LHS.getAddress();
    if (ArrayIndexVar) {
      // If we have an array index variable, load it and use it as an offset.
      // Then, increment the value.
      llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
      Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
      llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
      Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
      CGF.Builder.CreateStore(Next, ArrayIndexVar);
    }

    CGF.EmitAggExpr(MemberInit->getInit(), Dest,
                    LHS.isVolatileQualified(),
                    /*IgnoreResult*/ false,
                    /*IsInitializer*/ true);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
                              CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::CleanupScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
                             Array->getElementType(), Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

/// EmitMemberInitializer - Emit one non-static data member initializer from
/// a constructor's initializer list, dispatching on the member's type
/// (reference, zero-filled array, scalar, complex, or aggregate), and emit
/// the EH cleanup for members with non-trivial destructors.
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXBaseOrMemberInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isMemberInitializer() &&
         "Must have member initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getMember();
  QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->getAnonUnionMember()) {
    Field = MemberInit->getAnonUnionMember();
    LHS = CGF.EmitLValueForField(LHS.getAddress(), Field, 0);
    FieldType = Field->getType();
  }

  // FIXME: If there's no initializer and the CXXBaseOrMemberInitializer
  // was implicitly generated, we shouldn't be zeroing memory.
  RValue RHS;
  if (FieldType->isReferenceType()) {
    // Reference members are bound, not constructed.
    RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(),
                                         /*IsInitializer=*/true);
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
    // Array member with no initializer expression: zero-fill it.
    CGF.EmitMemSetToZero(LHS.getAddress(), Field->getType());
  } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
    RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit(), true));
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
    CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
                                LHS.isVolatileQualified());
  } else {
    // Aggregate member (class type, or array thereof).
    llvm::Value *ArrayIndexVar = 0;
    const ConstantArrayType *Array
      = CGF.getContext().getAsConstantArrayType(FieldType);
    if (Array && Constructor->isImplicit() &&
        Constructor->isCopyConstructor()) {
      const llvm::Type *SizeTy
        = CGF.ConvertType(CGF.getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
                                                           BasePtr);
      LHS = LValue::MakeAddr(BaseAddrPtr, CGF.MakeQualifiers(BaseElementTy));

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      CGF.Builder.CreateStore(Zero, ArrayIndexVar);

      // If we are copying an array of scalars or classes with trivial copy
      // constructors, perform a single aggregate copy.
      const RecordType *Record = BaseElementTy->getAs<RecordType>();
      if (!Record ||
          cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
        // Find the source pointer. We know it's the last argument because
        // we know we're in a copy constructor.
        unsigned SrcArgIndex = Args.size() - 1;
        llvm::Value *SrcPtr
          = CGF.Builder.CreateLoad(
                                CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
        LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

        // Copy the aggregate.
        CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                              LHS.isVolatileQualified());
        return;
      }

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
        CGF.EmitLocalBlockVarDecl(*MemberInit->getArrayIndex(I));
    }

    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);

    if (!CGF.Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor()) {
      // FIXME: Is this OK for C++0x delegating constructors?
      CodeGenFunction::EHCleanupBlock Cleanup(CGF);

      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);

      CXXDestructorDecl *DD = RD->getDestructor(CGF.getContext());
      CGF.EmitCXXDestructorCall(DD, Dtor_Complete, /*ForVirtualBase=*/false,
                                LHS.getAddress());
    }
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
552 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) { 553 554 // Currently we disable the optimization for classes with virtual 555 // bases because (1) the addresses of parameter variables need to be 556 // consistent across all initializers but (2) the delegate function 557 // call necessarily creates a second copy of the parameter variable. 558 // 559 // The limiting example (purely theoretical AFAIK): 560 // struct A { A(int &c) { c++; } }; 561 // struct B : virtual A { 562 // B(int count) : A(count) { printf("%d\n", count); } 563 // }; 564 // ...although even this example could in principle be emitted as a 565 // delegation since the address of the parameter doesn't escape. 566 if (Ctor->getParent()->getNumVBases()) { 567 // TODO: white-list trivial vbase initializers. This case wouldn't 568 // be subject to the restrictions below. 569 570 // TODO: white-list cases where: 571 // - there are no non-reference parameters to the constructor 572 // - the initializers don't access any non-reference parameters 573 // - the initializers don't take the address of non-reference 574 // parameters 575 // - etc. 576 // If we ever add any of the above cases, remember that: 577 // - function-try-blocks will always blacklist this optimization 578 // - we need to perform the constructor prologue and cleanup in 579 // EmitConstructorBody. 580 581 return false; 582 } 583 584 // We also disable the optimization for variadic functions because 585 // it's impossible to "re-pass" varargs. 586 if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic()) 587 return false; 588 589 return true; 590 } 591 592 /// EmitConstructorBody - Emits the body of the current constructor. 
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  CXXTryStmtInfo TryInfo;
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));

  if (IsTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  // Remember the current cleanup-stack depth so that only the cleanups
  // pushed by the prologue and body are popped below.
  unsigned CleanupStackSize = CleanupEntries.size();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  EmitCleanupBlocks(CleanupStackSize);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  const CXXRecordDecl *ClassDecl = CD->getParent();

  // Member initializers are queued here and emitted only after
  // InitializeVTablePointers below.
  llvm::SmallVector<CXXBaseOrMemberInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXBaseOrMemberInitializer *Member = (*B);

    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    if (Member->isBaseInitializer())
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    else
      MemberInitializers.push_back(Member);
  }

  // All bases have been constructed; install this class's vtable pointers
  // before running the member initializers.
  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I) {
    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
  }
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else --- unless we're in a deleting destructor, in which
  // case we're just going to call the complete destructor and then
  // call operator delete() on the way out.
  CXXTryStmtInfo TryInfo;
  bool isTryBody = (DtorType != Dtor_Deleting &&
                    Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  // All paths through the body fall into the epilogue, which performs
  // member/base destruction appropriate to this destructor variant.
  llvm::BasicBlock *DtorEpilogue = createBasicBlock("dtor.epilogue");
  PushCleanupBlock(DtorEpilogue);

  bool SkipBody = false; // should get jump-threaded

  // If this is the deleting variant, just invoke the complete
  // variant, then call the appropriate operator delete() on the way
  // out.
  if (DtorType == Dtor_Deleting) {
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    SkipBody = true;

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  } else if (!isTryBody && DtorType == Dtor_Complete) {
    EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    SkipBody = true;

  // Otherwise, we're in the base variant, so we need to ensure the
  // vtable ptrs are right before emitting the body.
  } else {
    InitializeVTablePointers(Dtor->getParent());
  }

  // Emit the body of the statement.
  if (SkipBody)
    (void) 0;
  else if (isTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);
  else {
    assert(Dtor->isImplicit() && "bodyless dtor not implicit");
    // nothing to do besides what's in the epilogue
  }

  // Jump to the cleanup block.
  CleanupBlockInfo Info = PopCleanupBlock();
  assert(Info.CleanupBlock == DtorEpilogue && "Block mismatch!");
  EmitBlock(DtorEpilogue);

  // Emit the destructor epilogue now. If this is a complete
  // destructor with a function-try-block, perform the base epilogue
  // as well.
  if (isTryBody && DtorType == Dtor_Complete)
    EmitDtorEpilogue(Dtor, Dtor_Base);
  EmitDtorEpilogue(Dtor, DtorType);

  // Link up the cleanup information.
  if (Info.SwitchBlock)
    EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    EmitBlock(Info.EndBlock);

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);
}

/// EmitDtorEpilogue - Emit all code that comes at the end of class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EmitDtorEpilogue(const CXXDestructorDecl *DD,
                                       CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // In a deleting destructor, we've already called the complete
  // destructor as a subroutine, so we just have to delete the
  // appropriate value.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EmitDeleteCall(DD->getOperatorDelete(), LoadCXXThis(),
                   getContext().getTagDeclType(ClassDecl));
    return;
  }

  // For complete destructors, we've already called the base
  // destructor (in GenerateBody), so we just need to destruct all the
  // virtual bases.
  if (DtorType == Dtor_Complete) {
    // Handle virtual bases, in reverse declaration order.
    for (CXXRecordDecl::reverse_base_class_const_iterator I =
           ClassDecl->vbases_rbegin(), E = ClassDecl->vbases_rend();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;
      const CXXDestructorDecl *D = BaseClassDecl->getDestructor(getContext());
      llvm::Value *V =
        GetAddressOfDirectBaseInCompleteClass(LoadCXXThis(),
                                              ClassDecl, BaseClassDecl,
                                              /*BaseIsVirtual=*/true);
      EmitCXXDestructorCall(D, Dtor_Base, /*ForVirtualBase=*/true, V);
    }
    return;
  }

  assert(DtorType == Dtor_Base);

  // Collect the fields whose (array-element) type has a non-trivial
  // destructor.
  llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    QualType FieldType = getContext().getCanonicalType(Field->getType());
    // Look through array types; array members are destroyed
    // element-by-element below.
    FieldType = getContext().getBaseElementType(FieldType);

    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      continue;

    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (FieldClassDecl->hasTrivialDestructor())
      continue;

    FieldDecls.push_back(Field);
  }

  // Now destroy the fields, in reverse order of declaration.
  for (size_t i = FieldDecls.size(); i > 0; --i) {
    const FieldDecl *Field = FieldDecls[i - 1];

    QualType FieldType = Field->getType();
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(FieldType);

    // Safe: only record-typed fields were collected above.
    const RecordType *RT = FieldType->getAs<RecordType>();
    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

    llvm::Value *ThisPtr = LoadCXXThis();

    LValue LHS = EmitLValueForField(ThisPtr, Field,
                                    // FIXME: Qualifiers?
                                    /*CVRQualifiers=*/0);
    if (Array) {
      // Destroy each element of the array member via the aggregate helper.
      const llvm::Type *BasePtr = ConvertType(FieldType);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr =
        Builder.CreateBitCast(LHS.getAddress(), BasePtr);
      EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                Array, BaseAddrPtr);
    } else
      EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                            Dtor_Complete, /*ForVirtualBase=*/false,
                            LHS.getAddress());
  }

  // Destroy non-virtual bases, in reverse declaration order.
  for (CXXRecordDecl::reverse_base_class_const_iterator I =
        ClassDecl->bases_rbegin(), E = ClassDecl->bases_rend(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases; they are destroyed by the complete variant
    // (see the Dtor_Complete case above).
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    const CXXDestructorDecl *D = BaseClassDecl->getDestructor(getContext());
    llvm::Value *V =
      GetAddressOfDirectBaseInCompleteClass(LoadCXXThis(), ClassDecl,
                                            BaseClassDecl,
                                            /*BaseIsVirtual=*/false);

    EmitCXXDestructorCall(D, Dtor_Base, /*ForVirtualBase=*/false, V);
  }
}

/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the default constructor on individual members of the
/// array.
/// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
/// array type and 'ArrayPtr' points to the beginning of the array.
/// It is assumed that all relevant checks have been made by the caller.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            const ConstantArrayType *ArrayTy,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd) {

  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  // Flatten the (possibly nested) array bounds into a total element count
  // and delegate to the variable-count overload below.
  llvm::Value * NumElements =
    llvm::ConstantInt::get(SizeTy,
                           getContext().getConstantArrayElementCount(ArrayTy));

  EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd);
}

/// EmitCXXAggrConstructorCall - Emit a for-loop that calls constructor 'D'
/// with arguments [ArgBeg, ArgEnd) on each of 'NumElements' elements
/// starting at 'ArrayPtr'.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            llvm::Value *NumElements,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
  Builder.CreateStore(Zero, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
                                                   "arrayidx");

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  // Keep track of the current number of live temporaries.
  {
    // The scope's destructor emits destruction of per-element temporaries
    // before the loop advances.
    CXXTemporariesCleanupScope Scope(*this);

    EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
                           ArgBeg, ArgEnd);
  }

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
964 void 965 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D, 966 const ArrayType *Array, 967 llvm::Value *This) { 968 const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array); 969 assert(CA && "Do we support VLA for destruction ?"); 970 uint64_t ElementCount = getContext().getConstantArrayElementCount(CA); 971 972 const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType()); 973 llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount); 974 EmitCXXAggrDestructorCall(D, ElementCountPtr, This); 975 } 976 977 /// EmitCXXAggrDestructorCall - calls the default destructor on array 978 /// elements in reverse order of construction. 979 void 980 CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D, 981 llvm::Value *UpperCount, 982 llvm::Value *This) { 983 const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType()); 984 llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1); 985 986 // Create a temporary for the loop index and initialize it with count of 987 // array elements. 988 llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index"); 989 990 // Store the number of elements in the index pointer. 991 Builder.CreateStore(UpperCount, IndexPtr); 992 993 // Start the loop with a block that tests the condition. 994 llvm::BasicBlock *CondBlock = createBasicBlock("for.cond"); 995 llvm::BasicBlock *AfterFor = createBasicBlock("for.end"); 996 997 EmitBlock(CondBlock); 998 999 llvm::BasicBlock *ForBody = createBasicBlock("for.body"); 1000 1001 // Generate: if (loop-index != 0 fall to the loop body, 1002 // otherwise, go to the block after the for-loop. 1003 llvm::Value* zeroConstant = 1004 llvm::Constant::getNullValue(SizeLTy); 1005 llvm::Value *Counter = Builder.CreateLoad(IndexPtr); 1006 llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant, 1007 "isne"); 1008 // If the condition is true, execute the body. 
1009 Builder.CreateCondBr(IsNE, ForBody, AfterFor); 1010 1011 EmitBlock(ForBody); 1012 1013 llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc"); 1014 // Inside the loop body, emit the constructor call on the array element. 1015 Counter = Builder.CreateLoad(IndexPtr); 1016 Counter = Builder.CreateSub(Counter, One); 1017 llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx"); 1018 EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address); 1019 1020 EmitBlock(ContinueBlock); 1021 1022 // Emit the decrement of the loop counter. 1023 Counter = Builder.CreateLoad(IndexPtr); 1024 Counter = Builder.CreateSub(Counter, One, "dec"); 1025 Builder.CreateStore(Counter, IndexPtr); 1026 1027 // Finally, branch back up to the condition for the next iteration. 1028 EmitBranch(CondBlock); 1029 1030 // Emit the fall-through block. 1031 EmitBlock(AfterFor, true); 1032 } 1033 1034 /// GenerateCXXAggrDestructorHelper - Generates a helper function which when 1035 /// invoked, calls the default destructor on array elements in reverse order of 1036 /// construction. 
llvm::Constant *
CodeGenFunction::GenerateCXXAggrDestructorHelper(const CXXDestructorDecl *D,
                                                 const ArrayType *Array,
                                                 llvm::Value *This) {
  // The helper is declared with a single implicit void* parameter.
  // NOTE(review): the body emitted below uses 'This' directly rather than
  // the incoming parameter; that is only sound if 'This' is usable across
  // functions (e.g. a constant address) — confirm against callers.
  FunctionArgList Args;
  ImplicitParamDecl *Dst =
    ImplicitParamDecl::Create(getContext(), 0,
                              SourceLocation(), 0,
                              getContext().getPointerType(getContext().VoidTy));
  Args.push_back(std::make_pair(Dst, Dst->getType()));

  // Create a uniquely named, internal-linkage "__tcf_<N>" function to hold
  // the destruction loop.
  llvm::SmallString<16> Name;
  llvm::raw_svector_ostream(Name) << "__tcf_" << (++UniqueAggrDestructorCount);
  QualType R = getContext().VoidTy;
  const CGFunctionInfo &FI
      = CGM.getTypes().getFunctionInfo(R, Args, FunctionType::ExtInfo());
  const llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FI, false);
  llvm::Function *Fn =
    llvm::Function::Create(FTy, llvm::GlobalValue::InternalLinkage,
                           Name.str(),
                           &CGM.getModule());
  IdentifierInfo *II = &CGM.getContext().Idents.get(Name.str());
  FunctionDecl *FD = FunctionDecl::Create(getContext(),
                                          getContext().getTranslationUnitDecl(),
                                          SourceLocation(), II, R, 0,
                                          FunctionDecl::Static,
                                          FunctionDecl::None,
                                          false, true);
  StartFunction(FD, R, Fn, Args, SourceLocation());
  // Inside the helper: cast the array address down to a pointer to the
  // innermost element type and emit the reverse-order destructor calls.
  QualType BaseElementTy = getContext().getBaseElementType(Array);
  const llvm::Type *BasePtr = ConvertType(BaseElementTy);
  BasePtr = llvm::PointerType::getUnqual(BasePtr);
  llvm::Value *BaseAddrPtr = Builder.CreateBitCast(This, BasePtr);
  EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
  FinishFunction();
  // Hand the helper back as an i8* constant.
  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),
                                              0);
  llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty);
  return m;
}


/// EmitCXXConstructorCall - Emit a call to constructor variant 'Type' of 'D'
/// on the object at 'This', passing the arguments [ArgBeg, ArgEnd). Trivial
/// constructors are not called at all: a trivial default constructor is a
/// no-op, and a trivial copy constructor is lowered to an aggregate copy.
void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    // The only other trivial one-argument constructor handled here is a copy
    // constructor; emit it as a memberwise aggregate copy of its source.
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  // Non-trivial constructor: emit a real call, passing a VTT parameter when
  // this constructor variant requires one.
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

/// EmitDelegateCXXConstructorCall - From within one constructor variant,
/// emit a call to variant 'CtorType' of the same constructor 'Ctor',
/// forwarding the current function's own parameters ('Args') through
/// unchanged.
void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
                                        I->second));
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));

    // If the current function itself takes a VTT parameter, skip past it in
    // the source argument list so it is not forwarded twice.
    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert(I->second == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {

    const VarDecl *Param = I->first;
    QualType ArgType = Param->getType(); // because we're passing it to itself

    // StartFunction converted the ABI-lowered parameter(s) into a
    // local alloca. We need to turn that into an r-value suitable
    // for EmitCall.
    llvm::Value *Local = GetAddrOfLocalVar(Param);
    RValue Arg;

    // For the most part, we just need to load the alloca, except:
    // 1) aggregate r-values are actually pointers to temporaries, and
    // 2) references to aggregates are pointers directly to the aggregate.
    // I don't know why references to non-aggregates are different here.
    if (ArgType->isReferenceType()) {
      const ReferenceType *RefType = ArgType->getAs<ReferenceType>();
      if (hasAggregateLLVMType(RefType->getPointeeType()))
        Arg = RValue::getAggregate(Local);
      else
        // Locals which are references to scalars are represented
        // with allocas holding the pointer.
        Arg = RValue::get(Builder.CreateLoad(Local));
    } else {
      if (hasAggregateLLVMType(ArgType))
        Arg = RValue::getAggregate(Local);
      else
        Arg = RValue::get(EmitLoadOfScalar(Local, false, ArgType));
    }

    DelegateArgs.push_back(std::make_pair(Arg, ArgType));
  }

  EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

/// EmitCXXDestructorCall - Emit a call to destructor variant 'Type' of 'DD'
/// on the object at 'This', passing a VTT parameter when the variant
/// requires one.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

/// GetVirtualBaseClassOffset - Return, as a ptrdiff_t-sized value, the
/// offset from the object 'This' (of class 'ClassDecl') to its virtual base
/// 'BaseClassDecl', read out of the object's vtable at the
/// virtual-base-offset slot for that base.
llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  // Load the vtable pointer out of the object.
  llvm::Value *VTablePtr = Builder.CreateBitCast(This,
                                                 Int8PtrTy->getPointerTo());
  VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");

  // Byte offset, relative to the vtable address point, of the slot holding
  // this virtual base's offset.
  int64_t VBaseOffsetOffset =
    CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
  const llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

/// InitializeVTablePointer - Store the correct vtable address point into the
/// vtable pointer of the subobject 'Base' of the object currently being
/// constructed ('this').
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         uint64_t OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    /// Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    // Not using a VTT: index straight into the class's own vtable.
    uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  uint64_t NonVirtualOffset = 0;

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    // Record-layout offsets here are in bits; / 8 converts to bytes.
    NonVirtualOffset = OffsetFromNearestVBase / 8;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset() / 8;
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (NonVirtualOffset || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  const llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}

/// InitializeVTablePointers - Recursively initialize the vtable pointers of
/// the subobject 'Base' and of every dynamic base beneath it, visiting each
/// virtual base at most once (tracked in 'VBases').
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          uint64_t OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    uint64_t BaseOffset;
    uint64_t BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      // Virtual bases are laid out relative to the class the vtable is for.
      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = 0;
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      // Non-virtual bases are laid out relative to the current record.
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

/// InitializeVTablePointers - Initialize the vtable pointers of 'this' and
/// of all its subobjects for an object of dynamic class 'RD'. No-op for
/// classes without a vtable.
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/0,
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}