//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"

using namespace clang;
using namespace CodeGen;

/// ComputeNonVirtualBaseClassOffset - Walk the [Start, End) segment of a
/// base-specifier path, starting at DerivedClass, and accumulate the
/// statically-known offset of each step's base class within the record
/// layout of the class reached so far.  Every specifier in the segment must
/// be non-virtual (asserted below).
///
/// The accumulated layout offsets appear to be in bits; the FIXME at the
/// bottom notes the bit-to-byte division that should eventually move into
/// the layout query itself.
static uint64_t
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CXXBaseSpecifierArray::iterator Start,
                                 CXXBaseSpecifierArray::iterator End) {
  uint64_t Offset = 0;

  // Current class whose layout we are indexing into; advances one step
  // down the path on each iteration.
  const CXXRecordDecl *RD = DerivedClass;

  for (CXXBaseSpecifierArray::iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  // FIXME: We should not use / 8 here.
  return Offset / 8;
}

/// GetNonVirtualBaseClassOffset - Return the non-virtual offset along
/// BasePath (starting at ClassDecl) as a ptrdiff_t-typed constant, or null
/// if the total offset is zero.  Callers treat a null return as "no
/// adjustment needed".
llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                    const CXXBaseSpecifierArray &BasePath) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  uint64_t Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     BasePath.begin(), BasePath.end());
  if (!Offset)
    return 0;

  const llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset);
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  uint64_t Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset) {
    // Apply the byte offset through an i8* GEP.
    // NOTE(review): Offset is divided by 8 here, mirroring the bit-to-byte
    // FIXME in ComputeNonVirtualBaseClassOffset — confirm units match.
    const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(getLLVMContext());
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset / 8);
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

/// ApplyNonVirtualAndVirtualOffset - Add the combined offset (a constant
/// non-virtual part plus an optional runtime virtual part) to ThisPtr,
/// returning the adjusted pointer as an i8*.  At least one of the two
/// offsets is expected to be non-zero/non-null.
static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                uint64_t NonVirtual, llvm::Value *Virtual) {
  const llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (NonVirtual)
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy, NonVirtual);

  // Fold the two components into a single offset value.
  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  const llvm::Type *Int8PtrTy = llvm::Type::getInt8PtrTy(CGF.getLLVMContext());
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

/// GetAddressOfBaseClass - Convert Value (a pointer to Derived) into a
/// pointer to the base class named by the last specifier of BasePath.
/// Handles an optional leading virtual base (looked up at runtime through
/// the vtable) followed by a chain of non-virtual bases (constant offset).
/// If NullCheckValue is set, a null input pointer is propagated unchanged
/// via a null-check diamond and phi.
llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       const CXXBaseSpecifierArray &BasePath,
                                       bool NullCheckValue) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  CXXBaseSpecifierArray::iterator Start = BasePath.begin();
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Constant offset for the remaining (non-virtual) portion of the path,
  // measured from the virtual base if there was one.
  uint64_t NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, BasePath.end());

  // Get the base pointer type.
  const llvm::Type *BasePtrTy =
    ConvertType((BasePath.end()[-1])->getType())->getPointerTo();

  if (!NonVirtualOffset && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  // The virtual-base offset is only known at runtime (read via the vtable).
  if (VBase)
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    // Merge the adjusted pointer with a null from the null path.
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetAddressOfDerivedClass - Convert Value (a pointer to a base class of
/// Derived, reached via the non-virtual path BasePath) back into a pointer
/// to Derived by subtracting the path's constant offset.  If NullCheckValue
/// is set, a null input pointer is propagated unchanged via a null-check
/// diamond and phi.
llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                          const CXXBaseSpecifierArray &BasePath,
                                          bool NullCheckValue) {
  assert(!BasePath.empty() && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  const llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, BasePath);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull =
      Builder.CreateICmpEQ(Value,
                           llvm::Constant::getNullValue(Value->getType()));
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.  The derived object sits *before* the base subobject,
  // so the offset is subtracted via integer arithmetic.
  Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
  Value = Builder.CreateSub(Value, NonVirtualOffset);
  Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);

  // Just cast.
  // NOTE(review): this bitcast is a no-op — Value already has type
  // DerivedPtrTy from the IntToPtr above; candidate for removal.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    // Merge the adjusted pointer with a null from the null path.
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType());
    PHI->reserveOperandSpace(2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases, or null if no VTT
/// parameter is required.  GD identifies the ctor/dtor being called;
/// ForVirtualBase says whether the callee constructs/destroys a virtual
/// base subobject.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  // RD is the class containing the *caller*; Base is the class whose
  // ctor/dtor is being invoked.
  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    // Look up the index of Base's sub-VTT within RD's VTT, keyed by the
    // base subobject's offset in RD's layout.
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    uint64_t BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) : Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().getVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

/// EmitBaseInitializer - Emit the code for a single base-class member
/// initializer of ClassDecl's constructor.  Virtual bases are skipped in
/// base-variant constructors (the complete variant handles them), and an
/// EH cleanup is pushed to destroy the base if a later initializer throws.
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXBaseOrMemberInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              BaseInit->isBaseVirtual());

  CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);

  if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) {
    // FIXME: Is this OK for C++0x delegating constructors?
    CodeGenFunction::EHCleanupBlock Cleanup(CGF);

    CXXDestructorDecl *DD = BaseClassDecl->getDestructor(CGF.getContext());
    CGF.EmitCXXDestructorCall(DD, Dtor_Base, isBaseVirtual, V);
  }
}

/// EmitAggMemberInitializer - Recursively emit initialization of a
/// (possibly multi-dimensional) array member.  Each recursion level peels
/// one array dimension and emits a counted loop over it; at the innermost
/// level (Index == number of array-index variables) the element
/// initializer itself is emitted, offset by ArrayIndexVar if present.
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     llvm::Value *ArrayIndexVar,
                                     CXXBaseOrMemberInitializer *MemberInit,
                                     QualType T,
                                     unsigned Index) {
  if (Index == MemberInit->getNumArrayIndices()) {
    // Base case: emit the element initializer.
    CodeGenFunction::CleanupScope Cleanups(CGF);

    llvm::Value *Dest = LHS.getAddress();
    if (ArrayIndexVar) {
      // If we have an array index variable, load it and use it as an offset.
      // Then, increment the value.
      llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
      Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
      llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
      Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
      CGF.Builder.CreateStore(Next, ArrayIndexVar);
    }

    CGF.EmitAggExpr(MemberInit->getInit(), Dest,
                    LHS.isVolatileQualified(),
                    /*IgnoreResult*/ false,
                    /*IsInitializer*/ true);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(MemberInit->getArrayIndex(Index));
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
                              CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::CleanupScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit,
                             Array->getElementType(), Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

/// EmitMemberInitializer - Emit the code for a single non-static data
/// member initializer of Constructor, dispatching on the field's type:
/// reference binding, null-initialization of uninitialized arrays, scalar
/// store, complex store, or aggregate/array initialization (with a flat
/// memcpy-style fast path for trivially-copyable arrays in implicit copy
/// constructors).  Pushes an EH cleanup to destroy the member on unwind.
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXBaseOrMemberInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isMemberInitializer() &&
         "Must have member initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getMember();
  QualType FieldType = CGF.getContext().getCanonicalType(Field->getType());

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->getAnonUnionMember()) {
    Field = MemberInit->getAnonUnionMember();
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr, Field, 0);
    FieldType = Field->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
  }

  // FIXME: If there's no initializer and the CXXBaseOrMemberInitializer
  // was implicitly generated, we shouldn't be zeroing memory.
  RValue RHS;
  if (FieldType->isReferenceType()) {
    RHS = CGF.EmitReferenceBindingToExpr(MemberInit->getInit(), Field);
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (FieldType->isArrayType() && !MemberInit->getInit()) {
    CGF.EmitNullInitialization(LHS.getAddress(), Field->getType());
  } else if (!CGF.hasAggregateLLVMType(Field->getType())) {
    RHS = RValue::get(CGF.EmitScalarExpr(MemberInit->getInit()));
    CGF.EmitStoreThroughLValue(RHS, LHS, FieldType);
  } else if (MemberInit->getInit()->getType()->isAnyComplexType()) {
    CGF.EmitComplexExprIntoAddr(MemberInit->getInit(), LHS.getAddress(),
                                LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    const ConstantArrayType *Array
      = CGF.getContext().getAsConstantArrayType(FieldType);
    if (Array && Constructor->isImplicit() &&
        Constructor->isCopyConstructor()) {
      const llvm::Type *SizeTy
        = CGF.ConvertType(CGF.getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
      const llvm::Type *BasePtr = CGF.ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = CGF.Builder.CreateBitCast(LHS.getAddress(),
                                                           BasePtr);
      LHS = LValue::MakeAddr(BaseAddrPtr, CGF.MakeQualifiers(BaseElementTy));

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CGF.CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      CGF.Builder.CreateStore(Zero, ArrayIndexVar);

      // If we are copying an array of scalars or classes with trivial copy
      // constructors, perform a single aggregate copy.
      const RecordType *Record = BaseElementTy->getAs<RecordType>();
      if (!Record ||
          cast<CXXRecordDecl>(Record->getDecl())->hasTrivialCopyConstructor()) {
        // Find the source pointer. We knows it's the last argument because
        // we know we're in a copy constructor.
        unsigned SrcArgIndex = Args.size() - 1;
        llvm::Value *SrcPtr
          = CGF.Builder.CreateLoad(
                               CGF.GetAddrOfLocalVar(Args[SrcArgIndex].first));
        LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

        // Copy the aggregate.
        CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                              LHS.isVolatileQualified());
        return;
      }

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = MemberInit->getNumArrayIndices(); I != N; ++I)
        CGF.EmitLocalBlockVarDecl(*MemberInit->getArrayIndex(I));
    }

    EmitAggMemberInitializer(CGF, LHS, ArrayIndexVar, MemberInit, FieldType, 0);

    if (!CGF.Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor()) {
      // FIXME: Is this OK for C++0x delegating constructors?
      CodeGenFunction::EHCleanupBlock Cleanup(CGF);

      llvm::Value *ThisPtr = CGF.LoadCXXThis();
      LValue LHS = CGF.EmitLValueForField(ThisPtr, Field, 0);

      CXXDestructorDecl *DD = RD->getDestructor(CGF.getContext());
      CGF.EmitCXXDestructorCall(DD, Dtor_Complete, /*ForVirtualBase=*/false,
                                LHS.getAddress());
    }
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
553 static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) { 554 555 // Currently we disable the optimization for classes with virtual 556 // bases because (1) the addresses of parameter variables need to be 557 // consistent across all initializers but (2) the delegate function 558 // call necessarily creates a second copy of the parameter variable. 559 // 560 // The limiting example (purely theoretical AFAIK): 561 // struct A { A(int &c) { c++; } }; 562 // struct B : virtual A { 563 // B(int count) : A(count) { printf("%d\n", count); } 564 // }; 565 // ...although even this example could in principle be emitted as a 566 // delegation since the address of the parameter doesn't escape. 567 if (Ctor->getParent()->getNumVBases()) { 568 // TODO: white-list trivial vbase initializers. This case wouldn't 569 // be subject to the restrictions below. 570 571 // TODO: white-list cases where: 572 // - there are no non-reference parameters to the constructor 573 // - the initializers don't access any non-reference parameters 574 // - the initializers don't take the address of non-reference 575 // parameters 576 // - etc. 577 // If we ever add any of the above cases, remember that: 578 // - function-try-blocks will always blacklist this optimization 579 // - we need to perform the constructor prologue and cleanup in 580 // EmitConstructorBody. 581 582 return false; 583 } 584 585 // We also disable the optimization for variadic functions because 586 // it's impossible to "re-pass" varargs. 587 if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic()) 588 return false; 589 590 return true; 591 } 592 593 /// EmitConstructorBody - Emits the body of the current constructor. 
/// EmitConstructorBody - Emits the body of the current constructor,
/// including the complete->base delegation fast path, the prologue of
/// base/member initializers, the user-written body (possibly a
/// function-try-block), and the associated cleanup blocks.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  CXXTryStmtInfo TryInfo;
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));

  if (IsTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  // Record the cleanup-stack depth so we can pop back to it after the body.
  unsigned CleanupStackSize = CleanupEntries.size();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  EmitCleanupBlocks(CleanupStackSize);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  const CXXRecordDecl *ClassDecl = CD->getParent();

  // Member initializers are deferred so that all base initializers run
  // first (and the vtable pointers are set in between).
  llvm::SmallVector<CXXBaseOrMemberInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXBaseOrMemberInitializer *Member = (*B);

    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    if (Member->isBaseInitializer())
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    else
      MemberInitializers.push_back(Member);
  }

  // Install vtable pointers after bases are constructed but before
  // members are initialized.
  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I) {
    assert(LiveTemporaries.empty() &&
           "Should not have any live temporaries at initializer start!");

    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
  }
}

/// EmitDestructorBody - Emits the body of the current destructor.
/// The deleting variant calls the complete variant and then operator
/// delete; the complete variant (when possible) calls the base variant and
/// destroys virtual bases in the epilogue; the base variant re-installs
/// vtable pointers and runs the user-written body.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else --- unless we're in a deleting destructor, in which
  // case we're just going to call the complete destructor and then
  // call operator delete() on the way out.
  CXXTryStmtInfo TryInfo;
  bool isTryBody = (DtorType != Dtor_Deleting &&
                    Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    TryInfo = EnterCXXTryStmt(*cast<CXXTryStmt>(Body));

  // The epilogue (member/base destruction) runs in its own cleanup block.
  llvm::BasicBlock *DtorEpilogue = createBasicBlock("dtor.epilogue");
  PushCleanupBlock(DtorEpilogue);

  bool SkipBody = false; // should get jump-threaded

  // If this is the deleting variant, just invoke the complete
  // variant, then call the appropriate operator delete() on the way
  // out.
  if (DtorType == Dtor_Deleting) {
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    SkipBody = true;

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  } else if (!isTryBody && DtorType == Dtor_Complete) {
    EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    SkipBody = true;

  // Otherwise, we're in the base variant, so we need to ensure the
  // vtable ptrs are right before emitting the body.
  } else {
    InitializeVTablePointers(Dtor->getParent());
  }

  // Emit the body of the statement.
  if (SkipBody)
    (void) 0;
  else if (isTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);
  else {
    assert(Dtor->isImplicit() && "bodyless dtor not implicit");
    // nothing to do besides what's in the epilogue
  }

  // Jump to the cleanup block.
  CleanupBlockInfo Info = PopCleanupBlock();
  assert(Info.CleanupBlock == DtorEpilogue && "Block mismatch!");
  EmitBlock(DtorEpilogue);

  // Emit the destructor epilogue now. If this is a complete
  // destructor with a function-try-block, perform the base epilogue
  // as well.
  if (isTryBody && DtorType == Dtor_Complete)
    EmitDtorEpilogue(Dtor, Dtor_Base);
  EmitDtorEpilogue(Dtor, DtorType);

  // Link up the cleanup information.
  if (Info.SwitchBlock)
    EmitBlock(Info.SwitchBlock);
  if (Info.EndBlock)
    EmitBlock(Info.EndBlock);

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), TryInfo);
}

/// EmitDtorEpilogue - Emit all code that comes at the end of class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EmitDtorEpilogue(const CXXDestructorDecl *DD,
                                       CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // In a deleting destructor, we've already called the complete
  // destructor as a subroutine, so we just have to delete the
  // appropriate value.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EmitDtorEpilogue");
    EmitDeleteCall(DD->getOperatorDelete(), LoadCXXThis(),
                   getContext().getTagDeclType(ClassDecl));
    return;
  }

  // For complete destructors, we've already called the base
  // destructor (in GenerateBody), so we just need to destruct all the
  // virtual bases.
  if (DtorType == Dtor_Complete) {
    // Handle virtual bases, in reverse declaration order.
    for (CXXRecordDecl::reverse_base_class_const_iterator I =
           ClassDecl->vbases_rbegin(), E = ClassDecl->vbases_rend();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;
      const CXXDestructorDecl *D = BaseClassDecl->getDestructor(getContext());
      llvm::Value *V =
        GetAddressOfDirectBaseInCompleteClass(LoadCXXThis(),
                                              ClassDecl, BaseClassDecl,
                                              /*BaseIsVirtual=*/true);
      EmitCXXDestructorCall(D, Dtor_Base, /*ForVirtualBase=*/true, V);
    }
    return;
  }

  assert(DtorType == Dtor_Base);

  // Collect the fields whose class type (or array-element class type) has
  // a non-trivial destructor.
  llvm::SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    QualType FieldType = getContext().getCanonicalType(Field->getType());
    FieldType = getContext().getBaseElementType(FieldType);

    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      continue;

    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (FieldClassDecl->hasTrivialDestructor())
      continue;

    FieldDecls.push_back(Field);
  }

  // Now destroy the fields, in reverse order of declaration.
  for (size_t i = FieldDecls.size(); i > 0; --i) {
    const FieldDecl *Field = FieldDecls[i - 1];

    QualType FieldType = Field->getType();
    const ConstantArrayType *Array =
      getContext().getAsConstantArrayType(FieldType);
    if (Array)
      FieldType = getContext().getBaseElementType(FieldType);

    const RecordType *RT = FieldType->getAs<RecordType>();
    CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

    llvm::Value *ThisPtr = LoadCXXThis();

    LValue LHS = EmitLValueForField(ThisPtr, Field,
                                    // FIXME: Qualifiers?
                                    /*CVRQualifiers=*/0);
    if (Array) {
      // Array member: destroy every element in reverse order.
      const llvm::Type *BasePtr = ConvertType(FieldType);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr =
        Builder.CreateBitCast(LHS.getAddress(), BasePtr);
      EmitCXXAggrDestructorCall(FieldClassDecl->getDestructor(getContext()),
                                Array, BaseAddrPtr);
    } else
      EmitCXXDestructorCall(FieldClassDecl->getDestructor(getContext()),
                            Dtor_Complete, /*ForVirtualBase=*/false,
                            LHS.getAddress());
  }

  // Destroy non-virtual bases.
  for (CXXRecordDecl::reverse_base_class_const_iterator I =
        ClassDecl->bases_rbegin(), E = ClassDecl->bases_rend(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl
      = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    const CXXDestructorDecl *D = BaseClassDecl->getDestructor(getContext());
    llvm::Value *V =
      GetAddressOfDirectBaseInCompleteClass(LoadCXXThis(), ClassDecl,
                                            BaseClassDecl,
                                            /*BaseIsVirtual=*/false);

    EmitCXXDestructorCall(D, Dtor_Base, /*ForVirtualBase=*/false, V);
  }
}

/// EmitCXXAggrConstructorCall - This routine essentially creates a (nested)
/// for-loop to call the default constructor on individual members of the
/// array.
/// 'D' is the default constructor for elements of the array, 'ArrayTy' is the
/// array type and 'ArrayPtr' points to the beginning fo the array.
/// It is assumed that all relevant checks have been made by the caller.
/// Convenience overload: compute the (flattened) element count of ArrayTy
/// as a size_t constant and forward to the run-time-count overload below.
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            const ConstantArrayType *ArrayTy,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd) {

  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
  llvm::Value * NumElements =
    llvm::ConstantInt::get(SizeTy,
                           getContext().getConstantArrayElementCount(ArrayTy));

  EmitCXXAggrConstructorCall(D, NumElements, ArrayPtr, ArgBeg, ArgEnd);
}

/// Emit a counted loop that calls constructor D on each of NumElements
/// elements starting at ArrayPtr, forwarding the arguments [ArgBeg, ArgEnd)
/// to every call.  Temporaries created by default arguments are destroyed
/// per element (see the [class.temporary]p4 citation below).
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                            llvm::Value *NumElements,
                                            llvm::Value *ArrayPtr,
                                            CallExpr::const_arg_iterator ArgBeg,
                                            CallExpr::const_arg_iterator ArgEnd) {
  const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

  // Create a temporary for the loop index and initialize it with 0.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeTy, "loop.index");
  llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
  Builder.CreateStore(Zero, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index < number-of-elements fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsLess = Builder.CreateICmpULT(Counter, NumElements, "isless");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the constructor call on the array element.
  Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *Address = Builder.CreateInBoundsGEP(ArrayPtr, Counter,
                                                   "arrayidx");

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  // Keep track of the current number of live temporaries.
  {
    CXXTemporariesCleanupScope Scope(*this);

    EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase=*/false, Address,
                           ArgBeg, ArgEnd);
  }

  EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(SizeTy, 1);
  Counter = Builder.CreateLoad(IndexPtr);
  NextVal = Builder.CreateAdd(Counter, NextVal, "inc");
  Builder.CreateStore(NextVal, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           const ArrayType *Array,
                                           llvm::Value *This) {
  // Only constant-size arrays are handled; materialize the element count
  // as a size_t constant and delegate to the runtime-count overload.
  const ConstantArrayType *CA = dyn_cast<ConstantArrayType>(Array);
  assert(CA && "Do we support VLA for destruction ?");
  uint64_t ElementCount = getContext().getConstantArrayElementCount(CA);

  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value* ElementCountPtr = llvm::ConstantInt::get(SizeLTy, ElementCount);
  EmitCXXAggrDestructorCall(D, ElementCountPtr, This);
}

/// EmitCXXAggrDestructorCall - calls the default destructor on array
/// elements in reverse order of construction. 'UpperCount' is the runtime
/// element count (of size_t type); the loop counts down from UpperCount to 0.
void
CodeGenFunction::EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                           llvm::Value *UpperCount,
                                           llvm::Value *This) {
  const llvm::Type *SizeLTy = ConvertType(getContext().getSizeType());
  llvm::Value *One = llvm::ConstantInt::get(SizeLTy, 1);

  // Create a temporary for the loop index and initialize it with count of
  // array elements.
  llvm::Value *IndexPtr = CreateTempAlloca(SizeLTy, "loop.index");

  // Store the number of elements in the index pointer.
  Builder.CreateStore(UpperCount, IndexPtr);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = createBasicBlock("for.end");

  EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = createBasicBlock("for.body");

  // Generate: if (loop-index != 0) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  llvm::Value* zeroConstant =
    llvm::Constant::getNullValue(SizeLTy);
  llvm::Value *Counter = Builder.CreateLoad(IndexPtr);
  llvm::Value *IsNE = Builder.CreateICmpNE(Counter, zeroConstant,
                                           "isne");
  // If the condition is true, execute the body.
  Builder.CreateCondBr(IsNE, ForBody, AfterFor);

  EmitBlock(ForBody);

  llvm::BasicBlock *ContinueBlock = createBasicBlock("for.inc");
  // Inside the loop body, emit the destructor call on the array element at
  // index (loop-index - 1), so elements are destroyed last-to-first.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One);
  llvm::Value *Address = Builder.CreateInBoundsGEP(This, Counter, "arrayidx");
  EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false, Address);

  EmitBlock(ContinueBlock);

  // Emit the decrement of the loop counter.
  Counter = Builder.CreateLoad(IndexPtr);
  Counter = Builder.CreateSub(Counter, One, "dec");
  Builder.CreateStore(Counter, IndexPtr);

  // Finally, branch back up to the condition for the next iteration.
  EmitBranch(CondBlock);

  // Emit the fall-through block.
  EmitBlock(AfterFor, true);
}

/// EmitCXXConstructorCall - Emit a call to constructor 'D' of kind 'Type'
/// on the object at 'This' with arguments [ArgBeg, ArgEnd). Trivial
/// constructors are handled inline without emitting a call.
void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyConstructor() && "trivial 1-arg ctor not a copy ctor");

    // A trivial copy constructor is equivalent to a memberwise copy;
    // emit it as an aggregate copy of the source object.
    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  // Pass the VTT (if any) as an implicit argument after 'this'.
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

/// EmitDelegateCXXConstructorCall - Emit a call from one constructor variant
/// of 'Ctor' to another ('CtorType'), forwarding the current function's
/// parameters ('Args') unchanged: 'this', then the VTT if the callee takes
/// one, then the explicit arguments.
void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.push_back(std::make_pair(RValue::get(LoadCXXThis()),
                                        I->second));
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.push_back(std::make_pair(RValue::get(VTT), VoidPP));

    // If the *current* function also takes a VTT parameter, skip over it in
    // the incoming argument list so it isn't forwarded twice.
    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert(I->second == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *Param = I->first;
    QualType ArgType = Param->getType(); // because we're passing it to itself
    RValue Arg = EmitDelegateCallArg(Param);

    DelegateArgs.push_back(std::make_pair(Arg, ArgType));
  }

  EmitCall(CGM.getTypes().getFunctionInfo(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

/// EmitCXXDestructorCall - Emit a call to destructor 'DD' of kind 'Type' on
/// the object at 'This', passing the VTT implicit argument if required.
void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  // Destructors take no explicit arguments.
  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

/// GetVirtualBaseClassOffset - Load, at runtime, the offset of the virtual
/// base 'BaseClassDecl' within an object of type 'ClassDecl' pointed to by
/// 'This', by reading the vbase-offset slot stored in the object's vtable.
llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  const llvm::Type *Int8PtrTy =
    llvm::Type::getInt8Ty(VMContext)->getPointerTo();

  // Load the vtable pointer from the start of the object.
  llvm::Value *VTablePtr = Builder.CreateBitCast(This,
                                                 Int8PtrTy->getPointerTo());
  VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");

  // The (negative) byte offset of the vbase-offset slot relative to the
  // vtable address point.
  int64_t VBaseOffsetOffset =
    CGM.getVTables().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset, "vbase.offset.ptr");
  const llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

/// InitializeVTablePointer - Store the correct vtable address point into the
/// vptr of the base subobject 'Base' of the object being constructed (whose
/// most-derived type is 'VTableClass'). 'NearestVBase' is the nearest
/// enclosing virtual base of the subobject (or null), and
/// 'OffsetFromNearestVBase' is the subobject's offset from it, in bits.
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         uint64_t OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT (i.e. we are inside a
  // base-subobject constructor/destructor and this subobject involves
  // virtual bases).
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    // Use the address point inside the class's own vtable.
    uint64_t AddressPoint = CGM.getVTables().getAddressPoint(Base, VTableClass);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  uint64_t NonVirtualOffset = 0;

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    // Layout offsets are in bits; convert to bytes (see the FIXME about /8
    // elsewhere in this file).
    NonVirtualOffset = OffsetFromNearestVBase / 8;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset() / 8;
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (NonVirtualOffset || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  const llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}

/// InitializeVTablePointers - Recursively initialize the vptr of the base
/// subobject 'Base' and of all of its dynamic bases, visiting each virtual
/// base only once (tracked in 'VBases').
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          uint64_t OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set (it shares its vptr with the class it is primary base of).
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    uint64_t BaseOffset;
    uint64_t BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      // Virtual base offsets are relative to the most-derived class layout.
      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = 0;
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

/// InitializeVTablePointers - Initialize the vptrs of 'RD' and of all of its
/// base subobjects; entry point for constructor codegen.
void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, 0), /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/0,
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}