//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

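// Illustrative note (sketch, not from the original source): for a layout like
//
//   struct A { int a; };
//   struct B { int b; };
//   struct C : A, B { int c; };
//
// the Itanium ABI typically places A at offset 0 and B at a non-zero offset
// inside C, so GetAddressOfDirectBaseInCompleteClass(this, C, B, false)
// reduces to an i8* GEP by B's static offset followed by a bitcast to B*.
// For a virtual base the offset is instead taken from the layout of the
// complete object, which is why the type must be known to be complete.
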
static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != 0);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
                                        nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
  ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");
  return ptr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = 0; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *origBB = 0;
  llvm::BasicBlock *endBB = 0;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = 0;
  if (VBase) {
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast to the destination type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = PHI;
  }

  return Value;
}

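// Rough shape of the IR GetAddressOfBaseClass produces for a null-checked
// conversion with a non-zero offset (illustrative only; names and the exact
// instruction sequence are approximate):
//
//     %isnull = icmp eq %struct.D* %d, null
//     br i1 %isnull, label %cast.end, label %cast.notnull
//   cast.notnull:
//     %add.ptr = getelementptr inbounds i8* %0, i64 <offset>
//     ; ...bitcast to the base pointer type...
//     br label %cast.end
//   cast.end:
//     %cast.result = phi %struct.B* [ %1, %cast.notnull ], [ null, %entry ]
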
llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                          CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreateBitCast(Value, Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase,
                                    bool Delegating) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return CGF.LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

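// Background note (illustrative, not from the original source): in the
// Itanium ABI, a VTT ("virtual table table") is an array of vtable address
// points emitted for classes that need construction vtables. E.g. given
//
//   struct A { virtual void f(); };
//   struct B : virtual A { B(); };
//   struct C : B { C(); };
//
// while C's complete constructor runs B's base constructor, B must use a
// construction vtable (its virtual base A sits at C's offsets, not B's), so
// C1 passes the address of B's sub-VTT as an extra parameter and B's base
// variant installs its vtable pointers from it.
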
namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

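// A typical (illustrative) case BaseInitializerUsesThis catches:
//
//   struct Registry { Registry(void *owner); };
//   struct Widget : Registry {
//     Widget() : Registry(this) {}   // base initializer mentions 'this'
//   };
//
// Once 'this' escapes into arbitrary code, that code might inspect the
// object dynamically, so the check conservatively flags any reference to
// 'this' and the caller initializes the vtable pointers first.
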
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

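// The next function handles aggregate member initialization, including the
// nested loops needed for array members. Illustrative example:
//
//   struct Elt { Elt(const Elt &); };
//   struct S { Elt grid[2][3]; };
//
// S's implicitly-defined copy constructor cannot copy 'grid' with a memcpy,
// so EmitAggMemberInitializer recurses once per array dimension, emitting one
// loop per dimension whose induction variables are the ArrayIndexes VarDecls
// attached to the CXXCtorInitializer.
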
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall through to the loop
  // body; otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

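// The next function contains a fast path worth calling out. Illustrative
// example (not from the original source):
//
//   struct S { int data[1024]; };
//
// S's implicitly-defined copy constructor would naively be emitted as an
// element-by-element loop over 'data'; because the element type is POD, the
// code below ignores the AST's per-element form and emits a single aggregate
// copy instead.
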
static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to
    // the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    IndirectFieldDecl::chain_iterator I = IndirectField->chain_begin(),
      IEnd = IndirectField->chain_end();
    for ( ; I != IEnd; ++I)
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(*I));
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && CE->getConstructor()->isTrivial())) {
      // Find the source pointer.  We know it's the last argument because
      // we know we're in an implicit copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(), FieldType);
}

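// Example of why the EH cleanup pushed above matters (illustrative only):
//
//   struct A { A(); ~A(); };
//   struct B { B(); };              // B() may throw
//   struct S { A a; B b; S() {} };
//
// If B's constructor throws while 'b' is being initialized, the
// already-constructed member 'a' must be destroyed before the exception
// leaves S's constructor; pushEHDestroy registers exactly that cleanup after
// each non-trivially-destructible field is initialized.
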
/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {
  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getContext().getTargetInfo().getCXXABI().hasConstructorVariants()) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

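// Background on the complete->base delegation above (illustrative, per the
// Itanium C++ ABI): each constructor is emitted in up to two variants, a
// complete-object constructor (C1) and a base-object constructor (C2). For
//
//   struct C : B { C(); };
//
// the two variants (e.g. _ZN1CC1Ev and _ZN1CC2Ev) do the same work whenever
// C has no virtual bases, so C1 can be emitted as a single call to C2; that
// is the optimization EmitConstructorBody tries first.
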
/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}

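// Note the emission order established above, which follows C++'s required
// initialization order however the mem-initializer list is written
// (illustrative):
//
//   struct D : A, B {
//     X x; Y y;
//     D() : y(), B(), x(), A() {}   // still runs A, B, vptrs, x, y
//   };
//
// Sema has already sorted the CXXCtorInitializers into execution order; base
// initializers run first, then the vtable pointers are installed, and only
// then the member initializers, so member initializers can safely perform
// virtual calls on the object under construction.
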
static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}

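// Why a destructor may need to reinstall vtable pointers at all
// (illustrative note): while ~D runs, the dynamic type of the object reverts
// to D, so virtual calls made from the destructor body must dispatch through
// D's vtable rather than a more-derived one:
//
//   struct D : B {
//     virtual void log();
//     ~D() { log(); }   // must call D::log even if *this was more derived
//   };
//
// When the body and all member destructors are trivial that store is dead,
// which is the situation CanSkipVTablePointerInitialization detects.
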
/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          /*Delegating=*/false, LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody &&
        CGM.getContext().getTargetInfo().getCXXABI().hasDestructorVariants()) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

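// Summary of the three Itanium-ABI destructor variants coordinated here
// (illustrative note): the deleting destructor (D0) runs the complete
// destructor and then calls operator delete; the complete destructor (D1)
// does the work of the base variant and additionally destroys virtual bases;
// the base destructor (D2) runs the user-written body, then destroys fields
// and non-virtual bases. EnterDtorCleanups below pushes exactly the cleanups
// required for whichever phase is being emitted.
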
/// EnterDtorCleanups - Push the cleanups that run at the end of a class's
/// destructor: calling operator delete for the deleting variant, and
/// otherwise destroying members and base classes in reverse order of their
/// construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
         ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                            CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

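// The array-construction loop emitted below handles cases like (illustrative)
//
//   A *p = new A[n];   // 'n' may be zero and need not be known statically
//
// For a dynamic count, the code first branches around the loop when n == 0,
// then advances a phi'd element pointer from arrayBegin to arrayEnd, calling
// the constructor once per element.
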
/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                            CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

    // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors start out as the loop block; once the continuation
    // block exists, the "is zero" successor is patched to point at it (see
    // the setSuccessor call below).
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           /*Delegating=*/false, cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            /*Delegating=*/false, addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        bool Delegating,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI &&
      CGM.getCodeGenOpts().getDebugInfo() == CodeGenOptions::LimitedDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase,
                                     Delegating);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  // FIXME: Provide a source location here.
  EmitCXXMemberCall(D, SourceLocation(), Callee, ReturnValueSlot(), This,
                    VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
           Callee, ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false,
                                         /*Delegating=*/true)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                /*Delegating=*/true, Addr);
    }
  };
}

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            bool Delegating,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase, Delegating);
  llvm::Value *Callee = 0;
  if (getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  // FIXME: Provide a source location here.
  EmitCXXMemberCall(DD, SourceLocation(), Callee, ReturnValueSlot(), This,
                    VTT, 0, 0);
}

namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false,
                                /*Delegating=*/false, Addr);
    }
  };
}

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

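// How the load above finds the offset (illustrative, per the Itanium ABI):
// a vtable stores the offsets of the class's virtual bases at small negative
// indices before the address point, so the sequence is roughly
//
//   vtable = *(char **)this;
//   offset = *(ptrdiff_t *)(vtable + vbase-offset-offset);  // usually < 0
//   vbase  = (char *)this + offset;
//
// The "offset of the offset" is a compile-time constant supplied by the
// VTableContext; the offset itself must be loaded at run time because a
// virtual base can sit at different offsets in different complete objects.
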
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr());
}

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

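// Which subobjects get a vtable pointer store here (illustrative): a class
// shares its vptr slot with its non-virtual primary base, so for, e.g.,
//
//   struct A { virtual void f(); };
//   struct B { virtual void g(); };
//   struct C : A, B { /* ... */ };
//
// C and its primary base A use the single slot at offset 0 (written once,
// with C's address point), while the B subobject keeps its own slot; that is
// what the BaseIsNonVirtualPrimaryBase flag threaded through the recursion
// tracks.
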
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr());
  return VTable;
}

static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }

    return E;
  }
}

/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expression can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden and we can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the class itself is marked 'final', it can't be derived
  // from and we can therefore devirtualize calls to its member functions.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // The variable is a complete object (not a pointer or reference), so
      // its dynamic type is its static type and we can devirtualize the call.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
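  // A temporary is a prvalue of class type, and the dynamic type of a
  // prvalue is exactly its static type, so e.g. (illustrative only)
  // A().f() can only ever invoke A::f.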
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}

static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}

llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
      CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}

void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *lambda,
                                                 CallArgList &callArgs) {
  // Look up the call operator.
  DeclarationName operatorName
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  CXXMethodDecl *callOperator =
    cast<CXXMethodDecl>(lambda->lookup(operatorName).front());

  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Value *callee =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getResultType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(calleeFnInfo.getReturnType()))
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
                       callArgs, callOperator);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull())
    EmitReturnOfRValue(RV, resultType);
  else
    EmitBranchThroughCleanup(ReturnBlock);
}

void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
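  // The block's parameters mirror the lambda call operator's parameters
  // one-to-one, so each of them is forwarded through EmitDelegateCallArg
  // unchanged.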
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
    return;
  }

  EmitFunctionBody(Args);
}

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  // The conversion to a function pointer is only available for lambdas with
  // no captures, so the call operator can never use its 'this' pointer; an
  // undef 'this' is therefore good enough here.
  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}