//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
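  // Illustrative note (the offset is hypothetical, not from this file): for
  // a base B laid out at byte offset 8 inside Derived, the code below
  // computes (i8*)this + 8 and bitcasts the result to B*.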
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                CharUnits NonVirtual, llvm::Value *Virtual) {
  llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (!NonVirtual.isZero())
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy,
                                              NonVirtual.getQuantity());

  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, CGF.Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  if (NonVirtualOffset.isZero() && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  if (VBase) {
    if (Derived->hasAttr<FinalAttr>()) {
      VirtualOffset = 0;

      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);

      CharUnits VBaseOffset = Layout.getVBaseClassOffset(VBase);
      NonVirtualOffset += VBaseOffset;
    } else
      VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreatePtrToInt(Value, NonVirtualOffset->getType());
  Value = Builder.CreateSub(Value, NonVirtualOffset);
  Value = Builder.CreateIntToPtr(Value, DerivedPtrTy);

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code? If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOptions().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
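  // The emitted control flow is the usual for-loop shape:
  //   for.cond -> for.body -> for.inc -> for.cond, exiting to for.end.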
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

namespace {
  struct CallMemberDtor : EHScopeStack::Cleanup {
    llvm::Value *V;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
      : V(V), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                V);
    }
  };
}

static bool hasTrivialCopyOrMoveConstructor(const CXXRecordDecl *Record,
                                            bool Moving) {
  return Moving ? Record->hasTrivialMoveConstructor() :
                  Record->hasTrivialCopyConstructor();
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->isIndirectMemberInitializer()) {
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr,
                                           MemberInit->getIndirectMember(), 0);
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(ThisPtr, Field, 0);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
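  // Illustrative example (the type is hypothetical, not from this file): in
  // the implicitly-defined copy constructor of
  //   struct S { int Buf[16]; };
  // the member is copied with a single aggregate copy (a memcpy) rather than
  // an element-by-element loop over the AST initializer.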
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    const CXXRecordDecl *Record = BaseElementTy->getAsCXXRecordDecl();
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (Record && hasTrivialCopyOrMoveConstructor(Record,
                       Constructor->isMoveConstructor()))) {
      // Find the source pointer. We know it's the last argument because
      // we know we're in a copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcPtr, Field, 0);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);

    if (!CGM.getLangOptions().Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
                                          RD->getDestructor());
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers. This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor)) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
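  // (Popping back to CleanupDepth takes care of everything pushed since it
  // was captured, i.e. by the prologue above.)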
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
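/// For illustration: a destructor with a trivial (empty) body whose fields
/// all have trivial destructor bodies themselves cannot use the vtable while
/// it runs, so the vptr stores may be omitted.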
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor. Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases. But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getContext().getLangOptions().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// Call the operator delete associated with the current destructor.
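  /// This cleanup is pushed by the deleting-destructor variant before it
  /// calls the complete destructor, so as a cleanup it runs afterwards,
  /// once the object itself has been destroyed.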
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      LValue LV = CGF.EmitLValueForField(thisValue, field, /*CVRQualifiers=*/0);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

/// EnterDtorCleanups - Emit all code that comes at the end of a class's
/// destructor. This is to call destructors on members and base classes
/// in reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
           ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
        ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
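  // A cleanup is pushed for each field whose type has a non-trivial
  // destruction kind; since cleanups pop in LIFO order, the fields are
  // destroyed in reverse order of construction.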
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero. This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays. There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors start out as loopBB; the taken (is-zero) successor is
    // patched to the continuation block below, once that block exists.
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
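  // (That is, arrayBegin + numElements: one past the last element.)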
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOptions().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI && CGM.getCodeGenOpts().LimitDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
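      // For example, 'A a;' for a class with a trivial default constructor
      // emits nothing and leaves the storage uninitialized.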
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
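  // (That can only happen when the prototype is variadic; see the assert
  // above.)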
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeFunctionCall(Args, FPT), Callee,
           ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                Addr);
    }
  };
}

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOptions().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ?
        Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = 0;
  if (getContext().getLangOptions().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl,
                                                      BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr,
                                                "vbase.offset");

  return VBaseOffset;
}

void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
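    // Each VTT entry is itself a vtable address point, so a single load
    // suffices.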
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass)
                            .getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  Builder.CreateStore(VTableAddressPoint, VTableField);
}

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ?
                               BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  return Builder.CreateLoad(VTablePtrSrc, "vtable");
}

static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }
    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }
    return E;
  }
}

/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden and can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the class itself is marked 'final', it can't be derived
  // from and we can therefore devirtualize the member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // This is a record decl. We know the type and can devirtualize it.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}

static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOptions().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}

llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
      CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}

void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to block");
}

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();
  DeclarationName Name
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  DeclContext::lookup_const_result Calls = Lambda->lookup(Name);
  CXXMethodDecl *CallOperator = cast<CXXMethodDecl>(*Calls.first++);
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getResultType();

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr =
    llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  // Get the address of the call operator.
  GlobalDecl GD(CallOperator);
  const CGFunctionInfo &CalleeFnInfo =
    CGM.getTypes().arrangeFunctionCall(ResultType, CallArgs, FPT->getExtInfo(),
                                       RequiredArgs::forPrototypePlus(FPT, 1));
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(CalleeFnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty);

  // Determine whether we have a return value slot to use.
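  // (A slot is only needed when the ABI returns the value indirectly,
  // through an sret pointer.)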
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(CurFnInfo->getReturnType()))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());

  // Now emit our call.
  RValue RV = EmitCall(CalleeFnInfo, Callee, Slot, CallArgs, CallOperator);

  // Forward the returned value
  if (!ResultType->isVoidType() && Slot.isNull())
    EmitReturnOfRValue(RV, ResultType);
}

void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
  }

  EmitLambdaDelegatingInvokeBody(MD);
}