//===--- CGClass.cpp - Emit LLVM Code for C++ classes --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}
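
// Example for GetNonVirtualBaseClassOffset above: given
//
//   struct A { int a; };
//   struct B { int b; };
//   struct C : A, B { };
//
// the path C -> B yields a non-virtual offset of sizeof(A) under the
// Itanium ABI (assuming no padding), while the path C -> A yields a zero
// offset, for which the function returns a null constant.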

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ThisPtr,
                                CharUnits NonVirtual, llvm::Value *Virtual) {
  llvm::Type *PtrDiffTy =
    CGF.ConvertType(CGF.getContext().getPointerDiffType());

  llvm::Value *NonVirtualOffset = 0;
  if (!NonVirtual.isZero())
    NonVirtualOffset = llvm::ConstantInt::get(PtrDiffTy,
                                              NonVirtual.getQuantity());

  llvm::Value *BaseOffset;
  if (Virtual) {
    if (NonVirtualOffset)
      BaseOffset = CGF.Builder.CreateAdd(Virtual, NonVirtualOffset);
    else
      BaseOffset = Virtual;
  } else
    BaseOffset = NonVirtualOffset;

  // Apply the base offset.
  ThisPtr = CGF.Builder.CreateBitCast(ThisPtr, CGF.Int8PtrTy);
  ThisPtr = CGF.Builder.CreateGEP(ThisPtr, BaseOffset, "add.ptr");

  return ThisPtr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Get the virtual base.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  if (NonVirtualOffset.isZero() && !VBase) {
    // Just cast back.
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *VirtualOffset = 0;

  if (VBase) {
    // If the derived class is final, the virtual base is at a statically
    // known offset, so fold it into the non-virtual offset.
    if (Derived->hasAttr<FinalAttr>()) {
      VirtualOffset = 0;

      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);

      CharUnits VBaseOffset = Layout.getVBaseClassOffset(VBase);
      NonVirtualOffset += VBaseOffset;
    } else
      VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply the offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast back.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}
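
// Example for GetAddressOfBaseClass above: the implicit derived-to-base
// conversion in
//
//   struct V { int v; };
//   struct D : virtual V { int d; };
//   V *upcast(D *d) { return d; }
//
// loads the virtual base offset from d's vtable and guards the adjustment
// with the null-check diamond emitted above, since a null D* must map to a
// null V*.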

llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreateBitCast(Value, Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}
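
// Example for GetVTTParameter above: in the classic diamond
//
//   struct V { virtual void f(); };
//   struct A : virtual V { A(); };
//   struct B : virtual V { B(); };
//   struct C : A, B { C(); };
//
// C's complete constructor constructs V itself, then calls the base
// variants of A() and B(), passing each one a sub-VTT address so that they
// install vtable pointers appropriate for a C object.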

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}
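
// Example for EmitBaseInitializer above: in
//
//   struct A { A(void *self); virtual ~A(); };
//   struct B : A { B() : A(this) { } };
//
// the base initializer mentions 'this', so B's vtable pointers are
// installed before A's constructor runs; and because ~A is non-trivial,
// an EH cleanup is pushed to destroy the A subobject if a later
// initializer throws.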

static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
                              CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}
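
// Example for EmitAggMemberInitializer above: the implicitly-defined copy
// constructor of
//
//   struct S { S(const S&); };
//   struct T { S s[3][2]; };
//
// copies the member array with one counted loop per array dimension; the
// recursion above emits the for.cond/for.body/for.inc nest and calls
// S(const S&) in the innermost body.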

namespace {
  struct CallMemberDtor : EHScopeStack::Cleanup {
    llvm::Value *V;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
      : V(V), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                V);
    }
  };
}

static bool hasTrivialCopyOrMoveConstructor(const CXXRecordDecl *Record,
                                            bool Moving) {
  return Moving ? Record->hasTrivialMoveConstructor() :
                  Record->hasTrivialCopyConstructor();
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS;

  // If we are initializing an anonymous union field, drill down to the field.
  if (MemberInit->isIndirectMemberInitializer()) {
    LHS = CGF.EmitLValueForAnonRecordField(ThisPtr,
                                           MemberInit->getIndirectMember(), 0);
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LValue ThisLHSLV = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);
    LHS = CGF.EmitLValueForFieldInitialization(ThisLHSLV, Field);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    const CXXRecordDecl *Record = BaseElementTy->getAsCXXRecordDecl();
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (Record && hasTrivialCopyOrMoveConstructor(Record,
                       Constructor->isMoveConstructor()))) {
      // Find the source pointer. We know it's the last argument because
      // we know we're in a copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);

    if (!CGM.getLangOpts().Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
                                          RD->getDestructor());
  }
}
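
// Example for EmitInitializerForField above: in a constructor of
//
//   struct T {
//     int i;             // scalar: stored directly
//     _Complex double c; // complex: EmitComplexExprIntoAddr
//     std::string s;     // aggregate: EmitAggExpr, plus an EH cleanup to
//   };                   // run ~string if a later initializer throws
//
// each member initializer takes one of the three paths above.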

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}
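
// Example for IsConstructorDelegationValid above: for
//
//   struct S { S(int); };   // no virtual bases, not variadic
//
// the complete-object constructor (C1 in the Itanium mangling) can be
// emitted as a single forwarding call to the base-object constructor (C2)
// with the same arguments, which EmitConstructorBody does below.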

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}
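
// Example for EmitCtorPrologue above: for
//
//   struct B1 { B1(); };
//   struct B2 { B2(); };
//   struct D : B1, B2 { int x; virtual void f(); D() : x(0) { } };
//
// the prologue runs B1() and B2() in declaration order, then installs D's
// vtable pointers, and only then evaluates the member initializer x(0).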

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = &*I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = &*I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody &&
        CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getContext().getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}
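
// Example for EmitDestructorBody above: for
//
//   struct V { ~V(); };
//   struct D : virtual V { ~D(); };
//
// the deleting variant (D0) calls the complete destructor (D1) and then
// operator delete; D1 destroys the virtual base V after delegating to the
// base variant (D2), which runs the body and destroys fields and
// non-virtual bases.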

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

/// EnterDtorCleanups - Enter the cleanups that run at the end of a class's
/// destructor. These call destructors on members and base classes in
/// reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
           ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
        ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = &*I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}
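
// Example for EnterDtorCleanups above: for
//
//   struct T { std::string a, b; ~T() { } };
//
// cleanups are pushed for 'a' and then 'b', so on both the normal and the
// exceptional path 'b' is destroyed before 'a', the reverse of
// construction order.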

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    // Both successors are loopBB for now; successor 0 is patched to point
    // past the loop once the continuation block exists (see below).
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}
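
// Example for EmitCXXAggrConstructorCall above: 'new A[n]' with
//
//   struct T { T(); ~T(); };
//   struct A { A(const T &t = T()); ~A(); };
//
// constructs each element inside its own RunCleanupsScope, so the T
// temporary from the default argument is destroyed before the next element
// is constructed, and the partial-array cleanup unwinds already-constructed
// elements if a constructor throws.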

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI &&
      CGM.getCodeGenOpts().DebugInfo == CodeGenOptions::LimitedDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  EmitCXXMemberCall(D, Callee, ReturnValueSlot(), This, VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeFunctionCall(Args, FPT), Callee,
           ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                Addr);
    }
  };
}

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = 0;
  if (getContext().getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  EmitCXXMemberCall(DD, Callee, ReturnValueSlot(), This, VTT, 0, 0);
}

namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl,
                                                      BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}
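
// Example for GetVirtualBaseClassOffset above: with
//
//   struct V { int v; };
//   struct D : virtual V { int d; };
//
// converting a D* to a V* loads the vbase offset stored at a (typically
// negative) offset from D's vtable address point; the indirection is needed
// because V may sit at a different offset in classes derived further from D.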

void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr());
}
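
// Example for InitializeVTablePointer above: in
//
//   struct A { virtual void f(); };
//   struct B { virtual void g(); };
//   struct C : A, B { };
//
// a C object carries two vtable pointers: one at offset zero for the A
// subobject and one at a non-zero offset for the B subobject. The recursive
// walk below visits each subobject and stores the matching address point.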

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr());
  return VTable;
}

static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }

    return E;
  }
}

/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden, so we can devirtualize the call.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the class that declares the member function is marked
  // 'final', no class can derive from it, so the call can be devirtualized.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // The variable is a complete object (not a pointer or reference), so
      // its dynamic type is its static type and we can devirtualize the call.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}
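// Illustrative note (the example code below is hypothetical, not from this
// file): the object-expression cases accepted above include
//
//   struct S { virtual int f(); };
//   S makeS();
//
//   int a(S s) { return s.f(); }       // DeclRefExpr of record type
//   int b()    { return S().f(); }     // temporary (CXXConstructExpr)
//   int c()    { return makeS().f(); } // CallExpr returning a record type
//
// In each case the expression denotes a complete object whose dynamic type
// equals its static type, so the call can be emitted directly to S::f.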
static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable,
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}

llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
      CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}

void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *Lambda,
                                                 CallArgList &CallArgs) {
  // Look up the call operator.
  DeclarationName Name =
    getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  DeclContext::lookup_const_result Calls = Lambda->lookup(Name);
  CXXMethodDecl *CallOperator = cast<CXXMethodDecl>(*Calls.first++);
  const FunctionProtoType *FPT =
    CallOperator->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getResultType();

  // Get the address of the call operator.
  GlobalDecl GD(CallOperator);
  const CGFunctionInfo &CalleeFnInfo =
    CGM.getTypes().arrangeFunctionCall(ResultType, CallArgs, FPT->getExtInfo(),
                                       RequiredArgs::forPrototypePlus(FPT, 1));
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(CalleeFnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty);

  // Determine whether we have a return value slot to use.
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      CurFnInfo->getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(CurFnInfo->getReturnType()))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());

  // Now emit our call.
  RValue RV = EmitCall(CalleeFnInfo, Callee, Slot, CallArgs, CallOperator);

  // Forward the returned value.
  if (!ResultType->isVoidType() && Slot.isNull())
    EmitReturnOfRValue(RV, ResultType);
}

void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}
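// Illustrative note (the example code below is hypothetical, not from this
// file): EmitLambdaBlockInvokeBody above emits the body of the block a
// lambda converts to under the blocks extension, roughly:
//
//   auto l = [](int x) { return x + 1; };
//   int (^blk)(int) = l;  // blk's invoke function forwards to l's operator()
//
// The block's single capture is the lambda object itself, which is why the
// forwarded 'this' pointer is loaded from the capture via GetAddrOfBlockDecl.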
void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
    return;
  }

  EmitFunctionBody(Args);
}

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr =
    llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}
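// Illustrative note (the example code below is hypothetical, not from this
// file): the static invoker emitted via EmitLambdaStaticInvokeFunction is
// what backs the conversion of a capture-less lambda to a plain function
// pointer:
//
//   auto less = [](int a, int b) { return a < b; };
//   bool (*fp)(int, int) = less; // binds to the static invoker
//   fp(1, 2);                    // delegates to the lambda's operator()
//
// Because the closure type is empty, the delegating body above can pass an
// undef 'this'; the call operator never inspects it.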