//===--- CGClass.cpp - Emit LLVM Code for C++ classes ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

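// Illustration (hypothetical types, not part of this file): for a
// non-virtual inheritance chain
//
//   struct A { int a; };
//   struct B : A { int b; };
//   struct C : B { int c; };
//
// a C -> B -> A conversion path walks one record layout per step and sums
// the statically known base offsets. With single non-virtual inheritance
// each step is typically zero, in which case GetNonVirtualBaseClassOffset
// returns null to signal that no pointer adjustment is required.
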
/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                   const CXXRecordDecl *Derived,
                                                   const CXXRecordDecl *Base,
                                                   bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the base, whether virtual or not.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != 0);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
                                        nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
  ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");
  return ptr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = 0; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *origBB = 0;
  llvm::BasicBlock *endBB = 0;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = 0;
  if (VBase) {
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast to the destination type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = PHI;
  }

  return Value;
}

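// Illustration of the 'final' devirtualization above (hypothetical types):
//
//   struct A { virtual ~A(); };
//   struct D final : virtual A { };
//
//   A *upcast(D *d) { return d; }  // virtual step from D to A
//
// A virtual step normally requires loading the vbase offset from D's
// vtable at run time, but since D is final the object cannot be a base
// subobject of anything more derived, so the offset is folded into the
// constant NonVirtualOffset taken from D's own record layout.
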
llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreateBitCast(Value, Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

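// Why a VTT exists at all (hypothetical hierarchy, Itanium C++ ABI):
//
//   struct A { virtual void f(); };
//   struct B : virtual A { B(); };
//   struct C : virtual A { C(); };
//   struct D : B, C { D(); };
//
// While D() is constructing its B and C subobjects, their vptrs must point
// at construction vtables, because the dynamic type is not yet D. The VTT
// is a flat array of such vtable address points; the complete constructor
// passes each base constructor the sub-VTT slice selected by
// getSubVTTIndex above.
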
namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

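// Example of an initializer that trips the check above (hypothetical):
//
//   int identify(void *);
//   struct A { A(int); };
//   struct B : A {
//     B() : A(identify(this)) {}  // mem-initializer references 'this'
//   };
//
// Because the argument expression contains a CXXThisExpr, the vtable
// pointers are initialized before the base subobject is constructed, in
// case the called code inspects the object dynamically.
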
static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
  CGF.EmitBlock(AfterFor, true);
}

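// Rough shape of what the recursion above emits for one array dimension,
// using the block names created in the code (sketch, not literal output):
//
//   index = 0;
//   for.cond: if (!(index < NumElements)) goto for.end;
//   for.body: <recurse: inner dimension, or the element initializer>
//   for.inc:  ++index; goto for.cond;
//   for.end:  ;
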
namespace {
  struct CallMemberDtor : EHScopeStack::Cleanup {
    llvm::Value *V;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
      : V(V), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                V);
    }
  };
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to
    // the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    IndirectFieldDecl::chain_iterator I = IndirectField->chain_begin(),
      IEnd = IndirectField->chain_end();
    for ( ; I != IEnd; ++I)
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(*I));
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && CE->getConstructor()->isTrivial())) {
      // Find the source pointer. We know it's the last argument because
      // we know we're in an implicit copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);

    if (!CGM.getLangOpts().Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
                                          RD->getDestructor());
  }
}

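// Sketch of the array path above (hypothetical field): for a member
// 'X xs[2][3]', the LHS is rebased to an X* over the flattened storage,
// the single "object.index" temporary strides through all six elements in
// order, and the per-dimension index variables recorded in the AST are
// emitted as well so the initializer expression can refer to them.
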
/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getContext().getTargetInfo().getCXXABI().hasConstructorVariants()) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

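// Delegation sketch (Itanium ABI constructor naming, hypothetical class):
//
//   struct S { S(int); /* no virtual bases */ };
//
// The complete-object constructor (C1) can be emitted as a plain call to
// the base-object constructor (C2), forwarding the incoming arguments,
// conceptually:
//
//   S::S(this, n) [C1] { S::S(this, n) [C2]; }
//
// which is the EmitDelegateCXXConstructorCall fast path taken above when
// IsConstructorDelegationValid approves it.
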
/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

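// Example of what the helpers above conclude (hypothetical types):
//
//   struct P { ~P() {} };   // user-provided, but empty, body
//   struct Q { P p; int n; };
//
// Destroying a Q runs only trivial-bodied destructors, so
// FieldHasTrivialDestructorBody returns true for a field of type Q; this
// feeds CanSkipVTablePointerInitialization below, letting a dynamic class
// skip re-storing its vtable pointers in such a destructor.
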
/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody &&
        CGM.getContext().getTargetInfo().getCXXABI().hasDestructorVariants()) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

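// Variant delegation sketch (Itanium ABI destructor naming):
//
//   D0 (deleting): run the complete destructor, then operator delete
//   D1 (complete): destroy virtual bases, then delegate to D2
//   D2 (base):     run the body, then fields and non-virtual bases
//
// The code above follows this chain exactly, except that a
// function-try-block body makes D1 fall through into the D2 work instead
// of calling it, to avoid emitting two handler blocks.
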
namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

/// EnterDtorCleanups - Push the cleanups that run at the end of a class's
/// destructor: the destructor calls for members and base classes, in
/// reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
         ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

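// Sketch of the emitted control flow, using the block names created above
// (the zero check exists only for a non-constant element count):
//
//   entry:          br isempty, arrayctor.cont*, new.ctorloop
//   arrayctor.loop: cur = phi [arrayBegin, pred], [next, loop]
//                   <zero-init?> <construct *cur>  next = cur + 1
//                   br (next == arrayEnd), arrayctor.cont, arrayctor.loop
//
// (*) the branch is created with a placeholder successor and patched to
// arrayctor.cont via setSuccessor(0, contBB) once that block exists.
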
void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI &&
      CGM.getCodeGenOpts().getDebugInfo() == CodeGenOptions::LimitedDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type), ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  // FIXME: Provide a source location here.
  EmitCXXMemberCall(D, SourceLocation(), Callee, ReturnValueSlot(), This,
                    VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
           Callee, ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                Addr);
    }
  };
}

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = 0;
  if (getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  // FIXME: Provide a source location here.
  EmitCXXMemberCall(DD, SourceLocation(), Callee, ReturnValueSlot(), This,
                    VTT, 0, 0);
}

namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                           const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl, BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

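// Lookup sketch: in the Itanium C++ ABI, each vtable stores one ptrdiff_t
// "vbase offset" entry per virtual base at a negative offset from the
// address point. The loads above compute, conceptually:
//
//   vptr        = *(char **)This;
//   vbaseOffset = *(ptrdiff_t *)(vptr + VBaseOffsetOffset);
//   basePtr     = (char *)This + vbaseOffset;
//
// where VBaseOffsetOffset is the (typically negative) layout constant
// returned by getVirtualBaseOffsetOffset.
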
void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr());
}

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

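// Traversal sketch (hypothetical diamond):
//
//   struct A { virtual void f(); };
//   struct B : virtual A { };
//   struct C : virtual A { };
//   struct D : B, C { };
//
// Initializing D's vptrs visits D, B, C, and -- exactly once, thanks to
// the VBases set -- the shared A subobject. A base that is the
// non-virtual primary base of its parent is skipped, since its vptr slot
// was already written when the parent's address point was stored.
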
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr());
  return VTable;
}

static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }
    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }
    return E;
  }
}
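
// For example, skipNoOpCastsAndParens reduces an expression like
// '((__extension__ (b)))' to the underlying DeclRefExpr 'b', which is what
// the devirtualization logic below wants to inspect.
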
/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden and can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
    return true;

  // Similarly, if the method's class itself is marked 'final', nothing can
  // derive from it, so the method can't be overridden and we can devirtualize
  // the call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // If the variable itself has record type (it is not a pointer or
      // reference), its dynamic type is its static type, so we can
      // devirtualize the call.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}

static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}

llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
                             CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}
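
// The invoker bodies emitted below never clone the lambda's call operator;
// they build an argument list and forward to it. A sketch of the source
// pattern this supports (example code, not from this file):
//
//   auto l = [](int x) { return x + 1; };
//   int (*fp)(int) = l;  // conversion goes through the static invoker
//
// Only captureless lambdas convert to function pointers, which is why the
// delegating invoker below can pass an undef 'this': the call operator of a
// captureless lambda never inspects its object.
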
void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *lambda,
                                                 CallArgList &callArgs) {
  // Look up the call operator.
  DeclarationName operatorName
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  CXXMethodDecl *callOperator =
    cast<CXXMethodDecl>(lambda->lookup(operatorName).front());

  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Value *callee =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getResultType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(calleeFnInfo.getReturnType()))
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
                       callArgs, callOperator);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull())
    EmitReturnOfRValue(RV, resultType);
  else
    EmitBranchThroughCleanup(ReturnBlock);
}

void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
    return;
  }

  EmitFunctionBody(Args);
}

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr =
    llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}