//===--- CGClass.cpp - Emit LLVM Code for C++ classes --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGDebugInfo.h"
#include "CodeGenFunction.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Frontend/CodeGenOptions.h"

using namespace clang;
using namespace CodeGen;

static CharUnits
ComputeNonVirtualBaseClassOffset(ASTContext &Context,
                                 const CXXRecordDecl *DerivedClass,
                                 CastExpr::path_const_iterator Start,
                                 CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
    ComputeNonVirtualBaseClassOffset(getContext(), ClassDecl,
                                     PathBegin, PathEnd);
  if (Offset.isZero())
    return 0;

  llvm::Type *PtrDiffTy =
    Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
llvm::Value *
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(llvm::Value *This,
                                                  const CXXRecordDecl *Derived,
                                                  const CXXRecordDecl *Base,
                                                  bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This->getType()->isPointerTy() &&
         cast<llvm::PointerType>(This->getType())->getElementType()
           == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
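  // Until then, do the arithmetic on an i8* so the GEP below works in
  // bytes, which is the unit of the CharUnits offset computed above.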
  llvm::Value *V = This;
  if (Offset.isPositive()) {
    V = Builder.CreateBitCast(V, Int8PtrTy);
    V = Builder.CreateConstInBoundsGEP1_64(V, Offset.getQuantity());
  }
  V = Builder.CreateBitCast(V, ConvertType(Base)->getPointerTo());

  return V;
}

static llvm::Value *
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, llvm::Value *ptr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != 0);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    baseOffset = llvm::ConstantInt::get(CGF.PtrDiffTy,
                                        nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8PtrTy);
  ptr = CGF.Builder.CreateInBoundsGEP(ptr, baseOffset, "add.ptr");
  return ptr;
}

llvm::Value *
CodeGenFunction::GetAddressOfBaseClass(llvm::Value *Value,
                                       const CXXRecordDecl *Derived,
                                       CastExpr::path_const_iterator PathBegin,
                                       CastExpr::path_const_iterator PathEnd,
                                       bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = 0;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase =
      cast<CXXRecordDecl>((*Start)->getType()->getAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset =
    ComputeNonVirtualBaseClassOffset(getContext(), VBase ? VBase : Derived,
                                     Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = 0; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BasePtrTy =
    ConvertType((PathEnd[-1])->getType())->getPointerTo();

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    return Builder.CreateBitCast(Value, BasePtrTy);
  }

  llvm::BasicBlock *origBB = 0;
  llvm::BasicBlock *endBB = 0;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
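  // The null path branches straight to the phi at the end, which merges
  // the adjusted pointer with a null of the destination type.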
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = 0;
  if (VBase) {
    VirtualOffset = GetVirtualBaseClassOffset(Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value,
                                          NonVirtualOffset,
                                          VirtualOffset);

  // Cast to the destination type.
  Value = Builder.CreateBitCast(Value, BasePtrTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = PHI;
  }

  return Value;
}

llvm::Value *
CodeGenFunction::GetAddressOfDerivedClass(llvm::Value *Value,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  llvm::Type *DerivedPtrTy = ConvertType(DerivedTy)->getPointerTo();

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateBitCast(Value, DerivedPtrTy);
  }

  llvm::BasicBlock *CastNull = 0;
  llvm::BasicBlock *CastNotNull = 0;
  llvm::BasicBlock *CastEnd = 0;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(Value);
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  Value = Builder.CreateBitCast(Value, Int8PtrTy);
  Value = Builder.CreateGEP(Value, Builder.CreateNeg(NonVirtualOffset),
                            "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()),
                     CastNull);
    Value = PHI;
  }

  return Value;
}

/// GetVTTParameter - Return the VTT parameter that should be passed to a
/// base constructor/destructor with virtual bases.
static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
                                    bool ForVirtualBase) {
  if (!CodeGenVTables::needsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return 0;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CGF.CurFuncDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  llvm::Value *VTT;

  uint64_t SubVTTIndex;

  // If the record matches the base, this is the complete ctor/dtor
  // variant calling the base variant in a class with virtual bases.
  if (RD == Base) {
    assert(!CodeGenVTables::needsVTTParameter(CGF.CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout =
      CGF.getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGF.CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CodeGenVTables::needsVTTParameter(CGF.CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    VTT = CGF.LoadCXXVTT();
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    VTT = CGF.CGM.getVTables().GetAddrOfVTT(RD);
    VTT = CGF.Builder.CreateConstInBoundsGEP2_64(VTT, 0, SubVTTIndex);
  }

  return VTT;
}

namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      llvm::Value *Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThis(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual, Addr);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker : EvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef EvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(CXXThisExpr *E) { UsesThis = true; }
  };
}

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(const_cast<Expr*>(Init));
  return Checker.UsesThis;
}

static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit,
                                CXXCtorType CtorType) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  llvm::Value *ThisPtr = CGF.LoadCXXThis();

  const Type *BaseType = BaseInit->getBaseClass();
  CXXRecordDecl *BaseClassDecl =
    cast<CXXRecordDecl>(BaseType->getAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // The base constructor doesn't construct virtual bases.
  if (CtorType == Ctor_Base && isBaseVirtual)
    return;

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  llvm::Value *V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  CharUnits Alignment = CGF.getContext().getTypeAlignInChars(BaseType);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(V, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

static void EmitAggMemberInitializer(CodeGenFunction &CGF,
                                     LValue LHS,
                                     Expr *Init,
                                     llvm::Value *ArrayIndexVar,
                                     QualType T,
                                     ArrayRef<VarDecl *> ArrayIndexes,
                                     unsigned Index) {
  if (Index == ArrayIndexes.size()) {
    LValue LV = LHS;
    { // Scope for Cleanups.
      CodeGenFunction::RunCleanupsScope Cleanups(CGF);

      if (ArrayIndexVar) {
        // If we have an array index variable, load it and use it as an offset.
        // Then, increment the value.
        llvm::Value *Dest = LHS.getAddress();
        llvm::Value *ArrayIndex = CGF.Builder.CreateLoad(ArrayIndexVar);
        Dest = CGF.Builder.CreateInBoundsGEP(Dest, ArrayIndex, "destaddress");
        llvm::Value *Next = llvm::ConstantInt::get(ArrayIndex->getType(), 1);
        Next = CGF.Builder.CreateAdd(ArrayIndex, Next, "inc");
        CGF.Builder.CreateStore(Next, ArrayIndexVar);

        // Update the LValue.
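        // The element may be less strictly aligned than the member as a
        // whole, so use the smaller of the two alignments.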
        LV.setAddress(Dest);
        CharUnits Align = CGF.getContext().getTypeAlignInChars(T);
        LV.setAlignment(std::min(Align, LV.getAlignment()));
      }

      if (!CGF.hasAggregateLLVMType(T)) {
        CGF.EmitScalarInit(Init, /*decl*/ 0, LV, false);
      } else if (T->isAnyComplexType()) {
        CGF.EmitComplexExprIntoAddr(Init, LV.getAddress(),
                                    LV.isVolatileQualified());
      } else {
        AggValueSlot Slot =
          AggValueSlot::forLValue(LV,
                                  AggValueSlot::IsDestructed,
                                  AggValueSlot::DoesNotNeedGCBarriers,
                                  AggValueSlot::IsNotAliased);

        CGF.EmitAggExpr(Init, Slot);
      }
    }

    // Now, outside of the initializer cleanup scope, destroy the backing array
    // for a std::initializer_list member.
    CGF.MaybeEmitStdInitializerListCleanup(LV.getAddress(), Init);

    return;
  }

  const ConstantArrayType *Array = CGF.getContext().getAsConstantArrayType(T);
  assert(Array && "Array initialization without the array type?");
  llvm::Value *IndexVar
    = CGF.GetAddrOfLocalVar(ArrayIndexes[Index]);
  assert(IndexVar && "Array index variable not loaded");

  // Initialize this index variable to zero.
  llvm::Value* Zero
    = llvm::Constant::getNullValue(
        CGF.ConvertType(CGF.getContext().getSizeType()));
  CGF.Builder.CreateStore(Zero, IndexVar);

  // Start the loop with a block that tests the condition.
  llvm::BasicBlock *CondBlock = CGF.createBasicBlock("for.cond");
  llvm::BasicBlock *AfterFor = CGF.createBasicBlock("for.end");

  CGF.EmitBlock(CondBlock);

  llvm::BasicBlock *ForBody = CGF.createBasicBlock("for.body");
  // Generate: if (loop-index < number-of-elements) fall to the loop body,
  // otherwise, go to the block after the for-loop.
  uint64_t NumElements = Array->getSize().getZExtValue();
  llvm::Value *Counter = CGF.Builder.CreateLoad(IndexVar);
  llvm::Value *NumElementsPtr =
    llvm::ConstantInt::get(Counter->getType(), NumElements);
  llvm::Value *IsLess = CGF.Builder.CreateICmpULT(Counter, NumElementsPtr,
                                                  "isless");

  // If the condition is true, execute the body.
  CGF.Builder.CreateCondBr(IsLess, ForBody, AfterFor);

  CGF.EmitBlock(ForBody);
  llvm::BasicBlock *ContinueBlock = CGF.createBasicBlock("for.inc");

  {
    CodeGenFunction::RunCleanupsScope Cleanups(CGF);

    // Inside the loop body recurse to emit the inner loop or, eventually, the
    // constructor call.
    EmitAggMemberInitializer(CGF, LHS, Init, ArrayIndexVar,
                             Array->getElementType(), ArrayIndexes, Index + 1);
  }

  CGF.EmitBlock(ContinueBlock);

  // Emit the increment of the loop counter.
  llvm::Value *NextVal = llvm::ConstantInt::get(Counter->getType(), 1);
  Counter = CGF.Builder.CreateLoad(IndexVar);
  NextVal = CGF.Builder.CreateAdd(Counter, NextVal, "inc");
  CGF.Builder.CreateStore(NextVal, IndexVar);

  // Finally, branch back up to the condition for the next iteration.
  CGF.EmitBranch(CondBlock);

  // Emit the fall-through block.
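  // (Passing 'true' tells EmitBlock that no further branches to AfterFor
  // will be emitted.)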
  CGF.EmitBlock(AfterFor, true);
}

namespace {
  struct CallMemberDtor : EHScopeStack::Cleanup {
    llvm::Value *V;
    CXXDestructorDecl *Dtor;

    CallMemberDtor(llvm::Value *V, CXXDestructorDecl *Dtor)
      : V(V), Dtor(Dtor) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                                V);
    }
  };
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to
    // the field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    IndirectFieldDecl::chain_iterator I = IndirectField->chain_begin(),
      IEnd = IndirectField->chain_end();
    for ( ; I != IEnd; ++I)
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(*I));
    FieldType = MemberInit->getIndirectMember()->getAnonField()->getType();
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isImplicitlyDefined() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && CE->getConstructor()->isTrivial())) {
      // Find the source pointer. We know it's the last argument because
      // we know we're in an implicit copy constructor.
      unsigned SrcArgIndex = Args.size() - 1;
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
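      // (A flat copy is safe here: the element type is POD or has a
      // trivial copy/move constructor, as checked above.)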
      CGF.EmitAggregateCopy(LHS.getAddress(), Src.getAddress(), FieldType,
                            LHS.isVolatileQualified());
      return;
    }
  }

  ArrayRef<VarDecl *> ArrayIndexes;
  if (MemberInit->getNumArrayIndices())
    ArrayIndexes = MemberInit->getArrayIndexes();
  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit(), ArrayIndexes);
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field,
                                              LValue LHS, Expr *Init,
                                             ArrayRef<VarDecl *> ArrayIndexes) {
  QualType FieldType = Field->getType();
  if (!hasAggregateLLVMType(FieldType)) {
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
  } else if (FieldType->isAnyComplexType()) {
    EmitComplexExprIntoAddr(Init, LHS.getAddress(), LHS.isVolatileQualified());
  } else {
    llvm::Value *ArrayIndexVar = 0;
    if (ArrayIndexes.size()) {
      llvm::Type *SizeTy = ConvertType(getContext().getSizeType());

      // The LHS is a pointer to the first object we'll be constructing, as
      // a flat array.
      QualType BaseElementTy = getContext().getBaseElementType(FieldType);
      llvm::Type *BasePtr = ConvertType(BaseElementTy);
      BasePtr = llvm::PointerType::getUnqual(BasePtr);
      llvm::Value *BaseAddrPtr = Builder.CreateBitCast(LHS.getAddress(),
                                                       BasePtr);
      LHS = MakeAddrLValue(BaseAddrPtr, BaseElementTy);

      // Create an array index that will be used to walk over all of the
      // objects we're constructing.
      ArrayIndexVar = CreateTempAlloca(SizeTy, "object.index");
      llvm::Value *Zero = llvm::Constant::getNullValue(SizeTy);
      Builder.CreateStore(Zero, ArrayIndexVar);

      // Emit the block variables for the array indices, if any.
      for (unsigned I = 0, N = ArrayIndexes.size(); I != N; ++I)
        EmitAutoVarDecl(*ArrayIndexes[I]);
    }

    EmitAggMemberInitializer(*this, LHS, Init, ArrayIndexVar, FieldType,
                             ArrayIndexes, 0);

    if (!CGM.getLangOpts().Exceptions)
      return;

    // FIXME: If we have an array of classes w/ non-trivial destructors,
    // we need to destroy in reverse order of construction along the exception
    // path.
    const RecordType *RT = FieldType->getAs<RecordType>();
    if (!RT)
      return;

    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (!RD->hasTrivialDestructor())
      EHStack.pushCleanup<CallMemberDtor>(EHCleanup, LHS.getAddress(),
                                          RD->getDestructor());
  }
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
static bool IsConstructorDelegationValid(const CXXConstructorDecl *Ctor) {

  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always blacklist this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->getAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}

/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, Ctor->getLocEnd());
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args);
    return;
  }

  Stmt *Body = Ctor->getBody();

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  EHScopeStack::stable_iterator CleanupDepth = EHStack.stable_begin();

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  PopCleanupBlocks(CleanupDepth);

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  SmallVector<CXXCtorInitializer *, 8> MemberInitializers;

  for (CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
       E = CD->init_end();
       B != E; ++B) {
    CXXCtorInitializer *Member = (*B);

    if (Member->isBaseInitializer()) {
      EmitBaseInitializer(*this, ClassDecl, Member, CtorType);
    } else {
      assert(Member->isAnyMemberInitializer() &&
             "Delegating initializer on non-delegating constructor");
      MemberInitializers.push_back(Member);
    }
  }

  InitializeVTablePointers(ClassDecl);

  for (unsigned I = 0, E = MemberInitializers.size(); I != E; ++I)
    EmitMemberInitializer(*this, ClassDecl, MemberInitializers[I], CD, Args);
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (CXXRecordDecl::field_iterator I = BaseClassDecl->field_begin(),
       E = BaseClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  // Check non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       BaseClassDecl->bases_begin(), E = BaseClassDecl->bases_end();
       I != E; ++I) {
    if (I->isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
      cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (CXXRecordDecl::base_class_const_iterator I =
         BaseClassDecl->vbases_begin(), E = BaseClassDecl->vbases_end();
         I != E; ++I) {
      const CXXRecordDecl *VirtualBase =
        cast<CXXRecordDecl>(I->getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());
  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(ASTContext &Context,
                                               const CXXDestructorDecl *Dtor) {
  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *Field = *I;

    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;
  }

  return true;
}

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor.  Do so.
  if (DtorType == Dtor_Deleting) {
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                          LoadCXXThis());
    PopCleanupBlock();
    return;
  }

  Stmt *Body = Dtor->getBody();

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases.  But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks.
  switch (DtorType) {
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody &&
        CGM.getContext().getTargetInfo().getCXXABI() != CXXABI_Microsoft) {
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            LoadCXXThis());
      break;
    }
    // Fallthrough: act like we're in the base variant.

  case Dtor_Base:
    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(getContext(), Dtor))
      InitializeVTablePointers(Dtor->getParent());

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attributes::AlwaysInline);
    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(), CGF.LoadCXXThis(),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  class DestroyField : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
      : field(field), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      // Find the address of the field.
      llvm::Value *thisValue = CGF.LoadCXXThis();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}

/// EnterDtorCleanups - Push the cleanups that run at the end of a class's
/// destructor: the destructor calls for members and base classes, in
/// reverse order of their construction.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert(!DD->isTrivial() &&
         "Should not emit dtor epilogue for trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (CXXRecordDecl::base_class_const_iterator I =
         ClassDecl->vbases_begin(), E = ClassDecl->vbases_end();
         I != E; ++I) {
      const CXXBaseSpecifier &Base = *I;
      CXXRecordDecl *BaseClassDecl
        = cast<CXXRecordDecl>(Base.getType()->getAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);

  // Destroy non-virtual bases.
  for (CXXRecordDecl::base_class_const_iterator I =
       ClassDecl->bases_begin(), E = ClassDecl->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier &Base = *I;

    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Destroy direct fields.
  SmallVector<const FieldDecl *, 16> FieldDecls;
  for (CXXRecordDecl::field_iterator I = ClassDecl->field_begin(),
       E = ClassDecl->field_end(); I != E; ++I) {
    const FieldDecl *field = *I;
    QualType type = field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            const ConstantArrayType *arrayType,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
    emitArrayLength(arrayType, elementType, arrayBegin);

  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin,
                             argBegin, argEnd, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///   may be zero
/// \param argBegin,argEnd the arguments to evaluate and pass to the
///   constructor
/// \param arrayBegin a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///   zero-initialized before it is constructed
void
CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                            llvm::Value *numElements,
                                            llvm::Value *arrayBegin,
                                          CallExpr::const_arg_iterator argBegin,
                                            CallExpr::const_arg_iterator argEnd,
                                            bool zeroInitialize) {

  // It's legal for numElements to be zero.  This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays.  There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = 0;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }

  // Find the end of the array.
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(arrayBegin, numElements,
                                                    "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.

  QualType type = getContext().getTypeDeclType(ctor->getParent());

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(cur, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.

  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, *destroyer);
    }

    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/ false,
                           cur, argBegin, argEnd);
  }

  // Go to the next element.
  llvm::Value *next =
    Builder.CreateInBoundsGEP(cur, llvm::ConstantInt::get(SizeTy, 1),
                              "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
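  // (Successor 0 is the "count is zero" edge; it temporarily pointed at the
  // loop header because contBB had not been created yet.)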
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       llvm::Value *addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            addr);
}

void
CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                        CXXCtorType Type, bool ForVirtualBase,
                                        llvm::Value *This,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {

  CGDebugInfo *DI = getDebugInfo();
  if (DI &&
      CGM.getCodeGenOpts().getDebugInfo() == CodeGenOptions::LimitedDebugInfo) {
    // If debug info for this class has not been emitted then this is the
    // right time to do so.
    const CXXRecordDecl *Parent = D->getParent();
    DI->getOrCreateRecordType(CGM.getContext().getTypeDeclType(Parent),
                              Parent->getLocation());
  }

  if (D->isTrivial()) {
    if (ArgBeg == ArgEnd) {
      // Trivial default constructor, no codegen required.
      assert(D->isDefaultConstructor() &&
             "trivial 0-arg ctor not a default ctor");
      return;
    }

    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");

    const Expr *E = (*ArgBeg);
    QualType Ty = E->getType();
    llvm::Value *Src = EmitLValue(E).getAddress();
    EmitAggregateCopy(This, Src, Ty);
    return;
  }

  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(D, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D, Type);

  // FIXME: Provide a source location here.
  EmitCXXMemberCall(D, SourceLocation(), Callee, ReturnValueSlot(), This,
                    VTT, ArgBeg, ArgEnd);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                        llvm::Value *This, llvm::Value *Src,
                                        CallExpr::const_arg_iterator ArgBeg,
                                        CallExpr::const_arg_iterator ArgEnd) {
  if (D->isTrivial()) {
    assert(ArgBeg + 1 == ArgEnd && "unexpected argcount for trivial ctor");
    assert(D->isCopyOrMoveConstructor() &&
           "trivial 1-arg ctor not a copy/move ctor");
    EmitAggregateCopy(This, Src, (*ArgBeg)->getType());
    return;
  }
  llvm::Value *Callee = CGM.GetAddrOfCXXConstructor(D,
                                                    clang::Ctor_Complete);
  assert(D->isInstance() &&
         "Trying to emit a member call expr on a static method!");

  const FunctionProtoType *FPT = D->getType()->getAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This), D->getThisType(getContext()));

  // Push the src ptr.
  QualType QT = *(FPT->arg_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src), QT);

  // Skip over first argument (Src).
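  // (It was already pushed above, bitcast to the parameter type.)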
  ++ArgBeg;
  CallExpr::const_arg_iterator Arg = ArgBeg;
  for (FunctionProtoType::arg_type_iterator I = FPT->arg_type_begin()+1,
       E = FPT->arg_type_end(); I != E; ++I, ++Arg) {
    assert(Arg != ArgEnd && "Running over edge of argument list!");
    EmitCallArg(Args, *Arg, *I);
  }
  // Either we've emitted all the call args, or we have a call to a
  // variadic function.
  assert((Arg == ArgEnd || FPT->isVariadic()) &&
         "Extra arguments in non-variadic function!");
  // If we still have any arguments, emit them using the type of the argument.
  for (; Arg != ArgEnd; ++Arg) {
    QualType ArgType = Arg->getType();
    EmitCallArg(Args, *Arg, ArgType);
  }

  EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, RequiredArgs::All),
           Callee, ReturnValueSlot(), Args, D);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  DelegateArgs.add(RValue::get(LoadCXXThis()), (*I)->getType());
  ++I;

  // vtt
  if (llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(Ctor, CtorType),
                                         /*ForVirtualBase=*/false)) {
    QualType VoidPP = getContext().getPointerType(getContext().VoidPtrTy);
    DelegateArgs.add(RValue::get(VTT), VoidPP);

    if (CodeGenVTables::needsVTTParameter(CurGD)) {
      assert(I != E && "cannot skip vtt parameter, already done with args");
      assert((*I)->getType() == VoidPP && "skipping parameter not of vtt type");
      ++I;
    }
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    EmitDelegateCallArg(DelegateArgs, param);
  }

  EmitCall(CGM.getTypes().arrangeCXXConstructorDeclaration(Ctor, CtorType),
           CGM.GetAddrOfCXXConstructor(Ctor, CtorType),
           ReturnValueSlot(), DelegateArgs, Ctor);
}

namespace {
  struct CallDelegatingCtorDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, llvm::Value *Addr,
                           CXXDtorType Type)
      : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                Addr);
    }
  };
}

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  llvm::Value *ThisPtr = LoadCXXThis();

  QualType Ty = getContext().getTagDeclType(Ctor->getParent());
  CharUnits Alignment = getContext().getTypeAlignInChars(Ty);
  AggValueSlot AggSlot =
    AggValueSlot::forAddr(ThisPtr, Alignment, Qualifiers(),
                          AggValueSlot::IsDestructed,
                          AggValueSlot::DoesNotNeedGCBarriers,
                          AggValueSlot::IsNotAliased);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
      CurGD.getCtorType() == Ctor_Complete ?
        Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            llvm::Value *This) {
  llvm::Value *VTT = GetVTTParameter(*this, GlobalDecl(DD, Type),
                                     ForVirtualBase);
  llvm::Value *Callee = 0;
  if (getLangOpts().AppleKext)
    Callee = BuildAppleKextVirtualDestructorCall(DD, Type,
                                                 DD->getParent());

  if (!Callee)
    Callee = CGM.GetAddrOfCXXDestructor(DD, Type);

  // FIXME: Provide a source location here.
  EmitCXXMemberCall(DD, SourceLocation(), Callee, ReturnValueSlot(), This,
                    VTT, 0, 0);
}

namespace {
  struct CallLocalDtor : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    llvm::Value *Addr;

    CallLocalDtor(const CXXDestructorDecl *D, llvm::Value *Addr)
      : Dtor(D), Addr(Addr) {}

    void Emit(CodeGenFunction &CGF, Flags flags) {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false, Addr);
    }
  };
}

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            llvm::Value *Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, Addr);
}

llvm::Value *
CodeGenFunction::GetVirtualBaseClassOffset(llvm::Value *This,
                                           const CXXRecordDecl *ClassDecl,
                                          const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = GetVTablePtr(This, Int8PtrTy);
  CharUnits VBaseOffsetOffset =
    CGM.getVTableContext().getVirtualBaseOffsetOffset(ClassDecl,
                                                      BaseClassDecl);

  llvm::Value *VBaseOffsetPtr =
    Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetOffset.getQuantity(),
                               "vbase.offset.ptr");
  llvm::Type *PtrDiffTy =
    ConvertType(getContext().getPointerDiffType());

  VBaseOffsetPtr = Builder.CreateBitCast(VBaseOffsetPtr,
                                         PtrDiffTy->getPointerTo());

  llvm::Value *VBaseOffset = Builder.CreateLoad(VBaseOffsetPtr, "vbase.offset");

  return VBaseOffset;
}

void
CodeGenFunction::InitializeVTablePointer(BaseSubobject Base,
                                         const CXXRecordDecl *NearestVBase,
                                         CharUnits OffsetFromNearestVBase,
                                         llvm::Constant *VTable,
                                         const CXXRecordDecl *VTableClass) {
  const CXXRecordDecl *RD = Base.getBase();

  // Compute the address point.
  llvm::Value *VTableAddressPoint;

  // Check if we need to use a vtable from the VTT.
  if (CodeGenVTables::needsVTTParameter(CurGD) &&
      (RD->getNumVBases() || NearestVBase)) {
    // Get the secondary vpointer index.
    uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

    // Load the VTT.
    llvm::Value *VTT = LoadCXXVTT();
    if (VirtualPointerIndex)
      VTT = Builder.CreateConstInBoundsGEP1_64(VTT, VirtualPointerIndex);

    // And load the address point from the VTT.
    VTableAddressPoint = Builder.CreateLoad(VTT);
  } else {
    uint64_t AddressPoint =
      CGM.getVTableContext().getVTableLayout(VTableClass).getAddressPoint(Base);
    VTableAddressPoint =
      Builder.CreateConstInBoundsGEP2_64(VTable, 0, AddressPoint);
  }

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = 0;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CodeGenVTables::needsVTTParameter(CurGD) && NearestVBase) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = GetVirtualBaseClassOffset(LoadCXXThis(), VTableClass,
                                              NearestVBase);
    NonVirtualOffset = OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Base.getBaseOffset();
  }

  // Apply the offsets.
  llvm::Value *VTableField = LoadCXXThis();

  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(*this, VTableField,
                                                  NonVirtualOffset,
                                                  VirtualOffset);

  // Finally, store the address point.
  llvm::Type *AddressPointPtrTy =
    VTableAddressPoint->getType()->getPointerTo();
  VTableField = Builder.CreateBitCast(VTableField, AddressPointPtrTy);
  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  CGM.DecorateInstruction(Store, CGM.getTBAAInfoForVTablePtr());
}

void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ?
void
CodeGenFunction::InitializeVTablePointers(BaseSubobject Base,
                                          const CXXRecordDecl *NearestVBase,
                                          CharUnits OffsetFromNearestVBase,
                                          bool BaseIsNonVirtualPrimaryBase,
                                          llvm::Constant *VTable,
                                          const CXXRecordDecl *VTableClass,
                                          VisitedVirtualBasesSetTy& VBases) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    InitializeVTablePointer(Base, NearestVBase, OffsetFromNearestVBase,
                            VTable, VTableClass);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    CXXRecordDecl *BaseDecl
      = cast<CXXRecordDecl>(I->getType()->getAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I->isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl))
        continue;

      const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
        OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    InitializeVTablePointers(BaseSubobject(BaseDecl, BaseOffset),
                             I->isVirtual() ? BaseDecl : NearestVBase,
                             BaseOffsetFromNearestVBase,
                             BaseDeclIsNonVirtualPrimaryBase,
                             VTable, VTableClass, VBases);
  }
}

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Get the VTable.
  llvm::Constant *VTable = CGM.getVTables().GetAddrOfVTable(RD);

  // Initialize the vtable pointers for this class and all of its bases.
  VisitedVirtualBasesSetTy VBases;
  InitializeVTablePointers(BaseSubobject(RD, CharUnits::Zero()),
                           /*NearestVBase=*/0,
                           /*OffsetFromNearestVBase=*/CharUnits::Zero(),
                           /*BaseIsNonVirtualPrimaryBase=*/false,
                           VTable, RD, VBases);
}

llvm::Value *CodeGenFunction::GetVTablePtr(llvm::Value *This,
                                           llvm::Type *Ty) {
  llvm::Value *VTablePtrSrc = Builder.CreateBitCast(This, Ty->getPointerTo());
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  CGM.DecorateInstruction(VTable, CGM.getTBAAInfoForVTablePtr());
  return VTable;
}

static const CXXRecordDecl *getMostDerivedClassDecl(const Expr *Base) {
  const Expr *E = Base;

  while (true) {
    E = E->IgnoreParens();
    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase ||
          CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }

    break;
  }

  QualType DerivedType = E->getType();
  if (const PointerType *PTy = DerivedType->getAs<PointerType>())
    DerivedType = PTy->getPointeeType();

  return cast<CXXRecordDecl>(DerivedType->castAs<RecordType>()->getDecl());
}

// FIXME: Ideally Expr::IgnoreParenNoopCasts should do this, but it doesn't do
// quite what we want.
static const Expr *skipNoOpCastsAndParens(const Expr *E) {
  while (true) {
    if (const ParenExpr *PE = dyn_cast<ParenExpr>(E)) {
      E = PE->getSubExpr();
      continue;
    }

    if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
      if (CE->getCastKind() == CK_NoOp) {
        E = CE->getSubExpr();
        continue;
      }
    }
    if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
      if (UO->getOpcode() == UO_Extension) {
        E = UO->getSubExpr();
        continue;
      }
    }
    return E;
  }
}

/// canDevirtualizeMemberFunctionCall - Checks whether the given virtual member
/// function call on the given expr can be devirtualized.
static bool canDevirtualizeMemberFunctionCall(const Expr *Base,
                                              const CXXMethodDecl *MD) {
  // If the most derived class is marked final, we know that no subclass can
  // override this member function and so we can devirtualize it. For example:
  //
  //   struct A { virtual void f(); };
  //   struct B final : A { };
  //
  //   void f(B *b) {
  //     b->f();
  //   }
  //
  const CXXRecordDecl *MostDerivedClassDecl = getMostDerivedClassDecl(Base);
  if (MostDerivedClassDecl->hasAttr<FinalAttr>())
    return true;

  // If the member function is marked 'final', we know that it can't be
  // overridden and can therefore devirtualize it.
  if (MD->hasAttr<FinalAttr>())
    return true;
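  // For example (illustrative sketch): once B::f is 'final', a call through
  // a B* must resolve to B::f no matter how derived the dynamic type is.
  //
  //   struct A { virtual void f(); };
  //   struct B : A { void f() final; };
  //
  //   void g(B *b) { b->f(); }  // devirtualizable to B::f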
  // Similarly, if the class itself is marked 'final', no class can derive
  // from it, so the method can never be overridden and we can devirtualize
  // the member function call.
  if (MD->getParent()->hasAttr<FinalAttr>())
    return true;

  Base = skipNoOpCastsAndParens(Base);
  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Base)) {
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
      // If the variable itself has record (non-pointer) type, its dynamic
      // type is its static type, so we can devirtualize the call.
      return VD->getType()->isRecordType();
    }

    return false;
  }

  // We can always devirtualize calls on temporary object expressions.
  if (isa<CXXConstructExpr>(Base))
    return true;

  // And calls on bound temporaries.
  if (isa<CXXBindTemporaryExpr>(Base))
    return true;

  // Check if this is a call expr that returns a record type.
  if (const CallExpr *CE = dyn_cast<CallExpr>(Base))
    return CE->getCallReturnType()->isRecordType();

  // We can't devirtualize the call.
  return false;
}

static bool UseVirtualCall(ASTContext &Context,
                           const CXXOperatorCallExpr *CE,
                           const CXXMethodDecl *MD) {
  if (!MD->isVirtual())
    return false;

  // When building with -fapple-kext, all calls must go through the vtable,
  // since the kernel linker can do runtime patching of vtables.
  if (Context.getLangOpts().AppleKext)
    return true;

  return !canDevirtualizeMemberFunctionCall(CE->getArg(0), MD);
}

llvm::Value *
CodeGenFunction::EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
                                             const CXXMethodDecl *MD,
                                             llvm::Value *This) {
  llvm::FunctionType *fnType =
    CGM.getTypes().GetFunctionType(
        CGM.getTypes().arrangeCXXMethodDeclaration(MD));

  if (UseVirtualCall(getContext(), E, MD))
    return BuildVirtualCall(MD, This, fnType);

  return CGM.GetAddrOfFunction(MD, fnType);
}
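// The functions below emit the "forwarding" bodies used by lambdas: the block
// invoke function and the static invoker generated for a captureless lambda's
// conversion to a function pointer both simply delegate to the lambda's call
// operator. As a rough sketch of user code that reaches this path:
//
//   auto l = [](int x) { return x + 1; };
//   int (*fp)(int) = l;  // yields the static invoker
//   fp(41);              // the invoker forwards to the lambda's operator()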
void CodeGenFunction::EmitForwardingCallToLambda(const CXXRecordDecl *lambda,
                                                 CallArgList &callArgs) {
  // Look up the call operator.
  DeclarationName operatorName
    = getContext().DeclarationNames.getCXXOperatorName(OO_Call);
  CXXMethodDecl *callOperator =
    cast<CXXMethodDecl>(*lambda->lookup(operatorName).first);

  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
    CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Value *callee =
    CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                          CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
    callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getResultType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      hasAggregateLLVMType(calleeFnInfo.getReturnType()))
    returnSlot = ReturnValueSlot(ReturnValue, resultType.isVolatileQualified());

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot,
                       callArgs, callOperator);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull())
    EmitReturnOfRValue(RV, resultType);
}

void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = GetAddrOfBlockDecl(variable, false);
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (BlockDecl::param_const_iterator I = BD->param_begin(),
       E = BD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaToBlockPointerBody(FunctionArgList &Args) {
  if (cast<CXXMethodDecl>(CurFuncDecl)->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurFuncDecl, "lambda conversion to variadic function");
    return;
  }

  EmitFunctionBody(Args);
}

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for the forwarding call.
  CallArgList CallArgs;

  QualType ThisType =
    getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr = llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  EmitForwardingCallToLambda(Lambda, CallArgs);
}

void CodeGenFunction::EmitLambdaStaticInvokeFunction(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}