//===--- CGExprAgg.cpp - Emit LLVM Code from Aggregate Expressions --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Aggregate Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CGCXXABI.h"
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/StmtVisitor.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
//                        Aggregate Expression Emitter
//===----------------------------------------------------------------------===//

namespace {
class AggExprEmitter : public StmtVisitor<AggExprEmitter> {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  AggValueSlot Dest;
  bool IsResultUnused;

  AggValueSlot EnsureSlot(QualType T) {
    if (!Dest.isIgnored()) return Dest;
    return CGF.CreateAggTemp(T, "agg.tmp.ensured");
  }
  void EnsureDest(QualType T) {
    if (!Dest.isIgnored()) return;
    Dest = CGF.CreateAggTemp(T, "agg.tmp.ensured");
  }

  // Calls `Fn` with a valid return value slot, potentially creating a temporary
  // to do so. If a temporary is created, an appropriate copy into `Dest` will
  // be emitted, as will lifetime markers.
  //
  // The given function should take a ReturnValueSlot, and return an RValue that
  // points to said slot.
  void withReturnValueSlot(const Expr *E,
                           llvm::function_ref<RValue(ReturnValueSlot)> Fn);

public:
  AggExprEmitter(CodeGenFunction &cgf, AggValueSlot Dest, bool IsResultUnused)
      : CGF(cgf), Builder(CGF.Builder), Dest(Dest),
        IsResultUnused(IsResultUnused) {}

  //===--------------------------------------------------------------------===//
  //                               Utilities
  //===--------------------------------------------------------------------===//

  /// EmitAggLoadOfLValue - Given an expression with aggregate type that
  /// represents a value lvalue, this method emits the address of the lvalue,
  /// then loads the result into DestPtr.
  void EmitAggLoadOfLValue(const Expr *E);

  enum ExprValueKind {
    EVK_RValue,
    EVK_NonRValue
  };

  /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
  /// SrcIsRValue is true if source comes from an RValue.
  void EmitFinalDestCopy(QualType type, const LValue &src,
                         ExprValueKind SrcValueKind = EVK_NonRValue);
  void EmitFinalDestCopy(QualType type, RValue src);
  void EmitCopy(QualType type, const AggValueSlot &dest,
                const AggValueSlot &src);

  void EmitMoveFromReturnSlot(const Expr *E, RValue Src);

  void EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
                     QualType ArrayQTy, InitListExpr *E);

  AggValueSlot::NeedsGCBarriers_t needsGC(QualType T) {
    if (CGF.getLangOpts().getGC() && TypeRequiresGCollection(T))
      return AggValueSlot::NeedsGCBarriers;
    return AggValueSlot::DoesNotNeedGCBarriers;
  }

  bool TypeRequiresGCollection(QualType T);

  //===--------------------------------------------------------------------===//
  //                            Visitor Methods
  //===--------------------------------------------------------------------===//

  void Visit(Expr *E) {
    ApplyDebugLocation DL(CGF, E);
    StmtVisitor<AggExprEmitter>::Visit(E);
  }

  void VisitStmt(Stmt *S) {
    CGF.ErrorUnsupported(S, "aggregate expression");
  }
  void VisitParenExpr(ParenExpr *PE) { Visit(PE->getSubExpr()); }
  void VisitGenericSelectionExpr(GenericSelectionExpr *GE) {
    Visit(GE->getResultExpr());
  }
  void VisitCoawaitExpr(CoawaitExpr *E) {
    CGF.EmitCoawaitExpr(*E, Dest, IsResultUnused);
  }
  void VisitCoyieldExpr(CoyieldExpr *E) {
    CGF.EmitCoyieldExpr(*E, Dest, IsResultUnused);
  }
  void VisitUnaryCoawait(UnaryOperator *E) { Visit(E->getSubExpr()); }
  void VisitUnaryExtension(UnaryOperator *E) { Visit(E->getSubExpr()); }
  void VisitSubstNonTypeTemplateParmExpr(SubstNonTypeTemplateParmExpr *E) {
    return Visit(E->getReplacement());
  }

  void VisitConstantExpr(ConstantExpr *E) {
    if (llvm::Value *Result = ConstantEmitter(CGF).tryEmitConstantExpr(E)) {
      CGF.EmitAggregateStore(Result, Dest.getAddress(),
                             E->getType().isVolatileQualified());
      return;
    }
    return Visit(E->getSubExpr());
  }

  // l-values.
  void VisitDeclRefExpr(DeclRefExpr *E) { EmitAggLoadOfLValue(E); }
  void VisitMemberExpr(MemberExpr *ME) { EmitAggLoadOfLValue(ME); }
  void VisitUnaryDeref(UnaryOperator *E) { EmitAggLoadOfLValue(E); }
  void VisitStringLiteral(StringLiteral *E) { EmitAggLoadOfLValue(E); }
  void VisitCompoundLiteralExpr(CompoundLiteralExpr *E);
  void VisitArraySubscriptExpr(ArraySubscriptExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitPredefinedExpr(const PredefinedExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  // Operators.
  void VisitCastExpr(CastExpr *E);
  void VisitCallExpr(const CallExpr *E);
  void VisitStmtExpr(const StmtExpr *E);
  void VisitBinaryOperator(const BinaryOperator *BO);
  void VisitPointerToDataMemberBinaryOperator(const BinaryOperator *BO);
  void VisitBinAssign(const BinaryOperator *E);
  void VisitBinComma(const BinaryOperator *E);
  void VisitBinCmp(const BinaryOperator *E);
  void VisitCXXRewrittenBinaryOperator(CXXRewrittenBinaryOperator *E) {
    Visit(E->getSemanticForm());
  }

  void VisitObjCMessageExpr(ObjCMessageExpr *E);
  void VisitObjCIvarRefExpr(ObjCIvarRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  void VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E);
  void VisitAbstractConditionalOperator(const AbstractConditionalOperator *CO);
  void VisitChooseExpr(const ChooseExpr *CE);
  void VisitInitListExpr(InitListExpr *E);
  void VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E,
                              llvm::Value *outerBegin = nullptr);
  void VisitImplicitValueInitExpr(ImplicitValueInitExpr *E);
  void VisitNoInitExpr(NoInitExpr *E) { } // Do nothing.
  void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    CodeGenFunction::CXXDefaultArgExprScope Scope(CGF, DAE);
    Visit(DAE->getExpr());
  }
  void VisitCXXDefaultInitExpr(CXXDefaultInitExpr *DIE) {
    CodeGenFunction::CXXDefaultInitExprScope Scope(CGF, DIE);
    Visit(DIE->getExpr());
  }
  void VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E);
  void VisitCXXConstructExpr(const CXXConstructExpr *E);
  void VisitCXXInheritedCtorInitExpr(const CXXInheritedCtorInitExpr *E);
  void VisitLambdaExpr(LambdaExpr *E);
  void VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr *E);
  void VisitExprWithCleanups(ExprWithCleanups *E);
  void VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E);
  void VisitCXXTypeidExpr(CXXTypeidExpr *E) { EmitAggLoadOfLValue(E); }
  void VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E);
  void VisitOpaqueValueExpr(OpaqueValueExpr *E);

  void VisitPseudoObjectExpr(PseudoObjectExpr *E) {
    if (E->isGLValue()) {
      LValue LV = CGF.EmitPseudoObjectLValue(E);
      return EmitFinalDestCopy(E->getType(), LV);
    }

    CGF.EmitPseudoObjectRValue(E, EnsureSlot(E->getType()));
  }

  void VisitVAArgExpr(VAArgExpr *E);

  void EmitInitializationToLValue(Expr *E, LValue Address);
  void EmitNullInitializationToLValue(LValue Address);
  //  case Expr::ChooseExprClass:
  void VisitCXXThrowExpr(const CXXThrowExpr *E) { CGF.EmitCXXThrowExpr(E); }
  void VisitAtomicExpr(AtomicExpr *E) {
    RValue Res = CGF.EmitAtomicExpr(E);
    EmitFinalDestCopy(E->getType(), Res);
  }
};
}  // end anonymous namespace.

//===----------------------------------------------------------------------===//
//                                Utilities
//===----------------------------------------------------------------------===//

/// EmitAggLoadOfLValue - Given an expression with aggregate type that
/// represents a value lvalue, this method emits the address of the lvalue,
/// then loads the result into DestPtr.
void AggExprEmitter::EmitAggLoadOfLValue(const Expr *E) {
  LValue LV = CGF.EmitLValue(E);

  // If the type of the l-value is atomic, then do an atomic load.
  if (LV.getType()->isAtomicType() || CGF.LValueIsSuitableForInlineAtomic(LV)) {
    CGF.EmitAtomicLoad(LV, E->getExprLoc(), Dest);
    return;
  }

  EmitFinalDestCopy(E->getType(), LV);
}

/// True if the given aggregate type requires special GC API calls.
bool AggExprEmitter::TypeRequiresGCollection(QualType T) {
  // Only record types have members that might require garbage collection.
  const RecordType *RecordTy = T->getAs<RecordType>();
  if (!RecordTy) return false;

  // Don't mess with non-trivial C++ types.
  RecordDecl *Record = RecordTy->getDecl();
  if (isa<CXXRecordDecl>(Record) &&
      (cast<CXXRecordDecl>(Record)->hasNonTrivialCopyConstructor() ||
       !cast<CXXRecordDecl>(Record)->hasTrivialDestructor()))
    return false;

  // Check whether the type has an object member.
  return Record->hasObjectMember();
}
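
// For example, given `struct S f(void); struct S s = f();`, the call below can
// normally write its result straight into `s`.  A separate lifetime-marked
// temporary is used instead when the destination may be aliased, requires GC
// write barriers, or when a non-trivial C struct result must be destroyed but
// no destination slot was provided.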

void AggExprEmitter::withReturnValueSlot(
    const Expr *E, llvm::function_ref<RValue(ReturnValueSlot)> EmitCall) {
  QualType RetTy = E->getType();
  bool RequiresDestruction =
      !Dest.isExternallyDestructed() &&
      RetTy.isDestructedType() == QualType::DK_nontrivial_c_struct;

  // If it makes no observable difference, save a memcpy + temporary.
  //
  // We need to always provide our own temporary if destruction is required.
  // Otherwise, EmitCall will emit its own, notice that it's "unused", and end
  // its lifetime before we have the chance to emit a proper destructor call.
  bool UseTemp = Dest.isPotentiallyAliased() || Dest.requiresGCollection() ||
                 (RequiresDestruction && !Dest.getAddress().isValid());

  Address RetAddr = Address::invalid();
  Address RetAllocaAddr = Address::invalid();

  EHScopeStack::stable_iterator LifetimeEndBlock;
  llvm::Value *LifetimeSizePtr = nullptr;
  llvm::IntrinsicInst *LifetimeStartInst = nullptr;
  if (!UseTemp) {
    RetAddr = Dest.getAddress();
  } else {
    RetAddr = CGF.CreateMemTemp(RetTy, "tmp", &RetAllocaAddr);
    uint64_t Size =
        CGF.CGM.getDataLayout().getTypeAllocSize(CGF.ConvertTypeForMem(RetTy));
    LifetimeSizePtr = CGF.EmitLifetimeStart(Size, RetAllocaAddr.getPointer());
    if (LifetimeSizePtr) {
      LifetimeStartInst =
          cast<llvm::IntrinsicInst>(std::prev(Builder.GetInsertPoint()));
      assert(LifetimeStartInst->getIntrinsicID() ==
                 llvm::Intrinsic::lifetime_start &&
             "Last insertion wasn't a lifetime.start?");

      CGF.pushFullExprCleanup<CodeGenFunction::CallLifetimeEnd>(
          NormalEHLifetimeMarker, RetAllocaAddr, LifetimeSizePtr);
      LifetimeEndBlock = CGF.EHStack.stable_begin();
    }
  }

  RValue Src =
      EmitCall(ReturnValueSlot(RetAddr, Dest.isVolatile(), IsResultUnused,
                               Dest.isExternallyDestructed()));

  if (!UseTemp)
    return;

  assert(Dest.getPointer() != Src.getAggregatePointer());
  EmitFinalDestCopy(E->getType(), Src);

  if (!RequiresDestruction && LifetimeStartInst) {
    // If there's no dtor to run, the copy was the last use of our temporary.
    // Since we're not guaranteed to be in an ExprWithCleanups, clean up
    // eagerly.
    CGF.DeactivateCleanupBlock(LifetimeEndBlock, LifetimeStartInst);
    CGF.EmitLifetimeEnd(LifetimeSizePtr, RetAllocaAddr.getPointer());
  }
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(QualType type, RValue src) {
  assert(src.isAggregate() && "value must be aggregate value!");
  LValue srcLV = CGF.MakeAddrLValue(src.getAggregateAddress(), type);
  EmitFinalDestCopy(type, srcLV, EVK_RValue);
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(QualType type, const LValue &src,
                                       ExprValueKind SrcValueKind) {
  // If Dest is ignored, then we're evaluating an aggregate expression
  // in a context that doesn't care about the result.  Note that loads
  // from volatile l-values force the existence of a non-ignored
  // destination.
  if (Dest.isIgnored())
    return;

  // Copy non-trivial C structs here.
  LValue DstLV = CGF.MakeAddrLValue(
      Dest.getAddress(), Dest.isVolatile() ? type.withVolatile() : type);

  if (SrcValueKind == EVK_RValue) {
    if (type.isNonTrivialToPrimitiveDestructiveMove() == QualType::PCK_Struct) {
      if (Dest.isPotentiallyAliased())
        CGF.callCStructMoveAssignmentOperator(DstLV, src);
      else
        CGF.callCStructMoveConstructor(DstLV, src);
      return;
    }
  } else {
    if (type.isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
      if (Dest.isPotentiallyAliased())
        CGF.callCStructCopyAssignmentOperator(DstLV, src);
      else
        CGF.callCStructCopyConstructor(DstLV, src);
      return;
    }
  }

  AggValueSlot srcAgg = AggValueSlot::forLValue(
      src, CGF, AggValueSlot::IsDestructed, needsGC(type),
      AggValueSlot::IsAliased, AggValueSlot::MayOverlap);
  EmitCopy(type, Dest, srcAgg);
}

/// Perform a copy from the source into the destination.
///
/// \param type - the type of the aggregate being copied; qualifiers are
///   ignored
void AggExprEmitter::EmitCopy(QualType type, const AggValueSlot &dest,
                              const AggValueSlot &src) {
  if (dest.requiresGCollection()) {
    CharUnits sz = dest.getPreferredSize(CGF.getContext(), type);
    llvm::Value *size = llvm::ConstantInt::get(CGF.SizeTy, sz.getQuantity());
    CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF,
                                                      dest.getAddress(),
                                                      src.getAddress(),
                                                      size);
    return;
  }

  // If the result of the assignment is used, copy the LHS there also.
  // It's volatile if either side is.  Use the minimum alignment of
  // the two sides.
  LValue DestLV = CGF.MakeAddrLValue(dest.getAddress(), type);
  LValue SrcLV = CGF.MakeAddrLValue(src.getAddress(), type);
  CGF.EmitAggregateCopy(DestLV, SrcLV, type, dest.mayOverlap(),
                        dest.isVolatile() || src.isVolatile());
}

/// Emit the initializer for a std::initializer_list initialized with a
/// real initializer list.
void
AggExprEmitter::VisitCXXStdInitializerListExpr(CXXStdInitializerListExpr *E) {
  // Emit an array containing the elements.  The array is externally destructed
  // if the std::initializer_list object is.
  ASTContext &Ctx = CGF.getContext();
  LValue Array = CGF.EmitLValue(E->getSubExpr());
  assert(Array.isSimple() && "initializer_list array not a simple lvalue");
  Address ArrayPtr = Array.getAddress(CGF);

  const ConstantArrayType *ArrayType =
      Ctx.getAsConstantArrayType(E->getSubExpr()->getType());
  assert(ArrayType && "std::initializer_list constructed from non-array");

  // FIXME: Perform the checks on the field types in SemaInit.
  RecordDecl *Record = E->getType()->castAs<RecordType>()->getDecl();
  RecordDecl::field_iterator Field = Record->field_begin();
  if (Field == Record->field_end()) {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }

  // Start pointer.
  if (!Field->getType()->isPointerType() ||
      !Ctx.hasSameType(Field->getType()->getPointeeType(),
                       ArrayType->getElementType())) {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }

  AggValueSlot Dest = EnsureSlot(E->getType());
  LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
  LValue Start = CGF.EmitLValueForFieldInitialization(DestLV, *Field);
  llvm::Value *Zero = llvm::ConstantInt::get(CGF.PtrDiffTy, 0);
  llvm::Value *IdxStart[] = { Zero, Zero };
  llvm::Value *ArrayStart =
      Builder.CreateInBoundsGEP(ArrayPtr.getPointer(), IdxStart, "arraystart");
  CGF.EmitStoreThroughLValue(RValue::get(ArrayStart), Start);
  ++Field;

  if (Field == Record->field_end()) {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }

  llvm::Value *Size = Builder.getInt(ArrayType->getSize());
  LValue EndOrLength = CGF.EmitLValueForFieldInitialization(DestLV, *Field);
  if (Field->getType()->isPointerType() &&
      Ctx.hasSameType(Field->getType()->getPointeeType(),
                      ArrayType->getElementType())) {
    // End pointer.
    llvm::Value *IdxEnd[] = { Zero, Size };
    llvm::Value *ArrayEnd =
        Builder.CreateInBoundsGEP(ArrayPtr.getPointer(), IdxEnd, "arrayend");
    CGF.EmitStoreThroughLValue(RValue::get(ArrayEnd), EndOrLength);
  } else if (Ctx.hasSameType(Field->getType(), Ctx.getSizeType())) {
    // Length.
    CGF.EmitStoreThroughLValue(RValue::get(Size), EndOrLength);
  } else {
    CGF.ErrorUnsupported(E, "weird std::initializer_list");
    return;
  }
}
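
// For example, in `int a[10] = {1, 2, 3};` the trailing seven elements are
// initialized from the InitListExpr's array filler.  When that filler is
// equivalent to zero-initialization (see isTrivialFiller below), EmitArrayInit
// can skip emitting it entirely if the destination memory is already zeroed.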

/// Determine if E is a trivial array filler, that is, one that is
/// equivalent to zero-initialization.
static bool isTrivialFiller(Expr *E) {
  if (!E)
    return true;

  if (isa<ImplicitValueInitExpr>(E))
    return true;

  if (auto *ILE = dyn_cast<InitListExpr>(E)) {
    if (ILE->getNumInits())
      return false;
    return isTrivialFiller(ILE->getArrayFiller());
  }

  if (auto *Cons = dyn_cast_or_null<CXXConstructExpr>(E))
    return Cons->getConstructor()->isDefaultConstructor() &&
           Cons->getConstructor()->isTrivial();

  // FIXME: Are there other cases where we can avoid emitting an initializer?
  return false;
}
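
// EmitArrayInit below uses two strategies: if the explicitly initialized
// elements are trivially copyable and collectively large enough, the whole
// initializer is emitted as a private constant global and copied into place;
// otherwise each element is initialized in turn, and any remaining elements
// are filled in by a loop over the array filler (or zero-initialized).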

/// Emit initialization of an array from an initializer list.
void AggExprEmitter::EmitArrayInit(Address DestPtr, llvm::ArrayType *AType,
                                   QualType ArrayQTy, InitListExpr *E) {
  uint64_t NumInitElements = E->getNumInits();

  uint64_t NumArrayElements = AType->getNumElements();
  assert(NumInitElements <= NumArrayElements);

  QualType elementType =
      CGF.getContext().getAsArrayType(ArrayQTy)->getElementType();

  // DestPtr is an array*.  Construct an elementType* by drilling
  // down a level.
  llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0);
  llvm::Value *indices[] = { zero, zero };
  llvm::Value *begin =
      Builder.CreateInBoundsGEP(DestPtr.getPointer(), indices,
                                "arrayinit.begin");

  CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
  CharUnits elementAlign =
      DestPtr.getAlignment().alignmentOfArrayElement(elementSize);

  // Consider initializing the array by copying from a global. For this to be
  // more efficient than per-element initialization, the size of the elements
  // with explicit initializers should be large enough.
  if (NumInitElements * elementSize.getQuantity() > 16 &&
      elementType.isTriviallyCopyableType(CGF.getContext())) {
    CodeGen::CodeGenModule &CGM = CGF.CGM;
    ConstantEmitter Emitter(CGF);
    LangAS AS = ArrayQTy.getAddressSpace();
    if (llvm::Constant *C = Emitter.tryEmitForInitializer(E, AS, ArrayQTy)) {
      auto GV = new llvm::GlobalVariable(
          CGM.getModule(), C->getType(),
          CGM.isTypeConstant(ArrayQTy, /* ExcludeCtorDtor= */ true),
          llvm::GlobalValue::PrivateLinkage, C, "constinit",
          /* InsertBefore= */ nullptr, llvm::GlobalVariable::NotThreadLocal,
          CGM.getContext().getTargetAddressSpace(AS));
      Emitter.finalize(GV);
      CharUnits Align = CGM.getContext().getTypeAlignInChars(ArrayQTy);
      GV->setAlignment(Align.getAsAlign());
      EmitFinalDestCopy(ArrayQTy, CGF.MakeAddrLValue(GV, ArrayQTy, Align));
      return;
    }
  }

  // Exception safety requires us to destroy all the
  // already-constructed members if an initializer throws.
  // For that, we'll need an EH cleanup.
  QualType::DestructionKind dtorKind = elementType.isDestructedType();
  Address endOfInit = Address::invalid();
  EHScopeStack::stable_iterator cleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (CGF.needsEHCleanup(dtorKind)) {
    // In principle we could tell the cleanup where we are more
    // directly, but the control flow can get so varied here that it
    // would actually be quite complex.  Therefore we go through an
    // alloca.
    endOfInit = CGF.CreateTempAlloca(begin->getType(), CGF.getPointerAlign(),
                                     "arrayinit.endOfInit");
    cleanupDominator = Builder.CreateStore(begin, endOfInit);
    CGF.pushIrregularPartialArrayCleanup(begin, endOfInit, elementType,
                                         elementAlign,
                                         CGF.getDestroyer(dtorKind));
    cleanup = CGF.EHStack.stable_begin();

  // Otherwise, remember that we didn't need a cleanup.
  } else {
    dtorKind = QualType::DK_none;
  }

  llvm::Value *one = llvm::ConstantInt::get(CGF.SizeTy, 1);

  // The 'current element to initialize'.  The invariants on this
  // variable are complicated.
  // Essentially, after each iteration of the loop, it points to the
  // last initialized element, except that it points to the beginning
  // of the array before any elements have been initialized.
  llvm::Value *element = begin;

  // Emit the explicit initializers.
  for (uint64_t i = 0; i != NumInitElements; ++i) {
    // Advance to the next element.
    if (i > 0) {
      element = Builder.CreateInBoundsGEP(element, one, "arrayinit.element");

      // Tell the cleanup that it needs to destroy up to this
      // element.  TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
    }

    LValue elementLV =
        CGF.MakeAddrLValue(Address(element, elementAlign), elementType);
    EmitInitializationToLValue(E->getInit(i), elementLV);
  }

  // Check whether there's a non-trivial array-fill expression.
  Expr *filler = E->getArrayFiller();
  bool hasTrivialFiller = isTrivialFiller(filler);

  // Any remaining elements need to be zero-initialized, possibly
  // using the filler expression.  We can skip this if we're
  // emitting to zeroed memory.
  if (NumInitElements != NumArrayElements &&
      !(Dest.isZeroed() && hasTrivialFiller &&
        CGF.getTypes().isZeroInitializable(elementType))) {

    // Use an actual loop.  This is basically
    //   do { *array++ = filler; } while (array != end);

    // Advance to the start of the rest of the array.
    if (NumInitElements) {
      element = Builder.CreateInBoundsGEP(element, one, "arrayinit.start");
      if (endOfInit.isValid()) Builder.CreateStore(element, endOfInit);
    }

    // Compute the end of the array.
    llvm::Value *end = Builder.CreateInBoundsGEP(begin,
                     llvm::ConstantInt::get(CGF.SizeTy, NumArrayElements),
                                             "arrayinit.end");

    llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
    llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body");

    // Jump into the body.
    CGF.EmitBlock(bodyBB);
    llvm::PHINode *currentElement =
        Builder.CreatePHI(element->getType(), 2, "arrayinit.cur");
    currentElement->addIncoming(element, entryBB);

    // Emit the actual filler expression.
    {
      // C++1z [class.temporary]p5:
      //   when a default constructor is called to initialize an element of
      //   an array with no corresponding initializer [...] the destruction of
      //   every temporary created in a default argument is sequenced before
      //   the construction of the next array element, if any
      CodeGenFunction::RunCleanupsScope CleanupsScope(CGF);
      LValue elementLV = CGF.MakeAddrLValue(
          Address(currentElement, elementAlign), elementType);
      if (filler)
        EmitInitializationToLValue(filler, elementLV);
      else
        EmitNullInitializationToLValue(elementLV);
    }

    // Move on to the next element.
    llvm::Value *nextElement =
        Builder.CreateInBoundsGEP(currentElement, one, "arrayinit.next");

    // Tell the EH cleanup that we finished with the last element.
    if (endOfInit.isValid()) Builder.CreateStore(nextElement, endOfInit);

    // Leave the loop if we're done.
    llvm::Value *done = Builder.CreateICmpEQ(nextElement, end,
                                             "arrayinit.done");
    llvm::BasicBlock *endBB = CGF.createBasicBlock("arrayinit.end");
    Builder.CreateCondBr(done, endBB, bodyBB);
    currentElement->addIncoming(nextElement, Builder.GetInsertBlock());

    CGF.EmitBlock(endBB);
  }

  // Leave the partial-array cleanup if we entered one.
  if (dtorKind) CGF.DeactivateCleanupBlock(cleanup, cleanupDominator);
}

//===----------------------------------------------------------------------===//
//                            Visitor Methods
//===----------------------------------------------------------------------===//

void AggExprEmitter::VisitMaterializeTemporaryExpr(MaterializeTemporaryExpr *E){
  Visit(E->getSubExpr());
}
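
// An OpaqueValueExpr is either "unique", in which case its source expression
// is emitted exactly once right here, or it was bound earlier (for example by
// a GNU `?:` with an omitted middle operand, or by a pseudo-object
// expression), in which case the previously emitted l-value is copied into
// the destination.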

void AggExprEmitter::VisitOpaqueValueExpr(OpaqueValueExpr *e) {
  // If this is a unique OVE, just visit its source expression.
  if (e->isUnique())
    Visit(e->getSourceExpr());
  else
    EmitFinalDestCopy(e->getType(), CGF.getOrCreateOpaqueLValueMapping(e));
}

void
AggExprEmitter::VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
  if (Dest.isPotentiallyAliased() &&
      E->getType().isPODType(CGF.getContext())) {
    // For a POD type, just emit a load of the lvalue + a copy, because our
    // compound literal might alias the destination.
    EmitAggLoadOfLValue(E);
    return;
  }

  AggValueSlot Slot = EnsureSlot(E->getType());

  // Block-scope compound literals are destroyed at the end of the enclosing
  // scope in C.
  bool Destruct =
      !CGF.getLangOpts().CPlusPlus && !Slot.isExternallyDestructed();
  if (Destruct)
    Slot.setExternallyDestructed();

  CGF.EmitAggExpr(E->getInitializer(), Slot);

  if (Destruct)
    if (QualType::DestructionKind DtorKind = E->getType().isDestructedType())
      CGF.pushLifetimeExtendedDestroy(
          CGF.getCleanupKind(DtorKind), Slot.getAddress(), E->getType(),
          CGF.getDestroyer(DtorKind), DtorKind & EHCleanup);
}

/// Attempt to look through various unimportant expressions to find a
/// cast of the given kind.
static Expr *findPeephole(Expr *op, CastKind kind, const ASTContext &ctx) {
  op = op->IgnoreParenNoopCasts(ctx);
  if (auto castE = dyn_cast<CastExpr>(op)) {
    if (castE->getCastKind() == kind)
      return castE->getSubExpr();
  }
  return nullptr;
}

void AggExprEmitter::VisitCastExpr(CastExpr *E) {
  if (const auto *ECE = dyn_cast<ExplicitCastExpr>(E))
    CGF.CGM.EmitExplicitCastExprType(ECE, &CGF);
  switch (E->getCastKind()) {
  case CK_Dynamic: {
    // FIXME: Can this actually happen? We have no test coverage for it.
    assert(isa<CXXDynamicCastExpr>(E) && "CK_Dynamic without a dynamic_cast?");
    LValue LV = CGF.EmitCheckedLValue(E->getSubExpr(),
                                      CodeGenFunction::TCK_Load);
    // FIXME: Do we also need to handle property references here?
    if (LV.isSimple())
      CGF.EmitDynamicCast(LV.getAddress(CGF), cast<CXXDynamicCastExpr>(E));
    else
      CGF.CGM.ErrorUnsupported(E, "non-simple lvalue dynamic_cast");

    if (!Dest.isIgnored())
      CGF.CGM.ErrorUnsupported(E, "lvalue dynamic_cast with a destination");
    break;
  }

  case CK_ToUnion: {
    // Evaluate even if the destination is ignored.
    if (Dest.isIgnored()) {
      CGF.EmitAnyExpr(E->getSubExpr(), AggValueSlot::ignored(),
                      /*ignoreResult=*/true);
      break;
    }

    // GCC union extension
    QualType Ty = E->getSubExpr()->getType();
    Address CastPtr =
        Builder.CreateElementBitCast(Dest.getAddress(), CGF.ConvertType(Ty));
    EmitInitializationToLValue(E->getSubExpr(),
                               CGF.MakeAddrLValue(CastPtr, Ty));
    break;
  }

  case CK_LValueToRValueBitCast: {
    if (Dest.isIgnored()) {
      CGF.EmitAnyExpr(E->getSubExpr(), AggValueSlot::ignored(),
                      /*ignoreResult=*/true);
      break;
    }

    LValue SourceLV = CGF.EmitLValue(E->getSubExpr());
    Address SourceAddress =
        Builder.CreateElementBitCast(SourceLV.getAddress(CGF), CGF.Int8Ty);
    Address DestAddress =
        Builder.CreateElementBitCast(Dest.getAddress(), CGF.Int8Ty);
    llvm::Value *SizeVal = llvm::ConstantInt::get(
        CGF.SizeTy,
        CGF.getContext().getTypeSizeInChars(E->getType()).getQuantity());
    Builder.CreateMemCpy(DestAddress, SourceAddress, SizeVal);
    break;
  }

  case CK_DerivedToBase:
  case CK_BaseToDerived:
  case CK_UncheckedDerivedToBase: {
    llvm_unreachable("cannot perform hierarchy conversion in EmitAggExpr: "
                     "should have been unpacked before we got here");
  }

  case CK_NonAtomicToAtomic:
  case CK_AtomicToNonAtomic: {
    bool isToAtomic = (E->getCastKind() == CK_NonAtomicToAtomic);

    // Determine the atomic and value types.
    QualType atomicType = E->getSubExpr()->getType();
    QualType valueType = E->getType();
    if (isToAtomic) std::swap(atomicType, valueType);

    assert(atomicType->isAtomicType());
    assert(CGF.getContext().hasSameUnqualifiedType(valueType,
                          atomicType->castAs<AtomicType>()->getValueType()));

    // Just recurse normally if we're ignoring the result or the
    // atomic type doesn't change representation.
    if (Dest.isIgnored() || !CGF.CGM.isPaddedAtomicType(atomicType)) {
      return Visit(E->getSubExpr());
    }

    CastKind peepholeTarget =
        (isToAtomic ? CK_AtomicToNonAtomic : CK_NonAtomicToAtomic);

    // These two cases are reverses of each other; try to peephole them.
    if (Expr *op =
            findPeephole(E->getSubExpr(), peepholeTarget, CGF.getContext())) {
      assert(CGF.getContext().hasSameUnqualifiedType(op->getType(),
                                                     E->getType()) &&
             "peephole significantly changed types?");
      return Visit(op);
    }

    // If we're converting an r-value of non-atomic type to an r-value
    // of atomic type, just emit directly into the relevant sub-object.
    if (isToAtomic) {
      AggValueSlot valueDest = Dest;
      if (!valueDest.isIgnored() && CGF.CGM.isPaddedAtomicType(atomicType)) {
        // Zero-initialize.  (Strictly speaking, we only need to initialize
        // the padding at the end, but this is simpler.)
        if (!Dest.isZeroed())
          CGF.EmitNullInitialization(Dest.getAddress(), atomicType);

        // Build a GEP to refer to the subobject.
        Address valueAddr =
            CGF.Builder.CreateStructGEP(valueDest.getAddress(), 0);
        valueDest = AggValueSlot::forAddr(valueAddr,
                                          valueDest.getQualifiers(),
                                          valueDest.isExternallyDestructed(),
                                          valueDest.requiresGCollection(),
                                          valueDest.isPotentiallyAliased(),
                                          AggValueSlot::DoesNotOverlap,
                                          AggValueSlot::IsZeroed);
      }

      CGF.EmitAggExpr(E->getSubExpr(), valueDest);
      return;
    }

    // Otherwise, we're converting an atomic type to a non-atomic type.
    // Make an atomic temporary, emit into that, and then copy the value out.
    AggValueSlot atomicSlot =
        CGF.CreateAggTemp(atomicType, "atomic-to-nonatomic.temp");
    CGF.EmitAggExpr(E->getSubExpr(), atomicSlot);

    Address valueAddr = Builder.CreateStructGEP(atomicSlot.getAddress(), 0);
    RValue rvalue = RValue::getAggregate(valueAddr, atomicSlot.isVolatile());
    return EmitFinalDestCopy(valueType, rvalue);
  }
  case CK_AddressSpaceConversion:
    return Visit(E->getSubExpr());

  case CK_LValueToRValue:
    // If we're loading from a volatile type, force the destination
    // into existence.
    if (E->getSubExpr()->getType().isVolatileQualified()) {
      bool Destruct =
          !Dest.isExternallyDestructed() &&
          E->getType().isDestructedType() == QualType::DK_nontrivial_c_struct;
      if (Destruct)
        Dest.setExternallyDestructed();
      EnsureDest(E->getType());
      Visit(E->getSubExpr());

      if (Destruct)
        CGF.pushDestroy(QualType::DK_nontrivial_c_struct, Dest.getAddress(),
                        E->getType());

      return;
    }

    LLVM_FALLTHROUGH;

  case CK_NoOp:
  case CK_UserDefinedConversion:
  case CK_ConstructorConversion:
    assert(CGF.getContext().hasSameUnqualifiedType(E->getSubExpr()->getType(),
                                                   E->getType()) &&
           "Implicit cast types must be compatible");
    Visit(E->getSubExpr());
    break;

  case CK_LValueBitCast:
    llvm_unreachable("should not be emitting lvalue bitcast as rvalue");

  case CK_Dependent:
  case CK_BitCast:
  case CK_ArrayToPointerDecay:
  case CK_FunctionToPointerDecay:
  case CK_NullToPointer:
  case CK_NullToMemberPointer:
  case CK_BaseToDerivedMemberPointer:
  case CK_DerivedToBaseMemberPointer:
  case CK_MemberPointerToBoolean:
  case CK_ReinterpretMemberPointer:
  case CK_IntegralToPointer:
  case CK_PointerToIntegral:
  case CK_PointerToBoolean:
  case CK_ToVoid:
  case CK_VectorSplat:
  case CK_IntegralCast:
  case CK_BooleanToSignedIntegral:
  case CK_IntegralToBoolean:
  case CK_IntegralToFloating:
  case CK_FloatingToIntegral:
  case CK_FloatingToBoolean:
  case CK_FloatingCast:
  case CK_CPointerToObjCPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_AnyPointerToBlockPointerCast:
  case CK_ObjCObjectLValueCast:
  case CK_FloatingRealToComplex:
  case CK_FloatingComplexToReal:
  case CK_FloatingComplexToBoolean:
  case CK_FloatingComplexCast:
  case CK_FloatingComplexToIntegralComplex:
  case CK_IntegralRealToComplex:
  case CK_IntegralComplexToReal:
  case CK_IntegralComplexToBoolean:
  case CK_IntegralComplexCast:
  case CK_IntegralComplexToFloatingComplex:
  case CK_ARCProduceObject:
  case CK_ARCConsumeObject:
  case CK_ARCReclaimReturnedObject:
  case CK_ARCExtendBlockObject:
  case CK_CopyAndAutoreleaseBlockObject:
  case CK_BuiltinFnToFnPtr:
  case CK_ZeroToOCLOpaqueType:

  case CK_IntToOCLSampler:
  case CK_FixedPointCast:
  case CK_FixedPointToBoolean:
  case CK_FixedPointToIntegral:
  case CK_IntegralToFixedPoint:
    llvm_unreachable("cast kind invalid for aggregate types");
  }
}

void AggExprEmitter::VisitCallExpr(const CallExpr *E) {
  if (E->getCallReturnType(CGF.getContext())->isReferenceType()) {
    EmitAggLoadOfLValue(E);
    return;
  }

  withReturnValueSlot(E, [&](ReturnValueSlot Slot) {
    return CGF.EmitCallExpr(E, Slot);
  });
}

void AggExprEmitter::VisitObjCMessageExpr(ObjCMessageExpr *E) {
  withReturnValueSlot(E, [&](ReturnValueSlot Slot) {
    return CGF.EmitObjCMessageExpr(E, Slot);
  });
}

void AggExprEmitter::VisitBinComma(const BinaryOperator *E) {
  CGF.EmitIgnoredExpr(E->getLHS());
  Visit(E->getRHS());
}

void AggExprEmitter::VisitStmtExpr(const StmtExpr *E) {
  CodeGenFunction::StmtExprEvaluation eval(CGF);
  CGF.EmitCompoundStmt(*E->getSubStmt(), true, Dest);
}

enum CompareKind {
  CK_Less,
  CK_Greater,
  CK_Equal,
};
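
// EmitCompare lowers one primitive comparison used by VisitBinCmp below when
// rewriting `a <=> b`.  The predicate chosen depends on whether the operand
// type has a floating, signed integer, or unsigned integer representation;
// member pointers are only ever compared for equality.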
947*0b57cec5SDimitry Andric static llvm::Value *EmitCompare(CGBuilderTy &Builder, CodeGenFunction &CGF, 948*0b57cec5SDimitry Andric const BinaryOperator *E, llvm::Value *LHS, 949*0b57cec5SDimitry Andric llvm::Value *RHS, CompareKind Kind, 950*0b57cec5SDimitry Andric const char *NameSuffix = "") { 951*0b57cec5SDimitry Andric QualType ArgTy = E->getLHS()->getType(); 952*0b57cec5SDimitry Andric if (const ComplexType *CT = ArgTy->getAs<ComplexType>()) 953*0b57cec5SDimitry Andric ArgTy = CT->getElementType(); 954*0b57cec5SDimitry Andric 955*0b57cec5SDimitry Andric if (const auto *MPT = ArgTy->getAs<MemberPointerType>()) { 956*0b57cec5SDimitry Andric assert(Kind == CK_Equal && 957*0b57cec5SDimitry Andric "member pointers may only be compared for equality"); 958*0b57cec5SDimitry Andric return CGF.CGM.getCXXABI().EmitMemberPointerComparison( 959*0b57cec5SDimitry Andric CGF, LHS, RHS, MPT, /*IsInequality*/ false); 960*0b57cec5SDimitry Andric } 961*0b57cec5SDimitry Andric 962*0b57cec5SDimitry Andric // Compute the comparison instructions for the specified comparison kind. 963*0b57cec5SDimitry Andric struct CmpInstInfo { 964*0b57cec5SDimitry Andric const char *Name; 965*0b57cec5SDimitry Andric llvm::CmpInst::Predicate FCmp; 966*0b57cec5SDimitry Andric llvm::CmpInst::Predicate SCmp; 967*0b57cec5SDimitry Andric llvm::CmpInst::Predicate UCmp; 968*0b57cec5SDimitry Andric }; 969*0b57cec5SDimitry Andric CmpInstInfo InstInfo = [&]() -> CmpInstInfo { 970*0b57cec5SDimitry Andric using FI = llvm::FCmpInst; 971*0b57cec5SDimitry Andric using II = llvm::ICmpInst; 972*0b57cec5SDimitry Andric switch (Kind) { 973*0b57cec5SDimitry Andric case CK_Less: 974*0b57cec5SDimitry Andric return {"cmp.lt", FI::FCMP_OLT, II::ICMP_SLT, II::ICMP_ULT}; 975*0b57cec5SDimitry Andric case CK_Greater: 976*0b57cec5SDimitry Andric return {"cmp.gt", FI::FCMP_OGT, II::ICMP_SGT, II::ICMP_UGT}; 977*0b57cec5SDimitry Andric case CK_Equal: 978*0b57cec5SDimitry Andric return {"cmp.eq", FI::FCMP_OEQ, II::ICMP_EQ, II::ICMP_EQ}; 979*0b57cec5SDimitry Andric } 980*0b57cec5SDimitry Andric llvm_unreachable("Unrecognised CompareKind enum"); 981*0b57cec5SDimitry Andric }(); 982*0b57cec5SDimitry Andric 983*0b57cec5SDimitry Andric if (ArgTy->hasFloatingRepresentation()) 984*0b57cec5SDimitry Andric return Builder.CreateFCmp(InstInfo.FCmp, LHS, RHS, 985*0b57cec5SDimitry Andric llvm::Twine(InstInfo.Name) + NameSuffix); 986*0b57cec5SDimitry Andric if (ArgTy->isIntegralOrEnumerationType() || ArgTy->isPointerType()) { 987*0b57cec5SDimitry Andric auto Inst = 988*0b57cec5SDimitry Andric ArgTy->hasSignedIntegerRepresentation() ? 
InstInfo.SCmp : InstInfo.UCmp; 989*0b57cec5SDimitry Andric return Builder.CreateICmp(Inst, LHS, RHS, 990*0b57cec5SDimitry Andric llvm::Twine(InstInfo.Name) + NameSuffix); 991*0b57cec5SDimitry Andric } 992*0b57cec5SDimitry Andric 993*0b57cec5SDimitry Andric llvm_unreachable("unsupported aggregate binary expression should have " 994*0b57cec5SDimitry Andric "already been handled"); 995*0b57cec5SDimitry Andric } 996*0b57cec5SDimitry Andric 997*0b57cec5SDimitry Andric void AggExprEmitter::VisitBinCmp(const BinaryOperator *E) { 998*0b57cec5SDimitry Andric using llvm::BasicBlock; 999*0b57cec5SDimitry Andric using llvm::PHINode; 1000*0b57cec5SDimitry Andric using llvm::Value; 1001*0b57cec5SDimitry Andric assert(CGF.getContext().hasSameType(E->getLHS()->getType(), 1002*0b57cec5SDimitry Andric E->getRHS()->getType())); 1003*0b57cec5SDimitry Andric const ComparisonCategoryInfo &CmpInfo = 1004*0b57cec5SDimitry Andric CGF.getContext().CompCategories.getInfoForType(E->getType()); 1005*0b57cec5SDimitry Andric assert(CmpInfo.Record->isTriviallyCopyable() && 1006*0b57cec5SDimitry Andric "cannot copy non-trivially copyable aggregate"); 1007*0b57cec5SDimitry Andric 1008*0b57cec5SDimitry Andric QualType ArgTy = E->getLHS()->getType(); 1009*0b57cec5SDimitry Andric 1010*0b57cec5SDimitry Andric if (!ArgTy->isIntegralOrEnumerationType() && !ArgTy->isRealFloatingType() && 1011*0b57cec5SDimitry Andric !ArgTy->isNullPtrType() && !ArgTy->isPointerType() && 1012*0b57cec5SDimitry Andric !ArgTy->isMemberPointerType() && !ArgTy->isAnyComplexType()) { 1013*0b57cec5SDimitry Andric return CGF.ErrorUnsupported(E, "aggregate three-way comparison"); 1014*0b57cec5SDimitry Andric } 1015*0b57cec5SDimitry Andric bool IsComplex = ArgTy->isAnyComplexType(); 1016*0b57cec5SDimitry Andric 1017*0b57cec5SDimitry Andric // Evaluate the operands to the expression and extract their values. 1018*0b57cec5SDimitry Andric auto EmitOperand = [&](Expr *E) -> std::pair<Value *, Value *> { 1019*0b57cec5SDimitry Andric RValue RV = CGF.EmitAnyExpr(E); 1020*0b57cec5SDimitry Andric if (RV.isScalar()) 1021*0b57cec5SDimitry Andric return {RV.getScalarVal(), nullptr}; 1022*0b57cec5SDimitry Andric if (RV.isAggregate()) 1023*0b57cec5SDimitry Andric return {RV.getAggregatePointer(), nullptr}; 1024*0b57cec5SDimitry Andric assert(RV.isComplex()); 1025*0b57cec5SDimitry Andric return RV.getComplexVal(); 1026*0b57cec5SDimitry Andric }; 1027*0b57cec5SDimitry Andric auto LHSValues = EmitOperand(E->getLHS()), 1028*0b57cec5SDimitry Andric RHSValues = EmitOperand(E->getRHS()); 1029*0b57cec5SDimitry Andric 1030*0b57cec5SDimitry Andric auto EmitCmp = [&](CompareKind K) { 1031*0b57cec5SDimitry Andric Value *Cmp = EmitCompare(Builder, CGF, E, LHSValues.first, RHSValues.first, 1032*0b57cec5SDimitry Andric K, IsComplex ? 
".r" : ""); 1033*0b57cec5SDimitry Andric if (!IsComplex) 1034*0b57cec5SDimitry Andric return Cmp; 1035*0b57cec5SDimitry Andric assert(K == CompareKind::CK_Equal); 1036*0b57cec5SDimitry Andric Value *CmpImag = EmitCompare(Builder, CGF, E, LHSValues.second, 1037*0b57cec5SDimitry Andric RHSValues.second, K, ".i"); 1038*0b57cec5SDimitry Andric return Builder.CreateAnd(Cmp, CmpImag, "and.eq"); 1039*0b57cec5SDimitry Andric }; 1040*0b57cec5SDimitry Andric auto EmitCmpRes = [&](const ComparisonCategoryInfo::ValueInfo *VInfo) { 1041*0b57cec5SDimitry Andric return Builder.getInt(VInfo->getIntValue()); 1042*0b57cec5SDimitry Andric }; 1043*0b57cec5SDimitry Andric 1044*0b57cec5SDimitry Andric Value *Select; 1045*0b57cec5SDimitry Andric if (ArgTy->isNullPtrType()) { 1046*0b57cec5SDimitry Andric Select = EmitCmpRes(CmpInfo.getEqualOrEquiv()); 1047*0b57cec5SDimitry Andric } else if (!CmpInfo.isPartial()) { 1048*0b57cec5SDimitry Andric Value *SelectOne = 1049*0b57cec5SDimitry Andric Builder.CreateSelect(EmitCmp(CK_Less), EmitCmpRes(CmpInfo.getLess()), 1050*0b57cec5SDimitry Andric EmitCmpRes(CmpInfo.getGreater()), "sel.lt"); 1051*0b57cec5SDimitry Andric Select = Builder.CreateSelect(EmitCmp(CK_Equal), 1052*0b57cec5SDimitry Andric EmitCmpRes(CmpInfo.getEqualOrEquiv()), 1053*0b57cec5SDimitry Andric SelectOne, "sel.eq"); 1054*0b57cec5SDimitry Andric } else { 1055*0b57cec5SDimitry Andric Value *SelectEq = Builder.CreateSelect( 1056*0b57cec5SDimitry Andric EmitCmp(CK_Equal), EmitCmpRes(CmpInfo.getEqualOrEquiv()), 1057*0b57cec5SDimitry Andric EmitCmpRes(CmpInfo.getUnordered()), "sel.eq"); 1058*0b57cec5SDimitry Andric Value *SelectGT = Builder.CreateSelect(EmitCmp(CK_Greater), 1059*0b57cec5SDimitry Andric EmitCmpRes(CmpInfo.getGreater()), 1060*0b57cec5SDimitry Andric SelectEq, "sel.gt"); 1061*0b57cec5SDimitry Andric Select = Builder.CreateSelect( 1062*0b57cec5SDimitry Andric EmitCmp(CK_Less), EmitCmpRes(CmpInfo.getLess()), SelectGT, "sel.lt"); 1063*0b57cec5SDimitry Andric } 1064*0b57cec5SDimitry Andric // Create the return value in the destination slot. 1065*0b57cec5SDimitry Andric EnsureDest(E->getType()); 1066*0b57cec5SDimitry Andric LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType()); 1067*0b57cec5SDimitry Andric 1068*0b57cec5SDimitry Andric // Emit the address of the first (and only) field in the comparison category 1069*0b57cec5SDimitry Andric // type, and initialize it from the constant integer value selected above. 1070*0b57cec5SDimitry Andric LValue FieldLV = CGF.EmitLValueForFieldInitialization( 1071*0b57cec5SDimitry Andric DestLV, *CmpInfo.Record->field_begin()); 1072*0b57cec5SDimitry Andric CGF.EmitStoreThroughLValue(RValue::get(Select), FieldLV, /*IsInit*/ true); 1073*0b57cec5SDimitry Andric 1074*0b57cec5SDimitry Andric // All done! The result is in the Dest slot. 
1075*0b57cec5SDimitry Andric } 1076*0b57cec5SDimitry Andric 1077*0b57cec5SDimitry Andric void AggExprEmitter::VisitBinaryOperator(const BinaryOperator *E) { 1078*0b57cec5SDimitry Andric if (E->getOpcode() == BO_PtrMemD || E->getOpcode() == BO_PtrMemI) 1079*0b57cec5SDimitry Andric VisitPointerToDataMemberBinaryOperator(E); 1080*0b57cec5SDimitry Andric else 1081*0b57cec5SDimitry Andric CGF.ErrorUnsupported(E, "aggregate binary expression"); 1082*0b57cec5SDimitry Andric } 1083*0b57cec5SDimitry Andric 1084*0b57cec5SDimitry Andric void AggExprEmitter::VisitPointerToDataMemberBinaryOperator( 1085*0b57cec5SDimitry Andric const BinaryOperator *E) { 1086*0b57cec5SDimitry Andric LValue LV = CGF.EmitPointerToDataMemberBinaryExpr(E); 1087*0b57cec5SDimitry Andric EmitFinalDestCopy(E->getType(), LV); 1088*0b57cec5SDimitry Andric } 1089*0b57cec5SDimitry Andric 1090*0b57cec5SDimitry Andric /// Is the value of the given expression possibly a reference to or 1091*0b57cec5SDimitry Andric /// into a __block variable? 1092*0b57cec5SDimitry Andric static bool isBlockVarRef(const Expr *E) { 1093*0b57cec5SDimitry Andric // Make sure we look through parens. 1094*0b57cec5SDimitry Andric E = E->IgnoreParens(); 1095*0b57cec5SDimitry Andric 1096*0b57cec5SDimitry Andric // Check for a direct reference to a __block variable. 1097*0b57cec5SDimitry Andric if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) { 1098*0b57cec5SDimitry Andric const VarDecl *var = dyn_cast<VarDecl>(DRE->getDecl()); 1099*0b57cec5SDimitry Andric return (var && var->hasAttr<BlocksAttr>()); 1100*0b57cec5SDimitry Andric } 1101*0b57cec5SDimitry Andric 1102*0b57cec5SDimitry Andric // More complicated stuff. 1103*0b57cec5SDimitry Andric 1104*0b57cec5SDimitry Andric // Binary operators. 1105*0b57cec5SDimitry Andric if (const BinaryOperator *op = dyn_cast<BinaryOperator>(E)) { 1106*0b57cec5SDimitry Andric // For an assignment or pointer-to-member operation, just care 1107*0b57cec5SDimitry Andric // about the LHS. 1108*0b57cec5SDimitry Andric if (op->isAssignmentOp() || op->isPtrMemOp()) 1109*0b57cec5SDimitry Andric return isBlockVarRef(op->getLHS()); 1110*0b57cec5SDimitry Andric 1111*0b57cec5SDimitry Andric // For a comma, just care about the RHS. 1112*0b57cec5SDimitry Andric if (op->getOpcode() == BO_Comma) 1113*0b57cec5SDimitry Andric return isBlockVarRef(op->getRHS()); 1114*0b57cec5SDimitry Andric 1115*0b57cec5SDimitry Andric // FIXME: pointer arithmetic? 1116*0b57cec5SDimitry Andric return false; 1117*0b57cec5SDimitry Andric 1118*0b57cec5SDimitry Andric // Check both sides of a conditional operator. 1119*0b57cec5SDimitry Andric } else if (const AbstractConditionalOperator *op 1120*0b57cec5SDimitry Andric = dyn_cast<AbstractConditionalOperator>(E)) { 1121*0b57cec5SDimitry Andric return isBlockVarRef(op->getTrueExpr()) 1122*0b57cec5SDimitry Andric || isBlockVarRef(op->getFalseExpr()); 1123*0b57cec5SDimitry Andric 1124*0b57cec5SDimitry Andric // OVEs are required to support BinaryConditionalOperators. 1125*0b57cec5SDimitry Andric } else if (const OpaqueValueExpr *op 1126*0b57cec5SDimitry Andric = dyn_cast<OpaqueValueExpr>(E)) { 1127*0b57cec5SDimitry Andric if (const Expr *src = op->getSourceExpr()) 1128*0b57cec5SDimitry Andric return isBlockVarRef(src); 1129*0b57cec5SDimitry Andric 1130*0b57cec5SDimitry Andric // Casts are necessary to get things like (*(int*)&var) = foo(). 
1131*0b57cec5SDimitry Andric // We don't really care about the kind of cast here, except
1132*0b57cec5SDimitry Andric // we don't want to look through l2r casts, because it's okay
1133*0b57cec5SDimitry Andric // to get the *value* in a __block variable.
1134*0b57cec5SDimitry Andric } else if (const CastExpr *cast = dyn_cast<CastExpr>(E)) {
1135*0b57cec5SDimitry Andric if (cast->getCastKind() == CK_LValueToRValue)
1136*0b57cec5SDimitry Andric return false;
1137*0b57cec5SDimitry Andric return isBlockVarRef(cast->getSubExpr());
1138*0b57cec5SDimitry Andric
1139*0b57cec5SDimitry Andric // Handle unary operators. Again, just aggressively look through
1140*0b57cec5SDimitry Andric // it, ignoring the operation.
1141*0b57cec5SDimitry Andric } else if (const UnaryOperator *uop = dyn_cast<UnaryOperator>(E)) {
1142*0b57cec5SDimitry Andric return isBlockVarRef(uop->getSubExpr());
1143*0b57cec5SDimitry Andric
1144*0b57cec5SDimitry Andric // Look into the base of a field access.
1145*0b57cec5SDimitry Andric } else if (const MemberExpr *mem = dyn_cast<MemberExpr>(E)) {
1146*0b57cec5SDimitry Andric return isBlockVarRef(mem->getBase());
1147*0b57cec5SDimitry Andric
1148*0b57cec5SDimitry Andric // Look into the base of a subscript.
1149*0b57cec5SDimitry Andric } else if (const ArraySubscriptExpr *sub = dyn_cast<ArraySubscriptExpr>(E)) {
1150*0b57cec5SDimitry Andric return isBlockVarRef(sub->getBase());
1151*0b57cec5SDimitry Andric }
1152*0b57cec5SDimitry Andric
1153*0b57cec5SDimitry Andric return false;
1154*0b57cec5SDimitry Andric }
1155*0b57cec5SDimitry Andric
1156*0b57cec5SDimitry Andric void AggExprEmitter::VisitBinAssign(const BinaryOperator *E) {
1157*0b57cec5SDimitry Andric // For an assignment to work, the value on the right has
1158*0b57cec5SDimitry Andric // to be compatible with the value on the left.
1159*0b57cec5SDimitry Andric assert(CGF.getContext().hasSameUnqualifiedType(E->getLHS()->getType(),
1160*0b57cec5SDimitry Andric E->getRHS()->getType())
1161*0b57cec5SDimitry Andric && "Invalid assignment");
1162*0b57cec5SDimitry Andric
1163*0b57cec5SDimitry Andric // If the LHS might be a __block variable, and the RHS can
1164*0b57cec5SDimitry Andric // potentially cause a block copy, we need to evaluate the RHS first
1165*0b57cec5SDimitry Andric // so that the assignment goes to the right place.
1166*0b57cec5SDimitry Andric // This is pretty semantically fragile.
1167*0b57cec5SDimitry Andric if (isBlockVarRef(E->getLHS()) &&
1168*0b57cec5SDimitry Andric E->getRHS()->HasSideEffects(CGF.getContext())) {
1169*0b57cec5SDimitry Andric // Ensure that we have a destination, and evaluate the RHS into that.
1170*0b57cec5SDimitry Andric EnsureDest(E->getRHS()->getType());
1171*0b57cec5SDimitry Andric Visit(E->getRHS());
1172*0b57cec5SDimitry Andric
1173*0b57cec5SDimitry Andric // Now emit the LHS and copy into it.
1174*0b57cec5SDimitry Andric LValue LHS = CGF.EmitCheckedLValue(E->getLHS(), CodeGenFunction::TCK_Store);
1175*0b57cec5SDimitry Andric
1176*0b57cec5SDimitry Andric // That copy is an atomic copy if the LHS is atomic.
1177*0b57cec5SDimitry Andric if (LHS.getType()->isAtomicType() ||
1178*0b57cec5SDimitry Andric CGF.LValueIsSuitableForInlineAtomic(LHS)) {
1179*0b57cec5SDimitry Andric CGF.EmitAtomicStore(Dest.asRValue(), LHS, /*isInit*/ false);
1180*0b57cec5SDimitry Andric return;
1181*0b57cec5SDimitry Andric }
1182*0b57cec5SDimitry Andric
1183*0b57cec5SDimitry Andric EmitCopy(E->getLHS()->getType(),
1184480093f4SDimitry Andric AggValueSlot::forLValue(LHS, CGF, AggValueSlot::IsDestructed,
1185*0b57cec5SDimitry Andric needsGC(E->getLHS()->getType()),
1186*0b57cec5SDimitry Andric AggValueSlot::IsAliased,
1187*0b57cec5SDimitry Andric AggValueSlot::MayOverlap),
1188*0b57cec5SDimitry Andric Dest);
1189*0b57cec5SDimitry Andric return;
1190*0b57cec5SDimitry Andric }
1191*0b57cec5SDimitry Andric
1192*0b57cec5SDimitry Andric LValue LHS = CGF.EmitLValue(E->getLHS());
1193*0b57cec5SDimitry Andric
1194*0b57cec5SDimitry Andric // If we have an atomic type, evaluate into the destination and then
1195*0b57cec5SDimitry Andric // do an atomic copy.
1196*0b57cec5SDimitry Andric if (LHS.getType()->isAtomicType() ||
1197*0b57cec5SDimitry Andric CGF.LValueIsSuitableForInlineAtomic(LHS)) {
1198*0b57cec5SDimitry Andric EnsureDest(E->getRHS()->getType());
1199*0b57cec5SDimitry Andric Visit(E->getRHS());
1200*0b57cec5SDimitry Andric CGF.EmitAtomicStore(Dest.asRValue(), LHS, /*isInit*/ false);
1201*0b57cec5SDimitry Andric return;
1202*0b57cec5SDimitry Andric }
1203*0b57cec5SDimitry Andric
1204*0b57cec5SDimitry Andric // Codegen the RHS so that it stores directly into the LHS.
1205480093f4SDimitry Andric AggValueSlot LHSSlot = AggValueSlot::forLValue(
1206480093f4SDimitry Andric LHS, CGF, AggValueSlot::IsDestructed, needsGC(E->getLHS()->getType()),
1207480093f4SDimitry Andric AggValueSlot::IsAliased, AggValueSlot::MayOverlap);
1208*0b57cec5SDimitry Andric // A non-volatile aggregate destination might have volatile members.
1209*0b57cec5SDimitry Andric if (!LHSSlot.isVolatile() &&
1210*0b57cec5SDimitry Andric CGF.hasVolatileMember(E->getLHS()->getType()))
1211*0b57cec5SDimitry Andric LHSSlot.setVolatile(true);
1212*0b57cec5SDimitry Andric
1213*0b57cec5SDimitry Andric CGF.EmitAggExpr(E->getRHS(), LHSSlot);
1214*0b57cec5SDimitry Andric
1215*0b57cec5SDimitry Andric // Copy into the destination if the assignment isn't ignored.
1216*0b57cec5SDimitry Andric EmitFinalDestCopy(E->getType(), LHS);
1217*0b57cec5SDimitry Andric }
1218*0b57cec5SDimitry Andric
1219*0b57cec5SDimitry Andric void AggExprEmitter::
1220*0b57cec5SDimitry Andric VisitAbstractConditionalOperator(const AbstractConditionalOperator *E) {
1221*0b57cec5SDimitry Andric llvm::BasicBlock *LHSBlock = CGF.createBasicBlock("cond.true");
1222*0b57cec5SDimitry Andric llvm::BasicBlock *RHSBlock = CGF.createBasicBlock("cond.false");
1223*0b57cec5SDimitry Andric llvm::BasicBlock *ContBlock = CGF.createBasicBlock("cond.end");
1224*0b57cec5SDimitry Andric
1225*0b57cec5SDimitry Andric // Bind the common expression if necessary.
1226*0b57cec5SDimitry Andric CodeGenFunction::OpaqueValueMapping binding(CGF, E);
1227*0b57cec5SDimitry Andric
1228*0b57cec5SDimitry Andric CodeGenFunction::ConditionalEvaluation eval(CGF);
1229*0b57cec5SDimitry Andric CGF.EmitBranchOnBoolExpr(E->getCond(), LHSBlock, RHSBlock,
1230*0b57cec5SDimitry Andric CGF.getProfileCount(E));
1231*0b57cec5SDimitry Andric
1232*0b57cec5SDimitry Andric // Save whether the destination's lifetime is externally managed.
1233*0b57cec5SDimitry Andric bool isExternallyDestructed = Dest.isExternallyDestructed(); 1234*0b57cec5SDimitry Andric 1235*0b57cec5SDimitry Andric eval.begin(CGF); 1236*0b57cec5SDimitry Andric CGF.EmitBlock(LHSBlock); 1237*0b57cec5SDimitry Andric CGF.incrementProfileCounter(E); 1238*0b57cec5SDimitry Andric Visit(E->getTrueExpr()); 1239*0b57cec5SDimitry Andric eval.end(CGF); 1240*0b57cec5SDimitry Andric 1241*0b57cec5SDimitry Andric assert(CGF.HaveInsertPoint() && "expression evaluation ended with no IP!"); 1242*0b57cec5SDimitry Andric CGF.Builder.CreateBr(ContBlock); 1243*0b57cec5SDimitry Andric 1244*0b57cec5SDimitry Andric // If the result of an agg expression is unused, then the emission 1245*0b57cec5SDimitry Andric // of the LHS might need to create a destination slot. That's fine 1246*0b57cec5SDimitry Andric // with us, and we can safely emit the RHS into the same slot, but 1247*0b57cec5SDimitry Andric // we shouldn't claim that it's already being destructed. 1248*0b57cec5SDimitry Andric Dest.setExternallyDestructed(isExternallyDestructed); 1249*0b57cec5SDimitry Andric 1250*0b57cec5SDimitry Andric eval.begin(CGF); 1251*0b57cec5SDimitry Andric CGF.EmitBlock(RHSBlock); 1252*0b57cec5SDimitry Andric Visit(E->getFalseExpr()); 1253*0b57cec5SDimitry Andric eval.end(CGF); 1254*0b57cec5SDimitry Andric 1255*0b57cec5SDimitry Andric CGF.EmitBlock(ContBlock); 1256*0b57cec5SDimitry Andric } 1257*0b57cec5SDimitry Andric 1258*0b57cec5SDimitry Andric void AggExprEmitter::VisitChooseExpr(const ChooseExpr *CE) { 1259*0b57cec5SDimitry Andric Visit(CE->getChosenSubExpr()); 1260*0b57cec5SDimitry Andric } 1261*0b57cec5SDimitry Andric 1262*0b57cec5SDimitry Andric void AggExprEmitter::VisitVAArgExpr(VAArgExpr *VE) { 1263*0b57cec5SDimitry Andric Address ArgValue = Address::invalid(); 1264*0b57cec5SDimitry Andric Address ArgPtr = CGF.EmitVAArg(VE, ArgValue); 1265*0b57cec5SDimitry Andric 1266*0b57cec5SDimitry Andric // If EmitVAArg fails, emit an error. 1267*0b57cec5SDimitry Andric if (!ArgPtr.isValid()) { 1268*0b57cec5SDimitry Andric CGF.ErrorUnsupported(VE, "aggregate va_arg expression"); 1269*0b57cec5SDimitry Andric return; 1270*0b57cec5SDimitry Andric } 1271*0b57cec5SDimitry Andric 1272*0b57cec5SDimitry Andric EmitFinalDestCopy(VE->getType(), CGF.MakeAddrLValue(ArgPtr, VE->getType())); 1273*0b57cec5SDimitry Andric } 1274*0b57cec5SDimitry Andric 1275*0b57cec5SDimitry Andric void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) { 1276*0b57cec5SDimitry Andric // Ensure that we have a slot, but if we already do, remember 1277*0b57cec5SDimitry Andric // whether it was externally destructed. 1278*0b57cec5SDimitry Andric bool wasExternallyDestructed = Dest.isExternallyDestructed(); 1279*0b57cec5SDimitry Andric EnsureDest(E->getType()); 1280*0b57cec5SDimitry Andric 1281*0b57cec5SDimitry Andric // We're going to push a destructor if there isn't already one. 1282*0b57cec5SDimitry Andric Dest.setExternallyDestructed(); 1283*0b57cec5SDimitry Andric 1284*0b57cec5SDimitry Andric Visit(E->getSubExpr()); 1285*0b57cec5SDimitry Andric 1286*0b57cec5SDimitry Andric // Push that destructor we promised. 
1287*0b57cec5SDimitry Andric if (!wasExternallyDestructed) 1288*0b57cec5SDimitry Andric CGF.EmitCXXTemporary(E->getTemporary(), E->getType(), Dest.getAddress()); 1289*0b57cec5SDimitry Andric } 1290*0b57cec5SDimitry Andric 1291*0b57cec5SDimitry Andric void 1292*0b57cec5SDimitry Andric AggExprEmitter::VisitCXXConstructExpr(const CXXConstructExpr *E) { 1293*0b57cec5SDimitry Andric AggValueSlot Slot = EnsureSlot(E->getType()); 1294*0b57cec5SDimitry Andric CGF.EmitCXXConstructExpr(E, Slot); 1295*0b57cec5SDimitry Andric } 1296*0b57cec5SDimitry Andric 1297*0b57cec5SDimitry Andric void AggExprEmitter::VisitCXXInheritedCtorInitExpr( 1298*0b57cec5SDimitry Andric const CXXInheritedCtorInitExpr *E) { 1299*0b57cec5SDimitry Andric AggValueSlot Slot = EnsureSlot(E->getType()); 1300*0b57cec5SDimitry Andric CGF.EmitInheritedCXXConstructorCall( 1301*0b57cec5SDimitry Andric E->getConstructor(), E->constructsVBase(), Slot.getAddress(), 1302*0b57cec5SDimitry Andric E->inheritedFromVBase(), E); 1303*0b57cec5SDimitry Andric } 1304*0b57cec5SDimitry Andric 1305*0b57cec5SDimitry Andric void 1306*0b57cec5SDimitry Andric AggExprEmitter::VisitLambdaExpr(LambdaExpr *E) { 1307*0b57cec5SDimitry Andric AggValueSlot Slot = EnsureSlot(E->getType()); 1308*0b57cec5SDimitry Andric LValue SlotLV = CGF.MakeAddrLValue(Slot.getAddress(), E->getType()); 1309*0b57cec5SDimitry Andric 1310*0b57cec5SDimitry Andric // We'll need to enter cleanup scopes in case any of the element 1311*0b57cec5SDimitry Andric // initializers throws an exception. 1312*0b57cec5SDimitry Andric SmallVector<EHScopeStack::stable_iterator, 16> Cleanups; 1313*0b57cec5SDimitry Andric llvm::Instruction *CleanupDominator = nullptr; 1314*0b57cec5SDimitry Andric 1315*0b57cec5SDimitry Andric CXXRecordDecl::field_iterator CurField = E->getLambdaClass()->field_begin(); 1316*0b57cec5SDimitry Andric for (LambdaExpr::const_capture_init_iterator i = E->capture_init_begin(), 1317*0b57cec5SDimitry Andric e = E->capture_init_end(); 1318*0b57cec5SDimitry Andric i != e; ++i, ++CurField) { 1319*0b57cec5SDimitry Andric // Emit initialization 1320*0b57cec5SDimitry Andric LValue LV = CGF.EmitLValueForFieldInitialization(SlotLV, *CurField); 1321*0b57cec5SDimitry Andric if (CurField->hasCapturedVLAType()) { 1322*0b57cec5SDimitry Andric CGF.EmitLambdaVLACapture(CurField->getCapturedVLAType(), LV); 1323*0b57cec5SDimitry Andric continue; 1324*0b57cec5SDimitry Andric } 1325*0b57cec5SDimitry Andric 1326*0b57cec5SDimitry Andric EmitInitializationToLValue(*i, LV); 1327*0b57cec5SDimitry Andric 1328*0b57cec5SDimitry Andric // Push a destructor if necessary. 
1329*0b57cec5SDimitry Andric if (QualType::DestructionKind DtorKind = 1330*0b57cec5SDimitry Andric CurField->getType().isDestructedType()) { 1331*0b57cec5SDimitry Andric assert(LV.isSimple()); 1332*0b57cec5SDimitry Andric if (CGF.needsEHCleanup(DtorKind)) { 1333*0b57cec5SDimitry Andric if (!CleanupDominator) 1334*0b57cec5SDimitry Andric CleanupDominator = CGF.Builder.CreateAlignedLoad( 1335*0b57cec5SDimitry Andric CGF.Int8Ty, 1336*0b57cec5SDimitry Andric llvm::Constant::getNullValue(CGF.Int8PtrTy), 1337*0b57cec5SDimitry Andric CharUnits::One()); // placeholder 1338*0b57cec5SDimitry Andric 1339480093f4SDimitry Andric CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), CurField->getType(), 1340*0b57cec5SDimitry Andric CGF.getDestroyer(DtorKind), false); 1341*0b57cec5SDimitry Andric Cleanups.push_back(CGF.EHStack.stable_begin()); 1342*0b57cec5SDimitry Andric } 1343*0b57cec5SDimitry Andric } 1344*0b57cec5SDimitry Andric } 1345*0b57cec5SDimitry Andric 1346*0b57cec5SDimitry Andric // Deactivate all the partial cleanups in reverse order, which 1347*0b57cec5SDimitry Andric // generally means popping them. 1348*0b57cec5SDimitry Andric for (unsigned i = Cleanups.size(); i != 0; --i) 1349*0b57cec5SDimitry Andric CGF.DeactivateCleanupBlock(Cleanups[i-1], CleanupDominator); 1350*0b57cec5SDimitry Andric 1351*0b57cec5SDimitry Andric // Destroy the placeholder if we made one. 1352*0b57cec5SDimitry Andric if (CleanupDominator) 1353*0b57cec5SDimitry Andric CleanupDominator->eraseFromParent(); 1354*0b57cec5SDimitry Andric } 1355*0b57cec5SDimitry Andric 1356*0b57cec5SDimitry Andric void AggExprEmitter::VisitExprWithCleanups(ExprWithCleanups *E) { 1357*0b57cec5SDimitry Andric CodeGenFunction::RunCleanupsScope cleanups(CGF); 1358*0b57cec5SDimitry Andric Visit(E->getSubExpr()); 1359*0b57cec5SDimitry Andric } 1360*0b57cec5SDimitry Andric 1361*0b57cec5SDimitry Andric void AggExprEmitter::VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E) { 1362*0b57cec5SDimitry Andric QualType T = E->getType(); 1363*0b57cec5SDimitry Andric AggValueSlot Slot = EnsureSlot(T); 1364*0b57cec5SDimitry Andric EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddress(), T)); 1365*0b57cec5SDimitry Andric } 1366*0b57cec5SDimitry Andric 1367*0b57cec5SDimitry Andric void AggExprEmitter::VisitImplicitValueInitExpr(ImplicitValueInitExpr *E) { 1368*0b57cec5SDimitry Andric QualType T = E->getType(); 1369*0b57cec5SDimitry Andric AggValueSlot Slot = EnsureSlot(T); 1370*0b57cec5SDimitry Andric EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddress(), T)); 1371*0b57cec5SDimitry Andric } 1372*0b57cec5SDimitry Andric 1373*0b57cec5SDimitry Andric /// isSimpleZero - If emitting this value will obviously just cause a store of 1374*0b57cec5SDimitry Andric /// zero to memory, return true. This can return false if uncertain, so it just 1375*0b57cec5SDimitry Andric /// handles simple cases. 
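/// For example, initializers like 0, 0.0, '\0', (int*)0, and int() count as
/// simple zeros, while calls or other expressions whose value is not obviously
/// zero conservatively return false here.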
1376*0b57cec5SDimitry Andric static bool isSimpleZero(const Expr *E, CodeGenFunction &CGF) { 1377*0b57cec5SDimitry Andric E = E->IgnoreParens(); 1378*0b57cec5SDimitry Andric 1379*0b57cec5SDimitry Andric // 0 1380*0b57cec5SDimitry Andric if (const IntegerLiteral *IL = dyn_cast<IntegerLiteral>(E)) 1381*0b57cec5SDimitry Andric return IL->getValue() == 0; 1382*0b57cec5SDimitry Andric // +0.0 1383*0b57cec5SDimitry Andric if (const FloatingLiteral *FL = dyn_cast<FloatingLiteral>(E)) 1384*0b57cec5SDimitry Andric return FL->getValue().isPosZero(); 1385*0b57cec5SDimitry Andric // int() 1386*0b57cec5SDimitry Andric if ((isa<ImplicitValueInitExpr>(E) || isa<CXXScalarValueInitExpr>(E)) && 1387*0b57cec5SDimitry Andric CGF.getTypes().isZeroInitializable(E->getType())) 1388*0b57cec5SDimitry Andric return true; 1389*0b57cec5SDimitry Andric // (int*)0 - Null pointer expressions. 1390*0b57cec5SDimitry Andric if (const CastExpr *ICE = dyn_cast<CastExpr>(E)) 1391*0b57cec5SDimitry Andric return ICE->getCastKind() == CK_NullToPointer && 1392*0b57cec5SDimitry Andric CGF.getTypes().isPointerZeroInitializable(E->getType()) && 1393*0b57cec5SDimitry Andric !E->HasSideEffects(CGF.getContext()); 1394*0b57cec5SDimitry Andric // '\0' 1395*0b57cec5SDimitry Andric if (const CharacterLiteral *CL = dyn_cast<CharacterLiteral>(E)) 1396*0b57cec5SDimitry Andric return CL->getValue() == 0; 1397*0b57cec5SDimitry Andric 1398*0b57cec5SDimitry Andric // Otherwise, hard case: conservatively return false. 1399*0b57cec5SDimitry Andric return false; 1400*0b57cec5SDimitry Andric } 1401*0b57cec5SDimitry Andric 1402*0b57cec5SDimitry Andric 1403*0b57cec5SDimitry Andric void 1404*0b57cec5SDimitry Andric AggExprEmitter::EmitInitializationToLValue(Expr *E, LValue LV) { 1405*0b57cec5SDimitry Andric QualType type = LV.getType(); 1406*0b57cec5SDimitry Andric // FIXME: Ignore result? 1407*0b57cec5SDimitry Andric // FIXME: Are initializers affected by volatile? 1408*0b57cec5SDimitry Andric if (Dest.isZeroed() && isSimpleZero(E, CGF)) { 1409*0b57cec5SDimitry Andric // Storing "i32 0" to a zero'd memory location is a noop. 1410*0b57cec5SDimitry Andric return; 1411*0b57cec5SDimitry Andric } else if (isa<ImplicitValueInitExpr>(E) || isa<CXXScalarValueInitExpr>(E)) { 1412*0b57cec5SDimitry Andric return EmitNullInitializationToLValue(LV); 1413*0b57cec5SDimitry Andric } else if (isa<NoInitExpr>(E)) { 1414*0b57cec5SDimitry Andric // Do nothing. 
1415*0b57cec5SDimitry Andric return; 1416*0b57cec5SDimitry Andric } else if (type->isReferenceType()) { 1417*0b57cec5SDimitry Andric RValue RV = CGF.EmitReferenceBindingToExpr(E); 1418*0b57cec5SDimitry Andric return CGF.EmitStoreThroughLValue(RV, LV); 1419*0b57cec5SDimitry Andric } 1420*0b57cec5SDimitry Andric 1421*0b57cec5SDimitry Andric switch (CGF.getEvaluationKind(type)) { 1422*0b57cec5SDimitry Andric case TEK_Complex: 1423*0b57cec5SDimitry Andric CGF.EmitComplexExprIntoLValue(E, LV, /*isInit*/ true); 1424*0b57cec5SDimitry Andric return; 1425*0b57cec5SDimitry Andric case TEK_Aggregate: 1426480093f4SDimitry Andric CGF.EmitAggExpr( 1427480093f4SDimitry Andric E, AggValueSlot::forLValue(LV, CGF, AggValueSlot::IsDestructed, 1428*0b57cec5SDimitry Andric AggValueSlot::DoesNotNeedGCBarriers, 1429*0b57cec5SDimitry Andric AggValueSlot::IsNotAliased, 1430480093f4SDimitry Andric AggValueSlot::MayOverlap, Dest.isZeroed())); 1431*0b57cec5SDimitry Andric return; 1432*0b57cec5SDimitry Andric case TEK_Scalar: 1433*0b57cec5SDimitry Andric if (LV.isSimple()) { 1434*0b57cec5SDimitry Andric CGF.EmitScalarInit(E, /*D=*/nullptr, LV, /*Captured=*/false); 1435*0b57cec5SDimitry Andric } else { 1436*0b57cec5SDimitry Andric CGF.EmitStoreThroughLValue(RValue::get(CGF.EmitScalarExpr(E)), LV); 1437*0b57cec5SDimitry Andric } 1438*0b57cec5SDimitry Andric return; 1439*0b57cec5SDimitry Andric } 1440*0b57cec5SDimitry Andric llvm_unreachable("bad evaluation kind"); 1441*0b57cec5SDimitry Andric } 1442*0b57cec5SDimitry Andric 1443*0b57cec5SDimitry Andric void AggExprEmitter::EmitNullInitializationToLValue(LValue lv) { 1444*0b57cec5SDimitry Andric QualType type = lv.getType(); 1445*0b57cec5SDimitry Andric 1446*0b57cec5SDimitry Andric // If the destination slot is already zeroed out before the aggregate is 1447*0b57cec5SDimitry Andric // copied into it, we don't have to emit any zeros here. 1448*0b57cec5SDimitry Andric if (Dest.isZeroed() && CGF.getTypes().isZeroInitializable(type)) 1449*0b57cec5SDimitry Andric return; 1450*0b57cec5SDimitry Andric 1451*0b57cec5SDimitry Andric if (CGF.hasScalarEvaluationKind(type)) { 1452*0b57cec5SDimitry Andric // For non-aggregates, we can store the appropriate null constant. 1453*0b57cec5SDimitry Andric llvm::Value *null = CGF.CGM.EmitNullConstant(type); 1454*0b57cec5SDimitry Andric // Note that the following is not equivalent to 1455*0b57cec5SDimitry Andric // EmitStoreThroughBitfieldLValue for ARC types. 1456*0b57cec5SDimitry Andric if (lv.isBitField()) { 1457*0b57cec5SDimitry Andric CGF.EmitStoreThroughBitfieldLValue(RValue::get(null), lv); 1458*0b57cec5SDimitry Andric } else { 1459*0b57cec5SDimitry Andric assert(lv.isSimple()); 1460*0b57cec5SDimitry Andric CGF.EmitStoreOfScalar(null, lv, /* isInitialization */ true); 1461*0b57cec5SDimitry Andric } 1462*0b57cec5SDimitry Andric } else { 1463*0b57cec5SDimitry Andric // There's a potential optimization opportunity in combining 1464*0b57cec5SDimitry Andric // memsets; that would be easy for arrays, but relatively 1465*0b57cec5SDimitry Andric // difficult for structures with the current code. 1466480093f4SDimitry Andric CGF.EmitNullInitialization(lv.getAddress(CGF), lv.getType()); 1467*0b57cec5SDimitry Andric } 1468*0b57cec5SDimitry Andric } 1469*0b57cec5SDimitry Andric 1470*0b57cec5SDimitry Andric void AggExprEmitter::VisitInitListExpr(InitListExpr *E) { 1471*0b57cec5SDimitry Andric #if 0 1472*0b57cec5SDimitry Andric // FIXME: Assess perf here? 
Figure out what cases are worth optimizing here
1473*0b57cec5SDimitry Andric // (Length of globals? Chunks of zeroed-out space?).
1474*0b57cec5SDimitry Andric //
1475*0b57cec5SDimitry Andric // If we can, prefer a copy from a global; this is a lot less code for long
1476*0b57cec5SDimitry Andric // globals, and it's easier for the current optimizers to analyze.
1477*0b57cec5SDimitry Andric if (llvm::Constant* C = CGF.CGM.EmitConstantExpr(E, E->getType(), &CGF)) {
1478*0b57cec5SDimitry Andric llvm::GlobalVariable* GV =
1479*0b57cec5SDimitry Andric new llvm::GlobalVariable(CGF.CGM.getModule(), C->getType(), true,
1480*0b57cec5SDimitry Andric llvm::GlobalValue::InternalLinkage, C, "");
1481*0b57cec5SDimitry Andric EmitFinalDestCopy(E->getType(), CGF.MakeAddrLValue(GV, E->getType()));
1482*0b57cec5SDimitry Andric return;
1483*0b57cec5SDimitry Andric }
1484*0b57cec5SDimitry Andric #endif
1485*0b57cec5SDimitry Andric if (E->hadArrayRangeDesignator())
1486*0b57cec5SDimitry Andric CGF.ErrorUnsupported(E, "GNU array range designator extension");
1487*0b57cec5SDimitry Andric
1488*0b57cec5SDimitry Andric if (E->isTransparent())
1489*0b57cec5SDimitry Andric return Visit(E->getInit(0));
1490*0b57cec5SDimitry Andric
1491*0b57cec5SDimitry Andric AggValueSlot Dest = EnsureSlot(E->getType());
1492*0b57cec5SDimitry Andric
1493*0b57cec5SDimitry Andric LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
1494*0b57cec5SDimitry Andric
1495*0b57cec5SDimitry Andric // Handle initialization of an array.
1496*0b57cec5SDimitry Andric if (E->getType()->isArrayType()) {
1497*0b57cec5SDimitry Andric auto AType = cast<llvm::ArrayType>(Dest.getAddress().getElementType());
1498*0b57cec5SDimitry Andric EmitArrayInit(Dest.getAddress(), AType, E->getType(), E);
1499*0b57cec5SDimitry Andric return;
1500*0b57cec5SDimitry Andric }
1501*0b57cec5SDimitry Andric
1502*0b57cec5SDimitry Andric assert(E->getType()->isRecordType() && "Only support structs/unions here!");
1503*0b57cec5SDimitry Andric
1504*0b57cec5SDimitry Andric // Do struct initialization; this code just sets each individual member
1505*0b57cec5SDimitry Andric // to the appropriate value. This makes bitfield support automatic;
1506*0b57cec5SDimitry Andric // the disadvantage is that the generated code is more difficult for
1507*0b57cec5SDimitry Andric // the optimizer, especially with bitfields.
1508*0b57cec5SDimitry Andric unsigned NumInitElements = E->getNumInits();
1509*0b57cec5SDimitry Andric RecordDecl *record = E->getType()->castAs<RecordType>()->getDecl();
1510*0b57cec5SDimitry Andric
1511*0b57cec5SDimitry Andric // We'll need to enter cleanup scopes in case any of the element
1512*0b57cec5SDimitry Andric // initializers throws an exception.
1513*0b57cec5SDimitry Andric SmallVector<EHScopeStack::stable_iterator, 16> cleanups;
1514*0b57cec5SDimitry Andric llvm::Instruction *cleanupDominator = nullptr;
1515*0b57cec5SDimitry Andric auto addCleanup = [&](const EHScopeStack::stable_iterator &cleanup) {
1516*0b57cec5SDimitry Andric cleanups.push_back(cleanup);
1517*0b57cec5SDimitry Andric if (!cleanupDominator) // create placeholder once needed
1518*0b57cec5SDimitry Andric cleanupDominator = CGF.Builder.CreateAlignedLoad(
1519*0b57cec5SDimitry Andric CGF.Int8Ty, llvm::Constant::getNullValue(CGF.Int8PtrTy),
1520*0b57cec5SDimitry Andric CharUnits::One());
1521*0b57cec5SDimitry Andric };
1522*0b57cec5SDimitry Andric
1523*0b57cec5SDimitry Andric unsigned curInitIndex = 0;
1524*0b57cec5SDimitry Andric
1525*0b57cec5SDimitry Andric // Emit initialization of base classes.
1526*0b57cec5SDimitry Andric if (auto *CXXRD = dyn_cast<CXXRecordDecl>(record)) {
1527*0b57cec5SDimitry Andric assert(E->getNumInits() >= CXXRD->getNumBases() &&
1528*0b57cec5SDimitry Andric "missing initializer for base class");
1529*0b57cec5SDimitry Andric for (auto &Base : CXXRD->bases()) {
1530*0b57cec5SDimitry Andric assert(!Base.isVirtual() && "should not see vbases here");
1531*0b57cec5SDimitry Andric auto *BaseRD = Base.getType()->getAsCXXRecordDecl();
1532*0b57cec5SDimitry Andric Address V = CGF.GetAddressOfDirectBaseInCompleteClass(
1533*0b57cec5SDimitry Andric Dest.getAddress(), CXXRD, BaseRD,
1534*0b57cec5SDimitry Andric /*isBaseVirtual*/ false);
1535*0b57cec5SDimitry Andric AggValueSlot AggSlot = AggValueSlot::forAddr(
1536*0b57cec5SDimitry Andric V, Qualifiers(),
1537*0b57cec5SDimitry Andric AggValueSlot::IsDestructed,
1538*0b57cec5SDimitry Andric AggValueSlot::DoesNotNeedGCBarriers,
1539*0b57cec5SDimitry Andric AggValueSlot::IsNotAliased,
1540*0b57cec5SDimitry Andric CGF.getOverlapForBaseInit(CXXRD, BaseRD, Base.isVirtual()));
1541*0b57cec5SDimitry Andric CGF.EmitAggExpr(E->getInit(curInitIndex++), AggSlot);
1542*0b57cec5SDimitry Andric
1543*0b57cec5SDimitry Andric if (QualType::DestructionKind dtorKind =
1544*0b57cec5SDimitry Andric Base.getType().isDestructedType()) {
1545*0b57cec5SDimitry Andric CGF.pushDestroy(dtorKind, V, Base.getType());
1546*0b57cec5SDimitry Andric addCleanup(CGF.EHStack.stable_begin());
1547*0b57cec5SDimitry Andric }
1548*0b57cec5SDimitry Andric }
1549*0b57cec5SDimitry Andric }
1550*0b57cec5SDimitry Andric
1551*0b57cec5SDimitry Andric // Prepare a 'this' for CXXDefaultInitExprs.
1552*0b57cec5SDimitry Andric CodeGenFunction::FieldConstructionScope FCS(CGF, Dest.getAddress());
1553*0b57cec5SDimitry Andric
1554*0b57cec5SDimitry Andric if (record->isUnion()) {
1555*0b57cec5SDimitry Andric // Only initialize one field of a union. The field itself is
1556*0b57cec5SDimitry Andric // specified by the initializer list.
1557*0b57cec5SDimitry Andric if (!E->getInitializedFieldInUnion()) {
1558*0b57cec5SDimitry Andric // Empty union; we have nothing to do.
1559*0b57cec5SDimitry Andric
1560*0b57cec5SDimitry Andric #ifndef NDEBUG
1561*0b57cec5SDimitry Andric // Make sure that it's really an empty union and not a failure of
1562*0b57cec5SDimitry Andric // semantic analysis.
1563*0b57cec5SDimitry Andric for (const auto *Field : record->fields())
1564*0b57cec5SDimitry Andric assert(Field->isUnnamedBitfield() && "Only unnamed bitfields allowed");
1565*0b57cec5SDimitry Andric #endif
1566*0b57cec5SDimitry Andric return;
1567*0b57cec5SDimitry Andric }
1568*0b57cec5SDimitry Andric
1569*0b57cec5SDimitry Andric // FIXME: volatility
1570*0b57cec5SDimitry Andric FieldDecl *Field = E->getInitializedFieldInUnion();
1571*0b57cec5SDimitry Andric
1572*0b57cec5SDimitry Andric LValue FieldLoc = CGF.EmitLValueForFieldInitialization(DestLV, Field);
1573*0b57cec5SDimitry Andric if (NumInitElements) {
1574*0b57cec5SDimitry Andric // Store the initializer into the field
1575*0b57cec5SDimitry Andric EmitInitializationToLValue(E->getInit(0), FieldLoc);
1576*0b57cec5SDimitry Andric } else {
1577*0b57cec5SDimitry Andric // Default-initialize to null.
1578*0b57cec5SDimitry Andric EmitNullInitializationToLValue(FieldLoc);
1579*0b57cec5SDimitry Andric }
1580*0b57cec5SDimitry Andric
1581*0b57cec5SDimitry Andric return;
1582*0b57cec5SDimitry Andric }
1583*0b57cec5SDimitry Andric
1584*0b57cec5SDimitry Andric // Here we iterate over the fields; this makes it simpler to both
1585*0b57cec5SDimitry Andric // default-initialize fields and skip over unnamed fields.
1586*0b57cec5SDimitry Andric for (const auto *field : record->fields()) {
1587*0b57cec5SDimitry Andric // We're done once we hit the flexible array member.
1588*0b57cec5SDimitry Andric if (field->getType()->isIncompleteArrayType())
1589*0b57cec5SDimitry Andric break;
1590*0b57cec5SDimitry Andric
1591*0b57cec5SDimitry Andric // Always skip anonymous bitfields.
1592*0b57cec5SDimitry Andric if (field->isUnnamedBitfield())
1593*0b57cec5SDimitry Andric continue;
1594*0b57cec5SDimitry Andric
1595*0b57cec5SDimitry Andric // We're done if we reach the end of the explicit initializers, we
1596*0b57cec5SDimitry Andric // have a zeroed object, and the rest of the fields are
1597*0b57cec5SDimitry Andric // zero-initializable.
1598*0b57cec5SDimitry Andric if (curInitIndex == NumInitElements && Dest.isZeroed() &&
1599*0b57cec5SDimitry Andric CGF.getTypes().isZeroInitializable(E->getType()))
1600*0b57cec5SDimitry Andric break;
1601*0b57cec5SDimitry Andric
1602*0b57cec5SDimitry Andric
1603*0b57cec5SDimitry Andric LValue LV = CGF.EmitLValueForFieldInitialization(DestLV, field);
1604*0b57cec5SDimitry Andric // We never generate write-barriers for initialized fields.
1605*0b57cec5SDimitry Andric LV.setNonGC(true);
1606*0b57cec5SDimitry Andric
1607*0b57cec5SDimitry Andric if (curInitIndex < NumInitElements) {
1608*0b57cec5SDimitry Andric // Store the initializer into the field.
1609*0b57cec5SDimitry Andric EmitInitializationToLValue(E->getInit(curInitIndex++), LV);
1610*0b57cec5SDimitry Andric } else {
1611*0b57cec5SDimitry Andric // We're out of initializers; default-initialize to null.
1612*0b57cec5SDimitry Andric EmitNullInitializationToLValue(LV);
1613*0b57cec5SDimitry Andric }
1614*0b57cec5SDimitry Andric
1615*0b57cec5SDimitry Andric // Push a destructor if necessary.
1616*0b57cec5SDimitry Andric // FIXME: if we have an array of structures, all explicitly
1617*0b57cec5SDimitry Andric // initialized, we can end up pushing a linear number of cleanups.
1618*0b57cec5SDimitry Andric bool pushedCleanup = false; 1619*0b57cec5SDimitry Andric if (QualType::DestructionKind dtorKind 1620*0b57cec5SDimitry Andric = field->getType().isDestructedType()) { 1621*0b57cec5SDimitry Andric assert(LV.isSimple()); 1622*0b57cec5SDimitry Andric if (CGF.needsEHCleanup(dtorKind)) { 1623480093f4SDimitry Andric CGF.pushDestroy(EHCleanup, LV.getAddress(CGF), field->getType(), 1624*0b57cec5SDimitry Andric CGF.getDestroyer(dtorKind), false); 1625*0b57cec5SDimitry Andric addCleanup(CGF.EHStack.stable_begin()); 1626*0b57cec5SDimitry Andric pushedCleanup = true; 1627*0b57cec5SDimitry Andric } 1628*0b57cec5SDimitry Andric } 1629*0b57cec5SDimitry Andric 1630*0b57cec5SDimitry Andric // If the GEP didn't get used because of a dead zero init or something 1631*0b57cec5SDimitry Andric // else, clean it up for -O0 builds and general tidiness. 1632*0b57cec5SDimitry Andric if (!pushedCleanup && LV.isSimple()) 1633*0b57cec5SDimitry Andric if (llvm::GetElementPtrInst *GEP = 1634480093f4SDimitry Andric dyn_cast<llvm::GetElementPtrInst>(LV.getPointer(CGF))) 1635*0b57cec5SDimitry Andric if (GEP->use_empty()) 1636*0b57cec5SDimitry Andric GEP->eraseFromParent(); 1637*0b57cec5SDimitry Andric } 1638*0b57cec5SDimitry Andric 1639*0b57cec5SDimitry Andric // Deactivate all the partial cleanups in reverse order, which 1640*0b57cec5SDimitry Andric // generally means popping them. 1641*0b57cec5SDimitry Andric assert((cleanupDominator || cleanups.empty()) && 1642*0b57cec5SDimitry Andric "Missing cleanupDominator before deactivating cleanup blocks"); 1643*0b57cec5SDimitry Andric for (unsigned i = cleanups.size(); i != 0; --i) 1644*0b57cec5SDimitry Andric CGF.DeactivateCleanupBlock(cleanups[i-1], cleanupDominator); 1645*0b57cec5SDimitry Andric 1646*0b57cec5SDimitry Andric // Destroy the placeholder if we made one. 1647*0b57cec5SDimitry Andric if (cleanupDominator) 1648*0b57cec5SDimitry Andric cleanupDominator->eraseFromParent(); 1649*0b57cec5SDimitry Andric } 1650*0b57cec5SDimitry Andric 1651*0b57cec5SDimitry Andric void AggExprEmitter::VisitArrayInitLoopExpr(const ArrayInitLoopExpr *E, 1652*0b57cec5SDimitry Andric llvm::Value *outerBegin) { 1653*0b57cec5SDimitry Andric // Emit the common subexpression. 1654*0b57cec5SDimitry Andric CodeGenFunction::OpaqueValueMapping binding(CGF, E->getCommonExpr()); 1655*0b57cec5SDimitry Andric 1656*0b57cec5SDimitry Andric Address destPtr = EnsureSlot(E->getType()).getAddress(); 1657*0b57cec5SDimitry Andric uint64_t numElements = E->getArraySize().getZExtValue(); 1658*0b57cec5SDimitry Andric 1659*0b57cec5SDimitry Andric if (!numElements) 1660*0b57cec5SDimitry Andric return; 1661*0b57cec5SDimitry Andric 1662*0b57cec5SDimitry Andric // destPtr is an array*. Construct an elementType* by drilling down a level. 1663*0b57cec5SDimitry Andric llvm::Value *zero = llvm::ConstantInt::get(CGF.SizeTy, 0); 1664*0b57cec5SDimitry Andric llvm::Value *indices[] = {zero, zero}; 1665*0b57cec5SDimitry Andric llvm::Value *begin = Builder.CreateInBoundsGEP(destPtr.getPointer(), indices, 1666*0b57cec5SDimitry Andric "arrayinit.begin"); 1667*0b57cec5SDimitry Andric 1668*0b57cec5SDimitry Andric // Prepare to special-case multidimensional array initialization: we avoid 1669*0b57cec5SDimitry Andric // emitting multiple destructor loops in that case. 
1670*0b57cec5SDimitry Andric if (!outerBegin) 1671*0b57cec5SDimitry Andric outerBegin = begin; 1672*0b57cec5SDimitry Andric ArrayInitLoopExpr *InnerLoop = dyn_cast<ArrayInitLoopExpr>(E->getSubExpr()); 1673*0b57cec5SDimitry Andric 1674*0b57cec5SDimitry Andric QualType elementType = 1675*0b57cec5SDimitry Andric CGF.getContext().getAsArrayType(E->getType())->getElementType(); 1676*0b57cec5SDimitry Andric CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType); 1677*0b57cec5SDimitry Andric CharUnits elementAlign = 1678*0b57cec5SDimitry Andric destPtr.getAlignment().alignmentOfArrayElement(elementSize); 1679*0b57cec5SDimitry Andric 1680*0b57cec5SDimitry Andric llvm::BasicBlock *entryBB = Builder.GetInsertBlock(); 1681*0b57cec5SDimitry Andric llvm::BasicBlock *bodyBB = CGF.createBasicBlock("arrayinit.body"); 1682*0b57cec5SDimitry Andric 1683*0b57cec5SDimitry Andric // Jump into the body. 1684*0b57cec5SDimitry Andric CGF.EmitBlock(bodyBB); 1685*0b57cec5SDimitry Andric llvm::PHINode *index = 1686*0b57cec5SDimitry Andric Builder.CreatePHI(zero->getType(), 2, "arrayinit.index"); 1687*0b57cec5SDimitry Andric index->addIncoming(zero, entryBB); 1688*0b57cec5SDimitry Andric llvm::Value *element = Builder.CreateInBoundsGEP(begin, index); 1689*0b57cec5SDimitry Andric 1690*0b57cec5SDimitry Andric // Prepare for a cleanup. 1691*0b57cec5SDimitry Andric QualType::DestructionKind dtorKind = elementType.isDestructedType(); 1692*0b57cec5SDimitry Andric EHScopeStack::stable_iterator cleanup; 1693*0b57cec5SDimitry Andric if (CGF.needsEHCleanup(dtorKind) && !InnerLoop) { 1694*0b57cec5SDimitry Andric if (outerBegin->getType() != element->getType()) 1695*0b57cec5SDimitry Andric outerBegin = Builder.CreateBitCast(outerBegin, element->getType()); 1696*0b57cec5SDimitry Andric CGF.pushRegularPartialArrayCleanup(outerBegin, element, elementType, 1697*0b57cec5SDimitry Andric elementAlign, 1698*0b57cec5SDimitry Andric CGF.getDestroyer(dtorKind)); 1699*0b57cec5SDimitry Andric cleanup = CGF.EHStack.stable_begin(); 1700*0b57cec5SDimitry Andric } else { 1701*0b57cec5SDimitry Andric dtorKind = QualType::DK_none; 1702*0b57cec5SDimitry Andric } 1703*0b57cec5SDimitry Andric 1704*0b57cec5SDimitry Andric // Emit the actual filler expression. 1705*0b57cec5SDimitry Andric { 1706*0b57cec5SDimitry Andric // Temporaries created in an array initialization loop are destroyed 1707*0b57cec5SDimitry Andric // at the end of each iteration. 1708*0b57cec5SDimitry Andric CodeGenFunction::RunCleanupsScope CleanupsScope(CGF); 1709*0b57cec5SDimitry Andric CodeGenFunction::ArrayInitLoopExprScope Scope(CGF, index); 1710*0b57cec5SDimitry Andric LValue elementLV = 1711*0b57cec5SDimitry Andric CGF.MakeAddrLValue(Address(element, elementAlign), elementType); 1712*0b57cec5SDimitry Andric 1713*0b57cec5SDimitry Andric if (InnerLoop) { 1714*0b57cec5SDimitry Andric // If the subexpression is an ArrayInitLoopExpr, share its cleanup. 
1715*0b57cec5SDimitry Andric auto elementSlot = AggValueSlot::forLValue(
1716480093f4SDimitry Andric elementLV, CGF, AggValueSlot::IsDestructed,
1717480093f4SDimitry Andric AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
1718*0b57cec5SDimitry Andric AggValueSlot::DoesNotOverlap);
1719*0b57cec5SDimitry Andric AggExprEmitter(CGF, elementSlot, false)
1720*0b57cec5SDimitry Andric .VisitArrayInitLoopExpr(InnerLoop, outerBegin);
1721*0b57cec5SDimitry Andric } else
1722*0b57cec5SDimitry Andric EmitInitializationToLValue(E->getSubExpr(), elementLV);
1723*0b57cec5SDimitry Andric }
1724*0b57cec5SDimitry Andric
1725*0b57cec5SDimitry Andric // Move on to the next element.
1726*0b57cec5SDimitry Andric llvm::Value *nextIndex = Builder.CreateNUWAdd(
1727*0b57cec5SDimitry Andric index, llvm::ConstantInt::get(CGF.SizeTy, 1), "arrayinit.next");
1728*0b57cec5SDimitry Andric index->addIncoming(nextIndex, Builder.GetInsertBlock());
1729*0b57cec5SDimitry Andric
1730*0b57cec5SDimitry Andric // Leave the loop if we're done.
1731*0b57cec5SDimitry Andric llvm::Value *done = Builder.CreateICmpEQ(
1732*0b57cec5SDimitry Andric nextIndex, llvm::ConstantInt::get(CGF.SizeTy, numElements),
1733*0b57cec5SDimitry Andric "arrayinit.done");
1734*0b57cec5SDimitry Andric llvm::BasicBlock *endBB = CGF.createBasicBlock("arrayinit.end");
1735*0b57cec5SDimitry Andric Builder.CreateCondBr(done, endBB, bodyBB);
1736*0b57cec5SDimitry Andric
1737*0b57cec5SDimitry Andric CGF.EmitBlock(endBB);
1738*0b57cec5SDimitry Andric
1739*0b57cec5SDimitry Andric // Leave the partial-array cleanup if we entered one.
1740*0b57cec5SDimitry Andric if (dtorKind)
1741*0b57cec5SDimitry Andric CGF.DeactivateCleanupBlock(cleanup, index);
1742*0b57cec5SDimitry Andric }
1743*0b57cec5SDimitry Andric
1744*0b57cec5SDimitry Andric void AggExprEmitter::VisitDesignatedInitUpdateExpr(DesignatedInitUpdateExpr *E) {
1745*0b57cec5SDimitry Andric AggValueSlot Dest = EnsureSlot(E->getType());
1746*0b57cec5SDimitry Andric
1747*0b57cec5SDimitry Andric LValue DestLV = CGF.MakeAddrLValue(Dest.getAddress(), E->getType());
1748*0b57cec5SDimitry Andric EmitInitializationToLValue(E->getBase(), DestLV);
1749*0b57cec5SDimitry Andric VisitInitListExpr(E->getUpdater());
1750*0b57cec5SDimitry Andric }
1751*0b57cec5SDimitry Andric
1752*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
1753*0b57cec5SDimitry Andric // Entry Points into this File
1754*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
1755*0b57cec5SDimitry Andric
1756*0b57cec5SDimitry Andric /// GetNumNonZeroBytesInInit - Get an approximate count of the number of
1757*0b57cec5SDimitry Andric /// non-zero bytes that will be stored when outputting the initializer for the
1758*0b57cec5SDimitry Andric /// specified initializer expression.
1759*0b57cec5SDimitry Andric static CharUnits GetNumNonZeroBytesInInit(const Expr *E, CodeGenFunction &CGF) {
1760*0b57cec5SDimitry Andric E = E->IgnoreParens();
1761*0b57cec5SDimitry Andric
1762*0b57cec5SDimitry Andric // 0 and 0.0 won't require any non-zero stores!
1763*0b57cec5SDimitry Andric if (isSimpleZero(E, CGF)) return CharUnits::Zero();
1764*0b57cec5SDimitry Andric
1765*0b57cec5SDimitry Andric // If this is an initlist expr, sum up the sizes of the (present)
1766*0b57cec5SDimitry Andric // elements. If this is something weird, assume the whole thing is non-zero.
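// For example, assuming 4-byte ints, 'int a[16] = {1};' is counted as only 4
// non-zero bytes (the single explicit element), which lets
// CheckAggExprForMemSetUse below zero the whole object with one memset and
// then store just the first element.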
1767*0b57cec5SDimitry Andric const InitListExpr *ILE = dyn_cast<InitListExpr>(E); 1768*0b57cec5SDimitry Andric while (ILE && ILE->isTransparent()) 1769*0b57cec5SDimitry Andric ILE = dyn_cast<InitListExpr>(ILE->getInit(0)); 1770*0b57cec5SDimitry Andric if (!ILE || !CGF.getTypes().isZeroInitializable(ILE->getType())) 1771*0b57cec5SDimitry Andric return CGF.getContext().getTypeSizeInChars(E->getType()); 1772*0b57cec5SDimitry Andric 1773*0b57cec5SDimitry Andric // InitListExprs for structs have to be handled carefully. If there are 1774*0b57cec5SDimitry Andric // reference members, we need to consider the size of the reference, not the 1775*0b57cec5SDimitry Andric // referencee. InitListExprs for unions and arrays can't have references. 1776*0b57cec5SDimitry Andric if (const RecordType *RT = E->getType()->getAs<RecordType>()) { 1777*0b57cec5SDimitry Andric if (!RT->isUnionType()) { 1778a7dea167SDimitry Andric RecordDecl *SD = RT->getDecl(); 1779*0b57cec5SDimitry Andric CharUnits NumNonZeroBytes = CharUnits::Zero(); 1780*0b57cec5SDimitry Andric 1781*0b57cec5SDimitry Andric unsigned ILEElement = 0; 1782*0b57cec5SDimitry Andric if (auto *CXXRD = dyn_cast<CXXRecordDecl>(SD)) 1783*0b57cec5SDimitry Andric while (ILEElement != CXXRD->getNumBases()) 1784*0b57cec5SDimitry Andric NumNonZeroBytes += 1785*0b57cec5SDimitry Andric GetNumNonZeroBytesInInit(ILE->getInit(ILEElement++), CGF); 1786*0b57cec5SDimitry Andric for (const auto *Field : SD->fields()) { 1787*0b57cec5SDimitry Andric // We're done once we hit the flexible array member or run out of 1788*0b57cec5SDimitry Andric // InitListExpr elements. 1789*0b57cec5SDimitry Andric if (Field->getType()->isIncompleteArrayType() || 1790*0b57cec5SDimitry Andric ILEElement == ILE->getNumInits()) 1791*0b57cec5SDimitry Andric break; 1792*0b57cec5SDimitry Andric if (Field->isUnnamedBitfield()) 1793*0b57cec5SDimitry Andric continue; 1794*0b57cec5SDimitry Andric 1795*0b57cec5SDimitry Andric const Expr *E = ILE->getInit(ILEElement++); 1796*0b57cec5SDimitry Andric 1797*0b57cec5SDimitry Andric // Reference values are always non-null and have the width of a pointer. 1798*0b57cec5SDimitry Andric if (Field->getType()->isReferenceType()) 1799*0b57cec5SDimitry Andric NumNonZeroBytes += CGF.getContext().toCharUnitsFromBits( 1800*0b57cec5SDimitry Andric CGF.getTarget().getPointerWidth(0)); 1801*0b57cec5SDimitry Andric else 1802*0b57cec5SDimitry Andric NumNonZeroBytes += GetNumNonZeroBytesInInit(E, CGF); 1803*0b57cec5SDimitry Andric } 1804*0b57cec5SDimitry Andric 1805*0b57cec5SDimitry Andric return NumNonZeroBytes; 1806*0b57cec5SDimitry Andric } 1807*0b57cec5SDimitry Andric } 1808*0b57cec5SDimitry Andric 1809*0b57cec5SDimitry Andric 1810*0b57cec5SDimitry Andric CharUnits NumNonZeroBytes = CharUnits::Zero(); 1811*0b57cec5SDimitry Andric for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) 1812*0b57cec5SDimitry Andric NumNonZeroBytes += GetNumNonZeroBytesInInit(ILE->getInit(i), CGF); 1813*0b57cec5SDimitry Andric return NumNonZeroBytes; 1814*0b57cec5SDimitry Andric } 1815*0b57cec5SDimitry Andric 1816*0b57cec5SDimitry Andric /// CheckAggExprForMemSetUse - If the initializer is large and has a lot of 1817*0b57cec5SDimitry Andric /// zeros in it, emit a memset and avoid storing the individual zeros. 1818*0b57cec5SDimitry Andric /// 1819*0b57cec5SDimitry Andric static void CheckAggExprForMemSetUse(AggValueSlot &Slot, const Expr *E, 1820*0b57cec5SDimitry Andric CodeGenFunction &CGF) { 1821*0b57cec5SDimitry Andric // If the slot is already known to be zeroed, nothing to do. 
Don't mess with
1822*0b57cec5SDimitry Andric // volatile stores.
1823*0b57cec5SDimitry Andric if (Slot.isZeroed() || Slot.isVolatile() || !Slot.getAddress().isValid())
1824*0b57cec5SDimitry Andric return;
1825*0b57cec5SDimitry Andric
1826*0b57cec5SDimitry Andric // C++ objects with a user-declared constructor don't need zero'ing.
1827*0b57cec5SDimitry Andric if (CGF.getLangOpts().CPlusPlus)
1828*0b57cec5SDimitry Andric if (const RecordType *RT = CGF.getContext()
1829*0b57cec5SDimitry Andric .getBaseElementType(E->getType())->getAs<RecordType>()) {
1830*0b57cec5SDimitry Andric const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
1831*0b57cec5SDimitry Andric if (RD->hasUserDeclaredConstructor())
1832*0b57cec5SDimitry Andric return;
1833*0b57cec5SDimitry Andric }
1834*0b57cec5SDimitry Andric
1835*0b57cec5SDimitry Andric // If the type is 16 bytes or smaller, prefer individual stores over memset.
1836*0b57cec5SDimitry Andric CharUnits Size = Slot.getPreferredSize(CGF.getContext(), E->getType());
1837*0b57cec5SDimitry Andric if (Size <= CharUnits::fromQuantity(16))
1838*0b57cec5SDimitry Andric return;
1839*0b57cec5SDimitry Andric
1840*0b57cec5SDimitry Andric // Check to see if over 3/4 of the initializer is known to be zero. If so,
1841*0b57cec5SDimitry Andric // we prefer to emit memset + individual stores for the rest.
1842*0b57cec5SDimitry Andric CharUnits NumNonZeroBytes = GetNumNonZeroBytesInInit(E, CGF);
1843*0b57cec5SDimitry Andric if (NumNonZeroBytes*4 > Size)
1844*0b57cec5SDimitry Andric return;
1845*0b57cec5SDimitry Andric
1846*0b57cec5SDimitry Andric // Okay, it seems like a good idea to use an initial memset, emit the call.
1847*0b57cec5SDimitry Andric llvm::Constant *SizeVal = CGF.Builder.getInt64(Size.getQuantity());
1848*0b57cec5SDimitry Andric
1849*0b57cec5SDimitry Andric Address Loc = Slot.getAddress();
1850*0b57cec5SDimitry Andric Loc = CGF.Builder.CreateElementBitCast(Loc, CGF.Int8Ty);
1851*0b57cec5SDimitry Andric CGF.Builder.CreateMemSet(Loc, CGF.Builder.getInt8(0), SizeVal, false);
1852*0b57cec5SDimitry Andric
1853*0b57cec5SDimitry Andric // Tell the AggExprEmitter that the slot is known zero.
1854*0b57cec5SDimitry Andric Slot.setZeroed();
1855*0b57cec5SDimitry Andric }
1856*0b57cec5SDimitry Andric
1857*0b57cec5SDimitry Andric
1858*0b57cec5SDimitry Andric
1859*0b57cec5SDimitry Andric
1860*0b57cec5SDimitry Andric /// EmitAggExpr - Emit the computation of the specified expression of aggregate
1861*0b57cec5SDimitry Andric /// type. The result is computed into the given slot. If the slot is ignored,
1862*0b57cec5SDimitry Andric /// the value of the aggregate expression is not needed; otherwise the slot
1863*0b57cec5SDimitry Andric /// must provide a valid address.
1864*0b57cec5SDimitry Andric void CodeGenFunction::EmitAggExpr(const Expr *E, AggValueSlot Slot) {
1865*0b57cec5SDimitry Andric assert(E && hasAggregateEvaluationKind(E->getType()) &&
1866*0b57cec5SDimitry Andric "Invalid aggregate expression to emit");
1867*0b57cec5SDimitry Andric assert((Slot.getAddress().isValid() || Slot.isIgnored()) &&
1868*0b57cec5SDimitry Andric "slot has bits but no address");
1869*0b57cec5SDimitry Andric
1870*0b57cec5SDimitry Andric // Optimize the slot if possible.
  CheckAggExprForMemSetUse(Slot, E, *this);

  AggExprEmitter(*this, Slot, Slot.isIgnored()).Visit(const_cast<Expr*>(E));
}

LValue CodeGenFunction::EmitAggExprToLValue(const Expr *E) {
  assert(hasAggregateEvaluationKind(E->getType()) && "Invalid argument!");
  Address Temp = CreateMemTemp(E->getType());
  LValue LV = MakeAddrLValue(Temp, E->getType());
  EmitAggExpr(E, AggValueSlot::forLValue(
                     LV, *this, AggValueSlot::IsNotDestructed,
                     AggValueSlot::DoesNotNeedGCBarriers,
                     AggValueSlot::IsNotAliased, AggValueSlot::DoesNotOverlap));
  return LV;
}

AggValueSlot::Overlap_t
CodeGenFunction::getOverlapForFieldInit(const FieldDecl *FD) {
  if (!FD->hasAttr<NoUniqueAddressAttr>() || !FD->getType()->isRecordType())
    return AggValueSlot::DoesNotOverlap;

  // If the field lies entirely within the enclosing class's nvsize, its tail
  // padding cannot overlap any already-initialized object.  (The only
  // subobjects with greater addresses that might already be initialized are
  // vbases.)
  const RecordDecl *ClassRD = FD->getParent();
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(ClassRD);
  if (Layout.getFieldOffset(FD->getFieldIndex()) +
          getContext().getTypeSize(FD->getType()) <=
      (uint64_t)getContext().toBits(Layout.getNonVirtualSize()))
    return AggValueSlot::DoesNotOverlap;

  // The tail padding may contain values we need to preserve.
  return AggValueSlot::MayOverlap;
}

AggValueSlot::Overlap_t CodeGenFunction::getOverlapForBaseInit(
    const CXXRecordDecl *RD, const CXXRecordDecl *BaseRD, bool IsVirtual) {
  // If the most-derived object is a field declared with [[no_unique_address]],
  // the tail padding of any virtual base could be reused for other subobjects
  // of that field's class.
  if (IsVirtual)
    return AggValueSlot::MayOverlap;

  // If the base class is laid out entirely within the nvsize of the derived
  // class, its tail padding cannot yet be initialized, so we can issue
  // stores at the full width of the base class.
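  //
  // For an illustrative (hypothetical) case where that does not hold, consider
  //   struct B { int i; char c; B(); };   // non-POD: dsize(B) < sizeof(B)
  //   struct V { char v; };
  //   struct D : B, virtual V {};
  // The storage for V may be allocated inside B's tail padding, and V is
  // constructed before B when a complete D is constructed, so full-width
  // stores to the B subobject could clobber an already-initialized vbase.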
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
  if (Layout.getBaseClassOffset(BaseRD) +
          getContext().getASTRecordLayout(BaseRD).getSize() <=
      Layout.getNonVirtualSize())
    return AggValueSlot::DoesNotOverlap;

  // The tail padding may contain values we need to preserve.
  return AggValueSlot::MayOverlap;
}

void CodeGenFunction::EmitAggregateCopy(LValue Dest, LValue Src, QualType Ty,
                                        AggValueSlot::Overlap_t MayOverlap,
                                        bool isVolatile) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  Address DestPtr = Dest.getAddress(*this);
  Address SrcPtr = Src.getAddress(*this);

  if (getLangOpts().CPlusPlus) {
    if (const RecordType *RT = Ty->getAs<RecordType>()) {
      CXXRecordDecl *Record = cast<CXXRecordDecl>(RT->getDecl());
      assert((Record->hasTrivialCopyConstructor() ||
              Record->hasTrivialCopyAssignment() ||
              Record->hasTrivialMoveConstructor() ||
              Record->hasTrivialMoveAssignment() ||
              Record->isUnion()) &&
             "Trying to aggregate-copy a type without a trivial copy/move "
             "constructor or assignment operator");
      // Ignore empty classes in C++.
      if (Record->isEmpty())
        return;
    }
  }

  if (getLangOpts().CUDAIsDevice) {
    if (Ty->isCUDADeviceBuiltinSurfaceType()) {
      if (getTargetHooks().emitCUDADeviceBuiltinSurfaceDeviceCopy(*this, Dest,
                                                                  Src))
        return;
    } else if (Ty->isCUDADeviceBuiltinTextureType()) {
      if (getTargetHooks().emitCUDADeviceBuiltinTextureDeviceCopy(*this, Dest,
                                                                  Src))
        return;
    }
  }

  // Aggregate assignment turns into llvm.memcpy.  This is almost valid per
  // C99 6.5.16.1p3, which states "If the value being stored in an object is
  // read from another object that overlaps in any way the storage of the first
  // object, then the overlap shall be exact and the two objects shall have
  // qualified or unqualified versions of a compatible type."
  //
  // memcpy is not defined if the source and destination pointers are exactly
  // equal, but other compilers do this optimization, and almost every memcpy
  // implementation handles this case safely.
  // If there is a libc that does not safely handle this, we can add a target
  // hook.

  // Get data size info for this aggregate.  Don't copy the tail padding if this
  // might be a potentially-overlapping subobject, since the tail padding might
  // be occupied by a different object.  Otherwise, copying it is fine.
  std::pair<CharUnits, CharUnits> TypeInfo;
  if (MayOverlap)
    TypeInfo = getContext().getTypeInfoDataSizeInChars(Ty);
  else
    TypeInfo = getContext().getTypeInfoInChars(Ty);

  llvm::Value *SizeVal = nullptr;
  if (TypeInfo.first.isZero()) {
    // But note that getTypeInfo returns 0 for a VLA.
    if (auto *VAT = dyn_cast_or_null<VariableArrayType>(
            getContext().getAsArrayType(Ty))) {
      QualType BaseEltTy;
      SizeVal = emitArrayLength(VAT, BaseEltTy, DestPtr);
      TypeInfo = getContext().getTypeInfoInChars(BaseEltTy);
      assert(!TypeInfo.first.isZero());
      SizeVal = Builder.CreateNUWMul(
          SizeVal,
          llvm::ConstantInt::get(SizeTy, TypeInfo.first.getQuantity()));
    }
  }
  if (!SizeVal) {
    SizeVal = llvm::ConstantInt::get(SizeTy, TypeInfo.first.getQuantity());
  }

  // FIXME: If we have a volatile struct, the optimizer can remove what might
  // appear to be `extra' memory ops:
  //
  // volatile struct { int i; } a, b;
  //
  // int main() {
  //   a = b;
  //   a = b;
  // }
  //
  // we need to use a different call here.  We use isVolatile to indicate when
  // either the source or the destination is volatile.

  DestPtr = Builder.CreateElementBitCast(DestPtr, Int8Ty);
  SrcPtr = Builder.CreateElementBitCast(SrcPtr, Int8Ty);

  // Don't do any of the memmove_collectable tests if GC isn't set.
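  // Under Objective-C garbage collection, an aggregate containing object-pointer
  // members is copied through the collector-aware runtime entry point (see the
  // EmitGCMemmoveCollectable calls below) rather than a plain memcpy, so that
  // the collector is notified of the copied object pointers.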
  if (CGM.getLangOpts().getGC() == LangOptions::NonGC) {
    // fall through
  } else if (const RecordType *RecordTy = Ty->getAs<RecordType>()) {
    RecordDecl *Record = RecordTy->getDecl();
    if (Record->hasObjectMember()) {
      CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
                                                    SizeVal);
      return;
    }
  } else if (Ty->isArrayType()) {
    QualType BaseType = getContext().getBaseElementType(Ty);
    if (const RecordType *RecordTy = BaseType->getAs<RecordType>()) {
      if (RecordTy->getDecl()->hasObjectMember()) {
        CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
                                                      SizeVal);
        return;
      }
    }
  }

  auto Inst = Builder.CreateMemCpy(DestPtr, SrcPtr, SizeVal, isVolatile);

  // Determine the metadata to describe the position of any padding in this
  // memcpy, as well as the TBAA tags for the members of the struct, in case
  // the optimizer wishes to expand it into scalar memory operations.
  if (llvm::MDNode *TBAAStructTag = CGM.getTBAAStructInfo(Ty))
    Inst->setMetadata(llvm::LLVMContext::MD_tbaa_struct, TBAAStructTag);

  if (CGM.getCodeGenOpts().NewStructPathTBAA) {
    TBAAAccessInfo TBAAInfo = CGM.mergeTBAAInfoForMemoryTransfer(
        Dest.getTBAAInfo(), Src.getTBAAInfo());
    CGM.DecorateInstructionWithTBAA(Inst, TBAAInfo);
  }
}