//===--- CGExprAgg.cpp - Emit LLVM Code from Aggregate Expressions --------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Aggregate Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGObjCRuntime.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Intrinsics.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
//                        Aggregate Expression Emitter
//===----------------------------------------------------------------------===//

namespace  {
class AggExprEmitter : public StmtVisitor<AggExprEmitter> {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  AggValueSlot Dest;
  bool IgnoreResult;
  bool RequiresGCollection;

  ReturnValueSlot getReturnValueSlot() const {
    // If the destination slot requires garbage collection, we can't
    // use the real return value slot, because we have to use the GC
    // API.
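    // (Concretely, the final copy must go through the Objective-C runtime's
    // collectable memmove; see EmitGCMove and EmitFinalDestCopy below.)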
    if (RequiresGCollection) return ReturnValueSlot();

    return ReturnValueSlot(Dest.getAddr(), Dest.isVolatile());
  }

  AggValueSlot EnsureSlot(QualType T) {
    if (!Dest.isIgnored()) return Dest;
    return CGF.CreateAggTemp(T, "agg.tmp.ensured");
  }

public:
  AggExprEmitter(CodeGenFunction &cgf, AggValueSlot Dest,
                 bool ignore, bool requiresGCollection)
    : CGF(cgf), Builder(CGF.Builder), Dest(Dest),
      IgnoreResult(ignore), RequiresGCollection(requiresGCollection) {
  }

  //===--------------------------------------------------------------------===//
  //                               Utilities
  //===--------------------------------------------------------------------===//

  /// EmitAggLoadOfLValue - Given an expression with aggregate type that
  /// represents an lvalue, this method emits the address of the lvalue,
  /// then copies the result into the destination slot.
  void EmitAggLoadOfLValue(const Expr *E);

  /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
  void EmitFinalDestCopy(const Expr *E, LValue Src, bool Ignore = false);
  void EmitFinalDestCopy(const Expr *E, RValue Src, bool Ignore = false);

  void EmitGCMove(const Expr *E, RValue Src);

  bool TypeRequiresGCollection(QualType T);

  //===--------------------------------------------------------------------===//
  //                            Visitor Methods
  //===--------------------------------------------------------------------===//

  void VisitStmt(Stmt *S) {
    CGF.ErrorUnsupported(S, "aggregate expression");
  }
  void VisitParenExpr(ParenExpr *PE) { Visit(PE->getSubExpr()); }
  void VisitUnaryExtension(UnaryOperator *E) { Visit(E->getSubExpr()); }

  // l-values.
  void VisitDeclRefExpr(DeclRefExpr *DRE) { EmitAggLoadOfLValue(DRE); }
  void VisitMemberExpr(MemberExpr *ME) { EmitAggLoadOfLValue(ME); }
  void VisitUnaryDeref(UnaryOperator *E) { EmitAggLoadOfLValue(E); }
  void VisitStringLiteral(StringLiteral *E) { EmitAggLoadOfLValue(E); }
  void VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitArraySubscriptExpr(ArraySubscriptExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitBlockDeclRefExpr(const BlockDeclRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitPredefinedExpr(const PredefinedExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  // Operators.
  void VisitCastExpr(CastExpr *E);
  void VisitCallExpr(const CallExpr *E);
  void VisitStmtExpr(const StmtExpr *E);
  void VisitBinaryOperator(const BinaryOperator *BO);
  void VisitPointerToDataMemberBinaryOperator(const BinaryOperator *BO);
  void VisitBinAssign(const BinaryOperator *E);
  void VisitBinComma(const BinaryOperator *E);

  void VisitObjCMessageExpr(ObjCMessageExpr *E);
  void VisitObjCIvarRefExpr(ObjCIvarRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitObjCPropertyRefExpr(ObjCPropertyRefExpr *E);
  void VisitObjCImplicitSetterGetterRefExpr(ObjCImplicitSetterGetterRefExpr *E);

  void VisitConditionalOperator(const ConditionalOperator *CO);
  void VisitChooseExpr(const ChooseExpr *CE);
  void VisitInitListExpr(InitListExpr *E);
  void VisitImplicitValueInitExpr(ImplicitValueInitExpr *E);
  void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    Visit(DAE->getExpr());
  }
  void VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E);
  void VisitCXXConstructExpr(const CXXConstructExpr *E);
  void VisitCXXExprWithTemporaries(CXXExprWithTemporaries *E);
  void VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E);
  void VisitCXXTypeidExpr(CXXTypeidExpr *E) { EmitAggLoadOfLValue(E); }

  void VisitVAArgExpr(VAArgExpr *E);

  void EmitInitializationToLValue(Expr *E, LValue Address, QualType T);
  void EmitNullInitializationToLValue(LValue Address, QualType T);
  //  case Expr::ChooseExprClass:
  void VisitCXXThrowExpr(const CXXThrowExpr *E) { CGF.EmitCXXThrowExpr(E); }
};
}  // end anonymous namespace.

//===----------------------------------------------------------------------===//
//                                Utilities
//===----------------------------------------------------------------------===//

/// EmitAggLoadOfLValue - Given an expression with aggregate type that
/// represents an lvalue, this method emits the address of the lvalue,
/// then copies the result into the destination slot.
void AggExprEmitter::EmitAggLoadOfLValue(const Expr *E) {
  LValue LV = CGF.EmitLValue(E);
  EmitFinalDestCopy(E, LV);
}

/// \brief True if the given aggregate type requires special GC API calls.
bool AggExprEmitter::TypeRequiresGCollection(QualType T) {
  // Only record types have members that might require garbage collection.
  const RecordType *RecordTy = T->getAs<RecordType>();
  if (!RecordTy) return false;

  // Don't mess with non-trivial C++ types.
  RecordDecl *Record = RecordTy->getDecl();
  if (isa<CXXRecordDecl>(Record) &&
      (!cast<CXXRecordDecl>(Record)->hasTrivialCopyConstructor() ||
       !cast<CXXRecordDecl>(Record)->hasTrivialDestructor()))
    return false;

  // Check whether the type has an object member.
  return Record->hasObjectMember();
}

/// \brief Perform the final move to DestPtr if RequiresGCollection is set.
///
/// The idea is that you do something like this:
///   RValue Result = EmitSomething(..., getReturnValueSlot());
///   EmitGCMove(E, Result);
/// If GC doesn't interfere, this will cause the result to be emitted
/// directly into the return value slot.  If GC does interfere, a final
/// move will be performed.
void AggExprEmitter::EmitGCMove(const Expr *E, RValue Src) {
  if (RequiresGCollection) {
    std::pair<uint64_t, unsigned> TypeInfo =
      CGF.getContext().getTypeInfo(E->getType());
    unsigned long size = TypeInfo.first/8;
    const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());
    llvm::Value *SizeVal = llvm::ConstantInt::get(SizeTy, size);
    CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF, Dest.getAddr(),
                                                      Src.getAggregateAddr(),
                                                      SizeVal);
  }
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(const Expr *E, RValue Src, bool Ignore) {
  assert(Src.isAggregate() && "value must be aggregate value!");

  // If Dest is ignored, then we're evaluating an aggregate expression
  // in a context (like an expression statement) that doesn't care
  // about the result.  C says that an lvalue-to-rvalue conversion is
  // performed in these cases; C++ says that it is not.  In either
  // case, we don't actually need to do anything unless the value is
  // volatile.
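  //
  // For example, given 'volatile struct S vs;', the expression statement
  // 'vs;' requires a volatile load in C but not in C++, so only the C case
  // falls through and forces a copy into a temporary below.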
  if (Dest.isIgnored()) {
    if (!Src.isVolatileQualified() ||
        CGF.CGM.getLangOptions().CPlusPlus ||
        (IgnoreResult && Ignore))
      return;

    // If the source is volatile, we must read from it; to do that, we need
    // some place to put it.
    Dest = CGF.CreateAggTemp(E->getType(), "agg.tmp");
  }

  if (RequiresGCollection) {
    std::pair<uint64_t, unsigned> TypeInfo =
      CGF.getContext().getTypeInfo(E->getType());
    unsigned long size = TypeInfo.first/8;
    const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());
    llvm::Value *SizeVal = llvm::ConstantInt::get(SizeTy, size);
    CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF,
                                                      Dest.getAddr(),
                                                      Src.getAggregateAddr(),
                                                      SizeVal);
    return;
  }
  // If the result of the assignment is used, copy the LHS there also.
  // FIXME: Pass VolatileDest as well.  I think we also need to merge volatile
  // from the source, as we can't eliminate the copy if either operand is
  // volatile, unless the copy is volatile for both source and destination.
  CGF.EmitAggregateCopy(Dest.getAddr(), Src.getAggregateAddr(), E->getType(),
                        Dest.isVolatile()|Src.isVolatileQualified());
}

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(const Expr *E, LValue Src, bool Ignore) {
  assert(Src.isSimple() && "Can't have aggregate bitfield, vector, etc");

  EmitFinalDestCopy(E, RValue::getAggregate(Src.getAddress(),
                                            Src.isVolatileQualified()),
                    Ignore);
}

//===----------------------------------------------------------------------===//
//                            Visitor Methods
//===----------------------------------------------------------------------===//

void AggExprEmitter::VisitCastExpr(CastExpr *E) {
  if (Dest.isIgnored() && E->getCastKind() != CK_Dynamic) {
    Visit(E->getSubExpr());
    return;
  }

  switch (E->getCastKind()) {
  default: assert(0 && "Unhandled cast kind!");

  case CK_Dynamic: {
    assert(isa<CXXDynamicCastExpr>(E) && "CK_Dynamic without a dynamic_cast?");
    LValue LV = CGF.EmitCheckedLValue(E->getSubExpr());
    // FIXME: Do we also need to handle property references here?
    if (LV.isSimple())
      CGF.EmitDynamicCast(LV.getAddress(), cast<CXXDynamicCastExpr>(E));
    else
      CGF.CGM.ErrorUnsupported(E, "non-simple lvalue dynamic_cast");

    if (!Dest.isIgnored())
      CGF.CGM.ErrorUnsupported(E, "lvalue dynamic_cast with a destination");
    break;
  }

  case CK_ToUnion: {
    // GCC union extension.
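    //
    // A rough sketch of what the extension lets you write:
    //
    //   union U { int i; float f; };
    //   union U u = (union U)1;   // initializes the 'int' member
    //
    // We store the operand into the destination through a pointer to the
    // operand's own type.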
    QualType Ty = E->getSubExpr()->getType();
    QualType PtrTy = CGF.getContext().getPointerType(Ty);
    llvm::Value *CastPtr = Builder.CreateBitCast(Dest.getAddr(),
                                                 CGF.ConvertType(PtrTy));
    EmitInitializationToLValue(E->getSubExpr(), CGF.MakeAddrLValue(CastPtr, Ty),
                               Ty);
    break;
  }

  case CK_DerivedToBase:
  case CK_BaseToDerived:
  case CK_UncheckedDerivedToBase: {
    assert(0 && "cannot perform hierarchy conversion in EmitAggExpr: "
                "should have been unpacked before we got here");
    break;
  }

  // FIXME: Remove the CK_Unknown check here.
  case CK_Unknown:
  case CK_NoOp:
  case CK_UserDefinedConversion:
  case CK_ConstructorConversion:
    assert(CGF.getContext().hasSameUnqualifiedType(E->getSubExpr()->getType(),
                                                   E->getType()) &&
           "Implicit cast types must be compatible");
    Visit(E->getSubExpr());
    break;

  case CK_LValueBitCast:
    llvm_unreachable("there are no lvalue bit-casts on aggregates");
    break;
  }
}

void AggExprEmitter::VisitCallExpr(const CallExpr *E) {
  if (E->getCallReturnType()->isReferenceType()) {
    EmitAggLoadOfLValue(E);
    return;
  }

  RValue RV = CGF.EmitCallExpr(E, getReturnValueSlot());
  EmitGCMove(E, RV);
}

void AggExprEmitter::VisitObjCMessageExpr(ObjCMessageExpr *E) {
  RValue RV = CGF.EmitObjCMessageExpr(E, getReturnValueSlot());
  EmitGCMove(E, RV);
}

void AggExprEmitter::VisitObjCPropertyRefExpr(ObjCPropertyRefExpr *E) {
  RValue RV = CGF.EmitObjCPropertyGet(E, getReturnValueSlot());
  EmitGCMove(E, RV);
}

void AggExprEmitter::VisitObjCImplicitSetterGetterRefExpr(
                                   ObjCImplicitSetterGetterRefExpr *E) {
  RValue RV = CGF.EmitObjCPropertyGet(E, getReturnValueSlot());
  EmitGCMove(E, RV);
}

void AggExprEmitter::VisitBinComma(const BinaryOperator *E) {
  CGF.EmitAnyExpr(E->getLHS(), AggValueSlot::ignored(), true);
  Visit(E->getRHS());
}

void AggExprEmitter::VisitStmtExpr(const StmtExpr *E) {
  CGF.EmitCompoundStmt(*E->getSubStmt(), true, Dest);
}

void AggExprEmitter::VisitBinaryOperator(const BinaryOperator *E) {
  if (E->getOpcode() == BO_PtrMemD || E->getOpcode() == BO_PtrMemI)
    VisitPointerToDataMemberBinaryOperator(E);
  else
    CGF.ErrorUnsupported(E, "aggregate binary expression");
}

void AggExprEmitter::VisitPointerToDataMemberBinaryOperator(
                                                    const BinaryOperator *E) {
  LValue LV = CGF.EmitPointerToDataMemberBinaryExpr(E);
  EmitFinalDestCopy(E, LV);
}

void AggExprEmitter::VisitBinAssign(const BinaryOperator *E) {
  // For an assignment to work, the value on the right has
  // to be compatible with the value on the left.
  assert(CGF.getContext().hasSameUnqualifiedType(E->getLHS()->getType(),
                                                 E->getRHS()->getType())
         && "Invalid assignment");
  LValue LHS = CGF.EmitLValue(E->getLHS());

  // We have to special case property setters; otherwise we must have
  // a simple lvalue (no aggregates inside vectors, bitfields).
  if (LHS.isPropertyRef()) {
    AggValueSlot Slot = EnsureSlot(E->getRHS()->getType());
    CGF.EmitAggExpr(E->getRHS(), Slot);
    CGF.EmitObjCPropertySet(LHS.getPropertyRefExpr(), Slot.asRValue());
  } else if (LHS.isKVCRef()) {
    AggValueSlot Slot = EnsureSlot(E->getRHS()->getType());
    CGF.EmitAggExpr(E->getRHS(), Slot);
    CGF.EmitObjCPropertySet(LHS.getKVCRefExpr(), Slot.asRValue());
  } else {
    bool RequiresGCollection = false;
    if (CGF.getContext().getLangOptions().getGCMode())
      RequiresGCollection = TypeRequiresGCollection(E->getLHS()->getType());

    // Codegen the RHS so that it stores directly into the LHS.
    AggValueSlot LHSSlot = AggValueSlot::forLValue(LHS, true);
    CGF.EmitAggExpr(E->getRHS(), LHSSlot, false, RequiresGCollection);
    EmitFinalDestCopy(E, LHS, true);
  }
}

void AggExprEmitter::VisitConditionalOperator(const ConditionalOperator *E) {
  if (!E->getLHS()) {
    CGF.ErrorUnsupported(E, "conditional operator with missing LHS");
    return;
  }

  llvm::BasicBlock *LHSBlock = CGF.createBasicBlock("cond.true");
  llvm::BasicBlock *RHSBlock = CGF.createBasicBlock("cond.false");
  llvm::BasicBlock *ContBlock = CGF.createBasicBlock("cond.end");

  CGF.EmitBranchOnBoolExpr(E->getCond(), LHSBlock, RHSBlock);

  CGF.BeginConditionalBranch();
  CGF.EmitBlock(LHSBlock);

  // The GNU ?: extension with a missing LHS was rejected above, so an
  // aggregate conditional must have an LHS here.
  assert(E->getLHS() && "Must have LHS for aggregate value");

  Visit(E->getLHS());
  CGF.EndConditionalBranch();
  CGF.EmitBranch(ContBlock);

  CGF.BeginConditionalBranch();
  CGF.EmitBlock(RHSBlock);

  Visit(E->getRHS());
  CGF.EndConditionalBranch();
  CGF.EmitBranch(ContBlock);

  CGF.EmitBlock(ContBlock);
}

void AggExprEmitter::VisitChooseExpr(const ChooseExpr *CE) {
  Visit(CE->getChosenSubExpr(CGF.getContext()));
}

void AggExprEmitter::VisitVAArgExpr(VAArgExpr *VE) {
  llvm::Value *ArgValue = CGF.EmitVAListRef(VE->getSubExpr());
  llvm::Value *ArgPtr = CGF.EmitVAArg(ArgValue, VE->getType());

  if (!ArgPtr) {
    CGF.ErrorUnsupported(VE, "aggregate va_arg expression");
    return;
  }

  EmitFinalDestCopy(VE, CGF.MakeAddrLValue(ArgPtr, VE->getType()));
}

void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {
  // Ensure that we have a slot, but if we already do, remember
  // whether its lifetime was externally managed.
  bool WasManaged = Dest.isLifetimeExternallyManaged();
  Dest = EnsureSlot(E->getType());
  Dest.setLifetimeExternallyManaged();

  Visit(E->getSubExpr());

  // Set up the temporary's destructor if its lifetime wasn't already
  // being managed.
  if (!WasManaged)
    CGF.EmitCXXTemporary(E->getTemporary(), Dest.getAddr());
}

void
AggExprEmitter::VisitCXXConstructExpr(const CXXConstructExpr *E) {
  AggValueSlot Slot = EnsureSlot(E->getType());
  CGF.EmitCXXConstructExpr(E, Slot);
}

void AggExprEmitter::VisitCXXExprWithTemporaries(CXXExprWithTemporaries *E) {
  CGF.EmitCXXExprWithTemporaries(E, Dest);
}

void AggExprEmitter::VisitCXXScalarValueInitExpr(CXXScalarValueInitExpr *E) {
  QualType T = E->getType();
  AggValueSlot Slot = EnsureSlot(T);
  EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddr(), T), T);
}

void AggExprEmitter::VisitImplicitValueInitExpr(ImplicitValueInitExpr *E) {
  QualType T = E->getType();
  AggValueSlot Slot = EnsureSlot(T);
  EmitNullInitializationToLValue(CGF.MakeAddrLValue(Slot.getAddr(), T), T);
}

void
AggExprEmitter::EmitInitializationToLValue(Expr* E, LValue LV, QualType T) {
  // FIXME: Ignore result?
  // FIXME: Are initializers affected by volatile?
  if (isa<ImplicitValueInitExpr>(E)) {
    EmitNullInitializationToLValue(LV, T);
  } else if (T->isReferenceType()) {
    RValue RV = CGF.EmitReferenceBindingToExpr(E, /*InitializedDecl=*/0);
    CGF.EmitStoreThroughLValue(RV, LV, T);
  } else if (T->isAnyComplexType()) {
    CGF.EmitComplexExprIntoAddr(E, LV.getAddress(), false);
  } else if (CGF.hasAggregateLLVMType(T)) {
    CGF.EmitAggExpr(E, AggValueSlot::forAddr(LV.getAddress(), false, true));
  } else {
    CGF.EmitStoreThroughLValue(CGF.EmitAnyExpr(E), LV, T);
  }
}

void AggExprEmitter::EmitNullInitializationToLValue(LValue LV, QualType T) {
  if (!CGF.hasAggregateLLVMType(T)) {
    // For non-aggregates, we can store zero
    llvm::Value *Null = llvm::Constant::getNullValue(CGF.ConvertType(T));
    CGF.EmitStoreThroughLValue(RValue::get(Null), LV, T);
  } else {
    // There's a potential optimization opportunity in combining
    // memsets; that would be easy for arrays, but relatively
    // difficult for structures with the current code.
    CGF.EmitNullInitialization(LV.getAddress(), T);
  }
}

void AggExprEmitter::VisitInitListExpr(InitListExpr *E) {
#if 0
  // FIXME: Assess perf here?  Figure out what cases are worth optimizing here
  // (Length of globals? Chunks of zeroed-out space?).
  //
  // If we can, prefer a copy from a global; this is a lot less code for long
  // globals, and it's easier for the current optimizers to analyze.
  if (llvm::Constant* C = CGF.CGM.EmitConstantExpr(E, E->getType(), &CGF)) {
    llvm::GlobalVariable* GV =
      new llvm::GlobalVariable(CGF.CGM.getModule(), C->getType(), true,
                               llvm::GlobalValue::InternalLinkage, C, "");
    EmitFinalDestCopy(E, CGF.MakeAddrLValue(GV, E->getType()));
    return;
  }
#endif
  if (E->hadArrayRangeDesignator())
    CGF.ErrorUnsupported(E, "GNU array range designator extension");

  llvm::Value *DestPtr = Dest.getAddr();

  // Handle initialization of an array.
  if (E->getType()->isArrayType()) {
    const llvm::PointerType *APType =
      cast<llvm::PointerType>(DestPtr->getType());
    const llvm::ArrayType *AType =
      cast<llvm::ArrayType>(APType->getElementType());

    uint64_t NumInitElements = E->getNumInits();

    if (E->getNumInits() > 0) {
      QualType T1 = E->getType();
      QualType T2 = E->getInit(0)->getType();
      if (CGF.getContext().hasSameUnqualifiedType(T1, T2)) {
        EmitAggLoadOfLValue(E->getInit(0));
        return;
      }
    }

    uint64_t NumArrayElements = AType->getNumElements();
    QualType ElementType = CGF.getContext().getCanonicalType(E->getType());
    ElementType = CGF.getContext().getAsArrayType(ElementType)->getElementType();

    // FIXME: were we intentionally ignoring address spaces and GC attributes?

    for (uint64_t i = 0; i != NumArrayElements; ++i) {
      llvm::Value *NextVal = Builder.CreateStructGEP(DestPtr, i, ".array");
      LValue LV = CGF.MakeAddrLValue(NextVal, ElementType);
      if (i < NumInitElements)
        EmitInitializationToLValue(E->getInit(i), LV, ElementType);
      else
        EmitNullInitializationToLValue(LV, ElementType);
    }
    return;
  }

  assert(E->getType()->isRecordType() && "Only support structs/unions here!");

  // Do struct initialization; this code just sets each individual member
  // to the appropriate value.  This makes bitfield support automatic;
  // the disadvantage is that the generated code is harder for the
  // optimizer to analyze, especially with bitfields.
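  //
  // For instance (illustrative only), given
  //
  //   struct { int a : 3; unsigned b; } s = { 1, 2 };
  //
  // each field gets its own store (a bitfield store for 'a', an ordinary
  // store for 'b') rather than a single block copy from a constant.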
  unsigned NumInitElements = E->getNumInits();
  RecordDecl *SD = E->getType()->getAs<RecordType>()->getDecl();

  // If we're initializing the whole aggregate, just do it in place.
  // FIXME: This is a hack around an AST bug (PR6537).
  if (NumInitElements == 1 && E->getType() == E->getInit(0)->getType()) {
    EmitInitializationToLValue(E->getInit(0),
                               CGF.MakeAddrLValue(DestPtr, E->getType()),
                               E->getType());
    return;
  }

  if (E->getType()->isUnionType()) {
    // Only initialize one field of a union. The field itself is
    // specified by the initializer list.
    if (!E->getInitializedFieldInUnion()) {
      // Empty union; we have nothing to do.

#ifndef NDEBUG
      // Make sure that it's really an empty union and not a failure of
      // semantic analysis.
      for (RecordDecl::field_iterator Field = SD->field_begin(),
                                   FieldEnd = SD->field_end();
           Field != FieldEnd; ++Field)
        assert(Field->isUnnamedBitfield() && "Only unnamed bitfields allowed");
#endif
      return;
    }

    // FIXME: volatility
    FieldDecl *Field = E->getInitializedFieldInUnion();
    LValue FieldLoc = CGF.EmitLValueForFieldInitialization(DestPtr, Field, 0);

    if (NumInitElements) {
      // Store the initializer into the field.
      EmitInitializationToLValue(E->getInit(0), FieldLoc, Field->getType());
    } else {
      // Default-initialize to null.
      EmitNullInitializationToLValue(FieldLoc, Field->getType());
    }

    return;
  }

  // Here we iterate over the fields; this makes it simpler to both
  // default-initialize fields and skip over unnamed fields.
  unsigned CurInitVal = 0;
  for (RecordDecl::field_iterator Field = SD->field_begin(),
                               FieldEnd = SD->field_end();
       Field != FieldEnd; ++Field) {
    // We're done once we hit the flexible array member.
    if (Field->getType()->isIncompleteArrayType())
      break;

    if (Field->isUnnamedBitfield())
      continue;

    // FIXME: volatility
    LValue FieldLoc = CGF.EmitLValueForFieldInitialization(DestPtr, *Field, 0);
    // We never generate write-barriers for initialized fields.
    FieldLoc.setNonGC(true);
    if (CurInitVal < NumInitElements) {
      // Store the initializer into the field.
      EmitInitializationToLValue(E->getInit(CurInitVal++), FieldLoc,
                                 Field->getType());
    } else {
      // We're out of initializers; default-initialize to null.
      EmitNullInitializationToLValue(FieldLoc, Field->getType());
    }
  }
}

//===----------------------------------------------------------------------===//
//                        Entry Points into this File
//===----------------------------------------------------------------------===//

/// EmitAggExpr - Emit the computation of the specified expression of aggregate
/// type.  The result is computed into the given slot.  Note that if the slot
/// is ignored, the value of the aggregate expression is not needed.
//
// FIXME: Take Qualifiers object.
void CodeGenFunction::EmitAggExpr(const Expr *E, AggValueSlot Slot,
                                  bool IgnoreResult,
                                  bool RequiresGCollection) {
  assert(E && hasAggregateLLVMType(E->getType()) &&
         "Invalid aggregate expression to emit");
  assert((Slot.getAddr() != 0 || Slot.isIgnored())
         && "slot has bits but no address");

  AggExprEmitter(*this, Slot, IgnoreResult, RequiresGCollection)
    .Visit(const_cast<Expr*>(E));
}

LValue CodeGenFunction::EmitAggExprToLValue(const Expr *E) {
  assert(hasAggregateLLVMType(E->getType()) && "Invalid argument!");
  llvm::Value *Temp = CreateMemTemp(E->getType());
  LValue LV = MakeAddrLValue(Temp, E->getType());
  AggValueSlot Slot
    = AggValueSlot::forAddr(Temp, LV.isVolatileQualified(), false);
  EmitAggExpr(E, Slot);
  return LV;
}

void CodeGenFunction::EmitAggregateCopy(llvm::Value *DestPtr,
                                        llvm::Value *SrcPtr, QualType Ty,
                                        bool isVolatile) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  if (getContext().getLangOptions().CPlusPlus) {
    if (const RecordType *RT = Ty->getAs<RecordType>()) {
      CXXRecordDecl *Record = cast<CXXRecordDecl>(RT->getDecl());
      assert((Record->hasTrivialCopyConstructor() ||
              Record->hasTrivialCopyAssignment()) &&
             "Trying to aggregate-copy a type without a trivial copy "
             "constructor or assignment operator");
      // Ignore empty classes in C++.
      if (Record->isEmpty())
        return;
    }
  }

  // Aggregate assignment turns into llvm.memcpy.  This is almost valid per
  // C99 6.5.16.1p3, which states "If the value being stored in an object is
  // read from another object that overlaps in any way the storage of the
  // first object, then the overlap shall be exact and the two objects shall
  // have qualified or unqualified versions of a compatible type."
  //
  // memcpy is not defined if the source and destination pointers are exactly
  // equal, but other compilers do this optimization, and almost every memcpy
  // implementation handles this case safely.  If there is a libc that does not
  // safely handle this, we can add a target hook.
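  //
  // The exact-overlap case in question looks like:
  //
  //   struct S { int x[4]; } s;
  //   s = s;   // source and destination storage coincide exactly
  //
  // which is why llvm.memcpy (rather than llvm.memmove) is considered
  // acceptable here.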

  // Get size and alignment info for this aggregate.
  std::pair<uint64_t, unsigned> TypeInfo = getContext().getTypeInfo(Ty);

  // FIXME: Handle variable sized types.

  // FIXME: If we have a volatile struct, the optimizer can remove what might
  // appear to be `extra' memory ops:
  //
  // volatile struct { int i; } a, b;
  //
  // int main() {
  //   a = b;
  //   a = b;
  // }
  //
  // we need to use a different call here.  We use isVolatile to indicate when
  // either the source or the destination is volatile.

  const llvm::PointerType *DPT = cast<llvm::PointerType>(DestPtr->getType());
  const llvm::Type *DBP =
    llvm::Type::getInt8PtrTy(VMContext, DPT->getAddressSpace());
  DestPtr = Builder.CreateBitCast(DestPtr, DBP, "tmp");

  const llvm::PointerType *SPT = cast<llvm::PointerType>(SrcPtr->getType());
  const llvm::Type *SBP =
    llvm::Type::getInt8PtrTy(VMContext, SPT->getAddressSpace());
  SrcPtr = Builder.CreateBitCast(SrcPtr, SBP, "tmp");

  if (const RecordType *RecordTy = Ty->getAs<RecordType>()) {
    RecordDecl *Record = RecordTy->getDecl();
    if (Record->hasObjectMember()) {
      unsigned long size = TypeInfo.first/8;
      const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
      llvm::Value *SizeVal = llvm::ConstantInt::get(SizeTy, size);
      CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
                                                    SizeVal);
      return;
    }
  } else if (getContext().getAsArrayType(Ty)) {
    QualType BaseType = getContext().getBaseElementType(Ty);
    if (const RecordType *RecordTy = BaseType->getAs<RecordType>()) {
      if (RecordTy->getDecl()->hasObjectMember()) {
        unsigned long size = TypeInfo.first/8;
        const llvm::Type *SizeTy = ConvertType(getContext().getSizeType());
        llvm::Value *SizeVal = llvm::ConstantInt::get(SizeTy, size);
        CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this, DestPtr, SrcPtr,
                                                      SizeVal);
        return;
      }
    }
  }

  Builder.CreateCall5(CGM.getMemCpyFn(DestPtr->getType(), SrcPtr->getType(),
                                      IntPtrTy),
                      DestPtr, SrcPtr,
                      // TypeInfo.first describes size in bits.
                      llvm::ConstantInt::get(IntPtrTy, TypeInfo.first/8),
                      Builder.getInt32(TypeInfo.second/8),
                      Builder.getInt1(isVolatile));
}