//===--- CGExprAgg.cpp - Emit LLVM Code from Aggregate Expressions --------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Aggregate Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "CGObjCRuntime.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/StmtVisitor.h"
#include "llvm/Constants.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Intrinsics.h"
using namespace clang;
using namespace CodeGen;

//===----------------------------------------------------------------------===//
//                        Aggregate Expression Emitter
//===----------------------------------------------------------------------===//

namespace {
class VISIBILITY_HIDDEN AggExprEmitter : public StmtVisitor<AggExprEmitter> {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  llvm::Value *DestPtr;
  bool VolatileDest;
  bool IgnoreResult;
  bool IsInitializer;
  bool RequiresGCollection;
public:
  AggExprEmitter(CodeGenFunction &cgf, llvm::Value *destPtr, bool v,
                 bool ignore, bool isinit, bool requiresGCollection)
    : CGF(cgf), Builder(CGF.Builder),
      DestPtr(destPtr), VolatileDest(v), IgnoreResult(ignore),
      IsInitializer(isinit), RequiresGCollection(requiresGCollection) {
  }

  //===--------------------------------------------------------------------===//
  //                               Utilities
  //===--------------------------------------------------------------------===//

  /// EmitAggLoadOfLValue - Given an expression with aggregate type that
  /// represents an lvalue, this method emits the address of the lvalue,
  /// then copies the result into DestPtr.
  void EmitAggLoadOfLValue(const Expr *E);

  /// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
  void EmitFinalDestCopy(const Expr *E, LValue Src, bool Ignore = false);
  void EmitFinalDestCopy(const Expr *E, RValue Src, bool Ignore = false);

  //===--------------------------------------------------------------------===//
  //                            Visitor Methods
  //===--------------------------------------------------------------------===//

  void VisitStmt(Stmt *S) {
    CGF.ErrorUnsupported(S, "aggregate expression");
  }
  void VisitParenExpr(ParenExpr *PE) { Visit(PE->getSubExpr()); }
  void VisitUnaryExtension(UnaryOperator *E) { Visit(E->getSubExpr()); }

  // l-values.
  void VisitDeclRefExpr(DeclRefExpr *DRE) { EmitAggLoadOfLValue(DRE); }
  void VisitMemberExpr(MemberExpr *ME) { EmitAggLoadOfLValue(ME); }
  void VisitUnaryDeref(UnaryOperator *E) { EmitAggLoadOfLValue(E); }
  void VisitStringLiteral(StringLiteral *E) { EmitAggLoadOfLValue(E); }
  void VisitCompoundLiteralExpr(CompoundLiteralExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitArraySubscriptExpr(ArraySubscriptExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitBlockDeclRefExpr(const BlockDeclRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitPredefinedExpr(const PredefinedExpr *E) {
    EmitAggLoadOfLValue(E);
  }

  // Operators.
  void VisitCastExpr(CastExpr *E);
  void VisitCallExpr(const CallExpr *E);
  void VisitStmtExpr(const StmtExpr *E);
  void VisitBinaryOperator(const BinaryOperator *BO);
  void VisitBinAssign(const BinaryOperator *E);
  void VisitBinComma(const BinaryOperator *E);

  void VisitObjCMessageExpr(ObjCMessageExpr *E);
  void VisitObjCIvarRefExpr(ObjCIvarRefExpr *E) {
    EmitAggLoadOfLValue(E);
  }
  void VisitObjCPropertyRefExpr(ObjCPropertyRefExpr *E);
  void VisitObjCImplicitSetterGetterRefExpr(ObjCImplicitSetterGetterRefExpr *E);

  void VisitConditionalOperator(const ConditionalOperator *CO);
  void VisitChooseExpr(const ChooseExpr *CE);
  void VisitInitListExpr(InitListExpr *E);
  void VisitCXXDefaultArgExpr(CXXDefaultArgExpr *DAE) {
    Visit(DAE->getExpr());
  }
  void VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E);
  void VisitCXXConstructExpr(const CXXConstructExpr *E);
  void VisitCXXExprWithTemporaries(CXXExprWithTemporaries *E);

  void VisitVAArgExpr(VAArgExpr *E);

  void EmitInitializationToLValue(Expr *E, LValue Address);
  void EmitNullInitializationToLValue(LValue Address, QualType T);

};
}  // end anonymous namespace.

//===----------------------------------------------------------------------===//
//                                Utilities
//===----------------------------------------------------------------------===//

/// EmitAggLoadOfLValue - Given an expression with aggregate type that
/// represents an lvalue, this method emits the address of the lvalue,
/// then copies the result into DestPtr.
void AggExprEmitter::EmitAggLoadOfLValue(const Expr *E) {
  LValue LV = CGF.EmitLValue(E);
  EmitFinalDestCopy(E, LV);
}
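
// An illustrative example of the EmitAggLoadOfLValue path (a sketch; the
// names are made up).  Given something like
//
//   struct Point { int x, y; };
//   void f(struct Point);
//   void g(struct Point p) { f(p); }   // 'p' is a DeclRefExpr of struct type
//
// the DeclRefExpr 'p' is emitted as an address by EmitLValue, and
// EmitFinalDestCopy then copies its bytes into DestPtr (here, a temporary
// set up by whoever is emitting the call to 'f').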

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(const Expr *E, RValue Src, bool Ignore) {
  assert(Src.isAggregate() && "value must be aggregate value!");

  // If the result is ignored, don't copy from the value.
  if (DestPtr == 0) {
    if (!Src.isVolatileQualified() || (IgnoreResult && Ignore))
      return;
    // If the source is volatile, we must read from it; to do that, we need
    // some place to put it.
    DestPtr = CGF.CreateTempAlloca(CGF.ConvertType(E->getType()), "agg.tmp");
  }

  if (RequiresGCollection) {
    CGF.CGM.getObjCRuntime().EmitGCMemmoveCollectable(CGF,
                                              DestPtr, Src.getAggregateAddr(),
                                              E->getType());
    return;
  }
  // If the result of the assignment is used, copy the LHS there also.
  // FIXME: Pass VolatileDest as well.  We probably also need to merge the
  // volatile qualifier from the source: the copy can't be eliminated if either
  // operand is volatile, unless the copy is volatile for both source and
  // destination.
  CGF.EmitAggregateCopy(DestPtr, Src.getAggregateAddr(), E->getType(),
                        VolatileDest|Src.isVolatileQualified());
}
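
// An illustrative sketch of the DestPtr == 0 case above, assuming an
// expression statement whose result is unused:
//
//   volatile struct S { int i; } vs;
//   void f(void) { vs; }
//
// The value of 'vs' is ignored, but because the source is volatile the
// aggregate must still be read, so a temporary ("agg.tmp") is allocated
// purely to give the copy somewhere to land.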

/// EmitFinalDestCopy - Perform the final copy to DestPtr, if desired.
void AggExprEmitter::EmitFinalDestCopy(const Expr *E, LValue Src, bool Ignore) {
  assert(Src.isSimple() && "Can't have aggregate bitfield, vector, etc");

  EmitFinalDestCopy(E, RValue::getAggregate(Src.getAddress(),
                                            Src.isVolatileQualified()),
                    Ignore);
}

//===----------------------------------------------------------------------===//
//                            Visitor Methods
//===----------------------------------------------------------------------===//

void AggExprEmitter::VisitCastExpr(CastExpr *E) {
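  // The first special case below is the GNU "cast to union" extension.  As an
  // illustrative sketch (names made up):
  //
  //   union U { int i; float f; };
  //   union U u = (union U)42;        // CK_ToUnion
  //
  // The initializer is emitted directly into the matching member of the
  // destination by bitcasting DestPtr to a pointer to the subexpression's
  // type.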
  if (E->getCastKind() == CastExpr::CK_ToUnion) {
    // GCC union extension
    QualType PtrTy =
        CGF.getContext().getPointerType(E->getSubExpr()->getType());
    llvm::Value *CastPtr = Builder.CreateBitCast(DestPtr,
                                                 CGF.ConvertType(PtrTy));
    EmitInitializationToLValue(E->getSubExpr(),
                               LValue::MakeAddr(CastPtr, 0));
    return;
  }
  if (E->getCastKind() == CastExpr::CK_UserDefinedConversion) {
    if (const CXXFunctionalCastExpr *CXXFExpr =
          dyn_cast<CXXFunctionalCastExpr>(E))
      CGF.EmitCXXFunctionalCastExpr(CXXFExpr);
    else if (isa<CStyleCastExpr>(E))
      Visit(E->getSubExpr());
    return;
  }

  // FIXME: Remove the CK_Unknown check here.
  assert((E->getCastKind() == CastExpr::CK_NoOp ||
          E->getCastKind() == CastExpr::CK_Unknown) &&
         "Only no-op casts allowed!");
  assert(CGF.getContext().hasSameUnqualifiedType(E->getSubExpr()->getType(),
                                                 E->getType()) &&
         "Implicit cast types must be compatible");
  Visit(E->getSubExpr());
}

void AggExprEmitter::VisitCallExpr(const CallExpr *E) {
  if (E->getCallReturnType()->isReferenceType()) {
    EmitAggLoadOfLValue(E);
    return;
  }

  RValue RV = CGF.EmitCallExpr(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitObjCMessageExpr(ObjCMessageExpr *E) {
  RValue RV = CGF.EmitObjCMessageExpr(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitObjCPropertyRefExpr(ObjCPropertyRefExpr *E) {
  RValue RV = CGF.EmitObjCPropertyGet(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitObjCImplicitSetterGetterRefExpr(
                                   ObjCImplicitSetterGetterRefExpr *E) {
  RValue RV = CGF.EmitObjCPropertyGet(E);
  EmitFinalDestCopy(E, RV);
}

void AggExprEmitter::VisitBinComma(const BinaryOperator *E) {
  CGF.EmitAnyExpr(E->getLHS(), 0, false, true);
  CGF.EmitAggExpr(E->getRHS(), DestPtr, VolatileDest,
                  /*IgnoreResult=*/false, IsInitializer);
}
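
// An illustrative sketch of the comma-operator path above (names made up):
//
//   struct S make(void);
//   void note(void);
//   struct S s = (note(), make());
//
// The LHS 'note()' is emitted with its result ignored, and the RHS is then
// emitted straight into DestPtr, so no extra copy of the struct is needed.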

void AggExprEmitter::VisitStmtExpr(const StmtExpr *E) {
  CGF.EmitCompoundStmt(*E->getSubStmt(), true, DestPtr, VolatileDest);
}

void AggExprEmitter::VisitBinaryOperator(const BinaryOperator *E) {
  CGF.ErrorUnsupported(E, "aggregate binary expression");
}

void AggExprEmitter::VisitBinAssign(const BinaryOperator *E) {
  // For an assignment to work, the value on the right has
  // to be compatible with the value on the left.
  assert(CGF.getContext().hasSameUnqualifiedType(E->getLHS()->getType(),
                                                 E->getRHS()->getType())
         && "Invalid assignment");
  LValue LHS = CGF.EmitLValue(E->getLHS());

  // We have to special-case property setters; otherwise the LHS must be a
  // simple lvalue (aggregates can't live inside vectors or bitfields).
  if (LHS.isPropertyRef()) {
    llvm::Value *AggLoc = DestPtr;
    if (!AggLoc)
      AggLoc = CGF.CreateTempAlloca(CGF.ConvertType(E->getRHS()->getType()));
    CGF.EmitAggExpr(E->getRHS(), AggLoc, VolatileDest);
    CGF.EmitObjCPropertySet(LHS.getPropertyRefExpr(),
                            RValue::getAggregate(AggLoc, VolatileDest));
  } else if (LHS.isKVCRef()) {
    llvm::Value *AggLoc = DestPtr;
    if (!AggLoc)
      AggLoc = CGF.CreateTempAlloca(CGF.ConvertType(E->getRHS()->getType()));
    CGF.EmitAggExpr(E->getRHS(), AggLoc, VolatileDest);
    CGF.EmitObjCPropertySet(LHS.getKVCRefExpr(),
                            RValue::getAggregate(AggLoc, VolatileDest));
  } else {
    bool RequiresGCollection = false;
    if (CGF.getContext().getLangOptions().NeXTRuntime) {
      QualType LHSTy = E->getLHS()->getType();
      if (const RecordType *FDTTy = LHSTy.getTypePtr()->getAs<RecordType>())
        RequiresGCollection = FDTTy->getDecl()->hasObjectMember();
    }
    // Codegen the RHS so that it stores directly into the LHS.
    CGF.EmitAggExpr(E->getRHS(), LHS.getAddress(), LHS.isVolatileQualified(),
                    false, false, RequiresGCollection);
    EmitFinalDestCopy(E, LHS, true);
  }
}
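
// An illustrative sketch of the common (non-property) assignment path above:
//
//   struct S { int a[8]; } x, y;
//   void f(void) { x = y; }
//
// EmitLValue produces the address of 'x', the RHS is emitted so that it
// stores directly into that address, and EmitFinalDestCopy only copies again
// if some enclosing expression actually uses the result of the assignment.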

void AggExprEmitter::VisitConditionalOperator(const ConditionalOperator *E) {
  llvm::BasicBlock *LHSBlock = CGF.createBasicBlock("cond.true");
  llvm::BasicBlock *RHSBlock = CGF.createBasicBlock("cond.false");
  llvm::BasicBlock *ContBlock = CGF.createBasicBlock("cond.end");

  llvm::Value *Cond = CGF.EvaluateExprAsBool(E->getCond());
  Builder.CreateCondBr(Cond, LHSBlock, RHSBlock);

  CGF.PushConditionalTempDestruction();
  CGF.EmitBlock(LHSBlock);

  // The GNU missing-LHS extension ("cond ?: rhs") can never yield an
  // aggregate, since the omitted operand doubles as the condition, so the
  // LHS must be present here.
  assert(E->getLHS() && "Must have LHS for aggregate value");

  Visit(E->getLHS());
  CGF.PopConditionalTempDestruction();
  CGF.EmitBranch(ContBlock);

  CGF.PushConditionalTempDestruction();
  CGF.EmitBlock(RHSBlock);

  Visit(E->getRHS());
  CGF.PopConditionalTempDestruction();
  CGF.EmitBranch(ContBlock);

  CGF.EmitBlock(ContBlock);
}
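
// An illustrative sketch of the control flow emitted above:
//
//   struct S pick(int c, struct S a, struct S b) {
//     return c ? a : b;
//   }
//
// The condition is evaluated once, then exactly one of the two arms is
// emitted into DestPtr in its own basic block ("cond.true" / "cond.false"),
// and both blocks branch to "cond.end".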

void AggExprEmitter::VisitChooseExpr(const ChooseExpr *CE) {
  Visit(CE->getChosenSubExpr(CGF.getContext()));
}

void AggExprEmitter::VisitVAArgExpr(VAArgExpr *VE) {
  llvm::Value *ArgValue = CGF.EmitVAListRef(VE->getSubExpr());
  llvm::Value *ArgPtr = CGF.EmitVAArg(ArgValue, VE->getType());

  if (!ArgPtr) {
    CGF.ErrorUnsupported(VE, "aggregate va_arg expression");
    return;
  }

  EmitFinalDestCopy(VE, LValue::MakeAddr(ArgPtr, 0));
}
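
// An illustrative sketch of the aggregate va_arg path above (names made up):
//
//   #include <stdarg.h>
//   struct S { int x, y; };
//   int first(int n, ...) {
//     va_list ap;
//     va_start(ap, n);
//     struct S s = va_arg(ap, struct S);   // aggregate VAArgExpr
//     va_end(ap);
//     return s.x;
//   }
//
// When the target supports it, EmitVAArg returns the address of the argument
// within the va_list, and EmitFinalDestCopy copies that storage into DestPtr
// (here, the local 's').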

void AggExprEmitter::VisitCXXBindTemporaryExpr(CXXBindTemporaryExpr *E) {
  llvm::Value *Val = DestPtr;

  if (!Val) {
    // Create a temporary variable.
    Val = CGF.CreateTempAlloca(CGF.ConvertTypeForMem(E->getType()), "tmp");

    // FIXME: volatile
    CGF.EmitAggExpr(E->getSubExpr(), Val, false);
  } else
    Visit(E->getSubExpr());

  // Don't make this a live temporary if we're emitting an initializer expr.
  if (!IsInitializer)
    CGF.PushCXXTemporary(E->getTemporary(), Val);
}

void
AggExprEmitter::VisitCXXConstructExpr(const CXXConstructExpr *E) {
  llvm::Value *Val = DestPtr;

  if (!Val) {
    // Create a temporary variable.
    Val = CGF.CreateTempAlloca(CGF.ConvertTypeForMem(E->getType()), "tmp");
  }

  CGF.EmitCXXConstructExpr(Val, E);
}

void AggExprEmitter::VisitCXXExprWithTemporaries(CXXExprWithTemporaries *E) {
  CGF.EmitCXXExprWithTemporaries(E, DestPtr, VolatileDest, IsInitializer);
}

void AggExprEmitter::EmitInitializationToLValue(Expr* E, LValue LV) {
  // FIXME: Ignore result?
  // FIXME: Are initializers affected by volatile?
  if (isa<ImplicitValueInitExpr>(E)) {
    EmitNullInitializationToLValue(LV, E->getType());
  } else if (E->getType()->isComplexType()) {
    CGF.EmitComplexExprIntoAddr(E, LV.getAddress(), false);
  } else if (CGF.hasAggregateLLVMType(E->getType())) {
    CGF.EmitAnyExpr(E, LV.getAddress(), false);
  } else {
    CGF.EmitStoreThroughLValue(CGF.EmitAnyExpr(E), LV, E->getType());
  }
}

void AggExprEmitter::EmitNullInitializationToLValue(LValue LV, QualType T) {
  if (!CGF.hasAggregateLLVMType(T)) {
    // For non-aggregates, we can simply store zero.
    llvm::Value *Null = llvm::Constant::getNullValue(CGF.ConvertType(T));
    CGF.EmitStoreThroughLValue(RValue::get(Null), LV, T);
  } else {
    // Otherwise, just memset the whole thing to zero.  This is legal
    // because in LLVM, all default initializers are guaranteed to have a
    // bit pattern of all zeros.
    // FIXME: That isn't true for member pointers!
    // There's a potential optimization opportunity in combining
    // memsets; that would be easy for arrays, but relatively
    // difficult for structures with the current code.
    CGF.EmitMemSetToZero(LV.getAddress(), T);
  }
}

void AggExprEmitter::VisitInitListExpr(InitListExpr *E) {
#if 0
  // FIXME: Disabled while we figure out what to do about
  // test/CodeGen/bitfield.c
  //
  // If we can, prefer a copy from a global; this is a lot less code for long
  // globals, and it's easier for the current optimizers to analyze.
  // FIXME: Should we really be doing this? Should we try to avoid cases where
  // we emit a global with a lot of zeros?  Should we try to avoid short
  // globals?
  if (E->isConstantInitializer(CGF.getContext(), 0)) {
    llvm::Constant* C = CGF.CGM.EmitConstantExpr(E, &CGF);
    llvm::GlobalVariable* GV =
    new llvm::GlobalVariable(C->getType(), true,
                             llvm::GlobalValue::InternalLinkage,
                             C, "", &CGF.CGM.getModule(), 0);
    EmitFinalDestCopy(E, LValue::MakeAddr(GV, 0));
    return;
  }
#endif
  if (E->hadArrayRangeDesignator()) {
    CGF.ErrorUnsupported(E, "GNU array range designator extension");
  }

  // Handle initialization of an array.
  if (E->getType()->isArrayType()) {
    const llvm::PointerType *APType =
      cast<llvm::PointerType>(DestPtr->getType());
    const llvm::ArrayType *AType =
      cast<llvm::ArrayType>(APType->getElementType());

    uint64_t NumInitElements = E->getNumInits();

    if (E->getNumInits() > 0) {
      QualType T1 = E->getType();
      QualType T2 = E->getInit(0)->getType();
      if (CGF.getContext().hasSameUnqualifiedType(T1, T2)) {
        EmitAggLoadOfLValue(E->getInit(0));
        return;
      }
    }

    uint64_t NumArrayElements = AType->getNumElements();
    QualType ElementType = CGF.getContext().getCanonicalType(E->getType());
    ElementType = CGF.getContext().getAsArrayType(ElementType)->getElementType();

    unsigned CVRqualifier = ElementType.getCVRQualifiers();

    for (uint64_t i = 0; i != NumArrayElements; ++i) {
      llvm::Value *NextVal = Builder.CreateStructGEP(DestPtr, i, ".array");
      if (i < NumInitElements)
        EmitInitializationToLValue(E->getInit(i),
                                   LValue::MakeAddr(NextVal, CVRqualifier));
      else
        EmitNullInitializationToLValue(LValue::MakeAddr(NextVal, CVRqualifier),
                                       ElementType);
    }
    return;
  }

  assert(E->getType()->isRecordType() && "Only support structs/unions here!");

  // Do struct initialization; this code just sets each individual member
  // to the appropriate value.  This makes bitfield support automatic;
  // the disadvantage is that the generated code is more difficult for
  // the optimizer, especially with bitfields.
  unsigned NumInitElements = E->getNumInits();
  RecordDecl *SD = E->getType()->getAs<RecordType>()->getDecl();
  unsigned CurInitVal = 0;

  if (E->getType()->isUnionType()) {
    // Only initialize one field of a union. The field itself is
    // specified by the initializer list.
    if (!E->getInitializedFieldInUnion()) {
      // Empty union; we have nothing to do.

#ifndef NDEBUG
      // Make sure that it's really an empty union and not a failure of
      // semantic analysis.
      for (RecordDecl::field_iterator Field = SD->field_begin(),
                                   FieldEnd = SD->field_end();
           Field != FieldEnd; ++Field)
        assert(Field->isUnnamedBitfield() && "Only unnamed bitfields allowed");
#endif
      return;
    }

    // FIXME: volatility
    FieldDecl *Field = E->getInitializedFieldInUnion();
    LValue FieldLoc = CGF.EmitLValueForField(DestPtr, Field, true, 0);

    if (NumInitElements) {
      // Store the initializer into the field.
      EmitInitializationToLValue(E->getInit(0), FieldLoc);
    } else {
      // Default-initialize to null.
      EmitNullInitializationToLValue(FieldLoc, Field->getType());
    }

    return;
  }

  // Here we iterate over the fields; this makes it simpler to both
  // default-initialize fields and skip over unnamed fields.
  for (RecordDecl::field_iterator Field = SD->field_begin(),
                               FieldEnd = SD->field_end();
       Field != FieldEnd; ++Field) {
    // We're done once we hit the flexible array member.
    if (Field->getType()->isIncompleteArrayType())
      break;

    if (Field->isUnnamedBitfield())
      continue;

    // FIXME: volatility
    LValue FieldLoc = CGF.EmitLValueForField(DestPtr, *Field, false, 0);
    // We never generate write-barriers for initialized fields.
    LValue::SetObjCNonGC(FieldLoc, true);
    if (CurInitVal < NumInitElements) {
      // Store the initializer into the field.
      EmitInitializationToLValue(E->getInit(CurInitVal++), FieldLoc);
    } else {
      // We're out of initializers; default-initialize to null.
      EmitNullInitializationToLValue(FieldLoc, Field->getType());
    }
  }
}
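
// Two illustrative sketches of initializer lists handled above:
//
//   int a[4] = { 1, 2 };           // a[2] and a[3] are null-initialized
//
//   union U { int i; float f; };
//   union U u = { .f = 2.5f };     // only the designated field 'f' is written
//
// For the array case, each element gets either its initializer or a null
// initialization; for the union case, exactly one field is initialized.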

//===----------------------------------------------------------------------===//
//                        Entry Points into this File
//===----------------------------------------------------------------------===//

/// EmitAggExpr - Emit the computation of the specified expression of aggregate
/// type.  The result is computed into DestPtr.  Note that if DestPtr is null,
/// the value of the aggregate expression is not needed.  If VolatileDest is
/// true, DestPtr cannot be 0.
void CodeGenFunction::EmitAggExpr(const Expr *E, llvm::Value *DestPtr,
                                  bool VolatileDest, bool IgnoreResult,
                                  bool IsInitializer,
                                  bool RequiresGCollection) {
  assert(E && hasAggregateLLVMType(E->getType()) &&
         "Invalid aggregate expression to emit");
  assert((DestPtr != 0 || VolatileDest == false) &&
         "volatile aggregate can't be 0");

  AggExprEmitter(*this, DestPtr, VolatileDest, IgnoreResult, IsInitializer,
                 RequiresGCollection)
    .Visit(const_cast<Expr*>(E));
}

void CodeGenFunction::EmitAggregateClear(llvm::Value *DestPtr, QualType Ty) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  EmitMemSetToZero(DestPtr, Ty);
}

void CodeGenFunction::EmitAggregateCopy(llvm::Value *DestPtr,
                                        llvm::Value *SrcPtr, QualType Ty,
                                        bool isVolatile) {
  assert(!Ty->isAnyComplexType() && "Shouldn't happen for complex");

  // Aggregate assignment turns into llvm.memcpy.  This is almost valid per
  // C99 6.5.16.1p3, which states "If the value being stored in an object is
  // read from another object that overlaps in any way the storage of the first
  // object, then the overlap shall be exact and the two objects shall have
  // qualified or unqualified versions of a compatible type."
  //
  // memcpy is not defined if the source and destination pointers are exactly
  // equal, but other compilers do this optimization, and almost every memcpy
  // implementation handles this case safely.  If there is a libc that does not
  // safely handle this, we can add a target hook.
  const llvm::Type *BP =
                llvm::PointerType::getUnqual(llvm::Type::getInt8Ty(VMContext));
  if (DestPtr->getType() != BP)
    DestPtr = Builder.CreateBitCast(DestPtr, BP, "tmp");
  if (SrcPtr->getType() != BP)
    SrcPtr = Builder.CreateBitCast(SrcPtr, BP, "tmp");

  // Get size and alignment info for this aggregate.
  std::pair<uint64_t, unsigned> TypeInfo = getContext().getTypeInfo(Ty);

  // FIXME: Handle variable sized types.
  const llvm::Type *IntPtr =
          llvm::IntegerType::get(VMContext, LLVMPointerWidth);

  // FIXME: If we have a volatile struct, the optimizer can remove what might
  // appear to be `extra' memory ops:
  //
  // volatile struct { int i; } a, b;
  //
  // int main() {
  //   a = b;
  //   a = b;
  // }
  //
  // We need to use a different call here.  We use isVolatile to indicate when
  // either the source or the destination is volatile.
  Builder.CreateCall4(CGM.getMemCpyFn(),
                      DestPtr, SrcPtr,
                      // TypeInfo.first describes the size in bits.
                      llvm::ConstantInt::get(IntPtr, TypeInfo.first/8),
                      llvm::ConstantInt::get(llvm::Type::getInt32Ty(VMContext),
                                             TypeInfo.second/8));
}
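
// An illustrative sketch of what this lowers (names made up):
//
//   struct Big { int data[64]; };
//   void copy(struct Big *d, const struct Big *s) { *d = *s; }
//
// The assignment '*d = *s' reaches EmitAggregateCopy and becomes a single
// llvm.memcpy call of sizeof(struct Big) bytes, with the alignment taken
// from the type info computed above.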