1 //===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This is the internal per-function state used for llvm translation.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #ifndef CLANG_CODEGEN_CODEGENFUNCTION_H
15 #define CLANG_CODEGEN_CODEGENFUNCTION_H
16 
17 #include "clang/AST/Type.h"
18 #include "clang/AST/ExprCXX.h"
19 #include "clang/AST/ExprObjC.h"
20 #include "clang/AST/CharUnits.h"
21 #include "clang/Basic/ABI.h"
22 #include "clang/Basic/TargetInfo.h"
23 #include "llvm/ADT/DenseMap.h"
24 #include "llvm/ADT/SmallVector.h"
25 #include "llvm/Support/ValueHandle.h"
26 #include "CodeGenModule.h"
27 #include "CGBuilder.h"
28 #include "CGValue.h"
29 
30 namespace llvm {
31   class BasicBlock;
32   class LLVMContext;
33   class MDNode;
34   class Module;
35   class SwitchInst;
36   class Twine;
37   class Value;
38   class CallSite;
39 }
40 
41 namespace clang {
42   class APValue;
43   class ASTContext;
44   class CXXDestructorDecl;
45   class CXXForRangeStmt;
46   class CXXTryStmt;
47   class Decl;
48   class LabelDecl;
49   class EnumConstantDecl;
50   class FunctionDecl;
51   class FunctionProtoType;
52   class LabelStmt;
53   class ObjCContainerDecl;
54   class ObjCInterfaceDecl;
55   class ObjCIvarDecl;
56   class ObjCMethodDecl;
57   class ObjCImplementationDecl;
58   class ObjCPropertyImplDecl;
59   class TargetInfo;
60   class TargetCodeGenInfo;
61   class VarDecl;
62   class ObjCForCollectionStmt;
63   class ObjCAtTryStmt;
64   class ObjCAtThrowStmt;
65   class ObjCAtSynchronizedStmt;
66 
67 namespace CodeGen {
68   class CodeGenTypes;
69   class CGDebugInfo;
70   class CGFunctionInfo;
71   class CGRecordLayout;
72   class CGBlockInfo;
73   class CGCXXABI;
74   class BlockFlags;
75   class BlockFieldFlags;
76 
77 /// A branch fixup.  These are required when emitting a goto to a
78 /// label which hasn't been emitted yet.  The goto is optimistically
79 /// emitted as a branch to the basic block for the label, and (if it
80 /// occurs in a scope with non-trivial cleanups) a fixup is added to
81 /// the innermost cleanup.  When a (normal) cleanup is popped, any
82 /// unresolved fixups in that scope are threaded through the cleanup.
83 struct BranchFixup {
84   /// The block containing the terminator which needs to be modified
85   /// into a switch if this fixup is resolved into the current scope.
86   /// If null, InitialBranch points directly to the destination.
87   llvm::BasicBlock *OptimisticBranchBlock;
88 
89   /// The ultimate destination of the branch.
90   ///
91   /// This can be set to null to indicate that this fixup was
92   /// successfully resolved.
93   llvm::BasicBlock *Destination;
94 
95   /// The destination index value.
96   unsigned DestinationIndex;
97 
98   /// The initial branch of the fixup.
99   llvm::BranchInst *InitialBranch;
100 };
101 
102 template <class T> struct InvariantValue {
103   typedef T type;
104   typedef T saved_type;
105   static bool needsSaving(type value) { return false; }
106   static saved_type save(CodeGenFunction &CGF, type value) { return value; }
107   static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
108 };
109 
110 /// A metaprogramming class for ensuring that a value will dominate an
111 /// arbitrary position in a function.
112 template <class T> struct DominatingValue : InvariantValue<T> {};
113 
114 template <class T, bool mightBeInstruction =
115             llvm::is_base_of<llvm::Value, T>::value &&
116             !llvm::is_base_of<llvm::Constant, T>::value &&
117             !llvm::is_base_of<llvm::BasicBlock, T>::value>
118 struct DominatingPointer;
119 template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
120 // template <class T> struct DominatingPointer<T,true> at end of file
121 
122 template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
123 
124 enum CleanupKind {
125   EHCleanup = 0x1,
126   NormalCleanup = 0x2,
127   NormalAndEHCleanup = EHCleanup | NormalCleanup,
128 
129   InactiveCleanup = 0x4,
130   InactiveEHCleanup = EHCleanup | InactiveCleanup,
131   InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
132   InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
133 };
134 
135 /// A stack of scopes which respond to exceptions, including cleanups
136 /// and catch blocks.
137 class EHScopeStack {
138 public:
139   /// A saved depth on the scope stack.  This is necessary because
140   /// pushing scopes onto the stack invalidates iterators.
141   class stable_iterator {
142     friend class EHScopeStack;
143 
144     /// Offset from StartOfData to EndOfBuffer.
145     ptrdiff_t Size;
146 
147     stable_iterator(ptrdiff_t Size) : Size(Size) {}
148 
149   public:
150     static stable_iterator invalid() { return stable_iterator(-1); }
151     stable_iterator() : Size(-1) {}
152 
153     bool isValid() const { return Size >= 0; }
154 
155     /// Returns true if this scope encloses I.
156     /// Returns false if I is invalid.
157     /// This scope must be valid.
158     bool encloses(stable_iterator I) const { return Size <= I.Size; }
159 
160     /// Returns true if this scope strictly encloses I: that is,
161     /// if it encloses I and is not I.
162     /// Returns false if I is invalid.
163     /// This scope must be valid.
164     bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }
165 
166     friend bool operator==(stable_iterator A, stable_iterator B) {
167       return A.Size == B.Size;
168     }
169     friend bool operator!=(stable_iterator A, stable_iterator B) {
170       return A.Size != B.Size;
171     }
172   };
173 
174   /// Information for lazily generating a cleanup.  Subclasses must be
175   /// POD-like: cleanups will not be destructed, and they will be
176   /// allocated on the cleanup stack and freely copied and moved
177   /// around.
178   ///
179   /// Cleanup implementations should generally be declared in an
180   /// anonymous namespace.
181   class Cleanup {
182   public:
183     // Anchor the construction vtable.  We use the destructor because
184     // gcc gives an obnoxious warning if there are virtual methods
185     // with an accessible non-virtual destructor.  Unfortunately,
186     // declaring this destructor makes it non-trivial, but there
187     // doesn't seem to be any other way around this warning.
188     //
189     // This destructor will never be called.
190     virtual ~Cleanup();
191 
192     /// Emit the cleanup.  For normal cleanups, this is run in the
193     /// same EH context as when the cleanup was pushed, i.e. the
194     /// immediately-enclosing context of the cleanup scope.  For
195     /// EH cleanups, this is run in a terminate context.
196     ///
197     /// \param IsForEHCleanup true if this is for an EH cleanup, false
198     /// if for a normal cleanup.
199     virtual void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) = 0;
200   };
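
  // A minimal illustrative sketch (not part of this header): a typical
  // Cleanup subclass, declared in an anonymous namespace in a .cpp file.
  // All names below are hypothetical.
  //
  //   namespace {
  //     struct CallObjectFree : EHScopeStack::Cleanup {
  //       llvm::Value *Ptr;
  //       CallObjectFree(llvm::Value *Ptr) : Ptr(Ptr) {}
  //       void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
  //         // Emit the IR that releases Ptr; this runs on the normal
  //         // and/or exceptional path depending on the CleanupKind it
  //         // was pushed with.
  //       }
  //     };
  //   }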
201 
202   /// UnconditionalCleanupN stores its N parameters and just passes
203   /// them to the real cleanup function.
204   template <class T, class A0>
205   class UnconditionalCleanup1 : public Cleanup {
206     A0 a0;
207   public:
208     UnconditionalCleanup1(A0 a0) : a0(a0) {}
209     void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
210       T::Emit(CGF, IsForEHCleanup, a0);
211     }
212   };
213 
214   template <class T, class A0, class A1>
215   class UnconditionalCleanup2 : public Cleanup {
216     A0 a0; A1 a1;
217   public:
218     UnconditionalCleanup2(A0 a0, A1 a1) : a0(a0), a1(a1) {}
219     void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
220       T::Emit(CGF, IsForEHCleanup, a0, a1);
221     }
222   };
223 
224   /// ConditionalCleanupN stores the saved form of its N parameters,
225   /// then restores them and performs the cleanup.
226   template <class T, class A0>
227   class ConditionalCleanup1 : public Cleanup {
228     typedef typename DominatingValue<A0>::saved_type A0_saved;
229     A0_saved a0_saved;
230 
231     void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
232       A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
233       T::Emit(CGF, IsForEHCleanup, a0);
234     }
235 
236   public:
237     ConditionalCleanup1(A0_saved a0)
238       : a0_saved(a0) {}
239   };
240 
241   template <class T, class A0, class A1>
242   class ConditionalCleanup2 : public Cleanup {
243     typedef typename DominatingValue<A0>::saved_type A0_saved;
244     typedef typename DominatingValue<A1>::saved_type A1_saved;
245     A0_saved a0_saved;
246     A1_saved a1_saved;
247 
248     void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) {
249       A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
250       A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
251       T::Emit(CGF, IsForEHCleanup, a0, a1);
252     }
253 
254   public:
255     ConditionalCleanup2(A0_saved a0, A1_saved a1)
256       : a0_saved(a0), a1_saved(a1) {}
257   };
258 
259 private:
260   // The implementation for this class is in CGException.h and
261   // CGException.cpp; the definition is here because it's used as a
262   // member of CodeGenFunction.
263 
264   /// The start of the scope-stack buffer, i.e. the allocated pointer
265   /// for the buffer.  All of these pointers are either simultaneously
266   /// null or simultaneously valid.
267   char *StartOfBuffer;
268 
269   /// The end of the buffer.
270   char *EndOfBuffer;
271 
272   /// The first valid entry in the buffer.
273   char *StartOfData;
274 
275   /// The innermost normal cleanup on the stack.
276   stable_iterator InnermostNormalCleanup;
277 
278   /// The innermost EH cleanup on the stack.
279   stable_iterator InnermostEHCleanup;
280 
281   /// The number of catches on the stack.
282   unsigned CatchDepth;
283 
284   /// The current EH destination index.  Reset to FirstEHDestIndex
285   /// whenever the last EH cleanup is popped.
286   unsigned NextEHDestIndex;
287   enum { FirstEHDestIndex = 1 };
288 
289   /// The current set of branch fixups.  A branch fixup is a jump to
290   /// an as-yet unemitted label, i.e. a label for which we don't yet
291   /// know the EH stack depth.  Whenever we pop a cleanup, we have
292   /// to thread all the current branch fixups through it.
293   ///
294   /// Fixups are recorded as the Use of the respective branch or
295   /// switch statement.  The use points to the final destination.
296   /// When popping out of a cleanup, these uses are threaded through
297   /// the cleanup and adjusted to point to the new cleanup.
298   ///
299   /// Note that branches are allowed to jump into protected scopes
300   /// in certain situations;  e.g. the following code is legal:
301   ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
302   ///     goto foo;
303   ///     A a;
304   ///    foo:
305   ///     bar();
306   llvm::SmallVector<BranchFixup, 8> BranchFixups;
307 
308   char *allocate(size_t Size);
309 
310   void *pushCleanup(CleanupKind K, size_t DataSize);
311 
312 public:
313   EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
314                    InnermostNormalCleanup(stable_end()),
315                    InnermostEHCleanup(stable_end()),
316                    CatchDepth(0), NextEHDestIndex(FirstEHDestIndex) {}
317   ~EHScopeStack() { delete[] StartOfBuffer; }
318 
319   // Variadic templates would make this not terrible.
320 
321   /// Push a lazily-created cleanup on the stack.
322   template <class T>
323   void pushCleanup(CleanupKind Kind) {
324     void *Buffer = pushCleanup(Kind, sizeof(T));
325     Cleanup *Obj = new(Buffer) T();
326     (void) Obj;
327   }
328 
329   /// Push a lazily-created cleanup on the stack.
330   template <class T, class A0>
331   void pushCleanup(CleanupKind Kind, A0 a0) {
332     void *Buffer = pushCleanup(Kind, sizeof(T));
333     Cleanup *Obj = new(Buffer) T(a0);
334     (void) Obj;
335   }
336 
337   /// Push a lazily-created cleanup on the stack.
338   template <class T, class A0, class A1>
339   void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
340     void *Buffer = pushCleanup(Kind, sizeof(T));
341     Cleanup *Obj = new(Buffer) T(a0, a1);
342     (void) Obj;
343   }
344 
345   /// Push a lazily-created cleanup on the stack.
346   template <class T, class A0, class A1, class A2>
347   void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
348     void *Buffer = pushCleanup(Kind, sizeof(T));
349     Cleanup *Obj = new(Buffer) T(a0, a1, a2);
350     (void) Obj;
351   }
352 
353   /// Push a lazily-created cleanup on the stack.
354   template <class T, class A0, class A1, class A2, class A3>
355   void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
356     void *Buffer = pushCleanup(Kind, sizeof(T));
357     Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
358     (void) Obj;
359   }
360 
361   /// Push a lazily-created cleanup on the stack.
362   template <class T, class A0, class A1, class A2, class A3, class A4>
363   void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
364     void *Buffer = pushCleanup(Kind, sizeof(T));
365     Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
366     (void) Obj;
367   }
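
  // For example, a cleanup like the hypothetical CallObjectFree sketched
  // above would typically be pushed from CodeGenFunction as:
  //   CGF.EHStack.pushCleanup<CallObjectFree>(NormalAndEHCleanup, Ptr);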
368 
369   // Feel free to add more variants of the following:
370 
371   /// Push a cleanup with non-constant storage requirements on the
372   /// stack.  The cleanup type must provide an additional static method:
373   ///   static size_t getExtraSize(size_t);
374   /// The argument to this method will be the value N, which will also
375   /// be passed as the first argument to the constructor.
376   ///
377   /// The data stored in the extra storage must obey the same
378   /// restrictions as normal cleanup member data.
379   ///
380   /// The pointer returned from this method is valid until the cleanup
381   /// stack is modified.
382   template <class T, class A0, class A1, class A2>
383   T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
384     void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
385     return new (Buffer) T(N, a0, a1, a2);
386   }
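
  // Illustrative sketch only; 'ArrayCleanup' is a hypothetical type that
  // provides 'static size_t getExtraSize(size_t)' as described above:
  //   ArrayCleanup *C =
  //     CGF.EHStack.pushCleanupWithExtra<ArrayCleanup>(NormalCleanup, N,
  //                                                    a0, a1, a2);
  //   ... fill in C's trailing storage before the stack is next modified ...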
387 
388   /// Pops a cleanup scope off the stack.  This should only be called
389   /// by CodeGenFunction::PopCleanupBlock.
390   void popCleanup();
391 
392   /// Push a set of catch handlers on the stack.  The catch is
393   /// uninitialized and will need to have the given number of handlers
394   /// set on it.
395   class EHCatchScope *pushCatch(unsigned NumHandlers);
396 
397   /// Pops a catch scope off the stack.
398   void popCatch();
399 
400   /// Push an exception filter on the stack.
401   class EHFilterScope *pushFilter(unsigned NumFilters);
402 
403   /// Pops an exception filter off the stack.
404   void popFilter();
405 
406   /// Push a terminate handler on the stack.
407   void pushTerminate();
408 
409   /// Pops a terminate handler off the stack.
410   void popTerminate();
411 
412   /// Determines whether the exception-scopes stack is empty.
413   bool empty() const { return StartOfData == EndOfBuffer; }
414 
415   bool requiresLandingPad() const {
416     return (CatchDepth || hasEHCleanups());
417   }
418 
419   /// Determines whether there are any normal cleanups on the stack.
420   bool hasNormalCleanups() const {
421     return InnermostNormalCleanup != stable_end();
422   }
423 
424   /// Returns the innermost normal cleanup on the stack, or
425   /// stable_end() if there are no normal cleanups.
426   stable_iterator getInnermostNormalCleanup() const {
427     return InnermostNormalCleanup;
428   }
429   stable_iterator getInnermostActiveNormalCleanup() const; // CGException.h
430 
431   /// Determines whether there are any EH cleanups on the stack.
432   bool hasEHCleanups() const {
433     return InnermostEHCleanup != stable_end();
434   }
435 
436   /// Returns the innermost EH cleanup on the stack, or stable_end()
437   /// if there are no EH cleanups.
438   stable_iterator getInnermostEHCleanup() const {
439     return InnermostEHCleanup;
440   }
441   stable_iterator getInnermostActiveEHCleanup() const; // CGException.h
442 
443   /// An unstable reference to a scope-stack depth.  Invalidated by
444   /// pushes but not pops.
445   class iterator;
446 
447   /// Returns an iterator pointing to the innermost EH scope.
448   iterator begin() const;
449 
450   /// Returns an iterator pointing to the outermost EH scope.
451   iterator end() const;
452 
453   /// Create a stable reference to the top of the EH stack.  The
454   /// returned reference is valid until that scope is popped off the
455   /// stack.
456   stable_iterator stable_begin() const {
457     return stable_iterator(EndOfBuffer - StartOfData);
458   }
459 
460   /// Create a stable reference to the bottom of the EH stack.
461   static stable_iterator stable_end() {
462     return stable_iterator(0);
463   }
464 
465   /// Translates an iterator into a stable_iterator.
466   stable_iterator stabilize(iterator it) const;
467 
468   /// Finds the nearest cleanup enclosing the given iterator.
469   /// Returns stable_iterator::invalid() if there are no such cleanups.
470   stable_iterator getEnclosingEHCleanup(iterator it) const;
471 
472   /// Turn a stable reference to a scope depth into an unstable pointer
473   /// to the EH stack.
474   iterator find(stable_iterator save) const;
475 
476   /// Removes the cleanup pointed to by the given stable_iterator.
477   void removeCleanup(stable_iterator save);
478 
479   /// Add a branch fixup to the current cleanup scope.
480   BranchFixup &addBranchFixup() {
481     assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
482     BranchFixups.push_back(BranchFixup());
483     return BranchFixups.back();
484   }
485 
486   unsigned getNumBranchFixups() const { return BranchFixups.size(); }
487   BranchFixup &getBranchFixup(unsigned I) {
488     assert(I < getNumBranchFixups());
489     return BranchFixups[I];
490   }
491 
492   /// Pops lazily-removed fixups from the end of the list.  This
493   /// should only be called by procedures which have just popped a
494   /// cleanup or resolved one or more fixups.
495   void popNullFixups();
496 
497   /// Clears the branch-fixups list.  This should only be called by
498   /// ResolveAllBranchFixups.
499   void clearFixups() { BranchFixups.clear(); }
500 
501   /// Gets the next EH destination index.
502   unsigned getNextEHDestIndex() { return NextEHDestIndex++; }
503 };
504 
505 /// CodeGenFunction - This class organizes the per-function state that is used
506 /// while generating LLVM code.
507 class CodeGenFunction : public CodeGenTypeCache {
508   CodeGenFunction(const CodeGenFunction&); // DO NOT IMPLEMENT
509   void operator=(const CodeGenFunction&);  // DO NOT IMPLEMENT
510 
511   friend class CGCXXABI;
512 public:
513   /// A jump destination is an abstract label, branching to which may
514   /// require a jump out through normal cleanups.
515   struct JumpDest {
516     JumpDest() : Block(0), ScopeDepth(), Index(0) {}
517     JumpDest(llvm::BasicBlock *Block,
518              EHScopeStack::stable_iterator Depth,
519              unsigned Index)
520       : Block(Block), ScopeDepth(Depth), Index(Index) {}
521 
522     bool isValid() const { return Block != 0; }
523     llvm::BasicBlock *getBlock() const { return Block; }
524     EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
525     unsigned getDestIndex() const { return Index; }
526 
527   private:
528     llvm::BasicBlock *Block;
529     EHScopeStack::stable_iterator ScopeDepth;
530     unsigned Index;
531   };
532 
533   /// An unwind destination is an abstract label, branching to which
534   /// may require a jump out through EH cleanups.
535   struct UnwindDest {
536     UnwindDest() : Block(0), ScopeDepth(), Index(0) {}
537     UnwindDest(llvm::BasicBlock *Block,
538                EHScopeStack::stable_iterator Depth,
539                unsigned Index)
540       : Block(Block), ScopeDepth(Depth), Index(Index) {}
541 
542     bool isValid() const { return Block != 0; }
543     llvm::BasicBlock *getBlock() const { return Block; }
544     EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
545     unsigned getDestIndex() const { return Index; }
546 
547   private:
548     llvm::BasicBlock *Block;
549     EHScopeStack::stable_iterator ScopeDepth;
550     unsigned Index;
551   };
552 
553   CodeGenModule &CGM;  // Per-module state.
554   const TargetInfo &Target;
555 
556   typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy;
557   CGBuilderTy Builder;
558 
559   /// CurFuncDecl - Holds the Decl for the current function or ObjC method.
560   /// This excludes BlockDecls.
561   const Decl *CurFuncDecl;
562   /// CurCodeDecl - This is the inner-most code context, which includes blocks.
563   const Decl *CurCodeDecl;
564   const CGFunctionInfo *CurFnInfo;
565   QualType FnRetTy;
566   llvm::Function *CurFn;
567 
568   /// CurGD - The GlobalDecl for the current function being compiled.
569   GlobalDecl CurGD;
570 
571   /// ReturnBlock - Unified return block.
572   JumpDest ReturnBlock;
573 
574   /// ReturnValue - The temporary alloca to hold the return value. This is null
575   /// iff the function has no return value.
576   llvm::Value *ReturnValue;
577 
578   /// RethrowBlock - Unified rethrow block.
579   UnwindDest RethrowBlock;
580 
581   /// AllocaInsertPt - This is an instruction in the entry block before which
582   /// we prefer to insert allocas.
583   llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;
584 
585   bool CatchUndefined;
586 
587   const CodeGen::CGBlockInfo *BlockInfo;
588   llvm::Value *BlockPointer;
589 
590   /// \brief A mapping from NRVO variables to the flags used to indicate
591   /// when the NRVO has been applied to this variable.
592   llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;
593 
594   EHScopeStack EHStack;
595 
596   /// i32s containing the indexes of the cleanup destinations.
597   llvm::AllocaInst *NormalCleanupDest;
598   llvm::AllocaInst *EHCleanupDest;
599 
600   unsigned NextCleanupDestIndex;
601 
602   /// The exception slot.  All landing pads write the current
603   /// exception pointer into this alloca.
604   llvm::Value *ExceptionSlot;
605 
606   /// Emits a landing pad for the current EH stack.
607   llvm::BasicBlock *EmitLandingPad();
608 
609   llvm::BasicBlock *getInvokeDestImpl();
610 
611   /// Set up the last cleanup that was pushed as a conditional
612   /// full-expression cleanup.
613   void initFullExprCleanup();
614 
615   template <class T>
616   typename DominatingValue<T>::saved_type saveValueInCond(T value) {
617     return DominatingValue<T>::save(*this, value);
618   }
619 
620 public:
621   /// ObjCEHValueStack - Stack of Objective-C exception values, used for
622   /// rethrows.
623   llvm::SmallVector<llvm::Value*, 8> ObjCEHValueStack;
624 
625   // A struct holding information about a finally block's IR
626   // generation.  For now, doesn't actually hold anything.
627   struct FinallyInfo {
628   };
629 
630   FinallyInfo EnterFinallyBlock(const Stmt *Stmt,
631                                 llvm::Constant *BeginCatchFn,
632                                 llvm::Constant *EndCatchFn,
633                                 llvm::Constant *RethrowFn);
634   void ExitFinallyBlock(FinallyInfo &FinallyInfo);
635 
636   /// pushFullExprCleanup - Push a cleanup to be run at the end of the
637   /// current full-expression.  Safe against the possibility that
638   /// we're currently inside a conditionally-evaluated expression.
639   template <class T, class A0>
640   void pushFullExprCleanup(CleanupKind kind, A0 a0) {
641     // If we're not in a conditional branch, or if none of the
642     // arguments requires saving, then use the unconditional cleanup.
643     if (!isInConditionalBranch()) {
644       typedef EHScopeStack::UnconditionalCleanup1<T, A0> CleanupType;
645       return EHStack.pushCleanup<CleanupType>(kind, a0);
646     }
647 
648     typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
649 
650     typedef EHScopeStack::ConditionalCleanup1<T, A0> CleanupType;
651     EHStack.pushCleanup<CleanupType>(kind, a0_saved);
652     initFullExprCleanup();
653   }
654 
655   /// pushFullExprCleanup - Push a cleanup to be run at the end of the
656   /// current full-expression.  Safe against the possibility that
657   /// we're currently inside a conditionally-evaluated expression.
658   template <class T, class A0, class A1>
659   void pushFullExprCleanup(CleanupKind kind, A0 a0, A1 a1) {
660     // If we're not in a conditional branch, or if none of the
661     // arguments requires saving, then use the unconditional cleanup.
662     if (!isInConditionalBranch()) {
663       typedef EHScopeStack::UnconditionalCleanup2<T, A0, A1> CleanupType;
664       return EHStack.pushCleanup<CleanupType>(kind, a0, a1);
665     }
666 
667     typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
668     typename DominatingValue<A1>::saved_type a1_saved = saveValueInCond(a1);
669 
670     typedef EHScopeStack::ConditionalCleanup2<T, A0, A1> CleanupType;
671     EHStack.pushCleanup<CleanupType>(kind, a0_saved, a1_saved);
672     initFullExprCleanup();
673   }
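
  // A hedged usage sketch: unlike EHScopeStack::Cleanup subclasses used
  // with pushCleanup, the type passed here must provide a *static*
  // Emit(CodeGenFunction&, bool, A0...) taking the saved arguments, since
  // UnconditionalCleanupN/ConditionalCleanupN forward to T::Emit.  With a
  // hypothetical such type 'DestroyTemporary':
  //   pushFullExprCleanup<DestroyTemporary>(NormalAndEHCleanup, Addr);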
674 
675   /// PushDestructorCleanup - Push a cleanup to call the
676   /// complete-object destructor of an object of the given type at the
677   /// given address.  Does nothing if T is not a C++ class type with a
678   /// non-trivial destructor.
679   void PushDestructorCleanup(QualType T, llvm::Value *Addr);
680 
681   /// PushDestructorCleanup - Push a cleanup to call the
682   /// complete-object variant of the given destructor on the object at
683   /// the given address.
684   void PushDestructorCleanup(const CXXDestructorDecl *Dtor,
685                              llvm::Value *Addr);
686 
687   /// PopCleanupBlock - Will pop the cleanup entry on the stack and
688   /// process all branch fixups.
689   void PopCleanupBlock(bool FallThroughIsBranchThrough = false);
690 
691   /// DeactivateCleanupBlock - Deactivates the given cleanup block.
692   /// The block cannot be reactivated.  Pops it if it's the top of the
693   /// stack.
694   void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);
695 
696   /// ActivateCleanupBlock - Activates an initially-inactive cleanup.
697   /// Cannot be used to resurrect a deactivated cleanup.
698   void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);
699 
700   /// \brief Enters a new scope for capturing cleanups, all of which
701   /// will be executed once the scope is exited.
702   class RunCleanupsScope {
703     CodeGenFunction& CGF;
704     EHScopeStack::stable_iterator CleanupStackDepth;
705     bool OldDidCallStackSave;
706     bool PerformCleanup;
707 
708     RunCleanupsScope(const RunCleanupsScope &); // DO NOT IMPLEMENT
709     RunCleanupsScope &operator=(const RunCleanupsScope &); // DO NOT IMPLEMENT
710 
711   public:
712     /// \brief Enter a new cleanup scope.
713     explicit RunCleanupsScope(CodeGenFunction &CGF)
714       : CGF(CGF), PerformCleanup(true)
715     {
716       CleanupStackDepth = CGF.EHStack.stable_begin();
717       OldDidCallStackSave = CGF.DidCallStackSave;
718       CGF.DidCallStackSave = false;
719     }
720 
721     /// \brief Exit this cleanup scope, emitting any accumulated
722     /// cleanups.
723     ~RunCleanupsScope() {
724       if (PerformCleanup) {
725         CGF.DidCallStackSave = OldDidCallStackSave;
726         CGF.PopCleanupBlocks(CleanupStackDepth);
727       }
728     }
729 
730     /// \brief Determine whether this scope requires any cleanups.
731     bool requiresCleanups() const {
732       return CGF.EHStack.stable_begin() != CleanupStackDepth;
733     }
734 
735     /// \brief Force the emission of cleanups now, instead of waiting
736     /// until this object is destroyed.
737     void ForceCleanup() {
738       assert(PerformCleanup && "Already forced cleanup");
739       CGF.DidCallStackSave = OldDidCallStackSave;
740       CGF.PopCleanupBlocks(CleanupStackDepth);
741       PerformCleanup = false;
742     }
743   };
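
  // Typical RAII usage (sketch; the emitted statements are illustrative):
  //   {
  //     RunCleanupsScope Scope(*this);
  //     ... emit declarations/statements that may push cleanups ...
  //   } // all cleanups pushed within the scope are emitted here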
744 
745 
746   /// PopCleanupBlocks - Takes the old cleanup stack size and emits
747   /// the cleanup blocks that have been added.
748   void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);
749 
750   void ResolveBranchFixups(llvm::BasicBlock *Target);
751 
752   /// The given basic block lies in the current EH scope, but may be a
753   /// target of a potentially scope-crossing jump; get a stable handle
754   /// to which we can perform this jump later.
755   JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) {
756     return JumpDest(Target,
757                     EHStack.getInnermostNormalCleanup(),
758                     NextCleanupDestIndex++);
759   }
760 
761   /// The given basic block lies in the current EH scope, but may be a
762   /// target of a potentially scope-crossing jump; get a stable handle
763   /// to which we can perform this jump later.
764   JumpDest getJumpDestInCurrentScope(llvm::StringRef Name = llvm::StringRef()) {
765     return getJumpDestInCurrentScope(createBasicBlock(Name));
766   }
767 
768   /// EmitBranchThroughCleanup - Emit a branch from the current insert
769   /// block through the normal cleanup handling code (if any) and then
770   /// on to \arg Dest.
771   void EmitBranchThroughCleanup(JumpDest Dest);
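
  // Sketch of a typical scope-crossing jump (e.g. for a 'break'); the
  // block name is illustrative:
  //   JumpDest LoopExit = getJumpDestInCurrentScope("for.end");
  //   ... push cleanups and emit the loop body ...
  //   EmitBranchThroughCleanup(LoopExit); // threads through normal cleanups
  //   ...
  //   EmitBlock(LoopExit.getBlock());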
772 
773   /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
774   /// specified destination obviously has no cleanups to run.  'false' is always
775   /// a conservatively correct answer for this method.
776   bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const;
777 
778   /// EmitBranchThroughEHCleanup - Emit a branch from the current
779   /// insert block through the EH cleanup handling code (if any) and
780   /// then on to \arg Dest.
781   void EmitBranchThroughEHCleanup(UnwindDest Dest);
782 
783   /// getRethrowDest - Returns the unified outermost-scope rethrow
784   /// destination.
785   UnwindDest getRethrowDest();
786 
787   /// An object to manage conditionally-evaluated expressions.
788   class ConditionalEvaluation {
789     llvm::BasicBlock *StartBB;
790 
791   public:
792     ConditionalEvaluation(CodeGenFunction &CGF)
793       : StartBB(CGF.Builder.GetInsertBlock()) {}
794 
795     void begin(CodeGenFunction &CGF) {
796       assert(CGF.OutermostConditional != this);
797       if (!CGF.OutermostConditional)
798         CGF.OutermostConditional = this;
799     }
800 
801     void end(CodeGenFunction &CGF) {
802       assert(CGF.OutermostConditional != 0);
803       if (CGF.OutermostConditional == this)
804         CGF.OutermostConditional = 0;
805     }
806 
807     /// Returns a block which will be executed prior to each
808     /// evaluation of the conditional code.
809     llvm::BasicBlock *getStartingBlock() const {
810       return StartBB;
811     }
812   };
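
  // Sketch: wrapping the emission of a conditionally-evaluated operand
  // (e.g. the RHS of '&&'); full-expression cleanups pushed between
  // begin() and end() are treated as conditional:
  //   ConditionalEvaluation eval(*this);
  //   ... emit the branch on the condition ...
  //   eval.begin(*this);
  //   ... emit the conditional operand ...
  //   eval.end(*this);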
813 
814   /// isInConditionalBranch - Return true if we're currently emitting
815   /// one branch or the other of a conditional expression.
816   bool isInConditionalBranch() const { return OutermostConditional != 0; }
817 
818   /// An RAII object to record that we're evaluating a statement
819   /// expression.
820   class StmtExprEvaluation {
821     CodeGenFunction &CGF;
822 
823     /// We have to save the outermost conditional: cleanups in a
824     /// statement expression aren't conditional just because the
825     /// StmtExpr is.
826     ConditionalEvaluation *SavedOutermostConditional;
827 
828   public:
829     StmtExprEvaluation(CodeGenFunction &CGF)
830       : CGF(CGF), SavedOutermostConditional(CGF.OutermostConditional) {
831       CGF.OutermostConditional = 0;
832     }
833 
834     ~StmtExprEvaluation() {
835       CGF.OutermostConditional = SavedOutermostConditional;
836       CGF.EnsureInsertPoint();
837     }
838   };
839 
840   /// An object which temporarily prevents a value from being
841   /// destroyed by aggressive peephole optimizations that assume that
842   /// all uses of a value have been realized in the IR.
843   class PeepholeProtection {
844     llvm::Instruction *Inst;
845     friend class CodeGenFunction;
846 
847   public:
848     PeepholeProtection() : Inst(0) {}
849   };
850 
851   /// An RAII object to set (and then clear) a mapping for an OpaqueValueExpr.
852   class OpaqueValueMapping {
853     CodeGenFunction &CGF;
854     const OpaqueValueExpr *OpaqueValue;
855     bool BoundLValue;
856     CodeGenFunction::PeepholeProtection Protection;
857 
858   public:
859     static bool shouldBindAsLValue(const Expr *expr) {
860       return expr->isGLValue() || expr->getType()->isRecordType();
861     }
862 
863     /// Build the opaque value mapping for the given conditional
864     /// operator if it's the GNU ?: extension.  This is a common
865     /// enough pattern that the convenience constructor is really
866     /// helpful.
867     ///
868     OpaqueValueMapping(CodeGenFunction &CGF,
869                        const AbstractConditionalOperator *op) : CGF(CGF) {
870       if (isa<ConditionalOperator>(op)) {
871         OpaqueValue = 0;
872         BoundLValue = false;
873         return;
874       }
875 
876       const BinaryConditionalOperator *e = cast<BinaryConditionalOperator>(op);
877       init(e->getOpaqueValue(), e->getCommon());
878     }
879 
880     OpaqueValueMapping(CodeGenFunction &CGF,
881                        const OpaqueValueExpr *opaqueValue,
882                        LValue lvalue)
883       : CGF(CGF), OpaqueValue(opaqueValue), BoundLValue(true) {
884       assert(opaqueValue && "no opaque value expression!");
885       assert(shouldBindAsLValue(opaqueValue));
886       initLValue(lvalue);
887     }
888 
889     OpaqueValueMapping(CodeGenFunction &CGF,
890                        const OpaqueValueExpr *opaqueValue,
891                        RValue rvalue)
892       : CGF(CGF), OpaqueValue(opaqueValue), BoundLValue(false) {
893       assert(opaqueValue && "no opaque value expression!");
894       assert(!shouldBindAsLValue(opaqueValue));
895       initRValue(rvalue);
896     }
897 
898     void pop() {
899       assert(OpaqueValue && "mapping already popped!");
900       popImpl();
901       OpaqueValue = 0;
902     }
903 
904     ~OpaqueValueMapping() {
905       if (OpaqueValue) popImpl();
906     }
907 
908   private:
909     void popImpl() {
910       if (BoundLValue)
911         CGF.OpaqueLValues.erase(OpaqueValue);
912       else {
913         CGF.OpaqueRValues.erase(OpaqueValue);
914         CGF.unprotectFromPeepholes(Protection);
915       }
916     }
917 
918     void init(const OpaqueValueExpr *ov, const Expr *e) {
919       OpaqueValue = ov;
920       BoundLValue = shouldBindAsLValue(ov);
921       assert(BoundLValue == shouldBindAsLValue(e)
922              && "inconsistent expression value kinds!");
923       if (BoundLValue)
924         initLValue(CGF.EmitLValue(e));
925       else
926         initRValue(CGF.EmitAnyExpr(e));
927     }
928 
929     void initLValue(const LValue &lv) {
930       CGF.OpaqueLValues.insert(std::make_pair(OpaqueValue, lv));
931     }
932 
933     void initRValue(const RValue &rv) {
934       // Work around an extremely aggressive peephole optimization in
935       // EmitScalarConversion which assumes that all other uses of a
936       // value are extant.
937       Protection = CGF.protectFromPeepholes(rv);
938       CGF.OpaqueRValues.insert(std::make_pair(OpaqueValue, rv));
939     }
940   };
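
  // Sketch: binding an opaque value for the duration of emission; 'ov'
  // and 'lv' are illustrative names for an OpaqueValueExpr and the LValue
  // it should resolve to:
  //   OpaqueValueMapping binding(*this, ov, lv);
  //   ... emit expressions that refer to 'ov' ...
  //   // the mapping is removed when 'binding' is destroyed (or via pop())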
941 
942   /// getByRefValueLLVMField - Given a declaration, returns the LLVM field
943   /// number that holds the value.
944   unsigned getByRefValueLLVMField(const ValueDecl *VD) const;
945 
946   /// BuildBlockByrefAddress - Computes the address of the variable which is
947   /// declared as __block.
948   llvm::Value *BuildBlockByrefAddress(llvm::Value *BaseAddr,
949                                       const VarDecl *V);
950 private:
951   CGDebugInfo *DebugInfo;
952   bool DisableDebugInfo;
953 
954   /// IndirectBranch - The first time an indirect goto is seen we create a block
955   /// with an indirect branch.  Every time we see the address of a label taken,
956   /// we add the label to the indirect goto.  Every subsequent indirect goto is
957   /// codegen'd as a jump to the IndirectBranch's basic block.
958   llvm::IndirectBrInst *IndirectBranch;
959 
960   /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C
961   /// decls.
962   typedef llvm::DenseMap<const Decl*, llvm::Value*> DeclMapTy;
963   DeclMapTy LocalDeclMap;
964 
965   /// LabelMap - This keeps track of the LLVM basic block for each C label.
966   llvm::DenseMap<const LabelDecl*, JumpDest> LabelMap;
967 
968   // BreakContinueStack - This keeps track of where break and continue
969   // statements should jump to.
970   struct BreakContinue {
971     BreakContinue(JumpDest Break, JumpDest Continue)
972       : BreakBlock(Break), ContinueBlock(Continue) {}
973 
974     JumpDest BreakBlock;
975     JumpDest ContinueBlock;
976   };
977   llvm::SmallVector<BreakContinue, 8> BreakContinueStack;
978 
979   /// SwitchInsn - This is the nearest enclosing switch instruction. It is
980   /// null if the current context is not in a switch.
981   llvm::SwitchInst *SwitchInsn;
982 
983   /// CaseRangeBlock - This block holds the condition check for the last case
984   /// statement range in the current switch instruction.
985   llvm::BasicBlock *CaseRangeBlock;
986 
987   /// OpaqueLValues - Keeps track of the current set of opaque value
988   /// expressions.
989   llvm::DenseMap<const OpaqueValueExpr *, LValue> OpaqueLValues;
990   llvm::DenseMap<const OpaqueValueExpr *, RValue> OpaqueRValues;
991 
992   // VLASizeMap - This keeps track of the associated size for each VLA type.
993   // We track this by the size expression rather than the type itself because
994   // in certain situations, like a const qualifier applied to a VLA typedef,
995   // multiple VLA types can share the same size expression.
996   // FIXME: Maybe this could be a stack of maps that is pushed/popped as we
997   // enter/leave scopes.
998   llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap;
999 
1000   /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
1001   /// calling llvm.stacksave for multiple VLAs in the same scope.
1002   bool DidCallStackSave;
1003 
1004   /// A block containing a single 'unreachable' instruction.  Created
1005   /// lazily by getUnreachableBlock().
1006   llvm::BasicBlock *UnreachableBlock;
1007 
1008   /// CXXThisDecl - When generating code for a C++ member function,
1009   /// this will hold the implicit 'this' declaration.
1010   ImplicitParamDecl *CXXThisDecl;
1011   llvm::Value *CXXThisValue;
1012 
1013   /// CXXVTTDecl - When generating code for a base object constructor or
1014   /// base object destructor with virtual bases, this will hold the implicit
1015   /// VTT parameter.
1016   ImplicitParamDecl *CXXVTTDecl;
1017   llvm::Value *CXXVTTValue;
1018 
1019   /// OutermostConditional - Points to the outermost active
1020   /// conditional control.  This is used so that we know if a
1021   /// temporary should be destroyed conditionally.
1022   ConditionalEvaluation *OutermostConditional;
1023 
1024 
1025   /// ByRefValueInfo - For each __block variable, contains a pair of the LLVM
1026   /// type as well as the field number that contains the actual data.
1027   llvm::DenseMap<const ValueDecl *, std::pair<const llvm::Type *,
1028                                               unsigned> > ByRefValueInfo;
1029 
1030   llvm::BasicBlock *TerminateLandingPad;
1031   llvm::BasicBlock *TerminateHandler;
1032   llvm::BasicBlock *TrapBB;
1033 
1034 public:
1035   CodeGenFunction(CodeGenModule &cgm);
1036 
1037   CodeGenTypes &getTypes() const { return CGM.getTypes(); }
1038   ASTContext &getContext() const;
1039   CGDebugInfo *getDebugInfo() {
1040     if (DisableDebugInfo)
1041       return NULL;
1042     return DebugInfo;
1043   }
1044   void disableDebugInfo() { DisableDebugInfo = true; }
1045   void enableDebugInfo() { DisableDebugInfo = false; }
1046 
1047 
1048   const LangOptions &getLangOptions() const { return CGM.getLangOptions(); }
1049 
1050   /// Returns a pointer to the function's exception object slot, which
1051   /// is assigned in every landing pad.
1052   llvm::Value *getExceptionSlot();
1053 
1054   llvm::Value *getNormalCleanupDestSlot();
1055   llvm::Value *getEHCleanupDestSlot();
1056 
1057   llvm::BasicBlock *getUnreachableBlock() {
1058     if (!UnreachableBlock) {
1059       UnreachableBlock = createBasicBlock("unreachable");
1060       new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
1061     }
1062     return UnreachableBlock;
1063   }
1064 
1065   llvm::BasicBlock *getInvokeDest() {
1066     if (!EHStack.requiresLandingPad()) return 0;
1067     return getInvokeDestImpl();
1068   }
1069 
1070   llvm::LLVMContext &getLLVMContext() { return CGM.getLLVMContext(); }
1071 
1072   //===--------------------------------------------------------------------===//
1073   //                                  Objective-C
1074   //===--------------------------------------------------------------------===//
1075 
1076   void GenerateObjCMethod(const ObjCMethodDecl *OMD);
1077 
1078   void StartObjCMethod(const ObjCMethodDecl *MD,
1079                        const ObjCContainerDecl *CD);
1080 
1081   /// GenerateObjCGetter - Synthesize an Objective-C property getter function.
1082   void GenerateObjCGetter(ObjCImplementationDecl *IMP,
1083                           const ObjCPropertyImplDecl *PID);
1084   void GenerateObjCGetterBody(ObjCIvarDecl *Ivar, bool IsAtomic, bool IsStrong);
1085   void GenerateObjCAtomicSetterBody(ObjCMethodDecl *OMD,
1086                                     ObjCIvarDecl *Ivar);
1087 
1088   void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
1089                                   ObjCMethodDecl *MD, bool ctor);
1090 
1091   /// GenerateObjCSetter - Synthesize an Objective-C property setter function
1092   /// for the given property.
1093   void GenerateObjCSetter(ObjCImplementationDecl *IMP,
1094                           const ObjCPropertyImplDecl *PID);
1095   bool IndirectObjCSetterArg(const CGFunctionInfo &FI);
1096   bool IvarTypeWithAggrGCObjects(QualType Ty);
1097 
1098   //===--------------------------------------------------------------------===//
1099   //                                  Block Bits
1100   //===--------------------------------------------------------------------===//
1101 
1102   llvm::Value *EmitBlockLiteral(const BlockExpr *);
1103   llvm::Constant *BuildDescriptorBlockDecl(const BlockExpr *,
1104                                            const CGBlockInfo &Info,
1105                                            const llvm::StructType *,
1106                                            llvm::Constant *BlockVarLayout);
1107 
1108   llvm::Function *GenerateBlockFunction(GlobalDecl GD,
1109                                         const CGBlockInfo &Info,
1110                                         const Decl *OuterFuncDecl,
1111                                         const DeclMapTy &ldm);
1112 
1113   llvm::Constant *GenerateCopyHelperFunction(const CGBlockInfo &blockInfo);
1114   llvm::Constant *GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo);
1115 
1116   void BuildBlockRelease(llvm::Value *DeclPtr, BlockFieldFlags flags);
1117 
1118   class AutoVarEmission;
1119 
1120   void emitByrefStructureInit(const AutoVarEmission &emission);
1121   void enterByrefCleanup(const AutoVarEmission &emission);
1122 
1123   llvm::Value *LoadBlockStruct() {
1124     assert(BlockPointer && "no block pointer set!");
1125     return BlockPointer;
1126   }
1127 
1128   void AllocateBlockCXXThisPointer(const CXXThisExpr *E);
1129   void AllocateBlockDecl(const BlockDeclRefExpr *E);
1130   llvm::Value *GetAddrOfBlockDecl(const BlockDeclRefExpr *E) {
1131     return GetAddrOfBlockDecl(E->getDecl(), E->isByRef());
1132   }
1133   llvm::Value *GetAddrOfBlockDecl(const VarDecl *var, bool ByRef);
1134   const llvm::Type *BuildByRefType(const VarDecl *var);
1135 
1136   void GenerateCode(GlobalDecl GD, llvm::Function *Fn,
1137                     const CGFunctionInfo &FnInfo);
1138   void StartFunction(GlobalDecl GD, QualType RetTy,
1139                      llvm::Function *Fn,
1140                      const CGFunctionInfo &FnInfo,
1141                      const FunctionArgList &Args,
1142                      SourceLocation StartLoc);
1143 
1144   void EmitConstructorBody(FunctionArgList &Args);
1145   void EmitDestructorBody(FunctionArgList &Args);
1146   void EmitFunctionBody(FunctionArgList &Args);
1147 
1148   /// EmitReturnBlock - Emit the unified return block, trying to avoid its
1149   /// emission when possible.
1150   void EmitReturnBlock();
1151 
1152   /// FinishFunction - Complete IR generation of the current function. It is
1153   /// legal to call this function even if there is no current insertion point.
1154   void FinishFunction(SourceLocation EndLoc=SourceLocation());
1155 
1156   /// GenerateThunk - Generate a thunk for the given method.
1157   void GenerateThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
1158                      GlobalDecl GD, const ThunkInfo &Thunk);
1159 
1160   void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type,
1161                         FunctionArgList &Args);
1162 
1163   /// InitializeVTablePointer - Initialize the vtable pointer of the given
1164   /// subobject.
1165   ///
1166   void InitializeVTablePointer(BaseSubobject Base,
1167                                const CXXRecordDecl *NearestVBase,
1168                                CharUnits OffsetFromNearestVBase,
1169                                llvm::Constant *VTable,
1170                                const CXXRecordDecl *VTableClass);
1171 
1172   typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy;
1173   void InitializeVTablePointers(BaseSubobject Base,
1174                                 const CXXRecordDecl *NearestVBase,
1175                                 CharUnits OffsetFromNearestVBase,
1176                                 bool BaseIsNonVirtualPrimaryBase,
1177                                 llvm::Constant *VTable,
1178                                 const CXXRecordDecl *VTableClass,
1179                                 VisitedVirtualBasesSetTy& VBases);
1180 
1181   void InitializeVTablePointers(const CXXRecordDecl *ClassDecl);
1182 
1183   /// GetVTablePtr - Return the Value of the vtable pointer member pointed
1184   /// to by This.
1185   llvm::Value *GetVTablePtr(llvm::Value *This, const llvm::Type *Ty);
1186 
1187   /// EnterDtorCleanups - Enter the cleanups necessary to complete the
1188   /// given phase of destruction for a destructor.  The end result
1189   /// should call destructors on members and base classes in reverse
1190   /// order of their construction.
1191   void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type);
1192 
1193   /// ShouldInstrumentFunction - Return true if the current function should be
1194   /// instrumented with __cyg_profile_func_* calls
1195   bool ShouldInstrumentFunction();
1196 
1197   /// EmitFunctionInstrumentation - Emit LLVM code to call the specified
1198   /// instrumentation function with the current function and the call site, if
1199   /// function instrumentation is enabled.
1200   void EmitFunctionInstrumentation(const char *Fn);
1201 
1202   /// EmitMCountInstrumentation - Emit call to .mcount.
1203   void EmitMCountInstrumentation();
1204 
1205   /// EmitFunctionProlog - Emit the target specific LLVM code to load the
1206   /// arguments for the given function. This is also responsible for naming the
1207   /// LLVM function arguments.
1208   void EmitFunctionProlog(const CGFunctionInfo &FI,
1209                           llvm::Function *Fn,
1210                           const FunctionArgList &Args);
1211 
1212   /// EmitFunctionEpilog - Emit the target specific LLVM code to return the
1213   /// given temporary.
1214   void EmitFunctionEpilog(const CGFunctionInfo &FI);
1215 
1216   /// EmitStartEHSpec - Emit the start of the exception spec.
1217   void EmitStartEHSpec(const Decl *D);
1218 
1219   /// EmitEndEHSpec - Emit the end of the exception spec.
1220   void EmitEndEHSpec(const Decl *D);
1221 
1222   /// getTerminateLandingPad - Return a landing pad that just calls terminate.
1223   llvm::BasicBlock *getTerminateLandingPad();
1224 
1225   /// getTerminateHandler - Return a handler (not a landing pad, just
1226   /// a catch handler) that just calls terminate.  This is used when
1227   /// a terminate scope encloses a try.
1228   llvm::BasicBlock *getTerminateHandler();
1229 
1230   const llvm::Type *ConvertTypeForMem(QualType T);
1231   const llvm::Type *ConvertType(QualType T);
1232   const llvm::Type *ConvertType(const TypeDecl *T) {
1233     return ConvertType(getContext().getTypeDeclType(T));
1234   }
1235 
1236   /// LoadObjCSelf - Load the value of self. This function is only valid while
1237   /// generating code for an Objective-C method.
1238   llvm::Value *LoadObjCSelf();
1239 
1240   /// TypeOfSelfObject - Return type of object that this self represents.
1241   QualType TypeOfSelfObject();
1242 
1243   /// hasAggregateLLVMType - Return true if the specified AST type will map into
1244   /// an aggregate LLVM type or is void.
1245   static bool hasAggregateLLVMType(QualType T);
1246 
1247   /// createBasicBlock - Create an LLVM basic block.
1248   llvm::BasicBlock *createBasicBlock(llvm::StringRef name = "",
1249                                      llvm::Function *parent = 0,
1250                                      llvm::BasicBlock *before = 0) {
1251 #ifdef NDEBUG
1252     return llvm::BasicBlock::Create(getLLVMContext(), "", parent, before);
1253 #else
1254     return llvm::BasicBlock::Create(getLLVMContext(), name, parent, before);
1255 #endif
1256   }
1257 
1258   /// getJumpDestForLabel - Return the JumpDest that the specified label
1259   /// maps to.
1260   JumpDest getJumpDestForLabel(const LabelDecl *S);
1261 
1262   /// SimplifyForwardingBlocks - If the given basic block is only a branch to
1263   /// another basic block, simplify it. This assumes that no other code could
1264   /// potentially reference the basic block.
1265   void SimplifyForwardingBlocks(llvm::BasicBlock *BB);
1266 
1267   /// EmitBlock - Emit the given block \arg BB and set it as the insert point,
1268   /// adding a fall-through branch from the current insert block if
1269   /// necessary. It is legal to call this function even if there is no current
1270   /// insertion point.
1271   ///
1272   /// IsFinished - If true, indicates that the caller has finished emitting
1273   /// branches to the given block and does not expect to emit code into it. This
1274   /// means the block can be ignored if it is unreachable.
1275   void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);
1276 
1277   /// EmitBranch - Emit a branch to the specified basic block from the current
1278   /// insert block, taking care to avoid creation of branches from dummy
1279   /// blocks. It is legal to call this function even if there is no current
1280   /// insertion point.
1281   ///
1282   /// This function clears the current insertion point. The caller should follow
1283   /// calls to this function with calls to Emit*Block prior to generating new
1284   /// code.
1285   void EmitBranch(llvm::BasicBlock *Block);
1286 
1287   /// HaveInsertPoint - True if an insertion point is defined. If not, this
1288   /// indicates that the current code being emitted is unreachable.
1289   bool HaveInsertPoint() const {
1290     return Builder.GetInsertBlock() != 0;
1291   }
1292 
1293   /// EnsureInsertPoint - Ensure that an insertion point is defined so that
1294   /// emitted IR has a place to go. Note that by definition, if this function
1295   /// creates a block then that block is unreachable; callers may do better to
1296   /// detect when no insertion point is defined and simply skip IR generation.
1297   void EnsureInsertPoint() {
1298     if (!HaveInsertPoint())
1299       EmitBlock(createBasicBlock());
1300   }
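
  // Sketch of typical block emission for an 'if' without an 'else'
  // ('CondV' is an illustrative i1 condition value):
  //   llvm::BasicBlock *ThenBB = createBasicBlock("if.then");
  //   llvm::BasicBlock *EndBB = createBasicBlock("if.end");
  //   Builder.CreateCondBr(CondV, ThenBB, EndBB);
  //   EmitBlock(ThenBB);
  //   ... emit the 'then' body ...
  //   EmitBranch(EndBB); // clears the insertion point
  //   EmitBlock(EndBB);  // becomes the new insertion point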
1301 
1302   /// ErrorUnsupported - Print out an error that codegen doesn't support the
1303   /// specified stmt yet.
1304   void ErrorUnsupported(const Stmt *S, const char *Type,
1305                         bool OmitOnError=false);
1306 
1307   //===--------------------------------------------------------------------===//
1308   //                                  Helpers
1309   //===--------------------------------------------------------------------===//
1310 
1311   LValue MakeAddrLValue(llvm::Value *V, QualType T, unsigned Alignment = 0) {
1312     return LValue::MakeAddr(V, T, Alignment, getContext(),
1313                             CGM.getTBAAInfo(T));
1314   }
1315 
1316   /// CreateTempAlloca - This creates an alloca and inserts it into the entry
1317   /// block. The caller is responsible for setting an appropriate alignment on
1318   /// the alloca.
1319   llvm::AllocaInst *CreateTempAlloca(const llvm::Type *Ty,
1320                                      const llvm::Twine &Name = "tmp");
1321 
1322   /// InitTempAlloca - Provide an initial value for the given alloca.
1323   void InitTempAlloca(llvm::AllocaInst *Alloca, llvm::Value *Value);
1324 
1325   /// CreateIRTemp - Create a temporary IR object of the given type, with
1326   /// appropriate alignment. This routine should only be used when a temporary
1327   /// value needs to be stored into an alloca (for example, to avoid explicit
1328   /// PHI construction), but the type is the IR type, not the type appropriate
1329   /// for storing in memory.
1330   llvm::AllocaInst *CreateIRTemp(QualType T, const llvm::Twine &Name = "tmp");
1331 
1332   /// CreateMemTemp - Create a temporary memory object of the given type, with
1333   /// appropriate alignment.
1334   llvm::AllocaInst *CreateMemTemp(QualType T, const llvm::Twine &Name = "tmp");
1335 
1336   /// CreateAggTemp - Create a temporary memory object for the given
1337   /// aggregate type.
1338   AggValueSlot CreateAggTemp(QualType T, const llvm::Twine &Name = "tmp") {
1339     return AggValueSlot::forAddr(CreateMemTemp(T, Name), false, false);
1340   }
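
  // Sketch: materializing an aggregate temporary and evaluating into it
  // ('E' is an illustrative Expr of record type):
  //   AggValueSlot Slot = CreateAggTemp(E->getType(), "agg.tmp");
  //   EmitAnyExpr(E, Slot);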
1341 
1342   /// Emit a cast to void* in the appropriate address space.
1343   llvm::Value *EmitCastToVoidPtr(llvm::Value *value);
1344 
1345   /// EvaluateExprAsBool - Perform the usual unary conversions on the specified
1346   /// expression and compare the result against zero, returning an Int1Ty value.
1347   llvm::Value *EvaluateExprAsBool(const Expr *E);
1348 
1349   /// EmitIgnoredExpr - Emit an expression in a context which ignores the result.
1350   void EmitIgnoredExpr(const Expr *E);
1351 
1352   /// EmitAnyExpr - Emit code to compute the specified expression which can have
1353   /// any type.  The result is returned as an RValue struct.  If this is an
1354   /// aggregate expression, the AggSlot argument indicates where
1355   /// the result should be returned.
1356   ///
1357   /// \param IgnoreResult - True if the resulting value isn't used.
1358   RValue EmitAnyExpr(const Expr *E,
1359                      AggValueSlot AggSlot = AggValueSlot::ignored(),
1360                      bool IgnoreResult = false);
1361 
1362   // EmitVAListRef - Emit a "reference" to a va_list; this is either the address
1363   // or the value of the expression, depending on how va_list is defined.
1364   llvm::Value *EmitVAListRef(const Expr *E);
1365 
1366   /// EmitAnyExprToTemp - Similar to EmitAnyExpr(), however, the result will
1367   /// always be accessible even if no aggregate location is provided.
1368   RValue EmitAnyExprToTemp(const Expr *E);
1369 
1370   /// EmitAnyExprToMem - Emits the code necessary to evaluate an
1371   /// arbitrary expression into the given memory location.
1372   void EmitAnyExprToMem(const Expr *E, llvm::Value *Location,
1373                         bool IsLocationVolatile,
1374                         bool IsInitializer);
1375 
1376   /// EmitExprAsInit - Emits the code necessary to initialize a
1377   /// location in memory with the given initializer.
1378   void EmitExprAsInit(const Expr *init, const VarDecl *var,
1379                       llvm::Value *loc, CharUnits alignment,
1380                       bool capturedByInit);
1381 
  /// EmitAggregateCopy - Emit an aggregate copy.
1383   ///
1384   /// \param isVolatile - True iff either the source or the destination is
1385   /// volatile.
1386   void EmitAggregateCopy(llvm::Value *DestPtr, llvm::Value *SrcPtr,
1387                          QualType EltTy, bool isVolatile=false);
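  // Illustrative sketch (hypothetical DestPtr, SrcPtr and RecordTy):
  //
  //   EmitAggregateCopy(DestPtr, SrcPtr, RecordTy, /*isVolatile=*/false);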
1388 
  /// StartBlock - Start a new block named N. If the insert block is a dummy
  /// block, reuse it.
1391   void StartBlock(const char *N);
1392 
1393   /// GetAddrOfStaticLocalVar - Return the address of a static local variable.
1394   llvm::Constant *GetAddrOfStaticLocalVar(const VarDecl *BVD) {
1395     return cast<llvm::Constant>(GetAddrOfLocalVar(BVD));
1396   }
1397 
1398   /// GetAddrOfLocalVar - Return the address of a local variable.
1399   llvm::Value *GetAddrOfLocalVar(const VarDecl *VD) {
1400     llvm::Value *Res = LocalDeclMap[VD];
1401     assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
1402     return Res;
1403   }
1404 
1405   /// getOpaqueLValueMapping - Given an opaque value expression (which
1406   /// must be mapped to an l-value), return its mapping.
1407   const LValue &getOpaqueLValueMapping(const OpaqueValueExpr *e) {
1408     assert(OpaqueValueMapping::shouldBindAsLValue(e));
1409 
1410     llvm::DenseMap<const OpaqueValueExpr*,LValue>::iterator
1411       it = OpaqueLValues.find(e);
1412     assert(it != OpaqueLValues.end() && "no mapping for opaque value!");
1413     return it->second;
1414   }
1415 
1416   /// getOpaqueRValueMapping - Given an opaque value expression (which
1417   /// must be mapped to an r-value), return its mapping.
1418   const RValue &getOpaqueRValueMapping(const OpaqueValueExpr *e) {
1419     assert(!OpaqueValueMapping::shouldBindAsLValue(e));
1420 
1421     llvm::DenseMap<const OpaqueValueExpr*,RValue>::iterator
1422       it = OpaqueRValues.find(e);
1423     assert(it != OpaqueRValues.end() && "no mapping for opaque value!");
1424     return it->second;
1425   }
1426 
1427   /// getAccessedFieldNo - Given an encoded value and a result number, return
1428   /// the input field number being accessed.
1429   static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);
1430 
1431   llvm::BlockAddress *GetAddrOfLabel(const LabelDecl *L);
1432   llvm::BasicBlock *GetIndirectGotoBlock();
1433 
1434   /// EmitNullInitialization - Generate code to set a value of the given type to
  /// null. If the type contains data member pointers, they will be initialized
1436   /// to -1 in accordance with the Itanium C++ ABI.
1437   void EmitNullInitialization(llvm::Value *DestPtr, QualType Ty);
1438 
1439   // EmitVAArg - Generate code to get an argument from the passed in pointer
1440   // and update it accordingly. The return value is a pointer to the argument.
1441   // FIXME: We should be able to get rid of this method and use the va_arg
1442   // instruction in LLVM instead once it works well enough.
1443   llvm::Value *EmitVAArg(llvm::Value *VAListAddr, QualType Ty);
1444 
  /// EmitVLASize - Generate code for any VLA size expressions that might occur
  /// in a variably modified type. If Ty is a VLA type, returns the value that
  /// corresponds to the size, in bytes, of the VLA type; returns 0 otherwise.
1448   ///
1449   /// This function can be called with a null (unreachable) insert point.
1450   llvm::Value *EmitVLASize(QualType Ty);
1451 
1452   // GetVLASize - Returns an LLVM value that corresponds to the size in bytes
1453   // of a variable length array type.
1454   llvm::Value *GetVLASize(const VariableArrayType *);
1455 
1456   /// LoadCXXThis - Load the value of 'this'. This function is only valid while
  /// generating code for a C++ member function.
1458   llvm::Value *LoadCXXThis() {
1459     assert(CXXThisValue && "no 'this' value for this function");
1460     return CXXThisValue;
1461   }
1462 
  /// LoadCXXVTT - Load the VTT parameter passed to base constructors and
  /// destructors of classes with virtual bases.
1465   llvm::Value *LoadCXXVTT() {
1466     assert(CXXVTTValue && "no VTT value for this function");
1467     return CXXVTTValue;
1468   }
1469 
  /// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
  /// complete class to the given direct base.
1472   llvm::Value *
1473   GetAddressOfDirectBaseInCompleteClass(llvm::Value *Value,
1474                                         const CXXRecordDecl *Derived,
1475                                         const CXXRecordDecl *Base,
1476                                         bool BaseIsVirtual);
1477 
  /// GetAddressOfBaseClass - This function adds the necessary delta to the
  /// load of 'this' and returns the address of the base class.
1480   llvm::Value *GetAddressOfBaseClass(llvm::Value *Value,
1481                                      const CXXRecordDecl *Derived,
1482                                      CastExpr::path_const_iterator PathBegin,
1483                                      CastExpr::path_const_iterator PathEnd,
1484                                      bool NullCheckValue);
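  // Illustrative sketch (hypothetical This and Derived; CE is a
  // derived-to-base CastExpr supplying the inheritance path):
  //
  //   llvm::Value *BaseAddr =
  //     GetAddressOfBaseClass(This, Derived, CE->path_begin(), CE->path_end(),
  //                           /*NullCheckValue=*/true);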
1485 
1486   llvm::Value *GetAddressOfDerivedClass(llvm::Value *Value,
1487                                         const CXXRecordDecl *Derived,
1488                                         CastExpr::path_const_iterator PathBegin,
1489                                         CastExpr::path_const_iterator PathEnd,
1490                                         bool NullCheckValue);
1491 
1492   llvm::Value *GetVirtualBaseClassOffset(llvm::Value *This,
1493                                          const CXXRecordDecl *ClassDecl,
1494                                          const CXXRecordDecl *BaseClassDecl);
1495 
1496   void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
1497                                       CXXCtorType CtorType,
1498                                       const FunctionArgList &Args);
1499   void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
1500                               bool ForVirtualBase, llvm::Value *This,
1501                               CallExpr::const_arg_iterator ArgBeg,
1502                               CallExpr::const_arg_iterator ArgEnd);
1503 
1504   void EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
1505                               llvm::Value *This, llvm::Value *Src,
1506                               CallExpr::const_arg_iterator ArgBeg,
1507                               CallExpr::const_arg_iterator ArgEnd);
1508 
1509   void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
1510                                   const ConstantArrayType *ArrayTy,
1511                                   llvm::Value *ArrayPtr,
1512                                   CallExpr::const_arg_iterator ArgBeg,
1513                                   CallExpr::const_arg_iterator ArgEnd,
1514                                   bool ZeroInitialization = false);
1515 
1516   void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
1517                                   llvm::Value *NumElements,
1518                                   llvm::Value *ArrayPtr,
1519                                   CallExpr::const_arg_iterator ArgBeg,
1520                                   CallExpr::const_arg_iterator ArgEnd,
1521                                   bool ZeroInitialization = false);
1522 
1523   void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1524                                  const ArrayType *Array,
1525                                  llvm::Value *This);
1526 
1527   void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
1528                                  llvm::Value *NumElements,
1529                                  llvm::Value *This);
1530 
1531   llvm::Function *GenerateCXXAggrDestructorHelper(const CXXDestructorDecl *D,
1532                                                   const ArrayType *Array,
1533                                                   llvm::Value *This);
1534 
1535   void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
1536                              bool ForVirtualBase, llvm::Value *This);
1537 
1538   void EmitNewArrayInitializer(const CXXNewExpr *E, llvm::Value *NewPtr,
1539                                llvm::Value *NumElements);
1540 
1541   void EmitCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr);
1542 
1543   llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
1544   void EmitCXXDeleteExpr(const CXXDeleteExpr *E);
1545 
1546   void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr,
1547                       QualType DeleteTy);
1548 
1549   llvm::Value* EmitCXXTypeidExpr(const CXXTypeidExpr *E);
1550   llvm::Value *EmitDynamicCast(llvm::Value *V, const CXXDynamicCastExpr *DCE);
1551 
1552   void EmitCheck(llvm::Value *, unsigned Size);
1553 
1554   llvm::Value *EmitScalarPrePostIncDec(const UnaryOperator *E, LValue LV,
1555                                        bool isInc, bool isPre);
1556   ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
1557                                          bool isInc, bool isPre);
1558   //===--------------------------------------------------------------------===//
1559   //                            Declaration Emission
1560   //===--------------------------------------------------------------------===//
1561 
1562   /// EmitDecl - Emit a declaration.
1563   ///
1564   /// This function can be called with a null (unreachable) insert point.
1565   void EmitDecl(const Decl &D);
1566 
1567   /// EmitVarDecl - Emit a local variable declaration.
1568   ///
1569   /// This function can be called with a null (unreachable) insert point.
1570   void EmitVarDecl(const VarDecl &D);
1571 
1572   typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D,
1573                              llvm::Value *Address);
1574 
1575   /// EmitAutoVarDecl - Emit an auto variable declaration.
1576   ///
1577   /// This function can be called with a null (unreachable) insert point.
1578   void EmitAutoVarDecl(const VarDecl &D);
1579 
1580   class AutoVarEmission {
1581     friend class CodeGenFunction;
1582 
1583     const VarDecl *Variable;
1584 
1585     /// The alignment of the variable.
1586     CharUnits Alignment;
1587 
1588     /// The address of the alloca.  Null if the variable was emitted
1589     /// as a global constant.
1590     llvm::Value *Address;
1591 
    /// The NRVO flag for this variable, if the named return value
    /// optimization applies; otherwise null.
    llvm::Value *NRVOFlag;
1593 
1594     /// True if the variable is a __block variable.
1595     bool IsByRef;
1596 
1597     /// True if the variable is of aggregate type and has a constant
1598     /// initializer.
1599     bool IsConstantAggregate;
1600 
1601     struct Invalid {};
1602     AutoVarEmission(Invalid) : Variable(0) {}
1603 
1604     AutoVarEmission(const VarDecl &variable)
1605       : Variable(&variable), Address(0), NRVOFlag(0),
1606         IsByRef(false), IsConstantAggregate(false) {}
1607 
1608     bool wasEmittedAsGlobal() const { return Address == 0; }
1609 
1610   public:
1611     static AutoVarEmission invalid() { return AutoVarEmission(Invalid()); }
1612 
1613     /// Returns the address of the object within this declaration.
1614     /// Note that this does not chase the forwarding pointer for
1615     /// __block decls.
1616     llvm::Value *getObjectAddress(CodeGenFunction &CGF) const {
1617       if (!IsByRef) return Address;
1618 
1619       return CGF.Builder.CreateStructGEP(Address,
1620                                          CGF.getByRefValueLLVMField(Variable),
1621                                          Variable->getNameAsString());
1622     }
1623   };
1624   AutoVarEmission EmitAutoVarAlloca(const VarDecl &var);
1625   void EmitAutoVarInit(const AutoVarEmission &emission);
1626   void EmitAutoVarCleanups(const AutoVarEmission &emission);
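  // Illustrative sketch: the three phases above can be run in sequence for a
  // local variable D, which is roughly what EmitAutoVarDecl does (an
  // assumption about its structure, not a guaranteed equivalence):
  //
  //   AutoVarEmission emission = EmitAutoVarAlloca(D);
  //   EmitAutoVarInit(emission);
  //   EmitAutoVarCleanups(emission);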
1627 
1628   void EmitStaticVarDecl(const VarDecl &D,
1629                          llvm::GlobalValue::LinkageTypes Linkage);
1630 
1631   /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl.
1632   void EmitParmDecl(const VarDecl &D, llvm::Value *Arg, unsigned ArgNo);
1633 
1634   /// protectFromPeepholes - Protect a value that we're intending to
1635   /// store to the side, but which will probably be used later, from
1636   /// aggressive peepholing optimizations that might delete it.
1637   ///
1638   /// Pass the result to unprotectFromPeepholes to declare that
1639   /// protection is no longer required.
1640   ///
  /// There's no particular reason why this shouldn't apply to l-values; it's
  /// just that no existing peepholes work on pointers.
1643   PeepholeProtection protectFromPeepholes(RValue rvalue);
1644   void unprotectFromPeepholes(PeepholeProtection protection);
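  // Illustrative sketch (hypothetical rvalue): bracket the region during
  // which the saved value must survive peephole optimizations.
  //
  //   PeepholeProtection protection = protectFromPeepholes(rvalue);
  //   ... emit intervening code ...
  //   unprotectFromPeepholes(protection);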
1645 
1646   //===--------------------------------------------------------------------===//
1647   //                             Statement Emission
1648   //===--------------------------------------------------------------------===//
1649 
1650   /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info.
1651   void EmitStopPoint(const Stmt *S);
1652 
1653   /// EmitStmt - Emit the code for the statement \arg S. It is legal to call
1654   /// this function even if there is no current insertion point.
1655   ///
1656   /// This function may clear the current insertion point; callers should use
1657   /// EnsureInsertPoint if they wish to subsequently generate code without first
1658   /// calling EmitBlock, EmitBranch, or EmitStmt.
1659   void EmitStmt(const Stmt *S);
1660 
1661   /// EmitSimpleStmt - Try to emit a "simple" statement which does not
1662   /// necessarily require an insertion point or debug information; typically
1663   /// because the statement amounts to a jump or a container of other
1664   /// statements.
1665   ///
1666   /// \return True if the statement was handled.
1667   bool EmitSimpleStmt(const Stmt *S);
1668 
1669   RValue EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
1670                           AggValueSlot AVS = AggValueSlot::ignored());
1671 
1672   /// EmitLabel - Emit the block for the given label. It is legal to call this
1673   /// function even if there is no current insertion point.
1674   void EmitLabel(const LabelDecl *D); // helper for EmitLabelStmt.
1675 
1676   void EmitLabelStmt(const LabelStmt &S);
1677   void EmitGotoStmt(const GotoStmt &S);
1678   void EmitIndirectGotoStmt(const IndirectGotoStmt &S);
1679   void EmitIfStmt(const IfStmt &S);
1680   void EmitWhileStmt(const WhileStmt &S);
1681   void EmitDoStmt(const DoStmt &S);
1682   void EmitForStmt(const ForStmt &S);
1683   void EmitReturnStmt(const ReturnStmt &S);
1684   void EmitDeclStmt(const DeclStmt &S);
1685   void EmitBreakStmt(const BreakStmt &S);
1686   void EmitContinueStmt(const ContinueStmt &S);
1687   void EmitSwitchStmt(const SwitchStmt &S);
1688   void EmitDefaultStmt(const DefaultStmt &S);
1689   void EmitCaseStmt(const CaseStmt &S);
1690   void EmitCaseStmtRange(const CaseStmt &S);
1691   void EmitAsmStmt(const AsmStmt &S);
1692 
1693   void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S);
1694   void EmitObjCAtTryStmt(const ObjCAtTryStmt &S);
1695   void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S);
1696   void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S);
1697 
1698   llvm::Constant *getUnwindResumeOrRethrowFn();
1699   void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
1700   void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
1701 
1702   void EmitCXXTryStmt(const CXXTryStmt &S);
1703   void EmitCXXForRangeStmt(const CXXForRangeStmt &S);
1704 
1705   //===--------------------------------------------------------------------===//
1706   //                         LValue Expression Emission
1707   //===--------------------------------------------------------------------===//
1708 
1709   /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type.
1710   RValue GetUndefRValue(QualType Ty);
1711 
1712   /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E
1713   /// and issue an ErrorUnsupported style diagnostic (using the
1714   /// provided Name).
1715   RValue EmitUnsupportedRValue(const Expr *E,
1716                                const char *Name);
1717 
1718   /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and issue
1719   /// an ErrorUnsupported style diagnostic (using the provided Name).
1720   LValue EmitUnsupportedLValue(const Expr *E,
1721                                const char *Name);
1722 
1723   /// EmitLValue - Emit code to compute a designator that specifies the location
1724   /// of the expression.
1725   ///
1726   /// This can return one of two things: a simple address or a bitfield
1727   /// reference.  In either case, the LLVM Value* in the LValue structure is
1728   /// guaranteed to be an LLVM pointer type.
1729   ///
1730   /// If this returns a bitfield reference, nothing about the pointee type of
1731   /// the LLVM value is known: For example, it may not be a pointer to an
1732   /// integer.
1733   ///
1734   /// If this returns a normal address, and if the lvalue's C type is fixed
1735   /// size, this method guarantees that the returned pointer type will point to
  /// an LLVM type of the same size as the lvalue's type.  If the lvalue has a
1737   /// variable length type, this is not possible.
1738   ///
1739   LValue EmitLValue(const Expr *E);
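  // Illustrative sketch (hypothetical expression E): compute an lvalue and
  // then load its current value as an rvalue.
  //
  //   LValue LV = EmitLValue(E);
  //   RValue RV = EmitLoadOfLValue(LV, E->getType());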
1740 
1741   /// EmitCheckedLValue - Same as EmitLValue but additionally we generate
1742   /// checking code to guard against undefined behavior.  This is only
1743   /// suitable when we know that the address will be used to access the
1744   /// object.
1745   LValue EmitCheckedLValue(const Expr *E);
1746 
1747   /// EmitToMemory - Change a scalar value from its value
1748   /// representation to its in-memory representation.
1749   llvm::Value *EmitToMemory(llvm::Value *Value, QualType Ty);
1750 
1751   /// EmitFromMemory - Change a scalar value from its memory
1752   /// representation to its value representation.
1753   llvm::Value *EmitFromMemory(llvm::Value *Value, QualType Ty);
1754 
1755   /// EmitLoadOfScalar - Load a scalar value from an address, taking
1756   /// care to appropriately convert from the memory representation to
1757   /// the LLVM value representation.
1758   llvm::Value *EmitLoadOfScalar(llvm::Value *Addr, bool Volatile,
1759                                 unsigned Alignment, QualType Ty,
1760                                 llvm::MDNode *TBAAInfo = 0);
1761 
  /// EmitStoreOfScalar - Store a scalar value to an address, taking
  /// care to appropriately convert from the LLVM value representation to
  /// the memory representation.
1765   void EmitStoreOfScalar(llvm::Value *Value, llvm::Value *Addr,
1766                          bool Volatile, unsigned Alignment, QualType Ty,
1767                          llvm::MDNode *TBAAInfo = 0);
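  // Illustrative sketch (hypothetical SrcAddr, DestAddr, Ty, and alignment):
  // copying a scalar through its LLVM value representation.
  //
  //   llvm::Value *V = EmitLoadOfScalar(SrcAddr, /*Volatile=*/false,
  //                                     /*Alignment=*/4, Ty);
  //   EmitStoreOfScalar(V, DestAddr, /*Volatile=*/false, /*Alignment=*/4, Ty);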
1768 
  /// EmitLoadOfLValue - Given an lvalue that denotes a loadable value, load
  /// from its address and return the result as an rvalue.
1772   RValue EmitLoadOfLValue(LValue V, QualType LVType);
1773   RValue EmitLoadOfExtVectorElementLValue(LValue V, QualType LVType);
1774   RValue EmitLoadOfBitfieldLValue(LValue LV, QualType ExprType);
1775   RValue EmitLoadOfPropertyRefLValue(LValue LV,
1776                                  ReturnValueSlot Return = ReturnValueSlot());
1777 
1778   /// EmitStoreThroughLValue - Store the specified rvalue into the specified
  /// lvalue, where both are guaranteed to have the same type, and that type
1780   /// is 'Ty'.
1781   void EmitStoreThroughLValue(RValue Src, LValue Dst, QualType Ty);
1782   void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst,
1783                                                 QualType Ty);
1784   void EmitStoreThroughPropertyRefLValue(RValue Src, LValue Dst);
1785 
  /// EmitStoreThroughBitfieldLValue - Store Src into Dst with the same
  /// constraints as EmitStoreThroughLValue.
1788   ///
1789   /// \param Result [out] - If non-null, this will be set to a Value* for the
1790   /// bit-field contents after the store, appropriate for use as the result of
1791   /// an assignment to the bit-field.
1792   void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst, QualType Ty,
1793                                       llvm::Value **Result=0);
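  // Illustrative sketch (hypothetical Src, BitfieldLV and Ty): the optional
  // out-parameter receives the bit-field contents after the store.
  //
  //   llvm::Value *Updated = 0;
  //   EmitStoreThroughBitfieldLValue(Src, BitfieldLV, Ty, &Updated);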
1794 
1795   /// Emit an l-value for an assignment (simple or compound) of complex type.
1796   LValue EmitComplexAssignmentLValue(const BinaryOperator *E);
1797   LValue EmitComplexCompoundAssignmentLValue(const CompoundAssignOperator *E);
1798 
1799   // Note: only available for agg return types
1800   LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
1801   LValue EmitCompoundAssignmentLValue(const CompoundAssignOperator *E);
1802   // Note: only available for agg return types
1803   LValue EmitCallExprLValue(const CallExpr *E);
1804   // Note: only available for agg return types
1805   LValue EmitVAArgExprLValue(const VAArgExpr *E);
1806   LValue EmitDeclRefLValue(const DeclRefExpr *E);
1807   LValue EmitStringLiteralLValue(const StringLiteral *E);
1808   LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E);
1809   LValue EmitPredefinedLValue(const PredefinedExpr *E);
1810   LValue EmitUnaryOpLValue(const UnaryOperator *E);
1811   LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E);
1812   LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E);
1813   LValue EmitMemberExpr(const MemberExpr *E);
1814   LValue EmitObjCIsaExpr(const ObjCIsaExpr *E);
1815   LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E);
1816   LValue EmitConditionalOperatorLValue(const AbstractConditionalOperator *E);
1817   LValue EmitCastLValue(const CastExpr *E);
1818   LValue EmitNullInitializationLValue(const CXXScalarValueInitExpr *E);
1819   LValue EmitOpaqueValueLValue(const OpaqueValueExpr *e);
1820 
1821   llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface,
1822                               const ObjCIvarDecl *Ivar);
1823   LValue EmitLValueForAnonRecordField(llvm::Value* Base,
1824                                       const IndirectFieldDecl* Field,
1825                                       unsigned CVRQualifiers);
1826   LValue EmitLValueForField(llvm::Value* Base, const FieldDecl* Field,
1827                             unsigned CVRQualifiers);
1828 
1829   /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that
1830   /// if the Field is a reference, this will return the address of the reference
1831   /// and not the address of the value stored in the reference.
1832   LValue EmitLValueForFieldInitialization(llvm::Value* Base,
1833                                           const FieldDecl* Field,
1834                                           unsigned CVRQualifiers);
1835 
1836   LValue EmitLValueForIvar(QualType ObjectTy,
1837                            llvm::Value* Base, const ObjCIvarDecl *Ivar,
1838                            unsigned CVRQualifiers);
1839 
1840   LValue EmitLValueForBitfield(llvm::Value* Base, const FieldDecl* Field,
1841                                 unsigned CVRQualifiers);
1842 
1843   LValue EmitBlockDeclRefLValue(const BlockDeclRefExpr *E);
1844 
1845   LValue EmitCXXConstructLValue(const CXXConstructExpr *E);
1846   LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E);
1847   LValue EmitExprWithCleanupsLValue(const ExprWithCleanups *E);
1848   LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E);
1849 
1850   LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E);
1851   LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E);
1852   LValue EmitObjCPropertyRefLValue(const ObjCPropertyRefExpr *E);
1853   LValue EmitStmtExprLValue(const StmtExpr *E);
1854   LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E);
1855   LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E);
1856   void   EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::Constant *Init);
1857 
1858   //===--------------------------------------------------------------------===//
1859   //                         Scalar Expression Emission
1860   //===--------------------------------------------------------------------===//
1861 
1862   /// EmitCall - Generate a call of the given function, expecting the given
1863   /// result type, and using the given argument list which specifies both the
1864   /// LLVM arguments and the types they were derived from.
1865   ///
1866   /// \param TargetDecl - If given, the decl of the function in a direct call;
1867   /// used to set attributes on the call (noreturn, etc.).
1868   RValue EmitCall(const CGFunctionInfo &FnInfo,
1869                   llvm::Value *Callee,
1870                   ReturnValueSlot ReturnValue,
1871                   const CallArgList &Args,
1872                   const Decl *TargetDecl = 0,
1873                   llvm::Instruction **callOrInvoke = 0);
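  // Illustrative sketch (hypothetical FnInfo, Callee, Args and TargetDecl):
  // issuing a call once the argument list has been emitted.
  //
  //   RValue RV = EmitCall(FnInfo, Callee, ReturnValueSlot(), Args,
  //                        TargetDecl);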
1874 
1875   RValue EmitCall(QualType FnType, llvm::Value *Callee,
1876                   ReturnValueSlot ReturnValue,
1877                   CallExpr::const_arg_iterator ArgBeg,
1878                   CallExpr::const_arg_iterator ArgEnd,
1879                   const Decl *TargetDecl = 0);
1880   RValue EmitCallExpr(const CallExpr *E,
1881                       ReturnValueSlot ReturnValue = ReturnValueSlot());
1882 
1883   llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
1884                                   llvm::Value * const *ArgBegin,
1885                                   llvm::Value * const *ArgEnd,
1886                                   const llvm::Twine &Name = "");
1887 
1888   llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This,
1889                                 const llvm::Type *Ty);
1890   llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type,
1891                                 llvm::Value *This, const llvm::Type *Ty);
1892   llvm::Value *BuildAppleKextVirtualCall(const CXXMethodDecl *MD,
1893                                          NestedNameSpecifier *Qual,
1894                                          const llvm::Type *Ty);
1895 
1896   llvm::Value *BuildAppleKextVirtualDestructorCall(const CXXDestructorDecl *DD,
1897                                                    CXXDtorType Type,
1898                                                    const CXXRecordDecl *RD);
1899 
1900   RValue EmitCXXMemberCall(const CXXMethodDecl *MD,
1901                            llvm::Value *Callee,
1902                            ReturnValueSlot ReturnValue,
1903                            llvm::Value *This,
1904                            llvm::Value *VTT,
1905                            CallExpr::const_arg_iterator ArgBeg,
1906                            CallExpr::const_arg_iterator ArgEnd);
1907   RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E,
1908                                ReturnValueSlot ReturnValue);
1909   RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
1910                                       ReturnValueSlot ReturnValue);
1911 
1912   RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
1913                                        const CXXMethodDecl *MD,
1914                                        ReturnValueSlot ReturnValue);
1915 
1916 
1917   RValue EmitBuiltinExpr(const FunctionDecl *FD,
1918                          unsigned BuiltinID, const CallExpr *E);
1919 
1920   RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue);
1921 
1922   /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the call
1923   /// is unhandled by the current target.
1924   llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
1925 
1926   llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
1927   llvm::Value *EmitNeonCall(llvm::Function *F,
1928                             llvm::SmallVectorImpl<llvm::Value*> &O,
1929                             const char *name,
1930                             unsigned shift = 0, bool rightshift = false);
1931   llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx);
1932   llvm::Value *EmitNeonShiftVector(llvm::Value *V, const llvm::Type *Ty,
1933                                    bool negateForRightShift);
1934 
1935   llvm::Value *BuildVector(const llvm::SmallVectorImpl<llvm::Value*> &Ops);
1936   llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
1937   llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
1938 
1939   llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E);
1940   llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E);
1941   llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E);
1942   RValue EmitObjCMessageExpr(const ObjCMessageExpr *E,
1943                              ReturnValueSlot Return = ReturnValueSlot());
1944 
1945   /// EmitReferenceBindingToExpr - Emits a reference binding to the passed in
1946   /// expression. Will emit a temporary variable if E is not an LValue.
1947   RValue EmitReferenceBindingToExpr(const Expr* E,
1948                                     const NamedDecl *InitializedDecl);
1949 
1950   //===--------------------------------------------------------------------===//
1951   //                           Expression Emission
1952   //===--------------------------------------------------------------------===//
1953 
1954   // Expressions are broken into three classes: scalar, complex, aggregate.
1955 
1956   /// EmitScalarExpr - Emit the computation of the specified expression of LLVM
1957   /// scalar type, returning the result.
1958   llvm::Value *EmitScalarExpr(const Expr *E , bool IgnoreResultAssign = false);
1959 
1960   /// EmitScalarConversion - Emit a conversion from the specified type to the
1961   /// specified destination type, both of which are LLVM scalar types.
1962   llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy,
1963                                     QualType DstTy);
1964 
1965   /// EmitComplexToScalarConversion - Emit a conversion from the specified
1966   /// complex type to the specified destination type, where the destination type
1967   /// is an LLVM scalar type.
1968   llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy,
1969                                              QualType DstTy);
1970 
1971 
  /// EmitAggExpr - Emit the computation of the specified expression
  /// of aggregate type.  The result is computed into the given slot,
  /// which may be an ignored slot to indicate that the value is not needed.
1975   void EmitAggExpr(const Expr *E, AggValueSlot AS, bool IgnoreResult = false);
1976 
1977   /// EmitAggExprToLValue - Emit the computation of the specified expression of
1978   /// aggregate type into a temporary LValue.
1979   LValue EmitAggExprToLValue(const Expr *E);
1980 
1981   /// EmitGCMemmoveCollectable - Emit special API for structs with object
1982   /// pointers.
1983   void EmitGCMemmoveCollectable(llvm::Value *DestPtr, llvm::Value *SrcPtr,
1984                                 QualType Ty);
1985 
1986   /// EmitComplexExpr - Emit the computation of the specified expression of
1987   /// complex type, returning the result.
1988   ComplexPairTy EmitComplexExpr(const Expr *E,
1989                                 bool IgnoreReal = false,
1990                                 bool IgnoreImag = false);
1991 
1992   /// EmitComplexExprIntoAddr - Emit the computation of the specified expression
1993   /// of complex type, storing into the specified Value*.
1994   void EmitComplexExprIntoAddr(const Expr *E, llvm::Value *DestAddr,
1995                                bool DestIsVolatile);
1996 
1997   /// StoreComplexToAddr - Store a complex number into the specified address.
1998   void StoreComplexToAddr(ComplexPairTy V, llvm::Value *DestAddr,
1999                           bool DestIsVolatile);
2000   /// LoadComplexFromAddr - Load a complex number from the specified address.
2001   ComplexPairTy LoadComplexFromAddr(llvm::Value *SrcAddr, bool SrcIsVolatile);
2002 
2003   /// CreateStaticVarDecl - Create a zero-initialized LLVM global for
2004   /// a static local variable.
2005   llvm::GlobalVariable *CreateStaticVarDecl(const VarDecl &D,
2006                                             const char *Separator,
2007                                        llvm::GlobalValue::LinkageTypes Linkage);
2008 
2009   /// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the
2010   /// global variable that has already been created for it.  If the initializer
2011   /// has a different type than GV does, this may free GV and return a different
2012   /// one.  Otherwise it just returns GV.
2013   llvm::GlobalVariable *
2014   AddInitializerToStaticVarDecl(const VarDecl &D,
2015                                 llvm::GlobalVariable *GV);
2016 
2017 
2018   /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++
2019   /// variable with global storage.
2020   void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr);
2021 
2022   /// EmitCXXGlobalDtorRegistration - Emits a call to register the global ptr
2023   /// with the C++ runtime so that its destructor will be called at exit.
2024   void EmitCXXGlobalDtorRegistration(llvm::Constant *DtorFn,
2025                                      llvm::Constant *DeclPtr);
2026 
2027   /// Emit code in this function to perform a guarded variable
2028   /// initialization.  Guarded initializations are used when it's not
2029   /// possible to prove that an initialization will be done exactly
2030   /// once, e.g. with a static local variable or a static data member
2031   /// of a class template.
2032   void EmitCXXGuardedInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr);
2033 
2034   /// GenerateCXXGlobalInitFunc - Generates code for initializing global
2035   /// variables.
2036   void GenerateCXXGlobalInitFunc(llvm::Function *Fn,
2037                                  llvm::Constant **Decls,
2038                                  unsigned NumDecls);
2039 
2040   /// GenerateCXXGlobalDtorFunc - Generates code for destroying global
2041   /// variables.
2042   void GenerateCXXGlobalDtorFunc(llvm::Function *Fn,
2043                                  const std::vector<std::pair<llvm::WeakVH,
2044                                    llvm::Constant*> > &DtorsAndObjects);
2045 
2046   void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn,
2047                                         const VarDecl *D,
2048                                         llvm::GlobalVariable *Addr);
2049 
2050   void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest);
2051 
2052   void EmitSynthesizedCXXCopyCtor(llvm::Value *Dest, llvm::Value *Src,
2053                                   const Expr *Exp);
2054 
  RValue EmitExprWithCleanups(const ExprWithCleanups *E,
                              AggValueSlot Slot = AggValueSlot::ignored());
2057 
2058   void EmitCXXThrowExpr(const CXXThrowExpr *E);
2059 
2060   //===--------------------------------------------------------------------===//
2061   //                             Internal Helpers
2062   //===--------------------------------------------------------------------===//
2063 
  /// ContainsLabel - Return true if the statement contains a label in it.  If
  /// this statement is not executed normally, and it does not contain a label,
  /// we can just remove the code.
2067   static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false);
2068 
2069   /// containsBreak - Return true if the statement contains a break out of it.
2070   /// If the statement (recursively) contains a switch or loop with a break
2071   /// inside of it, this is fine.
2072   static bool containsBreak(const Stmt *S);
2073 
2074   /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
2075   /// to a constant, or if it does but contains a label, return false.  If it
  /// constant folds, return true and set the boolean result in Result.
2077   bool ConstantFoldsToSimpleInteger(const Expr *Cond, bool &Result);
2078 
2079   /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
2080   /// to a constant, or if it does but contains a label, return false.  If it
  /// constant folds, return true and set the folded value in Result.
2082   bool ConstantFoldsToSimpleInteger(const Expr *Cond, llvm::APInt &Result);
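  // Illustrative sketch of how a statement emitter might combine these
  // helpers to skip a provably dead branch of a hypothetical IfStmt S (an
  // assumption about usage, not a transcription of EmitIfStmt):
  //
  //   bool CondValue;
  //   if (ConstantFoldsToSimpleInteger(S.getCond(), CondValue)) {
  //     const Stmt *Live = CondValue ? S.getThen() : S.getElse();
  //     const Stmt *Dead = CondValue ? S.getElse() : S.getThen();
  //     if (!ContainsLabel(Dead)) {
  //       if (Live) EmitStmt(Live);
  //       return;
  //     }
  //   }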
2083 
2084   /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an
2085   /// if statement) to the specified blocks.  Based on the condition, this might
2086   /// try to simplify the codegen of the conditional based on the branch.
2087   void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock,
2088                             llvm::BasicBlock *FalseBlock);
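  // Illustrative sketch (hypothetical condition Cond; createBasicBlock is
  // declared earlier in this class):
  //
  //   llvm::BasicBlock *ThenBlock = createBasicBlock("if.then");
  //   llvm::BasicBlock *ElseBlock = createBasicBlock("if.else");
  //   EmitBranchOnBoolExpr(Cond, ThenBlock, ElseBlock);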
2089 
2090   /// getTrapBB - Create a basic block that will call the trap intrinsic.  We'll
2091   /// generate a branch around the created basic block as necessary.
2092   llvm::BasicBlock *getTrapBB();
2093 
2094   /// EmitCallArg - Emit a single call argument.
2095   void EmitCallArg(CallArgList &args, const Expr *E, QualType ArgType);
2096 
2097   /// EmitDelegateCallArg - We are performing a delegate call; that
2098   /// is, the current function is delegating to another one.  Produce
  /// an r-value suitable for passing the given parameter.
2100   void EmitDelegateCallArg(CallArgList &args, const VarDecl *param);
2101 
2102 private:
2103   void EmitReturnOfRValue(RValue RV, QualType Ty);
2104 
2105   /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty
2106   /// from function arguments into \arg Dst. See ABIArgInfo::Expand.
2107   ///
2108   /// \param AI - The first function argument of the expansion.
2109   /// \return The argument following the last expanded function
2110   /// argument.
2111   llvm::Function::arg_iterator
2112   ExpandTypeFromArgs(QualType Ty, LValue Dst,
2113                      llvm::Function::arg_iterator AI);
2114 
2115   /// ExpandTypeToArgs - Expand an RValue \arg Src, with the LLVM type for \arg
2116   /// Ty, into individual arguments on the provided vector \arg Args. See
2117   /// ABIArgInfo::Expand.
2118   void ExpandTypeToArgs(QualType Ty, RValue Src,
2119                         llvm::SmallVector<llvm::Value*, 16> &Args);
2120 
2121   llvm::Value* EmitAsmInput(const AsmStmt &S,
2122                             const TargetInfo::ConstraintInfo &Info,
2123                             const Expr *InputExpr, std::string &ConstraintStr);
2124 
2125   llvm::Value* EmitAsmInputLValue(const AsmStmt &S,
2126                                   const TargetInfo::ConstraintInfo &Info,
2127                                   LValue InputValue, QualType InputType,
2128                                   std::string &ConstraintStr);
2129 
2130   /// EmitCallArgs - Emit call arguments for a function.
2131   /// The CallArgTypeInfo parameter is used for iterating over the known
2132   /// argument types of the function being called.
2133   template<typename T>
2134   void EmitCallArgs(CallArgList& Args, const T* CallArgTypeInfo,
2135                     CallExpr::const_arg_iterator ArgBeg,
2136                     CallExpr::const_arg_iterator ArgEnd) {
    CallExpr::const_arg_iterator Arg = ArgBeg;

    // First, use the argument types that the type info knows about.
2140     if (CallArgTypeInfo) {
2141       for (typename T::arg_type_iterator I = CallArgTypeInfo->arg_type_begin(),
2142            E = CallArgTypeInfo->arg_type_end(); I != E; ++I, ++Arg) {
2143         assert(Arg != ArgEnd && "Running over edge of argument list!");
2144         QualType ArgType = *I;
2145 #ifndef NDEBUG
2146         QualType ActualArgType = Arg->getType();
2147         if (ArgType->isPointerType() && ActualArgType->isPointerType()) {
2148           QualType ActualBaseType =
2149             ActualArgType->getAs<PointerType>()->getPointeeType();
2150           QualType ArgBaseType =
2151             ArgType->getAs<PointerType>()->getPointeeType();
2152           if (ArgBaseType->isVariableArrayType()) {
2153             if (const VariableArrayType *VAT =
2154                 getContext().getAsVariableArrayType(ActualBaseType)) {
2155               if (!VAT->getSizeExpr())
2156                 ActualArgType = ArgType;
2157             }
2158           }
2159         }
2160         assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
2161                getTypePtr() ==
2162                getContext().getCanonicalType(ActualArgType).getTypePtr() &&
2163                "type mismatch in call argument!");
2164 #endif
2165         EmitCallArg(Args, *Arg, ArgType);
2166       }
2167 
2168       // Either we've emitted all the call args, or we have a call to a
2169       // variadic function.
2170       assert((Arg == ArgEnd || CallArgTypeInfo->isVariadic()) &&
2171              "Extra arguments in non-variadic function!");
2172 
2173     }
2174 
2175     // If we still have any arguments, emit them using the type of the argument.
2176     for (; Arg != ArgEnd; ++Arg)
2177       EmitCallArg(Args, *Arg, Arg->getType());
2178   }
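  // Illustrative sketch (hypothetical FPT and CE): emitting the arguments of
  // a call expression against a known prototype; FPT may be null for an
  // unprototyped callee.
  //
  //   CallArgList Args;
  //   EmitCallArgs(Args, FPT, CE->arg_begin(), CE->arg_end());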
2179 
2180   const TargetCodeGenInfo &getTargetHooks() const {
2181     return CGM.getTargetCodeGenInfo();
2182   }
2183 
2184   void EmitDeclMetadata();
2185 
2186   CodeGenModule::ByrefHelpers *
2187   buildByrefHelpers(const llvm::StructType &byrefType,
2188                     const AutoVarEmission &emission);
2189 };
2190 
2191 /// Helper class with most of the code for saving a value for a
2192 /// conditional expression cleanup.
2193 struct DominatingLLVMValue {
2194   typedef llvm::PointerIntPair<llvm::Value*, 1, bool> saved_type;
2195 
2196   /// Answer whether the given value needs extra work to be saved.
2197   static bool needsSaving(llvm::Value *value) {
2198     // If it's not an instruction, we don't need to save.
2199     if (!isa<llvm::Instruction>(value)) return false;
2200 
2201     // If it's an instruction in the entry block, we don't need to save.
2202     llvm::BasicBlock *block = cast<llvm::Instruction>(value)->getParent();
2203     return (block != &block->getParent()->getEntryBlock());
2204   }
2205 
2206   /// Try to save the given value.
2207   static saved_type save(CodeGenFunction &CGF, llvm::Value *value) {
2208     if (!needsSaving(value)) return saved_type(value, false);
2209 
2210     // Otherwise we need an alloca.
2211     llvm::Value *alloca =
2212       CGF.CreateTempAlloca(value->getType(), "cond-cleanup.save");
2213     CGF.Builder.CreateStore(value, alloca);
2214 
2215     return saved_type(alloca, true);
2216   }
2217 
2218   static llvm::Value *restore(CodeGenFunction &CGF, saved_type value) {
2219     if (!value.getInt()) return value.getPointer();
2220     return CGF.Builder.CreateLoad(value.getPointer());
2221   }
2222 };
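// Illustrative sketch (hypothetical CGF and value): spilling a value that may
// be an instruction so that a conditional cleanup can reload it later.
//
//   DominatingLLVMValue::saved_type saved =
//     DominatingLLVMValue::save(CGF, value);
//   ...
//   llvm::Value *reloaded = DominatingLLVMValue::restore(CGF, saved);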
2223 
2224 /// A partial specialization of DominatingValue for llvm::Values that
2225 /// might be llvm::Instructions.
2226 template <class T> struct DominatingPointer<T,true> : DominatingLLVMValue {
2227   typedef T *type;
2228   static type restore(CodeGenFunction &CGF, saved_type value) {
2229     return static_cast<T*>(DominatingLLVMValue::restore(CGF, value));
2230   }
2231 };
2232 
2233 /// A specialization of DominatingValue for RValue.
2234 template <> struct DominatingValue<RValue> {
2235   typedef RValue type;
2236   class saved_type {
2237     enum Kind { ScalarLiteral, ScalarAddress, AggregateLiteral,
2238                 AggregateAddress, ComplexAddress };
2239 
2240     llvm::Value *Value;
2241     Kind K;
2242     saved_type(llvm::Value *v, Kind k) : Value(v), K(k) {}
2243 
2244   public:
2245     static bool needsSaving(RValue value);
2246     static saved_type save(CodeGenFunction &CGF, RValue value);
2247     RValue restore(CodeGenFunction &CGF);
2248 
2249     // implementations in CGExprCXX.cpp
2250   };
2251 
2252   static bool needsSaving(type value) {
2253     return saved_type::needsSaving(value);
2254   }
2255   static saved_type save(CodeGenFunction &CGF, type value) {
2256     return saved_type::save(CGF, value);
2257   }
2258   static type restore(CodeGenFunction &CGF, saved_type value) {
2259     return value.restore(CGF);
2260   }
2261 };
2262 
2263 }  // end namespace CodeGen
2264 }  // end namespace clang
2265 
2266 #endif
2267