1 //===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This is the internal per-function state used for llvm translation.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #ifndef CLANG_CODEGEN_CODEGENFUNCTION_H
15 #define CLANG_CODEGEN_CODEGENFUNCTION_H
16 
17 #include "clang/AST/Type.h"
18 #include "clang/AST/ExprCXX.h"
19 #include "clang/AST/ExprObjC.h"
20 #include "clang/AST/CharUnits.h"
21 #include "clang/Frontend/CodeGenOptions.h"
22 #include "clang/Basic/ABI.h"
23 #include "clang/Basic/TargetInfo.h"
24 #include "llvm/ADT/ArrayRef.h"
25 #include "llvm/ADT/DenseMap.h"
26 #include "llvm/ADT/SmallVector.h"
27 #include "llvm/Support/ValueHandle.h"
28 #include "CodeGenModule.h"
29 #include "CGBuilder.h"
30 #include "CGValue.h"
31 
32 namespace llvm {
33   class BasicBlock;
34   class LLVMContext;
35   class MDNode;
36   class Module;
37   class SwitchInst;
38   class Twine;
39   class Value;
40   class CallSite;
41 }
42 
43 namespace clang {
44   class APValue;
45   class ASTContext;
46   class CXXDestructorDecl;
47   class CXXForRangeStmt;
48   class CXXTryStmt;
49   class Decl;
50   class LabelDecl;
51   class EnumConstantDecl;
52   class FunctionDecl;
53   class FunctionProtoType;
54   class LabelStmt;
55   class ObjCContainerDecl;
56   class ObjCInterfaceDecl;
57   class ObjCIvarDecl;
58   class ObjCMethodDecl;
59   class ObjCImplementationDecl;
60   class ObjCPropertyImplDecl;
61   class TargetInfo;
62   class TargetCodeGenInfo;
63   class VarDecl;
64   class ObjCForCollectionStmt;
65   class ObjCAtTryStmt;
66   class ObjCAtThrowStmt;
67   class ObjCAtSynchronizedStmt;
68   class ObjCAutoreleasePoolStmt;
69 
70 namespace CodeGen {
71   class CodeGenTypes;
72   class CGDebugInfo;
73   class CGFunctionInfo;
74   class CGRecordLayout;
75   class CGBlockInfo;
76   class CGCXXABI;
77   class BlockFlags;
78   class BlockFieldFlags;
79 
/// A branch fixup.  These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, InitialBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};
104 
/// A saving policy for values that are valid at any point of the
/// function: "saving" is the identity and is never actually needed.
/// Serves as the default base of DominatingValue (below).
template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  static bool needsSaving(type value) { return false; }
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};
112 
/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.
template <class T> struct DominatingValue : InvariantValue<T> {};

// A pointer only needs saving if it might point at an instruction:
// llvm::Constants and llvm::BasicBlocks trivially dominate every
// position, so pointers to them are invariant.
template <class T, bool mightBeInstruction =
            llvm::is_base_of<llvm::Value, T>::value &&
            !llvm::is_base_of<llvm::Constant, T>::value &&
            !llvm::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
126 
/// CleanupKind - A bitmask describing how a cleanup participates on
/// the EH scope stack: whether it fires on exceptional exits, normal
/// exits, or both, and whether it starts out inactive.
enum CleanupKind {
  /// The cleanup fires on exceptional (EH) exits.
  EHCleanup = 0x1,
  /// The cleanup fires on normal (fallthrough/branch) exits.
  NormalCleanup = 0x2,
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  /// The cleanup is pushed in a deactivated state; see
  /// CodeGenFunction::ActivateCleanupBlock.
  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};
137 
/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
///
/// Scopes live in a single contiguous buffer: the valid entries
/// occupy [StartOfData, EndOfBuffer), so the stack grows downward
/// toward StartOfBuffer as scopes are pushed.
class EHScopeStack {
public:
  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();
  public:
    /// Generation flags.
    class Flags {
      enum {
        F_IsForEH             = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind     = 0x4
      };
      unsigned flags;

    public:
      Flags() : flags(0) {}

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
    };

    // Provide a virtual destructor to suppress a very common warning
    // that unfortunately cannot be suppressed without this.  Cleanups
    // should not rely on this destructor ever being called.
    virtual ~Cleanup() {}

    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param flags - the kind of this cleanup and the current
    ///   emission mode; flags.isForEHCleanup() is true when this
    ///   emission is for an EH cleanup, false for a normal cleanup.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };

  /// ConditionalCleanupN stores the saved form of its N parameters,
  /// then restores them and performs the cleanup.
  template <class T, class A0>
  class ConditionalCleanup1 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    A0_saved a0_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      T(a0).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup1(A0_saved a0)
      : a0_saved(a0) {}
  };

  template <class T, class A0, class A1>
  class ConditionalCleanup2 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      T(a0, a1).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup2(A0_saved a0, A1_saved a1)
      : a0_saved(a0), a1_saved(a1) {}
  };

  template <class T, class A0, class A1, class A2>
  class ConditionalCleanup3 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    typedef typename DominatingValue<A2>::saved_type A2_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;
    A2_saved a2_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
      T(a0, a1, a2).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup3(A0_saved a0, A1_saved a1, A2_saved a2)
      : a0_saved(a0), a1_saved(a1), a2_saved(a2) {}
  };

  template <class T, class A0, class A1, class A2, class A3>
  class ConditionalCleanup4 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    typedef typename DominatingValue<A2>::saved_type A2_saved;
    typedef typename DominatingValue<A3>::saved_type A3_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;
    A2_saved a2_saved;
    A3_saved a3_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
      A3 a3 = DominatingValue<A3>::restore(CGF, a3_saved);
      T(a0, a1, a2, a3).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup4(A0_saved a0, A1_saved a1, A2_saved a2, A3_saved a3)
      : a0_saved(a0), a1_saved(a1), a2_saved(a2), a3_saved(a3) {}
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations;  e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  /// Allocates Size bytes of scope storage, growing the buffer if
  /// needed; implemented out of line.
  char *allocate(size_t Size);

  /// Out-of-line worker shared by the template pushCleanup overloads
  /// below: reserves DataSize bytes for a cleanup of kind K and
  /// returns the storage the caller placement-news the cleanup into.
  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHScope(stable_end()) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  // Variadic templates would make this not terrible.

  /// Push a lazily-created cleanup on the stack.
  template <class T>
  void pushCleanup(CleanupKind Kind) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T();
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0>
  void pushCleanup(CleanupKind Kind, A0 a0) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3, class A4>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class A0, class A1, class A2>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, a0, a1, a2);
  }

  /// Pops a cleanup scope off the stack.  This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.  This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  /// Determines whether a landing pad is required, i.e. whether any
  /// EH scope is live on the stack.
  bool requiresLandingPad() const {
    return InnermostEHScope != stable_end();
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }

  stable_iterator getInnermostActiveEHScope() const;

  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into a unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};
531 
532 /// CodeGenFunction - This class organizes the per-function state that is used
533 /// while generating LLVM code.
534 class CodeGenFunction : public CodeGenTypeCache {
535   CodeGenFunction(const CodeGenFunction&); // DO NOT IMPLEMENT
536   void operator=(const CodeGenFunction&);  // DO NOT IMPLEMENT
537 
538   friend class CGCXXABI;
539 public:
  /// A jump destination is an abstract label, branching to which may
  /// require a jump out through normal cleanups.
  struct JumpDest {
    JumpDest() : Block(0), ScopeDepth(), Index(0) {}
    JumpDest(llvm::BasicBlock *Block,
             EHScopeStack::stable_iterator Depth,
             unsigned Index)
      : Block(Block), ScopeDepth(Depth), Index(Index) {}

    /// A default-constructed JumpDest (null block) is invalid.
    bool isValid() const { return Block != 0; }
    llvm::BasicBlock *getBlock() const { return Block; }
    EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; }
    unsigned getDestIndex() const { return Index; }

  private:
    /// The target basic block; null marks an invalid destination.
    llvm::BasicBlock *Block;
    /// The EH-stack depth at which the destination lives.
    EHScopeStack::stable_iterator ScopeDepth;
    /// The cleanup-destination index used when threading branches
    /// through cleanups.
    unsigned Index;
  };
559 
560   CodeGenModule &CGM;  // Per-module state.
561   const TargetInfo &Target;
562 
563   typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy;
564   CGBuilderTy Builder;
565 
566   /// CurFuncDecl - Holds the Decl for the current function or ObjC method.
567   /// This excludes BlockDecls.
568   const Decl *CurFuncDecl;
569   /// CurCodeDecl - This is the inner-most code context, which includes blocks.
570   const Decl *CurCodeDecl;
571   const CGFunctionInfo *CurFnInfo;
572   QualType FnRetTy;
573   llvm::Function *CurFn;
574 
575   /// CurGD - The GlobalDecl for the current function being compiled.
576   GlobalDecl CurGD;
577 
578   /// PrologueCleanupDepth - The cleanup depth enclosing all the
579   /// cleanups associated with the parameters.
580   EHScopeStack::stable_iterator PrologueCleanupDepth;
581 
582   /// ReturnBlock - Unified return block.
583   JumpDest ReturnBlock;
584 
585   /// ReturnValue - The temporary alloca to hold the return value. This is null
586   /// iff the function has no return value.
587   llvm::Value *ReturnValue;
588 
589   /// AllocaInsertPoint - This is an instruction in the entry block before which
590   /// we prefer to insert allocas.
591   llvm::AssertingVH<llvm::Instruction> AllocaInsertPt;
592 
593   bool CatchUndefined;
594 
595   /// In ARC, whether we should autorelease the return value.
596   bool AutoreleaseResult;
597 
598   const CodeGen::CGBlockInfo *BlockInfo;
599   llvm::Value *BlockPointer;
600 
601   /// \brief A mapping from NRVO variables to the flags used to indicate
602   /// when the NRVO has been applied to this variable.
603   llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags;
604 
605   EHScopeStack EHStack;
606 
607   /// i32s containing the indexes of the cleanup destinations.
608   llvm::AllocaInst *NormalCleanupDest;
609 
610   unsigned NextCleanupDestIndex;
611 
612   /// EHResumeBlock - Unified block containing a call to llvm.eh.resume.
613   llvm::BasicBlock *EHResumeBlock;
614 
615   /// The exception slot.  All landing pads write the current
616   /// exception pointer into this alloca.
617   llvm::Value *ExceptionSlot;
618 
619   /// The selector slot.  Under the MandatoryCleanup model, all
620   /// landing pads write the current selector value into this alloca.
621   llvm::AllocaInst *EHSelectorSlot;
622 
623   /// Emits a landing pad for the current EH stack.
624   llvm::BasicBlock *EmitLandingPad();
625 
626   llvm::BasicBlock *getInvokeDestImpl();
627 
628   /// Set up the last cleaup that was pushed as a conditional
629   /// full-expression cleanup.
630   void initFullExprCleanup();
631 
  /// Saves \p value via its DominatingValue policy so it can be
  /// restored at an arbitrary dominated point later (e.g. inside a
  /// ConditionalCleanupN's Emit).
  template <class T>
  typename DominatingValue<T>::saved_type saveValueInCond(T value) {
    return DominatingValue<T>::save(*this, value);
  }
636 
637 public:
638   /// ObjCEHValueStack - Stack of Objective-C exception values, used for
639   /// rethrows.
640   SmallVector<llvm::Value*, 8> ObjCEHValueStack;
641 
  /// A class controlling the emission of a finally block.
  class FinallyInfo {
    /// Where the catchall's edge through the cleanup should go.
    JumpDest RethrowDest;

    /// A function to call to enter the catch.
    llvm::Constant *BeginCatchFn;

    /// An i1 variable indicating whether or not the @finally is
    /// running for an exception.
    llvm::AllocaInst *ForEHVar;

    /// An i8* variable into which the exception pointer to rethrow
    /// has been saved.
    llvm::AllocaInst *SavedExnVar;

  public:
    /// Begin emission of the finally region.  The begin/end/rethrow
    /// arguments are presumably runtime entry points used on the
    /// exceptional path (defined out of line — see CGException.cpp).
    /// Must be paired with a later call to exit().
    void enter(CodeGenFunction &CGF, const Stmt *Finally,
               llvm::Constant *beginCatchFn, llvm::Constant *endCatchFn,
               llvm::Constant *rethrowFn);
    /// Finish the finally region begun by enter().
    void exit(CodeGenFunction &CGF);
  };
664 
665   /// pushFullExprCleanup - Push a cleanup to be run at the end of the
666   /// current full-expression.  Safe against the possibility that
667   /// we're currently inside a conditionally-evaluated expression.
668   template <class T, class A0>
669   void pushFullExprCleanup(CleanupKind kind, A0 a0) {
670     // If we're not in a conditional branch, or if none of the
671     // arguments requires saving, then use the unconditional cleanup.
672     if (!isInConditionalBranch())
673       return EHStack.pushCleanup<T>(kind, a0);
674 
675     typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
676 
677     typedef EHScopeStack::ConditionalCleanup1<T, A0> CleanupType;
678     EHStack.pushCleanup<CleanupType>(kind, a0_saved);
679     initFullExprCleanup();
680   }
681 
682   /// pushFullExprCleanup - Push a cleanup to be run at the end of the
683   /// current full-expression.  Safe against the possibility that
684   /// we're currently inside a conditionally-evaluated expression.
685   template <class T, class A0, class A1>
686   void pushFullExprCleanup(CleanupKind kind, A0 a0, A1 a1) {
687     // If we're not in a conditional branch, or if none of the
688     // arguments requires saving, then use the unconditional cleanup.
689     if (!isInConditionalBranch())
690       return EHStack.pushCleanup<T>(kind, a0, a1);
691 
692     typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
693     typename DominatingValue<A1>::saved_type a1_saved = saveValueInCond(a1);
694 
695     typedef EHScopeStack::ConditionalCleanup2<T, A0, A1> CleanupType;
696     EHStack.pushCleanup<CleanupType>(kind, a0_saved, a1_saved);
697     initFullExprCleanup();
698   }
699 
700   /// pushFullExprCleanup - Push a cleanup to be run at the end of the
701   /// current full-expression.  Safe against the possibility that
702   /// we're currently inside a conditionally-evaluated expression.
703   template <class T, class A0, class A1, class A2>
704   void pushFullExprCleanup(CleanupKind kind, A0 a0, A1 a1, A2 a2) {
705     // If we're not in a conditional branch, or if none of the
706     // arguments requires saving, then use the unconditional cleanup.
707     if (!isInConditionalBranch()) {
708       return EHStack.pushCleanup<T>(kind, a0, a1, a2);
709     }
710 
711     typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
712     typename DominatingValue<A1>::saved_type a1_saved = saveValueInCond(a1);
713     typename DominatingValue<A2>::saved_type a2_saved = saveValueInCond(a2);
714 
715     typedef EHScopeStack::ConditionalCleanup3<T, A0, A1, A2> CleanupType;
716     EHStack.pushCleanup<CleanupType>(kind, a0_saved, a1_saved, a2_saved);
717     initFullExprCleanup();
718   }
719 
720   /// pushFullExprCleanup - Push a cleanup to be run at the end of the
721   /// current full-expression.  Safe against the possibility that
722   /// we're currently inside a conditionally-evaluated expression.
723   template <class T, class A0, class A1, class A2, class A3>
724   void pushFullExprCleanup(CleanupKind kind, A0 a0, A1 a1, A2 a2, A3 a3) {
725     // If we're not in a conditional branch, or if none of the
726     // arguments requires saving, then use the unconditional cleanup.
727     if (!isInConditionalBranch()) {
728       return EHStack.pushCleanup<T>(kind, a0, a1, a2, a3);
729     }
730 
731     typename DominatingValue<A0>::saved_type a0_saved = saveValueInCond(a0);
732     typename DominatingValue<A1>::saved_type a1_saved = saveValueInCond(a1);
733     typename DominatingValue<A2>::saved_type a2_saved = saveValueInCond(a2);
734     typename DominatingValue<A3>::saved_type a3_saved = saveValueInCond(a3);
735 
736     typedef EHScopeStack::ConditionalCleanup4<T, A0, A1, A2, A3> CleanupType;
737     EHStack.pushCleanup<CleanupType>(kind, a0_saved, a1_saved,
738                                      a2_saved, a3_saved);
739     initFullExprCleanup();
740   }
741 
742   /// PushDestructorCleanup - Push a cleanup to call the
743   /// complete-object destructor of an object of the given type at the
744   /// given address.  Does nothing if T is not a C++ class type with a
745   /// non-trivial destructor.
746   void PushDestructorCleanup(QualType T, llvm::Value *Addr);
747 
748   /// PushDestructorCleanup - Push a cleanup to call the
749   /// complete-object variant of the given destructor on the object at
750   /// the given address.
751   void PushDestructorCleanup(const CXXDestructorDecl *Dtor,
752                              llvm::Value *Addr);
753 
754   /// PopCleanupBlock - Will pop the cleanup entry on the stack and
755   /// process all branch fixups.
756   void PopCleanupBlock(bool FallThroughIsBranchThrough = false);
757 
758   /// DeactivateCleanupBlock - Deactivates the given cleanup block.
759   /// The block cannot be reactivated.  Pops it if it's the top of the
760   /// stack.
761   void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);
762 
763   /// ActivateCleanupBlock - Activates an initially-inactive cleanup.
764   /// Cannot be used to resurrect a deactivated cleanup.
765   void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup);
766 
  /// \brief Enters a new scope for capturing cleanups, all of which
  /// will be executed once the scope is exited.
  class RunCleanupsScope {
    CodeGenFunction& CGF;
    // EH-stack depth at scope entry; everything pushed above this is
    // popped when the scope exits.
    EHScopeStack::stable_iterator CleanupStackDepth;
    // Saved value of CGF.DidCallStackSave, restored on exit.
    bool OldDidCallStackSave;
    // True until cleanups have been emitted (by the destructor or by
    // ForceCleanup); guards against emitting them twice.
    bool PerformCleanup;

    RunCleanupsScope(const RunCleanupsScope &); // DO NOT IMPLEMENT
    RunCleanupsScope &operator=(const RunCleanupsScope &); // DO NOT IMPLEMENT

  public:
    /// \brief Enter a new cleanup scope.
    explicit RunCleanupsScope(CodeGenFunction &CGF)
      : CGF(CGF), PerformCleanup(true)
    {
      CleanupStackDepth = CGF.EHStack.stable_begin();
      OldDidCallStackSave = CGF.DidCallStackSave;
      CGF.DidCallStackSave = false;
    }

    /// \brief Exit this cleanup scope, emitting any accumulated
    /// cleanups.
    ~RunCleanupsScope() {
      if (PerformCleanup) {
        CGF.DidCallStackSave = OldDidCallStackSave;
        CGF.PopCleanupBlocks(CleanupStackDepth);
      }
    }

    /// \brief Determine whether this scope requires any cleanups.
    bool requiresCleanups() const {
      return CGF.EHStack.stable_begin() != CleanupStackDepth;
    }

    /// \brief Force the emission of cleanups now, instead of waiting
    /// until this object is destroyed.
    void ForceCleanup() {
      assert(PerformCleanup && "Already forced cleanup");
      CGF.DidCallStackSave = OldDidCallStackSave;
      CGF.PopCleanupBlocks(CleanupStackDepth);
      PerformCleanup = false;
    }
  };
811 
812 
813   /// PopCleanupBlocks - Takes the old cleanup stack size and emits
814   /// the cleanup blocks that have been added.
815   void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize);
816 
817   void ResolveBranchFixups(llvm::BasicBlock *Target);
818 
  /// The given basic block lies in the current EH scope, but may be a
  /// target of a potentially scope-crossing jump; get a stable handle
  /// to which we can perform this jump later.
  /// Each destination receives a fresh index from NextCleanupDestIndex.
  JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) {
    return JumpDest(Target,
                    EHStack.getInnermostNormalCleanup(),
                    NextCleanupDestIndex++);
  }
827 
828   /// The given basic block lies in the current EH scope, but may be a
829   /// target of a potentially scope-crossing jump; get a stable handle
830   /// to which we can perform this jump later.
831   JumpDest getJumpDestInCurrentScope(StringRef Name = StringRef()) {
832     return getJumpDestInCurrentScope(createBasicBlock(Name));
833   }
834 
835   /// EmitBranchThroughCleanup - Emit a branch from the current insert
836   /// block through the normal cleanup handling code (if any) and then
837   /// on to \arg Dest.
838   void EmitBranchThroughCleanup(JumpDest Dest);
839 
840   /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
841   /// specified destination obviously has no cleanups to run.  'false' is always
842   /// a conservatively correct answer for this method.
843   bool isObviouslyBranchWithoutCleanups(JumpDest Dest) const;
844 
845   /// popCatchScope - Pops the catch scope at the top of the EHScope
846   /// stack, emitting any required code (other than the catch handlers
847   /// themselves).
848   void popCatchScope();
849 
850   llvm::BasicBlock *getEHResumeBlock();
851   llvm::BasicBlock *getEHDispatchBlock(EHScopeStack::stable_iterator scope);
852 
853   /// An object to manage conditionally-evaluated expressions.
854   class ConditionalEvaluation {
855     llvm::BasicBlock *StartBB;
856 
857   public:
858     ConditionalEvaluation(CodeGenFunction &CGF)
859       : StartBB(CGF.Builder.GetInsertBlock()) {}
860 
861     void begin(CodeGenFunction &CGF) {
862       assert(CGF.OutermostConditional != this);
863       if (!CGF.OutermostConditional)
864         CGF.OutermostConditional = this;
865     }
866 
867     void end(CodeGenFunction &CGF) {
868       assert(CGF.OutermostConditional != 0);
869       if (CGF.OutermostConditional == this)
870         CGF.OutermostConditional = 0;
871     }
872 
873     /// Returns a block which will be executed prior to each
874     /// evaluation of the conditional code.
875     llvm::BasicBlock *getStartingBlock() const {
876       return StartBB;
877     }
878   };
879 
  /// isInConditionalBranch - Return true if we're currently emitting
  /// one branch or the other of a conditional expression.
  bool isInConditionalBranch() const { return OutermostConditional != 0; }

  /// An RAII object to record that we're evaluating a statement
  /// expression.
  class StmtExprEvaluation {
    CodeGenFunction &CGF;

    /// We have to save the outermost conditional: cleanups in a
    /// statement expression aren't conditional just because the
    /// StmtExpr is.
    ConditionalEvaluation *SavedOutermostConditional;

  public:
    StmtExprEvaluation(CodeGenFunction &CGF)
      : CGF(CGF), SavedOutermostConditional(CGF.OutermostConditional) {
      CGF.OutermostConditional = 0;
    }

    ~StmtExprEvaluation() {
      // Restore the saved conditional state, and make sure there is an
      // insertion point after the statement expression.
      CGF.OutermostConditional = SavedOutermostConditional;
      CGF.EnsureInsertPoint();
    }
  };

  /// An object which temporarily prevents a value from being
  /// destroyed by aggressive peephole optimizations that assume that
  /// all uses of a value have been realized in the IR.
  class PeepholeProtection {
    // Null when no protection is active; managed by CodeGenFunction's
    // protectFromPeepholes/unprotectFromPeepholes.
    llvm::Instruction *Inst;
    friend class CodeGenFunction;

  public:
    PeepholeProtection() : Inst(0) {}
  };
916 
  /// An RAII object to set (and then clear) a mapping for an OpaqueValueExpr.
  class OpaqueValueMapping {
    CodeGenFunction &CGF;
    /// The expression being mapped; null once the mapping has been popped.
    const OpaqueValueExpr *OpaqueValue;
    /// Whether the value was recorded in OpaqueLValues (true) or
    /// OpaqueRValues (false).
    bool BoundLValue;
    /// Peephole protection taken out for an r-value binding; unused for
    /// l-value bindings.
    CodeGenFunction::PeepholeProtection Protection;

  public:
    /// Returns true if the given expression should be bound as an l-value:
    /// glvalues and record-typed expressions.
    static bool shouldBindAsLValue(const Expr *expr) {
      return expr->isGLValue() || expr->getType()->isRecordType();
    }

    /// Build the opaque value mapping for the given conditional
    /// operator if it's the GNU ?: extension.  This is a common
    /// enough pattern that the convenience operator is really
    /// helpful.
    ///
    /// For an ordinary ConditionalOperator there is no opaque value, so
    /// the mapping is left inert.
    OpaqueValueMapping(CodeGenFunction &CGF,
                       const AbstractConditionalOperator *op) : CGF(CGF) {
      if (isa<ConditionalOperator>(op)) {
        OpaqueValue = 0;
        BoundLValue = false;
        return;
      }

      const BinaryConditionalOperator *e = cast<BinaryConditionalOperator>(op);
      init(e->getOpaqueValue(), e->getCommon());
    }

    /// Bind the given opaque value to an already-emitted l-value.
    OpaqueValueMapping(CodeGenFunction &CGF,
                       const OpaqueValueExpr *opaqueValue,
                       LValue lvalue)
      : CGF(CGF), OpaqueValue(opaqueValue), BoundLValue(true) {
      assert(opaqueValue && "no opaque value expression!");
      assert(shouldBindAsLValue(opaqueValue));
      initLValue(lvalue);
    }

    /// Bind the given opaque value to an already-emitted r-value.
    OpaqueValueMapping(CodeGenFunction &CGF,
                       const OpaqueValueExpr *opaqueValue,
                       RValue rvalue)
      : CGF(CGF), OpaqueValue(opaqueValue), BoundLValue(false) {
      assert(opaqueValue && "no opaque value expression!");
      assert(!shouldBindAsLValue(opaqueValue));
      initRValue(rvalue);
    }

    /// Remove the mapping now, instead of waiting until this object
    /// is destroyed.
    void pop() {
      assert(OpaqueValue && "mapping already popped!");
      popImpl();
      OpaqueValue = 0;
    }

    ~OpaqueValueMapping() {
      if (OpaqueValue) popImpl();
    }

  private:
    /// Erase the mapping from the appropriate map and, for r-values,
    /// release the peephole protection taken in initRValue().
    void popImpl() {
      if (BoundLValue)
        CGF.OpaqueLValues.erase(OpaqueValue);
      else {
        CGF.OpaqueRValues.erase(OpaqueValue);
        CGF.unprotectFromPeepholes(Protection);
      }
    }

    /// Emit the common expression and record it under the opaque value,
    /// as an l-value or r-value per shouldBindAsLValue.
    void init(const OpaqueValueExpr *ov, const Expr *e) {
      OpaqueValue = ov;
      BoundLValue = shouldBindAsLValue(ov);
      assert(BoundLValue == shouldBindAsLValue(e)
             && "inconsistent expression value kinds!");
      if (BoundLValue)
        initLValue(CGF.EmitLValue(e));
      else
        initRValue(CGF.EmitAnyExpr(e));
    }

    void initLValue(const LValue &lv) {
      CGF.OpaqueLValues.insert(std::make_pair(OpaqueValue, lv));
    }

    void initRValue(const RValue &rv) {
      // Work around an extremely aggressive peephole optimization in
      // EmitScalarConversion which assumes that all other uses of a
      // value are extant.
      Protection = CGF.protectFromPeepholes(rv);
      CGF.OpaqueRValues.insert(std::make_pair(OpaqueValue, rv));
    }
  };
1007 
  /// getByRefValueLLVMField - Given a declaration, returns the LLVM field
  /// number that holds the value.
  unsigned getByRefValueLLVMField(const ValueDecl *VD) const;

  /// BuildBlockByrefAddress - Computes address location of the
  /// variable which is declared as __block.
  llvm::Value *BuildBlockByrefAddress(llvm::Value *BaseAddr,
                                      const VarDecl *V);
private:
  CGDebugInfo *DebugInfo;
  /// DisableDebugInfo - Whether debug-info emission is currently
  /// suppressed; see disableDebugInfo()/enableDebugInfo().
  bool DisableDebugInfo;

  /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid
  /// calling llvm.stacksave for multiple VLAs in the same scope.
  bool DidCallStackSave;

  /// IndirectBranch - The first time an indirect goto is seen we create a block
  /// with an indirect branch.  Every time we see the address of a label taken,
  /// we add the label to the indirect goto.  Every subsequent indirect goto is
  /// codegen'd as a jump to the IndirectBranch's basic block.
  llvm::IndirectBrInst *IndirectBranch;

  /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C
  /// decls.
  typedef llvm::DenseMap<const Decl*, llvm::Value*> DeclMapTy;
  DeclMapTy LocalDeclMap;

  /// LabelMap - This keeps track of the LLVM basic block for each C label.
  llvm::DenseMap<const LabelDecl*, JumpDest> LabelMap;

  // BreakContinueStack - This keeps track of where break and continue
  // statements should jump to.
  struct BreakContinue {
    BreakContinue(JumpDest Break, JumpDest Continue)
      : BreakBlock(Break), ContinueBlock(Continue) {}

    JumpDest BreakBlock;
    JumpDest ContinueBlock;
  };
  SmallVector<BreakContinue, 8> BreakContinueStack;

  /// SwitchInsn - This is the nearest current switch instruction. It is null
  /// if the current context is not in a switch.
  llvm::SwitchInst *SwitchInsn;

  /// CaseRangeBlock - This block holds the condition check for the last case
  /// statement range in the current switch instruction.
  llvm::BasicBlock *CaseRangeBlock;

  /// OpaqueLValues/OpaqueRValues - Keep track of the current set of
  /// opaque value expressions; maintained by OpaqueValueMapping.
  llvm::DenseMap<const OpaqueValueExpr *, LValue> OpaqueLValues;
  llvm::DenseMap<const OpaqueValueExpr *, RValue> OpaqueRValues;

  // VLASizeMap - This keeps track of the associated size for each VLA type.
  // We track this by the size expression rather than the type itself because
  // in certain situations, like a const qualifier applied to an VLA typedef,
  // multiple VLA types can share the same size expression.
  // FIXME: Maybe this could be a stack of maps that is pushed/popped as we
  // enter/leave scopes.
  llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap;

  /// A block containing a single 'unreachable' instruction.  Created
  /// lazily by getUnreachableBlock().
  llvm::BasicBlock *UnreachableBlock;

  /// CXXThisDecl - When generating code for a C++ member function,
  /// this will hold the implicit 'this' declaration.
  ImplicitParamDecl *CXXThisDecl;
  llvm::Value *CXXThisValue;

  /// CXXVTTDecl - When generating code for a base object constructor or
  /// base object destructor with virtual bases, this will hold the implicit
  /// VTT parameter.
  ImplicitParamDecl *CXXVTTDecl;
  llvm::Value *CXXVTTValue;

  /// OutermostConditional - Points to the outermost active
  /// conditional control.  This is used so that we know if a
  /// temporary should be destroyed conditionally.
  ConditionalEvaluation *OutermostConditional;


  /// ByRefValueInfo - For each __block variable, contains a pair of the LLVM
  /// type as well as the field number that contains the actual data.
  llvm::DenseMap<const ValueDecl *, std::pair<llvm::Type *,
                                              unsigned> > ByRefValueInfo;

  llvm::BasicBlock *TerminateLandingPad;
  llvm::BasicBlock *TerminateHandler;
  llvm::BasicBlock *TrapBB;
1099 
public:
  /// Construct the per-function codegen state for the given module.
  CodeGenFunction(CodeGenModule &cgm);

  // Forwarding accessors into the enclosing CodeGenModule.
  CodeGenTypes &getTypes() const { return CGM.getTypes(); }
  ASTContext &getContext() const { return CGM.getContext(); }
1105   CGDebugInfo *getDebugInfo() {
1106     if (DisableDebugInfo)
1107       return NULL;
1108     return DebugInfo;
1109   }
  void disableDebugInfo() { DisableDebugInfo = true; }
  void enableDebugInfo() { DisableDebugInfo = false; }

  /// Fused ARC entry points are used only when not optimizing.
  bool shouldUseFusedARCCalls() {
    return CGM.getCodeGenOpts().OptimizationLevel == 0;
  }

  const LangOptions &getLangOptions() const { return CGM.getLangOptions(); }

  /// Returns a pointer to the function's exception object slot, which
  /// is assigned in every landing pad.
  llvm::Value *getExceptionSlot();
  llvm::Value *getEHSelectorSlot();

  llvm::Value *getNormalCleanupDestSlot();

  /// Lazily create (and cache) a block containing a single
  /// 'unreachable' instruction.
  llvm::BasicBlock *getUnreachableBlock() {
    if (!UnreachableBlock) {
      UnreachableBlock = createBasicBlock("unreachable");
      new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock);
    }
    return UnreachableBlock;
  }

  /// Returns the invoke destination, or null if the current EH stack
  /// does not require a landing pad.
  llvm::BasicBlock *getInvokeDest() {
    if (!EHStack.requiresLandingPad()) return 0;
    return getInvokeDestImpl();
  }

  llvm::LLVMContext &getLLVMContext() { return CGM.getLLVMContext(); }
1140 
1141   //===--------------------------------------------------------------------===//
1142   //                                  Cleanups
1143   //===--------------------------------------------------------------------===//
1144 
1145   typedef void Destroyer(CodeGenFunction &CGF, llvm::Value *addr, QualType ty);
1146 
1147   void pushIrregularPartialArrayCleanup(llvm::Value *arrayBegin,
1148                                         llvm::Value *arrayEndPointer,
1149                                         QualType elementType,
1150                                         Destroyer &destroyer);
1151   void pushRegularPartialArrayCleanup(llvm::Value *arrayBegin,
1152                                       llvm::Value *arrayEnd,
1153                                       QualType elementType,
1154                                       Destroyer &destroyer);
1155 
1156   void pushDestroy(QualType::DestructionKind dtorKind,
1157                    llvm::Value *addr, QualType type);
1158   void pushDestroy(CleanupKind kind, llvm::Value *addr, QualType type,
1159                    Destroyer &destroyer, bool useEHCleanupForArray);
1160   void emitDestroy(llvm::Value *addr, QualType type, Destroyer &destroyer,
1161                    bool useEHCleanupForArray);
1162   llvm::Function *generateDestroyHelper(llvm::Constant *addr,
1163                                         QualType type,
1164                                         Destroyer &destroyer,
1165                                         bool useEHCleanupForArray);
1166   void emitArrayDestroy(llvm::Value *begin, llvm::Value *end,
1167                         QualType type, Destroyer &destroyer,
1168                         bool checkZeroLength, bool useEHCleanup);
1169 
1170   Destroyer &getDestroyer(QualType::DestructionKind destructionKind);
1171 
1172   /// Determines whether an EH cleanup is required to destroy a type
1173   /// with the given destruction kind.
1174   bool needsEHCleanup(QualType::DestructionKind kind) {
1175     switch (kind) {
1176     case QualType::DK_none:
1177       return false;
1178     case QualType::DK_cxx_destructor:
1179     case QualType::DK_objc_weak_lifetime:
1180       return getLangOptions().Exceptions;
1181     case QualType::DK_objc_strong_lifetime:
1182       return getLangOptions().Exceptions &&
1183              CGM.getCodeGenOpts().ObjCAutoRefCountExceptions;
1184     }
1185     llvm_unreachable("bad destruction kind");
1186   }
1187 
1188   CleanupKind getCleanupKind(QualType::DestructionKind kind) {
1189     return (needsEHCleanup(kind) ? NormalAndEHCleanup : NormalCleanup);
1190   }
1191 
1192   //===--------------------------------------------------------------------===//
1193   //                                  Objective-C
1194   //===--------------------------------------------------------------------===//
1195 
1196   void GenerateObjCMethod(const ObjCMethodDecl *OMD);
1197 
1198   void StartObjCMethod(const ObjCMethodDecl *MD,
1199                        const ObjCContainerDecl *CD,
1200                        SourceLocation StartLoc);
1201 
1202   /// GenerateObjCGetter - Synthesize an Objective-C property getter function.
1203   void GenerateObjCGetter(ObjCImplementationDecl *IMP,
1204                           const ObjCPropertyImplDecl *PID);
1205   void generateObjCGetterBody(const ObjCImplementationDecl *classImpl,
1206                               const ObjCPropertyImplDecl *propImpl);
1207 
1208   void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
1209                                   ObjCMethodDecl *MD, bool ctor);
1210 
1211   /// GenerateObjCSetter - Synthesize an Objective-C property setter function
1212   /// for the given property.
1213   void GenerateObjCSetter(ObjCImplementationDecl *IMP,
1214                           const ObjCPropertyImplDecl *PID);
1215   void generateObjCSetterBody(const ObjCImplementationDecl *classImpl,
1216                               const ObjCPropertyImplDecl *propImpl);
1217   bool IndirectObjCSetterArg(const CGFunctionInfo &FI);
1218   bool IvarTypeWithAggrGCObjects(QualType Ty);
1219 
1220   //===--------------------------------------------------------------------===//
1221   //                                  Block Bits
1222   //===--------------------------------------------------------------------===//
1223 
1224   llvm::Value *EmitBlockLiteral(const BlockExpr *);
1225   llvm::Constant *BuildDescriptorBlockDecl(const BlockExpr *,
1226                                            const CGBlockInfo &Info,
1227                                            llvm::StructType *,
1228                                            llvm::Constant *BlockVarLayout);
1229 
1230   llvm::Function *GenerateBlockFunction(GlobalDecl GD,
1231                                         const CGBlockInfo &Info,
1232                                         const Decl *OuterFuncDecl,
1233                                         const DeclMapTy &ldm);
1234 
1235   llvm::Constant *GenerateCopyHelperFunction(const CGBlockInfo &blockInfo);
1236   llvm::Constant *GenerateDestroyHelperFunction(const CGBlockInfo &blockInfo);
1237 
1238   void BuildBlockRelease(llvm::Value *DeclPtr, BlockFieldFlags flags);
1239 
1240   class AutoVarEmission;
1241 
1242   void emitByrefStructureInit(const AutoVarEmission &emission);
1243   void enterByrefCleanup(const AutoVarEmission &emission);
1244 
1245   llvm::Value *LoadBlockStruct() {
1246     assert(BlockPointer && "no block pointer set!");
1247     return BlockPointer;
1248   }
1249 
1250   void AllocateBlockCXXThisPointer(const CXXThisExpr *E);
1251   void AllocateBlockDecl(const BlockDeclRefExpr *E);
1252   llvm::Value *GetAddrOfBlockDecl(const BlockDeclRefExpr *E) {
1253     return GetAddrOfBlockDecl(E->getDecl(), E->isByRef());
1254   }
1255   llvm::Value *GetAddrOfBlockDecl(const VarDecl *var, bool ByRef);
1256   llvm::Type *BuildByRefType(const VarDecl *var);
1257 
  void GenerateCode(GlobalDecl GD, llvm::Function *Fn,
                    const CGFunctionInfo &FnInfo);
  void StartFunction(GlobalDecl GD, QualType RetTy,
                     llvm::Function *Fn,
                     const CGFunctionInfo &FnInfo,
                     const FunctionArgList &Args,
                     SourceLocation StartLoc);

  void EmitConstructorBody(FunctionArgList &Args);
  void EmitDestructorBody(FunctionArgList &Args);
  void EmitFunctionBody(FunctionArgList &Args);

  /// EmitReturnBlock - Emit the unified return block, trying to avoid its
  /// emission when possible.
  void EmitReturnBlock();

  /// FinishFunction - Complete IR generation of the current function. It is
  /// legal to call this function even if there is no current insertion point.
  void FinishFunction(SourceLocation EndLoc=SourceLocation());

  /// GenerateThunk - Generate a thunk for the given method.
  void GenerateThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
                     GlobalDecl GD, const ThunkInfo &Thunk);

  /// GenerateVarArgsThunk - Generate a variable-arguments thunk for the
  /// given method.
  void GenerateVarArgsThunk(llvm::Function *Fn, const CGFunctionInfo &FnInfo,
                            GlobalDecl GD, const ThunkInfo &Thunk);

  void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type,
                        FunctionArgList &Args);

  /// InitializeVTablePointer - Initialize the vtable pointer of the given
  /// subobject.
  ///
  void InitializeVTablePointer(BaseSubobject Base,
                               const CXXRecordDecl *NearestVBase,
                               CharUnits OffsetFromNearestVBase,
                               llvm::Constant *VTable,
                               const CXXRecordDecl *VTableClass);

  typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy;
  void InitializeVTablePointers(BaseSubobject Base,
                                const CXXRecordDecl *NearestVBase,
                                CharUnits OffsetFromNearestVBase,
                                bool BaseIsNonVirtualPrimaryBase,
                                llvm::Constant *VTable,
                                const CXXRecordDecl *VTableClass,
                                VisitedVirtualBasesSetTy& VBases);

  void InitializeVTablePointers(const CXXRecordDecl *ClassDecl);

  /// GetVTablePtr - Return the Value of the vtable pointer member pointed
  /// to by This.
  llvm::Value *GetVTablePtr(llvm::Value *This, llvm::Type *Ty);

  /// EnterDtorCleanups - Enter the cleanups necessary to complete the
  /// given phase of destruction for a destructor.  The end result
  /// should call destructors on members and base classes in reverse
  /// order of their construction.
  void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type);

  /// ShouldInstrumentFunction - Return true if the current function should be
  /// instrumented with __cyg_profile_func_* calls
  bool ShouldInstrumentFunction();

  /// EmitFunctionInstrumentation - Emit LLVM code to call the specified
  /// instrumentation function with the current function and the call site, if
  /// function instrumentation is enabled.
  void EmitFunctionInstrumentation(const char *Fn);

  /// EmitMCountInstrumentation - Emit call to .mcount.
  void EmitMCountInstrumentation();

  /// EmitFunctionProlog - Emit the target specific LLVM code to load the
  /// arguments for the given function. This is also responsible for naming the
  /// LLVM function arguments.
  void EmitFunctionProlog(const CGFunctionInfo &FI,
                          llvm::Function *Fn,
                          const FunctionArgList &Args);

  /// EmitFunctionEpilog - Emit the target specific LLVM code to return the
  /// given temporary.
  void EmitFunctionEpilog(const CGFunctionInfo &FI);

  /// EmitStartEHSpec - Emit the start of the exception spec.
  void EmitStartEHSpec(const Decl *D);

  /// EmitEndEHSpec - Emit the end of the exception spec.
  void EmitEndEHSpec(const Decl *D);

  /// getTerminateLandingPad - Return a landing pad that just calls terminate.
  llvm::BasicBlock *getTerminateLandingPad();

  /// getTerminateHandler - Return a handler (not a landing pad, just
  /// a catch handler) that just calls terminate.  This is used when
  /// a terminate scope encloses a try.
  llvm::BasicBlock *getTerminateHandler();

  llvm::Type *ConvertTypeForMem(QualType T);
  llvm::Type *ConvertType(QualType T);
  llvm::Type *ConvertType(const TypeDecl *T) {
    return ConvertType(getContext().getTypeDeclType(T));
  }

  /// LoadObjCSelf - Load the value of self. This function is only valid while
  /// generating code for an Objective-C method.
  llvm::Value *LoadObjCSelf();

  /// TypeOfSelfObject - Return type of object that this self represents.
  QualType TypeOfSelfObject();

  /// hasAggregateLLVMType - Return true if the specified AST type will map into
  /// an aggregate LLVM type or is void.
  static bool hasAggregateLLVMType(QualType T);

  /// createBasicBlock - Create an LLVM basic block.
  llvm::BasicBlock *createBasicBlock(StringRef name = "",
                                     llvm::Function *parent = 0,
                                     llvm::BasicBlock *before = 0) {
    // Block names are only kept in asserts builds.
#ifdef NDEBUG
    return llvm::BasicBlock::Create(getLLVMContext(), "", parent, before);
#else
    return llvm::BasicBlock::Create(getLLVMContext(), name, parent, before);
#endif
  }

  /// getJumpDestForLabel - Return the jump destination that the specified
  /// label maps to.
  JumpDest getJumpDestForLabel(const LabelDecl *S);

  /// SimplifyForwardingBlocks - If the given basic block is only a branch to
  /// another basic block, simplify it. This assumes that no other code could
  /// potentially reference the basic block.
  void SimplifyForwardingBlocks(llvm::BasicBlock *BB);

  /// EmitBlock - Emit the given block \arg BB and set it as the insert point,
  /// adding a fall-through branch from the current insert block if
  /// necessary. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// IsFinished - If true, indicates that the caller has finished emitting
  /// branches to the given block and does not expect to emit code into it. This
  /// means the block can be ignored if it is unreachable.
  void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);

  /// EmitBlockAfterUses - Emit the given block somewhere hopefully
  /// near its uses, and leave the insertion point in it.
  void EmitBlockAfterUses(llvm::BasicBlock *BB);

  /// EmitBranch - Emit a branch to the specified basic block from the current
  /// insert block, taking care to avoid creation of branches from dummy
  /// blocks. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// This function clears the current insertion point. The caller should follow
  /// calls to this function with calls to Emit*Block prior to generating new
  /// code.
  void EmitBranch(llvm::BasicBlock *Block);

  /// HaveInsertPoint - True if an insertion point is defined. If not, this
  /// indicates that the current code being emitted is unreachable.
  bool HaveInsertPoint() const {
    return Builder.GetInsertBlock() != 0;
  }

  /// EnsureInsertPoint - Ensure that an insertion point is defined so that
  /// emitted IR has a place to go. Note that by definition, if this function
  /// creates a block then that block is unreachable; callers may do better to
  /// detect when no insertion point is defined and simply skip IR generation.
  void EnsureInsertPoint() {
    if (!HaveInsertPoint())
      EmitBlock(createBasicBlock());
  }

  /// ErrorUnsupported - Print out an error that codegen doesn't support the
  /// specified stmt yet.
  void ErrorUnsupported(const Stmt *S, const char *Type,
                        bool OmitOnError=false);
1435 
1436   //===--------------------------------------------------------------------===//
1437   //                                  Helpers
1438   //===--------------------------------------------------------------------===//
1439 
1440   LValue MakeAddrLValue(llvm::Value *V, QualType T, unsigned Alignment = 0) {
1441     return LValue::MakeAddr(V, T, Alignment, getContext(),
1442                             CGM.getTBAAInfo(T));
1443   }
1444 
1445   /// CreateTempAlloca - This creates a alloca and inserts it into the entry
1446   /// block. The caller is responsible for setting an appropriate alignment on
1447   /// the alloca.
1448   llvm::AllocaInst *CreateTempAlloca(llvm::Type *Ty,
1449                                      const Twine &Name = "tmp");
1450 
1451   /// InitTempAlloca - Provide an initial value for the given alloca.
1452   void InitTempAlloca(llvm::AllocaInst *Alloca, llvm::Value *Value);
1453 
1454   /// CreateIRTemp - Create a temporary IR object of the given type, with
1455   /// appropriate alignment. This routine should only be used when an temporary
1456   /// value needs to be stored into an alloca (for example, to avoid explicit
1457   /// PHI construction), but the type is the IR type, not the type appropriate
1458   /// for storing in memory.
1459   llvm::AllocaInst *CreateIRTemp(QualType T, const Twine &Name = "tmp");
1460 
1461   /// CreateMemTemp - Create a temporary memory object of the given type, with
1462   /// appropriate alignment.
1463   llvm::AllocaInst *CreateMemTemp(QualType T, const Twine &Name = "tmp");
1464 
1465   /// CreateAggTemp - Create a temporary memory object for the given
1466   /// aggregate type.
1467   AggValueSlot CreateAggTemp(QualType T, const Twine &Name = "tmp") {
1468     return AggValueSlot::forAddr(CreateMemTemp(T, Name), T.getQualifiers(),
1469                                  AggValueSlot::IsNotDestructed,
1470                                  AggValueSlot::DoesNotNeedGCBarriers,
1471                                  AggValueSlot::IsNotAliased);
1472   }
1473 
1474   /// Emit a cast to void* in the appropriate address space.
1475   llvm::Value *EmitCastToVoidPtr(llvm::Value *value);
1476 
1477   /// EvaluateExprAsBool - Perform the usual unary conversions on the specified
1478   /// expression and compare the result against zero, returning an Int1Ty value.
1479   llvm::Value *EvaluateExprAsBool(const Expr *E);
1480 
1481   /// EmitIgnoredExpr - Emit an expression in a context which ignores the result.
1482   void EmitIgnoredExpr(const Expr *E);
1483 
1484   /// EmitAnyExpr - Emit code to compute the specified expression which can have
1485   /// any type.  The result is returned as an RValue struct.  If this is an
1486   /// aggregate expression, the aggloc/agglocvolatile arguments indicate where
1487   /// the result should be returned.
1488   ///
1489   /// \param IgnoreResult - True if the resulting value isn't used.
1490   RValue EmitAnyExpr(const Expr *E,
1491                      AggValueSlot AggSlot = AggValueSlot::ignored(),
1492                      bool IgnoreResult = false);
1493 
1494   // EmitVAListRef - Emit a "reference" to a va_list; this is either the address
1495   // or the value of the expression, depending on how va_list is defined.
1496   llvm::Value *EmitVAListRef(const Expr *E);
1497 
1498   /// EmitAnyExprToTemp - Similary to EmitAnyExpr(), however, the result will
1499   /// always be accessible even if no aggregate location is provided.
1500   RValue EmitAnyExprToTemp(const Expr *E);
1501 
1502   /// EmitAnyExprToMem - Emits the code necessary to evaluate an
1503   /// arbitrary expression into the given memory location.
1504   void EmitAnyExprToMem(const Expr *E, llvm::Value *Location,
1505                         Qualifiers Quals, bool IsInitializer);
1506 
1507   /// EmitExprAsInit - Emits the code necessary to initialize a
1508   /// location in memory with the given initializer.
1509   void EmitExprAsInit(const Expr *init, const ValueDecl *D,
1510                       LValue lvalue, bool capturedByInit);
1511 
1512   /// EmitAggregateCopy - Emit an aggrate copy.
1513   ///
1514   /// \param isVolatile - True iff either the source or the destination is
1515   /// volatile.
1516   void EmitAggregateCopy(llvm::Value *DestPtr, llvm::Value *SrcPtr,
1517                          QualType EltTy, bool isVolatile=false);
1518 
1519   /// StartBlock - Start new block named N. If insert block is a dummy block
1520   /// then reuse it.
1521   void StartBlock(const char *N);
1522 
1523   /// GetAddrOfStaticLocalVar - Return the address of a static local variable.
1524   llvm::Constant *GetAddrOfStaticLocalVar(const VarDecl *BVD) {
1525     return cast<llvm::Constant>(GetAddrOfLocalVar(BVD));
1526   }
1527 
1528   /// GetAddrOfLocalVar - Return the address of a local variable.
1529   llvm::Value *GetAddrOfLocalVar(const VarDecl *VD) {
1530     llvm::Value *Res = LocalDeclMap[VD];
1531     assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
1532     return Res;
1533   }
1534 
1535   /// getOpaqueLValueMapping - Given an opaque value expression (which
1536   /// must be mapped to an l-value), return its mapping.
1537   const LValue &getOpaqueLValueMapping(const OpaqueValueExpr *e) {
1538     assert(OpaqueValueMapping::shouldBindAsLValue(e));
1539 
1540     llvm::DenseMap<const OpaqueValueExpr*,LValue>::iterator
1541       it = OpaqueLValues.find(e);
1542     assert(it != OpaqueLValues.end() && "no mapping for opaque value!");
1543     return it->second;
1544   }
1545 
1546   /// getOpaqueRValueMapping - Given an opaque value expression (which
1547   /// must be mapped to an r-value), return its mapping.
1548   const RValue &getOpaqueRValueMapping(const OpaqueValueExpr *e) {
1549     assert(!OpaqueValueMapping::shouldBindAsLValue(e));
1550 
1551     llvm::DenseMap<const OpaqueValueExpr*,RValue>::iterator
1552       it = OpaqueRValues.find(e);
1553     assert(it != OpaqueRValues.end() && "no mapping for opaque value!");
1554     return it->second;
1555   }
1556 
  /// getAccessedFieldNo - Given an encoded value and a result number, return
  /// the input field number being accessed.
  static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);

  llvm::BlockAddress *GetAddrOfLabel(const LabelDecl *L);
  llvm::BasicBlock *GetIndirectGotoBlock();

  /// EmitNullInitialization - Generate code to set a value of the given type to
  /// null. If the type contains data member pointers, they will be initialized
  /// to -1 in accordance with the Itanium C++ ABI.
  void EmitNullInitialization(llvm::Value *DestPtr, QualType Ty);

  // EmitVAArg - Generate code to get an argument from the passed in pointer
  // and update it accordingly. The return value is a pointer to the argument.
  // FIXME: We should be able to get rid of this method and use the va_arg
  // instruction in LLVM instead once it works well enough.
  llvm::Value *EmitVAArg(llvm::Value *VAListAddr, QualType Ty);

  /// emitArrayLength - Compute the length of an array, even if it's a
  /// VLA, and drill down to the base element type.
  llvm::Value *emitArrayLength(const ArrayType *arrayType,
                               QualType &baseType,
                               llvm::Value *&addr);

  /// EmitVariablyModifiedType - Capture all the sizes for the VLA expressions
  /// in the given variably-modified type and store them in the VLASizeMap.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitVariablyModifiedType(QualType Ty);

  /// getVLASize - Returns an LLVM value that corresponds to the size,
  /// in non-variably-sized elements, of a variable length array type,
  /// plus that largest non-variably-sized element type.  Assumes that
  /// the type has already been emitted with EmitVariablyModifiedType.
  std::pair<llvm::Value*,QualType> getVLASize(const VariableArrayType *vla);
  std::pair<llvm::Value*,QualType> getVLASize(QualType vla);
1593 
1594   /// LoadCXXThis - Load the value of 'this'. This function is only valid while
1595   /// generating code for an C++ member function.
1596   llvm::Value *LoadCXXThis() {
1597     assert(CXXThisValue && "no 'this' value for this function");
1598     return CXXThisValue;
1599   }
1600 
1601   /// LoadCXXVTT - Load the VTT parameter to base constructors/destructors have
1602   /// virtual bases.
1603   llvm::Value *LoadCXXVTT() {
1604     assert(CXXVTTValue && "no VTT value for this function");
1605     return CXXVTTValue;
1606   }
1607 
  /// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
  /// complete class to the given direct base.
  llvm::Value *
  GetAddressOfDirectBaseInCompleteClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        const CXXRecordDecl *Base,
                                        bool BaseIsVirtual);

  /// GetAddressOfBaseClass - This function will add the necessary delta to the
  /// load of 'this' and returns address of the base class.
  llvm::Value *GetAddressOfBaseClass(llvm::Value *Value,
                                     const CXXRecordDecl *Derived,
                                     CastExpr::path_const_iterator PathBegin,
                                     CastExpr::path_const_iterator PathEnd,
                                     bool NullCheckValue);

  llvm::Value *GetAddressOfDerivedClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                        CastExpr::path_const_iterator PathEnd,
                                        bool NullCheckValue);

  llvm::Value *GetVirtualBaseClassOffset(llvm::Value *This,
                                         const CXXRecordDecl *ClassDecl,
                                         const CXXRecordDecl *BaseClassDecl);

  void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                      CXXCtorType CtorType,
                                      const FunctionArgList &Args);
  // It's important not to confuse this and the previous function. Delegating
  // constructors are the C++0x feature. The constructor delegate optimization
  // is used to reduce duplication in the base and complete constructors where
  // they are substantially the same.
  void EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                        const FunctionArgList &Args);
  void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
                              bool ForVirtualBase, llvm::Value *This,
                              CallExpr::const_arg_iterator ArgBeg,
                              CallExpr::const_arg_iterator ArgEnd);

  void EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                              llvm::Value *This, llvm::Value *Src,
                              CallExpr::const_arg_iterator ArgBeg,
                              CallExpr::const_arg_iterator ArgEnd);

  // Array-constructor helpers: the first overload takes a constant array
  // type, the second an explicit element count.
  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  const ConstantArrayType *ArrayTy,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  llvm::Value *NumElements,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  static Destroyer destroyCXXObject;

  void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
                             bool ForVirtualBase, llvm::Value *This);

  void EmitNewArrayInitializer(const CXXNewExpr *E, QualType elementType,
                               llvm::Value *NewPtr, llvm::Value *NumElements);

  void EmitCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr);

  llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
  void EmitCXXDeleteExpr(const CXXDeleteExpr *E);

  void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr,
                      QualType DeleteTy);

  llvm::Value* EmitCXXTypeidExpr(const CXXTypeidExpr *E);
  llvm::Value *EmitDynamicCast(llvm::Value *V, const CXXDynamicCastExpr *DCE);

  void EmitCheck(llvm::Value *, unsigned Size);

  llvm::Value *EmitScalarPrePostIncDec(const UnaryOperator *E, LValue LV,
                                       bool isInc, bool isPre);
  ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
                                         bool isInc, bool isPre);
  //===--------------------------------------------------------------------===//
  //                            Declaration Emission
  //===--------------------------------------------------------------------===//

  /// EmitDecl - Emit a declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitDecl(const Decl &D);

  /// EmitVarDecl - Emit a local variable declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitVarDecl(const VarDecl &D);

  void EmitScalarInit(const Expr *init, const ValueDecl *D,
                      LValue lvalue, bool capturedByInit);
  void EmitScalarInit(llvm::Value *init, LValue lvalue);

  typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D,
                             llvm::Value *Address);

  /// EmitAutoVarDecl - Emit an auto variable declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitAutoVarDecl(const VarDecl &D);
1717 
1718   class AutoVarEmission {
1719     friend class CodeGenFunction;
1720 
1721     const VarDecl *Variable;
1722 
1723     /// The alignment of the variable.
1724     CharUnits Alignment;
1725 
1726     /// The address of the alloca.  Null if the variable was emitted
1727     /// as a global constant.
1728     llvm::Value *Address;
1729 
1730     llvm::Value *NRVOFlag;
1731 
1732     /// True if the variable is a __block variable.
1733     bool IsByRef;
1734 
1735     /// True if the variable is of aggregate type and has a constant
1736     /// initializer.
1737     bool IsConstantAggregate;
1738 
1739     struct Invalid {};
1740     AutoVarEmission(Invalid) : Variable(0) {}
1741 
1742     AutoVarEmission(const VarDecl &variable)
1743       : Variable(&variable), Address(0), NRVOFlag(0),
1744         IsByRef(false), IsConstantAggregate(false) {}
1745 
1746     bool wasEmittedAsGlobal() const { return Address == 0; }
1747 
1748   public:
1749     static AutoVarEmission invalid() { return AutoVarEmission(Invalid()); }
1750 
1751     /// Returns the address of the object within this declaration.
1752     /// Note that this does not chase the forwarding pointer for
1753     /// __block decls.
1754     llvm::Value *getObjectAddress(CodeGenFunction &CGF) const {
1755       if (!IsByRef) return Address;
1756 
1757       return CGF.Builder.CreateStructGEP(Address,
1758                                          CGF.getByRefValueLLVMField(Variable),
1759                                          Variable->getNameAsString());
1760     }
1761   };
  AutoVarEmission EmitAutoVarAlloca(const VarDecl &var);
  void EmitAutoVarInit(const AutoVarEmission &emission);
  void EmitAutoVarCleanups(const AutoVarEmission &emission);
  void emitAutoVarTypeCleanup(const AutoVarEmission &emission,
                              QualType::DestructionKind dtorKind);

  void EmitStaticVarDecl(const VarDecl &D,
                         llvm::GlobalValue::LinkageTypes Linkage);

  /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl.
  void EmitParmDecl(const VarDecl &D, llvm::Value *Arg, unsigned ArgNo);

  /// protectFromPeepholes - Protect a value that we're intending to
  /// store to the side, but which will probably be used later, from
  /// aggressive peepholing optimizations that might delete it.
  ///
  /// Pass the result to unprotectFromPeepholes to declare that
  /// protection is no longer required.
  ///
  /// There's no particular reason why this shouldn't apply to
  /// l-values, it's just that no existing peepholes work on pointers.
  PeepholeProtection protectFromPeepholes(RValue rvalue);
  void unprotectFromPeepholes(PeepholeProtection protection);

  //===--------------------------------------------------------------------===//
  //                             Statement Emission
  //===--------------------------------------------------------------------===//

  /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info.
  void EmitStopPoint(const Stmt *S);

  /// EmitStmt - Emit the code for the statement \arg S. It is legal to call
  /// this function even if there is no current insertion point.
  ///
  /// This function may clear the current insertion point; callers should use
  /// EnsureInsertPoint if they wish to subsequently generate code without first
  /// calling EmitBlock, EmitBranch, or EmitStmt.
  void EmitStmt(const Stmt *S);

  /// EmitSimpleStmt - Try to emit a "simple" statement which does not
  /// necessarily require an insertion point or debug information; typically
  /// because the statement amounts to a jump or a container of other
  /// statements.
  ///
  /// \return True if the statement was handled.
  bool EmitSimpleStmt(const Stmt *S);

  RValue EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
                          AggValueSlot AVS = AggValueSlot::ignored());

  /// EmitLabel - Emit the block for the given label. It is legal to call this
  /// function even if there is no current insertion point.
  void EmitLabel(const LabelDecl *D); // helper for EmitLabelStmt.

  // Per-statement-kind emitters.
  void EmitLabelStmt(const LabelStmt &S);
  void EmitGotoStmt(const GotoStmt &S);
  void EmitIndirectGotoStmt(const IndirectGotoStmt &S);
  void EmitIfStmt(const IfStmt &S);
  void EmitWhileStmt(const WhileStmt &S);
  void EmitDoStmt(const DoStmt &S);
  void EmitForStmt(const ForStmt &S);
  void EmitReturnStmt(const ReturnStmt &S);
  void EmitDeclStmt(const DeclStmt &S);
  void EmitBreakStmt(const BreakStmt &S);
  void EmitContinueStmt(const ContinueStmt &S);
  void EmitSwitchStmt(const SwitchStmt &S);
  void EmitDefaultStmt(const DefaultStmt &S);
  void EmitCaseStmt(const CaseStmt &S);
  void EmitCaseStmtRange(const CaseStmt &S);
  void EmitAsmStmt(const AsmStmt &S);

  // Objective-C statement emitters.
  void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S);
  void EmitObjCAtTryStmt(const ObjCAtTryStmt &S);
  void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S);
  void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S);
  void EmitObjCAutoreleasePoolStmt(const ObjCAutoreleasePoolStmt &S);

  // C++ exception-handling support.
  llvm::Constant *getUnwindResumeFn();
  llvm::Constant *getUnwindResumeOrRethrowFn();
  void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
  void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);

  void EmitCXXTryStmt(const CXXTryStmt &S);
  void EmitCXXForRangeStmt(const CXXForRangeStmt &S);
1846 
1847   //===--------------------------------------------------------------------===//
1848   //                         LValue Expression Emission
1849   //===--------------------------------------------------------------------===//
1850 
1851   /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type.
1852   RValue GetUndefRValue(QualType Ty);
1853 
1854   /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E
1855   /// and issue an ErrorUnsupported style diagnostic (using the
1856   /// provided Name).
1857   RValue EmitUnsupportedRValue(const Expr *E,
1858                                const char *Name);
1859 
1860   /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and issue
1861   /// an ErrorUnsupported style diagnostic (using the provided Name).
1862   LValue EmitUnsupportedLValue(const Expr *E,
1863                                const char *Name);
1864 
1865   /// EmitLValue - Emit code to compute a designator that specifies the location
1866   /// of the expression.
1867   ///
1868   /// This can return one of two things: a simple address or a bitfield
1869   /// reference.  In either case, the LLVM Value* in the LValue structure is
1870   /// guaranteed to be an LLVM pointer type.
1871   ///
1872   /// If this returns a bitfield reference, nothing about the pointee type of
1873   /// the LLVM value is known: For example, it may not be a pointer to an
1874   /// integer.
1875   ///
1876   /// If this returns a normal address, and if the lvalue's C type is fixed
1877   /// size, this method guarantees that the returned pointer type will point to
1878   /// an LLVM type of the same size of the lvalue's type.  If the lvalue has a
1879   /// variable length type, this is not possible.
1880   ///
1881   LValue EmitLValue(const Expr *E);
1882 
1883   /// EmitCheckedLValue - Same as EmitLValue but additionally we generate
1884   /// checking code to guard against undefined behavior.  This is only
1885   /// suitable when we know that the address will be used to access the
1886   /// object.
1887   LValue EmitCheckedLValue(const Expr *E);
1888 
1889   /// EmitToMemory - Change a scalar value from its value
1890   /// representation to its in-memory representation.
1891   llvm::Value *EmitToMemory(llvm::Value *Value, QualType Ty);
1892 
1893   /// EmitFromMemory - Change a scalar value from its memory
1894   /// representation to its value representation.
1895   llvm::Value *EmitFromMemory(llvm::Value *Value, QualType Ty);
1896 
1897   /// EmitLoadOfScalar - Load a scalar value from an address, taking
1898   /// care to appropriately convert from the memory representation to
1899   /// the LLVM value representation.
1900   llvm::Value *EmitLoadOfScalar(llvm::Value *Addr, bool Volatile,
1901                                 unsigned Alignment, QualType Ty,
1902                                 llvm::MDNode *TBAAInfo = 0);
1903 
1904   /// EmitLoadOfScalar - Load a scalar value from an address, taking
1905   /// care to appropriately convert from the memory representation to
1906   /// the LLVM value representation.  The l-value must be a simple
1907   /// l-value.
1908   llvm::Value *EmitLoadOfScalar(LValue lvalue);
1909 
1910   /// EmitStoreOfScalar - Store a scalar value to an address, taking
1911   /// care to appropriately convert from the memory representation to
1912   /// the LLVM value representation.
1913   void EmitStoreOfScalar(llvm::Value *Value, llvm::Value *Addr,
1914                          bool Volatile, unsigned Alignment, QualType Ty,
1915                          llvm::MDNode *TBAAInfo = 0);
1916 
1917   /// EmitStoreOfScalar - Store a scalar value to an address, taking
1918   /// care to appropriately convert from the memory representation to
1919   /// the LLVM value representation.  The l-value must be a simple
1920   /// l-value.
1921   void EmitStoreOfScalar(llvm::Value *value, LValue lvalue);
1922 
1923   /// EmitLoadOfLValue - Given an expression that represents a value lvalue,
1924   /// this method emits the address of the lvalue, then loads the result as an
1925   /// rvalue, returning the rvalue.
1926   RValue EmitLoadOfLValue(LValue V);
1927   RValue EmitLoadOfExtVectorElementLValue(LValue V);
1928   RValue EmitLoadOfBitfieldLValue(LValue LV);
1929   RValue EmitLoadOfPropertyRefLValue(LValue LV,
1930                                  ReturnValueSlot Return = ReturnValueSlot());
1931 
1932   /// EmitStoreThroughLValue - Store the specified rvalue into the specified
1933   /// lvalue, where both are guaranteed to the have the same type, and that type
1934   /// is 'Ty'.
1935   void EmitStoreThroughLValue(RValue Src, LValue Dst);
1936   void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst);
1937   void EmitStoreThroughPropertyRefLValue(RValue Src, LValue Dst);
1938 
1939   /// EmitStoreThroughLValue - Store Src into Dst with same constraints as
1940   /// EmitStoreThroughLValue.
1941   ///
1942   /// \param Result [out] - If non-null, this will be set to a Value* for the
1943   /// bit-field contents after the store, appropriate for use as the result of
1944   /// an assignment to the bit-field.
1945   void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst,
1946                                       llvm::Value **Result=0);
1947 
1948   /// Emit an l-value for an assignment (simple or compound) of complex type.
1949   LValue EmitComplexAssignmentLValue(const BinaryOperator *E);
1950   LValue EmitComplexCompoundAssignmentLValue(const CompoundAssignOperator *E);
1951 
1952   // Note: only available for agg return types
1953   LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
1954   LValue EmitCompoundAssignmentLValue(const CompoundAssignOperator *E);
1955   // Note: only available for agg return types
1956   LValue EmitCallExprLValue(const CallExpr *E);
1957   // Note: only available for agg return types
1958   LValue EmitVAArgExprLValue(const VAArgExpr *E);
1959   LValue EmitDeclRefLValue(const DeclRefExpr *E);
1960   LValue EmitStringLiteralLValue(const StringLiteral *E);
1961   LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E);
1962   LValue EmitPredefinedLValue(const PredefinedExpr *E);
1963   LValue EmitUnaryOpLValue(const UnaryOperator *E);
1964   LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E);
1965   LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E);
1966   LValue EmitMemberExpr(const MemberExpr *E);
1967   LValue EmitObjCIsaExpr(const ObjCIsaExpr *E);
1968   LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E);
1969   LValue EmitConditionalOperatorLValue(const AbstractConditionalOperator *E);
1970   LValue EmitCastLValue(const CastExpr *E);
1971   LValue EmitNullInitializationLValue(const CXXScalarValueInitExpr *E);
1972   LValue EmitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *E);
1973   LValue EmitOpaqueValueLValue(const OpaqueValueExpr *e);
1974 
1975   llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface,
1976                               const ObjCIvarDecl *Ivar);
1977   LValue EmitLValueForAnonRecordField(llvm::Value* Base,
1978                                       const IndirectFieldDecl* Field,
1979                                       unsigned CVRQualifiers);
1980   LValue EmitLValueForField(llvm::Value* Base, const FieldDecl* Field,
1981                             unsigned CVRQualifiers);
1982 
1983   /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that
1984   /// if the Field is a reference, this will return the address of the reference
1985   /// and not the address of the value stored in the reference.
1986   LValue EmitLValueForFieldInitialization(llvm::Value* Base,
1987                                           const FieldDecl* Field,
1988                                           unsigned CVRQualifiers);
1989 
1990   LValue EmitLValueForIvar(QualType ObjectTy,
1991                            llvm::Value* Base, const ObjCIvarDecl *Ivar,
1992                            unsigned CVRQualifiers);
1993 
1994   LValue EmitLValueForBitfield(llvm::Value* Base, const FieldDecl* Field,
1995                                 unsigned CVRQualifiers);
1996 
1997   LValue EmitBlockDeclRefLValue(const BlockDeclRefExpr *E);
1998 
1999   LValue EmitCXXConstructLValue(const CXXConstructExpr *E);
2000   LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E);
2001   LValue EmitExprWithCleanupsLValue(const ExprWithCleanups *E);
2002   LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E);
2003 
2004   LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E);
2005   LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E);
2006   LValue EmitObjCPropertyRefLValue(const ObjCPropertyRefExpr *E);
2007   LValue EmitStmtExprLValue(const StmtExpr *E);
2008   LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E);
2009   LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E);
2010   void   EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::Constant *Init);
2011 
2012   //===--------------------------------------------------------------------===//
2013   //                         Scalar Expression Emission
2014   //===--------------------------------------------------------------------===//
2015 
2016   /// EmitCall - Generate a call of the given function, expecting the given
2017   /// result type, and using the given argument list which specifies both the
2018   /// LLVM arguments and the types they were derived from.
2019   ///
2020   /// \param TargetDecl - If given, the decl of the function in a direct call;
2021   /// used to set attributes on the call (noreturn, etc.).
2022   RValue EmitCall(const CGFunctionInfo &FnInfo,
2023                   llvm::Value *Callee,
2024                   ReturnValueSlot ReturnValue,
2025                   const CallArgList &Args,
2026                   const Decl *TargetDecl = 0,
2027                   llvm::Instruction **callOrInvoke = 0);
2028 
2029   RValue EmitCall(QualType FnType, llvm::Value *Callee,
2030                   ReturnValueSlot ReturnValue,
2031                   CallExpr::const_arg_iterator ArgBeg,
2032                   CallExpr::const_arg_iterator ArgEnd,
2033                   const Decl *TargetDecl = 0);
2034   RValue EmitCallExpr(const CallExpr *E,
2035                       ReturnValueSlot ReturnValue = ReturnValueSlot());
2036 
2037   llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
2038                                   ArrayRef<llvm::Value *> Args,
2039                                   const Twine &Name = "");
2040   llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
2041                                   const Twine &Name = "");
2042 
2043   llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This,
2044                                 llvm::Type *Ty);
2045   llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type,
2046                                 llvm::Value *This, llvm::Type *Ty);
2047   llvm::Value *BuildAppleKextVirtualCall(const CXXMethodDecl *MD,
2048                                          NestedNameSpecifier *Qual,
2049                                          llvm::Type *Ty);
2050 
2051   llvm::Value *BuildAppleKextVirtualDestructorCall(const CXXDestructorDecl *DD,
2052                                                    CXXDtorType Type,
2053                                                    const CXXRecordDecl *RD);
2054 
2055   RValue EmitCXXMemberCall(const CXXMethodDecl *MD,
2056                            llvm::Value *Callee,
2057                            ReturnValueSlot ReturnValue,
2058                            llvm::Value *This,
2059                            llvm::Value *VTT,
2060                            CallExpr::const_arg_iterator ArgBeg,
2061                            CallExpr::const_arg_iterator ArgEnd);
2062   RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E,
2063                                ReturnValueSlot ReturnValue);
2064   RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
2065                                       ReturnValueSlot ReturnValue);
2066 
2067   llvm::Value *EmitCXXOperatorMemberCallee(const CXXOperatorCallExpr *E,
2068                                            const CXXMethodDecl *MD,
2069                                            llvm::Value *This);
2070   RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
2071                                        const CXXMethodDecl *MD,
2072                                        ReturnValueSlot ReturnValue);
2073 
2074 
2075   RValue EmitBuiltinExpr(const FunctionDecl *FD,
2076                          unsigned BuiltinID, const CallExpr *E);
2077 
2078   RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue);
2079 
2080   /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the call
2081   /// is unhandled by the current target.
2082   llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
2083 
2084   llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
2085   llvm::Value *EmitNeonCall(llvm::Function *F,
2086                             SmallVectorImpl<llvm::Value*> &O,
2087                             const char *name,
2088                             unsigned shift = 0, bool rightshift = false);
2089   llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx);
2090   llvm::Value *EmitNeonShiftVector(llvm::Value *V, llvm::Type *Ty,
2091                                    bool negateForRightShift);
2092 
2093   llvm::Value *BuildVector(const SmallVectorImpl<llvm::Value*> &Ops);
2094   llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
2095   llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
2096 
2097   llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E);
2098   llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E);
2099   llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E);
2100   RValue EmitObjCMessageExpr(const ObjCMessageExpr *E,
2101                              ReturnValueSlot Return = ReturnValueSlot());
2102 
2103   /// Retrieves the default cleanup kind for an ARC cleanup.
2104   /// Except under -fobjc-arc-eh, ARC cleanups are normal-only.
2105   CleanupKind getARCCleanupKind() {
2106     return CGM.getCodeGenOpts().ObjCAutoRefCountExceptions
2107              ? NormalAndEHCleanup : NormalCleanup;
2108   }
2109 
  // ARC primitives.

  // Operations on __weak references.  'addr' names the address of the
  // weak reference being manipulated.
  void EmitARCInitWeak(llvm::Value *value, llvm::Value *addr);
  void EmitARCDestroyWeak(llvm::Value *addr);
  llvm::Value *EmitARCLoadWeak(llvm::Value *addr);
  llvm::Value *EmitARCLoadWeakRetained(llvm::Value *addr);
  llvm::Value *EmitARCStoreWeak(llvm::Value *value, llvm::Value *addr,
                                bool ignored);
  void EmitARCCopyWeak(llvm::Value *dst, llvm::Value *src);
  void EmitARCMoveWeak(llvm::Value *dst, llvm::Value *src);

  // Retain/release/autorelease operations and their fused combinations.
  llvm::Value *EmitARCRetainAutorelease(QualType type, llvm::Value *value);
  llvm::Value *EmitARCRetainAutoreleaseNonBlock(llvm::Value *value);
  llvm::Value *EmitARCStoreStrong(LValue lvalue, llvm::Value *value,
                                  bool ignored);
  llvm::Value *EmitARCStoreStrongCall(llvm::Value *addr, llvm::Value *value,
                                      bool ignored);
  llvm::Value *EmitARCRetain(QualType type, llvm::Value *value);
  llvm::Value *EmitARCRetainNonBlock(llvm::Value *value);
  llvm::Value *EmitARCRetainBlock(llvm::Value *value);
  void EmitARCRelease(llvm::Value *value, bool precise);
  llvm::Value *EmitARCAutorelease(llvm::Value *value);
  llvm::Value *EmitARCAutoreleaseReturnValue(llvm::Value *value);
  llvm::Value *EmitARCRetainAutoreleaseReturnValue(llvm::Value *value);
  llvm::Value *EmitARCRetainAutoreleasedReturnValue(llvm::Value *value);

  /// Emit an assignment through an __autoreleasing l-value, returning the
  /// l-value and the value stored into it.
  std::pair<LValue,llvm::Value*>
  EmitARCStoreAutoreleasing(const BinaryOperator *e);
  /// Emit an assignment through a __strong l-value, returning the l-value
  /// and the value stored into it.
  std::pair<LValue,llvm::Value*>
  EmitARCStoreStrong(const BinaryOperator *e, bool ignored);

  llvm::Value *EmitObjCProduceObject(QualType T, llvm::Value *Ptr);
  llvm::Value *EmitObjCConsumeObject(QualType T, llvm::Value *Ptr);
  llvm::Value *EmitObjCExtendObjectLifetime(QualType T, llvm::Value *Ptr);

  // Emit an expression and retain (or retain+autorelease) the result.
  llvm::Value *EmitARCRetainScalarExpr(const Expr *expr);
  llvm::Value *EmitARCRetainAutoreleaseScalarExpr(const Expr *expr);

  // Destroyer functions for ARC-qualified object pointers (strong with
  // precise/imprecise lifetime, and weak).
  static Destroyer destroyARCStrongImprecise;
  static Destroyer destroyARCStrongPrecise;
  static Destroyer destroyARCWeak;

  // Autorelease-pool push/pop, in both ARC and manual retain/release
  // (MRR) flavors.
  void EmitObjCAutoreleasePoolPop(llvm::Value *Ptr);
  llvm::Value *EmitObjCAutoreleasePoolPush();
  llvm::Value *EmitObjCMRRAutoreleasePoolPush();
  void EmitObjCAutoreleasePoolCleanup(llvm::Value *Ptr);
  void EmitObjCMRRAutoreleasePoolPop(llvm::Value *Ptr);

  /// EmitReferenceBindingToExpr - Emits a reference binding to the passed in
  /// expression. Will emit a temporary variable if E is not an LValue.
  RValue EmitReferenceBindingToExpr(const Expr* E,
                                    const NamedDecl *InitializedDecl);
2160 
2161   //===--------------------------------------------------------------------===//
2162   //                           Expression Emission
2163   //===--------------------------------------------------------------------===//
2164 
2165   // Expressions are broken into three classes: scalar, complex, aggregate.
2166 
2167   /// EmitScalarExpr - Emit the computation of the specified expression of LLVM
2168   /// scalar type, returning the result.
2169   llvm::Value *EmitScalarExpr(const Expr *E , bool IgnoreResultAssign = false);
2170 
2171   /// EmitScalarConversion - Emit a conversion from the specified type to the
2172   /// specified destination type, both of which are LLVM scalar types.
2173   llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy,
2174                                     QualType DstTy);
2175 
2176   /// EmitComplexToScalarConversion - Emit a conversion from the specified
2177   /// complex type to the specified destination type, where the destination type
2178   /// is an LLVM scalar type.
2179   llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy,
2180                                              QualType DstTy);
2181 
2182 
2183   /// EmitAggExpr - Emit the computation of the specified expression
2184   /// of aggregate type.  The result is computed into the given slot,
2185   /// which may be null to indicate that the value is not needed.
2186   void EmitAggExpr(const Expr *E, AggValueSlot AS, bool IgnoreResult = false);
2187 
2188   /// EmitAggExprToLValue - Emit the computation of the specified expression of
2189   /// aggregate type into a temporary LValue.
2190   LValue EmitAggExprToLValue(const Expr *E);
2191 
2192   /// EmitGCMemmoveCollectable - Emit special API for structs with object
2193   /// pointers.
2194   void EmitGCMemmoveCollectable(llvm::Value *DestPtr, llvm::Value *SrcPtr,
2195                                 QualType Ty);
2196 
2197   /// EmitExtendGCLifetime - Given a pointer to an Objective-C object,
2198   /// make sure it survives garbage collection until this point.
2199   void EmitExtendGCLifetime(llvm::Value *object);
2200 
2201   /// EmitComplexExpr - Emit the computation of the specified expression of
2202   /// complex type, returning the result.
2203   ComplexPairTy EmitComplexExpr(const Expr *E,
2204                                 bool IgnoreReal = false,
2205                                 bool IgnoreImag = false);
2206 
2207   /// EmitComplexExprIntoAddr - Emit the computation of the specified expression
2208   /// of complex type, storing into the specified Value*.
2209   void EmitComplexExprIntoAddr(const Expr *E, llvm::Value *DestAddr,
2210                                bool DestIsVolatile);
2211 
2212   /// StoreComplexToAddr - Store a complex number into the specified address.
2213   void StoreComplexToAddr(ComplexPairTy V, llvm::Value *DestAddr,
2214                           bool DestIsVolatile);
2215   /// LoadComplexFromAddr - Load a complex number from the specified address.
2216   ComplexPairTy LoadComplexFromAddr(llvm::Value *SrcAddr, bool SrcIsVolatile);
2217 
  /// CreateStaticVarDecl - Create a zero-initialized LLVM global for
  /// a static local variable.
  llvm::GlobalVariable *CreateStaticVarDecl(const VarDecl &D,
                                            const char *Separator,
                                       llvm::GlobalValue::LinkageTypes Linkage);

  /// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the
  /// global variable that has already been created for it.  If the initializer
  /// has a different type than GV does, this may free GV and return a different
  /// one.  Otherwise it just returns GV.
  llvm::GlobalVariable *
  AddInitializerToStaticVarDecl(const VarDecl &D,
                                llvm::GlobalVariable *GV);


  /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++
  /// variable with global storage.
  void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr);

  /// EmitCXXGlobalDtorRegistration - Emits a call to register the global ptr
  /// with the C++ runtime so that its destructor will be called at exit.
  void EmitCXXGlobalDtorRegistration(llvm::Constant *DtorFn,
                                     llvm::Constant *DeclPtr);

  /// Emit code in this function to perform a guarded variable
  /// initialization.  Guarded initializations are used when it's not
  /// possible to prove that an initialization will be done exactly
  /// once, e.g. with a static local variable or a static data member
  /// of a class template.
  void EmitCXXGuardedInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr);

  /// GenerateCXXGlobalInitFunc - Generates code for initializing global
  /// variables.
  void GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                 llvm::Constant **Decls,
                                 unsigned NumDecls);

  /// GenerateCXXGlobalDtorFunc - Generates code for destroying global
  /// variables.
  void GenerateCXXGlobalDtorFunc(llvm::Function *Fn,
                                 const std::vector<std::pair<llvm::WeakVH,
                                   llvm::Constant*> > &DtorsAndObjects);

  /// GenerateCXXGlobalVarDeclInitFunc - Generates code in Fn for running the
  /// initializer of the given global variable.
  void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn,
                                        const VarDecl *D,
                                        llvm::GlobalVariable *Addr);

  /// EmitCXXConstructExpr - Emit the given C++ construct-expression,
  /// constructing into the provided aggregate slot.
  void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest);

  /// EmitSynthesizedCXXCopyCtor - Emit a synthesized copy construction from
  /// Src into Dest for the expression Exp.
  void EmitSynthesizedCXXCopyCtor(llvm::Value *Dest, llvm::Value *Src,
                                  const Expr *Exp);

  /// EmitExprWithCleanups - Emit the given full-expression, running the
  /// cleanups it owns when it completes.
  RValue EmitExprWithCleanups(const ExprWithCleanups *E,
                              AggValueSlot Slot =AggValueSlot::ignored());

  /// EmitCXXThrowExpr - Emit code for a C++ throw expression.
  void EmitCXXThrowExpr(const CXXThrowExpr *E);
2274 
2275   //===--------------------------------------------------------------------===//
2276   //                         Annotations Emission
2277   //===--------------------------------------------------------------------===//
2278 
2279   /// Emit an annotation call (intrinsic or builtin).
2280   llvm::Value *EmitAnnotationCall(llvm::Value *AnnotationFn,
2281                                   llvm::Value *AnnotatedVal,
2282                                   llvm::StringRef AnnotationStr,
2283                                   SourceLocation Location);
2284 
2285   /// Emit local annotations for the local variable V, declared by D.
2286   void EmitVarAnnotations(const VarDecl *D, llvm::Value *V);
2287 
2288   /// Emit field annotations for the given field & value. Returns the
2289   /// annotation result.
2290   llvm::Value *EmitFieldAnnotations(const FieldDecl *D, llvm::Value *V);
2291 
2292   //===--------------------------------------------------------------------===//
2293   //                             Internal Helpers
2294   //===--------------------------------------------------------------------===//
2295 
2296   /// ContainsLabel - Return true if the statement contains a label in it.  If
2297   /// this statement is not executed normally, it not containing a label means
2298   /// that we can just remove the code.
2299   static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false);
2300 
2301   /// containsBreak - Return true if the statement contains a break out of it.
2302   /// If the statement (recursively) contains a switch or loop with a break
2303   /// inside of it, this is fine.
2304   static bool containsBreak(const Stmt *S);
2305 
2306   /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
2307   /// to a constant, or if it does but contains a label, return false.  If it
2308   /// constant folds return true and set the boolean result in Result.
2309   bool ConstantFoldsToSimpleInteger(const Expr *Cond, bool &Result);
2310 
2311   /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
2312   /// to a constant, or if it does but contains a label, return false.  If it
2313   /// constant folds return true and set the folded value.
2314   bool ConstantFoldsToSimpleInteger(const Expr *Cond, llvm::APInt &Result);
2315 
2316   /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an
2317   /// if statement) to the specified blocks.  Based on the condition, this might
2318   /// try to simplify the codegen of the conditional based on the branch.
2319   void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock,
2320                             llvm::BasicBlock *FalseBlock);
2321 
2322   /// getTrapBB - Create a basic block that will call the trap intrinsic.  We'll
2323   /// generate a branch around the created basic block as necessary.
2324   llvm::BasicBlock *getTrapBB();
2325 
2326   /// EmitCallArg - Emit a single call argument.
2327   void EmitCallArg(CallArgList &args, const Expr *E, QualType ArgType);
2328 
2329   /// EmitDelegateCallArg - We are performing a delegate call; that
2330   /// is, the current function is delegating to another one.  Produce
2331   /// a r-value suitable for passing the given parameter.
2332   void EmitDelegateCallArg(CallArgList &args, const VarDecl *param);
2333 
private:
  /// EmitReturnOfRValue - Emit code to return the given r-value, which has
  /// the given type.
  void EmitReturnOfRValue(RValue RV, QualType Ty);

  /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty
  /// from function arguments into \arg Dst. See ABIArgInfo::Expand.
  ///
  /// \param AI - The first function argument of the expansion.
  /// \return The argument following the last expanded function
  /// argument.
  llvm::Function::arg_iterator
  ExpandTypeFromArgs(QualType Ty, LValue Dst,
                     llvm::Function::arg_iterator AI);

  /// ExpandTypeToArgs - Expand an RValue \arg Src, with the LLVM type for \arg
  /// Ty, into individual arguments on the provided vector \arg Args. See
  /// ABIArgInfo::Expand.
  void ExpandTypeToArgs(QualType Ty, RValue Src,
                        SmallVector<llvm::Value*, 16> &Args,
                        llvm::FunctionType *IRFuncTy);

  /// EmitAsmInput - Emit the value for a single input operand of the given
  /// inline-asm statement; may update \arg ConstraintStr.
  llvm::Value* EmitAsmInput(const AsmStmt &S,
                            const TargetInfo::ConstraintInfo &Info,
                            const Expr *InputExpr, std::string &ConstraintStr);

  /// EmitAsmInputLValue - Like EmitAsmInput, but for an input that has
  /// already been lowered to an l-value of the given type.
  llvm::Value* EmitAsmInputLValue(const AsmStmt &S,
                                  const TargetInfo::ConstraintInfo &Info,
                                  LValue InputValue, QualType InputType,
                                  std::string &ConstraintStr);
2363   /// EmitCallArgs - Emit call arguments for a function.
2364   /// The CallArgTypeInfo parameter is used for iterating over the known
2365   /// argument types of the function being called.
2366   template<typename T>
2367   void EmitCallArgs(CallArgList& Args, const T* CallArgTypeInfo,
2368                     CallExpr::const_arg_iterator ArgBeg,
2369                     CallExpr::const_arg_iterator ArgEnd) {
2370       CallExpr::const_arg_iterator Arg = ArgBeg;
2371 
2372     // First, use the argument types that the type info knows about
2373     if (CallArgTypeInfo) {
2374       for (typename T::arg_type_iterator I = CallArgTypeInfo->arg_type_begin(),
2375            E = CallArgTypeInfo->arg_type_end(); I != E; ++I, ++Arg) {
2376         assert(Arg != ArgEnd && "Running over edge of argument list!");
2377         QualType ArgType = *I;
2378 #ifndef NDEBUG
2379         QualType ActualArgType = Arg->getType();
2380         if (ArgType->isPointerType() && ActualArgType->isPointerType()) {
2381           QualType ActualBaseType =
2382             ActualArgType->getAs<PointerType>()->getPointeeType();
2383           QualType ArgBaseType =
2384             ArgType->getAs<PointerType>()->getPointeeType();
2385           if (ArgBaseType->isVariableArrayType()) {
2386             if (const VariableArrayType *VAT =
2387                 getContext().getAsVariableArrayType(ActualBaseType)) {
2388               if (!VAT->getSizeExpr())
2389                 ActualArgType = ArgType;
2390             }
2391           }
2392         }
2393         assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
2394                getTypePtr() ==
2395                getContext().getCanonicalType(ActualArgType).getTypePtr() &&
2396                "type mismatch in call argument!");
2397 #endif
2398         EmitCallArg(Args, *Arg, ArgType);
2399       }
2400 
2401       // Either we've emitted all the call args, or we have a call to a
2402       // variadic function.
2403       assert((Arg == ArgEnd || CallArgTypeInfo->isVariadic()) &&
2404              "Extra arguments in non-variadic function!");
2405 
2406     }
2407 
2408     // If we still have any arguments, emit them using the type of the argument.
2409     for (; Arg != ArgEnd; ++Arg)
2410       EmitCallArg(Args, *Arg, Arg->getType());
2411   }
2412 
2413   const TargetCodeGenInfo &getTargetHooks() const {
2414     return CGM.getTargetCodeGenInfo();
2415   }
2416 
  // Emit metadata for this function's declarations (implementation
  // elsewhere; NOTE(review): exact contents not visible from this header).
  void EmitDeclMetadata();

  /// buildByrefHelpers - Build (or look up) the helpers object for the
  /// given __block (byref) variable layout and emission.
  CodeGenModule::ByrefHelpers *
  buildByrefHelpers(llvm::StructType &byrefType,
                    const AutoVarEmission &emission);
2422 };
2423 
2424 /// Helper class with most of the code for saving a value for a
2425 /// conditional expression cleanup.
2426 struct DominatingLLVMValue {
2427   typedef llvm::PointerIntPair<llvm::Value*, 1, bool> saved_type;
2428 
2429   /// Answer whether the given value needs extra work to be saved.
2430   static bool needsSaving(llvm::Value *value) {
2431     // If it's not an instruction, we don't need to save.
2432     if (!isa<llvm::Instruction>(value)) return false;
2433 
2434     // If it's an instruction in the entry block, we don't need to save.
2435     llvm::BasicBlock *block = cast<llvm::Instruction>(value)->getParent();
2436     return (block != &block->getParent()->getEntryBlock());
2437   }
2438 
2439   /// Try to save the given value.
2440   static saved_type save(CodeGenFunction &CGF, llvm::Value *value) {
2441     if (!needsSaving(value)) return saved_type(value, false);
2442 
2443     // Otherwise we need an alloca.
2444     llvm::Value *alloca =
2445       CGF.CreateTempAlloca(value->getType(), "cond-cleanup.save");
2446     CGF.Builder.CreateStore(value, alloca);
2447 
2448     return saved_type(alloca, true);
2449   }
2450 
2451   static llvm::Value *restore(CodeGenFunction &CGF, saved_type value) {
2452     if (!value.getInt()) return value.getPointer();
2453     return CGF.Builder.CreateLoad(value.getPointer());
2454   }
2455 };
2456 
2457 /// A partial specialization of DominatingValue for llvm::Values that
2458 /// might be llvm::Instructions.
2459 template <class T> struct DominatingPointer<T,true> : DominatingLLVMValue {
2460   typedef T *type;
2461   static type restore(CodeGenFunction &CGF, saved_type value) {
2462     return static_cast<T*>(DominatingLLVMValue::restore(CGF, value));
2463   }
2464 };
2465 
/// A specialization of DominatingValue for RValue.
template <> struct DominatingValue<RValue> {
  typedef RValue type;
  class saved_type {
    // How the r-value was captured; per the enumerator names, scalars and
    // aggregates may be held either directly ("literal") or via a spilled
    // address, while complex values are always held by address.
    enum Kind { ScalarLiteral, ScalarAddress, AggregateLiteral,
                AggregateAddress, ComplexAddress };

    // The saved value, interpreted according to K.
    llvm::Value *Value;
    Kind K;
    // Private: instances are only built by save().
    saved_type(llvm::Value *v, Kind k) : Value(v), K(k) {}

  public:
    static bool needsSaving(RValue value);
    static saved_type save(CodeGenFunction &CGF, RValue value);
    RValue restore(CodeGenFunction &CGF);

    // implementations in CGExprCXX.cpp
  };

  // The DominatingValue interface simply forwards to saved_type.
  static bool needsSaving(type value) {
    return saved_type::needsSaving(value);
  }
  static saved_type save(CodeGenFunction &CGF, type value) {
    return saved_type::save(CGF, value);
  }
  static type restore(CodeGenFunction &CGF, saved_type value) {
    return value.restore(CGF);
  }
};
2495 
2496 }  // end namespace CodeGen
2497 }  // end namespace clang
2498 
2499 #endif
2500