1 //===-- CodeGenFunction.h - Per-Function state for LLVM CodeGen -*- C++ -*-===// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This is the internal per-function state used for llvm translation. 11 // 12 //===----------------------------------------------------------------------===// 13 14 #ifndef CLANG_CODEGEN_CODEGENFUNCTION_H 15 #define CLANG_CODEGEN_CODEGENFUNCTION_H 16 17 #include "clang/AST/Type.h" 18 #include "clang/AST/ExprCXX.h" 19 #include "clang/AST/ExprObjC.h" 20 #include "clang/AST/CharUnits.h" 21 #include "clang/Basic/TargetInfo.h" 22 #include "llvm/ADT/DenseMap.h" 23 #include "llvm/ADT/SmallVector.h" 24 #include "llvm/Support/ValueHandle.h" 25 #include "CodeGenModule.h" 26 #include "CGBlocks.h" 27 #include "CGBuilder.h" 28 #include "CGCall.h" 29 #include "CGCXX.h" 30 #include "CGValue.h" 31 32 namespace llvm { 33 class BasicBlock; 34 class LLVMContext; 35 class MDNode; 36 class Module; 37 class SwitchInst; 38 class Twine; 39 class Value; 40 class CallSite; 41 } 42 43 namespace clang { 44 class APValue; 45 class ASTContext; 46 class CXXDestructorDecl; 47 class CXXTryStmt; 48 class Decl; 49 class EnumConstantDecl; 50 class FunctionDecl; 51 class FunctionProtoType; 52 class LabelStmt; 53 class ObjCContainerDecl; 54 class ObjCInterfaceDecl; 55 class ObjCIvarDecl; 56 class ObjCMethodDecl; 57 class ObjCImplementationDecl; 58 class ObjCPropertyImplDecl; 59 class TargetInfo; 60 class TargetCodeGenInfo; 61 class VarDecl; 62 class ObjCForCollectionStmt; 63 class ObjCAtTryStmt; 64 class ObjCAtThrowStmt; 65 class ObjCAtSynchronizedStmt; 66 67 namespace CodeGen { 68 class CodeGenTypes; 69 class CGDebugInfo; 70 class CGFunctionInfo; 71 class CGRecordLayout; 72 class CGBlockInfo; 73 class CGCXXABI; 74 75 /// A branch fixup. 
/// These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, LatestBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};

/// The kinds of cleanup that can be pushed on an EHScopeStack.  These are
/// bit flags: a cleanup may fire on the normal (fall-through) path, on the
/// exceptional (EH) path, or on both, and may additionally start out
/// inactive until explicitly activated.
enum CleanupKind {
  EHCleanup = 0x1,
  NormalCleanup = 0x2,
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};

/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.  Because the stack's
    /// buffer grows downward, this offset is stable across pushes.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
  public:
    // Anchor the construction vtable.  We use the destructor because
    // gcc gives an obnoxious warning if there are virtual methods
    // with an accessible non-virtual destructor.  Unfortunately,
    // declaring this destructor makes it non-trivial, but there
    // doesn't seem to be any other way around this warning.
    //
    // This destructor will never be called.
    virtual ~Cleanup();

    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param IsForEHCleanup true if this is for an EH cleanup, false
    ///   if for a normal cleanup.
    virtual void Emit(CodeGenFunction &CGF, bool IsForEHCleanup) = 0;
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH cleanup on the stack.
  stable_iterator InnermostEHCleanup;

  /// The number of catches on the stack.
  unsigned CatchDepth;

  /// The current EH destination index.  Reset to FirstEHDestIndex
  /// whenever the last EH cleanup is popped.
  unsigned NextEHDestIndex;
  enum { FirstEHDestIndex = 1 };

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  llvm::SmallVector<BranchFixup, 8> BranchFixups;

  /// Makes room for Size bytes at the current top of the stack,
  /// growing the backing buffer if necessary; returns the new
  /// StartOfData.  Defined in CGException.cpp.
  char *allocate(size_t Size);

  /// Pushes a raw cleanup scope of DataSize bytes and returns the
  /// storage into which the Cleanup object should be constructed.
  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHCleanup(stable_end()),
                   CatchDepth(0), NextEHDestIndex(FirstEHDestIndex) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  // Variadic templates would make this not terrible.
  // Each overload below placement-constructs a T (a Cleanup subclass)
  // directly in stack-owned storage; T is never destructed, which is
  // why Cleanup implementations must be POD-like.

  /// Push a lazily-created cleanup on the stack.
  template <class T>
  void pushCleanup(CleanupKind Kind) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T();
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0>
  void pushCleanup(CleanupKind Kind, A0 a0) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3, class A4>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class A0, class A1, class A2>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, a0, a1, a2);
  }

  /// Pops a cleanup scope off the stack.  This should only be called
  /// by CodeGenFunction::PopCleanupBlock.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  /// True if any invoke emitted in this scope would require a
  /// landing pad, i.e. there is at least one catch or EH cleanup.
  bool requiresLandingPad() const {
    return (CatchDepth || hasEHCleanups());
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const; // CGException.h

  /// Determines whether there are any EH cleanups on the stack.
  bool hasEHCleanups() const {
    return InnermostEHCleanup != stable_end();
  }

  /// Returns the innermost EH cleanup on the stack, or stable_end()
  /// if there are no EH cleanups.
  stable_iterator getInnermostEHCleanup() const {
    return InnermostEHCleanup;
  }
  stable_iterator getInnermostActiveEHCleanup() const; // CGException.h

  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Finds the nearest cleanup enclosing the given iterator.
  /// Returns stable_iterator::invalid() if there are no such cleanups.
  stable_iterator getEnclosingEHCleanup(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }

  /// Gets the next EH destination index.
  unsigned getNextEHDestIndex() { return NextEHDestIndex++; }
};

/// CodeGenFunction - This class organizes the per-function state that is used
/// while generating LLVM code.
426 class CodeGenFunction : public BlockFunction { 427 CodeGenFunction(const CodeGenFunction&); // DO NOT IMPLEMENT 428 void operator=(const CodeGenFunction&); // DO NOT IMPLEMENT 429 430 friend class CGCXXABI; 431 public: 432 /// A jump destination is an abstract label, branching to which may 433 /// require a jump out through normal cleanups. 434 struct JumpDest { 435 JumpDest() : Block(0), ScopeDepth(), Index(0) {} 436 JumpDest(llvm::BasicBlock *Block, 437 EHScopeStack::stable_iterator Depth, 438 unsigned Index) 439 : Block(Block), ScopeDepth(Depth), Index(Index) {} 440 441 bool isValid() const { return Block != 0; } 442 llvm::BasicBlock *getBlock() const { return Block; } 443 EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; } 444 unsigned getDestIndex() const { return Index; } 445 446 private: 447 llvm::BasicBlock *Block; 448 EHScopeStack::stable_iterator ScopeDepth; 449 unsigned Index; 450 }; 451 452 /// An unwind destination is an abstract label, branching to which 453 /// may require a jump out through EH cleanups. 454 struct UnwindDest { 455 UnwindDest() : Block(0), ScopeDepth(), Index(0) {} 456 UnwindDest(llvm::BasicBlock *Block, 457 EHScopeStack::stable_iterator Depth, 458 unsigned Index) 459 : Block(Block), ScopeDepth(Depth), Index(Index) {} 460 461 bool isValid() const { return Block != 0; } 462 llvm::BasicBlock *getBlock() const { return Block; } 463 EHScopeStack::stable_iterator getScopeDepth() const { return ScopeDepth; } 464 unsigned getDestIndex() const { return Index; } 465 466 private: 467 llvm::BasicBlock *Block; 468 EHScopeStack::stable_iterator ScopeDepth; 469 unsigned Index; 470 }; 471 472 CodeGenModule &CGM; // Per-module state. 473 const TargetInfo &Target; 474 475 typedef std::pair<llvm::Value *, llvm::Value *> ComplexPairTy; 476 CGBuilderTy Builder; 477 478 /// CurFuncDecl - Holds the Decl for the current function or ObjC method. 479 /// This excludes BlockDecls. 
480 const Decl *CurFuncDecl; 481 /// CurCodeDecl - This is the inner-most code context, which includes blocks. 482 const Decl *CurCodeDecl; 483 const CGFunctionInfo *CurFnInfo; 484 QualType FnRetTy; 485 llvm::Function *CurFn; 486 487 /// CurGD - The GlobalDecl for the current function being compiled. 488 GlobalDecl CurGD; 489 490 /// ReturnBlock - Unified return block. 491 JumpDest ReturnBlock; 492 493 /// ReturnValue - The temporary alloca to hold the return value. This is null 494 /// iff the function has no return value. 495 llvm::Value *ReturnValue; 496 497 /// RethrowBlock - Unified rethrow block. 498 UnwindDest RethrowBlock; 499 500 /// AllocaInsertPoint - This is an instruction in the entry block before which 501 /// we prefer to insert allocas. 502 llvm::AssertingVH<llvm::Instruction> AllocaInsertPt; 503 504 // intptr_t, i32, i64 505 const llvm::IntegerType *IntPtrTy, *Int32Ty, *Int64Ty; 506 uint32_t LLVMPointerWidth; 507 508 bool Exceptions; 509 bool CatchUndefined; 510 511 /// \brief A mapping from NRVO variables to the flags used to indicate 512 /// when the NRVO has been applied to this variable. 513 llvm::DenseMap<const VarDecl *, llvm::Value *> NRVOFlags; 514 515 /// \brief A mapping from 'Save' expression in a conditional expression 516 /// to the IR for this expression. Used to implement IR gen. for Gnu 517 /// extension's missing LHS expression in a conditional operator expression. 518 llvm::DenseMap<const Expr *, llvm::Value *> ConditionalSaveExprs; 519 llvm::DenseMap<const Expr *, ComplexPairTy> ConditionalSaveComplexExprs; 520 llvm::DenseMap<const Expr *, LValue> ConditionalSaveLValueExprs; 521 522 EHScopeStack EHStack; 523 524 /// i32s containing the indexes of the cleanup destinations. 525 llvm::AllocaInst *NormalCleanupDest; 526 llvm::AllocaInst *EHCleanupDest; 527 528 unsigned NextCleanupDestIndex; 529 530 /// The exception slot. All landing pads write the current 531 /// exception pointer into this alloca. 
532 llvm::Value *ExceptionSlot; 533 534 /// Emits a landing pad for the current EH stack. 535 llvm::BasicBlock *EmitLandingPad(); 536 537 llvm::BasicBlock *getInvokeDestImpl(); 538 539 public: 540 /// ObjCEHValueStack - Stack of Objective-C exception values, used for 541 /// rethrows. 542 llvm::SmallVector<llvm::Value*, 8> ObjCEHValueStack; 543 544 // A struct holding information about a finally block's IR 545 // generation. For now, doesn't actually hold anything. 546 struct FinallyInfo { 547 }; 548 549 FinallyInfo EnterFinallyBlock(const Stmt *Stmt, 550 llvm::Constant *BeginCatchFn, 551 llvm::Constant *EndCatchFn, 552 llvm::Constant *RethrowFn); 553 void ExitFinallyBlock(FinallyInfo &FinallyInfo); 554 555 /// PushDestructorCleanup - Push a cleanup to call the 556 /// complete-object destructor of an object of the given type at the 557 /// given address. Does nothing if T is not a C++ class type with a 558 /// non-trivial destructor. 559 void PushDestructorCleanup(QualType T, llvm::Value *Addr); 560 561 /// PushDestructorCleanup - Push a cleanup to call the 562 /// complete-object variant of the given destructor on the object at 563 /// the given address. 564 void PushDestructorCleanup(const CXXDestructorDecl *Dtor, 565 llvm::Value *Addr); 566 567 /// PopCleanupBlock - Will pop the cleanup entry on the stack and 568 /// process all branch fixups. 569 void PopCleanupBlock(bool FallThroughIsBranchThrough = false); 570 571 /// DeactivateCleanupBlock - Deactivates the given cleanup block. 572 /// The block cannot be reactivated. Pops it if it's the top of the 573 /// stack. 574 void DeactivateCleanupBlock(EHScopeStack::stable_iterator Cleanup); 575 576 /// ActivateCleanupBlock - Activates an initially-inactive cleanup. 577 /// Cannot be used to resurrect a deactivated cleanup. 
578 void ActivateCleanupBlock(EHScopeStack::stable_iterator Cleanup); 579 580 /// \brief Enters a new scope for capturing cleanups, all of which 581 /// will be executed once the scope is exited. 582 class RunCleanupsScope { 583 CodeGenFunction& CGF; 584 EHScopeStack::stable_iterator CleanupStackDepth; 585 bool OldDidCallStackSave; 586 bool PerformCleanup; 587 588 RunCleanupsScope(const RunCleanupsScope &); // DO NOT IMPLEMENT 589 RunCleanupsScope &operator=(const RunCleanupsScope &); // DO NOT IMPLEMENT 590 591 public: 592 /// \brief Enter a new cleanup scope. 593 explicit RunCleanupsScope(CodeGenFunction &CGF) 594 : CGF(CGF), PerformCleanup(true) 595 { 596 CleanupStackDepth = CGF.EHStack.stable_begin(); 597 OldDidCallStackSave = CGF.DidCallStackSave; 598 CGF.DidCallStackSave = false; 599 } 600 601 /// \brief Exit this cleanup scope, emitting any accumulated 602 /// cleanups. 603 ~RunCleanupsScope() { 604 if (PerformCleanup) { 605 CGF.DidCallStackSave = OldDidCallStackSave; 606 CGF.PopCleanupBlocks(CleanupStackDepth); 607 } 608 } 609 610 /// \brief Determine whether this scope requires any cleanups. 611 bool requiresCleanups() const { 612 return CGF.EHStack.stable_begin() != CleanupStackDepth; 613 } 614 615 /// \brief Force the emission of cleanups now, instead of waiting 616 /// until this object is destroyed. 617 void ForceCleanup() { 618 assert(PerformCleanup && "Already forced cleanup"); 619 CGF.DidCallStackSave = OldDidCallStackSave; 620 CGF.PopCleanupBlocks(CleanupStackDepth); 621 PerformCleanup = false; 622 } 623 }; 624 625 626 /// PopCleanupBlocks - Takes the old cleanup stack size and emits 627 /// the cleanup blocks that have been added. 
628 void PopCleanupBlocks(EHScopeStack::stable_iterator OldCleanupStackSize); 629 630 void ResolveBranchFixups(llvm::BasicBlock *Target); 631 632 /// The given basic block lies in the current EH scope, but may be a 633 /// target of a potentially scope-crossing jump; get a stable handle 634 /// to which we can perform this jump later. 635 JumpDest getJumpDestInCurrentScope(llvm::BasicBlock *Target) { 636 return JumpDest(Target, 637 EHStack.getInnermostNormalCleanup(), 638 NextCleanupDestIndex++); 639 } 640 641 /// The given basic block lies in the current EH scope, but may be a 642 /// target of a potentially scope-crossing jump; get a stable handle 643 /// to which we can perform this jump later. 644 JumpDest getJumpDestInCurrentScope(const char *Name = 0) { 645 return getJumpDestInCurrentScope(createBasicBlock(Name)); 646 } 647 648 /// EmitBranchThroughCleanup - Emit a branch from the current insert 649 /// block through the normal cleanup handling code (if any) and then 650 /// on to \arg Dest. 651 void EmitBranchThroughCleanup(JumpDest Dest); 652 653 /// EmitBranchThroughEHCleanup - Emit a branch from the current 654 /// insert block through the EH cleanup handling code (if any) and 655 /// then on to \arg Dest. 656 void EmitBranchThroughEHCleanup(UnwindDest Dest); 657 658 /// getRethrowDest - Returns the unified outermost-scope rethrow 659 /// destination. 660 UnwindDest getRethrowDest(); 661 662 /// BeginConditionalBranch - Should be called before a conditional part of an 663 /// expression is emitted. For example, before the RHS of the expression below 664 /// is emitted: 665 /// 666 /// b && f(T()); 667 /// 668 /// This is used to make sure that any temporaries created in the conditional 669 /// branch are only destroyed if the branch is taken. 670 void BeginConditionalBranch() { 671 ++ConditionalBranchLevel; 672 } 673 674 /// EndConditionalBranch - Should be called after a conditional part of an 675 /// expression has been emitted. 
676 void EndConditionalBranch() { 677 assert(ConditionalBranchLevel != 0 && 678 "Conditional branch mismatch!"); 679 680 --ConditionalBranchLevel; 681 } 682 683 /// isInConditionalBranch - Return true if we're currently emitting 684 /// one branch or the other of a conditional expression. 685 bool isInConditionalBranch() const { return ConditionalBranchLevel != 0; } 686 687 private: 688 CGDebugInfo *DebugInfo; 689 690 /// IndirectBranch - The first time an indirect goto is seen we create a block 691 /// with an indirect branch. Every time we see the address of a label taken, 692 /// we add the label to the indirect goto. Every subsequent indirect goto is 693 /// codegen'd as a jump to the IndirectBranch's basic block. 694 llvm::IndirectBrInst *IndirectBranch; 695 696 /// LocalDeclMap - This keeps track of the LLVM allocas or globals for local C 697 /// decls. 698 llvm::DenseMap<const Decl*, llvm::Value*> LocalDeclMap; 699 700 /// LabelMap - This keeps track of the LLVM basic block for each C label. 701 llvm::DenseMap<const LabelStmt*, JumpDest> LabelMap; 702 703 // BreakContinueStack - This keeps track of where break and continue 704 // statements should jump to. 705 struct BreakContinue { 706 BreakContinue(JumpDest Break, JumpDest Continue) 707 : BreakBlock(Break), ContinueBlock(Continue) {} 708 709 JumpDest BreakBlock; 710 JumpDest ContinueBlock; 711 }; 712 llvm::SmallVector<BreakContinue, 8> BreakContinueStack; 713 714 /// SwitchInsn - This is nearest current switch instruction. It is null if if 715 /// current context is not in a switch. 716 llvm::SwitchInst *SwitchInsn; 717 718 /// CaseRangeBlock - This block holds if condition check for last case 719 /// statement range in current switch instruction. 720 llvm::BasicBlock *CaseRangeBlock; 721 722 // VLASizeMap - This keeps track of the associated size for each VLA type. 
723 // We track this by the size expression rather than the type itself because 724 // in certain situations, like a const qualifier applied to an VLA typedef, 725 // multiple VLA types can share the same size expression. 726 // FIXME: Maybe this could be a stack of maps that is pushed/popped as we 727 // enter/leave scopes. 728 llvm::DenseMap<const Expr*, llvm::Value*> VLASizeMap; 729 730 /// DidCallStackSave - Whether llvm.stacksave has been called. Used to avoid 731 /// calling llvm.stacksave for multiple VLAs in the same scope. 732 bool DidCallStackSave; 733 734 /// A block containing a single 'unreachable' instruction. Created 735 /// lazily by getUnreachableBlock(). 736 llvm::BasicBlock *UnreachableBlock; 737 738 /// CXXThisDecl - When generating code for a C++ member function, 739 /// this will hold the implicit 'this' declaration. 740 ImplicitParamDecl *CXXThisDecl; 741 llvm::Value *CXXThisValue; 742 743 /// CXXVTTDecl - When generating code for a base object constructor or 744 /// base object destructor with virtual bases, this will hold the implicit 745 /// VTT parameter. 746 ImplicitParamDecl *CXXVTTDecl; 747 llvm::Value *CXXVTTValue; 748 749 /// ConditionalBranchLevel - Contains the nesting level of the current 750 /// conditional branch. This is used so that we know if a temporary should be 751 /// destroyed conditionally. 752 unsigned ConditionalBranchLevel; 753 754 755 /// ByrefValueInfoMap - For each __block variable, contains a pair of the LLVM 756 /// type as well as the field number that contains the actual data. 757 llvm::DenseMap<const ValueDecl *, std::pair<const llvm::Type *, 758 unsigned> > ByRefValueInfo; 759 760 /// getByrefValueFieldNumber - Given a declaration, returns the LLVM field 761 /// number that holds the value. 
762 unsigned getByRefValueLLVMField(const ValueDecl *VD) const; 763 764 llvm::BasicBlock *TerminateLandingPad; 765 llvm::BasicBlock *TerminateHandler; 766 llvm::BasicBlock *TrapBB; 767 768 public: 769 CodeGenFunction(CodeGenModule &cgm); 770 771 CodeGenTypes &getTypes() const { return CGM.getTypes(); } 772 ASTContext &getContext() const; 773 CGDebugInfo *getDebugInfo() { return DebugInfo; } 774 775 /// Returns a pointer to the function's exception object slot, which 776 /// is assigned in every landing pad. 777 llvm::Value *getExceptionSlot(); 778 779 llvm::Value *getNormalCleanupDestSlot(); 780 llvm::Value *getEHCleanupDestSlot(); 781 782 llvm::BasicBlock *getUnreachableBlock() { 783 if (!UnreachableBlock) { 784 UnreachableBlock = createBasicBlock("unreachable"); 785 new llvm::UnreachableInst(getLLVMContext(), UnreachableBlock); 786 } 787 return UnreachableBlock; 788 } 789 790 llvm::BasicBlock *getInvokeDest() { 791 if (!EHStack.requiresLandingPad()) return 0; 792 return getInvokeDestImpl(); 793 } 794 795 llvm::LLVMContext &getLLVMContext() { return VMContext; } 796 797 //===--------------------------------------------------------------------===// 798 // Objective-C 799 //===--------------------------------------------------------------------===// 800 801 void GenerateObjCMethod(const ObjCMethodDecl *OMD); 802 803 void StartObjCMethod(const ObjCMethodDecl *MD, 804 const ObjCContainerDecl *CD); 805 806 /// GenerateObjCGetter - Synthesize an Objective-C property getter function. 807 void GenerateObjCGetter(ObjCImplementationDecl *IMP, 808 const ObjCPropertyImplDecl *PID); 809 void GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP, 810 ObjCMethodDecl *MD, bool ctor); 811 812 /// GenerateObjCSetter - Synthesize an Objective-C property setter function 813 /// for the given property. 
814 void GenerateObjCSetter(ObjCImplementationDecl *IMP, 815 const ObjCPropertyImplDecl *PID); 816 bool IndirectObjCSetterArg(const CGFunctionInfo &FI); 817 bool IvarTypeWithAggrGCObjects(QualType Ty); 818 819 //===--------------------------------------------------------------------===// 820 // Block Bits 821 //===--------------------------------------------------------------------===// 822 823 llvm::Value *BuildBlockLiteralTmp(const BlockExpr *); 824 llvm::Constant *BuildDescriptorBlockDecl(const BlockExpr *, 825 const CGBlockInfo &Info, 826 const llvm::StructType *, 827 llvm::Constant *BlockVarLayout, 828 std::vector<HelperInfo> *); 829 830 llvm::Function *GenerateBlockFunction(GlobalDecl GD, 831 const BlockExpr *BExpr, 832 CGBlockInfo &Info, 833 const Decl *OuterFuncDecl, 834 llvm::Constant *& BlockVarLayout, 835 llvm::DenseMap<const Decl*, llvm::Value*> ldm); 836 837 llvm::Value *LoadBlockStruct(); 838 839 void AllocateBlockCXXThisPointer(const CXXThisExpr *E); 840 void AllocateBlockDecl(const BlockDeclRefExpr *E); 841 llvm::Value *GetAddrOfBlockDecl(const BlockDeclRefExpr *E) { 842 return GetAddrOfBlockDecl(E->getDecl(), E->isByRef()); 843 } 844 llvm::Value *GetAddrOfBlockDecl(const ValueDecl *D, bool ByRef); 845 const llvm::Type *BuildByRefType(const ValueDecl *D); 846 847 void GenerateCode(GlobalDecl GD, llvm::Function *Fn); 848 void StartFunction(GlobalDecl GD, QualType RetTy, 849 llvm::Function *Fn, 850 const FunctionArgList &Args, 851 SourceLocation StartLoc); 852 853 void EmitConstructorBody(FunctionArgList &Args); 854 void EmitDestructorBody(FunctionArgList &Args); 855 void EmitFunctionBody(FunctionArgList &Args); 856 857 /// EmitReturnBlock - Emit the unified return block, trying to avoid its 858 /// emission when possible. 859 void EmitReturnBlock(); 860 861 /// FinishFunction - Complete IR generation of the current function. It is 862 /// legal to call this function even if there is no current insertion point. 
863 void FinishFunction(SourceLocation EndLoc=SourceLocation()); 864 865 /// GenerateThunk - Generate a thunk for the given method. 866 void GenerateThunk(llvm::Function *Fn, GlobalDecl GD, const ThunkInfo &Thunk); 867 868 void EmitCtorPrologue(const CXXConstructorDecl *CD, CXXCtorType Type, 869 FunctionArgList &Args); 870 871 /// InitializeVTablePointer - Initialize the vtable pointer of the given 872 /// subobject. 873 /// 874 void InitializeVTablePointer(BaseSubobject Base, 875 const CXXRecordDecl *NearestVBase, 876 uint64_t OffsetFromNearestVBase, 877 llvm::Constant *VTable, 878 const CXXRecordDecl *VTableClass); 879 880 typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBasesSetTy; 881 void InitializeVTablePointers(BaseSubobject Base, 882 const CXXRecordDecl *NearestVBase, 883 uint64_t OffsetFromNearestVBase, 884 bool BaseIsNonVirtualPrimaryBase, 885 llvm::Constant *VTable, 886 const CXXRecordDecl *VTableClass, 887 VisitedVirtualBasesSetTy& VBases); 888 889 void InitializeVTablePointers(const CXXRecordDecl *ClassDecl); 890 891 892 /// EnterDtorCleanups - Enter the cleanups necessary to complete the 893 /// given phase of destruction for a destructor. The end result 894 /// should call destructors on members and base classes in reverse 895 /// order of their construction. 896 void EnterDtorCleanups(const CXXDestructorDecl *Dtor, CXXDtorType Type); 897 898 /// ShouldInstrumentFunction - Return true if the current function should be 899 /// instrumented with __cyg_profile_func_* calls 900 bool ShouldInstrumentFunction(); 901 902 /// EmitFunctionInstrumentation - Emit LLVM code to call the specified 903 /// instrumentation function with the current function and the call site, if 904 /// function instrumentation is enabled. 905 void EmitFunctionInstrumentation(const char *Fn); 906 907 /// EmitFunctionProlog - Emit the target specific LLVM code to load the 908 /// arguments for the given function. 
  /// This is also responsible for naming the
  /// LLVM function arguments.
  void EmitFunctionProlog(const CGFunctionInfo &FI,
                          llvm::Function *Fn,
                          const FunctionArgList &Args);

  /// EmitFunctionEpilog - Emit the target specific LLVM code to return the
  /// given temporary.
  void EmitFunctionEpilog(const CGFunctionInfo &FI);

  /// EmitStartEHSpec - Emit the start of the exception spec.
  void EmitStartEHSpec(const Decl *D);

  /// EmitEndEHSpec - Emit the end of the exception spec.
  void EmitEndEHSpec(const Decl *D);

  /// getTerminateLandingPad - Return a landing pad that just calls terminate.
  llvm::BasicBlock *getTerminateLandingPad();

  /// getTerminateHandler - Return a handler (not a landing pad, just
  /// a catch handler) that just calls terminate. This is used when
  /// a terminate scope encloses a try.
  llvm::BasicBlock *getTerminateHandler();

  const llvm::Type *ConvertTypeForMem(QualType T);
  const llvm::Type *ConvertType(QualType T);

  /// ConvertType - Convenience overload: convert the type declared by the
  /// given TypeDecl.
  const llvm::Type *ConvertType(const TypeDecl *T) {
    return ConvertType(getContext().getTypeDeclType(T));
  }

  /// LoadObjCSelf - Load the value of self. This function is only valid while
  /// generating code for an Objective-C method.
  llvm::Value *LoadObjCSelf();

  /// TypeOfSelfObject - Return type of object that this self represents.
  QualType TypeOfSelfObject();

  /// hasAggregateLLVMType - Return true if the specified AST type will map
  /// into an aggregate LLVM type or is void.
  static bool hasAggregateLLVMType(QualType T);

  /// createBasicBlock - Create an LLVM basic block.
  llvm::BasicBlock *createBasicBlock(const char *Name="",
                                     llvm::Function *Parent=0,
                                     llvm::BasicBlock *InsertBefore=0) {
#ifdef NDEBUG
    // Block names only aid debugging; drop them in release builds to save
    // memory and time.
    return llvm::BasicBlock::Create(VMContext, "", Parent, InsertBefore);
#else
    return llvm::BasicBlock::Create(VMContext, Name, Parent, InsertBefore);
#endif
  }

  /// getJumpDestForLabel - Return the JumpDest that the specified label
  /// statement maps to.
  JumpDest getJumpDestForLabel(const LabelStmt *S);

  /// SimplifyForwardingBlocks - If the given basic block is only a branch to
  /// another basic block, simplify it. This assumes that no other code could
  /// potentially reference the basic block.
  void SimplifyForwardingBlocks(llvm::BasicBlock *BB);

  /// EmitBlock - Emit the given block \arg BB and set it as the insert point,
  /// adding a fall-through branch from the current insert block if
  /// necessary. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// IsFinished - If true, indicates that the caller has finished emitting
  /// branches to the given block and does not expect to emit code into it.
  /// This means the block can be ignored if it is unreachable.
  void EmitBlock(llvm::BasicBlock *BB, bool IsFinished=false);

  /// EmitBranch - Emit a branch to the specified basic block from the current
  /// insert block, taking care to avoid creation of branches from dummy
  /// blocks. It is legal to call this function even if there is no current
  /// insertion point.
  ///
  /// This function clears the current insertion point. The caller should
  /// follow calls to this function with calls to Emit*Block prior to
  /// generating new code.
  void EmitBranch(llvm::BasicBlock *Block);

  /// HaveInsertPoint - True if an insertion point is defined. If not, this
  /// indicates that the current code being emitted is unreachable.
  bool HaveInsertPoint() const {
    return Builder.GetInsertBlock() != 0;
  }

  /// EnsureInsertPoint - Ensure that an insertion point is defined so that
  /// emitted IR has a place to go. Note that by definition, if this function
  /// creates a block then that block is unreachable; callers may do better to
  /// detect when no insertion point is defined and simply skip IR generation.
  void EnsureInsertPoint() {
    if (!HaveInsertPoint())
      EmitBlock(createBasicBlock());
  }

  /// ErrorUnsupported - Print out an error that codegen doesn't support the
  /// specified stmt yet.
  void ErrorUnsupported(const Stmt *S, const char *Type,
                        bool OmitOnError=false);

  //===--------------------------------------------------------------------===//
  //                                  Helpers
  //===--------------------------------------------------------------------===//

  /// MakeAddrLValue - Create an LValue for the given address and type,
  /// attaching TBAA information obtained from CGM.
  LValue MakeAddrLValue(llvm::Value *V, QualType T, unsigned Alignment = 0) {
    return LValue::MakeAddr(V, T, Alignment, getContext(),
                            CGM.getTBAAInfo(T));
  }

  /// CreateTempAlloca - This creates an alloca and inserts it into the entry
  /// block. The caller is responsible for setting an appropriate alignment on
  /// the alloca.
  llvm::AllocaInst *CreateTempAlloca(const llvm::Type *Ty,
                                     const llvm::Twine &Name = "tmp");

  /// InitTempAlloca - Provide an initial value for the given alloca.
  void InitTempAlloca(llvm::AllocaInst *Alloca, llvm::Value *Value);

  /// CreateIRTemp - Create a temporary IR object of the given type, with
  /// appropriate alignment. This routine should only be used when a temporary
  /// value needs to be stored into an alloca (for example, to avoid explicit
  /// PHI construction), but the type is the IR type, not the type appropriate
  /// for storing in memory.
  llvm::AllocaInst *CreateIRTemp(QualType T, const llvm::Twine &Name = "tmp");

  /// CreateMemTemp - Create a temporary memory object of the given type, with
  /// appropriate alignment.
  llvm::AllocaInst *CreateMemTemp(QualType T, const llvm::Twine &Name = "tmp");

  /// CreateAggTemp - Create a temporary memory object for the given
  /// aggregate type.
  AggValueSlot CreateAggTemp(QualType T, const llvm::Twine &Name = "tmp") {
    return AggValueSlot::forAddr(CreateMemTemp(T, Name), false, false);
  }

  /// EvaluateExprAsBool - Perform the usual unary conversions on the specified
  /// expression and compare the result against zero, returning an Int1Ty
  /// value.
  llvm::Value *EvaluateExprAsBool(const Expr *E);

  /// EmitAnyExpr - Emit code to compute the specified expression which can
  /// have any type. The result is returned as an RValue struct. If this is an
  /// aggregate expression, the AggSlot argument indicates where the result
  /// should be returned.
  ///
  /// \param IgnoreResult - True if the resulting value isn't used.
  RValue EmitAnyExpr(const Expr *E,
                     AggValueSlot AggSlot = AggValueSlot::ignored(),
                     bool IgnoreResult = false);

  // EmitVAListRef - Emit a "reference" to a va_list; this is either the
  // address or the value of the expression, depending on how va_list is
  // defined.
  llvm::Value *EmitVAListRef(const Expr *E);

  /// EmitAnyExprToTemp - Similarly to EmitAnyExpr(), however, the result will
  /// always be accessible even if no aggregate location is provided.
  RValue EmitAnyExprToTemp(const Expr *E);

  /// EmitAnyExprToMem - Emits the code necessary to evaluate an
  /// arbitrary expression into the given memory location.
  void EmitAnyExprToMem(const Expr *E, llvm::Value *Location,
                        bool IsLocationVolatile,
                        bool IsInitializer);

  /// EmitAggregateCopy - Emit an aggregate copy.
  ///
  /// \param isVolatile - True iff either the source or the destination is
  /// volatile.
  void EmitAggregateCopy(llvm::Value *DestPtr, llvm::Value *SrcPtr,
                         QualType EltTy, bool isVolatile=false);

  /// StartBlock - Start new block named N. If insert block is a dummy block
  /// then reuse it.
  void StartBlock(const char *N);

  /// GetAddrOfStaticLocalVar - Return the address of a static local variable.
  llvm::Constant *GetAddrOfStaticLocalVar(const VarDecl *BVD) {
    return cast<llvm::Constant>(GetAddrOfLocalVar(BVD));
  }

  /// GetAddrOfLocalVar - Return the address of a local variable. The variable
  /// must already have an entry in LocalDeclMap; asserts otherwise.
  llvm::Value *GetAddrOfLocalVar(const VarDecl *VD) {
    llvm::Value *Res = LocalDeclMap[VD];
    assert(Res && "Invalid argument to GetAddrOfLocalVar(), no decl!");
    return Res;
  }

  /// getAccessedFieldNo - Given an encoded value and a result number, return
  /// the input field number being accessed.
  static unsigned getAccessedFieldNo(unsigned Idx, const llvm::Constant *Elts);

  llvm::BlockAddress *GetAddrOfLabel(const LabelStmt *L);
  llvm::BasicBlock *GetIndirectGotoBlock();

  /// EmitNullInitialization - Generate code to set a value of the given type
  /// to null. If the type contains data member pointers, they will be
  /// initialized to -1 in accordance with the Itanium C++ ABI.
  void EmitNullInitialization(llvm::Value *DestPtr, QualType Ty);

  // EmitVAArg - Generate code to get an argument from the passed in pointer
  // and update it accordingly. The return value is a pointer to the argument.
  // FIXME: We should be able to get rid of this method and use the va_arg
  // instruction in LLVM instead once it works well enough.
  llvm::Value *EmitVAArg(llvm::Value *VAListAddr, QualType Ty);

  /// EmitVLASize - Generate code for any VLA size expressions that might occur
  /// in a variably modified type.
  /// If Ty is a VLA, will return the value that
  /// corresponds to the size in bytes of the VLA type. Will return 0
  /// otherwise.
  ///
  /// This function can be called with a null (unreachable) insert point.
  llvm::Value *EmitVLASize(QualType Ty);

  // GetVLASize - Returns an LLVM value that corresponds to the size in bytes
  // of a variable length array type.
  llvm::Value *GetVLASize(const VariableArrayType *);

  /// LoadCXXThis - Load the value of 'this'. This function is only valid
  /// while generating code for a C++ member function.
  llvm::Value *LoadCXXThis() {
    assert(CXXThisValue && "no 'this' value for this function");
    return CXXThisValue;
  }

  /// LoadCXXVTT - Load the VTT parameter of base constructors/destructors
  /// that have virtual bases.
  llvm::Value *LoadCXXVTT() {
    assert(CXXVTTValue && "no VTT value for this function");
    return CXXVTTValue;
  }

  /// GetAddressOfDirectBaseInCompleteClass - Convert the given pointer to a
  /// complete class to the given direct base.
  llvm::Value *
  GetAddressOfDirectBaseInCompleteClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        const CXXRecordDecl *Base,
                                        bool BaseIsVirtual);

  /// GetAddressOfBaseClass - This function will add the necessary delta to
  /// the load of 'this' and returns address of the base class.
  llvm::Value *GetAddressOfBaseClass(llvm::Value *Value,
                                     const CXXRecordDecl *Derived,
                                     CastExpr::path_const_iterator PathBegin,
                                     CastExpr::path_const_iterator PathEnd,
                                     bool NullCheckValue);

  /// GetAddressOfDerivedClass - The inverse adjustment of
  /// GetAddressOfBaseClass: convert a base-class pointer to the given derived
  /// class along the given cast path.
  llvm::Value *GetAddressOfDerivedClass(llvm::Value *Value,
                                        const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                        CastExpr::path_const_iterator PathEnd,
                                        bool NullCheckValue);

  llvm::Value *GetVirtualBaseClassOffset(llvm::Value *This,
                                         const CXXRecordDecl *ClassDecl,
                                         const CXXRecordDecl *BaseClassDecl);

  void EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                      CXXCtorType CtorType,
                                      const FunctionArgList &Args);
  void EmitCXXConstructorCall(const CXXConstructorDecl *D, CXXCtorType Type,
                              bool ForVirtualBase, llvm::Value *This,
                              CallExpr::const_arg_iterator ArgBeg,
                              CallExpr::const_arg_iterator ArgEnd);

  /// EmitCXXAggrConstructorCall - Construct the elements of an array whose
  /// bound is known statically.
  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  const ConstantArrayType *ArrayTy,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  /// EmitCXXAggrConstructorCall - As above, but the element count is a
  /// runtime value.
  void EmitCXXAggrConstructorCall(const CXXConstructorDecl *D,
                                  llvm::Value *NumElements,
                                  llvm::Value *ArrayPtr,
                                  CallExpr::const_arg_iterator ArgBeg,
                                  CallExpr::const_arg_iterator ArgEnd,
                                  bool ZeroInitialization = false);

  void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                 const ArrayType *Array,
                                 llvm::Value *This);

  void EmitCXXAggrDestructorCall(const CXXDestructorDecl *D,
                                 llvm::Value *NumElements,
                                 llvm::Value *This);

  llvm::Function *GenerateCXXAggrDestructorHelper(const CXXDestructorDecl *D,
                                                  const ArrayType *Array,
                                                  llvm::Value *This);

  void EmitCXXDestructorCall(const CXXDestructorDecl *D, CXXDtorType Type,
                             bool ForVirtualBase, llvm::Value *This);

  void EmitNewArrayInitializer(const CXXNewExpr *E, llvm::Value *NewPtr,
                               llvm::Value *NumElements);

  void EmitCXXTemporary(const CXXTemporary *Temporary, llvm::Value *Ptr);

  llvm::Value *EmitCXXNewExpr(const CXXNewExpr *E);
  void EmitCXXDeleteExpr(const CXXDeleteExpr *E);

  void EmitDeleteCall(const FunctionDecl *DeleteFD, llvm::Value *Ptr,
                      QualType DeleteTy);

  llvm::Value* EmitCXXTypeidExpr(const CXXTypeidExpr *E);
  llvm::Value *EmitDynamicCast(llvm::Value *V, const CXXDynamicCastExpr *DCE);

  void EmitCheck(llvm::Value *, unsigned Size);

  llvm::Value *EmitScalarPrePostIncDec(const UnaryOperator *E, LValue LV,
                                       bool isInc, bool isPre);
  ComplexPairTy EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
                                         bool isInc, bool isPre);

  //===--------------------------------------------------------------------===//
  //                            Declaration Emission
  //===--------------------------------------------------------------------===//

  /// EmitDecl - Emit a declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitDecl(const Decl &D);

  /// EmitVarDecl - Emit a local variable declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitVarDecl(const VarDecl &D);

  /// SpecialInitFn - Callback type used to perform non-standard
  /// initialization of an automatic variable (see EmitAutoVarDecl).
  typedef void SpecialInitFn(CodeGenFunction &Init, const VarDecl &D,
                             llvm::Value *Address);

  /// EmitAutoVarDecl - Emit an auto variable declaration.
  ///
  /// This function can be called with a null (unreachable) insert point.
  void EmitAutoVarDecl(const VarDecl &D, SpecialInitFn *SpecialInit = 0);

  void EmitStaticVarDecl(const VarDecl &D,
                         llvm::GlobalValue::LinkageTypes Linkage);

  /// EmitParmDecl - Emit a ParmVarDecl or an ImplicitParamDecl.
  void EmitParmDecl(const VarDecl &D, llvm::Value *Arg);

  //===--------------------------------------------------------------------===//
  //                             Statement Emission
  //===--------------------------------------------------------------------===//

  /// EmitStopPoint - Emit a debug stoppoint if we are emitting debug info.
  void EmitStopPoint(const Stmt *S);

  /// EmitStmt - Emit the code for the statement \arg S. It is legal to call
  /// this function even if there is no current insertion point.
  ///
  /// This function may clear the current insertion point; callers should use
  /// EnsureInsertPoint if they wish to subsequently generate code without
  /// first calling EmitBlock, EmitBranch, or EmitStmt.
  void EmitStmt(const Stmt *S);

  /// EmitSimpleStmt - Try to emit a "simple" statement which does not
  /// necessarily require an insertion point or debug information; typically
  /// because the statement amounts to a jump or a container of other
  /// statements.
  ///
  /// \return True if the statement was handled.
  bool EmitSimpleStmt(const Stmt *S);

  RValue EmitCompoundStmt(const CompoundStmt &S, bool GetLast = false,
                          AggValueSlot AVS = AggValueSlot::ignored());

  /// EmitLabel - Emit the block for the given label. It is legal to call this
  /// function even if there is no current insertion point.
  void EmitLabel(const LabelStmt &S); // helper for EmitLabelStmt.
  void EmitLabelStmt(const LabelStmt &S);
  void EmitGotoStmt(const GotoStmt &S);
  void EmitIndirectGotoStmt(const IndirectGotoStmt &S);
  void EmitIfStmt(const IfStmt &S);
  void EmitWhileStmt(const WhileStmt &S);
  void EmitDoStmt(const DoStmt &S);
  void EmitForStmt(const ForStmt &S);
  void EmitReturnStmt(const ReturnStmt &S);
  void EmitDeclStmt(const DeclStmt &S);
  void EmitBreakStmt(const BreakStmt &S);
  void EmitContinueStmt(const ContinueStmt &S);
  void EmitSwitchStmt(const SwitchStmt &S);
  void EmitDefaultStmt(const DefaultStmt &S);
  void EmitCaseStmt(const CaseStmt &S);
  void EmitCaseStmtRange(const CaseStmt &S);
  void EmitAsmStmt(const AsmStmt &S);

  void EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S);
  void EmitObjCAtTryStmt(const ObjCAtTryStmt &S);
  void EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S);
  void EmitObjCAtSynchronizedStmt(const ObjCAtSynchronizedStmt &S);

  llvm::Constant *getUnwindResumeOrRethrowFn();
  void EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);
  void ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock = false);

  void EmitCXXTryStmt(const CXXTryStmt &S);

  //===--------------------------------------------------------------------===//
  //                         LValue Expression Emission
  //===--------------------------------------------------------------------===//

  /// GetUndefRValue - Get an appropriate 'undef' rvalue for the given type.
  RValue GetUndefRValue(QualType Ty);

  /// EmitUnsupportedRValue - Emit a dummy r-value using the type of E
  /// and issue an ErrorUnsupported style diagnostic (using the
  /// provided Name).
  RValue EmitUnsupportedRValue(const Expr *E,
                               const char *Name);

  /// EmitUnsupportedLValue - Emit a dummy l-value using the type of E and
  /// issue an ErrorUnsupported style diagnostic (using the provided Name).
  LValue EmitUnsupportedLValue(const Expr *E,
                               const char *Name);

  /// EmitLValue - Emit code to compute a designator that specifies the
  /// location of the expression.
  ///
  /// This can return one of two things: a simple address or a bitfield
  /// reference. In either case, the LLVM Value* in the LValue structure is
  /// guaranteed to be an LLVM pointer type.
  ///
  /// If this returns a bitfield reference, nothing about the pointee type of
  /// the LLVM value is known: For example, it may not be a pointer to an
  /// integer.
  ///
  /// If this returns a normal address, and if the lvalue's C type is fixed
  /// size, this method guarantees that the returned pointer type will point
  /// to an LLVM type of the same size of the lvalue's type. If the lvalue has
  /// a variable length type, this is not possible.
  ///
  LValue EmitLValue(const Expr *E);

  /// EmitCheckedLValue - Same as EmitLValue but additionally we generate
  /// checking code to guard against undefined behavior. This is only
  /// suitable when we know that the address will be used to access the
  /// object.
  LValue EmitCheckedLValue(const Expr *E);

  /// EmitLoadOfScalar - Load a scalar value from an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation.
  llvm::Value *EmitLoadOfScalar(llvm::Value *Addr, bool Volatile,
                                unsigned Alignment, QualType Ty,
                                llvm::MDNode *TBAAInfo = 0);

  /// EmitStoreOfScalar - Store a scalar value to an address, taking
  /// care to appropriately convert from the memory representation to
  /// the LLVM value representation.
  void EmitStoreOfScalar(llvm::Value *Value, llvm::Value *Addr,
                         bool Volatile, unsigned Alignment, QualType Ty,
                         llvm::MDNode *TBAAInfo = 0);

  /// EmitLoadOfLValue - Given an expression that represents a value lvalue,
  /// this method emits the address of the lvalue, then loads the result as an
  /// rvalue, returning the rvalue.
  RValue EmitLoadOfLValue(LValue V, QualType LVType);
  RValue EmitLoadOfExtVectorElementLValue(LValue V, QualType LVType);
  RValue EmitLoadOfBitfieldLValue(LValue LV, QualType ExprType);
  RValue EmitLoadOfPropertyRefLValue(LValue LV, QualType ExprType);
  RValue EmitLoadOfKVCRefLValue(LValue LV, QualType ExprType);

  /// EmitStoreThroughLValue - Store the specified rvalue into the specified
  /// lvalue, where both are guaranteed to have the same type, and that type
  /// is 'Ty'.
  void EmitStoreThroughLValue(RValue Src, LValue Dst, QualType Ty);
  void EmitStoreThroughExtVectorComponentLValue(RValue Src, LValue Dst,
                                                QualType Ty);
  void EmitStoreThroughPropertyRefLValue(RValue Src, LValue Dst, QualType Ty);
  void EmitStoreThroughKVCRefLValue(RValue Src, LValue Dst, QualType Ty);

  /// EmitStoreThroughBitfieldLValue - Store Src into Dst with the same
  /// constraints as EmitStoreThroughLValue.
  ///
  /// \param Result [out] - If non-null, this will be set to a Value* for the
  /// bit-field contents after the store, appropriate for use as the result of
  /// an assignment to the bit-field.
  void EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst, QualType Ty,
                                      llvm::Value **Result=0);

  // Note: only available for agg return types
  LValue EmitBinaryOperatorLValue(const BinaryOperator *E);
  LValue EmitCompoundAssignOperatorLValue(const CompoundAssignOperator *E);
  // Note: only available for agg return types
  LValue EmitCallExprLValue(const CallExpr *E);
  // Note: only available for agg return types
  LValue EmitVAArgExprLValue(const VAArgExpr *E);
  LValue EmitDeclRefLValue(const DeclRefExpr *E);
  LValue EmitStringLiteralLValue(const StringLiteral *E);
  LValue EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E);
  LValue EmitPredefinedLValue(const PredefinedExpr *E);
  LValue EmitUnaryOpLValue(const UnaryOperator *E);
  LValue EmitArraySubscriptExpr(const ArraySubscriptExpr *E);
  LValue EmitExtVectorElementExpr(const ExtVectorElementExpr *E);
  LValue EmitMemberExpr(const MemberExpr *E);
  LValue EmitObjCIsaExpr(const ObjCIsaExpr *E);
  LValue EmitCompoundLiteralLValue(const CompoundLiteralExpr *E);
  LValue EmitConditionalOperatorLValue(const ConditionalOperator *E);
  LValue EmitCastLValue(const CastExpr *E);
  LValue EmitNullInitializationLValue(const CXXScalarValueInitExpr *E);

  llvm::Value *EmitIvarOffset(const ObjCInterfaceDecl *Interface,
                              const ObjCIvarDecl *Ivar);
  LValue EmitLValueForAnonRecordField(llvm::Value* Base,
                                      const FieldDecl* Field,
                                      unsigned CVRQualifiers);
  LValue EmitLValueForField(llvm::Value* Base, const FieldDecl* Field,
                            unsigned CVRQualifiers);

  /// EmitLValueForFieldInitialization - Like EmitLValueForField, except that
  /// if the Field is a reference, this will return the address of the
  /// reference and not the address of the value stored in the reference.
  LValue EmitLValueForFieldInitialization(llvm::Value* Base,
                                          const FieldDecl* Field,
                                          unsigned CVRQualifiers);

  LValue EmitLValueForIvar(QualType ObjectTy,
                           llvm::Value* Base, const ObjCIvarDecl *Ivar,
                           unsigned CVRQualifiers);

  LValue EmitLValueForBitfield(llvm::Value* Base, const FieldDecl* Field,
                               unsigned CVRQualifiers);

  LValue EmitBlockDeclRefLValue(const BlockDeclRefExpr *E);

  LValue EmitCXXConstructLValue(const CXXConstructExpr *E);
  LValue EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E);
  LValue EmitCXXExprWithTemporariesLValue(const CXXExprWithTemporaries *E);
  LValue EmitCXXTypeidLValue(const CXXTypeidExpr *E);

  LValue EmitObjCMessageExprLValue(const ObjCMessageExpr *E);
  LValue EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E);
  LValue EmitObjCPropertyRefLValue(const ObjCPropertyRefExpr *E);
  LValue EmitObjCKVCRefLValue(const ObjCImplicitSetterGetterRefExpr *E);
  LValue EmitStmtExprLValue(const StmtExpr *E);
  LValue EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E);
  LValue EmitObjCSelectorLValue(const ObjCSelectorExpr *E);

  void EmitDeclRefExprDbgValue(const DeclRefExpr *E, llvm::Constant *Init);

  //===--------------------------------------------------------------------===//
  //                         Scalar Expression Emission
  //===--------------------------------------------------------------------===//

  /// EmitCall - Generate a call of the given function, expecting the given
  /// result type, and using the given argument list which specifies both the
  /// LLVM arguments and the types they were derived from.
  ///
  /// \param TargetDecl - If given, the decl of the function in a direct call;
  /// used to set attributes on the call (noreturn, etc.).
  RValue EmitCall(const CGFunctionInfo &FnInfo,
                  llvm::Value *Callee,
                  ReturnValueSlot ReturnValue,
                  const CallArgList &Args,
                  const Decl *TargetDecl = 0,
                  llvm::Instruction **callOrInvoke = 0);

  RValue EmitCall(QualType FnType, llvm::Value *Callee,
                  ReturnValueSlot ReturnValue,
                  CallExpr::const_arg_iterator ArgBeg,
                  CallExpr::const_arg_iterator ArgEnd,
                  const Decl *TargetDecl = 0);
  RValue EmitCallExpr(const CallExpr *E,
                      ReturnValueSlot ReturnValue = ReturnValueSlot());

  /// EmitCallOrInvoke - Emit a call to the given callee; NOTE(review):
  /// presumably emits an invoke instead when the call must unwind through an
  /// EH scope — confirm at the definition.
  llvm::CallSite EmitCallOrInvoke(llvm::Value *Callee,
                                  llvm::Value * const *ArgBegin,
                                  llvm::Value * const *ArgEnd,
                                  const llvm::Twine &Name = "");

  llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *This,
                                const llvm::Type *Ty);
  llvm::Value *BuildVirtualCall(const CXXDestructorDecl *DD, CXXDtorType Type,
                                llvm::Value *&This, const llvm::Type *Ty);

  RValue EmitCXXMemberCall(const CXXMethodDecl *MD,
                           llvm::Value *Callee,
                           ReturnValueSlot ReturnValue,
                           llvm::Value *This,
                           llvm::Value *VTT,
                           CallExpr::const_arg_iterator ArgBeg,
                           CallExpr::const_arg_iterator ArgEnd);
  RValue EmitCXXMemberCallExpr(const CXXMemberCallExpr *E,
                               ReturnValueSlot ReturnValue);
  RValue EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                      ReturnValueSlot ReturnValue);

  RValue EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                       const CXXMethodDecl *MD,
                                       ReturnValueSlot ReturnValue);

  RValue EmitBuiltinExpr(const FunctionDecl *FD,
                         unsigned BuiltinID, const CallExpr *E);

  RValue EmitBlockCallExpr(const CallExpr *E, ReturnValueSlot ReturnValue);

  /// EmitTargetBuiltinExpr - Emit the given builtin call. Returns 0 if the
  /// call is unhandled by the current target.
  llvm::Value *EmitTargetBuiltinExpr(unsigned BuiltinID, const CallExpr *E);

  llvm::Value *EmitARMBuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitNeonCall(llvm::Function *F,
                            llvm::SmallVectorImpl<llvm::Value*> &O,
                            const char *name, bool splat = false,
                            unsigned shift = 0, bool rightshift = false);
  llvm::Value *EmitNeonSplat(llvm::Value *V, llvm::Constant *Idx,
                             bool widen = false);
  llvm::Value *EmitNeonShiftVector(llvm::Value *V, const llvm::Type *Ty,
                                   bool negateForRightShift);

  llvm::Value *BuildVector(const llvm::SmallVectorImpl<llvm::Value*> &Ops);
  llvm::Value *EmitX86BuiltinExpr(unsigned BuiltinID, const CallExpr *E);
  llvm::Value *EmitPPCBuiltinExpr(unsigned BuiltinID, const CallExpr *E);

  llvm::Value *EmitObjCProtocolExpr(const ObjCProtocolExpr *E);
  llvm::Value *EmitObjCStringLiteral(const ObjCStringLiteral *E);
  llvm::Value *EmitObjCSelectorExpr(const ObjCSelectorExpr *E);
  RValue EmitObjCMessageExpr(const ObjCMessageExpr *E,
                             ReturnValueSlot Return = ReturnValueSlot());
  RValue EmitObjCPropertyGet(const Expr *E,
                             ReturnValueSlot Return = ReturnValueSlot());
  RValue EmitObjCSuperPropertyGet(const Expr *Exp, const Selector &S,
                                  ReturnValueSlot Return = ReturnValueSlot());
  void EmitObjCPropertySet(const Expr *E, RValue Src);
  void EmitObjCSuperPropertySet(const Expr *E, const Selector &S, RValue Src);

  /// EmitReferenceBindingToExpr - Emits a reference binding to the passed in
  /// expression. Will emit a temporary variable if E is not an LValue.
  RValue EmitReferenceBindingToExpr(const Expr* E,
                                    const NamedDecl *InitializedDecl);

  //===--------------------------------------------------------------------===//
  //                             Expression Emission
  //===--------------------------------------------------------------------===//

  // Expressions are broken into three classes: scalar, complex, aggregate.

  /// EmitScalarExpr - Emit the computation of the specified expression of
  /// LLVM scalar type, returning the result.
  llvm::Value *EmitScalarExpr(const Expr *E, bool IgnoreResultAssign = false);

  /// EmitScalarConversion - Emit a conversion from the specified type to the
  /// specified destination type, both of which are LLVM scalar types.
  llvm::Value *EmitScalarConversion(llvm::Value *Src, QualType SrcTy,
                                    QualType DstTy);

  /// EmitComplexToScalarConversion - Emit a conversion from the specified
  /// complex type to the specified destination type, where the destination
  /// type is an LLVM scalar type.
  llvm::Value *EmitComplexToScalarConversion(ComplexPairTy Src, QualType SrcTy,
                                             QualType DstTy);

  /// EmitAggExpr - Emit the computation of the specified expression
  /// of aggregate type. The result is computed into the given slot,
  /// which may be null to indicate that the value is not needed.
  void EmitAggExpr(const Expr *E, AggValueSlot AS, bool IgnoreResult = false);

  /// EmitAggExprToLValue - Emit the computation of the specified expression
  /// of aggregate type into a temporary LValue.
  LValue EmitAggExprToLValue(const Expr *E);

  /// EmitGCMemmoveCollectable - Emit special API for structs with object
  /// pointers.
  void EmitGCMemmoveCollectable(llvm::Value *DestPtr, llvm::Value *SrcPtr,
                                QualType Ty);

  /// EmitComplexExpr - Emit the computation of the specified expression of
  /// complex type, returning the result.
  ComplexPairTy EmitComplexExpr(const Expr *E, bool IgnoreReal = false,
                                bool IgnoreImag = false,
                                bool IgnoreRealAssign = false,
                                bool IgnoreImagAssign = false);

  /// EmitComplexExprIntoAddr - Emit the computation of the specified
  /// expression of complex type, storing into the specified Value*.
  void EmitComplexExprIntoAddr(const Expr *E, llvm::Value *DestAddr,
                               bool DestIsVolatile);

  /// StoreComplexToAddr - Store a complex number into the specified address.
  void StoreComplexToAddr(ComplexPairTy V, llvm::Value *DestAddr,
                          bool DestIsVolatile);

  /// LoadComplexFromAddr - Load a complex number from the specified address.
  ComplexPairTy LoadComplexFromAddr(llvm::Value *SrcAddr, bool SrcIsVolatile);

  /// CreateStaticVarDecl - Create a zero-initialized LLVM global for
  /// a static local variable.
  llvm::GlobalVariable *CreateStaticVarDecl(const VarDecl &D,
                                            const char *Separator,
                                      llvm::GlobalValue::LinkageTypes Linkage);

  /// AddInitializerToStaticVarDecl - Add the initializer for 'D' to the
  /// global variable that has already been created for it. If the initializer
  /// has a different type than GV does, this may free GV and return a
  /// different one. Otherwise it just returns GV.
  llvm::GlobalVariable *
  AddInitializerToStaticVarDecl(const VarDecl &D,
                                llvm::GlobalVariable *GV);

  /// EmitCXXGlobalVarDeclInit - Create the initializer for a C++
  /// variable with global storage.
  void EmitCXXGlobalVarDeclInit(const VarDecl &D, llvm::Constant *DeclPtr);

  /// EmitCXXGlobalDtorRegistration - Emits a call to register the global ptr
  /// with the C++ runtime so that its destructor will be called at exit.
  void EmitCXXGlobalDtorRegistration(llvm::Constant *DtorFn,
                                     llvm::Constant *DeclPtr);

  /// EmitCXXStaticLocalInit - Emit code to initialize the C++ static local
  /// variable 'D', whose storage is the already-created global 'DeclPtr'.
  void EmitCXXStaticLocalInit(const VarDecl &D, llvm::GlobalVariable *DeclPtr);

  /// GenerateCXXGlobalInitFunc - Generates code for initializing global
  /// variables.
  void GenerateCXXGlobalInitFunc(llvm::Function *Fn,
                                 llvm::Constant **Decls,
                                 unsigned NumDecls);

  /// GenerateCXXGlobalDtorFunc - Generates code for destroying global
  /// variables.
  void GenerateCXXGlobalDtorFunc(llvm::Function *Fn,
                                 const std::vector<std::pair<llvm::WeakVH,
                                 llvm::Constant*> > &DtorsAndObjects);

  /// GenerateCXXGlobalVarDeclInitFunc - Generates initialization code for the
  /// single global variable 'D' into the function 'Fn'.
  void GenerateCXXGlobalVarDeclInitFunc(llvm::Function *Fn, const VarDecl *D);

  /// EmitCXXConstructExpr - Emit a C++ construct-expression, constructing the
  /// result into the given aggregate slot.
  void EmitCXXConstructExpr(const CXXConstructExpr *E, AggValueSlot Dest);

  /// EmitCXXExprWithTemporaries - Emit an expression that involves C++
  /// temporaries; the result (if any) is produced into 'Slot', which defaults
  /// to being ignored.
  RValue EmitCXXExprWithTemporaries(const CXXExprWithTemporaries *E,
                                    AggValueSlot Slot
                                      = AggValueSlot::ignored());

  /// EmitCXXThrowExpr - Emit a C++ 'throw' expression.
  void EmitCXXThrowExpr(const CXXThrowExpr *E);

  //===--------------------------------------------------------------------===//
  //                             Internal Helpers
  //===--------------------------------------------------------------------===//

  /// ContainsLabel - Return true if the statement contains a label in it.  If
  /// this statement is not executed normally, it not containing a label means
  /// that we can just remove the code.
  static bool ContainsLabel(const Stmt *S, bool IgnoreCaseStmts = false);

  /// ConstantFoldsToSimpleInteger - If the specified expression does not fold
  /// to a constant, or if it does but contains a label, return 0.  If it
  /// constant folds to 'true' and does not contain a label, return 1, if it
  /// constant folds to 'false' and does not contain a label, return -1.
  int ConstantFoldsToSimpleInteger(const Expr *Cond);

  /// EmitBranchOnBoolExpr - Emit a branch on a boolean condition (e.g. for an
  /// if statement) to the specified blocks.  Based on the condition, this
  /// might try to simplify the codegen of the conditional based on the branch.
  void EmitBranchOnBoolExpr(const Expr *Cond, llvm::BasicBlock *TrueBlock,
                            llvm::BasicBlock *FalseBlock);

  /// getTrapBB - Create a basic block that will call the trap intrinsic.  We'll
  /// generate a branch around the created basic block as necessary.
  llvm::BasicBlock *getTrapBB();

  /// EmitCallArg - Emit a single call argument.
  RValue EmitCallArg(const Expr *E, QualType ArgType);

  /// EmitDelegateCallArg - We are performing a delegate call; that
  /// is, the current function is delegating to another one.  Produce
  /// a r-value suitable for passing the given parameter.
  RValue EmitDelegateCallArg(const VarDecl *Param);

private:
  /// EmitReturnOfRValue - Emit a return of the given r-value, which has the
  /// given type 'Ty'.
  void EmitReturnOfRValue(RValue RV, QualType Ty);

  /// ExpandTypeFromArgs - Reconstruct a structure of type \arg Ty
  /// from function arguments into \arg Dst.  See ABIArgInfo::Expand.
  ///
  /// \param AI - The first function argument of the expansion.
  /// \return The argument following the last expanded function
  /// argument.
  llvm::Function::arg_iterator
  ExpandTypeFromArgs(QualType Ty, LValue Dst,
                     llvm::Function::arg_iterator AI);

  /// ExpandTypeToArgs - Expand an RValue \arg Src, with the LLVM type for \arg
  /// Ty, into individual arguments on the provided vector \arg Args.  See
  /// ABIArgInfo::Expand.
  void ExpandTypeToArgs(QualType Ty, RValue Src,
                        llvm::SmallVector<llvm::Value*, 16> &Args);

  /// EmitAsmInput - Emit the value of a single inline-asm input operand
  /// described by 'Info'; 'ConstraintStr' accumulates constraint text.
  llvm::Value* EmitAsmInput(const AsmStmt &S,
                            const TargetInfo::ConstraintInfo &Info,
                            const Expr *InputExpr, std::string &ConstraintStr);

  /// EmitAsmInputLValue - Like EmitAsmInput, but for an operand that has
  /// already been evaluated to an l-value of the given type.
  llvm::Value* EmitAsmInputLValue(const AsmStmt &S,
                                  const TargetInfo::ConstraintInfo &Info,
                                  LValue InputValue, QualType InputType,
                                  std::string &ConstraintStr);

  /// EmitCallArgs - Emit call arguments for a function.
  /// The CallArgTypeInfo parameter is used for iterating over the known
  /// argument types of the function being called; it may be null, in which
  /// case every argument is emitted using its own expression type.
  template<typename T>
  void EmitCallArgs(CallArgList& Args, const T* CallArgTypeInfo,
                    CallExpr::const_arg_iterator ArgBeg,
                    CallExpr::const_arg_iterator ArgEnd) {
    CallExpr::const_arg_iterator Arg = ArgBeg;

    // First, use the argument types that the type info knows about.
    if (CallArgTypeInfo) {
      for (typename T::arg_type_iterator I = CallArgTypeInfo->arg_type_begin(),
           E = CallArgTypeInfo->arg_type_end(); I != E; ++I, ++Arg) {
        assert(Arg != ArgEnd && "Running over edge of argument list!");
        QualType ArgType = *I;
#ifndef NDEBUG
        // Debug-only sanity check: the declared parameter type must have the
        // same canonical type as the actual argument.  Pointers to variable
        // arrays are excused when the actual pointee's VLA carries no size
        // expression (ActualArgType is substituted with ArgType so the assert
        // below trivially passes in that case).
        QualType ActualArgType = Arg->getType();
        if (ArgType->isPointerType() && ActualArgType->isPointerType()) {
          QualType ActualBaseType =
            ActualArgType->getAs<PointerType>()->getPointeeType();
          QualType ArgBaseType =
            ArgType->getAs<PointerType>()->getPointeeType();
          if (ArgBaseType->isVariableArrayType()) {
            if (const VariableArrayType *VAT =
                  getContext().getAsVariableArrayType(ActualBaseType)) {
              if (!VAT->getSizeExpr())
                ActualArgType = ArgType;
            }
          }
        }
        assert(getContext().getCanonicalType(ArgType.getNonReferenceType()).
               getTypePtr() ==
               getContext().getCanonicalType(ActualArgType).getTypePtr() &&
               "type mismatch in call argument!");
#endif
        Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
                                      ArgType));
      }

      // Either we've emitted all the call args, or we have a call to a
      // variadic function.
      assert((Arg == ArgEnd || CallArgTypeInfo->isVariadic()) &&
             "Extra arguments in non-variadic function!");

    }

    // If we still have any arguments, emit them using the type of the
    // argument expression itself: the variadic tail of the call, or the whole
    // argument list when no type info was provided.
    for (; Arg != ArgEnd; ++Arg) {
      QualType ArgType = Arg->getType();
      Args.push_back(std::make_pair(EmitCallArg(*Arg, ArgType),
                                    ArgType));
    }
  }

  /// getTargetHooks - Convenience accessor for the target-specific code
  /// generation hooks held by the CodeGenModule.
  const TargetCodeGenInfo &getTargetHooks() const {
    return CGM.getTargetCodeGenInfo();
  }

  void EmitDeclMetadata();
};

/// CGBlockInfo - Information to generate a block literal.
class CGBlockInfo {
public:
  /// Name - The name of the block, kindof.
  const char *Name;

  /// DeclRefs - Variables from parent scopes that have been
  /// imported into this block.
  llvm::SmallVector<const BlockDeclRefExpr *, 8> DeclRefs;

  /// InnerBlocks - This block and the blocks it encloses.
  llvm::SmallPtrSet<const DeclContext *, 4> InnerBlocks;

  /// CXXThisRef - Non-null if 'this' was required somewhere, in
  /// which case this is that expression.
  const CXXThisExpr *CXXThisRef;

  /// NeedsObjCSelf - True if something in this block has an implicit
  /// reference to 'self'.
  bool NeedsObjCSelf;

  /// These are initialized by GenerateBlockFunction.
  bool BlockHasCopyDispose;
  CharUnits BlockSize;
  CharUnits BlockAlign;
  llvm::SmallVector<const Expr*, 8> BlockLayout;

  CGBlockInfo(const char *Name);
};

}  // end namespace CodeGen
}  // end namespace clang

#endif