//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope.  This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
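//
// For illustration (an editorial sketch, not part of the original header),
// a minimal piece of C++ source that requires both kinds of cleanup:
//
//   void f(bool b) {
//     std::string s;    // IR gen pushes a cleanup to run ~string()
//     if (b) throw 0;   // exceptional exit from the scope runs it
//   }                   // so does normal exit at the closing brace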
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    Address addr =
      CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr.getPointer(), ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
        llvm::StructType::get(V.first->getType(), V.second->getType());
    Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
    CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
    return saved_type(addr.getPointer(), ComplexAddress);
  }

  assert(rv.isAggregate());
  Address V = rv.getAggregateAddress(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V.getPointer()))
    return saved_type(V.getPointer(), AggregateLiteral,
                      V.getAlignment().getQuantity());

  Address addr =
    CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
  CGF.Builder.CreateStore(V.getPointer(), addr);
  return saved_type(addr.getPointer(), AggregateAddress,
                    V.getAlignment().getQuantity());
}

/// Given a saved r-value produced by saved_type::save above, perform the
/// code necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  auto getSavingAddress = [&](llvm::Value *value) {
    auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
    return Address(value, CharUnits::fromQuantity(alignment));
  };
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
  case AggregateLiteral:
    return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
  case AggregateAddress: {
    auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
    return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
  }
  case ComplexAddress: {
    Address address = getSavingAddress(Value);
    llvm::Value *real =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 0));
    llvm::Value *imag =
        CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 1));
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

/// Push an entry of the given size onto this protected-scope stack.
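///
/// Editorial note derived from the code below: the buffer is filled from the
/// end toward the front, so the innermost scope always begins at StartOfData
/// and the stack grows downward. A sketch of the layout after two pushes:
///
///   StartOfBuffer ... StartOfData [inner scope][outer scope] EndOfBuffer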
char *EHScopeStack::allocate(size_t Size) {
  Size = llvm::alignTo(Size, ScopeStackAlignment);
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

void EHScopeStack::deallocate(size_t Size) {
  StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
}

bool EHScopeStack::containsOnlyLifetimeMarkers(
    EHScopeStack::stable_iterator Old) const {
  for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
    if (!cleanup || !cleanup->isLifetimeMarker())
      return false;
  }

  return true;
}

bool EHScopeStack::requiresLandingPad() const {
  for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
    // Skip lifetime markers.
    if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
      if (cleanup->isLifetimeMarker()) {
        si = cleanup->getEnclosingEHScope();
        continue;
      }
    return true;
  }

  return false;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}


void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  bool IsLifetimeMarker = Kind & LifetimeMarker;
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();
  if (IsLifetimeMarker)
    Scope->setLifetimeMarker();

  return Scope->getCleanupBuffer();
}

void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  deallocate(Cleanup.getAllocatedSize());

  // Destroy the cleanup.
  Cleanup.Destroy();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack.  However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place.  We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup;  otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == nullptr)
    BranchFixups.pop_back();
}

Address CodeGenFunction::createCleanupActiveFlag() {
  // Create a variable to decide whether the cleanup needs to be run.
  Address active = CreateTempAllocaWithoutCast(
      Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  return active;
}

void CodeGenFunction::initFullExprCleanupWithFlag(Address ActiveFlag) {
  // Set that as the active flag in the cleanup.
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
  cleanup.setActiveFlag(ActiveFlag);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

static void createStoreInstBefore(llvm::Value *value, Address addr,
                                  llvm::Instruction *beforeInst) {
  auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
  store->setAlignment(addr.getAlignment().getAsAlign());
}

static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
                                            llvm::Instruction *beforeInst) {
  auto load = new llvm::LoadInst(addr.getElementType(), addr.getPointer(), name,
                                 beforeInst);
  load->setAlignment(addr.getAlignment().getAsAlign());
  return load;
}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == nullptr) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry.  This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == nullptr) {
      createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
                            CGF.getNormalCleanupDestSlot(),
                            Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination).second)
      continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::Instruction *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
                                     "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = nullptr;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
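///
/// Editorial note on ValuesToReload, derived from the loop below: it names
/// instructions the caller still wants to use after the cleanups have run.
/// If any popped cleanup had branches (so the old insertion point may no
/// longer dominate the new one), each such value is spilled to a temporary
/// alloca right after it is defined, reloaded at the current insertion
/// point, and the caller's pointer is updated to the reloaded value.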
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  assert(Old.isValid());

  bool HadBranches = false;
  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
    HadBranches |= Scope.hasBranches();

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }

  // If we didn't have any branches, the insertion point before cleanups must
  // dominate the current insertion point and we don't need to reload any
  // values.
  if (!HadBranches)
    return;

  // Spill and reload all values that the caller wants to be live at the current
  // insertion point.
  for (llvm::Value **ReloadedValue : ValuesToReload) {
    auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
    if (!Inst)
      continue;

    // Don't spill static allocas, they dominate all cleanups. These are created
    // by binding a reference to a local variable or temporary.
    auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
    if (AI && AI->isStaticAlloca())
      continue;

    Address Tmp =
        CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");

    // Find an insertion point after Inst and spill it to the temporary.
    llvm::BasicBlock::iterator InsertBefore;
    if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
      InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
    else
      InsertBefore = std::next(Inst->getIterator());
    CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);

    // Reload the value at the current insertion point.
    *ReloadedValue = Builder.CreateLoad(Tmp);
  }
}

/// Pops cleanup blocks until the given savepoint is reached, then adds the
/// cleanups recorded past the given position in the lifetime-extended
/// cleanups stack onto the EH stack.
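///
/// Editorial note, derived from the loop below: each entry on the
/// lifetime-extended stack is laid out as a LifetimeExtendedCleanupHeader,
/// followed by Header.getSize() bytes of cleanup data, optionally followed
/// by an Address active flag when the cleanup is conditional.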
void CodeGenFunction::PopCleanupBlocks(
    EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
    std::initializer_list<llvm::Value **> ValuesToReload) {
  PopCleanupBlocks(Old, ValuesToReload);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();

    if (Header.isConditional()) {
      Address ActiveFlag =
          reinterpret_cast<Address &>(LifetimeExtendedCleanupStack[I]);
      initFullExprCleanupWithFlag(ActiveFlag);
      I += sizeof(ActiveFlag);
    }
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough.  This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        Address ActiveFlag) {
  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = nullptr;
  if (ActiveFlag.isValid()) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag.isValid())
    CGF.EmitBlock(ContBB);
}

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::Instruction *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause such a block to come into
/// existence anyway;  if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = *i;
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}

/// Pops a cleanup block.  If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
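///
/// Editorial gloss (not from the original) on the terms used below: a
/// "branch-after" is a branch whose destination needs no further cleanups
/// once this one has run, so it can be routed out of this block directly; a
/// "branch-through" still has to pass through the enclosing normal cleanup;
/// a branch "fixup" is a branch whose destination scope is not yet known and
/// is threaded optimistically until it can be resolved.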
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  Address NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
                                          : Address::invalid();
  Address EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
                                      : Address::invalid();

  // Check whether we need an EH cleanup.  This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != nullptr));
  bool RequiresEHCleanup = (EHEntry != nullptr);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != nullptr && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope.  The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block.  If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out.  This uses either a stack
  // array or malloc'd memory, depending on the size, which is
  // behavior that SmallVector would provide, if we could use it
  // here. Unfortunately, if you ask for a SmallVector<char>, the
  // alignment isn't sufficient.
  auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
  alignas(EHScopeStack::ScopeStackAlignment) char
      CleanupBufferStack[8 * sizeof(void *)];
  std::unique_ptr<char[]> CleanupBufferHeap;
  size_t CleanupSize = Scope.getCleanupSize();
  EHScopeStack::Cleanup *Fn;

  if (CleanupSize <= sizeof(CleanupBufferStack)) {
    memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
  } else {
    CleanupBufferHeap.reset(new char[CleanupSize]);
    memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
    Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
  }

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  if (!RequiresNormalCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      destroyOptimisticNormalEntry(*this, Scope);
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I.  Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index.  For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II.  Emit the entry block.  This implicitly branches to it if
      // we have fallthrough.  All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III.  Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = nullptr;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = nullptr;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // Clean up the possibly dead store to the cleanup dest slot.
        llvm::Instruction *NormalCleanupDestSlot =
            cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
        if (NormalCleanupDestSlot->hasOneUse()) {
          NormalCleanupDestSlot->user_back()->eraseFromParent();
          NormalCleanupDestSlot->eraseFromParent();
          NormalCleanupDest = Address::invalid();
        }

        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
                               nullptr);
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV.  Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup epilogue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
                                getNormalCleanupDestSlot(),
                                Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V.  Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI.  Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
               I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);

    // Push a terminate scope or cleanupendpad scope around the potentially
    // throwing cleanups. For funclet EH personalities, the cleanupendpad models
    // program termination when cleanups throw.
    bool PushedTerminate = false;
    SaveAndRestore<llvm::Instruction *> RestoreCurrentFuncletPad(
        CurrentFuncletPad);
    llvm::CleanupPadInst *CPI = nullptr;

    const EHPersonality &Personality = EHPersonality::get(*this);
    if (Personality.usesFuncletPads()) {
      llvm::Value *ParentPad = CurrentFuncletPad;
      if (!ParentPad)
        ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
      CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
    }

    // Non-MSVC personalities need to terminate when an EH cleanup throws.
    if (!Personality.isMSVCPersonality()) {
      EHStack.pushTerminate();
      PushedTerminate = true;
    }

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag.isValid() || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    if (CPI)
      Builder.CreateCleanupRet(CPI, NextAction);
    else
      Builder.CreateBr(NextAction);

    // Leave the terminate scope.
    if (PushedTerminate)
      EHStack.popTerminate();

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run.  'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}


/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope.  The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
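///
/// For illustration, a small editorial sketch (not from the original) of
/// source that reaches this path:
///
///   for (;;) {
///     std::string s;
///     if (cond()) break;   // the break must run ~string() before leaving
///   }
///
/// The branch for 'break' is emitted here, the destination index is stored
/// in the cleanup-dest slot, and the branch is rethreaded to the innermost
/// normal cleanup entry.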
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = nullptr;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it.  If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup();
         I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state.  Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
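///
/// Editorial note: the "cleanup variable" is the i1 active flag created
/// below; when present, EmitCleanup loads it at the cleanup entry and skips
/// the cleanup body when it is false, which is how a scope that is only
/// conditionally active gets modelled.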
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() &&
      (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //  - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag) return;

  Address var = Scope.getActiveFlag();
  if (!var.isValid()) {
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
                               "cleanup.isactive");
    Scope.setActiveFlag(var);

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var);
    } else {
      createStoreInstBefore(value, var, dominatingIP);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it, but do so only if it belongs
  // to the current RunCleanupsScope.
  if (C == EHStack.stable_begin() &&
      CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}

Address CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest.isValid())
    NormalCleanupDest =
      CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       Address Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}