1 //===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file contains code dealing with the IR generation for cleanups
11 // and related information.
12 //
13 // A "cleanup" is a piece of code which needs to be executed whenever
14 // control transfers out of a particular scope.  This can be
15 // conditionalized to occur only on exceptional control flow, only on
16 // normal control flow, or both.
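//
// For example (a hypothetical illustration), both exits from the
// block below must run the string's destructor:
//
//   void f() {
//     std::string s;
//     mayThrow();  // the cleanup runs on the exceptional edge here...
//   }              // ...and on the normal exit here.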
17 //
18 //===----------------------------------------------------------------------===//
19 
20 #include "CodeGenFunction.h"
21 #include "CGCleanup.h"
22 
23 using namespace clang;
24 using namespace CodeGen;
25 
26 bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
27   if (rv.isScalar())
28     return DominatingLLVMValue::needsSaving(rv.getScalarVal());
29   if (rv.isAggregate())
30     return DominatingLLVMValue::needsSaving(rv.getAggregateAddr());
31   return true;
32 }
33 
34 DominatingValue<RValue>::saved_type
35 DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
36   if (rv.isScalar()) {
37     llvm::Value *V = rv.getScalarVal();
38 
39     // These automatically dominate and don't need to be saved.
40     if (!DominatingLLVMValue::needsSaving(V))
41       return saved_type(V, ScalarLiteral);
42 
43     // Everything else needs an alloca.
44     llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
45     CGF.Builder.CreateStore(V, addr);
46     return saved_type(addr, ScalarAddress);
47   }
48 
49   if (rv.isComplex()) {
50     CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
51     const llvm::Type *ComplexTy =
52       llvm::StructType::get(CGF.getLLVMContext(),
53                             V.first->getType(), V.second->getType(),
54                             (void*) 0);
55     llvm::Value *addr = CGF.CreateTempAlloca(ComplexTy, "saved-complex");
56     CGF.StoreComplexToAddr(V, addr, /*volatile*/ false);
57     return saved_type(addr, ComplexAddress);
58   }
59 
60   assert(rv.isAggregate());
61   llvm::Value *V = rv.getAggregateAddr(); // TODO: volatile?
62   if (!DominatingLLVMValue::needsSaving(V))
63     return saved_type(V, AggregateLiteral);
64 
65   llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
66   CGF.Builder.CreateStore(V, addr);
67   return saved_type(addr, AggregateAddress);
68 }
69 
/// Given a saved r-value produced by save(), emit the code necessary
/// to restore it to usability at the current insertion point.
73 RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
74   switch (K) {
75   case ScalarLiteral:
76     return RValue::get(Value);
77   case ScalarAddress:
78     return RValue::get(CGF.Builder.CreateLoad(Value));
79   case AggregateLiteral:
80     return RValue::getAggregate(Value);
81   case AggregateAddress:
82     return RValue::getAggregate(CGF.Builder.CreateLoad(Value));
83   case ComplexAddress:
84     return RValue::getComplex(CGF.LoadComplexFromAddr(Value, false));
85   }
86 
87   llvm_unreachable("bad saved r-value kind");
89 }
90 
91 /// Push an entry of the given size onto this protected-scope stack.
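///
/// The buffer grows downward: scopes are allocated at decreasing
/// addresses, so the innermost scope always begins at StartOfData.
/// A rough sketch of the layout (illustrative only):
///
///   StartOfBuffer ...free... StartOfData [scope][scope] EndOfBuffer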
92 char *EHScopeStack::allocate(size_t Size) {
93   if (!StartOfBuffer) {
94     unsigned Capacity = 1024;
95     while (Capacity < Size) Capacity *= 2;
96     StartOfBuffer = new char[Capacity];
97     StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
98   } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
99     unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
100     unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
101 
102     unsigned NewCapacity = CurrentCapacity;
103     do {
104       NewCapacity *= 2;
105     } while (NewCapacity < UsedCapacity + Size);
106 
107     char *NewStartOfBuffer = new char[NewCapacity];
108     char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
109     char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
110     memcpy(NewStartOfData, StartOfData, UsedCapacity);
111     delete [] StartOfBuffer;
112     StartOfBuffer = NewStartOfBuffer;
113     EndOfBuffer = NewEndOfBuffer;
114     StartOfData = NewStartOfData;
115   }
116 
117   assert(StartOfBuffer + Size <= StartOfData);
118   StartOfData -= Size;
119   return StartOfData;
120 }
121 
122 EHScopeStack::stable_iterator
123 EHScopeStack::getEnclosingEHCleanup(iterator it) const {
124   assert(it != end());
125   do {
126     if (isa<EHCleanupScope>(*it)) {
127       if (cast<EHCleanupScope>(*it).isEHCleanup())
128         return stabilize(it);
129       return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
130     }
131     ++it;
132   } while (it != end());
133   return stable_end();
134 }
135 
137 void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
138   assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
139   char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
140   bool IsNormalCleanup = Kind & NormalCleanup;
141   bool IsEHCleanup = Kind & EHCleanup;
142   bool IsActive = !(Kind & InactiveCleanup);
143   EHCleanupScope *Scope =
144     new (Buffer) EHCleanupScope(IsNormalCleanup,
145                                 IsEHCleanup,
146                                 IsActive,
147                                 Size,
148                                 BranchFixups.size(),
149                                 InnermostNormalCleanup,
150                                 InnermostEHCleanup);
151   if (IsNormalCleanup)
152     InnermostNormalCleanup = stable_begin();
153   if (IsEHCleanup)
154     InnermostEHCleanup = stable_begin();
155 
156   return Scope->getCleanupBuffer();
157 }
158 
159 void EHScopeStack::popCleanup() {
160   assert(!empty() && "popping exception stack when not empty");
161 
162   assert(isa<EHCleanupScope>(*begin()));
163   EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
164   InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
165   InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
166   StartOfData += Cleanup.getAllocatedSize();
167 
168   if (empty()) NextEHDestIndex = FirstEHDestIndex;
169 
170   // Destroy the cleanup.
171   Cleanup.~EHCleanupScope();
172 
173   // Check whether we can shrink the branch-fixups stack.
174   if (!BranchFixups.empty()) {
175     // If we no longer have any normal cleanups, all the fixups are
176     // complete.
177     if (!hasNormalCleanups())
178       BranchFixups.clear();
179 
180     // Otherwise we can still trim out unnecessary nulls.
181     else
182       popNullFixups();
183   }
184 }
185 
186 EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) {
187   char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters));
188   CatchDepth++;
189   return new (Buffer) EHFilterScope(NumFilters);
190 }
191 
192 void EHScopeStack::popFilter() {
193   assert(!empty() && "popping exception stack when not empty");
194 
195   EHFilterScope &Filter = cast<EHFilterScope>(*begin());
196   StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());
197 
198   if (empty()) NextEHDestIndex = FirstEHDestIndex;
199 
200   assert(CatchDepth > 0 && "mismatched filter push/pop");
201   CatchDepth--;
202 }
203 
204 EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
205   char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
206   CatchDepth++;
207   EHCatchScope *Scope = new (Buffer) EHCatchScope(NumHandlers);
208   for (unsigned I = 0; I != NumHandlers; ++I)
209     Scope->getHandlers()[I].Index = getNextEHDestIndex();
210   return Scope;
211 }
212 
213 void EHScopeStack::pushTerminate() {
214   char *Buffer = allocate(EHTerminateScope::getSize());
215   CatchDepth++;
216   new (Buffer) EHTerminateScope(getNextEHDestIndex());
217 }
218 
219 /// Remove any 'null' fixups on the stack.  However, we can't pop more
220 /// fixups than the fixup depth on the innermost normal cleanup, or
221 /// else fixups that we try to add to that cleanup will end up in the
222 /// wrong place.  We *could* try to shrink fixup depths, but that's
223 /// actually a lot of work for little benefit.
224 void EHScopeStack::popNullFixups() {
  // We expect this to be called only while there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
227   assert(hasNormalCleanups());
228 
229   EHScopeStack::iterator it = find(InnermostNormalCleanup);
230   unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
231   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
232 
233   while (BranchFixups.size() > MinSize &&
234          BranchFixups.back().Destination == 0)
235     BranchFixups.pop_back();
236 }
237 
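/// Set up an active flag for the cleanup most recently pushed as part
/// of a conditionally-evaluated subexpression of a full-expression.
/// For example (hypothetical):
///
///   b && f(Temp());
///
/// Temp's destructor must run only if the right-hand side was actually
/// evaluated, so the cleanup tests a flag that is set on exactly that
/// path.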
238 void CodeGenFunction::initFullExprCleanup() {
239   // Create a variable to decide whether the cleanup needs to be run.
240   llvm::AllocaInst *active
241     = CreateTempAlloca(Builder.getInt1Ty(), "cleanup.cond");
242 
243   // Initialize it to false at a site that's guaranteed to be run
244   // before each evaluation.
245   llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
246   new llvm::StoreInst(Builder.getFalse(), active, &block->back());
247 
248   // Initialize it to true at the current location.
249   Builder.CreateStore(Builder.getTrue(), active);
250 
251   // Set that as the active flag in the cleanup.
252   EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
253   assert(cleanup.getActiveFlag() == 0 && "cleanup already has active flag?");
254   cleanup.setActiveFlag(active);
255 
256   if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
257   if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
258 }
259 
260 EHScopeStack::Cleanup::~Cleanup() {
261   llvm_unreachable("Cleanup is indestructable");
262 }
263 
264 /// All the branch fixups on the EH stack have propagated out past the
265 /// outermost normal cleanup; resolve them all by adding cases to the
266 /// given switch instruction.
267 static void ResolveAllBranchFixups(CodeGenFunction &CGF,
268                                    llvm::SwitchInst *Switch,
269                                    llvm::BasicBlock *CleanupEntry) {
270   llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;
271 
272   for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
273     // Skip this fixup if its destination isn't set.
274     BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
275     if (Fixup.Destination == 0) continue;
276 
277     // If there isn't an OptimisticBranchBlock, then InitialBranch is
278     // still pointing directly to its destination; forward it to the
279     // appropriate cleanup entry.  This is required in the specific
280     // case of
281     //   { std::string s; goto lbl; }
282     //   lbl:
283     // i.e. where there's an unresolved fixup inside a single cleanup
284     // entry which we're currently popping.
285     if (Fixup.OptimisticBranchBlock == 0) {
286       new llvm::StoreInst(CGF.Builder.getInt32(Fixup.DestinationIndex),
287                           CGF.getNormalCleanupDestSlot(),
288                           Fixup.InitialBranch);
289       Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
290     }
291 
292     // Don't add this case to the switch statement twice.
293     if (!CasesAdded.insert(Fixup.Destination)) continue;
294 
295     Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
296                     Fixup.Destination);
297   }
298 
299   CGF.EHStack.clearFixups();
300 }
301 
302 /// Transitions the terminator of the given exit-block of a cleanup to
303 /// be a cleanup switch.
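///
/// Schematically (an IR sketch, assuming the destination index was
/// stored into the cleanup destination slot earlier):
///
///   before:  br label %next
///   after:   %cleanup.dest = load i32* %cleanup.dest.slot
///            switch i32 %cleanup.dest, label %next [ ... ]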
304 static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
305                                                    llvm::BasicBlock *Block) {
306   // If it's a branch, turn it into a switch whose default
307   // destination is its original target.
308   llvm::TerminatorInst *Term = Block->getTerminator();
309   assert(Term && "can't transition block without terminator");
310 
311   if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
312     assert(Br->isUnconditional());
313     llvm::LoadInst *Load =
314       new llvm::LoadInst(CGF.getNormalCleanupDestSlot(), "cleanup.dest", Term);
315     llvm::SwitchInst *Switch =
316       llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
317     Br->eraseFromParent();
318     return Switch;
319   } else {
320     return cast<llvm::SwitchInst>(Term);
321   }
322 }
323 
324 void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
325   assert(Block && "resolving a null target block");
326   if (!EHStack.getNumBranchFixups()) return;
327 
328   assert(EHStack.hasNormalCleanups() &&
329          "branch fixups exist with no normal cleanups on stack");
330 
331   llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
332   bool ResolvedAny = false;
333 
334   for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
335     // Skip this fixup if its destination doesn't match.
336     BranchFixup &Fixup = EHStack.getBranchFixup(I);
337     if (Fixup.Destination != Block) continue;
338 
339     Fixup.Destination = 0;
340     ResolvedAny = true;
341 
    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
344     llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
345     if (!BranchBB)
346       continue;
347 
348     // Don't process the same optimistic branch block twice.
349     if (!ModifiedOptimisticBlocks.insert(BranchBB))
350       continue;
351 
352     llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);
353 
354     // Add a case to the switch.
355     Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
356   }
357 
358   if (ResolvedAny)
359     EHStack.popNullFixups();
360 }
361 
362 /// Pops cleanup blocks until the given savepoint is reached.
363 void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
364   assert(Old.isValid());
365 
366   while (EHStack.stable_begin() != Old) {
367     EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
368 
    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup through
    // which fallthrough can propagate.
372     bool FallThroughIsBranchThrough =
373       Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());
374 
375     PopCleanupBlock(FallThroughIsBranchThrough);
376   }
377 }
378 
379 static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
380                                            EHCleanupScope &Scope) {
381   assert(Scope.isNormalCleanup());
382   llvm::BasicBlock *Entry = Scope.getNormalBlock();
383   if (!Entry) {
384     Entry = CGF.createBasicBlock("cleanup");
385     Scope.setNormalBlock(Entry);
386   }
387   return Entry;
388 }
389 
390 static llvm::BasicBlock *CreateEHEntry(CodeGenFunction &CGF,
391                                        EHCleanupScope &Scope) {
392   assert(Scope.isEHCleanup());
393   llvm::BasicBlock *Entry = Scope.getEHBlock();
394   if (!Entry) {
395     Entry = CGF.createBasicBlock("eh.cleanup");
396     Scope.setEHBlock(Entry);
397   }
398   return Entry;
399 }
400 
401 /// Attempts to reduce a cleanup's entry block to a fallthrough.  This
402 /// is basically llvm::MergeBlockIntoPredecessor, except
403 /// simplified/optimized for the tighter constraints on cleanup blocks.
404 ///
405 /// Returns the new block, whatever it is.
406 static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
407                                               llvm::BasicBlock *Entry) {
408   llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
409   if (!Pred) return Entry;
410 
411   llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
412   if (!Br || Br->isConditional()) return Entry;
413   assert(Br->getSuccessor(0) == Entry);
414 
415   // If we were previously inserting at the end of the cleanup entry
416   // block, we'll need to continue inserting at the end of the
417   // predecessor.
418   bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
419   assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());
420 
421   // Kill the branch.
422   Br->eraseFromParent();
423 
424   // Merge the blocks.
425   Pred->getInstList().splice(Pred->end(), Entry->getInstList());
426 
427   // Replace all uses of the entry with the predecessor, in case there
428   // are phis in the cleanup.
429   Entry->replaceAllUsesWith(Pred);
430 
431   // Kill the entry block.
432   Entry->eraseFromParent();
433 
434   if (WasInsertBlock)
435     CGF.Builder.SetInsertPoint(Pred);
436 
437   return Pred;
438 }
439 
440 static void EmitCleanup(CodeGenFunction &CGF,
441                         EHScopeStack::Cleanup *Fn,
442                         bool ForEH,
443                         llvm::Value *ActiveFlag) {
444   // EH cleanups always occur within a terminate scope.
445   if (ForEH) CGF.EHStack.pushTerminate();
446 
447   // If there's an active flag, load it and skip the cleanup if it's
448   // false.
449   llvm::BasicBlock *ContBB = 0;
450   if (ActiveFlag) {
451     ContBB = CGF.createBasicBlock("cleanup.done");
452     llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
453     llvm::Value *IsActive
454       = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
455     CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
456     CGF.EmitBlock(CleanupBB);
457   }
458 
459   // Ask the cleanup to emit itself.
460   Fn->Emit(CGF, ForEH);
461   assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
462 
463   // Emit the continuation block if there was an active flag.
464   if (ActiveFlag)
465     CGF.EmitBlock(ContBB);
466 
467   // Leave the terminate scope.
468   if (ForEH) CGF.EHStack.popTerminate();
469 }
470 
471 static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
472                                           llvm::BasicBlock *From,
473                                           llvm::BasicBlock *To) {
474   // Exit is the exit block of a cleanup, so it always terminates in
475   // an unconditional branch or a switch.
476   llvm::TerminatorInst *Term = Exit->getTerminator();
477 
478   if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
479     assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
480     Br->setSuccessor(0, To);
481   } else {
482     llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
483     for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
484       if (Switch->getSuccessor(I) == From)
485         Switch->setSuccessor(I, To);
486   }
487 }
488 
489 /// Pops a cleanup block.  If the block includes a normal cleanup, the
490 /// current insertion point is threaded through the cleanup, as are
491 /// any branch fixups on the cleanup.
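///
/// In the terminology used here, a "branch-after" is a branch out of
/// the cleanup that resolves at this level, while a "branch-through"
/// must continue through the enclosing normal cleanup before it can
/// resolve.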
492 void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
493   assert(!EHStack.empty() && "cleanup stack is empty!");
494   assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
495   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
496   assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
497 
498   // Remember activation information.
499   bool IsActive = Scope.isActive();
500   llvm::Value *NormalActiveFlag =
501     Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag() : 0;
502   llvm::Value *EHActiveFlag =
503     Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag() : 0;
504 
505   // Check whether we need an EH cleanup.  This is only true if we've
506   // generated a lazy EH cleanup block.
507   bool RequiresEHCleanup = Scope.hasEHBranches();
508 
509   // Check the three conditions which might require a normal cleanup:
510 
511   // - whether there are branch fix-ups through this cleanup
512   unsigned FixupDepth = Scope.getFixupDepth();
513   bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
514 
515   // - whether there are branch-throughs or branch-afters
516   bool HasExistingBranches = Scope.hasBranches();
517 
518   // - whether there's a fallthrough
519   llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
520   bool HasFallthrough = (FallthroughSource != 0 && IsActive);
521 
522   // Branch-through fall-throughs leave the insertion point set to the
523   // end of the last cleanup, which points to the current scope.  The
524   // rest of IR gen doesn't need to worry about this; it only happens
525   // during the execution of PopCleanupBlocks().
526   bool HasPrebranchedFallthrough =
527     (FallthroughSource && FallthroughSource->getTerminator());
528 
529   // If this is a normal cleanup, then having a prebranched
530   // fallthrough implies that the fallthrough source unconditionally
531   // jumps here.
532   assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
533          (Scope.getNormalBlock() &&
534           FallthroughSource->getTerminator()->getSuccessor(0)
535             == Scope.getNormalBlock()));
536 
537   bool RequiresNormalCleanup = false;
538   if (Scope.isNormalCleanup() &&
539       (HasFixups || HasExistingBranches || HasFallthrough)) {
540     RequiresNormalCleanup = true;
541   }
542 
543   // Even if we don't need the normal cleanup, we might still have
544   // prebranched fallthrough to worry about.
545   if (Scope.isNormalCleanup() && !RequiresNormalCleanup &&
546       HasPrebranchedFallthrough) {
547     assert(!IsActive);
548 
549     llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
550 
551     // If we're branching through this cleanup, just forward the
552     // prebranched fallthrough to the next cleanup, leaving the insert
553     // point in the old block.
554     if (FallthroughIsBranchThrough) {
555       EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
556       llvm::BasicBlock *EnclosingEntry =
557         CreateNormalEntry(*this, cast<EHCleanupScope>(S));
558 
559       ForwardPrebranchedFallthrough(FallthroughSource,
560                                     NormalEntry, EnclosingEntry);
561       assert(NormalEntry->use_empty() &&
562              "uses of entry remain after forwarding?");
563       delete NormalEntry;
564 
565     // Otherwise, we're branching out;  just emit the next block.
566     } else {
567       EmitBlock(NormalEntry);
568       SimplifyCleanupEntry(*this, NormalEntry);
569     }
570   }
571 
572   // If we don't need the cleanup at all, we're done.
573   if (!RequiresNormalCleanup && !RequiresEHCleanup) {
574     EHStack.popCleanup(); // safe because there are no fixups
575     assert(EHStack.getNumBranchFixups() == 0 ||
576            EHStack.hasNormalCleanups());
577     return;
578   }
579 
580   // Copy the cleanup emission data out.  Note that SmallVector
581   // guarantees maximal alignment for its buffer regardless of its
582   // type parameter.
583   llvm::SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
584   CleanupBuffer.reserve(Scope.getCleanupSize());
585   memcpy(CleanupBuffer.data(),
586          Scope.getCleanupBuffer(), Scope.getCleanupSize());
587   CleanupBuffer.set_size(Scope.getCleanupSize());
588   EHScopeStack::Cleanup *Fn =
589     reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());
590 
591   // We want to emit the EH cleanup after the normal cleanup, but go
592   // ahead and do the setup for the EH cleanup while the scope is still
593   // alive.
594   llvm::BasicBlock *EHEntry = 0;
595   llvm::SmallVector<llvm::Instruction*, 2> EHInstsToAppend;
596   if (RequiresEHCleanup) {
597     EHEntry = CreateEHEntry(*this, Scope);
598 
599     // Figure out the branch-through dest if necessary.
600     llvm::BasicBlock *EHBranchThroughDest = 0;
601     if (Scope.hasEHBranchThroughs()) {
602       assert(Scope.getEnclosingEHCleanup() != EHStack.stable_end());
603       EHScope &S = *EHStack.find(Scope.getEnclosingEHCleanup());
604       EHBranchThroughDest = CreateEHEntry(*this, cast<EHCleanupScope>(S));
605     }
606 
607     // If we have exactly one branch-after and no branch-throughs, we
608     // can dispatch it without a switch.
609     if (!Scope.hasEHBranchThroughs() &&
610         Scope.getNumEHBranchAfters() == 1) {
611       assert(!EHBranchThroughDest);
612 
613       // TODO: remove the spurious eh.cleanup.dest stores if this edge
614       // never went through any switches.
615       llvm::BasicBlock *BranchAfterDest = Scope.getEHBranchAfterBlock(0);
616       EHInstsToAppend.push_back(llvm::BranchInst::Create(BranchAfterDest));
617 
618     // Otherwise, if we have any branch-afters, we need a switch.
619     } else if (Scope.getNumEHBranchAfters()) {
620       // The default of the switch belongs to the branch-throughs if
621       // they exist.
622       llvm::BasicBlock *Default =
623         (EHBranchThroughDest ? EHBranchThroughDest : getUnreachableBlock());
624 
625       const unsigned SwitchCapacity = Scope.getNumEHBranchAfters();
626 
627       llvm::LoadInst *Load =
628         new llvm::LoadInst(getEHCleanupDestSlot(), "cleanup.dest");
629       llvm::SwitchInst *Switch =
630         llvm::SwitchInst::Create(Load, Default, SwitchCapacity);
631 
632       EHInstsToAppend.push_back(Load);
633       EHInstsToAppend.push_back(Switch);
634 
635       for (unsigned I = 0, E = Scope.getNumEHBranchAfters(); I != E; ++I)
636         Switch->addCase(Scope.getEHBranchAfterIndex(I),
637                         Scope.getEHBranchAfterBlock(I));
638 
639     // Otherwise, we have only branch-throughs; jump to the next EH
640     // cleanup.
641     } else {
642       assert(EHBranchThroughDest);
643       EHInstsToAppend.push_back(llvm::BranchInst::Create(EHBranchThroughDest));
644     }
645   }
646 
647   if (!RequiresNormalCleanup) {
648     EHStack.popCleanup();
649   } else {
650     // If we have a fallthrough and no other need for the cleanup,
651     // emit it directly.
652     if (HasFallthrough && !HasPrebranchedFallthrough &&
653         !HasFixups && !HasExistingBranches) {
654 
655       // Fixups can cause us to optimistically create a normal block,
656       // only to later have no real uses for it.  Just delete it in
657       // this case.
658       // TODO: we can potentially simplify all the uses after this.
659       if (Scope.getNormalBlock()) {
660         Scope.getNormalBlock()->replaceAllUsesWith(getUnreachableBlock());
661         delete Scope.getNormalBlock();
662       }
663 
664       EHStack.popCleanup();
665 
666       EmitCleanup(*this, Fn, /*ForEH*/ false, NormalActiveFlag);
667 
668     // Otherwise, the best approach is to thread everything through
669     // the cleanup block and then try to clean up after ourselves.
670     } else {
671       // Force the entry block to exist.
672       llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);
673 
674       // I.  Set up the fallthrough edge in.
675 
676       // If there's a fallthrough, we need to store the cleanup
677       // destination index.  For fall-throughs this is always zero.
678       if (HasFallthrough) {
679         if (!HasPrebranchedFallthrough)
680           Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());
681 
682       // Otherwise, clear the IP if we don't have fallthrough because
683       // the cleanup is inactive.  We don't need to save it because
684       // it's still just FallthroughSource.
685       } else if (FallthroughSource) {
686         assert(!IsActive && "source without fallthrough for active cleanup");
687         Builder.ClearInsertionPoint();
688       }
689 
690       // II.  Emit the entry block.  This implicitly branches to it if
691       // we have fallthrough.  All the fixups and existing branches
692       // should already be branched to it.
693       EmitBlock(NormalEntry);
694 
695       // III.  Figure out where we're going and build the cleanup
696       // epilogue.
697 
698       bool HasEnclosingCleanups =
699         (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
700 
701       // Compute the branch-through dest if we need it:
702       //   - if there are branch-throughs threaded through the scope
703       //   - if fall-through is a branch-through
704       //   - if there are fixups that will be optimistically forwarded
705       //     to the enclosing cleanup
706       llvm::BasicBlock *BranchThroughDest = 0;
707       if (Scope.hasBranchThroughs() ||
708           (FallthroughSource && FallthroughIsBranchThrough) ||
709           (HasFixups && HasEnclosingCleanups)) {
710         assert(HasEnclosingCleanups);
711         EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
712         BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
713       }
714 
715       llvm::BasicBlock *FallthroughDest = 0;
716       llvm::SmallVector<llvm::Instruction*, 2> InstsToAppend;
717 
718       // If there's exactly one branch-after and no other threads,
719       // we can route it without a switch.
720       if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
721           Scope.getNumBranchAfters() == 1) {
722         assert(!BranchThroughDest || !IsActive);
723 
724         // TODO: clean up the possibly dead stores to the cleanup dest slot.
725         llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
726         InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));
727 
728       // Build a switch-out if we need it:
729       //   - if there are branch-afters threaded through the scope
730       //   - if fall-through is a branch-after
731       //   - if there are fixups that have nowhere left to go and
732       //     so must be immediately resolved
733       } else if (Scope.getNumBranchAfters() ||
734                  (HasFallthrough && !FallthroughIsBranchThrough) ||
735                  (HasFixups && !HasEnclosingCleanups)) {
736 
737         llvm::BasicBlock *Default =
738           (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());
739 
740         // TODO: base this on the number of branch-afters and fixups
741         const unsigned SwitchCapacity = 10;
742 
743         llvm::LoadInst *Load =
744           new llvm::LoadInst(getNormalCleanupDestSlot(), "cleanup.dest");
745         llvm::SwitchInst *Switch =
746           llvm::SwitchInst::Create(Load, Default, SwitchCapacity);
747 
748         InstsToAppend.push_back(Load);
749         InstsToAppend.push_back(Switch);
750 
751         // Branch-after fallthrough.
752         if (FallthroughSource && !FallthroughIsBranchThrough) {
753           FallthroughDest = createBasicBlock("cleanup.cont");
754           if (HasFallthrough)
755             Switch->addCase(Builder.getInt32(0), FallthroughDest);
756         }
757 
758         for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
759           Switch->addCase(Scope.getBranchAfterIndex(I),
760                           Scope.getBranchAfterBlock(I));
761         }
762 
763         // If there aren't any enclosing cleanups, we can resolve all
764         // the fixups now.
765         if (HasFixups && !HasEnclosingCleanups)
766           ResolveAllBranchFixups(*this, Switch, NormalEntry);
767       } else {
768         // We should always have a branch-through destination in this case.
769         assert(BranchThroughDest);
770         InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
771       }
772 
773       // IV.  Pop the cleanup and emit it.
774       EHStack.popCleanup();
775       assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
776 
777       EmitCleanup(*this, Fn, /*ForEH*/ false, NormalActiveFlag);
778 
      // Append the prepared cleanup epilogue from above.
780       llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
781       for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
782         NormalExit->getInstList().push_back(InstsToAppend[I]);
783 
784       // Optimistically hope that any fixups will continue falling through.
785       for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
786            I < E; ++I) {
787         BranchFixup &Fixup = EHStack.getBranchFixup(I);
788         if (!Fixup.Destination) continue;
789         if (!Fixup.OptimisticBranchBlock) {
790           new llvm::StoreInst(Builder.getInt32(Fixup.DestinationIndex),
791                               getNormalCleanupDestSlot(),
792                               Fixup.InitialBranch);
793           Fixup.InitialBranch->setSuccessor(0, NormalEntry);
794         }
795         Fixup.OptimisticBranchBlock = NormalExit;
796       }
797 
798       // V.  Set up the fallthrough edge out.
799 
800       // Case 1: a fallthrough source exists but shouldn't branch to
801       // the cleanup because the cleanup is inactive.
802       if (!HasFallthrough && FallthroughSource) {
803         assert(!IsActive);
804 
805         // If we have a prebranched fallthrough, that needs to be
806         // forwarded to the right block.
807         if (HasPrebranchedFallthrough) {
808           llvm::BasicBlock *Next;
809           if (FallthroughIsBranchThrough) {
810             Next = BranchThroughDest;
811             assert(!FallthroughDest);
812           } else {
813             Next = FallthroughDest;
814           }
815 
816           ForwardPrebranchedFallthrough(FallthroughSource, NormalEntry, Next);
817         }
818         Builder.SetInsertPoint(FallthroughSource);
819 
820       // Case 2: a fallthrough source exists and should branch to the
821       // cleanup, but we're not supposed to branch through to the next
822       // cleanup.
823       } else if (HasFallthrough && FallthroughDest) {
824         assert(!FallthroughIsBranchThrough);
825         EmitBlock(FallthroughDest);
826 
827       // Case 3: a fallthrough source exists and should branch to the
828       // cleanup and then through to the next.
829       } else if (HasFallthrough) {
830         // Everything is already set up for this.
831 
832       // Case 4: no fallthrough source exists.
833       } else {
834         Builder.ClearInsertionPoint();
835       }
836 
837       // VI.  Assorted cleaning.
838 
839       // Check whether we can merge NormalEntry into a single predecessor.
840       // This might invalidate (non-IR) pointers to NormalEntry.
841       llvm::BasicBlock *NewNormalEntry =
842         SimplifyCleanupEntry(*this, NormalEntry);
843 
844       // If it did invalidate those pointers, and NormalEntry was the same
845       // as NormalExit, go back and patch up the fixups.
846       if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
847         for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
848                I < E; ++I)
849           EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
850     }
851   }
852 
853   assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
854 
855   // Emit the EH cleanup if required.
856   if (RequiresEHCleanup) {
857     CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
858 
859     EmitBlock(EHEntry);
860     EmitCleanup(*this, Fn, /*ForEH*/ true, EHActiveFlag);
861 
    // Append the prepared cleanup epilogue from above.
863     llvm::BasicBlock *EHExit = Builder.GetInsertBlock();
864     for (unsigned I = 0, E = EHInstsToAppend.size(); I != E; ++I)
865       EHExit->getInstList().push_back(EHInstsToAppend[I]);
866 
867     Builder.restoreIP(SavedIP);
868 
869     SimplifyCleanupEntry(*this, EHEntry);
870   }
871 }
872 
873 /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
874 /// specified destination obviously has no cleanups to run.  'false' is always
875 /// a conservatively correct answer for this method.
876 bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
877   assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
878          && "stale jump destination");
879 
880   // Calculate the innermost active normal cleanup.
881   EHScopeStack::stable_iterator TopCleanup =
882     EHStack.getInnermostActiveNormalCleanup();
883 
884   // If we're not in an active normal cleanup scope, or if the
885   // destination scope is within the innermost active normal cleanup
886   // scope, we don't need to worry about fixups.
887   if (TopCleanup == EHStack.stable_end() ||
888       TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
889     return true;
890 
891   // Otherwise, we might need some cleanups.
892   return false;
893 }
894 
896 /// Terminate the current block by emitting a branch which might leave
897 /// the current cleanup-protected scope.  The target scope may not yet
898 /// be known, in which case this will require a fixup.
899 ///
900 /// As a side-effect, this method clears the insertion point.
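///
/// For example (hypothetical), the goto below must thread through the
/// cleanup that destroys 's' before it can reach 'done':
///
///   { std::string s; goto done; }
///   done: ;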
901 void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
902   assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
903          && "stale jump destination");
904 
905   if (!HaveInsertPoint())
906     return;
907 
908   // Create the branch.
909   llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());
910 
911   // Calculate the innermost active normal cleanup.
912   EHScopeStack::stable_iterator
913     TopCleanup = EHStack.getInnermostActiveNormalCleanup();
914 
915   // If we're not in an active normal cleanup scope, or if the
916   // destination scope is within the innermost active normal cleanup
917   // scope, we don't need to worry about fixups.
918   if (TopCleanup == EHStack.stable_end() ||
919       TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
920     Builder.ClearInsertionPoint();
921     return;
922   }
923 
924   // If we can't resolve the destination cleanup scope, just add this
925   // to the current cleanup scope as a branch fixup.
926   if (!Dest.getScopeDepth().isValid()) {
927     BranchFixup &Fixup = EHStack.addBranchFixup();
928     Fixup.Destination = Dest.getBlock();
929     Fixup.DestinationIndex = Dest.getDestIndex();
930     Fixup.InitialBranch = BI;
931     Fixup.OptimisticBranchBlock = 0;
932 
933     Builder.ClearInsertionPoint();
934     return;
935   }
936 
937   // Otherwise, thread through all the normal cleanups in scope.
938 
939   // Store the index at the start.
940   llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
941   new llvm::StoreInst(Index, getNormalCleanupDestSlot(), BI);
942 
943   // Adjust BI to point to the first cleanup block.
944   {
945     EHCleanupScope &Scope =
946       cast<EHCleanupScope>(*EHStack.find(TopCleanup));
947     BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
948   }
949 
950   // Add this destination to all the scopes involved.
951   EHScopeStack::stable_iterator I = TopCleanup;
952   EHScopeStack::stable_iterator E = Dest.getScopeDepth();
953   if (E.strictlyEncloses(I)) {
954     while (true) {
955       EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
956       assert(Scope.isNormalCleanup());
957       I = Scope.getEnclosingNormalCleanup();
958 
959       // If this is the last cleanup we're propagating through, tell it
960       // that there's a resolved jump moving through it.
961       if (!E.strictlyEncloses(I)) {
962         Scope.addBranchAfter(Index, Dest.getBlock());
963         break;
964       }
965 
      // Otherwise, tell the scope that there's a jump propagating
967       // through it.  If this isn't new information, all the rest of
968       // the work has been done before.
969       if (!Scope.addBranchThrough(Dest.getBlock()))
970         break;
971     }
972   }
973 
974   Builder.ClearInsertionPoint();
975 }
976 
977 void CodeGenFunction::EmitBranchThroughEHCleanup(UnwindDest Dest) {
978   // We should never get invalid scope depths for an UnwindDest; that
979   // implies that the destination wasn't set up correctly.
980   assert(Dest.getScopeDepth().isValid() && "invalid scope depth on EH dest?");
981 
982   if (!HaveInsertPoint())
983     return;
984 
985   // Create the branch.
986   llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());
987 
988   // Calculate the innermost active cleanup.
989   EHScopeStack::stable_iterator
990     InnermostCleanup = EHStack.getInnermostActiveEHCleanup();
991 
992   // If the destination is in the same EH cleanup scope as us, we
993   // don't need to thread through anything.
994   if (InnermostCleanup.encloses(Dest.getScopeDepth())) {
995     Builder.ClearInsertionPoint();
996     return;
997   }
998   assert(InnermostCleanup != EHStack.stable_end());
999 
1000   // Store the index at the start.
1001   llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
1002   new llvm::StoreInst(Index, getEHCleanupDestSlot(), BI);
1003 
1004   // Adjust BI to point to the first cleanup block.
1005   {
1006     EHCleanupScope &Scope =
1007       cast<EHCleanupScope>(*EHStack.find(InnermostCleanup));
1008     BI->setSuccessor(0, CreateEHEntry(*this, Scope));
1009   }
1010 
1011   // Add this destination to all the scopes involved.
1012   for (EHScopeStack::stable_iterator
1013          I = InnermostCleanup, E = Dest.getScopeDepth(); ; ) {
1014     assert(E.strictlyEncloses(I));
1015     EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
1016     assert(Scope.isEHCleanup());
1017     I = Scope.getEnclosingEHCleanup();
1018 
1019     // If this is the last cleanup we're propagating through, add this
1020     // as a branch-after.
1021     if (I == E) {
1022       Scope.addEHBranchAfter(Index, Dest.getBlock());
1023       break;
1024     }
1025 
1026     // Otherwise, add it as a branch-through.  If this isn't new
1027     // information, all the rest of the work has been done before.
1028     if (!Scope.addEHBranchThrough(Dest.getBlock()))
1029       break;
1030   }
1031 
1032   Builder.ClearInsertionPoint();
1033 }
1034 
1035 static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
1036                                   EHScopeStack::stable_iterator C) {
1037   // If we needed a normal block for any reason, that counts.
1038   if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
1039     return true;
1040 
1041   // Check whether any enclosed cleanups were needed.
1042   for (EHScopeStack::stable_iterator
1043          I = EHStack.getInnermostNormalCleanup();
1044          I != C; ) {
1045     assert(C.strictlyEncloses(I));
1046     EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
1047     if (S.getNormalBlock()) return true;
1048     I = S.getEnclosingNormalCleanup();
1049   }
1050 
1051   return false;
1052 }
1053 
1054 static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
1055                               EHScopeStack::stable_iterator C) {
1056   // If we needed an EH block for any reason, that counts.
1057   if (cast<EHCleanupScope>(*EHStack.find(C)).getEHBlock())
1058     return true;
1059 
1060   // Check whether any enclosed cleanups were needed.
1061   for (EHScopeStack::stable_iterator
1062          I = EHStack.getInnermostEHCleanup(); I != C; ) {
1063     assert(C.strictlyEncloses(I));
1064     EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
1065     if (S.getEHBlock()) return true;
1066     I = S.getEnclosingEHCleanup();
1067   }
1068 
1069   return false;
1070 }
1071 
1072 enum ForActivation_t {
1073   ForActivation,
1074   ForDeactivation
1075 };
1076 
1077 /// The given cleanup block is changing activation state.  Configure a
1078 /// cleanup variable if necessary.
1079 ///
1080 /// It would be good if we had some way of determining if there were
1081 /// extra uses *after* the change-over point.
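///
/// A typical use (one illustration among several): when emitting a
/// new-expression, the cleanup that frees the allocation if the
/// constructor throws is deactivated once construction completes.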
1082 static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
1083                                         EHScopeStack::stable_iterator C,
1084                                         ForActivation_t Kind) {
1085   EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));
1086 
1087   // We always need the flag if we're activating the cleanup, because
1088   // we have to assume that the current location doesn't necessarily
1089   // dominate all future uses of the cleanup.
1090   bool NeedFlag = (Kind == ForActivation);
1091 
1092   // Calculate whether the cleanup was used:
1093 
1094   //   - as a normal cleanup
1095   if (Scope.isNormalCleanup() && IsUsedAsNormalCleanup(CGF.EHStack, C)) {
1096     Scope.setTestFlagInNormalCleanup();
1097     NeedFlag = true;
1098   }
1099 
1100   //  - as an EH cleanup
1101   if (Scope.isEHCleanup() && IsUsedAsEHCleanup(CGF.EHStack, C)) {
1102     Scope.setTestFlagInEHCleanup();
1103     NeedFlag = true;
1104   }
1105 
1106   // If it hasn't yet been used as either, we're done.
1107   if (!NeedFlag) return;
1108 
1109   llvm::AllocaInst *Var = Scope.getActiveFlag();
1110   if (!Var) {
1111     Var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), "cleanup.isactive");
1112     Scope.setActiveFlag(Var);
1113 
1114     // Initialize to true or false depending on whether it was
1115     // active up to this point.
1116     CGF.InitTempAlloca(Var, CGF.Builder.getInt1(Kind == ForDeactivation));
1117   }
1118 
1119   CGF.Builder.CreateStore(CGF.Builder.getInt1(Kind == ForActivation), Var);
1120 }
1121 
/// Activate a cleanup that was created in an inactive state.
1123 void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C) {
1124   assert(C != EHStack.stable_end() && "activating bottom of stack?");
1125   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1126   assert(!Scope.isActive() && "double activation");
1127 
1128   SetupCleanupBlockActivation(*this, C, ForActivation);
1129 
1130   Scope.setActive(true);
1131 }
1132 
/// Deactivate a cleanup that was created in an active state.
1134 void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C) {
1135   assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
1136   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1137   assert(Scope.isActive() && "double deactivation");
1138 
1139   // If it's the top of the stack, just pop it.
1140   if (C == EHStack.stable_begin()) {
1141     // If it's a normal cleanup, we need to pretend that the
1142     // fallthrough is unreachable.
1143     CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1144     PopCleanupBlock();
1145     Builder.restoreIP(SavedIP);
1146     return;
1147   }
1148 
1149   // Otherwise, follow the general case.
1150   SetupCleanupBlockActivation(*this, C, ForDeactivation);
1151 
1152   Scope.setActive(false);
1153 }
1154 
1155 llvm::Value *CodeGenFunction::getNormalCleanupDestSlot() {
1156   if (!NormalCleanupDest)
1157     NormalCleanupDest =
1158       CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
1159   return NormalCleanupDest;
1160 }
1161 
1162 llvm::Value *CodeGenFunction::getEHCleanupDestSlot() {
1163   if (!EHCleanupDest)
1164     EHCleanupDest =
1165       CreateTempAlloca(Builder.getInt32Ty(), "eh.cleanup.dest.slot");
1166   return EHCleanupDest;
1167 }
1168