1 //===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups -------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file contains code dealing with the IR generation for cleanups
10 // and related information.
11 //
12 // A "cleanup" is a piece of code which needs to be executed whenever
13 // control transfers out of a particular scope.  This can be
14 // conditionalized to occur only on exceptional control flow, only on
15 // normal control flow, or both.
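//
// For example, in
//   { std::string s; if (cond) return; use(s); }
// the destructor of 's' must run on the early return, on the normal exit
// from the block, and on any exceptional path out of the block, so it is
// registered here as a cleanup.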
16 //
17 //===----------------------------------------------------------------------===//
18 
19 #include "CGCleanup.h"
20 #include "CodeGenFunction.h"
21 #include "llvm/Support/SaveAndRestore.h"
22 
23 using namespace clang;
24 using namespace CodeGen;
25 
26 bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
27   if (rv.isScalar())
28     return DominatingLLVMValue::needsSaving(rv.getScalarVal());
29   if (rv.isAggregate())
30     return DominatingLLVMValue::needsSaving(rv.getAggregatePointer());
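  // Complex r-values always need to be spilled to a temporary.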
31   return true;
32 }
33 
34 DominatingValue<RValue>::saved_type
35 DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
36   if (rv.isScalar()) {
37     llvm::Value *V = rv.getScalarVal();
38 
39     // These automatically dominate and don't need to be saved.
40     if (!DominatingLLVMValue::needsSaving(V))
41       return saved_type(V, ScalarLiteral);
42 
43     // Everything else needs an alloca.
44     Address addr =
45       CGF.CreateDefaultAlignTempAlloca(V->getType(), "saved-rvalue");
46     CGF.Builder.CreateStore(V, addr);
47     return saved_type(addr.getPointer(), ScalarAddress);
48   }
49 
50   if (rv.isComplex()) {
51     CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
52     llvm::Type *ComplexTy =
53         llvm::StructType::get(V.first->getType(), V.second->getType());
54     Address addr = CGF.CreateDefaultAlignTempAlloca(ComplexTy, "saved-complex");
55     CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
56     CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
57     return saved_type(addr.getPointer(), ComplexAddress);
58   }
59 
60   assert(rv.isAggregate());
61   Address V = rv.getAggregateAddress(); // TODO: volatile?
62   if (!DominatingLLVMValue::needsSaving(V.getPointer()))
63     return saved_type(V.getPointer(), AggregateLiteral,
64                       V.getAlignment().getQuantity());
65 
66   Address addr =
67     CGF.CreateTempAlloca(V.getType(), CGF.getPointerAlign(), "saved-rvalue");
68   CGF.Builder.CreateStore(V.getPointer(), addr);
69   return saved_type(addr.getPointer(), AggregateAddress,
70                     V.getAlignment().getQuantity());
71 }
72 
/// Given a saved r-value produced by the save method above, emit the
/// code necessary to restore it to usability at the current insertion
/// point.
76 RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
77   auto getSavingAddress = [&](llvm::Value *value) {
78     auto alignment = cast<llvm::AllocaInst>(value)->getAlignment();
79     return Address(value, CharUnits::fromQuantity(alignment));
80   };
81   switch (K) {
82   case ScalarLiteral:
83     return RValue::get(Value);
84   case ScalarAddress:
85     return RValue::get(CGF.Builder.CreateLoad(getSavingAddress(Value)));
86   case AggregateLiteral:
87     return RValue::getAggregate(Address(Value, CharUnits::fromQuantity(Align)));
88   case AggregateAddress: {
89     auto addr = CGF.Builder.CreateLoad(getSavingAddress(Value));
90     return RValue::getAggregate(Address(addr, CharUnits::fromQuantity(Align)));
91   }
92   case ComplexAddress: {
93     Address address = getSavingAddress(Value);
94     llvm::Value *real =
95         CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 0));
96     llvm::Value *imag =
97         CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(address, 1));
98     return RValue::getComplex(real, imag);
99   }
100   }
101 
102   llvm_unreachable("bad saved r-value kind");
103 }
104 
105 /// Push an entry of the given size onto this protected-scope stack.
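/// The buffer is filled from the end towards the start: StartOfData moves
/// down towards StartOfBuffer as entries are pushed, so the most recently
/// pushed (innermost) scope always lives at StartOfData.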
106 char *EHScopeStack::allocate(size_t Size) {
107   Size = llvm::alignTo(Size, ScopeStackAlignment);
108   if (!StartOfBuffer) {
109     unsigned Capacity = 1024;
110     while (Capacity < Size) Capacity *= 2;
111     StartOfBuffer = new char[Capacity];
112     StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
113   } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
114     unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
115     unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
116 
117     unsigned NewCapacity = CurrentCapacity;
118     do {
119       NewCapacity *= 2;
120     } while (NewCapacity < UsedCapacity + Size);
121 
122     char *NewStartOfBuffer = new char[NewCapacity];
123     char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
124     char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
125     memcpy(NewStartOfData, StartOfData, UsedCapacity);
126     delete [] StartOfBuffer;
127     StartOfBuffer = NewStartOfBuffer;
128     EndOfBuffer = NewEndOfBuffer;
129     StartOfData = NewStartOfData;
130   }
131 
132   assert(StartOfBuffer + Size <= StartOfData);
133   StartOfData -= Size;
134   return StartOfData;
135 }
136 
137 void EHScopeStack::deallocate(size_t Size) {
138   StartOfData += llvm::alignTo(Size, ScopeStackAlignment);
139 }
140 
141 bool EHScopeStack::containsOnlyLifetimeMarkers(
142     EHScopeStack::stable_iterator Old) const {
143   for (EHScopeStack::iterator it = begin(); stabilize(it) != Old; it++) {
144     EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*it);
145     if (!cleanup || !cleanup->isLifetimeMarker())
146       return false;
147   }
148 
149   return true;
150 }
151 
152 bool EHScopeStack::requiresLandingPad() const {
153   for (stable_iterator si = getInnermostEHScope(); si != stable_end(); ) {
154     // Skip lifetime markers.
155     if (auto *cleanup = dyn_cast<EHCleanupScope>(&*find(si)))
156       if (cleanup->isLifetimeMarker()) {
157         si = cleanup->getEnclosingEHScope();
158         continue;
159       }
160     return true;
161   }
162 
163   return false;
164 }
165 
166 EHScopeStack::stable_iterator
167 EHScopeStack::getInnermostActiveNormalCleanup() const {
168   for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
169          si != se; ) {
170     EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
171     if (cleanup.isActive()) return si;
172     si = cleanup.getEnclosingNormalCleanup();
173   }
174   return stable_end();
175 }
176 
177 
178 void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
179   char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
180   bool IsNormalCleanup = Kind & NormalCleanup;
181   bool IsEHCleanup = Kind & EHCleanup;
182   bool IsLifetimeMarker = Kind & LifetimeMarker;
183 
184   // Per C++ [except.terminate], it is implementation-defined whether none,
185   // some, or all cleanups are called before std::terminate. Thus, when
186   // terminate is the current EH scope, we may skip adding any EH cleanup
187   // scopes.
188   if (InnermostEHScope != stable_end() &&
189       find(InnermostEHScope)->getKind() == EHScope::Terminate)
190     IsEHCleanup = false;
191 
192   EHCleanupScope *Scope =
193     new (Buffer) EHCleanupScope(IsNormalCleanup,
194                                 IsEHCleanup,
195                                 Size,
196                                 BranchFixups.size(),
197                                 InnermostNormalCleanup,
198                                 InnermostEHScope);
199   if (IsNormalCleanup)
200     InnermostNormalCleanup = stable_begin();
201   if (IsEHCleanup)
202     InnermostEHScope = stable_begin();
203   if (IsLifetimeMarker)
204     Scope->setLifetimeMarker();
205 
  // With Windows -EHa, invoke llvm.seh.scope.begin() for each EH cleanup.
207   if (CGF->getLangOpts().EHAsynch && IsEHCleanup && !IsLifetimeMarker &&
208       CGF->getTarget().getCXXABI().isMicrosoft())
209     CGF->EmitSehCppScopeBegin();
210 
211   return Scope->getCleanupBuffer();
212 }
213 
214 void EHScopeStack::popCleanup() {
215   assert(!empty() && "popping exception stack when not empty");
216 
217   assert(isa<EHCleanupScope>(*begin()));
218   EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
219   InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
220   InnermostEHScope = Cleanup.getEnclosingEHScope();
221   deallocate(Cleanup.getAllocatedSize());
222 
223   // Destroy the cleanup.
224   Cleanup.Destroy();
225 
226   // Check whether we can shrink the branch-fixups stack.
227   if (!BranchFixups.empty()) {
228     // If we no longer have any normal cleanups, all the fixups are
229     // complete.
230     if (!hasNormalCleanups())
231       BranchFixups.clear();
232 
233     // Otherwise we can still trim out unnecessary nulls.
234     else
235       popNullFixups();
236   }
237 }
238 
239 EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
240   assert(getInnermostEHScope() == stable_end());
241   char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
242   EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
243   InnermostEHScope = stable_begin();
244   return filter;
245 }
246 
247 void EHScopeStack::popFilter() {
248   assert(!empty() && "popping exception stack when not empty");
249 
250   EHFilterScope &filter = cast<EHFilterScope>(*begin());
251   deallocate(EHFilterScope::getSizeForNumFilters(filter.getNumFilters()));
252 
253   InnermostEHScope = filter.getEnclosingEHScope();
254 }
255 
256 EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
257   char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
258   EHCatchScope *scope =
259     new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
260   InnermostEHScope = stable_begin();
261   return scope;
262 }
263 
264 void EHScopeStack::pushTerminate() {
265   char *Buffer = allocate(EHTerminateScope::getSize());
266   new (Buffer) EHTerminateScope(InnermostEHScope);
267   InnermostEHScope = stable_begin();
268 }
269 
270 /// Remove any 'null' fixups on the stack.  However, we can't pop more
271 /// fixups than the fixup depth on the innermost normal cleanup, or
272 /// else fixups that we try to add to that cleanup will end up in the
273 /// wrong place.  We *could* try to shrink fixup depths, but that's
274 /// actually a lot of work for little benefit.
275 void EHScopeStack::popNullFixups() {
276   // We expect this to only be called when there's still an innermost
277   // normal cleanup;  otherwise there really shouldn't be any fixups.
278   assert(hasNormalCleanups());
279 
280   EHScopeStack::iterator it = find(InnermostNormalCleanup);
281   unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
282   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
283 
284   while (BranchFixups.size() > MinSize &&
285          BranchFixups.back().Destination == nullptr)
286     BranchFixups.pop_back();
287 }
288 
289 Address CodeGenFunction::createCleanupActiveFlag() {
290   // Create a variable to decide whether the cleanup needs to be run.
291   Address active = CreateTempAllocaWithoutCast(
292       Builder.getInt1Ty(), CharUnits::One(), "cleanup.cond");
293 
294   // Initialize it to false at a site that's guaranteed to be run
295   // before each evaluation.
296   setBeforeOutermostConditional(Builder.getFalse(), active);
297 
298   // Initialize it to true at the current location.
299   Builder.CreateStore(Builder.getTrue(), active);
300 
301   return active;
302 }
303 
304 void CodeGenFunction::initFullExprCleanupWithFlag(Address ActiveFlag) {
305   // Set that as the active flag in the cleanup.
306   EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
307   assert(!cleanup.hasActiveFlag() && "cleanup already has active flag?");
308   cleanup.setActiveFlag(ActiveFlag);
309 
310   if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
311   if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
312 }
313 
314 void EHScopeStack::Cleanup::anchor() {}
315 
316 static void createStoreInstBefore(llvm::Value *value, Address addr,
317                                   llvm::Instruction *beforeInst) {
318   auto store = new llvm::StoreInst(value, addr.getPointer(), beforeInst);
319   store->setAlignment(addr.getAlignment().getAsAlign());
320 }
321 
322 static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
323                                             llvm::Instruction *beforeInst) {
324   return new llvm::LoadInst(addr.getElementType(), addr.getPointer(), name,
325                             false, addr.getAlignment().getAsAlign(),
326                             beforeInst);
327 }
328 
329 /// All the branch fixups on the EH stack have propagated out past the
330 /// outermost normal cleanup; resolve them all by adding cases to the
331 /// given switch instruction.
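///
/// (A branch fixup records a branch, e.g. from a goto, whose destination
/// could not yet be resolved against the cleanup stack when the branch
/// was emitted; see EmitBranchThroughCleanup.)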
332 static void ResolveAllBranchFixups(CodeGenFunction &CGF,
333                                    llvm::SwitchInst *Switch,
334                                    llvm::BasicBlock *CleanupEntry) {
335   llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;
336 
337   for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
338     // Skip this fixup if its destination isn't set.
339     BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
340     if (Fixup.Destination == nullptr) continue;
341 
342     // If there isn't an OptimisticBranchBlock, then InitialBranch is
343     // still pointing directly to its destination; forward it to the
344     // appropriate cleanup entry.  This is required in the specific
345     // case of
346     //   { std::string s; goto lbl; }
347     //   lbl:
348     // i.e. where there's an unresolved fixup inside a single cleanup
349     // entry which we're currently popping.
350     if (Fixup.OptimisticBranchBlock == nullptr) {
351       createStoreInstBefore(CGF.Builder.getInt32(Fixup.DestinationIndex),
352                             CGF.getNormalCleanupDestSlot(),
353                             Fixup.InitialBranch);
354       Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
355     }
356 
357     // Don't add this case to the switch statement twice.
358     if (!CasesAdded.insert(Fixup.Destination).second)
359       continue;
360 
361     Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
362                     Fixup.Destination);
363   }
364 
365   CGF.EHStack.clearFixups();
366 }
367 
368 /// Transitions the terminator of the given exit-block of a cleanup to
369 /// be a cleanup switch.
370 static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
371                                                    llvm::BasicBlock *Block) {
372   // If it's a branch, turn it into a switch whose default
373   // destination is its original target.
374   llvm::Instruction *Term = Block->getTerminator();
375   assert(Term && "can't transition block without terminator");
376 
377   if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
378     assert(Br->isUnconditional());
379     auto Load = createLoadInstBefore(CGF.getNormalCleanupDestSlot(),
380                                      "cleanup.dest", Term);
381     llvm::SwitchInst *Switch =
382       llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
383     Br->eraseFromParent();
384     return Switch;
385   } else {
386     return cast<llvm::SwitchInst>(Term);
387   }
388 }
389 
390 void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
391   assert(Block && "resolving a null target block");
392   if (!EHStack.getNumBranchFixups()) return;
393 
394   assert(EHStack.hasNormalCleanups() &&
395          "branch fixups exist with no normal cleanups on stack");
396 
397   llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
398   bool ResolvedAny = false;
399 
400   for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
401     // Skip this fixup if its destination doesn't match.
402     BranchFixup &Fixup = EHStack.getBranchFixup(I);
403     if (Fixup.Destination != Block) continue;
404 
405     Fixup.Destination = nullptr;
406     ResolvedAny = true;
407 
    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
410     llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
411     if (!BranchBB)
412       continue;
413 
414     // Don't process the same optimistic branch block twice.
415     if (!ModifiedOptimisticBlocks.insert(BranchBB).second)
416       continue;
417 
418     llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);
419 
420     // Add a case to the switch.
421     Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
422   }
423 
424   if (ResolvedAny)
425     EHStack.popNullFixups();
426 }
427 
428 /// Pops cleanup blocks until the given savepoint is reached.
429 void CodeGenFunction::PopCleanupBlocks(
430     EHScopeStack::stable_iterator Old,
431     std::initializer_list<llvm::Value **> ValuesToReload) {
432   assert(Old.isValid());
433 
434   bool HadBranches = false;
435   while (EHStack.stable_begin() != Old) {
436     EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
437     HadBranches |= Scope.hasBranches();
438 
439     // As long as Old strictly encloses the scope's enclosing normal
440     // cleanup, we're going to emit another normal cleanup which
441     // fallthrough can propagate through.
442     bool FallThroughIsBranchThrough =
443       Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());
444 
445     PopCleanupBlock(FallThroughIsBranchThrough);
446   }
447 
448   // If we didn't have any branches, the insertion point before cleanups must
449   // dominate the current insertion point and we don't need to reload any
450   // values.
451   if (!HadBranches)
452     return;
453 
454   // Spill and reload all values that the caller wants to be live at the current
455   // insertion point.
456   for (llvm::Value **ReloadedValue : ValuesToReload) {
457     auto *Inst = dyn_cast_or_null<llvm::Instruction>(*ReloadedValue);
458     if (!Inst)
459       continue;
460 
    // Don't spill static allocas; they dominate all cleanups. These are
    // created by binding a reference to a local variable or temporary.
463     auto *AI = dyn_cast<llvm::AllocaInst>(Inst);
464     if (AI && AI->isStaticAlloca())
465       continue;
466 
467     Address Tmp =
468         CreateDefaultAlignTempAlloca(Inst->getType(), "tmp.exprcleanup");
469 
470     // Find an insertion point after Inst and spill it to the temporary.
471     llvm::BasicBlock::iterator InsertBefore;
472     if (auto *Invoke = dyn_cast<llvm::InvokeInst>(Inst))
473       InsertBefore = Invoke->getNormalDest()->getFirstInsertionPt();
474     else
475       InsertBefore = std::next(Inst->getIterator());
476     CGBuilderTy(CGM, &*InsertBefore).CreateStore(Inst, Tmp);
477 
478     // Reload the value at the current insertion point.
479     *ReloadedValue = Builder.CreateLoad(Tmp);
480   }
481 }
482 
/// Pops cleanup blocks until the given savepoint is reached, then pushes
/// any cleanups deferred onto the lifetime-extended cleanups stack since
/// that savepoint.
485 void CodeGenFunction::PopCleanupBlocks(
486     EHScopeStack::stable_iterator Old, size_t OldLifetimeExtendedSize,
487     std::initializer_list<llvm::Value **> ValuesToReload) {
488   PopCleanupBlocks(Old, ValuesToReload);
489 
490   // Move our deferred cleanups onto the EH stack.
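  // Each record on the lifetime-extended cleanup stack consists of a
  // LifetimeExtendedCleanupHeader, followed by the cleanup object itself and,
  // for conditional cleanups, the Address of the active flag.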
491   for (size_t I = OldLifetimeExtendedSize,
492               E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
493     // Alignment should be guaranteed by the vptrs in the individual cleanups.
494     assert((I % alignof(LifetimeExtendedCleanupHeader) == 0) &&
495            "misaligned cleanup stack entry");
496 
497     LifetimeExtendedCleanupHeader &Header =
498         reinterpret_cast<LifetimeExtendedCleanupHeader&>(
499             LifetimeExtendedCleanupStack[I]);
500     I += sizeof(Header);
501 
502     EHStack.pushCopyOfCleanup(Header.getKind(),
503                               &LifetimeExtendedCleanupStack[I],
504                               Header.getSize());
505     I += Header.getSize();
506 
507     if (Header.isConditional()) {
508       Address ActiveFlag =
509           reinterpret_cast<Address &>(LifetimeExtendedCleanupStack[I]);
510       initFullExprCleanupWithFlag(ActiveFlag);
511       I += sizeof(ActiveFlag);
512     }
513   }
514   LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
515 }
516 
517 static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
518                                            EHCleanupScope &Scope) {
519   assert(Scope.isNormalCleanup());
520   llvm::BasicBlock *Entry = Scope.getNormalBlock();
521   if (!Entry) {
522     Entry = CGF.createBasicBlock("cleanup");
523     Scope.setNormalBlock(Entry);
524   }
525   return Entry;
526 }
527 
528 /// Attempts to reduce a cleanup's entry block to a fallthrough.  This
529 /// is basically llvm::MergeBlockIntoPredecessor, except
530 /// simplified/optimized for the tighter constraints on cleanup blocks.
531 ///
532 /// Returns the new block, whatever it is.
533 static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
534                                               llvm::BasicBlock *Entry) {
535   llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
536   if (!Pred) return Entry;
537 
538   llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
539   if (!Br || Br->isConditional()) return Entry;
540   assert(Br->getSuccessor(0) == Entry);
541 
542   // If we were previously inserting at the end of the cleanup entry
543   // block, we'll need to continue inserting at the end of the
544   // predecessor.
545   bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
546   assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());
547 
548   // Kill the branch.
549   Br->eraseFromParent();
550 
551   // Replace all uses of the entry with the predecessor, in case there
552   // are phis in the cleanup.
553   Entry->replaceAllUsesWith(Pred);
554 
555   // Merge the blocks.
556   Pred->getInstList().splice(Pred->end(), Entry->getInstList());
557 
558   // Kill the entry block.
559   Entry->eraseFromParent();
560 
561   if (WasInsertBlock)
562     CGF.Builder.SetInsertPoint(Pred);
563 
564   return Pred;
565 }
566 
567 static void EmitCleanup(CodeGenFunction &CGF,
568                         EHScopeStack::Cleanup *Fn,
569                         EHScopeStack::Cleanup::Flags flags,
570                         Address ActiveFlag) {
571   // If there's an active flag, load it and skip the cleanup if it's
572   // false.
573   llvm::BasicBlock *ContBB = nullptr;
574   if (ActiveFlag.isValid()) {
575     ContBB = CGF.createBasicBlock("cleanup.done");
576     llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
577     llvm::Value *IsActive
578       = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
579     CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
580     CGF.EmitBlock(CleanupBB);
581   }
582 
583   // Ask the cleanup to emit itself.
584   Fn->Emit(CGF, flags);
585   assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
586 
587   // Emit the continuation block if there was an active flag.
588   if (ActiveFlag.isValid())
589     CGF.EmitBlock(ContBB);
590 }
591 
592 static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
593                                           llvm::BasicBlock *From,
594                                           llvm::BasicBlock *To) {
595   // Exit is the exit block of a cleanup, so it always terminates in
596   // an unconditional branch or a switch.
597   llvm::Instruction *Term = Exit->getTerminator();
598 
599   if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
600     assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
601     Br->setSuccessor(0, To);
602   } else {
603     llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
604     for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
605       if (Switch->getSuccessor(I) == From)
606         Switch->setSuccessor(I, To);
607   }
608 }
609 
/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause such a block to come into
/// existence anyway; if so, destroy it.
613 ///
614 /// The validity of this transformation is very much specific to the
615 /// exact ways in which we form branches to cleanup entries.
616 static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
617                                          EHCleanupScope &scope) {
618   llvm::BasicBlock *entry = scope.getNormalBlock();
619   if (!entry) return;
620 
621   // Replace all the uses with unreachable.
622   llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
623   for (llvm::BasicBlock::use_iterator
624          i = entry->use_begin(), e = entry->use_end(); i != e; ) {
625     llvm::Use &use = *i;
626     ++i;
627 
628     use.set(unreachableBB);
629 
630     // The only uses should be fixup switches.
631     llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
632     if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
633       // Replace the switch with a branch.
634       llvm::BranchInst::Create(si->case_begin()->getCaseSuccessor(), si);
635 
636       // The switch operand is a load from the cleanup-dest alloca.
637       llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
638 
639       // Destroy the switch.
640       si->eraseFromParent();
641 
642       // Destroy the load.
643       assert(condition->getOperand(0) == CGF.NormalCleanupDest.getPointer());
644       assert(condition->use_empty());
645       condition->eraseFromParent();
646     }
647   }
648 
649   assert(entry->use_empty());
650   delete entry;
651 }
652 
653 /// Pops a cleanup block.  If the block includes a normal cleanup, the
654 /// current insertion point is threaded through the cleanup, as are
655 /// any branch fixups on the cleanup.
656 void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
657   assert(!EHStack.empty() && "cleanup stack is empty!");
658   assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
659   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
660   assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
661 
662   // Remember activation information.
663   bool IsActive = Scope.isActive();
664   Address NormalActiveFlag =
665     Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag()
666                                           : Address::invalid();
667   Address EHActiveFlag =
668     Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag()
669                                       : Address::invalid();
670 
671   // Check whether we need an EH cleanup.  This is only true if we've
672   // generated a lazy EH cleanup block.
673   llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
674   assert(Scope.hasEHBranches() == (EHEntry != nullptr));
675   bool RequiresEHCleanup = (EHEntry != nullptr);
676   EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();
677 
678   // Check the three conditions which might require a normal cleanup:
679 
680   // - whether there are branch fix-ups through this cleanup
681   unsigned FixupDepth = Scope.getFixupDepth();
682   bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
683 
684   // - whether there are branch-throughs or branch-afters
685   bool HasExistingBranches = Scope.hasBranches();
686 
687   // - whether there's a fallthrough
688   llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
689   bool HasFallthrough = (FallthroughSource != nullptr && IsActive);
690 
691   // Branch-through fall-throughs leave the insertion point set to the
692   // end of the last cleanup, which points to the current scope.  The
693   // rest of IR gen doesn't need to worry about this; it only happens
694   // during the execution of PopCleanupBlocks().
695   bool HasPrebranchedFallthrough =
696     (FallthroughSource && FallthroughSource->getTerminator());
697 
698   // If this is a normal cleanup, then having a prebranched
699   // fallthrough implies that the fallthrough source unconditionally
700   // jumps here.
701   assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
702          (Scope.getNormalBlock() &&
703           FallthroughSource->getTerminator()->getSuccessor(0)
704             == Scope.getNormalBlock()));
705 
706   bool RequiresNormalCleanup = false;
707   if (Scope.isNormalCleanup() &&
708       (HasFixups || HasExistingBranches || HasFallthrough)) {
709     RequiresNormalCleanup = true;
710   }
711 
712   // If we have a prebranched fallthrough into an inactive normal
713   // cleanup, rewrite it so that it leads to the appropriate place.
714   if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
715     llvm::BasicBlock *prebranchDest;
716 
717     // If the prebranch is semantically branching through the next
718     // cleanup, just forward it to the next block, leaving the
719     // insertion point in the prebranched block.
720     if (FallthroughIsBranchThrough) {
721       EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
722       prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));
723 
724     // Otherwise, we need to make a new block.  If the normal cleanup
725     // isn't being used at all, we could actually reuse the normal
726     // entry block, but this is simpler, and it avoids conflicts with
727     // dead optimistic fixup branches.
728     } else {
729       prebranchDest = createBasicBlock("forwarded-prebranch");
730       EmitBlock(prebranchDest);
731     }
732 
733     llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
734     assert(normalEntry && !normalEntry->use_empty());
735 
736     ForwardPrebranchedFallthrough(FallthroughSource,
737                                   normalEntry, prebranchDest);
738   }
739 
740   // If we don't need the cleanup at all, we're done.
741   if (!RequiresNormalCleanup && !RequiresEHCleanup) {
742     destroyOptimisticNormalEntry(*this, Scope);
743     EHStack.popCleanup(); // safe because there are no fixups
744     assert(EHStack.getNumBranchFixups() == 0 ||
745            EHStack.hasNormalCleanups());
746     return;
747   }
748 
749   // Copy the cleanup emission data out.  This uses either a stack
750   // array or malloc'd memory, depending on the size, which is
751   // behavior that SmallVector would provide, if we could use it
752   // here. Unfortunately, if you ask for a SmallVector<char>, the
753   // alignment isn't sufficient.
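  // The copy is needed because the scope is popped (deallocating its buffer)
  // before the cleanup itself is emitted below.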
754   auto *CleanupSource = reinterpret_cast<char *>(Scope.getCleanupBuffer());
755   alignas(EHScopeStack::ScopeStackAlignment) char
756       CleanupBufferStack[8 * sizeof(void *)];
757   std::unique_ptr<char[]> CleanupBufferHeap;
758   size_t CleanupSize = Scope.getCleanupSize();
759   EHScopeStack::Cleanup *Fn;
760 
761   if (CleanupSize <= sizeof(CleanupBufferStack)) {
762     memcpy(CleanupBufferStack, CleanupSource, CleanupSize);
763     Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferStack);
764   } else {
765     CleanupBufferHeap.reset(new char[CleanupSize]);
766     memcpy(CleanupBufferHeap.get(), CleanupSource, CleanupSize);
767     Fn = reinterpret_cast<EHScopeStack::Cleanup *>(CleanupBufferHeap.get());
768   }
769 
770   EHScopeStack::Cleanup::Flags cleanupFlags;
771   if (Scope.isNormalCleanup())
772     cleanupFlags.setIsNormalCleanupKind();
773   if (Scope.isEHCleanup())
774     cleanupFlags.setIsEHCleanupKind();
775 
776   // Under -EHa, invoke seh.scope.end() to mark scope end before dtor
777   bool IsEHa = getLangOpts().EHAsynch && !Scope.isLifetimeMarker();
778   const EHPersonality &Personality = EHPersonality::get(*this);
779   if (!RequiresNormalCleanup) {
    // Mark the C++ scope end for a passed-by-value argument temporary,
    // which under the Windows ABI is "normally" cleaned up in the callee.
782     if (IsEHa && getInvokeDest()) {
783       if (Personality.isMSVCXXPersonality())
784         EmitSehCppScopeEnd();
785     }
786     destroyOptimisticNormalEntry(*this, Scope);
787     EHStack.popCleanup();
788   } else {
789     // If we have a fallthrough and no other need for the cleanup,
790     // emit it directly.
791     if (HasFallthrough && !HasPrebranchedFallthrough && !HasFixups &&
792         !HasExistingBranches) {
793 
      // Mark the SEH scope end for the fall-through path.
795       if (IsEHa && getInvokeDest()) {
796         if (Personality.isMSVCXXPersonality())
797           EmitSehCppScopeEnd();
798         else
799           EmitSehTryScopeEnd();
800       }
801 
802       destroyOptimisticNormalEntry(*this, Scope);
803       EHStack.popCleanup();
804 
805       EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
806 
807     // Otherwise, the best approach is to thread everything through
808     // the cleanup block and then try to clean up after ourselves.
809     } else {
810       // Force the entry block to exist.
811       llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);
812 
813       // I.  Set up the fallthrough edge in.
814 
815       CGBuilderTy::InsertPoint savedInactiveFallthroughIP;
816 
817       // If there's a fallthrough, we need to store the cleanup
818       // destination index.  For fall-throughs this is always zero.
819       if (HasFallthrough) {
820         if (!HasPrebranchedFallthrough)
821           Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());
822 
823       // Otherwise, save and clear the IP if we don't have fallthrough
824       // because the cleanup is inactive.
825       } else if (FallthroughSource) {
826         assert(!IsActive && "source without fallthrough for active cleanup");
827         savedInactiveFallthroughIP = Builder.saveAndClearIP();
828       }
829 
830       // II.  Emit the entry block.  This implicitly branches to it if
831       // we have fallthrough.  All the fixups and existing branches
832       // should already be branched to it.
833       EmitBlock(NormalEntry);
834 
      // Intercept the normal cleanup to mark the SEH scope end.
836       if (IsEHa) {
837         if (Personality.isMSVCXXPersonality())
838           EmitSehCppScopeEnd();
839         else
840           EmitSehTryScopeEnd();
841       }
842 
843       // III.  Figure out where we're going and build the cleanup
844       // epilogue.
845 
846       bool HasEnclosingCleanups =
847         (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());
848 
849       // Compute the branch-through dest if we need it:
850       //   - if there are branch-throughs threaded through the scope
851       //   - if fall-through is a branch-through
852       //   - if there are fixups that will be optimistically forwarded
853       //     to the enclosing cleanup
854       llvm::BasicBlock *BranchThroughDest = nullptr;
855       if (Scope.hasBranchThroughs() ||
856           (FallthroughSource && FallthroughIsBranchThrough) ||
857           (HasFixups && HasEnclosingCleanups)) {
858         assert(HasEnclosingCleanups);
859         EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
860         BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
861       }
862 
863       llvm::BasicBlock *FallthroughDest = nullptr;
864       SmallVector<llvm::Instruction*, 2> InstsToAppend;
865 
866       // If there's exactly one branch-after and no other threads,
867       // we can route it without a switch.
868       if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
869           Scope.getNumBranchAfters() == 1) {
870         assert(!BranchThroughDest || !IsActive);
871 
872         // Clean up the possibly dead store to the cleanup dest slot.
873         llvm::Instruction *NormalCleanupDestSlot =
874             cast<llvm::Instruction>(getNormalCleanupDestSlot().getPointer());
875         if (NormalCleanupDestSlot->hasOneUse()) {
876           NormalCleanupDestSlot->user_back()->eraseFromParent();
877           NormalCleanupDestSlot->eraseFromParent();
878           NormalCleanupDest = Address::invalid();
879         }
880 
881         llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
882         InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));
883 
884       // Build a switch-out if we need it:
885       //   - if there are branch-afters threaded through the scope
886       //   - if fall-through is a branch-after
887       //   - if there are fixups that have nowhere left to go and
888       //     so must be immediately resolved
889       } else if (Scope.getNumBranchAfters() ||
890                  (HasFallthrough && !FallthroughIsBranchThrough) ||
891                  (HasFixups && !HasEnclosingCleanups)) {
892 
893         llvm::BasicBlock *Default =
894           (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());
895 
896         // TODO: base this on the number of branch-afters and fixups
897         const unsigned SwitchCapacity = 10;
898 
        // Pass the abnormal exit flag to Fn (for SEH cleanups).
900         cleanupFlags.setHasExitSwitch();
901 
902         llvm::LoadInst *Load =
903           createLoadInstBefore(getNormalCleanupDestSlot(), "cleanup.dest",
904                                nullptr);
905         llvm::SwitchInst *Switch =
906           llvm::SwitchInst::Create(Load, Default, SwitchCapacity);
907 
908         InstsToAppend.push_back(Load);
909         InstsToAppend.push_back(Switch);
910 
911         // Branch-after fallthrough.
912         if (FallthroughSource && !FallthroughIsBranchThrough) {
913           FallthroughDest = createBasicBlock("cleanup.cont");
914           if (HasFallthrough)
915             Switch->addCase(Builder.getInt32(0), FallthroughDest);
916         }
917 
918         for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
919           Switch->addCase(Scope.getBranchAfterIndex(I),
920                           Scope.getBranchAfterBlock(I));
921         }
922 
923         // If there aren't any enclosing cleanups, we can resolve all
924         // the fixups now.
925         if (HasFixups && !HasEnclosingCleanups)
926           ResolveAllBranchFixups(*this, Switch, NormalEntry);
927       } else {
928         // We should always have a branch-through destination in this case.
929         assert(BranchThroughDest);
930         InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
931       }
932 
933       // IV.  Pop the cleanup and emit it.
934       EHStack.popCleanup();
935       assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);
936 
937       EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);
938 
      // Append the cleanup epilogue prepared above.
940       llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
941       for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
942         NormalExit->getInstList().push_back(InstsToAppend[I]);
943 
944       // Optimistically hope that any fixups will continue falling through.
945       for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
946            I < E; ++I) {
947         BranchFixup &Fixup = EHStack.getBranchFixup(I);
948         if (!Fixup.Destination) continue;
949         if (!Fixup.OptimisticBranchBlock) {
950           createStoreInstBefore(Builder.getInt32(Fixup.DestinationIndex),
951                                 getNormalCleanupDestSlot(),
952                                 Fixup.InitialBranch);
953           Fixup.InitialBranch->setSuccessor(0, NormalEntry);
954         }
955         Fixup.OptimisticBranchBlock = NormalExit;
956       }
957 
958       // V.  Set up the fallthrough edge out.
959 
960       // Case 1: a fallthrough source exists but doesn't branch to the
961       // cleanup because the cleanup is inactive.
962       if (!HasFallthrough && FallthroughSource) {
963         // Prebranched fallthrough was forwarded earlier.
964         // Non-prebranched fallthrough doesn't need to be forwarded.
965         // Either way, all we need to do is restore the IP we cleared before.
966         assert(!IsActive);
967         Builder.restoreIP(savedInactiveFallthroughIP);
968 
969       // Case 2: a fallthrough source exists and should branch to the
970       // cleanup, but we're not supposed to branch through to the next
971       // cleanup.
972       } else if (HasFallthrough && FallthroughDest) {
973         assert(!FallthroughIsBranchThrough);
974         EmitBlock(FallthroughDest);
975 
976       // Case 3: a fallthrough source exists and should branch to the
977       // cleanup and then through to the next.
978       } else if (HasFallthrough) {
979         // Everything is already set up for this.
980 
981       // Case 4: no fallthrough source exists.
982       } else {
983         Builder.ClearInsertionPoint();
984       }
985 
986       // VI.  Assorted cleaning.
987 
988       // Check whether we can merge NormalEntry into a single predecessor.
989       // This might invalidate (non-IR) pointers to NormalEntry.
990       llvm::BasicBlock *NewNormalEntry =
991         SimplifyCleanupEntry(*this, NormalEntry);
992 
993       // If it did invalidate those pointers, and NormalEntry was the same
994       // as NormalExit, go back and patch up the fixups.
995       if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
996         for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
997                I < E; ++I)
998           EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
999     }
1000   }
1001 
1002   assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);
1003 
1004   // Emit the EH cleanup if required.
1005   if (RequiresEHCleanup) {
1006     CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1007 
1008     EmitBlock(EHEntry);
1009 
1010     llvm::BasicBlock *NextAction = getEHDispatchBlock(EHParent);
1011 
1012     // Push a terminate scope or cleanupendpad scope around the potentially
1013     // throwing cleanups. For funclet EH personalities, the cleanupendpad models
1014     // program termination when cleanups throw.
1015     bool PushedTerminate = false;
1016     SaveAndRestore<llvm::Instruction *> RestoreCurrentFuncletPad(
1017         CurrentFuncletPad);
1018     llvm::CleanupPadInst *CPI = nullptr;
1019 
1020     const EHPersonality &Personality = EHPersonality::get(*this);
1021     if (Personality.usesFuncletPads()) {
1022       llvm::Value *ParentPad = CurrentFuncletPad;
1023       if (!ParentPad)
1024         ParentPad = llvm::ConstantTokenNone::get(CGM.getLLVMContext());
1025       CurrentFuncletPad = CPI = Builder.CreateCleanupPad(ParentPad);
1026     }
1027 
1028     // Non-MSVC personalities need to terminate when an EH cleanup throws.
1029     if (!Personality.isMSVCPersonality()) {
1030       EHStack.pushTerminate();
1031       PushedTerminate = true;
1032     }
1033 
1034     // We only actually emit the cleanup code if the cleanup is either
1035     // active or was used before it was deactivated.
1036     if (EHActiveFlag.isValid() || IsActive) {
1037       cleanupFlags.setIsForEHCleanup();
1038       EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
1039     }
1040 
1041     if (CPI)
1042       Builder.CreateCleanupRet(CPI, NextAction);
1043     else
1044       Builder.CreateBr(NextAction);
1045 
1046     // Leave the terminate scope.
1047     if (PushedTerminate)
1048       EHStack.popTerminate();
1049 
1050     Builder.restoreIP(SavedIP);
1051 
1052     SimplifyCleanupEntry(*this, EHEntry);
1053   }
1054 }
1055 
1056 /// isObviouslyBranchWithoutCleanups - Return true if a branch to the
1057 /// specified destination obviously has no cleanups to run.  'false' is always
1058 /// a conservatively correct answer for this method.
1059 bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
1060   assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
1061          && "stale jump destination");
1062 
1063   // Calculate the innermost active normal cleanup.
1064   EHScopeStack::stable_iterator TopCleanup =
1065     EHStack.getInnermostActiveNormalCleanup();
1066 
1067   // If we're not in an active normal cleanup scope, or if the
1068   // destination scope is within the innermost active normal cleanup
1069   // scope, we don't need to worry about fixups.
1070   if (TopCleanup == EHStack.stable_end() ||
1071       TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
1072     return true;
1073 
1074   // Otherwise, we might need some cleanups.
1075   return false;
1076 }
1077 
1078 
1079 /// Terminate the current block by emitting a branch which might leave
1080 /// the current cleanup-protected scope.  The target scope may not yet
1081 /// be known, in which case this will require a fixup.
1082 ///
1083 /// As a side-effect, this method clears the insertion point.
1084 void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
1085   assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
1086          && "stale jump destination");
1087 
1088   if (!HaveInsertPoint())
1089     return;
1090 
1091   // Create the branch.
1092   llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());
1093 
1094   // Calculate the innermost active normal cleanup.
1095   EHScopeStack::stable_iterator
1096     TopCleanup = EHStack.getInnermostActiveNormalCleanup();
1097 
1098   // If we're not in an active normal cleanup scope, or if the
1099   // destination scope is within the innermost active normal cleanup
1100   // scope, we don't need to worry about fixups.
1101   if (TopCleanup == EHStack.stable_end() ||
1102       TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
1103     Builder.ClearInsertionPoint();
1104     return;
1105   }
1106 
1107   // If we can't resolve the destination cleanup scope, just add this
1108   // to the current cleanup scope as a branch fixup.
1109   if (!Dest.getScopeDepth().isValid()) {
1110     BranchFixup &Fixup = EHStack.addBranchFixup();
1111     Fixup.Destination = Dest.getBlock();
1112     Fixup.DestinationIndex = Dest.getDestIndex();
1113     Fixup.InitialBranch = BI;
1114     Fixup.OptimisticBranchBlock = nullptr;
1115 
1116     Builder.ClearInsertionPoint();
1117     return;
1118   }
1119 
1120   // Otherwise, thread through all the normal cleanups in scope.
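  //
  // The branch is redirected to the innermost cleanup's entry block; the real
  // destination is identified by the index stored in the cleanup dest slot,
  // which the cleanups' exit switches use to route control onward.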
1121 
1122   // Store the index at the start.
1123   llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
1124   createStoreInstBefore(Index, getNormalCleanupDestSlot(), BI);
1125 
1126   // Adjust BI to point to the first cleanup block.
1127   {
1128     EHCleanupScope &Scope =
1129       cast<EHCleanupScope>(*EHStack.find(TopCleanup));
1130     BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
1131   }
1132 
1133   // Add this destination to all the scopes involved.
1134   EHScopeStack::stable_iterator I = TopCleanup;
1135   EHScopeStack::stable_iterator E = Dest.getScopeDepth();
1136   if (E.strictlyEncloses(I)) {
1137     while (true) {
1138       EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
1139       assert(Scope.isNormalCleanup());
1140       I = Scope.getEnclosingNormalCleanup();
1141 
1142       // If this is the last cleanup we're propagating through, tell it
1143       // that there's a resolved jump moving through it.
1144       if (!E.strictlyEncloses(I)) {
1145         Scope.addBranchAfter(Index, Dest.getBlock());
1146         break;
1147       }
1148 
1149       // Otherwise, tell the scope that there's a jump propagating
1150       // through it.  If this isn't new information, all the rest of
1151       // the work has been done before.
1152       if (!Scope.addBranchThrough(Dest.getBlock()))
1153         break;
1154     }
1155   }
1156 
1157   Builder.ClearInsertionPoint();
1158 }
1159 
1160 static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
1161                                   EHScopeStack::stable_iterator C) {
1162   // If we needed a normal block for any reason, that counts.
1163   if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
1164     return true;
1165 
1166   // Check whether any enclosed cleanups were needed.
1167   for (EHScopeStack::stable_iterator
1168          I = EHStack.getInnermostNormalCleanup();
1169          I != C; ) {
1170     assert(C.strictlyEncloses(I));
1171     EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
1172     if (S.getNormalBlock()) return true;
1173     I = S.getEnclosingNormalCleanup();
1174   }
1175 
1176   return false;
1177 }
1178 
1179 static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
1180                               EHScopeStack::stable_iterator cleanup) {
1181   // If we needed an EH block for any reason, that counts.
1182   if (EHStack.find(cleanup)->hasEHBranches())
1183     return true;
1184 
1185   // Check whether any enclosed cleanups were needed.
1186   for (EHScopeStack::stable_iterator
1187          i = EHStack.getInnermostEHScope(); i != cleanup; ) {
1188     assert(cleanup.strictlyEncloses(i));
1189 
1190     EHScope &scope = *EHStack.find(i);
1191     if (scope.hasEHBranches())
1192       return true;
1193 
1194     i = scope.getEnclosingEHScope();
1195   }
1196 
1197   return false;
1198 }
1199 
1200 enum ForActivation_t {
1201   ForActivation,
1202   ForDeactivation
1203 };
1204 
1205 /// The given cleanup block is changing activation state.  Configure a
1206 /// cleanup variable if necessary.
1207 ///
1208 /// It would be good if we had some way of determining if there were
1209 /// extra uses *after* the change-over point.
1210 static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
1211                                         EHScopeStack::stable_iterator C,
1212                                         ForActivation_t kind,
1213                                         llvm::Instruction *dominatingIP) {
1214   EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));
1215 
1216   // We always need the flag if we're activating the cleanup in a
1217   // conditional context, because we have to assume that the current
1218   // location doesn't necessarily dominate the cleanup's code.
1219   bool isActivatedInConditional =
1220     (kind == ForActivation && CGF.isInConditionalBranch());
1221 
1222   bool needFlag = false;
1223 
1224   // Calculate whether the cleanup was used:
1225 
1226   //   - as a normal cleanup
1227   if (Scope.isNormalCleanup() &&
1228       (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
1229     Scope.setTestFlagInNormalCleanup();
1230     needFlag = true;
1231   }
1232 
1233   //  - as an EH cleanup
1234   if (Scope.isEHCleanup() &&
1235       (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
1236     Scope.setTestFlagInEHCleanup();
1237     needFlag = true;
1238   }
1239 
1240   // If it hasn't yet been used as either, we're done.
1241   if (!needFlag) return;
1242 
1243   Address var = Scope.getActiveFlag();
1244   if (!var.isValid()) {
1245     var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), CharUnits::One(),
1246                                "cleanup.isactive");
1247     Scope.setActiveFlag(var);
1248 
1249     assert(dominatingIP && "no existing variable and no dominating IP!");
1250 
1251     // Initialize to true or false depending on whether it was
1252     // active up to this point.
1253     llvm::Constant *value = CGF.Builder.getInt1(kind == ForDeactivation);
1254 
1255     // If we're in a conditional block, ignore the dominating IP and
1256     // use the outermost conditional branch.
1257     if (CGF.isInConditionalBranch()) {
1258       CGF.setBeforeOutermostConditional(value, var);
1259     } else {
1260       createStoreInstBefore(value, var, dominatingIP);
1261     }
1262   }
1263 
1264   CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
1265 }
1266 
1267 /// Activate a cleanup that was created in an inactivated state.
1268 void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
1269                                            llvm::Instruction *dominatingIP) {
1270   assert(C != EHStack.stable_end() && "activating bottom of stack?");
1271   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1272   assert(!Scope.isActive() && "double activation");
1273 
1274   SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);
1275 
1276   Scope.setActive(true);
1277 }
1278 
/// Deactivate a cleanup that was created in an active state.
1280 void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
1281                                              llvm::Instruction *dominatingIP) {
1282   assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
1283   EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
1284   assert(Scope.isActive() && "double deactivation");
1285 
1286   // If it's the top of the stack, just pop it, but do so only if it belongs
1287   // to the current RunCleanupsScope.
1288   if (C == EHStack.stable_begin() &&
1289       CurrentCleanupScopeDepth.strictlyEncloses(C)) {
    // Checking EHAsynch is not strictly necessary; it is there to ensure
    // zero impact when the EHAsynch option is not in use.
1292     if (!Scope.isNormalCleanup() && getLangOpts().EHAsynch) {
1293       PopCleanupBlock();
1294     } else {
1295       // If it's a normal cleanup, we need to pretend that the
1296       // fallthrough is unreachable.
1297       CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
1298       PopCleanupBlock();
1299       Builder.restoreIP(SavedIP);
1300     }
1301     return;
1302   }
1303 
1304   // Otherwise, follow the general case.
1305   SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);
1306 
1307   Scope.setActive(false);
1308 }
1309 
1310 Address CodeGenFunction::getNormalCleanupDestSlot() {
1311   if (!NormalCleanupDest.isValid())
1312     NormalCleanupDest =
1313       CreateDefaultAlignTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
1314   return NormalCleanupDest;
1315 }
1316 
1317 /// Emits all the code to cause the given temporary to be cleaned up.
1318 void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
1319                                        QualType TempType,
1320                                        Address Ptr) {
1321   pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
1322               /*useEHCleanup*/ true);
1323 }
1324 
// Need to set the "funclet" operand bundle properly for the nothrow
// intrinsics (see CGCall.cpp).
1327 static void EmitSehScope(CodeGenFunction &CGF,
1328                          llvm::FunctionCallee &SehCppScope) {
1329   llvm::BasicBlock *InvokeDest = CGF.getInvokeDest();
1330   assert(CGF.Builder.GetInsertBlock() && InvokeDest);
1331   llvm::BasicBlock *Cont = CGF.createBasicBlock("invoke.cont");
1332   SmallVector<llvm::OperandBundleDef, 1> BundleList =
1333       CGF.getBundlesForFunclet(SehCppScope.getCallee());
1334   if (CGF.CurrentFuncletPad)
1335     BundleList.emplace_back("funclet", CGF.CurrentFuncletPad);
1336   CGF.Builder.CreateInvoke(SehCppScope, Cont, InvokeDest, None, BundleList);
1337   CGF.EmitBlock(Cont);
1338 }
1339 
// Invoke llvm.seh.scope.begin at the beginning of a C++ scope for -EHa.
1341 void CodeGenFunction::EmitSehCppScopeBegin() {
1342   assert(getLangOpts().EHAsynch);
1343   llvm::FunctionType *FTy =
1344       llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1345   llvm::FunctionCallee SehCppScope =
1346       CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.begin");
1347   EmitSehScope(*this, SehCppScope);
1348 }
1349 
// Invoke llvm.seh.scope.end at the end of a C++ scope for -EHa.
// llvm.seh.scope.end is emitted before popCleanup, so it is "invoked".
1352 void CodeGenFunction::EmitSehCppScopeEnd() {
1353   assert(getLangOpts().EHAsynch);
1354   llvm::FunctionType *FTy =
1355       llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1356   llvm::FunctionCallee SehCppScope =
1357       CGM.CreateRuntimeFunction(FTy, "llvm.seh.scope.end");
1358   EmitSehScope(*this, SehCppScope);
1359 }
1360 
// Invoke llvm.seh.try.begin at the beginning of an SEH scope for -EHa.
1362 void CodeGenFunction::EmitSehTryScopeBegin() {
1363   assert(getLangOpts().EHAsynch);
1364   llvm::FunctionType *FTy =
1365       llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1366   llvm::FunctionCallee SehCppScope =
1367       CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.begin");
1368   EmitSehScope(*this, SehCppScope);
1369 }
1370 
// Invoke llvm.seh.try.end at the end of an SEH scope for -EHa.
1372 void CodeGenFunction::EmitSehTryScopeEnd() {
1373   assert(getLangOpts().EHAsynch);
1374   llvm::FunctionType *FTy =
1375       llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
1376   llvm::FunctionCallee SehCppScope =
1377       CGM.CreateRuntimeFunction(FTy, "llvm.seh.try.end");
1378   EmitSehScope(*this, SehCppScope);
1379 }
1380