//===-- Analysis.cpp - CodeGen LLVM IR Analysis Utilities -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines several CodeGen-specific LLVM IR analysis utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Analysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include "llvm/Transforms/Utils/GlobalStatus.h"

using namespace llvm;

/// Compute the linearized index of a member in a nested aggregate/struct/array
/// by recursing and accumulating CurIndex as long as there are indices in the
/// index list.
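/// For example (an illustrative case, not from the original documentation):
/// for the type { i32, { float, double }, i8 } the leaves are numbered
/// i32 = 0, float = 1, double = 2 and i8 = 3, so the index list {1, 1}
/// (addressing the double) linearizes to 2.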
unsigned llvm::ComputeLinearIndex(Type *Ty,
                                  const unsigned *Indices,
                                  const unsigned *IndicesEnd,
                                  unsigned CurIndex) {
  // Base case: We're done.
  if (Indices && Indices == IndicesEnd)
    return CurIndex;

  // Given a struct type, recursively traverse the elements.
  if (StructType *STy = dyn_cast<StructType>(Ty)) {
    for (StructType::element_iterator EB = STy->element_begin(),
                                      EI = EB,
                                      EE = STy->element_end();
        EI != EE; ++EI) {
      if (Indices && *Indices == unsigned(EI - EB))
        return ComputeLinearIndex(*EI, Indices+1, IndicesEnd, CurIndex);
      CurIndex = ComputeLinearIndex(*EI, nullptr, nullptr, CurIndex);
    }
    assert(!Indices && "Unexpected out-of-bounds index");
    return CurIndex;
  }
  // Given an array type, recursively traverse the elements.
  else if (ArrayType *ATy = dyn_cast<ArrayType>(Ty)) {
    Type *EltTy = ATy->getElementType();
    unsigned NumElts = ATy->getNumElements();
    // Compute the linear offset produced by stepping over one array element.
    unsigned EltLinearOffset = ComputeLinearIndex(EltTy, nullptr, nullptr, 0);
    if (Indices) {
      assert(*Indices < NumElts && "Unexpected out-of-bounds index");
      // If the index is inside the array, compute the linear index of the
      // requested element and recurse into it with the rest of the index list.
      CurIndex += EltLinearOffset * *Indices;
      return ComputeLinearIndex(EltTy, Indices+1, IndicesEnd, CurIndex);
    }
    CurIndex += EltLinearOffset * NumElts;
    return CurIndex;
  }
  // We haven't found the type we're looking for, so keep searching.
  return CurIndex + 1;
}

/// ComputeValueVTs - Given an LLVM IR type, compute a sequence of
/// EVTs that represent all the individual underlying
/// non-aggregate types that comprise it.
///
/// If Offsets is non-null, it points to a vector to be filled in
/// with the in-memory offsets of each of the individual values.
///
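/// For example (an illustrative sketch, assuming i32 and float both occupy
/// four bytes with natural alignment on the target): the type
/// { i32, [2 x float] } yields ValueVTs = {i32, f32, f32} and
/// Offsets = {0, 4, 8}.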
void llvm::ComputeValueVTs(const TargetLowering &TLI, const DataLayout &DL,
                           Type *Ty, SmallVectorImpl<EVT> &ValueVTs,
                           SmallVectorImpl<uint64_t> *Offsets,
                           uint64_t StartingOffset) {
  // Given a struct type, recursively traverse the elements.
  if (StructType *STy = dyn_cast<StructType>(Ty)) {
    const StructLayout *SL = DL.getStructLayout(STy);
    for (StructType::element_iterator EB = STy->element_begin(),
                                      EI = EB,
                                      EE = STy->element_end();
         EI != EE; ++EI)
      ComputeValueVTs(TLI, DL, *EI, ValueVTs, Offsets,
                      StartingOffset + SL->getElementOffset(EI - EB));
    return;
  }
  // Given an array type, recursively traverse the elements.
  if (ArrayType *ATy = dyn_cast<ArrayType>(Ty)) {
    Type *EltTy = ATy->getElementType();
    uint64_t EltSize = DL.getTypeAllocSize(EltTy);
    for (unsigned i = 0, e = ATy->getNumElements(); i != e; ++i)
      ComputeValueVTs(TLI, DL, EltTy, ValueVTs, Offsets,
                      StartingOffset + i * EltSize);
    return;
  }
  // Interpret void as zero return values.
  if (Ty->isVoidTy())
    return;
  // Base case: we can get an EVT for this LLVM IR type.
  ValueVTs.push_back(TLI.getValueType(DL, Ty));
  if (Offsets)
    Offsets->push_back(StartingOffset);
}

/// ExtractTypeInfo - Returns the type info, possibly bitcast, encoded in V.
GlobalValue *llvm::ExtractTypeInfo(Value *V) {
  V = V->stripPointerCasts();
  GlobalValue *GV = dyn_cast<GlobalValue>(V);
  GlobalVariable *Var = dyn_cast<GlobalVariable>(V);

  if (Var && Var->getName() == "llvm.eh.catch.all.value") {
    assert(Var->hasInitializer() &&
           "The EH catch-all value must have an initializer");
    Value *Init = Var->getInitializer();
    GV = dyn_cast<GlobalValue>(Init);
    if (!GV) V = cast<ConstantPointerNull>(Init);
  }

  assert((GV || isa<ConstantPointerNull>(V)) &&
         "TypeInfo must be a global variable or NULL");
  return GV;
}

/// hasInlineAsmMemConstraint - Return true if the inline asm instruction being
/// processed uses a memory 'm' constraint.
bool
llvm::hasInlineAsmMemConstraint(InlineAsm::ConstraintInfoVector &CInfos,
                                const TargetLowering &TLI) {
  for (unsigned i = 0, e = CInfos.size(); i != e; ++i) {
    InlineAsm::ConstraintInfo &CI = CInfos[i];
    for (unsigned j = 0, ee = CI.Codes.size(); j != ee; ++j) {
      TargetLowering::ConstraintType CType = TLI.getConstraintType(CI.Codes[j]);
      if (CType == TargetLowering::C_Memory)
        return true;
    }

    // Indirect operand accesses access memory.
    if (CI.isIndirect)
      return true;
  }

  return false;
}

/// getFCmpCondCode - Return the ISD condition code corresponding to
/// the given LLVM IR floating-point condition code.  This includes
/// consideration of global floating-point math flags.
///
ISD::CondCode llvm::getFCmpCondCode(FCmpInst::Predicate Pred) {
  switch (Pred) {
  case FCmpInst::FCMP_FALSE: return ISD::SETFALSE;
  case FCmpInst::FCMP_OEQ:   return ISD::SETOEQ;
  case FCmpInst::FCMP_OGT:   return ISD::SETOGT;
  case FCmpInst::FCMP_OGE:   return ISD::SETOGE;
  case FCmpInst::FCMP_OLT:   return ISD::SETOLT;
  case FCmpInst::FCMP_OLE:   return ISD::SETOLE;
  case FCmpInst::FCMP_ONE:   return ISD::SETONE;
  case FCmpInst::FCMP_ORD:   return ISD::SETO;
  case FCmpInst::FCMP_UNO:   return ISD::SETUO;
  case FCmpInst::FCMP_UEQ:   return ISD::SETUEQ;
  case FCmpInst::FCMP_UGT:   return ISD::SETUGT;
  case FCmpInst::FCMP_UGE:   return ISD::SETUGE;
  case FCmpInst::FCMP_ULT:   return ISD::SETULT;
  case FCmpInst::FCMP_ULE:   return ISD::SETULE;
  case FCmpInst::FCMP_UNE:   return ISD::SETUNE;
  case FCmpInst::FCMP_TRUE:  return ISD::SETTRUE;
  default: llvm_unreachable("Invalid FCmp predicate opcode!");
  }
}

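/// getFCmpCodeWithoutNaN - Given an ISD condition code comparing floats,
/// return the equivalent code if we're allowed to assume that NaNs won't
/// occur.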
ISD::CondCode llvm::getFCmpCodeWithoutNaN(ISD::CondCode CC) {
  switch (CC) {
    case ISD::SETOEQ: case ISD::SETUEQ: return ISD::SETEQ;
    case ISD::SETONE: case ISD::SETUNE: return ISD::SETNE;
    case ISD::SETOLT: case ISD::SETULT: return ISD::SETLT;
    case ISD::SETOLE: case ISD::SETULE: return ISD::SETLE;
    case ISD::SETOGT: case ISD::SETUGT: return ISD::SETGT;
    case ISD::SETOGE: case ISD::SETUGE: return ISD::SETGE;
    default: return CC;
  }
}

/// getICmpCondCode - Return the ISD condition code corresponding to
/// the given LLVM IR integer condition code.
///
ISD::CondCode llvm::getICmpCondCode(ICmpInst::Predicate Pred) {
  switch (Pred) {
  case ICmpInst::ICMP_EQ:  return ISD::SETEQ;
  case ICmpInst::ICMP_NE:  return ISD::SETNE;
  case ICmpInst::ICMP_SLE: return ISD::SETLE;
  case ICmpInst::ICMP_ULE: return ISD::SETULE;
  case ICmpInst::ICMP_SGE: return ISD::SETGE;
  case ICmpInst::ICMP_UGE: return ISD::SETUGE;
  case ICmpInst::ICMP_SLT: return ISD::SETLT;
  case ICmpInst::ICMP_ULT: return ISD::SETULT;
  case ICmpInst::ICMP_SGT: return ISD::SETGT;
  case ICmpInst::ICMP_UGT: return ISD::SETUGT;
  default:
    llvm_unreachable("Invalid ICmp predicate opcode!");
  }
}

static bool isNoopBitcast(Type *T1, Type *T2,
                          const TargetLoweringBase &TLI) {
  return T1 == T2 || (T1->isPointerTy() && T2->isPointerTy()) ||
         (isa<VectorType>(T1) && isa<VectorType>(T2) &&
          TLI.isTypeLegal(EVT::getEVT(T1)) && TLI.isTypeLegal(EVT::getEVT(T2)));
}

/// Look through operations that will be free to find the earliest source of
/// this value.
///
/// @param ValLoc If V has aggregate type, we will be interested in a particular
/// scalar component. This records its address; the reverse of this list gives a
/// sequence of indices appropriate for an extractvalue to locate the important
/// value. This value is updated during the function and on exit will indicate
/// similar information for the Value returned.
///
/// @param DataBits If this function looks through truncate instructions, this
/// will record the smallest size attained.
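///
/// As an illustrative example: "%b = bitcast i8* %a to i32*" is looked
/// through to yield %a (a pointer-to-pointer bitcast is free), while
/// "%v = extractvalue {i32, i64} %agg, 1" appends its index to ValLoc and
/// continues the search from %agg.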
static const Value *getNoopInput(const Value *V,
                                 SmallVectorImpl<unsigned> &ValLoc,
                                 unsigned &DataBits,
                                 const TargetLoweringBase &TLI,
                                 const DataLayout &DL) {
  while (true) {
    // Try to look through V; if V is not an instruction, it can't be looked
    // through.
    const Instruction *I = dyn_cast<Instruction>(V);
    if (!I || I->getNumOperands() == 0) return V;
    const Value *NoopInput = nullptr;

    Value *Op = I->getOperand(0);
    if (isa<BitCastInst>(I)) {
      // Look through truly no-op bitcasts.
      if (isNoopBitcast(Op->getType(), I->getType(), TLI))
        NoopInput = Op;
    } else if (isa<GetElementPtrInst>(I)) {
      // Look through getelementptr.
      if (cast<GetElementPtrInst>(I)->hasAllZeroIndices())
        NoopInput = Op;
    } else if (isa<IntToPtrInst>(I)) {
      // Look through inttoptr.
      // Make sure this isn't a truncating or extending cast.  We could
      // support this eventually, but don't bother for now.
      if (!isa<VectorType>(I->getType()) &&
          DL.getPointerSizeInBits() ==
              cast<IntegerType>(Op->getType())->getBitWidth())
        NoopInput = Op;
    } else if (isa<PtrToIntInst>(I)) {
      // Look through ptrtoint.
      // Make sure this isn't a truncating or extending cast.  We could
      // support this eventually, but don't bother for now.
      if (!isa<VectorType>(I->getType()) &&
          DL.getPointerSizeInBits() ==
              cast<IntegerType>(I->getType())->getBitWidth())
        NoopInput = Op;
    } else if (isa<TruncInst>(I) &&
               TLI.allowTruncateForTailCall(Op->getType(), I->getType())) {
      DataBits = std::min(DataBits, I->getType()->getPrimitiveSizeInBits());
      NoopInput = Op;
    } else if (auto CS = ImmutableCallSite(I)) {
      const Value *ReturnedOp = CS.getReturnedArgOperand();
      if (ReturnedOp && isNoopBitcast(ReturnedOp->getType(), I->getType(), TLI))
        NoopInput = ReturnedOp;
    } else if (const InsertValueInst *IVI = dyn_cast<InsertValueInst>(V)) {
      // Value may come from either the aggregate or the scalar.
      ArrayRef<unsigned> InsertLoc = IVI->getIndices();
      if (ValLoc.size() >= InsertLoc.size() &&
          std::equal(InsertLoc.begin(), InsertLoc.end(), ValLoc.rbegin())) {
        // The type being inserted is a nested sub-type of the aggregate; we
        // have to remove those initial indices to get the location we're
        // interested in for the operand.
        ValLoc.resize(ValLoc.size() - InsertLoc.size());
        NoopInput = IVI->getInsertedValueOperand();
      } else {
        // The struct we're inserting into has the value we're interested in,
        // no change of address.
        NoopInput = Op;
      }
    } else if (const ExtractValueInst *EVI = dyn_cast<ExtractValueInst>(V)) {
      // The part we're interested in will inevitably be some sub-section of
      // the previous aggregate. Combine the two paths to obtain the true
      // address of our element.
      ArrayRef<unsigned> ExtractLoc = EVI->getIndices();
      ValLoc.append(ExtractLoc.rbegin(), ExtractLoc.rend());
      NoopInput = Op;
    }
    // Terminate if we couldn't find anything to look through.
    if (!NoopInput)
      return V;

    V = NoopInput;
  }
}

/// Return true if this scalar return value only has bits discarded on its path
/// from the "tail call" to the "ret". This includes the obvious noop
/// instructions handled by getNoopInput above as well as free truncations (or
/// extensions prior to the call).
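///
/// For example (illustrative): a call producing an i64 whose low 32 bits are
/// consumed by the "ret" through a truncate the target considers free only
/// discards data along the path, which is acceptable when AllowDifferingSizes
/// is set.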
static bool slotOnlyDiscardsData(const Value *RetVal, const Value *CallVal,
                                 SmallVectorImpl<unsigned> &RetIndices,
                                 SmallVectorImpl<unsigned> &CallIndices,
                                 bool AllowDifferingSizes,
                                 const TargetLoweringBase &TLI,
                                 const DataLayout &DL) {

  // Trace the sub-value needed by the return value as far back up the graph
  // as possible, in the hope that it will intersect with the value produced
  // by the call. In the simple case with no "returned" attribute, the hope is
  // actually that we end up back at the tail call instruction itself.
  unsigned BitsRequired = UINT_MAX;
  RetVal = getNoopInput(RetVal, RetIndices, BitsRequired, TLI, DL);

  // If this slot in the value returned is undef, it doesn't matter what the
  // call puts there, it'll be fine.
  if (isa<UndefValue>(RetVal))
    return true;

  // Now do a similar search up through the graph to find where the value
  // actually returned by the "tail call" comes from. In the simple case
  // without a "returned" attribute, the search is blocked immediately and the
  // loop is a no-op.
  unsigned BitsProvided = UINT_MAX;
  CallVal = getNoopInput(CallVal, CallIndices, BitsProvided, TLI, DL);

  // There's no hope if we can't actually trace them to (the same part of!)
  // the same value.
  if (CallVal != RetVal || CallIndices != RetIndices)
    return false;

  // However, intervening truncates may have made the call non-tail. Make sure
  // all the bits that are needed by the "ret" have been provided by the "tail
  // call". FIXME: with sufficiently cunning bit-tracking, we could look
  // through extensions too.
  if (BitsProvided < BitsRequired ||
      (!AllowDifferingSizes && BitsProvided != BitsRequired))
    return false;

  return true;
}

/// For an aggregate type, determine whether a given index is within bounds or
/// not.
static bool indexReallyValid(CompositeType *T, unsigned Idx) {
  if (ArrayType *AT = dyn_cast<ArrayType>(T))
    return Idx < AT->getNumElements();

  return Idx < cast<StructType>(T)->getNumElements();
}

/// Move the given iterators to the next leaf type in depth first traversal.
///
/// Performs a depth-first traversal of the type as specified by its arguments,
/// stopping at the next leaf node (which may be a legitimate scalar type or an
/// empty struct or array).
///
/// @param SubTypes List of the partial components making up the type from
/// outermost to innermost non-empty aggregate. The element currently
/// represented is SubTypes.back()->getTypeAtIndex(Path.back() - 1).
///
/// @param Path Set of extractvalue indices leading from the outermost type
/// (SubTypes[0]) to the leaf node currently represented.
///
/// @returns true if a new type was found, false otherwise. Calling this
/// function again on a finished iterator will repeatedly return
/// false. SubTypes.back()->getTypeAtIndex(Path.back()) is either an empty
/// aggregate or a non-aggregate.
static bool advanceToNextLeafType(SmallVectorImpl<CompositeType *> &SubTypes,
                                  SmallVectorImpl<unsigned> &Path) {
  // First march back up the tree until we can successfully increment one of
  // the coordinates in Path.
  while (!Path.empty() && !indexReallyValid(SubTypes.back(), Path.back() + 1)) {
    Path.pop_back();
    SubTypes.pop_back();
  }

  // If we reached the top, then the iterator is done.
  if (Path.empty())
    return false;

  // We know there's *some* valid leaf now, so march back down the tree picking
  // out the left-most element at each node.
  ++Path.back();
  Type *DeeperType = SubTypes.back()->getTypeAtIndex(Path.back());
  while (DeeperType->isAggregateType()) {
    CompositeType *CT = cast<CompositeType>(DeeperType);
    if (!indexReallyValid(CT, 0))
      return true;

    SubTypes.push_back(CT);
    Path.push_back(0);

    DeeperType = CT->getTypeAtIndex(0U);
  }

  return true;
}

/// Find the first non-empty, scalar-like type in Next and set up the iterator
/// components.
///
/// Assuming Next is an aggregate of some kind, this function will traverse the
/// tree from left to right (i.e. depth-first) looking for the first
/// non-aggregate type which will play a role in function return.
///
/// For example, if Next was {[0 x i64], {{}, i32, {}}, i32} then we would set
/// up Path as [1, 1] and SubTypes as [Next, {{}, i32, {}}] to represent the
/// first i32 in that type.
static bool firstRealType(Type *Next,
                          SmallVectorImpl<CompositeType *> &SubTypes,
                          SmallVectorImpl<unsigned> &Path) {
  // First initialise the iterator components to the first "leaf" node
  // (i.e. node with no valid sub-type at any index, so {} does count as a leaf
  // despite nominally being an aggregate).
  while (Next->isAggregateType() &&
         indexReallyValid(cast<CompositeType>(Next), 0)) {
    SubTypes.push_back(cast<CompositeType>(Next));
    Path.push_back(0);
    Next = cast<CompositeType>(Next)->getTypeAtIndex(0U);
  }

  // If there's no Path now, Next was originally scalar already (or empty
  // leaf). We're done.
  if (Path.empty())
    return true;

  // Otherwise, use normal iteration to keep looking through the tree until we
  // find a non-aggregate type.
  while (SubTypes.back()->getTypeAtIndex(Path.back())->isAggregateType()) {
    if (!advanceToNextLeafType(SubTypes, Path))
      return false;
  }

  return true;
}

/// Set the iterator data-structures to the next non-empty, non-aggregate
/// subtype.
static bool nextRealType(SmallVectorImpl<CompositeType *> &SubTypes,
                         SmallVectorImpl<unsigned> &Path) {
  do {
    if (!advanceToNextLeafType(SubTypes, Path))
      return false;

    assert(!Path.empty() && "found a leaf but didn't set the path?");
  } while (SubTypes.back()->getTypeAtIndex(Path.back())->isAggregateType());

  return true;
}

/// Test if the given instruction is in a position to be optimized
/// with a tail-call. This roughly means that it's in a block with
/// a return and there's nothing that needs to be scheduled
/// between it and the return.
///
/// This function only tests target-independent requirements.
bool llvm::isInTailCallPosition(ImmutableCallSite CS, const TargetMachine &TM) {
  const Instruction *I = CS.getInstruction();
  const BasicBlock *ExitBB = I->getParent();
  const TerminatorInst *Term = ExitBB->getTerminator();
  const ReturnInst *Ret = dyn_cast<ReturnInst>(Term);

  // The block must end in a return statement or unreachable.
  //
  // FIXME: Decline tailcall if it's not guaranteed and if the block ends in
  // an unreachable, for now. The way tailcall optimization is currently
  // implemented means it will add an epilogue followed by a jump. That is
  // not profitable. Also, if the callee is a special function (e.g.
  // longjmp on x86), it can end up causing miscompilation that has not
  // been fully understood.
  if (!Ret &&
      (!TM.Options.GuaranteedTailCallOpt || !isa<UnreachableInst>(Term)))
    return false;

  // If I will have a chain, make sure no other instruction that will have a
  // chain interposes between I and the return.
  if (I->mayHaveSideEffects() || I->mayReadFromMemory() ||
      !isSafeToSpeculativelyExecute(I))
    for (BasicBlock::const_iterator BBI = std::prev(ExitBB->end(), 2);; --BBI) {
      if (&*BBI == I)
        break;
      // Debug info intrinsics do not get in the way of tail call optimization.
      if (isa<DbgInfoIntrinsic>(BBI))
        continue;
      if (BBI->mayHaveSideEffects() || BBI->mayReadFromMemory() ||
          !isSafeToSpeculativelyExecute(&*BBI))
        return false;
    }

  const Function *F = ExitBB->getParent();
  return returnTypeIsEligibleForTailCall(
      F, I, Ret, *TM.getSubtargetImpl(*F)->getTargetLowering());
}

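/// Check that the caller's and callee's return-value attributes are
/// compatible for tail-call purposes. As an illustrative example: if the
/// caller's return is marked zeroext but the callee's result is not, the
/// callee is not guaranteed to zero-extend the high bits, so the tail call
/// must be rejected.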
bool llvm::attributesPermitTailCall(const Function *F, const Instruction *I,
                                    const ReturnInst *Ret,
                                    const TargetLoweringBase &TLI,
                                    bool *AllowDifferingSizes) {
  // ADS may be null, so don't write to it directly.
  bool DummyADS;
  bool &ADS = AllowDifferingSizes ? *AllowDifferingSizes : DummyADS;
  ADS = true;

  AttrBuilder CallerAttrs(F->getAttributes(),
                          AttributeSet::ReturnIndex);
  AttrBuilder CalleeAttrs(cast<CallInst>(I)->getAttributes(),
                          AttributeSet::ReturnIndex);

  // Noalias is completely benign as far as calling convention goes, it
  // shouldn't affect whether the call is a tail call.
  CallerAttrs.removeAttribute(Attribute::NoAlias);
  CalleeAttrs.removeAttribute(Attribute::NoAlias);

  if (CallerAttrs.contains(Attribute::ZExt)) {
    if (!CalleeAttrs.contains(Attribute::ZExt))
      return false;

    ADS = false;
    CallerAttrs.removeAttribute(Attribute::ZExt);
    CalleeAttrs.removeAttribute(Attribute::ZExt);
  } else if (CallerAttrs.contains(Attribute::SExt)) {
    if (!CalleeAttrs.contains(Attribute::SExt))
      return false;

    ADS = false;
    CallerAttrs.removeAttribute(Attribute::SExt);
    CalleeAttrs.removeAttribute(Attribute::SExt);
  }

  // If they're still different, there's some facet we don't understand
  // (currently only "inreg", but in future who knows). It may be OK but the
  // only safe option is to reject the tail call.
  return CallerAttrs == CalleeAttrs;
}

bool llvm::returnTypeIsEligibleForTailCall(const Function *F,
                                           const Instruction *I,
                                           const ReturnInst *Ret,
                                           const TargetLoweringBase &TLI) {
  // If the block ends with a void return or unreachable, it doesn't matter
  // what the call's return type is.
  if (!Ret || Ret->getNumOperands() == 0) return true;

  // If the return value is undef, it doesn't matter what the call's
  // return type is.
  if (isa<UndefValue>(Ret->getOperand(0))) return true;

  // Make sure the attributes attached to each return are compatible.
  bool AllowDifferingSizes;
  if (!attributesPermitTailCall(F, I, Ret, TLI, &AllowDifferingSizes))
    return false;

  const Value *RetVal = Ret->getOperand(0), *CallVal = I;
  SmallVector<unsigned, 4> RetPath, CallPath;
  SmallVector<CompositeType *, 4> RetSubTypes, CallSubTypes;

  bool RetEmpty = !firstRealType(RetVal->getType(), RetSubTypes, RetPath);
  bool CallEmpty = !firstRealType(CallVal->getType(), CallSubTypes, CallPath);

  // If nothing is actually returned, it doesn't matter what the callee put
  // there; it's a valid tail call.
  if (RetEmpty)
    return true;

  // Iterate pairwise through each of the value types making up the tail call
  // and the corresponding return. For each one we want to know whether it's
  // essentially going directly from the tail call to the ret, via operations
  // that end up not generating any code.
  //
  // We allow a certain amount of covariance here. For example it's permitted
  // for the tail call to define more bits than the ret actually cares about
  // (e.g. via a truncate).
  do {
    if (CallEmpty) {
      // We've exhausted the values produced by the tail call instruction, the
      // rest are essentially undef. The type doesn't really matter, but we
      // need *something*.
      Type *SlotType = RetSubTypes.back()->getTypeAtIndex(RetPath.back());
      CallVal = UndefValue::get(SlotType);
    }

    // The manipulations performed when we're looking through an insertvalue
    // or an extractvalue would happen at the front of the RetPath list, so
    // since we have to copy it anyway it's more efficient to create a
    // reversed copy.
    SmallVector<unsigned, 4> TmpRetPath(RetPath.rbegin(), RetPath.rend());
    SmallVector<unsigned, 4> TmpCallPath(CallPath.rbegin(), CallPath.rend());

    // Finally, we can check whether the value produced by the tail call at
    // this index is compatible with the value we return.
    if (!slotOnlyDiscardsData(RetVal, CallVal, TmpRetPath, TmpCallPath,
                              AllowDifferingSizes, TLI,
                              F->getParent()->getDataLayout()))
      return false;

    CallEmpty = !nextRealType(CallSubTypes, CallPath);
  } while (nextRealType(RetSubTypes, RetPath));

  return true;
}

bool llvm::canBeOmittedFromSymbolTable(const GlobalValue *GV) {
  if (!GV->hasLinkOnceODRLinkage())
    return false;

  // We assume that anyone who sets global unnamed_addr on a non-constant
  // knows what they're doing.
  if (GV->hasGlobalUnnamedAddr())
    return true;

  // If it is a non-constant variable, it needs to be uniqued across shared
  // objects.
  if (const GlobalVariable *Var = dyn_cast<GlobalVariable>(GV)) {
    if (!Var->isConstant())
      return false;
  }

  return GV->hasAtLeastLocalUnnamedAddr();
}

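/// Assign every block reachable from MBB, without entering another funclet or
/// following a return, to funclet number Funclet. This is a simple
/// worklist-driven flood fill over the CFG.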
static void collectFuncletMembers(
    DenseMap<const MachineBasicBlock *, int> &FuncletMembership, int Funclet,
    const MachineBasicBlock *MBB) {
  SmallVector<const MachineBasicBlock *, 16> Worklist = {MBB};
  while (!Worklist.empty()) {
    const MachineBasicBlock *Visiting = Worklist.pop_back_val();
    // Don't follow blocks which start new funclets.
    if (Visiting->isEHPad() && Visiting != MBB)
      continue;

    // Add this MBB to our funclet.
    auto P = FuncletMembership.insert(std::make_pair(Visiting, Funclet));

    // Don't revisit blocks.
    if (!P.second) {
      assert(P.first->second == Funclet && "MBB is part of two funclets!");
      continue;
    }

    // Returns are boundaries where funclet transfer can occur, don't follow
    // successors.
    if (Visiting->isReturnBlock())
      continue;

    for (const MachineBasicBlock *Succ : Visiting->successors())
      Worklist.push_back(Succ);
  }
}

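/// Compute which funclet each machine basic block executes in: a flood fill
/// from the function entry and from each funclet entry (plus catchret
/// successors), with SEH catch pads and otherwise-unreachable blocks treated
/// as part of the parent function.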
DenseMap<const MachineBasicBlock *, int>
llvm::getFuncletMembership(const MachineFunction &MF) {
  DenseMap<const MachineBasicBlock *, int> FuncletMembership;

  // We don't have anything to do if there aren't any EH pads.
  if (!MF.hasEHFunclets())
    return FuncletMembership;

  int EntryBBNumber = MF.front().getNumber();
  bool IsSEH = isAsynchronousEHPersonality(
      classifyEHPersonality(MF.getFunction()->getPersonalityFn()));

  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  SmallVector<const MachineBasicBlock *, 16> FuncletBlocks;
  SmallVector<const MachineBasicBlock *, 16> UnreachableBlocks;
  SmallVector<const MachineBasicBlock *, 16> SEHCatchPads;
  SmallVector<std::pair<const MachineBasicBlock *, int>, 16> CatchRetSuccessors;
  for (const MachineBasicBlock &MBB : MF) {
    if (MBB.isEHFuncletEntry()) {
      FuncletBlocks.push_back(&MBB);
    } else if (IsSEH && MBB.isEHPad()) {
      SEHCatchPads.push_back(&MBB);
    } else if (MBB.pred_empty()) {
      UnreachableBlocks.push_back(&MBB);
    }

    MachineBasicBlock::const_iterator MBBI = MBB.getFirstTerminator();

    // CatchPads are not funclets for SEH so do not consider CatchRet to
    // transfer control to another funclet.
    if (MBBI == MBB.end() || MBBI->getOpcode() != TII->getCatchReturnOpcode())
      continue;

    // FIXME: SEH CatchPads are not necessarily in the parent function:
    // they could be inside a finally block.
    const MachineBasicBlock *Successor = MBBI->getOperand(0).getMBB();
    const MachineBasicBlock *SuccessorColor = MBBI->getOperand(1).getMBB();
    CatchRetSuccessors.push_back(
        {Successor, IsSEH ? EntryBBNumber : SuccessorColor->getNumber()});
  }

  // There is nothing left to do if there are no funclet entry blocks.
  if (FuncletBlocks.empty())
    return FuncletMembership;

  // Identify all the basic blocks reachable from the function entry.
  collectFuncletMembers(FuncletMembership, EntryBBNumber, &MF.front());
  // All blocks not part of a funclet are in the parent function.
  for (const MachineBasicBlock *MBB : UnreachableBlocks)
    collectFuncletMembers(FuncletMembership, EntryBBNumber, MBB);
  // Next, identify all the blocks inside the funclets.
  for (const MachineBasicBlock *MBB : FuncletBlocks)
    collectFuncletMembers(FuncletMembership, MBB->getNumber(), MBB);
  // SEH CatchPads aren't really funclets, handle them separately.
  for (const MachineBasicBlock *MBB : SEHCatchPads)
    collectFuncletMembers(FuncletMembership, EntryBBNumber, MBB);
  // Finally, identify all the targets of a catchret.
  for (std::pair<const MachineBasicBlock *, int> CatchRetPair :
       CatchRetSuccessors)
    collectFuncletMembers(FuncletMembership, CatchRetPair.second,
                          CatchRetPair.first);
  return FuncletMembership;
}