//===- ScopHelper.cpp - Some Helper Functions for Scop --------------------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // Small functions that help with Scop and LLVM-IR.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #include "polly/Support/ScopHelper.h"
15 #include "polly/Options.h"
16 #include "polly/ScopInfo.h"
17 #include "polly/Support/SCEVValidator.h"
18 #include "llvm/Analysis/LoopInfo.h"
19 #include "llvm/Analysis/RegionInfo.h"
20 #include "llvm/Analysis/ScalarEvolution.h"
21 #include "llvm/Analysis/ScalarEvolutionExpander.h"
22 #include "llvm/Analysis/ScalarEvolutionExpressions.h"
23 #include "llvm/IR/CFG.h"
24 #include "llvm/IR/IntrinsicInst.h"
25 #include "llvm/Support/Debug.h"
26 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
27 
28 using namespace llvm;
29 using namespace polly;
30 
31 #define DEBUG_TYPE "polly-scop-helper"
32 
33 static cl::opt<bool> PollyAllowErrorBlocks(
34     "polly-allow-error-blocks",
    cl::desc("Allow speculation on the execution of 'error blocks'."),
36     cl::Hidden, cl::init(true), cl::ZeroOrMore, cl::cat(PollyCategory));
37 
38 static cl::list<std::string> DebugFunctions(
39     "polly-debug-func",
40     cl::desc("Allow calls to the specified functions in SCoPs even if their "
41              "side-effects are unknown. This can be used to do debug output in "
42              "Polly-transformed code."),
43     cl::Hidden, cl::ZeroOrMore, cl::CommaSeparated, cl::cat(PollyCategory));
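
// For illustration (a hypothetical invocation; the function names are just
// examples): passing '-polly-debug-func=printf,trace_fn' to opt allows SCoPs
// to contain calls to printf and trace_fn even though their side effects are
// otherwise unknown to the SCoP detection.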
44 
45 // Ensures that there is just one predecessor to the entry node from outside the
46 // region.
47 // The identity of the region entry node is preserved.
48 static void simplifyRegionEntry(Region *R, DominatorTree *DT, LoopInfo *LI,
49                                 RegionInfo *RI) {
50   BasicBlock *EnteringBB = R->getEnteringBlock();
51   BasicBlock *Entry = R->getEntry();
52 
53   // Before (one of):
54   //
55   //                       \    /            //
56   //                      EnteringBB         //
57   //                        |    \------>    //
58   //   \   /                |                //
59   //   Entry <--\         Entry <--\         //
60   //   /   \    /         /   \    /         //
61   //        ....               ....          //
62 
63   // Create single entry edge if the region has multiple entry edges.
64   if (!EnteringBB) {
65     SmallVector<BasicBlock *, 4> Preds;
66     for (BasicBlock *P : predecessors(Entry))
67       if (!R->contains(P))
68         Preds.push_back(P);
69 
70     BasicBlock *NewEntering =
71         SplitBlockPredecessors(Entry, Preds, ".region_entering", DT, LI);
72 
73     if (RI) {
      // The exit block of preceding regions must be changed to NewEntering.
75       for (BasicBlock *ExitPred : predecessors(NewEntering)) {
76         Region *RegionOfPred = RI->getRegionFor(ExitPred);
77         if (RegionOfPred->getExit() != Entry)
78           continue;
79 
80         while (!RegionOfPred->isTopLevelRegion() &&
81                RegionOfPred->getExit() == Entry) {
82           RegionOfPred->replaceExit(NewEntering);
83           RegionOfPred = RegionOfPred->getParent();
84         }
85       }
86 
      // Make all ancestors use NewEntering as entry; there might be edges to
      // it.
88       Region *AncestorR = R->getParent();
89       RI->setRegionFor(NewEntering, AncestorR);
90       while (!AncestorR->isTopLevelRegion() && AncestorR->getEntry() == Entry) {
91         AncestorR->replaceEntry(NewEntering);
92         AncestorR = AncestorR->getParent();
93       }
94     }
95 
96     EnteringBB = NewEntering;
97   }
98   assert(R->getEnteringBlock() == EnteringBB);
99 
100   // After:
101   //
102   //    \    /       //
103   //  EnteringBB     //
104   //      |          //
105   //      |          //
106   //    Entry <--\   //
107   //    /   \    /   //
108   //         ....    //
109 }
110 
111 // Ensure that the region has a single block that branches to the exit node.
112 static void simplifyRegionExit(Region *R, DominatorTree *DT, LoopInfo *LI,
113                                RegionInfo *RI) {
114   BasicBlock *ExitBB = R->getExit();
115   BasicBlock *ExitingBB = R->getExitingBlock();
116 
117   // Before:
118   //
119   //   (Region)   ______/  //
120   //      \  |   /         //
121   //       ExitBB          //
122   //       /    \          //
123 
124   if (!ExitingBB) {
125     SmallVector<BasicBlock *, 4> Preds;
126     for (BasicBlock *P : predecessors(ExitBB))
127       if (R->contains(P))
128         Preds.push_back(P);
129 
130     //  Preds[0] Preds[1]      otherBB //
131     //         \  |  ________/         //
132     //          \ | /                  //
133     //           BB                    //
134     ExitingBB =
135         SplitBlockPredecessors(ExitBB, Preds, ".region_exiting", DT, LI);
136     // Preds[0] Preds[1]      otherBB  //
137     //        \  /           /         //
138     // BB.region_exiting    /          //
139     //                  \  /           //
140     //                   BB            //
141 
142     if (RI)
143       RI->setRegionFor(ExitingBB, R);
144 
    // Change the exit of nested regions to the new exiting block, but keep
    // the exit of the region itself at ExitBB.
146     R->replaceExitRecursive(ExitingBB);
147     R->replaceExit(ExitBB);
148   }
149   assert(ExitingBB == R->getExitingBlock());
150 
151   // After:
152   //
153   //     \   /                //
154   //    ExitingBB     _____/  //
155   //          \      /        //
156   //           ExitBB         //
157   //           /    \         //
158 }
159 
160 void polly::simplifyRegion(Region *R, DominatorTree *DT, LoopInfo *LI,
161                            RegionInfo *RI) {
162   assert(R && !R->isTopLevelRegion());
163   assert(!RI || RI == R->getRegionInfo());
164   assert((!RI || DT) &&
165          "RegionInfo requires DominatorTree to be updated as well");
166 
167   simplifyRegionEntry(R, DT, LI, RI);
168   simplifyRegionExit(R, DT, LI, RI);
169   assert(R->isSimple());
170 }
171 
// Split the block into two successive blocks.
//
// Like llvm::SplitBlock, but also preserves RegionInfo.
175 static BasicBlock *splitBlock(BasicBlock *Old, Instruction *SplitPt,
176                               DominatorTree *DT, llvm::LoopInfo *LI,
177                               RegionInfo *RI) {
178   assert(Old && SplitPt);
179 
180   // Before:
181   //
182   //  \   /  //
183   //   Old   //
184   //  /   \  //
185 
186   BasicBlock *NewBlock = llvm::SplitBlock(Old, SplitPt, DT, LI);
187 
188   if (RI) {
189     Region *R = RI->getRegionFor(Old);
190     RI->setRegionFor(NewBlock, R);
191   }
192 
193   // After:
194   //
195   //   \   /    //
196   //    Old     //
197   //     |      //
198   //  NewBlock  //
199   //   /   \    //
200 
201   return NewBlock;
202 }
203 
204 void polly::splitEntryBlockForAlloca(BasicBlock *EntryBlock, DominatorTree *DT,
205                                      LoopInfo *LI, RegionInfo *RI) {
  // Find the first non-alloca instruction. Every basic block has a non-alloca
  // instruction, as every well-formed basic block has a terminator.
208   BasicBlock::iterator I = EntryBlock->begin();
209   while (isa<AllocaInst>(I))
210     ++I;
211 
212   // splitBlock updates DT, LI and RI.
213   splitBlock(EntryBlock, &*I, DT, LI, RI);
214 }
215 
216 void polly::splitEntryBlockForAlloca(BasicBlock *EntryBlock, Pass *P) {
217   auto *DTWP = P->getAnalysisIfAvailable<DominatorTreeWrapperPass>();
218   auto *DT = DTWP ? &DTWP->getDomTree() : nullptr;
219   auto *LIWP = P->getAnalysisIfAvailable<LoopInfoWrapperPass>();
220   auto *LI = LIWP ? &LIWP->getLoopInfo() : nullptr;
221   RegionInfoPass *RIP = P->getAnalysisIfAvailable<RegionInfoPass>();
222   RegionInfo *RI = RIP ? &RIP->getRegionInfo() : nullptr;
223 
224   // splitBlock updates DT, LI and RI.
225   polly::splitEntryBlockForAlloca(EntryBlock, DT, LI, RI);
226 }
227 
/// The SCEVExpander will __not__ generate any code for an existing SDiv/SRem
/// instruction; it will just reuse it if it is referenced as a SCEVUnknown.
/// However, we want to generate new code if the instruction is in the
/// analyzed region and we generate code outside of/in front of that region.
/// Hence, we generate the code for the SDiv/SRem operands in front of the
/// analyzed region and then create a new SDiv/SRem operation there too.
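///
/// For example (an illustrative sketch): if a loop bound inside a SCoP is
/// computed as
/// \code
///   for (int i = 0; i < n / m; i++)
///     A[i] = 0;
/// \endcode
/// the 'n / m' SDiv may lie inside the region. A runtime check referring to
/// the trip count cannot reuse that instruction; instead the operands are
/// expanded in front of the region and a fresh SDiv is created there (with
/// the divisor clamped to be non-zero, see visitUnknown).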
234 struct ScopExpander : SCEVVisitor<ScopExpander, const SCEV *> {
235   friend struct SCEVVisitor<ScopExpander, const SCEV *>;
236 
237   explicit ScopExpander(const Region &R, ScalarEvolution &SE,
238                         const DataLayout &DL, const char *Name, ValueMapT *VMap,
239                         BasicBlock *RTCBB)
240       : Expander(SCEVExpander(SE, DL, Name)), SE(SE), Name(Name), R(R),
241         VMap(VMap), RTCBB(RTCBB) {}
242 
243   Value *expandCodeFor(const SCEV *E, Type *Ty, Instruction *I) {
    // If we generate code in the region, we immediately fall back to the
    // SCEVExpander; otherwise we stop at all unknowns in the SCEV and, if
    // needed, replace them by copies computed in the entering block.
247     if (!R.contains(I))
248       E = visit(E);
249     return Expander.expandCodeFor(E, Ty, I);
250   }
251 
252 private:
253   SCEVExpander Expander;
254   ScalarEvolution &SE;
255   const char *Name;
256   const Region &R;
257   ValueMapT *VMap;
258   BasicBlock *RTCBB;
259 
260   const SCEV *visitGenericInst(const SCEVUnknown *E, Instruction *Inst,
261                                Instruction *IP) {
262     if (!Inst || !R.contains(Inst))
263       return E;
264 
265     assert(!Inst->mayThrow() && !Inst->mayReadOrWriteMemory() &&
266            !isa<PHINode>(Inst));
267 
268     auto *InstClone = Inst->clone();
269     for (auto &Op : Inst->operands()) {
270       assert(SE.isSCEVable(Op->getType()));
271       auto *OpSCEV = SE.getSCEV(Op);
272       auto *OpClone = expandCodeFor(OpSCEV, Op->getType(), IP);
273       InstClone->replaceUsesOfWith(Op, OpClone);
274     }
275 
276     InstClone->setName(Name + Inst->getName());
277     InstClone->insertBefore(IP);
278     return SE.getSCEV(InstClone);
279   }
280 
  const SCEV *visitUnknown(const SCEVUnknown *E) {
    // If a value mapping was given, check whether the underlying value is
    // remapped.
284     Value *NewVal = VMap ? VMap->lookup(E->getValue()) : nullptr;
285     if (NewVal) {
286       auto *NewE = SE.getSCEV(NewVal);
287 
      // While the mapped value might be different, its SCEV representation
      // might not be. Hence, check for equality before recursing.
290       if (E != NewE)
291         return visit(NewE);
292     }
293 
294     Instruction *Inst = dyn_cast<Instruction>(E->getValue());
295     Instruction *IP;
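    // Pick an insert point for a potential copy: directly in front of Inst
    // if it already lies outside the region, otherwise in front of the
    // runtime check, or in the function entry if Inst belongs to a
    // different function.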
296     if (Inst && !R.contains(Inst))
297       IP = Inst;
298     else if (Inst && RTCBB->getParent() == Inst->getFunction())
299       IP = RTCBB->getTerminator();
300     else
301       IP = RTCBB->getParent()->getEntryBlock().getTerminator();
302 
303     if (!Inst || (Inst->getOpcode() != Instruction::SRem &&
304                   Inst->getOpcode() != Instruction::SDiv))
305       return visitGenericInst(E, Inst, IP);
306 
307     const SCEV *LHSScev = SE.getSCEV(Inst->getOperand(0));
308     const SCEV *RHSScev = SE.getSCEV(Inst->getOperand(1));
309 
    // The newly created SDiv/SRem may be executed where the original was not
    // (e.g., in front of the region), so clamp the divisor to at least 1 to
    // avoid introducing a division by zero.
    if (!SE.isKnownNonZero(RHSScev))
      RHSScev = SE.getUMaxExpr(RHSScev, SE.getConstant(E->getType(), 1));
312 
313     Value *LHS = expandCodeFor(LHSScev, E->getType(), IP);
314     Value *RHS = expandCodeFor(RHSScev, E->getType(), IP);
315 
316     Inst = BinaryOperator::Create((Instruction::BinaryOps)Inst->getOpcode(),
317                                   LHS, RHS, Inst->getName() + Name, IP);
318     return SE.getSCEV(Inst);
319   }
320 
321   /// The following functions will just traverse the SCEV and rebuild it with
322   /// the new operands returned by the traversal.
323   ///
324   ///{
325   const SCEV *visitConstant(const SCEVConstant *E) { return E; }
326   const SCEV *visitTruncateExpr(const SCEVTruncateExpr *E) {
327     return SE.getTruncateExpr(visit(E->getOperand()), E->getType());
328   }
329   const SCEV *visitZeroExtendExpr(const SCEVZeroExtendExpr *E) {
330     return SE.getZeroExtendExpr(visit(E->getOperand()), E->getType());
331   }
332   const SCEV *visitSignExtendExpr(const SCEVSignExtendExpr *E) {
333     return SE.getSignExtendExpr(visit(E->getOperand()), E->getType());
334   }
335   const SCEV *visitUDivExpr(const SCEVUDivExpr *E) {
336     auto *RHSScev = visit(E->getRHS());
    // As in visitUnknown, clamp the divisor to at least 1 to avoid division
    // by zero in the expanded code.
    if (!SE.isKnownNonZero(RHSScev))
      RHSScev = SE.getUMaxExpr(RHSScev, SE.getConstant(E->getType(), 1));
339     return SE.getUDivExpr(visit(E->getLHS()), RHSScev);
340   }
341   const SCEV *visitAddExpr(const SCEVAddExpr *E) {
342     SmallVector<const SCEV *, 4> NewOps;
343     for (const SCEV *Op : E->operands())
344       NewOps.push_back(visit(Op));
345     return SE.getAddExpr(NewOps);
346   }
347   const SCEV *visitMulExpr(const SCEVMulExpr *E) {
348     SmallVector<const SCEV *, 4> NewOps;
349     for (const SCEV *Op : E->operands())
350       NewOps.push_back(visit(Op));
351     return SE.getMulExpr(NewOps);
352   }
353   const SCEV *visitUMaxExpr(const SCEVUMaxExpr *E) {
354     SmallVector<const SCEV *, 4> NewOps;
355     for (const SCEV *Op : E->operands())
356       NewOps.push_back(visit(Op));
357     return SE.getUMaxExpr(NewOps);
358   }
359   const SCEV *visitSMaxExpr(const SCEVSMaxExpr *E) {
360     SmallVector<const SCEV *, 4> NewOps;
361     for (const SCEV *Op : E->operands())
362       NewOps.push_back(visit(Op));
363     return SE.getSMaxExpr(NewOps);
364   }
365   const SCEV *visitAddRecExpr(const SCEVAddRecExpr *E) {
366     SmallVector<const SCEV *, 4> NewOps;
367     for (const SCEV *Op : E->operands())
368       NewOps.push_back(visit(Op));
369     return SE.getAddRecExpr(NewOps, E->getLoop(), E->getNoWrapFlags());
370   }
371   ///}
372 };
373 
374 Value *polly::expandCodeFor(Scop &S, ScalarEvolution &SE, const DataLayout &DL,
375                             const char *Name, const SCEV *E, Type *Ty,
376                             Instruction *IP, ValueMapT *VMap,
377                             BasicBlock *RTCBB) {
378   ScopExpander Expander(S.getRegion(), SE, DL, Name, VMap, RTCBB);
379   return Expander.expandCodeFor(E, Ty, IP);
380 }
381 
382 bool polly::isErrorBlock(BasicBlock &BB, const Region &R, LoopInfo &LI,
383                          const DominatorTree &DT) {
384   if (!PollyAllowErrorBlocks)
385     return false;
386 
387   if (isa<UnreachableInst>(BB.getTerminator()))
388     return true;
389 
390   if (LI.isLoopHeader(&BB))
391     return false;
392 
  // Basic blocks that are always executed are not considered error blocks,
  // as their execution cannot be a rare event.
395   bool DominatesAllPredecessors = true;
396   if (R.isTopLevelRegion()) {
397     for (BasicBlock &I : *R.getEntry()->getParent())
398       if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I))
399         DominatesAllPredecessors = false;
400   } else {
401     for (auto Pred : predecessors(R.getExit()))
402       if (R.contains(Pred) && !DT.dominates(&BB, Pred))
403         DominatesAllPredecessors = false;
404   }
405 
406   if (DominatesAllPredecessors)
407     return false;
408 
409   for (Instruction &Inst : BB)
410     if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
411       if (isDebugCall(CI))
412         continue;
413 
414       if (isIgnoredIntrinsic(CI))
415         continue;
416 
417       // memset, memcpy and memmove are modeled intrinsics.
418       if (isa<MemSetInst>(CI) || isa<MemTransferInst>(CI))
419         continue;
420 
421       if (!CI->doesNotAccessMemory())
422         return true;
423       if (CI->doesNotReturn())
424         return true;
425     }
426 
427   return false;
428 }
429 
430 Value *polly::getConditionFromTerminator(TerminatorInst *TI) {
431   if (BranchInst *BR = dyn_cast<BranchInst>(TI)) {
432     if (BR->isUnconditional())
433       return ConstantInt::getTrue(Type::getInt1Ty(TI->getContext()));
434 
435     return BR->getCondition();
436   }
437 
438   if (SwitchInst *SI = dyn_cast<SwitchInst>(TI))
439     return SI->getCondition();
440 
441   return nullptr;
442 }
443 
444 bool polly::isHoistableLoad(LoadInst *LInst, Region &R, LoopInfo &LI,
445                             ScalarEvolution &SE, const DominatorTree &DT) {
446   Loop *L = LI.getLoopFor(LInst->getParent());
447   auto *Ptr = LInst->getPointerOperand();
448   const SCEV *PtrSCEV = SE.getSCEVAtScope(Ptr, L);
449   while (L && R.contains(L)) {
450     if (!SE.isLoopInvariant(PtrSCEV, L))
451       return false;
452     L = L->getParentLoop();
453   }
454 
455   for (auto *User : Ptr->users()) {
456     auto *UserI = dyn_cast<Instruction>(User);
457     if (!UserI || !R.contains(UserI))
458       continue;
459     if (!UserI->mayWriteToMemory())
460       continue;
461 
462     auto &BB = *UserI->getParent();
463     if (DT.dominates(&BB, LInst->getParent()))
464       return false;
465 
466     bool DominatesAllPredecessors = true;
467     if (R.isTopLevelRegion()) {
468       for (BasicBlock &I : *R.getEntry()->getParent())
469         if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I))
470           DominatesAllPredecessors = false;
471     } else {
472       for (auto Pred : predecessors(R.getExit()))
473         if (R.contains(Pred) && !DT.dominates(&BB, Pred))
474           DominatesAllPredecessors = false;
475     }
476 
477     if (!DominatesAllPredecessors)
478       continue;
479 
480     return false;
481   }
482 
483   return true;
484 }
485 
486 bool polly::isIgnoredIntrinsic(const Value *V) {
487   if (auto *IT = dyn_cast<IntrinsicInst>(V)) {
488     switch (IT->getIntrinsicID()) {
489     // Lifetime markers are supported/ignored.
490     case llvm::Intrinsic::lifetime_start:
491     case llvm::Intrinsic::lifetime_end:
492     // Invariant markers are supported/ignored.
493     case llvm::Intrinsic::invariant_start:
494     case llvm::Intrinsic::invariant_end:
495     // Some misc annotations are supported/ignored.
496     case llvm::Intrinsic::var_annotation:
497     case llvm::Intrinsic::ptr_annotation:
498     case llvm::Intrinsic::annotation:
499     case llvm::Intrinsic::donothing:
500     case llvm::Intrinsic::assume:
501     // Some debug info intrinsics are supported/ignored.
502     case llvm::Intrinsic::dbg_value:
503     case llvm::Intrinsic::dbg_declare:
504       return true;
505     default:
506       break;
507     }
508   }
509   return false;
510 }
511 
512 bool polly::canSynthesize(const Value *V, const Scop &S, ScalarEvolution *SE,
513                           Loop *Scope) {
514   if (!V || !SE->isSCEVable(V->getType()))
515     return false;
516 
517   const InvariantLoadsSetTy &ILS = S.getRequiredInvariantLoads();
518   if (const SCEV *Scev = SE->getSCEVAtScope(const_cast<Value *>(V), Scope))
519     if (!isa<SCEVCouldNotCompute>(Scev))
520       if (!hasScalarDepsInsideRegion(Scev, &S.getRegion(), Scope, false, ILS))
521         return true;
522 
523   return false;
524 }
525 
526 llvm::BasicBlock *polly::getUseBlock(const llvm::Use &U) {
527   Instruction *UI = dyn_cast<Instruction>(U.getUser());
528   if (!UI)
529     return nullptr;
530 
531   if (PHINode *PHI = dyn_cast<PHINode>(UI))
532     return PHI->getIncomingBlock(U);
533 
534   return UI->getParent();
535 }
536 
537 std::tuple<std::vector<const SCEV *>, std::vector<int>>
538 polly::getIndexExpressionsFromGEP(GetElementPtrInst *GEP, ScalarEvolution &SE) {
539   std::vector<const SCEV *> Subscripts;
540   std::vector<int> Sizes;
541 
542   Type *Ty = GEP->getPointerOperandType();
543 
544   bool DroppedFirstDim = false;
545 
546   for (unsigned i = 1; i < GEP->getNumOperands(); i++) {
547 
548     const SCEV *Expr = SE.getSCEV(GEP->getOperand(i));
549 
550     if (i == 1) {
551       if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
552         Ty = PtrTy->getElementType();
553       } else if (auto *ArrayTy = dyn_cast<ArrayType>(Ty)) {
554         Ty = ArrayTy->getElementType();
555       } else {
556         Subscripts.clear();
557         Sizes.clear();
558         break;
559       }
560       if (auto *Const = dyn_cast<SCEVConstant>(Expr))
561         if (Const->getValue()->isZero()) {
562           DroppedFirstDim = true;
563           continue;
564         }
565       Subscripts.push_back(Expr);
566       continue;
567     }
568 
569     auto *ArrayTy = dyn_cast<ArrayType>(Ty);
570     if (!ArrayTy) {
571       Subscripts.clear();
572       Sizes.clear();
573       break;
574     }
575 
576     Subscripts.push_back(Expr);
577     if (!(DroppedFirstDim && i == 2))
578       Sizes.push_back(ArrayTy->getNumElements());
579 
580     Ty = ArrayTy->getElementType();
581   }
582 
583   return std::make_tuple(Subscripts, Sizes);
584 }
585 
586 llvm::Loop *polly::getFirstNonBoxedLoopFor(llvm::Loop *L, llvm::LoopInfo &LI,
587                                            const BoxedLoopsSetTy &BoxedLoops) {
588   while (BoxedLoops.count(L))
589     L = L->getParentLoop();
590   return L;
591 }
592 
593 llvm::Loop *polly::getFirstNonBoxedLoopFor(llvm::BasicBlock *BB,
594                                            llvm::LoopInfo &LI,
595                                            const BoxedLoopsSetTy &BoxedLoops) {
596   Loop *L = LI.getLoopFor(BB);
597   return getFirstNonBoxedLoopFor(L, LI, BoxedLoops);
598 }
599 
600 bool polly::isDebugCall(Instruction *Inst) {
601   auto *CI = dyn_cast<CallInst>(Inst);
602   if (!CI)
603     return false;
604 
605   Function *CF = CI->getCalledFunction();
606   if (!CF)
607     return false;
608 
609   return std::find(DebugFunctions.begin(), DebugFunctions.end(),
610                    CF->getName()) != DebugFunctions.end();
611 }
612 
613 static bool hasDebugCall(BasicBlock *BB) {
614   for (Instruction &Inst : *BB) {
615     if (isDebugCall(&Inst))
616       return true;
617   }
618   return false;
619 }
620 
621 bool polly::hasDebugCall(ScopStmt *Stmt) {
622   // Quick skip if no debug functions have been defined.
623   if (DebugFunctions.empty())
624     return false;
625 
626   if (!Stmt)
627     return false;
628 
629   for (Instruction *Inst : Stmt->getInstructions())
630     if (isDebugCall(Inst))
631       return true;
632 
633   if (Stmt->isRegionStmt()) {
634     for (BasicBlock *RBB : Stmt->getRegion()->blocks())
635       if (RBB != Stmt->getEntryBlock() && ::hasDebugCall(RBB))
636         return true;
637   }
638 
639   return false;
640 }
641