1 //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "llvm/Analysis/CGSCCPassManager.h"
10 #include "llvm/ADT/ArrayRef.h"
11 #include "llvm/ADT/Optional.h"
12 #include "llvm/ADT/STLExtras.h"
13 #include "llvm/ADT/SetVector.h"
14 #include "llvm/ADT/SmallPtrSet.h"
15 #include "llvm/ADT/SmallVector.h"
16 #include "llvm/ADT/iterator_range.h"
17 #include "llvm/Analysis/LazyCallGraph.h"
18 #include "llvm/IR/CallSite.h"
19 #include "llvm/IR/Constant.h"
20 #include "llvm/IR/InstIterator.h"
21 #include "llvm/IR/Instruction.h"
22 #include "llvm/IR/PassManager.h"
23 #include "llvm/IR/PassManagerImpl.h"
24 #include "llvm/Support/Casting.h"
25 #include "llvm/Support/Debug.h"
26 #include "llvm/Support/raw_ostream.h"
27 #include "llvm/Support/TimeProfiler.h"
28 #include <algorithm>
29 #include <cassert>
30 #include <iterator>
31 
32 #define DEBUG_TYPE "cgscc"
33 
34 using namespace llvm;
35 
36 // Explicit template instantiations and specialization definitions for core
37 // template typedefs.
38 namespace llvm {
39 
// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
// The CGSCC pass manager itself, parameterized over the extra LazyCallGraph
// and CGSCCUpdateResult arguments threaded through to each pass run.
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
// Proxies connecting the CGSCC analysis manager to the module layer, and
// exposing outer analysis managers to the inner SCC and function layers.
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
49 
/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
///
/// Unlike the generic PassManager::run, each pass here may refine the SCC it
/// is run on (via \p UR), so the loop tracks the "current" SCC through a
/// pointer that is refreshed after every pass, and stops early if the current
/// SCC itself becomes invalidated.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, skip its execution completely if asked to (callback returns false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(*C, AM, G, UR);
    }

    // Use the invalidated variant of the after-pass callback when the pass
    // destroyed the SCC it was handed, as *C may no longer be dereferenceable.
    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses. Note this must happen before PassPA is moved
    // below.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}
136 
/// Invalidation handler for the CGSCC analysis manager's module-level proxy.
///
/// Returns true when the proxy itself must be recomputed (the inner analysis
/// manager has been cleared), false when it remains a valid proxy. In the
/// latter case, invalidation is propagated into the SCC layer per-SCC,
/// honoring any deferred outer-analysis invalidations registered by SCC
/// analyses.
bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      // Lazily built custom preserved set for this SCC; only materialized if
      // a deferred outer-analysis invalidation forces us to abandon results.
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set didn't
      // preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
208 
209 template <>
210 CGSCCAnalysisManagerModuleProxy::Result
211 CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
212   // Force the Function analysis manager to also be available so that it can
213   // be accessed in an SCC analysis and proxied onward to function passes.
214   // FIXME: It is pretty awkward to just drop the result here and assert that
215   // we can find it again later.
216   (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
217 
218   return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
219 }
220 
// Unique key object identifying this analysis within analysis managers.
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
222 
223 FunctionAnalysisManagerCGSCCProxy::Result
224 FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
225                                        CGSCCAnalysisManager &AM,
226                                        LazyCallGraph &CG) {
227   // Collect the FunctionAnalysisManager from the Module layer and use that to
228   // build the proxy result.
229   //
230   // This allows us to rely on the FunctionAnalysisMangaerModuleProxy to
231   // invalidate the function analyses.
232   auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
233   Module &M = *C.begin()->getFunction().getParent();
234   auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
235   assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
236                      "proxy is run on the module prior to entering the CGSCC "
237                      "walk.");
238 
239   // Note that we special-case invalidation handling of this proxy in the CGSCC
240   // analysis manager's Module proxy. This avoids the need to do anything
241   // special here to recompute all of this if ever the FAM's module proxy goes
242   // away.
243   return Result(FAMProxy->getManager());
244 }
245 
/// Invalidation handler for the SCC-to-function analysis manager proxy.
///
/// Returns true when the proxy must be recomputed (all cached function
/// results for this SCC's functions have been cleared), false when it remains
/// valid. In the latter case invalidation is propagated per-function,
/// honoring deferred outer-analysis invalidations registered by function
/// analyses.
bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy isn't marked as preserved, then even if the result remains
  // valid, the key itself may no longer be valid, so we clear everything.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction(), N.getFunction().getName());

    return true;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    // Lazily built custom preserved set for this function; only materialized
    // when a deferred invalidation forces us to abandon specific results.
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
312 
313 } // end namespace llvm
314 
315 /// When a new SCC is created for the graph and there might be function
316 /// analysis results cached for the functions now in that SCC two forms of
317 /// updates are required.
318 ///
319 /// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
320 /// created so that any subsequent invalidation events to the SCC are
321 /// propagated to the function analysis results cached for functions within it.
322 ///
323 /// Second, if any of the functions within the SCC have analysis results with
324 /// outer analysis dependencies, then those dependencies would point to the
325 /// *wrong* SCC's analysis result. We forcibly invalidate the necessary
326 /// function analyses so that they don't retain stale handles.
327 static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
328                                          LazyCallGraph &G,
329                                          CGSCCAnalysisManager &AM) {
330   // Get the relevant function analysis manager.
331   auto &FAM =
332       AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();
333 
334   // Now walk the functions in this SCC and invalidate any function analysis
335   // results that might have outer dependencies on an SCC analysis.
336   for (LazyCallGraph::Node &N : C) {
337     Function &F = N.getFunction();
338 
339     auto *OuterProxy =
340         FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
341     if (!OuterProxy)
342       // No outer analyses were queried, nothing to do.
343       continue;
344 
345     // Forcibly abandon all the inner analyses with dependencies, but
346     // invalidate nothing else.
347     auto PA = PreservedAnalyses::all();
348     for (const auto &OuterInvalidationPair :
349          OuterProxy->getOuterInvalidations()) {
350       const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
351       for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
352         PA.abandon(InnerAnalysisID);
353     }
354 
355     // Now invalidate anything we found.
356     FAM.invalidate(F, PA);
357   }
358 }
359 
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  // An empty range means nothing was actually split off; keep the SCC as-is.
  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  bool NeedFAMProxy =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
  // there are preserved analysis we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (NeedFAMProxy)
    updateNewSCCFunctionAnalyses(*C, G, AM);

  // Enqueue the remaining new SCCs (everything after the now-current one) in
  // reverse order as the worklist is popped off the back.
  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (NeedFAMProxy)
      updateNewSCCFunctionAnalyses(NewC, G, AM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}
431 
/// Core implementation of the call graph update shared by the function-pass
/// and CGSCC-pass entry points below.
///
/// Re-scans the body of \p N's function, computes the delta against the call
/// graph's recorded edges (retained, new, promoted ref->call, demoted
/// call->ref, and dead edges), applies that delta to \p G, and keeps \p AM
/// and \p UR consistent with any SCC/RefSCC splits or merges that result.
/// When \p FunctionPass is true, asserts that no genuinely new edges were
/// introduced, as function passes may only rewire existing edges.
///
/// Returns the (possibly new) SCC containing \p N after all updates.
static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, bool FunctionPass) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;
  SmallSetVector<Node *, 4> NewCallEdges;
  SmallSetVector<Node *, 4> NewRefEdges;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto CS = CallSite(&I))
      if (Function *Callee = CS.getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node &CalleeN = *G.lookup(*Callee);
          Edge *E = N->lookup(CalleeN);
          assert((E || !FunctionPass) &&
                 "No function transformations should introduce *new* "
                 "call edges! Any new calls should be modeled as "
                 "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E)
            NewCallEdges.insert(&CalleeN);
          else if (!E->isCall())
            PromotedRefTargets.insert(&CalleeN);
        }

  // Now walk all references. Seed the constant worklist from every operand;
  // Visited already contains the direct callees handled above so they are
  // not re-processed as refs.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

  auto VisitRef = [&](Function &Referee) {
    Node &RefereeN = *G.lookup(Referee);
    Edge *E = N->lookup(RefereeN);
    assert((E || !FunctionPass) &&
           "No function transformations should introduce *new* ref "
           "edges! Any new ref edges would require IPO which "
           "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (!E)
      NewRefEdges.insert(&RefereeN);
    else if (E->isCall())
      DemotedCallTargets.insert(&RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Handle new ref edges.
  for (Node *RefTarget : NewRefEdges) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
    assert((RC == &TargetRC ||
           RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!");
    RC->insertTrivialRefEdge(N, *RefTarget);
  }

  // Handle new call edges.
  for (Node *CallTarget : NewCallEdges) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
    assert((RC == &TargetRC ||
           RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!");
    RC->insertTrivialCallEdge(N, *CallTarget);
  }

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Demoting an intra-SCC call edge may split the SCC; fold any new
        // SCCs into the worklist and analysis manager.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
                                          << N << "' to '" << TargetN << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
                                                  NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Demoting an intra-SCC call edge may split the SCC; fold any new SCCs
    // into the worklist and analysis manager.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          // This callback fires once for the set of SCCs merged away by the
          // promotion; record them and flush their cached analyses.
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so it remains valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}
739 
740 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
741     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
742     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
743   return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR,
744                                            /* FunctionPass */ true);
745 }
746 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
747     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
748     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
749   return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR,
750                                            /* FunctionPass */ false);
751 }
752