1 //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "llvm/Analysis/CGSCCPassManager.h"
10 #include "llvm/ADT/ArrayRef.h"
11 #include "llvm/ADT/Optional.h"
12 #include "llvm/ADT/STLExtras.h"
13 #include "llvm/ADT/SetVector.h"
14 #include "llvm/ADT/SmallPtrSet.h"
15 #include "llvm/ADT/SmallVector.h"
16 #include "llvm/ADT/iterator_range.h"
17 #include "llvm/Analysis/LazyCallGraph.h"
18 #include "llvm/IR/Constant.h"
19 #include "llvm/IR/InstIterator.h"
20 #include "llvm/IR/Instruction.h"
21 #include "llvm/IR/PassManager.h"
22 #include "llvm/IR/PassManagerImpl.h"
23 #include "llvm/Support/Casting.h"
24 #include "llvm/Support/Debug.h"
25 #include "llvm/Support/raw_ostream.h"
26 #include "llvm/Support/TimeProfiler.h"
27 #include <algorithm>
28 #include <cassert>
29 #include <iterator>
30 
31 #define DEBUG_TYPE "cgscc"
32 
33 using namespace llvm;
34 
35 // Explicit template instantiations and specialization definitions for core
36 // template typedefs.
37 namespace llvm {
38 
// Explicit instantiations for the core proxy templates.
// These anchor the template definitions in this translation unit so other
// TUs link against these symbols instead of instantiating their own copies.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
// The CGSCC pass manager itself; its run method is specialized below to
// handle call graph updates.
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
// Proxies wiring the module, SCC, and function analysis layers together.
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
48 
/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
///
/// The generic PassManager::run is not sufficient here because CGSCC passes
/// may refine or invalidate the SCC they run over; after each pass we consult
/// \p UR to find the SCC that now holds the work and decide whether to
/// continue, or stop if the current SCC has been invalidated.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  // Start from "everything preserved" and intersect in each pass's result.
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, skip its execution completely if asked to (callback returns false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(*C, AM, G, UR);
    }

    // Use the invalidated-SCC variant of the after-pass callback when the
    // pass destroyed the SCC it ran on; *C must not be dereferenced then.
    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}
135 
/// Propagate module-level invalidation down into the SCC analysis layer.
///
/// Returns true (proxy invalid) only when the call graph itself, this proxy,
/// or the FAM module proxy it relies on is invalidated; otherwise it performs
/// fine-grained invalidation of cached SCC analyses and stays valid.
bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  // Visit every SCC in the graph in postorder of RefSCCs.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      // A custom, narrowed preserved set for this SCC, created lazily only if
      // a deferred outer-analysis invalidation forces extra abandonment.
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set didn't
      // preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
207 
208 template <>
209 CGSCCAnalysisManagerModuleProxy::Result
210 CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
211   // Force the Function analysis manager to also be available so that it can
212   // be accessed in an SCC analysis and proxied onward to function passes.
213   // FIXME: It is pretty awkward to just drop the result here and assert that
214   // we can find it again later.
215   (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
216 
217   return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
218 }
219 
// Static key identifying FunctionAnalysisManagerCGSCCProxy in the analysis
// manager's result cache.
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
221 
222 FunctionAnalysisManagerCGSCCProxy::Result
223 FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
224                                        CGSCCAnalysisManager &AM,
225                                        LazyCallGraph &CG) {
226   // Collect the FunctionAnalysisManager from the Module layer and use that to
227   // build the proxy result.
228   //
229   // This allows us to rely on the FunctionAnalysisMangaerModuleProxy to
230   // invalidate the function analyses.
231   auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
232   Module &M = *C.begin()->getFunction().getParent();
233   auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
234   assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
235                      "proxy is run on the module prior to entering the CGSCC "
236                      "walk.");
237 
238   // Note that we special-case invalidation handling of this proxy in the CGSCC
239   // analysis manager's Module proxy. This avoids the need to do anything
240   // special here to recompute all of this if ever the FAM's module proxy goes
241   // away.
242   return Result(FAMProxy->getManager());
243 }
244 
/// Propagate SCC-level invalidation down to cached function analyses.
///
/// Mirrors CGSCCAnalysisManagerModuleProxy::Result::invalidate one layer
/// down: clears everything for this SCC's functions when the proxy itself is
/// not preserved, and otherwise performs fine-grained per-function
/// invalidation, honoring deferred outer-analysis dependencies.
bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy isn't marked as preserved, then even if the result remains
  // valid, the key itself may no longer be valid, so we clear everything.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction(), N.getFunction().getName());

    return true;
  }

  // Directly check if the relevant set is preserved.
  // Lets us short-circuit per-function invalidation below.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    // A custom, narrowed preserved set for this function, created lazily only
    // if a deferred outer-analysis invalidation forces extra abandonment.
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
311 
312 } // end namespace llvm
313 
314 /// When a new SCC is created for the graph and there might be function
315 /// analysis results cached for the functions now in that SCC two forms of
316 /// updates are required.
317 ///
318 /// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
319 /// created so that any subsequent invalidation events to the SCC are
320 /// propagated to the function analysis results cached for functions within it.
321 ///
322 /// Second, if any of the functions within the SCC have analysis results with
323 /// outer analysis dependencies, then those dependencies would point to the
324 /// *wrong* SCC's analysis result. We forcibly invalidate the necessary
325 /// function analyses so that they don't retain stale handles.
326 static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
327                                          LazyCallGraph &G,
328                                          CGSCCAnalysisManager &AM) {
329   // Get the relevant function analysis manager.
330   auto &FAM =
331       AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();
332 
333   // Now walk the functions in this SCC and invalidate any function analysis
334   // results that might have outer dependencies on an SCC analysis.
335   for (LazyCallGraph::Node &N : C) {
336     Function &F = N.getFunction();
337 
338     auto *OuterProxy =
339         FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
340     if (!OuterProxy)
341       // No outer analyses were queried, nothing to do.
342       continue;
343 
344     // Forcibly abandon all the inner analyses with dependencies, but
345     // invalidate nothing else.
346     auto PA = PreservedAnalyses::all();
347     for (const auto &OuterInvalidationPair :
348          OuterProxy->getOuterInvalidations()) {
349       const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
350       for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
351         PA.abandon(InnerAnalysisID);
352     }
353 
354     // Now invalidate anything we found.
355     FAM.invalidate(F, PA);
356   }
357 }
358 
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  // An empty range means no split happened; the current SCC is unchanged.
  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  bool NeedFAMProxy =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
  // there are preserved analysis we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (NeedFAMProxy)
    updateNewSCCFunctionAnalyses(*C, G, AM);

  // Enqueue the remaining new SCCs in reverse postorder (the worklist pops
  // from the back), skipping the first one which is the new current SCC.
  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (NeedFAMProxy)
      updateNewSCCFunctionAnalyses(NewC, G, AM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}
430 
/// Core implementation shared by the function-pass and CGSCC-pass update
/// entry points.
///
/// Re-scans the body of \p N's function, computes the delta against the edges
/// recorded in the call graph \p G, and applies the required edge insertions,
/// removals, promotions (ref -> call) and demotions (call -> ref), keeping
/// \p AM and \p UR in sync with the resulting SCC/RefSCC structure. When
/// \p FunctionPass is true, asserts the stricter constraints that apply to
/// function transformations (no genuinely new call or ref edges). Returns the
/// SCC that contains \p N after all updates.
static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, bool FunctionPass) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;
  SmallSetVector<Node *, 4> NewCallEdges;
  SmallSetVector<Node *, 4> NewRefEdges;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto *CB = dyn_cast<CallBase>(&I))
      if (Function *Callee = CB->getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node &CalleeN = *G.lookup(*Callee);
          Edge *E = N->lookup(CalleeN);
          assert((E || !FunctionPass) &&
                 "No function transformations should introduce *new* "
                 "call edges! Any new calls should be modeled as "
                 "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E)
            NewCallEdges.insert(&CalleeN);
          else if (!E->isCall())
            PromotedRefTargets.insert(&CalleeN);
        }

  // Now walk all references.
  // Seed the constant worklist with every constant operand we haven't seen.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

  // Classify each referenced function: retained, brand-new ref edge, or a
  // former call edge now only referenced (demoted).
  auto VisitRef = [&](Function &Referee) {
    Node &RefereeN = *G.lookup(Referee);
    Edge *E = N->lookup(RefereeN);
    assert((E || !FunctionPass) &&
           "No function transformations should introduce *new* ref "
           "edges! Any new ref edges would require IPO which "
           "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (!E)
      NewRefEdges.insert(&RefereeN);
    else if (E->isCall())
      DemotedCallTargets.insert(&RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Handle new ref edges.
  for (Node *RefTarget : NewRefEdges) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
    assert((RC == &TargetRC ||
           RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!");
    RC->insertTrivialRefEdge(N, *RefTarget);
  }

  // Handle new call edges.
  for (Node *CallTarget : NewCallEdges) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
    assert((RC == &TargetRC ||
           RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!");
    RC->insertTrivialCallEdge(N, *CallTarget);
  }

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        // Demoting an intra-SCC call edge may split the SCC; fold any new
        // SCCs into the worklist and re-resolve the current SCC.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  // Outgoing (cross-RefSCC) edges can be dropped directly; internal ones are
  // kept for the batched removal below.
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
                                          << N << "' to '" << TargetN << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
                                                  NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          // This callback fires only if the promotion merges SCCs; record the
          // merged-away SCCs as invalidated and clean up their caches.
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so it remains valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}
738 
739 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
740     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
741     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
742   return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR,
743                                            /* FunctionPass */ true);
744 }
745 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
746     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
747     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
748   return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR,
749                                            /* FunctionPass */ false);
750 }
751