1 //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "llvm/Analysis/CGSCCPassManager.h"
10 #include "llvm/ADT/ArrayRef.h"
11 #include "llvm/ADT/Optional.h"
12 #include "llvm/ADT/STLExtras.h"
13 #include "llvm/ADT/SetVector.h"
14 #include "llvm/ADT/SmallPtrSet.h"
15 #include "llvm/ADT/SmallVector.h"
16 #include "llvm/ADT/iterator_range.h"
17 #include "llvm/Analysis/LazyCallGraph.h"
18 #include "llvm/IR/Constant.h"
19 #include "llvm/IR/InstIterator.h"
20 #include "llvm/IR/Instruction.h"
21 #include "llvm/IR/PassManager.h"
22 #include "llvm/IR/PassManagerImpl.h"
23 #include "llvm/Support/Casting.h"
24 #include "llvm/Support/CommandLine.h"
25 #include "llvm/Support/Debug.h"
26 #include "llvm/Support/ErrorHandling.h"
27 #include "llvm/Support/TimeProfiler.h"
28 #include "llvm/Support/raw_ostream.h"
29 #include <algorithm>
30 #include <cassert>
31 #include <iterator>
32 
33 #define DEBUG_TYPE "cgscc"
34 
35 using namespace llvm;
36 
37 // Explicit template instantiations and specialization definitions for core
38 // template typedefs.
39 namespace llvm {
40 
// Command-line flag consulted by maxDevirtIterationsReached() below: when the
// devirtualization repeat pass hits its maximum iteration count, abort the
// compilation instead of silently continuing.
static cl::opt<bool> AbortOnMaxDevirtIterationsReached(
    "abort-on-max-devirt-iterations-reached",
    cl::desc("Abort when the max iterations for devirtualization CGSCC repeat "
             "pass is reached"));
45 
// Explicit instantiations for the core proxy templates. These anchor the
// template definitions in this translation unit so that users of the CGSCC
// analysis/pass managers elsewhere link against a single instantiation.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
55 
/// Explicitly specialize the pass manager run method to handle call graph
/// updates: CGSCC passes may refine, split, or invalidate the SCC they run
/// over, and this loop must track those changes through \p UR between passes.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  // Start from "everything preserved" and intersect in each pass's result.
  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Get Function analysis manager from its proxy. Note this uses the *cached*
  // result: the proxy must already have been computed for this SCC.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*C)->getManager();

  for (auto &Pass : Passes) {
    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, skip its execution completely if asked to (callback returns false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(*C, AM, G, UR);
    }

    // Pick the correct after-pass callback: if the pass invalidated the
    // current SCC, it is no longer safe to reference *C.
    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;
    if (UR.UpdatedC) {
      // If C is updated, also create a proxy and update FAM inside the result.
      auto *ResultFAMCP =
          &AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
      ResultFAMCP->updateFAM(FAM);
    }

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}
149 
// Invalidation handler for the module -> CGSCC analysis manager proxy.
// Decides whether the inner CGSCC analysis manager must be cleared wholesale
// or whether per-SCC invalidation (including deferred outer-analysis
// invalidation) suffices. Returns true only when the proxy itself is invalid.
bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  // Ensure the RefSCC postorder sequence is fully formed before walking it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      // Lazily-built copy of PA with extra analyses abandoned; only created
      // if deferred invalidation actually applies to this SCC.
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set didn't
      // preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
221 
222 template <>
223 CGSCCAnalysisManagerModuleProxy::Result
224 CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
225   // Force the Function analysis manager to also be available so that it can
226   // be accessed in an SCC analysis and proxied onward to function passes.
227   // FIXME: It is pretty awkward to just drop the result here and assert that
228   // we can find it again later.
229   (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
230 
231   return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
232 }
233 
// Out-of-line definition of the proxy's analysis key; its address is what
// uniquely identifies this analysis to the analysis manager.
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;
235 
// Produce the SCC -> FAM proxy result. The result itself is empty; the
// function analysis manager is attached afterwards via updateFAM by whoever
// drives the CGSCC walk.
FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Note: unconditionally requesting the MAM proxy result (rather than merely
  // checking that it exists) may compute it at this point. There are cases
  // when this is being run unnecessarily, but it is cheap and having the
  // assertion below in place is more valuable.
  auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
  // Any function in the SCC identifies the enclosing module.
  Module &M = *C.begin()->getFunction().getParent();
  bool ProxyExists =
      MAMProxy.cachedResultExists<FunctionAnalysisManagerModuleProxy>(M);
  assert(ProxyExists &&
         "The CGSCC pass manager requires that the FAM module proxy is run "
         "on the module prior to entering the CGSCC walk");
  (void)ProxyExists; // Silence unused-variable warnings in NDEBUG builds.

  // We just return an empty result. The caller will use the updateFAM interface
  // to correctly register the relevant FunctionAnalysisManager based on the
  // context in which this proxy is run.
  return Result();
}
257 
// Invalidation handler for the SCC -> FAM proxy. Either clears all cached
// function analyses for the SCC's functions (when the proxy itself is not
// preserved) or propagates per-function invalidation, honoring deferred
// outer-analysis invalidation. Always returns false: the proxy stays valid.
bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // All updates to preserve valid results are done below, so we don't need to
  // invalidate this proxy.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    // Neither the proxy nor the SCC analysis set is preserved: drop every
    // cached function analysis for functions in this SCC.
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction(), N.getFunction().getName());

    return false;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    // Lazily-built copy of PA with extra analyses abandoned; only created if
    // deferred invalidation actually applies to this function.
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
324 
325 } // end namespace llvm
326 
327 /// When a new SCC is created for the graph we first update the
328 /// FunctionAnalysisManager in the Proxy's result.
329 /// As there might be function analysis results cached for the functions now in
330 /// that SCC, two forms of  updates are required.
331 ///
332 /// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
333 /// created so that any subsequent invalidation events to the SCC are
334 /// propagated to the function analysis results cached for functions within it.
335 ///
336 /// Second, if any of the functions within the SCC have analysis results with
337 /// outer analysis dependencies, then those dependencies would point to the
338 /// *wrong* SCC's analysis result. We forcibly invalidate the necessary
339 /// function analyses so that they don't retain stale handles.
340 static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
341                                          LazyCallGraph &G,
342                                          CGSCCAnalysisManager &AM,
343                                          FunctionAnalysisManager &FAM) {
344   AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).updateFAM(FAM);
345 
346   // Now walk the functions in this SCC and invalidate any function analysis
347   // results that might have outer dependencies on an SCC analysis.
348   for (LazyCallGraph::Node &N : C) {
349     Function &F = N.getFunction();
350 
351     auto *OuterProxy =
352         FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
353     if (!OuterProxy)
354       // No outer analyses were queried, nothing to do.
355       continue;
356 
357     // Forcibly abandon all the inner analyses with dependencies, but
358     // invalidate nothing else.
359     auto PA = PreservedAnalyses::all();
360     for (const auto &OuterInvalidationPair :
361          OuterProxy->getOuterInvalidations()) {
362       const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
363       for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
364         PA.abandon(InnerAnalysisID);
365     }
366 
367     // Now invalidate anything we found.
368     FAM.invalidate(F, PA);
369   }
370 }
371 
372 void llvm::maxDevirtIterationsReached() {
373   if (AbortOnMaxDevirtIterationsReached)
374     report_fatal_error("Max devirtualization iterations reached");
375 }
376 
/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  // Nothing was split out; the current SCC is unchanged.
  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  FunctionAnalysisManager *FAM = nullptr;
  if (auto *FAMProxy =
          AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC))
    FAM = &FAMProxy->getManager();

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
  // there are preserved analysis we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (FAM)
    updateNewSCCFunctionAnalyses(*C, G, AM, *FAM);

  // Visit the remaining new SCCs (all except the now-current first one) in
  // reverse order so worklist pops come back out in postorder.
  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (FAM)
      updateNewSCCFunctionAnalyses(NewC, G, AM, *FAM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}
450 
/// Core implementation shared by the function-pass and CGSCC-pass update
/// entry points below. Re-scans the body of \p N's function, computes the
/// retained/new/promoted/demoted edge sets, and applies the corresponding
/// graph mutations while keeping \p AM and \p UR in sync. When
/// \p FunctionPass is true, asserts that no genuinely new call or ref edges
/// were introduced. Returns the (possibly new) SCC containing \p N.
static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM, bool FunctionPass) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;
  SmallSetVector<Node *, 4> NewCallEdges;
  SmallSetVector<Node *, 4> NewRefEdges;
  SmallSetVector<Node *, 4> NewNodes;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto *CB = dyn_cast<CallBase>(&I))
      if (Function *Callee = CB->getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node *CalleeN = G.lookup(*Callee);
          if (!CalleeN) {
            CalleeN = &G.get(*Callee);
            NewNodes.insert(CalleeN);
          }
          Edge *E = N->lookup(*CalleeN);
          assert((E || !FunctionPass) &&
                 "No function transformations should introduce *new* "
                 "call edges! Any new calls should be modeled as "
                 "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E)
            NewCallEdges.insert(CalleeN);
          else if (!E->isCall())
            PromotedRefTargets.insert(CalleeN);
        }

  // Now walk all references. Note that the inner `C` here is a Constant and
  // shadows the SCC pointer `C` above.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

  auto VisitRef = [&](Function &Referee) {
    Node *RefereeN = G.lookup(Referee);
    if (!RefereeN) {
      RefereeN = &G.get(Referee);
      NewNodes.insert(RefereeN);
    }
    Edge *E = N->lookup(*RefereeN);
    assert((E || !FunctionPass) &&
           "No function transformations should introduce *new* ref "
           "edges! Any new ref edges would require IPO which "
           "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (!E)
      NewRefEdges.insert(RefereeN);
    else if (E->isCall())
      DemotedCallTargets.insert(RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Connect any nodes discovered above into the graph before inserting edges
  // to or from them.
  for (Node *NewNode : NewNodes)
    G.initNode(*NewNode, *C);

  // Handle new ref edges.
  for (Node *RefTarget : NewRefEdges) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
    assert((RC == &TargetRC ||
           RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!");
    RC->insertTrivialRefEdge(N, *RefTarget);
  }

  // Handle new call edges.
  for (Node *CallTarget : NewCallEdges) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
    assert((RC == &TargetRC ||
           RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!");
    RC->insertTrivialCallEdge(N, *CallTarget);
  }

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph. Switching the edge within our own SCC may
        // split it; incorporate any new SCCs and track the one containing N.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
                                          << N << "' to '" << TargetN << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
                                                  NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          // This callback runs for each SCC merged away by the edge switch.
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G).updateFAM(FAM);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so they remain valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}
771 
772 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
773     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
774     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
775     FunctionAnalysisManager &FAM) {
776   return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
777                                            /* FunctionPass */ true);
778 }
779 LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
780     LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
781     CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
782     FunctionAnalysisManager &FAM) {
783   return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
784                                            /* FunctionPass */ false);
785 }
786