//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PriorityWorklist.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/PassManagerImpl.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/TimeProfiler.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>

#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {
static cl::opt<bool> AbortOnMaxDevirtIterationsReached(
    "abort-on-max-devirt-iterations-reached",
    cl::desc("Abort when the max iterations for devirtualization CGSCC repeat "
             "pass is reached"));

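// Marker analysis used by CGSCCToFunctionPassAdaptor: when its NoRerun option
// is enabled, a function that already has this result cached is skipped on
// later visits within the adaptor (see the NoRerun checks below).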
AnalysisKey ShouldNotRunFunctionPassesAnalysis::Key;

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  PreservedAnalyses PA = PreservedAnalyses::all();

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Get Function analysis manager from its proxy.
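  // We rely on the outer adaptor having already created this proxy and
  // registered the function analysis manager in it (via updateFAM) before
  // running this pass manager, so a cached lookup is sufficient here.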
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*C)->getManager();

  for (auto &Pass : Passes) {
    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, skip its execution completely if asked to (callback returns
    // false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(*C, AM, G, UR);
    }

    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;
    if (UR.UpdatedC) {
      // If C is updated, also create a proxy and update FAM inside the result.
      auto *ResultFAMCP =
          &AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
      ResultFAMCP->updateFAM(FAM);
    }

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  return PA;
}

PreservedAnalyses
ModuleToPostOrderCGSCCPassAdaptor::run(Module &M, ModuleAnalysisManager &AM) {
  // Setup the CGSCC analysis manager from its proxy.
  CGSCCAnalysisManager &CGAM =
      AM.getResult<CGSCCAnalysisManagerModuleProxy>(M).getManager();

  // Get the call graph for this module.
  LazyCallGraph &CG = AM.getResult<LazyCallGraphAnalysis>(M);

  // Get Function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M)->getManager();

  // We keep worklists to allow us to push more work onto the pass manager as
  // the passes are run.
  SmallPriorityWorklist<LazyCallGraph::RefSCC *, 1> RCWorklist;
  SmallPriorityWorklist<LazyCallGraph::SCC *, 1> CWorklist;

  // Keep sets for invalidated SCCs and RefSCCs that should be skipped when
  // iterating off the worklists.
  SmallPtrSet<LazyCallGraph::RefSCC *, 4> InvalidRefSCCSet;
  SmallPtrSet<LazyCallGraph::SCC *, 4> InvalidSCCSet;

  SmallDenseSet<std::pair<LazyCallGraph::Node *, LazyCallGraph::SCC *>, 4>
      InlinedInternalEdges;

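  // The update result is the communication channel between this adaptor and
  // the CGSCC passes it runs: passes record invalidated SCCs and RefSCCs,
  // newly split SCCs, cross-SCC preserved analyses, and devirtualized call
  // handles here so that the walk below can react to them.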
  CGSCCUpdateResult UR = {
      RCWorklist, CWorklist, InvalidRefSCCSet,
      InvalidSCCSet, nullptr, PreservedAnalyses::all(),
      InlinedInternalEdges, {}};

  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI = AM.getResult<PassInstrumentationAnalysis>(M);

  PreservedAnalyses PA = PreservedAnalyses::all();
  CG.buildRefSCCs();
  for (LazyCallGraph::RefSCC &RC :
       llvm::make_early_inc_range(CG.postorder_ref_sccs())) {
    assert(RCWorklist.empty() &&
           "Should always start with an empty RefSCC worklist");
    // The postorder_ref_sccs range we are walking is lazily constructed, so
    // we only push the first one onto the worklist. The worklist allows us
    // to capture *new* RefSCCs created during transformations.
    //
    // We really want to form RefSCCs lazily because that makes them cheaper
    // to update as the program is simplified and allows us to have greater
    // cache locality as forming a RefSCC touches all the parts of all the
    // functions within that RefSCC.
    //
    // We also eagerly increment the iterator to the next position because
    // the CGSCC passes below may delete the current RefSCC.
    RCWorklist.insert(&RC);

    do {
      LazyCallGraph::RefSCC *RC = RCWorklist.pop_back_val();
      if (InvalidRefSCCSet.count(RC)) {
        LLVM_DEBUG(dbgs() << "Skipping an invalid RefSCC...\n");
        continue;
      }

      assert(CWorklist.empty() &&
             "Should always start with an empty SCC worklist");

      LLVM_DEBUG(dbgs() << "Running an SCC pass across the RefSCC: " << *RC
                        << "\n");

      // The top of the worklist may *also* be the same SCC we just ran over
      // (and invalidated for). Keep track of the last SCC we processed due
      // to an SCC update to avoid redundant processing when an SCC has both
      // just been updated and is at the top of the worklist.
      LazyCallGraph::SCC *LastUpdatedC = nullptr;

      // Push the initial SCCs in reverse post-order as we'll pop off the
      // back and so see this in post-order.
      for (LazyCallGraph::SCC &C : llvm::reverse(*RC))
        CWorklist.insert(&C);

      do {
        LazyCallGraph::SCC *C = CWorklist.pop_back_val();
        // Due to call graph mutations, we may have invalid SCCs or SCCs from
        // other RefSCCs in the worklist. The invalid ones are dead and the
        // other RefSCCs should be queued above, so we just need to skip both
        // scenarios here.
        if (InvalidSCCSet.count(C)) {
          LLVM_DEBUG(dbgs() << "Skipping an invalid SCC...\n");
          continue;
        }
        if (LastUpdatedC == C) {
          LLVM_DEBUG(dbgs() << "Skipping redundant run on SCC: " << *C << "\n");
          continue;
        }
        // We used to also check if the current SCC is part of the current
        // RefSCC and bail if it wasn't, since it should be in RCWorklist.
        // However, this can cause compile time explosions in some cases on
        // modules with a huge RefSCC. If a non-trivial number of SCCs in the
        // huge RefSCC can become their own child RefSCC, we create one child
        // RefSCC, bail on the current RefSCC, visit the child RefSCC, revisit
        // the huge RefSCC, and repeat. By visiting all SCCs in the original
        // RefSCC we create all the child RefSCCs in one pass of the RefSCC,
        // rather than in one pass of the RefSCC per child RefSCC created.

        // Ensure we can proxy analysis updates from the CGSCC analysis manager
        // into the Function analysis manager by getting a proxy here.
        // This also needs to update the FunctionAnalysisManager, as this may be
        // the first time we see this SCC.
        CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
            FAM);

        // Each time we visit a new SCC pulled off the worklist,
        // a transformation of a child SCC may have also modified this parent
        // and invalidated analyses. So we invalidate using the update record's
        // cross-SCC preserved set. This preserved set is intersected by any
        // CGSCC pass that handles invalidation (primarily pass managers) prior
        // to marking its SCC as preserved. That lets us track everything that
        // might need invalidation across SCCs without excessive invalidations
        // on a single SCC.
        //
        // This essentially allows SCC passes to freely invalidate analyses
        // of any ancestor SCC. If this becomes detrimental to successfully
        // caching analyses, we could force each SCC pass to manually
        // invalidate the analyses for any SCCs other than themselves which
        // are mutated. However, that seems to lose the robustness of the
        // pass-manager driven invalidation scheme.
        CGAM.invalidate(*C, UR.CrossSCCPA);

        do {
          // Check that we didn't miss any update scenario.
          assert(!InvalidSCCSet.count(C) && "Processing an invalid SCC!");
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");

          LastUpdatedC = UR.UpdatedC;
          UR.UpdatedC = nullptr;

          // Check the PassInstrumentation's BeforePass callbacks before
          // running the pass, skip its execution completely if asked to
          // (callback returns false).
          if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
            continue;

          PreservedAnalyses PassPA;
          {
            TimeTraceScope TimeScope(Pass->name());
            PassPA = Pass->run(*C, CGAM, CG, UR);
          }

          if (UR.InvalidatedSCCs.count(C))
            PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
          else
            PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

          // Update the SCC and RefSCC if necessary.
          C = UR.UpdatedC ? UR.UpdatedC : C;

          if (UR.UpdatedC) {
            // If we're updating the SCC, also update the FAM inside the proxy's
            // result.
            CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
                FAM);
          }

          // If the CGSCC pass wasn't able to provide a valid updated SCC,
          // the current SCC may simply need to be skipped if invalid.
          if (UR.InvalidatedSCCs.count(C)) {
            LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
            break;
          }
          // Check that we didn't miss any update scenario.
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");

          // We handle invalidating the CGSCC analysis manager's information
          // for the (potentially updated) SCC here. Note that any other SCCs
          // whose structure has changed should have been invalidated by
          // whatever was updating the call graph. This SCC gets invalidated
          // late as it contains the nodes that were actively being
          // processed.
          CGAM.invalidate(*C, PassPA);

          // Then intersect the preserved set so that invalidation of module
          // analyses will eventually occur when the module pass completes.
          // Also intersect with the cross-SCC preserved set to capture any
          // cross-SCC invalidation.
          UR.CrossSCCPA.intersect(PassPA);
          PA.intersect(std::move(PassPA));

          // The pass may have restructured the call graph and refined the
          // current SCC and/or RefSCC. We need to update our current SCC and
          // RefSCC pointers to follow these. Also, when the current SCC is
          // refined, re-run the SCC pass over the newly refined SCC in order
          // to observe the most precise SCC model available. This inherently
          // cannot cycle excessively as it only happens when we split SCCs
          // apart, at most converging on a DAG of single nodes.
          // FIXME: If we ever start having RefSCC passes, we'll want to
          // iterate there too.
          if (UR.UpdatedC)
            LLVM_DEBUG(dbgs()
                       << "Re-running SCC passes after a refinement of the "
                          "current SCC: "
                       << *UR.UpdatedC << "\n");

          // Note that both `C` and `RC` may at this point refer to deleted,
          // invalid SCC and RefSCCs respectively. But we will short circuit
          // the processing when we check them in the loop above.
        } while (UR.UpdatedC);
      } while (!CWorklist.empty());

      // We only need to keep internal inlined edge information within
      // a RefSCC, so clear it to save on space and give the next visit to
      // any of these functions a fresh start.
      InlinedInternalEdges.clear();
    } while (!RCWorklist.empty());
  }

  // By definition we preserve the call graph, all SCC analyses, and the
  // analysis proxies by handling them above and in any nested pass managers.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
  PA.preserve<LazyCallGraphAnalysis>();
  PA.preserve<CGSCCAnalysisManagerModuleProxy>();
  PA.preserve<FunctionAnalysisManagerModuleProxy>();
  return PA;
}

PreservedAnalyses DevirtSCCRepeatedPass::run(LazyCallGraph::SCC &InitialC,
                                             CGSCCAnalysisManager &AM,
                                             LazyCallGraph &CG,
                                             CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, CG);

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Struct to track the counts of direct and indirect calls in each function
  // of the SCC.
  struct CallCount {
    int Direct;
    int Indirect;
  };

  // Put value handles on all of the indirect calls and return the number of
  // direct calls for each function in the SCC.
  auto ScanSCC = [](LazyCallGraph::SCC &C,
                    SmallMapVector<Value *, WeakTrackingVH, 16> &CallHandles) {
    assert(CallHandles.empty() && "Must start with a clear set of handles.");

    SmallDenseMap<Function *, CallCount> CallCounts;
    CallCount CountLocal = {0, 0};
    for (LazyCallGraph::Node &N : C) {
      CallCount &Count =
          CallCounts.insert(std::make_pair(&N.getFunction(), CountLocal))
              .first->second;
      for (Instruction &I : instructions(N.getFunction()))
        if (auto *CB = dyn_cast<CallBase>(&I)) {
          if (CB->getCalledFunction()) {
            ++Count.Direct;
          } else {
            ++Count.Indirect;
            CallHandles.insert({CB, WeakTrackingVH(CB)});
          }
        }
    }

    return CallCounts;
  };

  UR.IndirectVHs.clear();
  // Populate the initial call handles and get the initial call counts.
  auto CallCounts = ScanSCC(*C, UR.IndirectVHs);

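  // Iteration strategy: run the wrapped pass, then look for evidence of
  // devirtualization, either directly (a tracked indirect call site whose
  // value handle now resolves to a call with a known callee) or heuristically
  // (a function whose indirect call count dropped while its direct call count
  // rose). Repeat while such evidence is found, up to MaxIterations.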
  for (int Iteration = 0;; ++Iteration) {
    if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
      continue;

    PreservedAnalyses PassPA = Pass->run(*C, AM, CG, UR);

    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // If the SCC structure has changed, bail immediately and let the outer
    // CGSCC layer handle any iteration to reflect the refined structure.
    if (UR.UpdatedC && UR.UpdatedC != C) {
      PA.intersect(std::move(PassPA));
      break;
    }

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }

    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Check whether any of the handles were devirtualized.
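    // A handle that still tracks a live call instruction which now reports a
    // called function corresponds to a previously indirect call that became
    // direct.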
    bool Devirt = llvm::any_of(UR.IndirectVHs, [](auto &P) -> bool {
      if (P.second) {
        if (CallBase *CB = dyn_cast<CallBase>(P.second)) {
          if (CB->getCalledFunction()) {
            LLVM_DEBUG(dbgs() << "Found devirtualized call: " << *CB << "\n");
            return true;
          }
        }
      }
      return false;
    });

    // Rescan to build up a new set of handles and count how many direct
    // calls remain. If we decide to iterate, this also sets up the input to
    // the next iteration.
    UR.IndirectVHs.clear();
    auto NewCallCounts = ScanSCC(*C, UR.IndirectVHs);

    // If we haven't found an explicit devirtualization already, see if we
    // have decreased the number of indirect calls and increased the number
    // of direct calls for any function in the SCC. This can be fooled by all
    // manner of transformations such as DCE and other things, but seems to
    // work well in practice.
    if (!Devirt)
      // Iterate over the keys in NewCallCounts; if the Function also exists
      // in CallCounts, make the check below.
      for (auto &Pair : NewCallCounts) {
        auto &CallCountNew = Pair.second;
        auto CountIt = CallCounts.find(Pair.first);
        if (CountIt != CallCounts.end()) {
          const auto &CallCountOld = CountIt->second;
          if (CallCountOld.Indirect > CallCountNew.Indirect &&
              CallCountOld.Direct < CallCountNew.Direct) {
            Devirt = true;
            break;
          }
        }
      }

    if (!Devirt) {
      PA.intersect(std::move(PassPA));
      break;
    }

    // Otherwise, if we've already hit our max, we're done.
    if (Iteration >= MaxIterations) {
      if (AbortOnMaxDevirtIterationsReached)
        report_fatal_error("Max devirtualization iterations reached");
      LLVM_DEBUG(
          dbgs() << "Found another devirtualization after hitting the max "
                    "number of repetitions ("
                 << MaxIterations << ") on SCC: " << *C << "\n");
      PA.intersect(std::move(PassPA));
      break;
    }

    LLVM_DEBUG(
        dbgs() << "Repeating an SCC pass after finding a devirtualization in: "
               << *C << "\n");

    // Move over the new call counts in preparation for iterating.
    CallCounts = std::move(NewCallCounts);

    // Update the analysis manager with each run and intersect the total set
    // of preserved analyses so we're ready to iterate.
    AM.invalidate(*C, PassPA);

    PA.intersect(std::move(PassPA));
  }

  // Note that we don't add any preserved entries here unlike a more normal
  // "pass manager" because we only handle invalidation *between* iterations,
  // not after the last iteration.
  return PA;
}

PreservedAnalyses CGSCCToFunctionPassAdaptor::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM,
                                                  LazyCallGraph &CG,
                                                  CGSCCUpdateResult &UR) {
  // Setup the function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();

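  // Walk a snapshot of the SCC's nodes: running the function pass below may
  // split the SCC, so each node's current SCC membership is re-checked before
  // it is visited.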
  SmallVector<LazyCallGraph::Node *, 4> Nodes;
  for (LazyCallGraph::Node &N : C)
    Nodes.push_back(&N);

  // The SCC may get split while we are optimizing functions due to deleting
  // edges. If this happens, the current SCC can shift, so keep track of
  // a pointer we can overwrite.
  LazyCallGraph::SCC *CurrentC = &C;

  LLVM_DEBUG(dbgs() << "Running function passes across an SCC: " << C << "\n");

  PreservedAnalyses PA = PreservedAnalyses::all();
  for (LazyCallGraph::Node *N : Nodes) {
    // Skip nodes from other SCCs. These may have been split out during
    // processing. We'll eventually visit those SCCs and pick up the nodes
    // there.
    if (CG.lookupSCC(*N) != CurrentC)
      continue;

    Function &F = N->getFunction();

    if (NoRerun && FAM.getCachedResult<ShouldNotRunFunctionPassesAnalysis>(F))
      continue;

    PassInstrumentation PI = FAM.getResult<PassInstrumentationAnalysis>(F);
    if (!PI.runBeforePass<Function>(*Pass, F))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(F, FAM);
    }

    PI.runAfterPass<Function>(*Pass, F, PassPA);

    // We know that the function pass couldn't have invalidated any other
    // function's analyses (that's the contract of a function pass), so
    // directly handle the function analysis manager's invalidation here.
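    // With EagerlyInvalidate set, drop every cached analysis for F rather
    // than relying on the preserved set reported by the pass.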
    FAM.invalidate(F, EagerlyInvalidate ? PreservedAnalyses::none() : PassPA);
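    // Cache the marker analysis so that, when NoRerun is set, this function
    // is not visited again by this adaptor (see the check above).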
    if (NoRerun)
      (void)FAM.getResult<ShouldNotRunFunctionPassesAnalysis>(F);

    // Then intersect the preserved set so that invalidation of module
    // analyses will eventually occur when the module pass completes.
    PA.intersect(std::move(PassPA));

    // If the call graph hasn't been preserved, update it based on this
    // function pass. This may also update the current SCC to point to
    // a smaller, more refined SCC.
    auto PAC = PA.getChecker<LazyCallGraphAnalysis>();
    if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<Module>>()) {
      CurrentC = &updateCGAndAnalysisManagerForFunctionPass(CG, *CurrentC, *N,
                                                            AM, UR, FAM);
      assert(CG.lookupSCC(*N) == CurrentC &&
             "Current SCC not updated to the SCC containing the current node!");
    }
  }

  // By definition we preserve the proxy. And we preserve all analyses on
  // Functions. This precludes *any* invalidation of function analyses by the
  // proxy, but that's OK because we've taken care to invalidate analyses in
  // the function analysis manager incrementally above.
  PA.preserveSet<AllAnalysesOn<Function>>();
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();

  // We've also ensured that we updated the call graph along the way.
  PA.preserve<LazyCallGraphAnalysis>();

  return PA;
}

bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set
      // didn't preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Note: unconditionally getting the proxy and checking that it exists may
  // create it at this point. There are cases when this is being run
  // unnecessarily, but it is cheap and having the assertion in place is more
  // valuable.
  auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
  Module &M = *C.begin()->getFunction().getParent();
  bool ProxyExists =
      MAMProxy.cachedResultExists<FunctionAnalysisManagerModuleProxy>(M);
  assert(ProxyExists &&
         "The CGSCC pass manager requires that the FAM module proxy is run "
         "on the module prior to entering the CGSCC walk");
  (void)ProxyExists;

  // We just return an empty result. The caller will use the updateFAM
  // interface to correctly register the relevant FunctionAnalysisManager based
  // on the context in which this proxy is run.
  return Result();
}

bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // All updates to preserve valid results are done below, so we don't need to
  // invalidate this proxy.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->invalidate(N.getFunction(), PA);

    return false;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

} // end namespace llvm

/// When a new SCC is created for the graph we first update the
/// FunctionAnalysisManager in the Proxy's result.
/// As there might be function analysis results cached for the functions now in
/// that SCC, two forms of updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM,
                                         FunctionAnalysisManager &FAM) {
  AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).updateFAM(FAM);

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  if (NewSCCRange.empty())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  FunctionAnalysisManager *FAM = nullptr;
  if (auto *FAMProxy =
          AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC))
    FAM = &FAMProxy->getManager();

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
  // there are preserved analyses we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
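  // Note as well that splitting an SCC does not change any function body, so
  // function-level analyses remain valid and are kept via the set below.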
86519867de9SArthur Eubanks auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
866bd9c2903SChandler Carruth PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
867bd9c2903SChandler Carruth AM.invalidate(*OldC, PA);
868bd9c2903SChandler Carruth
8697c8964d8SChandler Carruth // Ensure the now-current SCC's function analyses are updated.
870bd541b21SAlina Sbirlea if (FAM)
871bd541b21SAlina Sbirlea updateNewSCCFunctionAnalyses(*C, G, AM, *FAM);
872bd9c2903SChandler Carruth
8734ed47858SKazu Hirata for (SCC &NewC : llvm::reverse(llvm::drop_begin(NewSCCRange))) {
87488823468SChandler Carruth assert(C != &NewC && "No need to re-visit the current SCC!");
87588823468SChandler Carruth assert(OldC != &NewC && "Already handled the original SCC!");
87688823468SChandler Carruth UR.CWorklist.insert(&NewC);
877d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");
878bd9c2903SChandler Carruth
8797c8964d8SChandler Carruth // Ensure new SCCs' function analyses are updated.
880bd541b21SAlina Sbirlea if (FAM)
881bd541b21SAlina Sbirlea updateNewSCCFunctionAnalyses(NewC, G, AM, *FAM);
882bd9c2903SChandler Carruth
8837c8964d8SChandler Carruth // Also propagate a normal invalidation to the new SCC, as only the current
8847c8964d8SChandler Carruth // SCC will get one from the pass manager infrastructure.
885bd9c2903SChandler Carruth AM.invalidate(NewC, PA);
88688823468SChandler Carruth }
88788823468SChandler Carruth return C;
88888823468SChandler Carruth }
88988823468SChandler Carruth
89001377453SJohannes Doerfert static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
89188823468SChandler Carruth LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
892bd541b21SAlina Sbirlea CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
893bd541b21SAlina Sbirlea FunctionAnalysisManager &FAM, bool FunctionPass) {
894fa6434beSEugene Zelenko using Node = LazyCallGraph::Node;
895fa6434beSEugene Zelenko using Edge = LazyCallGraph::Edge;
896fa6434beSEugene Zelenko using SCC = LazyCallGraph::SCC;
897fa6434beSEugene Zelenko using RefSCC = LazyCallGraph::RefSCC;
89888823468SChandler Carruth
89988823468SChandler Carruth RefSCC &InitialRC = InitialC.getOuterRefSCC();
90088823468SChandler Carruth SCC *C = &InitialC;
90188823468SChandler Carruth RefSCC *RC = &InitialRC;
90288823468SChandler Carruth Function &F = N.getFunction();
90388823468SChandler Carruth
90488823468SChandler Carruth // Walk the function body and build up the set of retained, promoted, and
90588823468SChandler Carruth // demoted edges.
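// The sets below classify what we find in the body: RetainedEdges holds every
// target that is still called or referenced; PromotedRefTargets and
// DemotedCallTargets hold existing edges whose call/ref kind has changed; and
// NewCallEdges/NewRefEdges hold targets that had no prior edge at all, which
// only CGSCC passes (not function passes) are allowed to introduce.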
90688823468SChandler Carruth SmallVector<Constant *, 16> Worklist;
90788823468SChandler Carruth SmallPtrSet<Constant *, 16> Visited;
908aaad9f84SChandler Carruth SmallPtrSet<Node *, 16> RetainedEdges;
909aaad9f84SChandler Carruth SmallSetVector<Node *, 4> PromotedRefTargets;
910aaad9f84SChandler Carruth SmallSetVector<Node *, 4> DemotedCallTargets;
91101377453SJohannes Doerfert SmallSetVector<Node *, 4> NewCallEdges;
91201377453SJohannes Doerfert SmallSetVector<Node *, 4> NewRefEdges;
9138977223eSChandler Carruth
91488823468SChandler Carruth // First walk the function and handle all called functions. We do this first
91588823468SChandler Carruth // because once there is a call edge to a function, any ref edges to that
91688823468SChandler Carruth // same function are irrelevant.
917aff058b1SArthur Eubanks for (Instruction &I : instructions(F)) {
918aff058b1SArthur Eubanks if (auto *CB = dyn_cast<CallBase>(&I)) {
919aff058b1SArthur Eubanks if (Function *Callee = CB->getCalledFunction()) {
92088823468SChandler Carruth if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
9216b1ce83aSArthur Eubanks Node *CalleeN = G.lookup(*Callee);
9227fea561eSArthur Eubanks assert(CalleeN &&
9237fea561eSArthur Eubanks "Visited function should already have an associated node");
9246b1ce83aSArthur Eubanks Edge *E = N->lookup(*CalleeN);
92501377453SJohannes Doerfert assert((E || !FunctionPass) &&
92601377453SJohannes Doerfert "No function transformations should introduce *new* "
92788823468SChandler Carruth "call edges! Any new calls should be modeled as "
92888823468SChandler Carruth "promoted existing ref edges!");
9296b1ce83aSArthur Eubanks bool Inserted = RetainedEdges.insert(CalleeN).second;
9306e35c31dSChandler Carruth (void)Inserted;
9316e35c31dSChandler Carruth assert(Inserted && "We should never visit a function twice.");
93201377453SJohannes Doerfert if (!E)
9336b1ce83aSArthur Eubanks NewCallEdges.insert(CalleeN);
93401377453SJohannes Doerfert else if (!E->isCall())
9356b1ce83aSArthur Eubanks PromotedRefTargets.insert(CalleeN);
93688823468SChandler Carruth }
937aff058b1SArthur Eubanks } else {
938aff058b1SArthur Eubanks // We can miss devirtualization if an indirect call is created then
939aff058b1SArthur Eubanks // promoted before updateCGAndAnalysisManagerForPass runs.
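// Each indirect call is tracked here with a weak value handle; the
// devirtualization repeat pass later inspects these handles, and a handle
// that now resolves to a call with a direct callee is taken as evidence that
// devirtualization happened and another iteration may be worthwhile.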
940aff058b1SArthur Eubanks auto *Entry = UR.IndirectVHs.find(CB);
941aff058b1SArthur Eubanks if (Entry == UR.IndirectVHs.end())
942aff058b1SArthur Eubanks UR.IndirectVHs.insert({CB, WeakTrackingVH(CB)});
943aff058b1SArthur Eubanks else if (!Entry->second)
944aff058b1SArthur Eubanks Entry->second = WeakTrackingVH(CB);
945aff058b1SArthur Eubanks }
946aff058b1SArthur Eubanks }
947aff058b1SArthur Eubanks }
94888823468SChandler Carruth
94988823468SChandler Carruth // Now walk all references.
9508977223eSChandler Carruth for (Instruction &I : instructions(F))
95188823468SChandler Carruth for (Value *Op : I.operand_values())
952eef203dbSSimon Pilgrim if (auto *OpC = dyn_cast<Constant>(Op))
953eef203dbSSimon Pilgrim if (Visited.insert(OpC).second)
954eef203dbSSimon Pilgrim Worklist.push_back(OpC);
95588823468SChandler Carruth
956f59a8387SChandler Carruth auto VisitRef = [&](Function &Referee) {
9576b1ce83aSArthur Eubanks Node *RefereeN = G.lookup(Referee);
9587fea561eSArthur Eubanks assert(RefereeN &&
9597fea561eSArthur Eubanks "Visited function should already have an associated node");
9606b1ce83aSArthur Eubanks Edge *E = N->lookup(*RefereeN);
96101377453SJohannes Doerfert assert((E || !FunctionPass) &&
96201377453SJohannes Doerfert "No function transformations should introduce *new* ref "
96388823468SChandler Carruth "edges! Any new ref edges would require IPO which "
96488823468SChandler Carruth "function passes aren't allowed to do!");
9656b1ce83aSArthur Eubanks bool Inserted = RetainedEdges.insert(RefereeN).second;
9666e35c31dSChandler Carruth (void)Inserted;
9676e35c31dSChandler Carruth assert(Inserted && "We should never visit a function twice.");
96801377453SJohannes Doerfert if (!E)
9696b1ce83aSArthur Eubanks NewRefEdges.insert(RefereeN);
97001377453SJohannes Doerfert else if (E->isCall())
9716b1ce83aSArthur Eubanks DemotedCallTargets.insert(RefereeN);
972f59a8387SChandler Carruth };
973f59a8387SChandler Carruth LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);
974f59a8387SChandler Carruth
97501377453SJohannes Doerfert // Handle new ref edges.
97601377453SJohannes Doerfert for (Node *RefTarget : NewRefEdges) {
97701377453SJohannes Doerfert SCC &TargetC = *G.lookupSCC(*RefTarget);
97801377453SJohannes Doerfert RefSCC &TargetRC = TargetC.getOuterRefSCC();
97901377453SJohannes Doerfert (void)TargetRC;
98001377453SJohannes Doerfert // TODO: This only allows trivial edges to be added for now.
981468fa037SArthur Eubanks #ifdef EXPENSIVE_CHECKS
982f867c8e8SMartin Storsjö assert((RC == &TargetRC ||
983f867c8e8SMartin Storsjö RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!");
984468fa037SArthur Eubanks #endif
98501377453SJohannes Doerfert RC->insertTrivialRefEdge(N, *RefTarget);
98601377453SJohannes Doerfert }
98701377453SJohannes Doerfert
98801377453SJohannes Doerfert // Handle new call edges.
98901377453SJohannes Doerfert for (Node *CallTarget : NewCallEdges) {
99001377453SJohannes Doerfert SCC &TargetC = *G.lookupSCC(*CallTarget);
99101377453SJohannes Doerfert RefSCC &TargetRC = TargetC.getOuterRefSCC();
99201377453SJohannes Doerfert (void)TargetRC;
99301377453SJohannes Doerfert // TODO: This only allows trivial edges to be added for now.
994468fa037SArthur Eubanks #ifdef EXPENSIVE_CHECKS
995f867c8e8SMartin Storsjö assert((RC == &TargetRC ||
996f867c8e8SMartin Storsjö RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!");
997468fa037SArthur Eubanks #endif
998d9cbceb0SArthur Eubanks // Add a trivial ref edge to be promoted later on alongside
999d9cbceb0SArthur Eubanks // PromotedRefTargets.
1000d9cbceb0SArthur Eubanks RC->insertTrivialRefEdge(N, *CallTarget);
100101377453SJohannes Doerfert }
100201377453SJohannes Doerfert
1003f59a8387SChandler Carruth // Include synthetic reference edges to known, defined lib functions.
1004eef203dbSSimon Pilgrim for (auto *LibFn : G.getLibFunctions())
10056e35c31dSChandler Carruth // The list of lib functions has no repeats, but a lib function may already
10066e35c31dSChandler Carruth // have been visited above, so skip anything already handled.
1007eef203dbSSimon Pilgrim if (!Visited.count(LibFn))
1008eef203dbSSimon Pilgrim VisitRef(*LibFn);
100988823468SChandler Carruth
101088823468SChandler Carruth // First remove all of the edges that are no longer present in this function.
101123c2f44cSChandler Carruth // To do this safely, make these edges uniformly ref edges and accumulate them
101223c2f44cSChandler Carruth // into a separate data structure so that removal doesn't invalidate anything.
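// Concretely this happens in three steps: internal call edges to dead targets
// are first switched to ref edges (possibly splitting the current SCC), dead
// edges leaving this RefSCC are then removed directly, and the remaining dead
// internal ref edges are removed in a single batch afterwards.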
101323c2f44cSChandler Carruth SmallVector<Node *, 4> DeadTargets;
101423c2f44cSChandler Carruth for (Edge &E : *N) {
101523c2f44cSChandler Carruth if (RetainedEdges.count(&E.getNode()))
101688823468SChandler Carruth continue;
101788823468SChandler Carruth
101823c2f44cSChandler Carruth SCC &TargetC = *G.lookupSCC(E.getNode());
101923c2f44cSChandler Carruth RefSCC &TargetRC = TargetC.getOuterRefSCC();
102023c2f44cSChandler Carruth if (&TargetRC == RC && E.isCall()) {
1021443e57e0SChandler Carruth if (C != &TargetC) {
1022443e57e0SChandler Carruth // For separate SCCs this is trivial.
102323c2f44cSChandler Carruth RC->switchTrivialInternalEdgeToRef(N, E.getNode());
1024443e57e0SChandler Carruth } else {
1025443e57e0SChandler Carruth // Now update the call graph.
102623c2f44cSChandler Carruth C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
102719913b22SChandler Carruth G, N, C, AM, UR);
1028443e57e0SChandler Carruth }
1029443e57e0SChandler Carruth }
103088823468SChandler Carruth
103123c2f44cSChandler Carruth // Now that this is ready for actual removal, put it into our list.
103223c2f44cSChandler Carruth DeadTargets.push_back(&E.getNode());
103323c2f44cSChandler Carruth }
103423c2f44cSChandler Carruth // Remove the easy cases quickly and actually pull them out of our list.
10353285ee14SKazu Hirata llvm::erase_if(DeadTargets, [&](Node *TargetN) {
103623c2f44cSChandler Carruth SCC &TargetC = *G.lookupSCC(*TargetN);
103723c2f44cSChandler Carruth RefSCC &TargetRC = TargetC.getOuterRefSCC();
103823c2f44cSChandler Carruth
103923c2f44cSChandler Carruth // We can't trivially remove internal targets, so skip
104023c2f44cSChandler Carruth // those.
104123c2f44cSChandler Carruth if (&TargetRC == RC)
104223c2f44cSChandler Carruth return false;
104323c2f44cSChandler Carruth
10443285ee14SKazu Hirata LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '" << N << "' to '"
1045fa9d8aceSBardia Mahjour << *TargetN << "'\n");
1046fa9d8aceSBardia Mahjour RC->removeOutgoingEdge(N, *TargetN);
104723c2f44cSChandler Carruth return true;
10483285ee14SKazu Hirata });
104923c2f44cSChandler Carruth
105023c2f44cSChandler Carruth // Now do a batch removal of the internal ref edges left.
105123c2f44cSChandler Carruth auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
105288823468SChandler Carruth if (!NewRefSCCs.empty()) {
105323c2f44cSChandler Carruth // The old RefSCC is dead; mark it as such.
105423c2f44cSChandler Carruth UR.InvalidatedRefSCCs.insert(RC);
105523c2f44cSChandler Carruth
105688823468SChandler Carruth // Note that we don't bother to invalidate analyses as ref-edge
105788823468SChandler Carruth // connectivity is not really observable in any way and is intended
105888823468SChandler Carruth // exclusively to be used for ordering of transforms rather than for
105988823468SChandler Carruth // analysis conclusions.
106088823468SChandler Carruth
106123c2f44cSChandler Carruth // Update RC to the "bottom".
106288823468SChandler Carruth assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
106388823468SChandler Carruth RC = &C->getOuterRefSCC();
106488823468SChandler Carruth assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");
106523c2f44cSChandler Carruth
106623c2f44cSChandler Carruth // The RC worklist is in reverse postorder, so we enqueue the new ones in
106723c2f44cSChandler Carruth // RPO except for the one which contains the source node, as that is the
106823c2f44cSChandler Carruth // "bottom" we will continue processing in the bottom-up walk.
106966a95684SChandler Carruth assert(NewRefSCCs.front() == RC &&
107066a95684SChandler Carruth "New current RefSCC not first in the returned list!");
10714ed47858SKazu Hirata for (RefSCC *NewRC : llvm::reverse(llvm::drop_begin(NewRefSCCs))) {
107266a95684SChandler Carruth assert(NewRC != RC && "Should not encounter the current RefSCC further "
107366a95684SChandler Carruth "in the postorder list of new RefSCCs.");
107488823468SChandler Carruth UR.RCWorklist.insert(NewRC);
1075d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
107619913b22SChandler Carruth << *NewRC << "\n");
107788823468SChandler Carruth }
107888823468SChandler Carruth }
107988823468SChandler Carruth
108088823468SChandler Carruth // Next, demote all the call edges that are now ref edges. Doing the demotions
108188823468SChandler Carruth // first keeps the SCCs small, which should minimize the work below: we don't
108288823468SChandler Carruth // want the promotions to form cycles that these demotions would then break.
1083aaad9f84SChandler Carruth for (Node *RefTarget : DemotedCallTargets) {
1084aaad9f84SChandler Carruth SCC &TargetC = *G.lookupSCC(*RefTarget);
108588823468SChandler Carruth RefSCC &TargetRC = TargetC.getOuterRefSCC();
108688823468SChandler Carruth
108788823468SChandler Carruth // The easy case is when the target RefSCC is not this RefSCC. This is
108888823468SChandler Carruth // only supported when the target RefSCC is a child of this RefSCC.
108988823468SChandler Carruth if (&TargetRC != RC) {
1090468fa037SArthur Eubanks #ifdef EXPENSIVE_CHECKS
109188823468SChandler Carruth assert(RC->isAncestorOf(TargetRC) &&
109288823468SChandler Carruth "Cannot potentially form RefSCC cycles here!");
1093468fa037SArthur Eubanks #endif
1094aaad9f84SChandler Carruth RC->switchOutgoingEdgeToRef(N, *RefTarget);
1095d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
109619913b22SChandler Carruth << "' to '" << *RefTarget << "'\n");
109788823468SChandler Carruth continue;
109888823468SChandler Carruth }
109988823468SChandler Carruth
1100443e57e0SChandler Carruth // We are switching an internal call edge to a ref edge. This may split up
1101443e57e0SChandler Carruth // some SCCs.
1102443e57e0SChandler Carruth if (C != &TargetC) {
1103443e57e0SChandler Carruth // For separate SCCs this is trivial.
1104aaad9f84SChandler Carruth RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
1105443e57e0SChandler Carruth continue;
1106443e57e0SChandler Carruth }
1107443e57e0SChandler Carruth
1108443e57e0SChandler Carruth // Now update the call graph.
1109aaad9f84SChandler Carruth C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
111019913b22SChandler Carruth C, AM, UR);
111188823468SChandler Carruth }
111288823468SChandler Carruth
1113d9cbceb0SArthur Eubanks // We added a trivial ref edge earlier for each new call edge; now promote
1114d9cbceb0SArthur Eubanks // those to call edges alongside PromotedRefTargets.
1115d9cbceb0SArthur Eubanks for (Node *E : NewCallEdges)
1116d9cbceb0SArthur Eubanks PromotedRefTargets.insert(E);
1117d9cbceb0SArthur Eubanks
111888823468SChandler Carruth // Now promote ref edges into call edges.
1119aaad9f84SChandler Carruth for (Node *CallTarget : PromotedRefTargets) {
1120aaad9f84SChandler Carruth SCC &TargetC = *G.lookupSCC(*CallTarget);
112188823468SChandler Carruth RefSCC &TargetRC = TargetC.getOuterRefSCC();
112288823468SChandler Carruth
112388823468SChandler Carruth // The easy case is when the target RefSCC is not this RefSCC. This is
112488823468SChandler Carruth // only supported when the target RefSCC is a child of this RefSCC.
112588823468SChandler Carruth if (&TargetRC != RC) {
1126468fa037SArthur Eubanks #ifdef EXPENSIVE_CHECKS
112788823468SChandler Carruth assert(RC->isAncestorOf(TargetRC) &&
112888823468SChandler Carruth "Cannot potentially form RefSCC cycles here!");
1129468fa037SArthur Eubanks #endif
1130aaad9f84SChandler Carruth RC->switchOutgoingEdgeToCall(N, *CallTarget);
1131d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
113219913b22SChandler Carruth << "' to '" << *CallTarget << "'\n");
113388823468SChandler Carruth continue;
113488823468SChandler Carruth }
1135d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
1136d34e60caSNicola Zaghen << N << "' to '" << *CallTarget << "'\n");
113788823468SChandler Carruth
113888823468SChandler Carruth // Otherwise we are switching an internal ref edge to a call edge. This
113988823468SChandler Carruth // may merge away some SCCs, and we add those to the UpdateResult. We also
114088823468SChandler Carruth // need to make sure to update the worklist in the event SCCs have moved
1141c213c67dSChandler Carruth // before the current one in the post-order sequence.
1142c213c67dSChandler Carruth bool HasFunctionAnalysisProxy = false;
114388823468SChandler Carruth auto InitialSCCIndex = RC->find(*C) - RC->begin();
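// switchInternalEdgeToCall invokes the callback with the SCCs that are about
// to be merged into the target SCC so their cached analyses can be dropped
// before the merge; its return value indicates whether the new call edge
// closed a cycle (see the FormedCycle handling below).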
1144c213c67dSChandler Carruth bool FormedCycle = RC->switchInternalEdgeToCall(
1145c213c67dSChandler Carruth N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
1146c213c67dSChandler Carruth for (SCC *MergedC : MergedSCCs) {
1147c213c67dSChandler Carruth assert(MergedC != &TargetC && "Cannot merge away the target SCC!");
1148c213c67dSChandler Carruth
1149c213c67dSChandler Carruth HasFunctionAnalysisProxy |=
1150c213c67dSChandler Carruth AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
1151c213c67dSChandler Carruth *MergedC) != nullptr;
1152c213c67dSChandler Carruth
1153c213c67dSChandler Carruth // Mark that this SCC will no longer be valid.
1154c213c67dSChandler Carruth UR.InvalidatedSCCs.insert(MergedC);
1155c213c67dSChandler Carruth
1156c213c67dSChandler Carruth // FIXME: We should really do a 'clear' here to forcibly release
1157c213c67dSChandler Carruth // memory, but we don't have a good way of doing that and
1158c213c67dSChandler Carruth // preserving the function analyses.
1159c213c67dSChandler Carruth auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
1160c213c67dSChandler Carruth PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
1161c213c67dSChandler Carruth AM.invalidate(*MergedC, PA);
1162c213c67dSChandler Carruth }
1163c213c67dSChandler Carruth });
1164c213c67dSChandler Carruth
1165c213c67dSChandler Carruth // If we formed a cycle by creating this call, we need to update more data
1166c213c67dSChandler Carruth // structures.
1167c213c67dSChandler Carruth if (FormedCycle) {
116888823468SChandler Carruth C = &TargetC;
116988823468SChandler Carruth assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
117088823468SChandler Carruth
1171c213c67dSChandler Carruth // If one of the invalidated SCCs had a cached proxy to a function
1172c213c67dSChandler Carruth // analysis manager, we need to create a proxy in the new current SCC as
1173d319674aSVedant Kumar // the invalidated SCCs' functions have been moved into it.
1174c213c67dSChandler Carruth if (HasFunctionAnalysisProxy)
1175bd541b21SAlina Sbirlea AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G).updateFAM(FAM);
1176c213c67dSChandler Carruth
117788823468SChandler Carruth // Any analyses cached for this SCC are no longer precise as the shape
1178c213c67dSChandler Carruth // has changed by introducing this cycle. However, we have taken care to
1179c213c67dSChandler Carruth // update the proxies so they remain valid.
1180c213c67dSChandler Carruth auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
1181c213c67dSChandler Carruth PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
1182c213c67dSChandler Carruth AM.invalidate(*C, PA);
118388823468SChandler Carruth }
118488823468SChandler Carruth auto NewSCCIndex = RC->find(*C) - RC->begin();
11853c6a820cSChandler Carruth // If we have actually moved an SCC to be topologically "below" the current
11863c6a820cSChandler Carruth // one due to merging, we will need to revisit the current SCC after
11873c6a820cSChandler Carruth // visiting those moved SCCs.
11883c6a820cSChandler Carruth //
11893c6a820cSChandler Carruth // It is critical that we *do not* revisit the current SCC unless we
11903c6a820cSChandler Carruth // actually move SCCs in the process of merging because otherwise we may
11913c6a820cSChandler Carruth // form a cycle where an SCC is split apart, merged, split, merged and so
11923c6a820cSChandler Carruth // on infinitely.
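// For example, consider a hypothetical SCC {A, B}: a demotion splits it into
// {A} and {B}, and a later promotion merges them back into {A, B}. If we
// unconditionally revisited the current SCC on every merge, that pattern
// could repeat forever.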
119388823468SChandler Carruth if (InitialSCCIndex < NewSCCIndex) {
119488823468SChandler Carruth // Put our current SCC back onto the worklist as we'll visit other SCCs
119588823468SChandler Carruth // that are now definitively ordered prior to the current one in the
119688823468SChandler Carruth // post-order sequence, and may end up observing more precise context to
119788823468SChandler Carruth // optimize the current SCC.
119888823468SChandler Carruth UR.CWorklist.insert(C);
1199d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
120019913b22SChandler Carruth << "\n");
120188823468SChandler Carruth // Enqueue in reverse order as we pop off the back of the worklist.
1202fa6434beSEugene Zelenko for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
120388823468SChandler Carruth RC->begin() + NewSCCIndex))) {
120488823468SChandler Carruth UR.CWorklist.insert(&MovedC);
1205d34e60caSNicola Zaghen LLVM_DEBUG(dbgs() << "Enqueuing an SCC newly earlier in post-order: "
120619913b22SChandler Carruth << MovedC << "\n");
120788823468SChandler Carruth }
120888823468SChandler Carruth }
120988823468SChandler Carruth }
121088823468SChandler Carruth
121188823468SChandler Carruth assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
121288823468SChandler Carruth assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
121388823468SChandler Carruth assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");
121488823468SChandler Carruth
1215ddc70237SArthur Eubanks // Record the current SCC for higher layers of the CGSCC pass manager now that
1216ddc70237SArthur Eubanks // all the updates have been applied.
121788823468SChandler Carruth if (C != &InitialC)
121888823468SChandler Carruth UR.UpdatedC = C;
121988823468SChandler Carruth
122088823468SChandler Carruth return *C;
1221572e3407SChandler Carruth }
122201377453SJohannes Doerfert
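// The two entry points below differ only in the FunctionPass flag: function
// passes must not introduce brand-new call or ref edges (asserted above),
// whereas CGSCC passes may introduce trivial ones.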
122301377453SJohannes Doerfert LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
122401377453SJohannes Doerfert LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
1225bd541b21SAlina Sbirlea CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
1226bd541b21SAlina Sbirlea FunctionAnalysisManager &FAM) {
1227bd541b21SAlina Sbirlea return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
122801377453SJohannes Doerfert /* FunctionPass */ true);
122901377453SJohannes Doerfert }
123001377453SJohannes Doerfert LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
123101377453SJohannes Doerfert LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
1232bd541b21SAlina Sbirlea CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
1233bd541b21SAlina Sbirlea FunctionAnalysisManager &FAM) {
1234bd541b21SAlina Sbirlea return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
123501377453SJohannes Doerfert /* FunctionPass */ false);
123601377453SJohannes Doerfert }
1237