CGSCCPassManager.cpp 50 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221
  1. //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. #include "llvm/Analysis/CGSCCPassManager.h"
  9. #include "llvm/ADT/ArrayRef.h"
  10. #include "llvm/ADT/PriorityWorklist.h"
  11. #include "llvm/ADT/STLExtras.h"
  12. #include "llvm/ADT/SetVector.h"
  13. #include "llvm/ADT/SmallPtrSet.h"
  14. #include "llvm/ADT/SmallVector.h"
  15. #include "llvm/ADT/iterator_range.h"
  16. #include "llvm/Analysis/LazyCallGraph.h"
  17. #include "llvm/IR/Constant.h"
  18. #include "llvm/IR/InstIterator.h"
  19. #include "llvm/IR/Instruction.h"
  20. #include "llvm/IR/PassManager.h"
  21. #include "llvm/IR/PassManagerImpl.h"
  22. #include "llvm/IR/ValueHandle.h"
  23. #include "llvm/Support/Casting.h"
  24. #include "llvm/Support/CommandLine.h"
  25. #include "llvm/Support/Debug.h"
  26. #include "llvm/Support/ErrorHandling.h"
  27. #include "llvm/Support/TimeProfiler.h"
  28. #include "llvm/Support/raw_ostream.h"
  29. #include <cassert>
  30. #include <iterator>
  31. #include <optional>
#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

// When set, hitting the devirtualization repeat-pass iteration cap is a fatal
// error rather than a silent stop; see DevirtSCCRepeatedPass::run below.
static cl::opt<bool> AbortOnMaxDevirtIterationsReached(
    "abort-on-max-devirt-iterations-reached",
    cl::desc("Abort when the max iterations for devirtualization CGSCC repeat "
             "pass is reached"));

AnalysisKey ShouldNotRunFunctionPassesAnalysis::Key;

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;
/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
///
/// Runs each contained CGSCC pass over \p InitialC, following any SCC
/// refinement reported through \p UR so that each subsequent pass sees the
/// most refined SCC, and folding each pass's preserved-analysis set into the
/// aggregate result.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  PreservedAnalyses PA = PreservedAnalyses::all();

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Get Function analysis manager from its proxy. The proxy is expected to
  // already be cached by the surrounding CGSCC walk.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*C)->getManager();

  for (auto &Pass : Passes) {
    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, skip its execution completely if asked to (callback returns false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    // Use the "invalidated" instrumentation callback when the pass destroyed
    // the SCC it ran on, since *C may no longer be safe to reference.
    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;
    if (UR.UpdatedC) {
      // If C is updated, also create a proxy and update FAM inside the result.
      auto *ResultFAMCP =
          &AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
      ResultFAMCP->updateFAM(FAM);
    }

    // Intersect the final preserved analyses to compute the aggregate
    // preserved set for this pass manager.
    PA.intersect(PassPA);

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }

    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
  return PA;
}
/// Run the wrapped CGSCC pass over every SCC of \p M in post order.
///
/// Walks the lazily-built RefSCC DAG, maintaining worklists so that RefSCCs
/// and SCCs created or refined by the pass itself are (re)visited, and
/// propagating invalidation through the CGSCC analysis manager as it goes.
PreservedAnalyses
ModuleToPostOrderCGSCCPassAdaptor::run(Module &M, ModuleAnalysisManager &AM) {
  // Setup the CGSCC analysis manager from its proxy.
  CGSCCAnalysisManager &CGAM =
      AM.getResult<CGSCCAnalysisManagerModuleProxy>(M).getManager();

  // Get the call graph for this module.
  LazyCallGraph &CG = AM.getResult<LazyCallGraphAnalysis>(M);

  // Get Function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M)->getManager();

  // We keep worklists to allow us to push more work onto the pass manager as
  // the passes are run.
  SmallPriorityWorklist<LazyCallGraph::RefSCC *, 1> RCWorklist;
  SmallPriorityWorklist<LazyCallGraph::SCC *, 1> CWorklist;

  // Keep sets for invalidated SCCs and RefSCCs that should be skipped when
  // iterating off the worklists.
  SmallPtrSet<LazyCallGraph::RefSCC *, 4> InvalidRefSCCSet;
  SmallPtrSet<LazyCallGraph::SCC *, 4> InvalidSCCSet;

  SmallDenseSet<std::pair<LazyCallGraph::Node *, LazyCallGraph::SCC *>, 4>
      InlinedInternalEdges;

  // The update-result structure shared with every pass run; passes record
  // graph mutations here and we react to them below.
  CGSCCUpdateResult UR = {
      RCWorklist, CWorklist, InvalidRefSCCSet,
      InvalidSCCSet, nullptr, PreservedAnalyses::all(),
      InlinedInternalEdges, {}};

  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI = AM.getResult<PassInstrumentationAnalysis>(M);

  PreservedAnalyses PA = PreservedAnalyses::all();
  CG.buildRefSCCs();
  for (LazyCallGraph::RefSCC &RC :
       llvm::make_early_inc_range(CG.postorder_ref_sccs())) {
    assert(RCWorklist.empty() &&
           "Should always start with an empty RefSCC worklist");
    // The postorder_ref_sccs range we are walking is lazily constructed, so
    // we only push the first one onto the worklist. The worklist allows us
    // to capture *new* RefSCCs created during transformations.
    //
    // We really want to form RefSCCs lazily because that makes them cheaper
    // to update as the program is simplified and allows us to have greater
    // cache locality as forming a RefSCC touches all the parts of all the
    // functions within that RefSCC.
    //
    // We also eagerly increment the iterator to the next position because
    // the CGSCC passes below may delete the current RefSCC.
    RCWorklist.insert(&RC);

    do {
      LazyCallGraph::RefSCC *RC = RCWorklist.pop_back_val();
      if (InvalidRefSCCSet.count(RC)) {
        LLVM_DEBUG(dbgs() << "Skipping an invalid RefSCC...\n");
        continue;
      }

      assert(CWorklist.empty() &&
             "Should always start with an empty SCC worklist");

      LLVM_DEBUG(dbgs() << "Running an SCC pass across the RefSCC: " << *RC
                        << "\n");

      // The top of the worklist may *also* be the same SCC we just ran over
      // (and invalidated for). Keep track of that last SCC we processed due
      // to SCC update to avoid redundant processing when an SCC is both just
      // updated itself and at the top of the worklist.
      LazyCallGraph::SCC *LastUpdatedC = nullptr;

      // Push the initial SCCs in reverse post-order as we'll pop off the
      // back and so see this in post-order.
      for (LazyCallGraph::SCC &C : llvm::reverse(*RC))
        CWorklist.insert(&C);

      do {
        LazyCallGraph::SCC *C = CWorklist.pop_back_val();
        // Due to call graph mutations, we may have invalid SCCs or SCCs from
        // other RefSCCs in the worklist. The invalid ones are dead and the
        // other RefSCCs should be queued above, so we just need to skip both
        // scenarios here.
        if (InvalidSCCSet.count(C)) {
          LLVM_DEBUG(dbgs() << "Skipping an invalid SCC...\n");
          continue;
        }
        if (LastUpdatedC == C) {
          LLVM_DEBUG(dbgs() << "Skipping redundant run on SCC: " << *C << "\n");
          continue;
        }

        // We used to also check if the current SCC is part of the current
        // RefSCC and bail if it wasn't, since it should be in RCWorklist.
        // However, this can cause compile time explosions in some cases on
        // modules with a huge RefSCC. If a non-trivial amount of SCCs in the
        // huge RefSCC can become their own child RefSCC, we create one child
        // RefSCC, bail on the current RefSCC, visit the child RefSCC, revisit
        // the huge RefSCC, and repeat. By visiting all SCCs in the original
        // RefSCC we create all the child RefSCCs in one pass of the RefSCC,
        // rather than one pass of the RefSCC creating one child RefSCC at a
        // time.

        // Ensure we can proxy analysis updates from the CGSCC analysis manager
        // into the Function analysis manager by getting a proxy here.
        // This also needs to update the FunctionAnalysisManager, as this may be
        // the first time we see this SCC.
        CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
            FAM);

        // Each time we visit a new SCC pulled off the worklist,
        // a transformation of a child SCC may have also modified this parent
        // and invalidated analyses. So we invalidate using the update record's
        // cross-SCC preserved set. This preserved set is intersected by any
        // CGSCC pass that handles invalidation (primarily pass managers) prior
        // to marking its SCC as preserved. That lets us track everything that
        // might need invalidation across SCCs without excessive invalidations
        // on a single SCC.
        //
        // This essentially allows SCC passes to freely invalidate analyses
        // of any ancestor SCC. If this becomes detrimental to successfully
        // caching analyses, we could force each SCC pass to manually
        // invalidate the analyses for any SCCs other than themselves which
        // are mutated. However, that seems to lose the robustness of the
        // pass-manager driven invalidation scheme.
        CGAM.invalidate(*C, UR.CrossSCCPA);

        do {
          // Check that we didn't miss any update scenario.
          assert(!InvalidSCCSet.count(C) && "Processing an invalid SCC!");
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");

          LastUpdatedC = UR.UpdatedC;
          UR.UpdatedC = nullptr;

          // Check the PassInstrumentation's BeforePass callbacks before
          // running the pass, skip its execution completely if asked to
          // (callback returns false).
          if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
            continue;

          PreservedAnalyses PassPA = Pass->run(*C, CGAM, CG, UR);

          if (UR.InvalidatedSCCs.count(C))
            PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
          else
            PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

          // Update the SCC and RefSCC if necessary.
          C = UR.UpdatedC ? UR.UpdatedC : C;

          if (UR.UpdatedC) {
            // If we're updating the SCC, also update the FAM inside the proxy's
            // result.
            CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
                FAM);
          }

          // Intersect with the cross-SCC preserved set to capture any
          // cross-SCC invalidation.
          UR.CrossSCCPA.intersect(PassPA);
          // Intersect the preserved set so that invalidation of module
          // analyses will eventually occur when the module pass completes.
          PA.intersect(PassPA);

          // If the CGSCC pass wasn't able to provide a valid updated SCC,
          // the current SCC may simply need to be skipped if invalid.
          if (UR.InvalidatedSCCs.count(C)) {
            LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
            break;
          }

          // Check that we didn't miss any update scenario.
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");

          // We handle invalidating the CGSCC analysis manager's information
          // for the (potentially updated) SCC here. Note that any other SCCs
          // whose structure has changed should have been invalidated by
          // whatever was updating the call graph. This SCC gets invalidated
          // late as it contains the nodes that were actively being
          // processed.
          CGAM.invalidate(*C, PassPA);

          // The pass may have restructured the call graph and refined the
          // current SCC and/or RefSCC. We need to update our current SCC and
          // RefSCC pointers to follow these. Also, when the current SCC is
          // refined, re-run the SCC pass over the newly refined SCC in order
          // to observe the most precise SCC model available. This inherently
          // cannot cycle excessively as it only happens when we split SCCs
          // apart, at most converging on a DAG of single nodes.
          // FIXME: If we ever start having RefSCC passes, we'll want to
          // iterate there too.
          if (UR.UpdatedC)
            LLVM_DEBUG(dbgs()
                       << "Re-running SCC passes after a refinement of the "
                          "current SCC: "
                       << *UR.UpdatedC << "\n");

          // Note that both `C` and `RC` may at this point refer to deleted,
          // invalid SCC and RefSCCs respectively. But we will short circuit
          // the processing when we check them in the loop above.
        } while (UR.UpdatedC);
      } while (!CWorklist.empty());

      // We only need to keep internal inlined edge information within
      // a RefSCC, clear it to save on space and let the next time we visit
      // any of these functions have a fresh start.
      InlinedInternalEdges.clear();
    } while (!RCWorklist.empty());
  }

  // By definition we preserve the call graph, all SCC analyses, and the
  // analysis proxies by handling them above and in any nested pass managers.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
  PA.preserve<LazyCallGraphAnalysis>();
  PA.preserve<CGSCCAnalysisManagerModuleProxy>();
  PA.preserve<FunctionAnalysisManagerModuleProxy>();
  return PA;
}
/// Repeatedly run the wrapped CGSCC pass over \p InitialC while it keeps
/// devirtualizing indirect calls, up to MaxIterations repetitions.
///
/// Devirtualization is detected two ways: a WeakTrackingVH placed on an
/// indirect call now resolves to a call with a known callee, or a function's
/// indirect-call count dropped while its direct-call count rose between
/// iterations (a heuristic that can be fooled by, e.g., DCE).
PreservedAnalyses DevirtSCCRepeatedPass::run(LazyCallGraph::SCC &InitialC,
                                             CGSCCAnalysisManager &AM,
                                             LazyCallGraph &CG,
                                             CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, CG);

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Struct to track the counts of direct and indirect calls in each function
  // of the SCC.
  struct CallCount {
    int Direct;
    int Indirect;
  };

  // Put value handles on all of the indirect calls and return the number of
  // direct calls for each function in the SCC.
  auto ScanSCC = [](LazyCallGraph::SCC &C,
                    SmallMapVector<Value *, WeakTrackingVH, 16> &CallHandles) {
    assert(CallHandles.empty() && "Must start with a clear set of handles.");

    SmallDenseMap<Function *, CallCount> CallCounts;
    CallCount CountLocal = {0, 0};
    for (LazyCallGraph::Node &N : C) {
      CallCount &Count =
          CallCounts.insert(std::make_pair(&N.getFunction(), CountLocal))
              .first->second;
      for (Instruction &I : instructions(N.getFunction()))
        if (auto *CB = dyn_cast<CallBase>(&I)) {
          if (CB->getCalledFunction()) {
            ++Count.Direct;
          } else {
            // Indirect call: track it with a value handle so we can tell
            // later whether it was replaced by (or became) a direct call.
            ++Count.Indirect;
            CallHandles.insert({CB, WeakTrackingVH(CB)});
          }
        }
    }

    return CallCounts;
  };

  UR.IndirectVHs.clear();
  // Populate the initial call handles and get the initial call counts.
  auto CallCounts = ScanSCC(*C, UR.IndirectVHs);

  for (int Iteration = 0;; ++Iteration) {
    if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
      continue;

    PreservedAnalyses PassPA = Pass->run(*C, AM, CG, UR);

    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    PA.intersect(PassPA);

    // If the SCC structure has changed, bail immediately and let the outer
    // CGSCC layer handle any iteration to reflect the refined structure.
    if (UR.UpdatedC && UR.UpdatedC != C)
      break;

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }

    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Check whether any of the handles were devirtualized.
    bool Devirt = llvm::any_of(UR.IndirectVHs, [](auto &P) -> bool {
      if (P.second) {
        if (CallBase *CB = dyn_cast<CallBase>(P.second)) {
          if (CB->getCalledFunction()) {
            LLVM_DEBUG(dbgs() << "Found devirtualized call: " << *CB << "\n");
            return true;
          }
        }
      }
      return false;
    });

    // Rescan to build up a new set of handles and count how many direct
    // calls remain. If we decide to iterate, this also sets up the input to
    // the next iteration.
    UR.IndirectVHs.clear();
    auto NewCallCounts = ScanSCC(*C, UR.IndirectVHs);

    // If we haven't found an explicit devirtualization already see if we
    // have decreased the number of indirect calls and increased the number
    // of direct calls for any function in the SCC. This can be fooled by all
    // manner of transformations such as DCE and other things, but seems to
    // work well in practice.
    if (!Devirt)
      // Iterate over the keys in NewCallCounts, if Function also exists in
      // CallCounts, make the check below.
      for (auto &Pair : NewCallCounts) {
        auto &CallCountNew = Pair.second;
        auto CountIt = CallCounts.find(Pair.first);
        if (CountIt != CallCounts.end()) {
          const auto &CallCountOld = CountIt->second;
          if (CallCountOld.Indirect > CallCountNew.Indirect &&
              CallCountOld.Direct < CallCountNew.Direct) {
            Devirt = true;
            break;
          }
        }
      }

    // No devirtualization detected: we've reached a fixed point, stop.
    if (!Devirt) {
      break;
    }

    // Otherwise, if we've already hit our max, we're done.
    if (Iteration >= MaxIterations) {
      if (AbortOnMaxDevirtIterationsReached)
        report_fatal_error("Max devirtualization iterations reached");
      LLVM_DEBUG(
          dbgs() << "Found another devirtualization after hitting the max "
                    "number of repetitions ("
                 << MaxIterations << ") on SCC: " << *C << "\n");
      break;
    }

    LLVM_DEBUG(
        dbgs() << "Repeating an SCC pass after finding a devirtualization in: "
               << *C << "\n");

    // Move over the new call counts in preparation for iterating.
    CallCounts = std::move(NewCallCounts);

    // Update the analysis manager with each run and intersect the total set
    // of preserved analyses so we're ready to iterate.
    AM.invalidate(*C, PassPA);
  }

  // Note that we don't add any preserved entries here unlike a more normal
  // "pass manager" because we only handle invalidation *between* iterations,
  // not after the last iteration.
  return PA;
}
/// Run the wrapped function pass over every function in SCC \p C, updating
/// the call graph (and the current-SCC pointer) whenever a function pass
/// fails to preserve the LazyCallGraphAnalysis.
PreservedAnalyses CGSCCToFunctionPassAdaptor::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM,
                                                  LazyCallGraph &CG,
                                                  CGSCCUpdateResult &UR) {
  // Setup the function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();

  // Snapshot the node list up front: the SCC may be restructured while we
  // run passes, which would invalidate iteration over C itself.
  SmallVector<LazyCallGraph::Node *, 4> Nodes;
  for (LazyCallGraph::Node &N : C)
    Nodes.push_back(&N);

  // The SCC may get split while we are optimizing functions due to deleting
  // edges. If this happens, the current SCC can shift, so keep track of
  // a pointer we can overwrite.
  LazyCallGraph::SCC *CurrentC = &C;

  LLVM_DEBUG(dbgs() << "Running function passes across an SCC: " << C << "\n");

  PreservedAnalyses PA = PreservedAnalyses::all();
  for (LazyCallGraph::Node *N : Nodes) {
    // Skip nodes from other SCCs. These may have been split out during
    // processing. We'll eventually visit those SCCs and pick up the nodes
    // there.
    if (CG.lookupSCC(*N) != CurrentC)
      continue;

    Function &F = N->getFunction();

    // Honor the ShouldNotRunFunctionPassesAnalysis marker when rerun
    // avoidance is enabled.
    if (NoRerun && FAM.getCachedResult<ShouldNotRunFunctionPassesAnalysis>(F))
      continue;

    PassInstrumentation PI = FAM.getResult<PassInstrumentationAnalysis>(F);
    if (!PI.runBeforePass<Function>(*Pass, F))
      continue;

    PreservedAnalyses PassPA = Pass->run(F, FAM);
    PI.runAfterPass<Function>(*Pass, F, PassPA);

    // We know that the function pass couldn't have invalidated any other
    // function's analyses (that's the contract of a function pass), so
    // directly handle the function analysis manager's invalidation here.
    FAM.invalidate(F, EagerlyInvalidate ? PreservedAnalyses::none() : PassPA);

    // Cache the marker analysis so subsequent adaptor runs can skip F.
    if (NoRerun)
      (void)FAM.getResult<ShouldNotRunFunctionPassesAnalysis>(F);

    // Then intersect the preserved set so that invalidation of module
    // analyses will eventually occur when the module pass completes.
    PA.intersect(std::move(PassPA));

    // If the call graph hasn't been preserved, update it based on this
    // function pass. This may also update the current SCC to point to
    // a smaller, more refined SCC.
    auto PAC = PA.getChecker<LazyCallGraphAnalysis>();
    if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<Module>>()) {
      CurrentC = &updateCGAndAnalysisManagerForFunctionPass(CG, *CurrentC, *N,
                                                            AM, UR, FAM);
      assert(CG.lookupSCC(*N) == CurrentC &&
             "Current SCC not updated to the SCC containing the current node!");
    }
  }

  // By definition we preserve the proxy. And we preserve all analyses on
  // Functions. This precludes *any* invalidation of function analyses by the
  // proxy, but that's OK because we've taken care to invalidate analyses in
  // the function analysis manager incrementally above.
  PA.preserveSet<AllAnalysesOn<Function>>();
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();

  // We've also ensured that we updated the call graph along the way.
  PA.preserve<LazyCallGraphAnalysis>();
  return PA;
}
/// Handle module-level invalidation for the CGSCC analysis manager proxy.
///
/// Returns true (proxy invalid) only when the proxy, the call graph, or the
/// FAM module proxy is itself invalidated; otherwise propagates the preserved
/// set down into every cached SCC analysis, honoring any deferred
/// outer-analysis invalidations registered by SCC analyses.
bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      std::optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            // Lazily copy the preserved set so we can abandon the dependent
            // SCC analyses without perturbing PA for other SCCs.
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set didn't
      // preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
  551. template <>
  552. CGSCCAnalysisManagerModuleProxy::Result
  553. CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  554. // Force the Function analysis manager to also be available so that it can
  555. // be accessed in an SCC analysis and proxied onward to function passes.
  556. // FIXME: It is pretty awkward to just drop the result here and assert that
  557. // we can find it again later.
  558. (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);
  559. return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
  560. }
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

/// Construct the function analysis manager proxy result for SCC \p C.
///
/// Asserts that the FAM module proxy was already run on the enclosing module;
/// the returned result is empty and is wired to the real FAM later via
/// updateFAM by whichever adaptor runs this proxy.
FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Note: unconditionally checking that the module proxy exists may compute
  // it at this point. There are cases when this is being run unnecessarily,
  // but it is cheap and having the assertion in place is more valuable.
  auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
  Module &M = *C.begin()->getFunction().getParent();
  bool ProxyExists =
      MAMProxy.cachedResultExists<FunctionAnalysisManagerModuleProxy>(M);
  assert(ProxyExists &&
         "The CGSCC pass manager requires that the FAM module proxy is run "
         "on the module prior to entering the CGSCC walk");
  (void)ProxyExists;

  // We just return an empty result. The caller will use the updateFAM interface
  // to correctly register the relevant FunctionAnalysisManager based on the
  // context in which this proxy is run.
  return Result();
}
// Invalidation handler for the FAM proxy result cached on an SCC. Based on
// what the just-run pass preserved, it invalidates the appropriate function
// analyses inside this SCC. It always returns false: the proxy itself stays
// valid because all required fix-up is performed here.
bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // All updates to preserve valid results are done below, so we don't need to
  // invalidate this proxy.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    // Neither this proxy nor the set of all SCC analyses was preserved, so
    // conservatively invalidate everything cached for every function in C.
    for (LazyCallGraph::Node &N : C)
      FAM->invalidate(N.getFunction(), PA);

    return false;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    // Lazily-built pruned copy of PA; only materialized when some deferred
    // invalidation actually fires for this function.
    std::optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        // If the outer (SCC-level) analysis is invalidated, every inner
        // function analysis registered against it must be abandoned.
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
  640. } // end namespace llvm
  641. /// When a new SCC is created for the graph we first update the
  642. /// FunctionAnalysisManager in the Proxy's result.
  643. /// As there might be function analysis results cached for the functions now in
  644. /// that SCC, two forms of updates are required.
  645. ///
  646. /// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
  647. /// created so that any subsequent invalidation events to the SCC are
  648. /// propagated to the function analysis results cached for functions within it.
  649. ///
  650. /// Second, if any of the functions within the SCC have analysis results with
  651. /// outer analysis dependencies, then those dependencies would point to the
  652. /// *wrong* SCC's analysis result. We forcibly invalidate the necessary
  653. /// function analyses so that they don't retain stale handles.
  654. static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
  655. LazyCallGraph &G,
  656. CGSCCAnalysisManager &AM,
  657. FunctionAnalysisManager &FAM) {
  658. AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).updateFAM(FAM);
  659. // Now walk the functions in this SCC and invalidate any function analysis
  660. // results that might have outer dependencies on an SCC analysis.
  661. for (LazyCallGraph::Node &N : C) {
  662. Function &F = N.getFunction();
  663. auto *OuterProxy =
  664. FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
  665. if (!OuterProxy)
  666. // No outer analyses were queried, nothing to do.
  667. continue;
  668. // Forcibly abandon all the inner analyses with dependencies, but
  669. // invalidate nothing else.
  670. auto PA = PreservedAnalyses::all();
  671. for (const auto &OuterInvalidationPair :
  672. OuterProxy->getOuterInvalidations()) {
  673. const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
  674. for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
  675. PA.abandon(InnerAnalysisID);
  676. }
  677. // Now invalidate anything we found.
  678. FAM.invalidate(F, PA);
  679. }
  680. }
  681. /// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
  682. /// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
  683. /// added SCCs.
  684. ///
  685. /// The range of new SCCs must be in postorder already. The SCC they were split
  686. /// out of must be provided as \p C. The current node being mutated and
  687. /// triggering updates must be passed as \p N.
  688. ///
  689. /// This function returns the SCC containing \p N. This will be either \p C if
  690. /// no new SCCs have been split out, or it will be the new SCC containing \p N.
  691. template <typename SCCRangeT>
  692. static LazyCallGraph::SCC *
  693. incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
  694. LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
  695. CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  696. using SCC = LazyCallGraph::SCC;
  697. if (NewSCCRange.empty())
  698. return C;
  699. // Add the current SCC to the worklist as its shape has changed.
  700. UR.CWorklist.insert(C);
  701. LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
  702. << "\n");
  703. SCC *OldC = C;
  704. // Update the current SCC. Note that if we have new SCCs, this must actually
  705. // change the SCC.
  706. assert(C != &*NewSCCRange.begin() &&
  707. "Cannot insert new SCCs without changing current SCC!");
  708. C = &*NewSCCRange.begin();
  709. assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
  710. // If we had a cached FAM proxy originally, we will want to create more of
  711. // them for each SCC that was split off.
  712. FunctionAnalysisManager *FAM = nullptr;
  713. if (auto *FAMProxy =
  714. AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC))
  715. FAM = &FAMProxy->getManager();
  716. // We need to propagate an invalidation call to all but the newly current SCC
  717. // because the outer pass manager won't do that for us after splitting them.
  718. // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
  719. // there are preserved analysis we can avoid invalidating them here for
  720. // split-off SCCs.
  721. // We know however that this will preserve any FAM proxy so go ahead and mark
  722. // that.
  723. auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
  724. PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  725. AM.invalidate(*OldC, PA);
  726. // Ensure the now-current SCC's function analyses are updated.
  727. if (FAM)
  728. updateNewSCCFunctionAnalyses(*C, G, AM, *FAM);
  729. for (SCC &NewC : llvm::reverse(llvm::drop_begin(NewSCCRange))) {
  730. assert(C != &NewC && "No need to re-visit the current SCC!");
  731. assert(OldC != &NewC && "Already handled the original SCC!");
  732. UR.CWorklist.insert(&NewC);
  733. LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");
  734. // Ensure new SCCs' function analyses are updated.
  735. if (FAM)
  736. updateNewSCCFunctionAnalyses(NewC, G, AM, *FAM);
  737. // Also propagate a normal invalidation to the new SCC as only the current
  738. // will get one from the pass manager infrastructure.
  739. AM.invalidate(NewC, PA);
  740. }
  741. return C;
  742. }
/// Core implementation shared by the function-pass and CGSCC-pass update
/// entry points below.
///
/// Re-scans the body of \p N's function, classifies its current call/ref
/// edges against the graph's view, and applies the resulting additions,
/// promotions, demotions, and removals to \p G — updating the current SCC
/// \p C, the update result \p UR (worklists and invalidation sets), and the
/// analysis managers as graph structure changes. When \p FunctionPass is
/// true, asserts that no brand-new call or ref edges were introduced.
///
/// Returns the SCC containing \p N after all updates are applied.
static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM, bool FunctionPass) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;
  SmallSetVector<Node *, 4> NewCallEdges;
  SmallSetVector<Node *, 4> NewRefEdges;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F)) {
    if (auto *CB = dyn_cast<CallBase>(&I)) {
      if (Function *Callee = CB->getCalledFunction()) {
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node *CalleeN = G.lookup(*Callee);
          assert(CalleeN &&
                 "Visited function should already have an associated node");
          Edge *E = N->lookup(*CalleeN);
          assert((E || !FunctionPass) &&
                 "No function transformations should introduce *new* "
                 "call edges! Any new calls should be modeled as "
                 "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E)
            NewCallEdges.insert(CalleeN);
          else if (!E->isCall())
            PromotedRefTargets.insert(CalleeN);
        }
      } else {
        // Indirect call: track it with a value handle so devirtualization is
        // not missed if the call is created and then promoted before
        // updateCGAndAnalysisManagerForPass runs.
        auto *Entry = UR.IndirectVHs.find(CB);
        if (Entry == UR.IndirectVHs.end())
          UR.IndirectVHs.insert({CB, WeakTrackingVH(CB)});
        else if (!Entry->second)
          Entry->second = WeakTrackingVH(CB);
      }
    }
  }

  // Now walk all references: seed the worklist with every constant operand
  // not already seen during the call walk above.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *OpC = dyn_cast<Constant>(Op))
        if (Visited.insert(OpC).second)
          Worklist.push_back(OpC);

  // Classify a referenced function as retained, newly referenced, or demoted
  // (previously a call, now only a reference).
  auto VisitRef = [&](Function &Referee) {
    Node *RefereeN = G.lookup(Referee);
    assert(RefereeN &&
           "Visited function should already have an associated node");
    Edge *E = N->lookup(*RefereeN);
    assert((E || !FunctionPass) &&
           "No function transformations should introduce *new* ref "
           "edges! Any new ref edges would require IPO which "
           "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (!E)
      NewRefEdges.insert(RefereeN);
    else if (E->isCall())
      DemotedCallTargets.insert(RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Handle new ref edges.
  for (Node *RefTarget : NewRefEdges) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    // TargetRC is only consulted by the EXPENSIVE_CHECKS assert below.
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
#ifdef EXPENSIVE_CHECKS
    assert((RC == &TargetRC ||
            RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!");
#endif
    RC->insertTrivialRefEdge(N, *RefTarget);
  }

  // Handle new call edges.
  for (Node *CallTarget : NewCallEdges) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    // TargetRC is only consulted by the EXPENSIVE_CHECKS assert below.
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
#ifdef EXPENSIVE_CHECKS
    assert((RC == &TargetRC ||
            RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!");
#endif
    // Add a trivial ref edge to be promoted later on alongside
    // PromotedRefTargets.
    RC->insertTrivialRefEdge(N, *CallTarget);
  }

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *LibFn : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(LibFn))
      VisitRef(*LibFn);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    // Dead internal call edges must first be demoted to ref edges before the
    // batch ref-edge removal below.
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  llvm::erase_if(DeadTargets, [&](Node *TargetN) {
    SCC &TargetC = *G.lookupSCC(*TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // We can't trivially remove internal targets, so skip
    // those.
    if (&TargetRC == RC)
      return false;

    LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '" << N << "' to '"
                      << *TargetN << "'\n");
    RC->removeOutgoingEdge(N, *TargetN);
    return true;
  });

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(llvm::drop_begin(NewRefSCCs))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
#ifdef EXPENSIVE_CHECKS
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
#endif
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // We added a ref edge earlier for new call edges, promote those to call edges
  // alongside PromotedRefTargets.
  for (Node *E : NewCallEdges)
    PromotedRefTargets.insert(E);

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
#ifdef EXPENSIVE_CHECKS
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
#endif
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          // This callback runs for every SCC merged away by the edge switch.
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G).updateFAM(FAM);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so it remains valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();

    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current SCC for higher layers of the CGSCC pass manager now that
  // all the updates have been applied.
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}
// Public entry point for call graph updates triggered by a *function* pass.
// Delegates to updateCGAndAnalysisManagerForPass with FunctionPass = true,
// which asserts that the pass introduced no brand-new call or ref edges.
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM) {
  return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
                                           /* FunctionPass */ true);
}
// Public entry point for call graph updates triggered by a *CGSCC* pass.
// Delegates to updateCGAndAnalysisManagerForPass with FunctionPass = false,
// permitting new (trivial) call and ref edges to appear.
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM) {
  return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
                                           /* FunctionPass */ false);
}