//===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/PassManagerImpl.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/TimeProfiler.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>

#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

static cl::opt<bool> AbortOnMaxDevirtIterationsReached(
    "abort-on-max-devirt-iterations-reached",
    cl::desc("Abort when the max iterations for devirtualization CGSCC repeat "
             "pass is reached"));
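
// Note: this flag is primarily a testing aid. When set, finding yet another
// devirtualization after MaxIterations has been reached (see
// DevirtSCCRepeatedPass::run below) becomes a fatal error rather than just a
// debug-log message.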

AnalysisKey ShouldNotRunFunctionPassesAnalysis::Key;

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from the analysis manager; we will use it to
  // run the instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  PreservedAnalyses PA = PreservedAnalyses::all();

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Get the Function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*C)->getManager();

  for (auto &Pass : Passes) {
    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, and skip its execution completely if asked to (the callback
    // returns false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(*C, AM, G, UR);
    }

    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;
    if (UR.UpdatedC) {
      // If C is updated, also create a proxy and update the FAM inside the
      // result.
      auto *ResultFAMCP =
          &AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
      ResultFAMCP->updateFAM(FAM);
    }

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }

    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  return PA;
}
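
// Example (a minimal sketch, not from this file): this specialization is what
// runs when a CGSCC pipeline is nested inside a module pipeline via the
// standard adaptor, e.g.:
//   CGSCCPassManager CGPM;
//   CGPM.addPass(InlinerPass());
//   MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));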

PreservedAnalyses
ModuleToPostOrderCGSCCPassAdaptor::run(Module &M, ModuleAnalysisManager &AM) {
  // Set up the CGSCC analysis manager from its proxy.
  CGSCCAnalysisManager &CGAM =
      AM.getResult<CGSCCAnalysisManagerModuleProxy>(M).getManager();

  // Get the call graph for this module.
  LazyCallGraph &CG = AM.getResult<LazyCallGraphAnalysis>(M);

  // Get the Function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M)->getManager();

  // We keep worklists to allow us to push more work onto the pass manager as
  // the passes are run.
  SmallPriorityWorklist<LazyCallGraph::RefSCC *, 1> RCWorklist;
  SmallPriorityWorklist<LazyCallGraph::SCC *, 1> CWorklist;

  // Keep sets for invalidated SCCs and RefSCCs that should be skipped when
  // iterating off the worklists.
  SmallPtrSet<LazyCallGraph::RefSCC *, 4> InvalidRefSCCSet;
  SmallPtrSet<LazyCallGraph::SCC *, 4> InvalidSCCSet;

  SmallDenseSet<std::pair<LazyCallGraph::Node *, LazyCallGraph::SCC *>, 4>
      InlinedInternalEdges;
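
  // Bundle all of the mutable state above into the single update-result record
  // that CGSCC passes use to report call graph mutations (new, refined, or
  // invalidated SCCs and RefSCCs) back to this adaptor.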
  CGSCCUpdateResult UR = {
      RCWorklist, CWorklist, InvalidRefSCCSet, InvalidSCCSet,
      nullptr,    nullptr,   PreservedAnalyses::all(), InlinedInternalEdges,
      {}};

  // Request PassInstrumentation from the analysis manager; we will use it to
  // run the instrumenting callbacks for the passes later.
  PassInstrumentation PI = AM.getResult<PassInstrumentationAnalysis>(M);

  PreservedAnalyses PA = PreservedAnalyses::all();
  CG.buildRefSCCs();
  for (auto RCI = CG.postorder_ref_scc_begin(),
            RCE = CG.postorder_ref_scc_end();
       RCI != RCE;) {
    assert(RCWorklist.empty() &&
           "Should always start with an empty RefSCC worklist");
    // The postorder_ref_sccs range we are walking is lazily constructed, so
    // we only push the first one onto the worklist. The worklist allows us
    // to capture *new* RefSCCs created during transformations.
    //
    // We really want to form RefSCCs lazily because that makes them cheaper
    // to update as the program is simplified and allows us to have greater
    // cache locality as forming a RefSCC touches all the parts of all the
    // functions within that RefSCC.
    //
    // We also eagerly increment the iterator to the next position because
    // the CGSCC passes below may delete the current RefSCC.
    RCWorklist.insert(&*RCI++);

    do {
      LazyCallGraph::RefSCC *RC = RCWorklist.pop_back_val();
      if (InvalidRefSCCSet.count(RC)) {
        LLVM_DEBUG(dbgs() << "Skipping an invalid RefSCC...\n");
        continue;
      }

      assert(CWorklist.empty() &&
             "Should always start with an empty SCC worklist");

      LLVM_DEBUG(dbgs() << "Running an SCC pass across the RefSCC: " << *RC
                        << "\n");

      // The top of the worklist may *also* be the same SCC we just ran over
      // (and invalidated for). Keep track of that last SCC we processed due
      // to SCC update to avoid redundant processing when an SCC is both just
      // updated itself and at the top of the worklist.
      LazyCallGraph::SCC *LastUpdatedC = nullptr;

      // Push the initial SCCs in reverse post-order as we'll pop off the
      // back and so see this in post-order.
      for (LazyCallGraph::SCC &C : llvm::reverse(*RC))
        CWorklist.insert(&C);

      do {
        LazyCallGraph::SCC *C = CWorklist.pop_back_val();
        // Due to call graph mutations, we may have invalid SCCs or SCCs from
        // other RefSCCs in the worklist. The invalid ones are dead and the
        // other RefSCCs should be queued above, so we just need to skip both
        // scenarios here.
        if (InvalidSCCSet.count(C)) {
          LLVM_DEBUG(dbgs() << "Skipping an invalid SCC...\n");
          continue;
        }
        if (LastUpdatedC == C) {
          LLVM_DEBUG(dbgs() << "Skipping redundant run on SCC: " << *C << "\n");
          continue;
        }
        if (&C->getOuterRefSCC() != RC) {
          LLVM_DEBUG(dbgs() << "Skipping an SCC that is now part of some other "
                               "RefSCC...\n");
          continue;
        }

        // Ensure we can proxy analysis updates from the CGSCC analysis manager
        // into the Function analysis manager by getting a proxy here.
        // This also needs to update the FunctionAnalysisManager, as this may be
        // the first time we see this SCC.
        CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
            FAM);

        // Each time we visit a new SCC pulled off the worklist,
        // a transformation of a child SCC may have also modified this parent
        // and invalidated analyses. So we invalidate using the update record's
        // cross-SCC preserved set. This preserved set is intersected by any
        // CGSCC pass that handles invalidation (primarily pass managers) prior
        // to marking its SCC as preserved. That lets us track everything that
        // might need invalidation across SCCs without excessive invalidations
        // on a single SCC.
        //
        // This essentially allows SCC passes to freely invalidate analyses
        // of any ancestor SCC. If this becomes detrimental to successfully
        // caching analyses, we could force each SCC pass to manually
        // invalidate the analyses for any SCCs other than themselves which
        // are mutated. However, that seems to lose the robustness of the
        // pass-manager driven invalidation scheme.
        CGAM.invalidate(*C, UR.CrossSCCPA);

        do {
          // Check that we didn't miss any update scenario.
          assert(!InvalidSCCSet.count(C) && "Processing an invalid SCC!");
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");
          assert(&C->getOuterRefSCC() == RC &&
                 "Processing an SCC in a different RefSCC!");

          LastUpdatedC = UR.UpdatedC;
          UR.UpdatedRC = nullptr;
          UR.UpdatedC = nullptr;

          // Check the PassInstrumentation's BeforePass callbacks before
          // running the pass, and skip its execution completely if asked to
          // (the callback returns false).
          if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
            continue;

          PreservedAnalyses PassPA;
          {
            TimeTraceScope TimeScope(Pass->name());
            PassPA = Pass->run(*C, CGAM, CG, UR);
          }

          if (UR.InvalidatedSCCs.count(C))
            PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
          else
            PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

          // Update the SCC and RefSCC if necessary.
          C = UR.UpdatedC ? UR.UpdatedC : C;
          RC = UR.UpdatedRC ? UR.UpdatedRC : RC;

          if (UR.UpdatedC) {
            // If we're updating the SCC, also update the FAM inside the
            // proxy's result.
            CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
                FAM);
          }

          // If the CGSCC pass wasn't able to provide a valid updated SCC,
          // the current SCC may simply need to be skipped if invalid.
          if (UR.InvalidatedSCCs.count(C)) {
            LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
            break;
          }

          // Check that we didn't miss any update scenario.
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");

          // We handle invalidating the CGSCC analysis manager's information
          // for the (potentially updated) SCC here. Note that any other SCCs
          // whose structure has changed should have been invalidated by
          // whatever was updating the call graph. This SCC gets invalidated
          // late as it contains the nodes that were actively being
          // processed.
          CGAM.invalidate(*C, PassPA);

          // Then intersect the preserved set so that invalidation of module
          // analyses will eventually occur when the module pass completes.
          // Also intersect with the cross-SCC preserved set to capture any
          // cross-SCC invalidation.
          UR.CrossSCCPA.intersect(PassPA);
          PA.intersect(std::move(PassPA));

          // The pass may have restructured the call graph and refined the
          // current SCC and/or RefSCC. We need to update our current SCC and
          // RefSCC pointers to follow these. Also, when the current SCC is
          // refined, re-run the SCC pass over the newly refined SCC in order
          // to observe the most precise SCC model available. This inherently
          // cannot cycle excessively as it only happens when we split SCCs
          // apart, at most converging on a DAG of single nodes.
          // FIXME: If we ever start having RefSCC passes, we'll want to
          // iterate there too.
          if (UR.UpdatedC)
            LLVM_DEBUG(dbgs()
                       << "Re-running SCC passes after a refinement of the "
                          "current SCC: "
                       << *UR.UpdatedC << "\n");

          // Note that both `C` and `RC` may at this point refer to deleted,
          // invalid SCC and RefSCCs respectively. But we will short circuit
          // the processing when we check them in the loop above.
        } while (UR.UpdatedC);
      } while (!CWorklist.empty());

      // We only need to keep internal inlined edge information within
      // a RefSCC, so clear it to save on space and let the next time we visit
      // any of these functions have a fresh start.
      InlinedInternalEdges.clear();
    } while (!RCWorklist.empty());
  }

  // By definition we preserve the call graph, all SCC analyses, and the
  // analysis proxies by handling them above and in any nested pass managers.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
  PA.preserve<LazyCallGraphAnalysis>();
  PA.preserve<CGSCCAnalysisManagerModuleProxy>();
  PA.preserve<FunctionAnalysisManagerModuleProxy>();
  return PA;
}

PreservedAnalyses DevirtSCCRepeatedPass::run(LazyCallGraph::SCC &InitialC,
                                             CGSCCAnalysisManager &AM,
                                             LazyCallGraph &CG,
                                             CGSCCUpdateResult &UR) {
  PreservedAnalyses PA = PreservedAnalyses::all();
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, CG);

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Struct to track the counts of direct and indirect calls in each function
  // of the SCC.
  struct CallCount {
    int Direct;
    int Indirect;
  };

  // Put value handles on all of the indirect calls and return the direct and
  // indirect call counts for each function in the SCC.
  auto ScanSCC = [](LazyCallGraph::SCC &C,
                    SmallMapVector<Value *, WeakTrackingVH, 16> &CallHandles) {
    assert(CallHandles.empty() && "Must start with a clear set of handles.");

    SmallDenseMap<Function *, CallCount> CallCounts;
    CallCount CountLocal = {0, 0};
    for (LazyCallGraph::Node &N : C) {
      CallCount &Count =
          CallCounts.insert(std::make_pair(&N.getFunction(), CountLocal))
              .first->second;
      for (Instruction &I : instructions(N.getFunction()))
        if (auto *CB = dyn_cast<CallBase>(&I)) {
          if (CB->getCalledFunction()) {
            ++Count.Direct;
          } else {
            ++Count.Indirect;
            CallHandles.insert({CB, WeakTrackingVH(CB)});
          }
        }
    }

    return CallCounts;
  };

  UR.IndirectVHs.clear();
  // Populate the initial call handles and get the initial call counts.
  auto CallCounts = ScanSCC(*C, UR.IndirectVHs);

  for (int Iteration = 0;; ++Iteration) {
    if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
      continue;

    PreservedAnalyses PassPA = Pass->run(*C, AM, CG, UR);

    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // If the SCC structure has changed, bail immediately and let the outer
    // CGSCC layer handle any iteration to reflect the refined structure.
    if (UR.UpdatedC && UR.UpdatedC != C) {
      PA.intersect(std::move(PassPA));
      break;
    }

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }

    assert(C->begin() != C->end() && "Cannot have an empty SCC!");
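
    // A concrete example of the check below: if an iteration rewrote an
    // indirect `call %fp(...)` into a direct `call @f(...)` in place, the
    // WeakTrackingVH stashed for that call site still tracks the instruction,
    // and its called function is now non-null, so we detect a
    // devirtualization.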
    // Check whether any of the handles were devirtualized.
    bool Devirt = llvm::any_of(UR.IndirectVHs, [](auto &P) -> bool {
      if (P.second) {
        if (CallBase *CB = dyn_cast<CallBase>(P.second)) {
          if (CB->getCalledFunction()) {
            LLVM_DEBUG(dbgs() << "Found devirtualized call: " << *CB << "\n");
            return true;
          }
        }
      }
      return false;
    });

    // Rescan to build up a new set of handles and count how many direct
    // calls remain. If we decide to iterate, this also sets up the input to
    // the next iteration.
    UR.IndirectVHs.clear();
    auto NewCallCounts = ScanSCC(*C, UR.IndirectVHs);

    // If we haven't found an explicit devirtualization already, see if we
    // have decreased the number of indirect calls and increased the number
    // of direct calls for any function in the SCC. This can be fooled by all
    // manner of transformations such as DCE and other things, but seems to
    // work well in practice.
    if (!Devirt)
      // Iterate over the keys in NewCallCounts; if the Function also exists
      // in CallCounts, run the comparison below.
      for (auto &Pair : NewCallCounts) {
        auto &CallCountNew = Pair.second;
        auto CountIt = CallCounts.find(Pair.first);
        if (CountIt != CallCounts.end()) {
          const auto &CallCountOld = CountIt->second;
          if (CallCountOld.Indirect > CallCountNew.Indirect &&
              CallCountOld.Direct < CallCountNew.Direct) {
            Devirt = true;
            break;
          }
        }
      }

    if (!Devirt) {
      PA.intersect(std::move(PassPA));
      break;
    }

    // Otherwise, if we've already hit our max, we're done.
    if (Iteration >= MaxIterations) {
      if (AbortOnMaxDevirtIterationsReached)
        report_fatal_error("Max devirtualization iterations reached");
      LLVM_DEBUG(
          dbgs() << "Found another devirtualization after hitting the max "
                    "number of repetitions ("
                 << MaxIterations << ") on SCC: " << *C << "\n");
      PA.intersect(std::move(PassPA));
      break;
    }

    LLVM_DEBUG(
        dbgs() << "Repeating an SCC pass after finding a devirtualization in: "
               << *C << "\n");

    // Move over the new call counts in preparation for iterating.
    CallCounts = std::move(NewCallCounts);

    // Update the analysis manager with each run and intersect the total set
    // of preserved analyses so we're ready to iterate.
    AM.invalidate(*C, PassPA);
    PA.intersect(std::move(PassPA));
  }

  // Note that we don't add any preserved entries here unlike a more normal
  // "pass manager" because we only handle invalidation *between* iterations,
  // not after the last iteration.
  return PA;
}
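
// Example (a minimal sketch, not from this file): pipelines typically get this
// behavior via the standard helper, which wraps a CGSCC pass so it repeats
// while devirtualization is observed, e.g.:
//   CGPM.addPass(createDevirtSCCRepeatedPass(InlinerPass(),
//                                            /*MaxIterations=*/4));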

PreservedAnalyses CGSCCToFunctionPassAdaptor::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM,
                                                  LazyCallGraph &CG,
                                                  CGSCCUpdateResult &UR) {
  // Set up the function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();

  SmallVector<LazyCallGraph::Node *, 4> Nodes;
  for (LazyCallGraph::Node &N : C)
    Nodes.push_back(&N);

  // The SCC may get split while we are optimizing functions due to deleting
  // edges. If this happens, the current SCC can shift, so keep track of
  // a pointer we can overwrite.
  LazyCallGraph::SCC *CurrentC = &C;

  LLVM_DEBUG(dbgs() << "Running function passes across an SCC: " << C << "\n");

  PreservedAnalyses PA = PreservedAnalyses::all();
  for (LazyCallGraph::Node *N : Nodes) {
    // Skip nodes from other SCCs. These may have been split out during
    // processing. We'll eventually visit those SCCs and pick up the nodes
    // there.
    if (CG.lookupSCC(*N) != CurrentC)
      continue;

    Function &F = N->getFunction();

    if (NoRerun && FAM.getCachedResult<ShouldNotRunFunctionPassesAnalysis>(F))
      continue;

    PassInstrumentation PI = FAM.getResult<PassInstrumentationAnalysis>(F);
    if (!PI.runBeforePass<Function>(*Pass, F))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(F, FAM);
    }

    PI.runAfterPass<Function>(*Pass, F, PassPA);

    // We know that the function pass couldn't have invalidated any other
    // function's analyses (that's the contract of a function pass), so
    // directly handle the function analysis manager's invalidation here.
    FAM.invalidate(F, EagerlyInvalidate ? PreservedAnalyses::none() : PassPA);
    if (NoRerun)
      (void)FAM.getResult<ShouldNotRunFunctionPassesAnalysis>(F);

    // Then intersect the preserved set so that invalidation of module
    // analyses will eventually occur when the module pass completes.
    PA.intersect(std::move(PassPA));

    // If the call graph hasn't been preserved, update it based on this
    // function pass. This may also update the current SCC to point to
    // a smaller, more refined SCC.
    auto PAC = PA.getChecker<LazyCallGraphAnalysis>();
    if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<Module>>()) {
      CurrentC = &updateCGAndAnalysisManagerForFunctionPass(CG, *CurrentC, *N,
                                                            AM, UR, FAM);
      assert(CG.lookupSCC(*N) == CurrentC &&
             "Current SCC not updated to the SCC containing the current node!");
    }
  }

  // By definition we preserve the proxy. And we preserve all analyses on
  // Functions. This precludes *any* invalidation of function analyses by the
  // proxy, but that's OK because we've taken care to invalidate analyses in
  // the function analysis manager incrementally above.
  PA.preserveSet<AllAnalysesOn<Function>>();
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();

  // We've also ensured that we updated the call graph along the way.
  PA.preserve<LazyCallGraphAnalysis>();

  return PA;
}
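
// Example (a minimal sketch, not from this file): function passes are nested
// under a CGSCC pipeline with the standard adaptor, e.g.:
//   CGPM.addPass(createCGSCCToFunctionPassAdaptor(SimplifyCFGPass()));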

bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set
      // didn't preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Note: unconditionally requesting the module proxy here may compute it if
  // it hasn't been cached yet. There are cases when this is being run
  // unnecessarily, but it is cheap and having the assertion in place is more
  // valuable.
  auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
  Module &M = *C.begin()->getFunction().getParent();
  bool ProxyExists =
      MAMProxy.cachedResultExists<FunctionAnalysisManagerModuleProxy>(M);
  assert(ProxyExists &&
         "The CGSCC pass manager requires that the FAM module proxy is run "
         "on the module prior to entering the CGSCC walk");
  (void)ProxyExists;

  // We just return an empty result. The caller will use the updateFAM
  // interface to correctly register the relevant FunctionAnalysisManager based
  // on the context in which this proxy is run.
  return Result();
}

bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // All updates to preserve valid results are done below, so we don't need to
  // invalidate this proxy.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->invalidate(N.getFunction(), PA);

    return false;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

} // end namespace llvm

/// When a new SCC is created for the graph we first update the
/// FunctionAnalysisManager in the Proxy's result.
/// As there might be function analysis results cached for the functions now in
/// that SCC, two forms of updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
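/// For example, a function analysis result that captured a handle to an SCC
/// analysis result registers that dependency through the outer proxy; once the
/// function moves into a newly created SCC, that handle is stale, so the
/// dependent function analysis is abandoned below.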
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM,
                                         FunctionAnalysisManager &FAM) {
  AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).updateFAM(FAM);

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  if (NewSCCRange.empty())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  FunctionAnalysisManager *FAM = nullptr;
  if (auto *FAMProxy =
          AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC))
    FAM = &FAMProxy->getManager();

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalyses from the CG updater so that if
  // there are preserved analyses we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (FAM)
    updateNewSCCFunctionAnalyses(*C, G, AM, *FAM);

  for (SCC &NewC : llvm::reverse(llvm::drop_begin(NewSCCRange))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (FAM)
      updateNewSCCFunctionAnalyses(NewC, G, AM, *FAM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}

static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM, bool FunctionPass) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;
  SmallSetVector<Node *, 4> NewCallEdges;
  SmallSetVector<Node *, 4> NewRefEdges;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F)) {
    if (auto *CB = dyn_cast<CallBase>(&I)) {
      if (Function *Callee = CB->getCalledFunction()) {
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node *CalleeN = G.lookup(*Callee);
          assert(CalleeN &&
                 "Visited function should already have an associated node");
          Edge *E = N->lookup(*CalleeN);
          assert((E || !FunctionPass) &&
                 "No function transformations should introduce *new* "
                 "call edges! Any new calls should be modeled as "
                 "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E)
            NewCallEdges.insert(CalleeN);
          else if (!E->isCall())
            PromotedRefTargets.insert(CalleeN);
        }
      } else {
        // We can miss devirtualization if an indirect call is created then
        // promoted before updateCGAndAnalysisManagerForPass runs.
        auto *Entry = UR.IndirectVHs.find(CB);
        if (Entry == UR.IndirectVHs.end())
          UR.IndirectVHs.insert({CB, WeakTrackingVH(CB)});
        else if (!Entry->second)
          Entry->second = WeakTrackingVH(CB);
      }
    }
  }

  // Now walk all references.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *OpC = dyn_cast<Constant>(Op))
        if (Visited.insert(OpC).second)
          Worklist.push_back(OpC);

  auto VisitRef = [&](Function &Referee) {
    Node *RefereeN = G.lookup(Referee);
    assert(RefereeN &&
           "Visited function should already have an associated node");
    Edge *E = N->lookup(*RefereeN);
    assert((E || !FunctionPass) &&
           "No function transformations should introduce *new* ref "
           "edges! Any new ref edges would require IPO which "
           "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (!E)
      NewRefEdges.insert(RefereeN);
    else if (E->isCall())
      DemotedCallTargets.insert(RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Handle new ref edges.
  for (Node *RefTarget : NewRefEdges) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
#ifdef EXPENSIVE_CHECKS
    assert((RC == &TargetRC || RC->isAncestorOf(TargetRC)) &&
           "New ref edge is not trivial!");
#endif
    RC->insertTrivialRefEdge(N, *RefTarget);
  }

  // Handle new call edges.
  for (Node *CallTarget : NewCallEdges) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    (void)TargetRC;
    // TODO: This only allows trivial edges to be added for now.
#ifdef EXPENSIVE_CHECKS
    assert((RC == &TargetRC || RC->isAncestorOf(TargetRC)) &&
           "New call edge is not trivial!");
#endif
    // Add a trivial ref edge to be promoted later on alongside
    // PromotedRefTargets.
    RC->insertTrivialRefEdge(N, *CallTarget);
  }

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *LibFn : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(LibFn))
      VisitRef(*LibFn);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  llvm::erase_if(DeadTargets, [&](Node *TargetN) {
    SCC &TargetC = *G.lookupSCC(*TargetN);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // We can't trivially remove internal targets, so skip those.
    if (&TargetRC == RC)
      return false;

    LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '" << N << "' to '"
                      << *TargetN << "'\n");
    RC->removeOutgoingEdge(N, *TargetN);
    return true;
  });

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(llvm::drop_begin(NewRefSCCs))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
#ifdef EXPENSIVE_CHECKS
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
#endif
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // We added a ref edge earlier for new call edges, promote those to call
  // edges alongside PromotedRefTargets.
  for (Node *E : NewCallEdges)
    PromotedRefTargets.insert(E);

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
#ifdef EXPENSIVE_CHECKS
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
#endif
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G).updateFAM(FAM);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so it remains valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();

    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}

LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM) {
  return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
                                           /* FunctionPass */ true);
}

LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM) {
  return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
                                           /* FunctionPass */ false);
}