LazyCallGraph.h 51 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339
  1. #pragma once
  2. #ifdef __GNUC__
  3. #pragma GCC diagnostic push
  4. #pragma GCC diagnostic ignored "-Wunused-parameter"
  5. #endif
  6. //===- LazyCallGraph.h - Analysis of a Module's call graph ------*- C++ -*-===//
  7. //
  8. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  9. // See https://llvm.org/LICENSE.txt for license information.
  10. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  11. //
  12. //===----------------------------------------------------------------------===//
  13. /// \file
  14. ///
  15. /// Implements a lazy call graph analysis and related passes for the new pass
  16. /// manager.
  17. ///
  18. /// NB: This is *not* a traditional call graph! It is a graph which models both
  19. /// the current calls and potential calls. As a consequence there are many
  20. /// edges in this call graph that do not correspond to a 'call' or 'invoke'
  21. /// instruction.
  22. ///
  23. /// The primary use cases of this graph analysis is to facilitate iterating
  24. /// across the functions of a module in ways that ensure all callees are
  25. /// visited prior to a caller (given any SCC constraints), or vice versa. As
/// such it is particularly well suited to organizing CGSCC optimizations such
  27. /// as inlining, outlining, argument promotion, etc. That is its primary use
  28. /// case and motivates the design. It may not be appropriate for other
  29. /// purposes. The use graph of functions or some other conservative analysis of
  30. /// call instructions may be interesting for optimizations and subsequent
  31. /// analyses which don't work in the context of an overly specified
  32. /// potential-call-edge graph.
  33. ///
  34. /// To understand the specific rules and nature of this call graph analysis,
  35. /// see the documentation of the \c LazyCallGraph below.
  36. ///
  37. //===----------------------------------------------------------------------===//
  38. #ifndef LLVM_ANALYSIS_LAZYCALLGRAPH_H
  39. #define LLVM_ANALYSIS_LAZYCALLGRAPH_H
  40. #include "llvm/ADT/ArrayRef.h"
  41. #include "llvm/ADT/DenseMap.h"
  42. #include "llvm/ADT/Optional.h"
  43. #include "llvm/ADT/PointerIntPair.h"
  44. #include "llvm/ADT/STLExtras.h"
  45. #include "llvm/ADT/SetVector.h"
  46. #include "llvm/ADT/SmallPtrSet.h"
  47. #include "llvm/ADT/SmallVector.h"
  48. #include "llvm/ADT/StringRef.h"
  49. #include "llvm/ADT/iterator.h"
  50. #include "llvm/ADT/iterator_range.h"
  51. #include "llvm/Analysis/TargetLibraryInfo.h"
  52. #include "llvm/IR/Constant.h"
  53. #include "llvm/IR/Constants.h"
  54. #include "llvm/IR/Function.h"
  55. #include "llvm/IR/PassManager.h"
  56. #include "llvm/Support/Allocator.h"
  57. #include "llvm/Support/Casting.h"
  58. #include "llvm/Support/raw_ostream.h"
  59. #include <cassert>
  60. #include <iterator>
  61. #include <string>
  62. #include <utility>
  63. namespace llvm {
  64. class Module;
  65. class Value;
  66. /// A lazily constructed view of the call graph of a module.
  67. ///
  68. /// With the edges of this graph, the motivating constraint that we are
  69. /// attempting to maintain is that function-local optimization, CGSCC-local
  70. /// optimizations, and optimizations transforming a pair of functions connected
  71. /// by an edge in the graph, do not invalidate a bottom-up traversal of the SCC
  72. /// DAG. That is, no optimizations will delete, remove, or add an edge such
  73. /// that functions already visited in a bottom-up order of the SCC DAG are no
  74. /// longer valid to have visited, or such that functions not yet visited in
  75. /// a bottom-up order of the SCC DAG are not required to have already been
  76. /// visited.
  77. ///
  78. /// Within this constraint, the desire is to minimize the merge points of the
  79. /// SCC DAG. The greater the fanout of the SCC DAG and the fewer merge points
  80. /// in the SCC DAG, the more independence there is in optimizing within it.
  81. /// There is a strong desire to enable parallelization of optimizations over
  82. /// the call graph, and both limited fanout and merge points will (artificially
  83. /// in some cases) limit the scaling of such an effort.
  84. ///
  85. /// To this end, graph represents both direct and any potential resolution to
  86. /// an indirect call edge. Another way to think about it is that it represents
  87. /// both the direct call edges and any direct call edges that might be formed
  88. /// through static optimizations. Specifically, it considers taking the address
  89. /// of a function to be an edge in the call graph because this might be
  90. /// forwarded to become a direct call by some subsequent function-local
  91. /// optimization. The result is that the graph closely follows the use-def
  92. /// edges for functions. Walking "up" the graph can be done by looking at all
  93. /// of the uses of a function.
  94. ///
  95. /// The roots of the call graph are the external functions and functions
  96. /// escaped into global variables. Those functions can be called from outside
  97. /// of the module or via unknowable means in the IR -- we may not be able to
  98. /// form even a potential call edge from a function body which may dynamically
  99. /// load the function and call it.
  100. ///
  101. /// This analysis still requires updates to remain valid after optimizations
  102. /// which could potentially change the set of potential callees. The
  103. /// constraints it operates under only make the traversal order remain valid.
  104. ///
  105. /// The entire analysis must be re-computed if full interprocedural
  106. /// optimizations run at any point. For example, globalopt completely
  107. /// invalidates the information in this analysis.
  108. ///
  109. /// FIXME: This class is named LazyCallGraph in a lame attempt to distinguish
  110. /// it from the existing CallGraph. At some point, it is expected that this
  111. /// will be the only call graph and it will be renamed accordingly.
  112. class LazyCallGraph {
  113. public:
  114. class Node;
  115. class EdgeSequence;
  116. class SCC;
  117. class RefSCC;
  118. class edge_iterator;
  119. class call_edge_iterator;
/// A class used to represent edges in the call graph.
///
/// The lazy call graph models both *call* edges and *reference* edges. Call
/// edges are much what you would expect, and exist when there is a 'call' or
/// 'invoke' instruction of some function. Reference edges are also tracked
/// alongside these, and exist whenever any instruction (transitively
/// through its operands) references a function. All call edges are
/// inherently reference edges, and so the reference graph forms a superset
/// of the formal call graph.
///
/// All of these forms of edges are fundamentally represented as outgoing
/// edges. The edges are stored in the source node and point at the target
/// node. This allows the edge structure itself to be a very compact data
/// structure: essentially a tagged pointer.
class Edge {
public:
  /// The kind of edge in the graph. Backed by `bool` so the whole edge fits
  /// in a pointer-plus-one-bit `PointerIntPair`.
  enum Kind : bool { Ref = false, Call = true };

  Edge();
  explicit Edge(Node &N, Kind K);

  /// Test whether the edge is null.
  ///
  /// This happens when an edge has been deleted. We leave the edge objects
  /// around but clear them.
  explicit operator bool() const;

  /// Returns the \c Kind of the edge.
  Kind getKind() const;

  /// Test whether the edge represents a direct call to a function.
  ///
  /// This requires that the edge is not null.
  bool isCall() const;

  /// Get the call graph node referenced by this edge.
  ///
  /// This requires that the edge is not null.
  Node &getNode() const;

  /// Get the function referenced by this edge.
  ///
  /// This requires that the edge is not null.
  Function &getFunction() const;

private:
  friend class LazyCallGraph::EdgeSequence;
  friend class LazyCallGraph::RefSCC;

  // Target node plus the Kind tag packed into the pointer's low bit.
  PointerIntPair<Node *, 1, Kind> Value;

  // Only the befriended graph-mutation machinery may retag an edge.
  void setKind(Kind K) { Value.setInt(K); }
};
  165. /// The edge sequence object.
  166. ///
  167. /// This typically exists entirely within the node but is exposed as
  168. /// a separate type because a node doesn't initially have edges. An explicit
  169. /// population step is required to produce this sequence at first and it is
  170. /// then cached in the node. It is also used to represent edges entering the
  171. /// graph from outside the module to model the graph's roots.
  172. ///
  173. /// The sequence itself both iterable and indexable. The indexes remain
  174. /// stable even as the sequence mutates (including removal).
  175. class EdgeSequence {
  176. friend class LazyCallGraph;
  177. friend class LazyCallGraph::Node;
  178. friend class LazyCallGraph::RefSCC;
  179. using VectorT = SmallVector<Edge, 4>;
  180. using VectorImplT = SmallVectorImpl<Edge>;
  181. public:
  182. /// An iterator used for the edges to both entry nodes and child nodes.
  183. class iterator
  184. : public iterator_adaptor_base<iterator, VectorImplT::iterator,
  185. std::forward_iterator_tag> {
  186. friend class LazyCallGraph;
  187. friend class LazyCallGraph::Node;
  188. VectorImplT::iterator E;
  189. // Build the iterator for a specific position in the edge list.
  190. iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  191. : iterator_adaptor_base(BaseI), E(E) {
  192. while (I != E && !*I)
  193. ++I;
  194. }
  195. public:
  196. iterator() = default;
  197. using iterator_adaptor_base::operator++;
  198. iterator &operator++() {
  199. do {
  200. ++I;
  201. } while (I != E && !*I);
  202. return *this;
  203. }
  204. };
  205. /// An iterator over specifically call edges.
  206. ///
  207. /// This has the same iteration properties as the \c iterator, but
  208. /// restricts itself to edges which represent actual calls.
  209. class call_iterator
  210. : public iterator_adaptor_base<call_iterator, VectorImplT::iterator,
  211. std::forward_iterator_tag> {
  212. friend class LazyCallGraph;
  213. friend class LazyCallGraph::Node;
  214. VectorImplT::iterator E;
  215. /// Advance the iterator to the next valid, call edge.
  216. void advanceToNextEdge() {
  217. while (I != E && (!*I || !I->isCall()))
  218. ++I;
  219. }
  220. // Build the iterator for a specific position in the edge list.
  221. call_iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  222. : iterator_adaptor_base(BaseI), E(E) {
  223. advanceToNextEdge();
  224. }
  225. public:
  226. call_iterator() = default;
  227. using iterator_adaptor_base::operator++;
  228. call_iterator &operator++() {
  229. ++I;
  230. advanceToNextEdge();
  231. return *this;
  232. }
  233. };
  234. iterator begin() { return iterator(Edges.begin(), Edges.end()); }
  235. iterator end() { return iterator(Edges.end(), Edges.end()); }
  236. Edge &operator[](Node &N) {
  237. assert(EdgeIndexMap.find(&N) != EdgeIndexMap.end() && "No such edge!");
  238. auto &E = Edges[EdgeIndexMap.find(&N)->second];
  239. assert(E && "Dead or null edge!");
  240. return E;
  241. }
  242. Edge *lookup(Node &N) {
  243. auto EI = EdgeIndexMap.find(&N);
  244. if (EI == EdgeIndexMap.end())
  245. return nullptr;
  246. auto &E = Edges[EI->second];
  247. return E ? &E : nullptr;
  248. }
  249. call_iterator call_begin() {
  250. return call_iterator(Edges.begin(), Edges.end());
  251. }
  252. call_iterator call_end() { return call_iterator(Edges.end(), Edges.end()); }
  253. iterator_range<call_iterator> calls() {
  254. return make_range(call_begin(), call_end());
  255. }
  256. bool empty() {
  257. for (auto &E : Edges)
  258. if (E)
  259. return false;
  260. return true;
  261. }
  262. private:
  263. VectorT Edges;
  264. DenseMap<Node *, int> EdgeIndexMap;
  265. EdgeSequence() = default;
  266. /// Internal helper to insert an edge to a node.
  267. void insertEdgeInternal(Node &ChildN, Edge::Kind EK);
  268. /// Internal helper to change an edge kind.
  269. void setEdgeKind(Node &ChildN, Edge::Kind EK);
  270. /// Internal helper to remove the edge to the given function.
  271. bool removeEdgeInternal(Node &ChildN);
  272. };
  273. /// A node in the call graph.
  274. ///
  275. /// This represents a single node. It's primary roles are to cache the list of
  276. /// callees, de-duplicate and provide fast testing of whether a function is
  277. /// a callee, and facilitate iteration of child nodes in the graph.
  278. ///
  279. /// The node works much like an optional in order to lazily populate the
  280. /// edges of each node. Until populated, there are no edges. Once populated,
  281. /// you can access the edges by dereferencing the node or using the `->`
  282. /// operator as if the node was an `Optional<EdgeSequence>`.
  283. class Node {
  284. friend class LazyCallGraph;
  285. friend class LazyCallGraph::RefSCC;
  286. public:
  287. LazyCallGraph &getGraph() const { return *G; }
  288. Function &getFunction() const { return *F; }
  289. StringRef getName() const { return F->getName(); }
  290. /// Equality is defined as address equality.
  291. bool operator==(const Node &N) const { return this == &N; }
  292. bool operator!=(const Node &N) const { return !operator==(N); }
  293. /// Tests whether the node has been populated with edges.
  294. bool isPopulated() const { return Edges.hasValue(); }
  295. /// Tests whether this is actually a dead node and no longer valid.
  296. ///
  297. /// Users rarely interact with nodes in this state and other methods are
  298. /// invalid. This is used to model a node in an edge list where the
  299. /// function has been completely removed.
  300. bool isDead() const {
  301. assert(!G == !F &&
  302. "Both graph and function pointers should be null or non-null.");
  303. return !G;
  304. }
  305. // We allow accessing the edges by dereferencing or using the arrow
  306. // operator, essentially wrapping the internal optional.
  307. EdgeSequence &operator*() const {
  308. // Rip const off because the node itself isn't changing here.
  309. return const_cast<EdgeSequence &>(*Edges);
  310. }
  311. EdgeSequence *operator->() const { return &**this; }
  312. /// Populate the edges of this node if necessary.
  313. ///
  314. /// The first time this is called it will populate the edges for this node
  315. /// in the graph. It does this by scanning the underlying function, so once
  316. /// this is done, any changes to that function must be explicitly reflected
  317. /// in updates to the graph.
  318. ///
  319. /// \returns the populated \c EdgeSequence to simplify walking it.
  320. ///
  321. /// This will not update or re-scan anything if called repeatedly. Instead,
  322. /// the edge sequence is cached and returned immediately on subsequent
  323. /// calls.
  324. EdgeSequence &populate() {
  325. if (Edges)
  326. return *Edges;
  327. return populateSlow();
  328. }
  329. private:
  330. LazyCallGraph *G;
  331. Function *F;
  332. // We provide for the DFS numbering and Tarjan walk lowlink numbers to be
  333. // stored directly within the node. These are both '-1' when nodes are part
  334. // of an SCC (or RefSCC), or '0' when not yet reached in a DFS walk.
  335. int DFSNumber = 0;
  336. int LowLink = 0;
  337. Optional<EdgeSequence> Edges;
  338. /// Basic constructor implements the scanning of F into Edges and
  339. /// EdgeIndexMap.
  340. Node(LazyCallGraph &G, Function &F) : G(&G), F(&F) {}
  341. /// Implementation of the scan when populating.
  342. EdgeSequence &populateSlow();
  343. /// Internal helper to directly replace the function with a new one.
  344. ///
  345. /// This is used to facilitate tranfsormations which need to replace the
  346. /// formal Function object but directly move the body and users from one to
  347. /// the other.
  348. void replaceFunction(Function &NewF);
  349. void clear() { Edges.reset(); }
  350. /// Print the name of this node's function.
  351. friend raw_ostream &operator<<(raw_ostream &OS, const Node &N) {
  352. return OS << N.F->getName();
  353. }
  354. /// Dump the name of this node's function to stderr.
  355. void dump() const;
  356. };
  357. /// An SCC of the call graph.
  358. ///
  359. /// This represents a Strongly Connected Component of the direct call graph
  360. /// -- ignoring indirect calls and function references. It stores this as
  361. /// a collection of call graph nodes. While the order of nodes in the SCC is
  362. /// stable, it is not any particular order.
  363. ///
  364. /// The SCCs are nested within a \c RefSCC, see below for details about that
  365. /// outer structure. SCCs do not support mutation of the call graph, that
  366. /// must be done through the containing \c RefSCC in order to fully reason
  367. /// about the ordering and connections of the graph.
  368. class SCC {
  369. friend class LazyCallGraph;
  370. friend class LazyCallGraph::Node;
  371. RefSCC *OuterRefSCC;
  372. SmallVector<Node *, 1> Nodes;
  373. template <typename NodeRangeT>
  374. SCC(RefSCC &OuterRefSCC, NodeRangeT &&Nodes)
  375. : OuterRefSCC(&OuterRefSCC), Nodes(std::forward<NodeRangeT>(Nodes)) {}
  376. void clear() {
  377. OuterRefSCC = nullptr;
  378. Nodes.clear();
  379. }
  380. /// Print a short descrtiption useful for debugging or logging.
  381. ///
  382. /// We print the function names in the SCC wrapped in '()'s and skipping
  383. /// the middle functions if there are a large number.
  384. //
  385. // Note: this is defined inline to dodge issues with GCC's interpretation
  386. // of enclosing namespaces for friend function declarations.
  387. friend raw_ostream &operator<<(raw_ostream &OS, const SCC &C) {
  388. OS << '(';
  389. int i = 0;
  390. for (LazyCallGraph::Node &N : C) {
  391. if (i > 0)
  392. OS << ", ";
  393. // Elide the inner elements if there are too many.
  394. if (i > 8) {
  395. OS << "..., " << *C.Nodes.back();
  396. break;
  397. }
  398. OS << N;
  399. ++i;
  400. }
  401. OS << ')';
  402. return OS;
  403. }
  404. /// Dump a short description of this SCC to stderr.
  405. void dump() const;
  406. #ifndef NDEBUG
  407. /// Verify invariants about the SCC.
  408. ///
  409. /// This will attempt to validate all of the basic invariants within an
  410. /// SCC, but not that it is a strongly connected componet per-se. Primarily
  411. /// useful while building and updating the graph to check that basic
  412. /// properties are in place rather than having inexplicable crashes later.
  413. void verify();
  414. #endif
  415. public:
  416. using iterator = pointee_iterator<SmallVectorImpl<Node *>::const_iterator>;
  417. iterator begin() const { return Nodes.begin(); }
  418. iterator end() const { return Nodes.end(); }
  419. int size() const { return Nodes.size(); }
  420. RefSCC &getOuterRefSCC() const { return *OuterRefSCC; }
  421. /// Test if this SCC is a parent of \a C.
  422. ///
  423. /// Note that this is linear in the number of edges departing the current
  424. /// SCC.
  425. bool isParentOf(const SCC &C) const;
  426. /// Test if this SCC is an ancestor of \a C.
  427. ///
  428. /// Note that in the worst case this is linear in the number of edges
  429. /// departing the current SCC and every SCC in the entire graph reachable
  430. /// from this SCC. Thus this very well may walk every edge in the entire
  431. /// call graph! Do not call this in a tight loop!
  432. bool isAncestorOf(const SCC &C) const;
  433. /// Test if this SCC is a child of \a C.
  434. ///
  435. /// See the comments for \c isParentOf for detailed notes about the
  436. /// complexity of this routine.
  437. bool isChildOf(const SCC &C) const { return C.isParentOf(*this); }
  438. /// Test if this SCC is a descendant of \a C.
  439. ///
  440. /// See the comments for \c isParentOf for detailed notes about the
  441. /// complexity of this routine.
  442. bool isDescendantOf(const SCC &C) const { return C.isAncestorOf(*this); }
  443. /// Provide a short name by printing this SCC to a std::string.
  444. ///
  445. /// This copes with the fact that we don't have a name per-se for an SCC
  446. /// while still making the use of this in debugging and logging useful.
  447. std::string getName() const {
  448. std::string Name;
  449. raw_string_ostream OS(Name);
  450. OS << *this;
  451. OS.flush();
  452. return Name;
  453. }
  454. };
  455. /// A RefSCC of the call graph.
  456. ///
  457. /// This models a Strongly Connected Component of function reference edges in
  458. /// the call graph. As opposed to actual SCCs, these can be used to scope
  459. /// subgraphs of the module which are independent from other subgraphs of the
  460. /// module because they do not reference it in any way. This is also the unit
  461. /// where we do mutation of the graph in order to restrict mutations to those
  462. /// which don't violate this independence.
  463. ///
  464. /// A RefSCC contains a DAG of actual SCCs. All the nodes within the RefSCC
  465. /// are necessarily within some actual SCC that nests within it. Since
  466. /// a direct call *is* a reference, there will always be at least one RefSCC
  467. /// around any SCC.
  468. class RefSCC {
  469. friend class LazyCallGraph;
  470. friend class LazyCallGraph::Node;
  471. LazyCallGraph *G;
  472. /// A postorder list of the inner SCCs.
  473. SmallVector<SCC *, 4> SCCs;
  474. /// A map from SCC to index in the postorder list.
  475. SmallDenseMap<SCC *, int, 4> SCCIndices;
  476. /// Fast-path constructor. RefSCCs should instead be constructed by calling
  477. /// formRefSCCFast on the graph itself.
  478. RefSCC(LazyCallGraph &G);
  479. void clear() {
  480. SCCs.clear();
  481. SCCIndices.clear();
  482. }
  483. /// Print a short description useful for debugging or logging.
  484. ///
  485. /// We print the SCCs wrapped in '[]'s and skipping the middle SCCs if
  486. /// there are a large number.
  487. //
  488. // Note: this is defined inline to dodge issues with GCC's interpretation
  489. // of enclosing namespaces for friend function declarations.
  490. friend raw_ostream &operator<<(raw_ostream &OS, const RefSCC &RC) {
  491. OS << '[';
  492. int i = 0;
  493. for (LazyCallGraph::SCC &C : RC) {
  494. if (i > 0)
  495. OS << ", ";
  496. // Elide the inner elements if there are too many.
  497. if (i > 4) {
  498. OS << "..., " << *RC.SCCs.back();
  499. break;
  500. }
  501. OS << C;
  502. ++i;
  503. }
  504. OS << ']';
  505. return OS;
  506. }
  507. /// Dump a short description of this RefSCC to stderr.
  508. void dump() const;
  509. #ifndef NDEBUG
  510. /// Verify invariants about the RefSCC and all its SCCs.
  511. ///
  512. /// This will attempt to validate all of the invariants *within* the
  513. /// RefSCC, but not that it is a strongly connected component of the larger
  514. /// graph. This makes it useful even when partially through an update.
  515. ///
  516. /// Invariants checked:
  517. /// - SCCs and their indices match.
  518. /// - The SCCs list is in fact in post-order.
  519. void verify();
  520. #endif
  521. public:
  522. using iterator = pointee_iterator<SmallVectorImpl<SCC *>::const_iterator>;
  523. using range = iterator_range<iterator>;
  524. using parent_iterator =
  525. pointee_iterator<SmallPtrSetImpl<RefSCC *>::const_iterator>;
  526. iterator begin() const { return SCCs.begin(); }
  527. iterator end() const { return SCCs.end(); }
  528. ssize_t size() const { return SCCs.size(); }
  529. SCC &operator[](int Idx) { return *SCCs[Idx]; }
  530. iterator find(SCC &C) const {
  531. return SCCs.begin() + SCCIndices.find(&C)->second;
  532. }
  533. /// Test if this RefSCC is a parent of \a RC.
  534. ///
  535. /// CAUTION: This method walks every edge in the \c RefSCC, it can be very
  536. /// expensive.
  537. bool isParentOf(const RefSCC &RC) const;
  538. /// Test if this RefSCC is an ancestor of \a RC.
  539. ///
  540. /// CAUTION: This method walks the directed graph of edges as far as
  541. /// necessary to find a possible path to the argument. In the worst case
  542. /// this may walk the entire graph and can be extremely expensive.
  543. bool isAncestorOf(const RefSCC &RC) const;
  544. /// Test if this RefSCC is a child of \a RC.
  545. ///
  546. /// CAUTION: This method walks every edge in the argument \c RefSCC, it can
  547. /// be very expensive.
  548. bool isChildOf(const RefSCC &RC) const { return RC.isParentOf(*this); }
  549. /// Test if this RefSCC is a descendant of \a RC.
  550. ///
  551. /// CAUTION: This method walks the directed graph of edges as far as
  552. /// necessary to find a possible path from the argument. In the worst case
  553. /// this may walk the entire graph and can be extremely expensive.
  554. bool isDescendantOf(const RefSCC &RC) const {
  555. return RC.isAncestorOf(*this);
  556. }
  557. /// Provide a short name by printing this RefSCC to a std::string.
  558. ///
  559. /// This copes with the fact that we don't have a name per-se for an RefSCC
  560. /// while still making the use of this in debugging and logging useful.
  561. std::string getName() const {
  562. std::string Name;
  563. raw_string_ostream OS(Name);
  564. OS << *this;
  565. OS.flush();
  566. return Name;
  567. }
  568. ///@{
  569. /// \name Mutation API
  570. ///
    /// These methods provide the core API for updating the call graph in the
    /// presence of (potentially still in-flight) DFS-found RefSCCs and SCCs.
    ///
    /// Note that these methods sometimes have complex runtimes, so be careful
    /// how you call them.

    /// Make an existing internal ref edge into a call edge.
    ///
    /// This may form a larger cycle and thus collapse SCCs into TargetN's SCC.
    /// If that happens, the optional callback \p MergeCB will be invoked (if
    /// provided) on the SCCs being merged away prior to actually performing
    /// the merge. Note that this will never include the target SCC as that
    /// will be the SCC functions are merged into to resolve the cycle. Once
    /// this function returns, these merged SCCs are not in a valid state but
    /// the pointers will remain valid until destruction of the parent graph
    /// instance for the purpose of clearing cached information. This function
    /// also returns 'true' if a cycle was formed and some SCCs merged away as
    /// a convenience.
    ///
    /// After this operation, both SourceN's SCC and TargetN's SCC may move
    /// position within this RefSCC's postorder list. Any SCCs merged are
    /// merged into the TargetN's SCC in order to preserve reachability
    /// analyses which took place on that SCC.
    bool switchInternalEdgeToCall(
        Node &SourceN, Node &TargetN,
        function_ref<void(ArrayRef<SCC *> MergedSCCs)> MergeCB = {});

    /// Make an existing internal call edge between separate SCCs into a ref
    /// edge.
    ///
    /// If SourceN and TargetN are in separate SCCs within this RefSCC,
    /// changing the call edge between them to a ref edge is a trivial
    /// operation that does not require any structural changes to the call
    /// graph.
    void switchTrivialInternalEdgeToRef(Node &SourceN, Node &TargetN);

    /// Make an existing internal call edge within a single SCC into a ref
    /// edge.
    ///
    /// Since SourceN and TargetN are part of a single SCC, this SCC may be
    /// split up due to breaking a cycle in the call edges that formed it. If
    /// that happens, then this routine will insert new SCCs into the
    /// postorder list *before* the SCC of TargetN (previously the SCC of
    /// both). This preserves postorder as the TargetN can reach all of the
    /// other nodes by definition of previously being in a single SCC formed
    /// by the cycle from SourceN to TargetN.
    ///
    /// The newly added SCCs are added *immediately* and contiguously
    /// prior to the TargetN SCC and return the range covering the new SCCs in
    /// the RefSCC's postorder sequence. You can directly iterate the returned
    /// range to observe all of the new SCCs in postorder.
    ///
    /// Note that if SourceN and TargetN are in separate SCCs, the simpler
    /// routine `switchTrivialInternalEdgeToRef` should be used instead.
    iterator_range<iterator> switchInternalEdgeToRef(Node &SourceN,
                                                     Node &TargetN);

    /// Make an existing outgoing ref edge into a call edge.
    ///
    /// Note that this is trivial as there are no cyclic impacts and there
    /// remains a reference edge.
    void switchOutgoingEdgeToCall(Node &SourceN, Node &TargetN);

    /// Make an existing outgoing call edge into a ref edge.
    ///
    /// This is trivial as there are no cyclic impacts and there remains
    /// a reference edge.
    void switchOutgoingEdgeToRef(Node &SourceN, Node &TargetN);

    /// Insert a ref edge from one node in this RefSCC to another in this
    /// RefSCC.
    ///
    /// This is always a trivial operation as it doesn't change any part of
    /// the graph structure besides connecting the two nodes.
    ///
    /// Note that we don't support directly inserting internal *call* edges
    /// because that could change the graph structure and requires returning
    /// information about what became invalid. As a consequence, the pattern
    /// should be to first insert the necessary ref edge, and then to switch
    /// it to a call edge if needed and handle any invalidation that results.
    /// See the \c switchInternalEdgeToCall routine for details.
    void insertInternalRefEdge(Node &SourceN, Node &TargetN);

    /// Insert an edge whose parent is in this RefSCC and child is in some
    /// child RefSCC.
    ///
    /// There must be an existing path from the \p SourceN to the \p TargetN.
    /// This operation is inexpensive and does not change the set of SCCs and
    /// RefSCCs in the graph.
    void insertOutgoingEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);

    /// Insert an edge whose source is in a descendant RefSCC and target is
    /// in this RefSCC.
    ///
    /// There must be an existing path from the target to the source in this
    /// case.
    ///
    /// NB! This has the potential to be a very expensive function. It
    /// inherently forms a cycle in the prior RefSCC DAG and we have to merge
    /// RefSCCs to resolve that cycle. But finding all of the RefSCCs which
    /// participate in the cycle can in the worst case require traversing
    /// every RefSCC in the graph. Every attempt is made to avoid that, but
    /// passes must still exercise caution calling this routine repeatedly.
    ///
    /// Also note that this can only insert ref edges. In order to insert
    /// a call edge, first insert a ref edge and then switch it to a call
    /// edge. These are intentionally kept as separate interfaces because each
    /// step of the operation invalidates a different set of data structures.
    ///
    /// This returns all the RefSCCs which were merged into this RefSCC
    /// (the target's). This allows callers to invalidate any cached
    /// information.
    ///
    /// FIXME: We could possibly optimize this quite a bit for cases where the
    /// caller and callee are very nearby in the graph. See comments in the
    /// implementation for details, but that use case might impact users.
    SmallVector<RefSCC *, 1> insertIncomingRefEdge(Node &SourceN,
                                                   Node &TargetN);

    /// Remove an edge whose source is in this RefSCC and target is *not*.
    ///
    /// This removes an inter-RefSCC edge. All inter-RefSCC edges originating
    /// from this SCC have been fully explored by any in-flight DFS graph
    /// formation, so this is always safe to call once you have the source
    /// RefSCC.
    ///
    /// This operation does not change the cyclic structure of the graph and
    /// so is very inexpensive. It may change the connectivity graph of the
    /// SCCs though, so be careful calling this while iterating over them.
    void removeOutgoingEdge(Node &SourceN, Node &TargetN);

    /// Remove a list of ref edges which are entirely within this RefSCC.
    ///
    /// Both the \a SourceN and all of the \a TargetNs must be within this
    /// RefSCC. Removing these edges may break cycles that form this RefSCC
    /// and thus this operation may change the RefSCC graph significantly. In
    /// particular, this operation will re-form new RefSCCs based on the
    /// remaining connectivity of the graph. The following invariants are
    /// guaranteed to hold after calling this method:
    ///
    /// 1) If a ref-cycle remains after removal, it leaves this RefSCC intact
    ///    and in the graph. No new RefSCCs are built.
    /// 2) Otherwise, this RefSCC will be dead after this call and no longer
    ///    in the graph or the postorder traversal of the call graph. Any
    ///    iterator pointing at this RefSCC will become invalid.
    /// 3) All newly formed RefSCCs will be returned and the order of the
    ///    RefSCCs returned will be a valid postorder traversal of the new
    ///    RefSCCs.
    /// 4) No RefSCC other than this RefSCC has its member set changed (this
    ///    is inherent in the definition of removing such an edge).
    ///
    /// These invariants are very important to ensure that we can build
    /// optimization pipelines on top of the CGSCC pass manager which
    /// intelligently update the RefSCC graph without invalidating other parts
    /// of the RefSCC graph.
    ///
    /// Note that we provide no routine to remove a *call* edge. Instead, you
    /// must first switch it to a ref edge using \c switchInternalEdgeToRef.
    /// This split API is intentional as each of these two steps can
    /// invalidate a different aspect of the graph structure and needs to have
    /// the invalidation handled independently.
    ///
    /// The runtime complexity of this method is, in the worst case, O(V+E)
    /// where V is the number of nodes in this RefSCC and E is the number of
    /// edges leaving the nodes in this RefSCC. Note that E includes both
    /// edges within this RefSCC and edges from this RefSCC to child RefSCCs.
    /// Some effort has been made to minimize the overhead of common cases
    /// such as self-edges and edge removals which result in a spanning tree
    /// with no more cycles.
    SmallVector<RefSCC *, 1> removeInternalRefEdge(Node &SourceN,
                                                   ArrayRef<Node *> TargetNs);

    /// A convenience wrapper around the above to handle trivial cases of
    /// inserting a new call edge.
    ///
    /// This is trivial whenever the target is in the same SCC as the source
    /// or the edge is an outgoing edge to some descendant SCC. In these cases
    /// there is no change to the cyclic structure of SCCs or RefSCCs.
    ///
    /// To further make calling this convenient, it also handles inserting
    /// already existing edges.
    void insertTrivialCallEdge(Node &SourceN, Node &TargetN);

    /// A convenience wrapper around the above to handle trivial cases of
    /// inserting a new ref edge.
    ///
    /// This is trivial whenever the target is in the same RefSCC as the
    /// source or the edge is an outgoing edge to some descendant RefSCC. In
    /// these cases there is no change to the cyclic structure of the RefSCCs.
    ///
    /// To further make calling this convenient, it also handles inserting
    /// already existing edges.
    void insertTrivialRefEdge(Node &SourceN, Node &TargetN);

    /// Directly replace a node's function with a new function.
    ///
    /// This should be used when moving the body and users of a function to
    /// a new formal function object but not otherwise changing the call graph
    /// structure in any way.
    ///
    /// It requires that the old function in the provided node have zero uses
    /// and the new function must have calls and references to it establishing
    /// an equivalent graph.
    void replaceNodeFunction(Node &N, Function &NewF);

    ///@}
  };
  763. /// A post-order depth-first RefSCC iterator over the call graph.
  764. ///
  765. /// This iterator walks the cached post-order sequence of RefSCCs. However,
  766. /// it trades stability for flexibility. It is restricted to a forward
  767. /// iterator but will survive mutations which insert new RefSCCs and continue
  768. /// to point to the same RefSCC even if it moves in the post-order sequence.
  769. class postorder_ref_scc_iterator
  770. : public iterator_facade_base<postorder_ref_scc_iterator,
  771. std::forward_iterator_tag, RefSCC> {
  772. friend class LazyCallGraph;
  773. friend class LazyCallGraph::Node;
  774. /// Nonce type to select the constructor for the end iterator.
  775. struct IsAtEndT {};
  776. LazyCallGraph *G;
  777. RefSCC *RC = nullptr;
  778. /// Build the begin iterator for a node.
  779. postorder_ref_scc_iterator(LazyCallGraph &G) : G(&G), RC(getRC(G, 0)) {}
  780. /// Build the end iterator for a node. This is selected purely by overload.
  781. postorder_ref_scc_iterator(LazyCallGraph &G, IsAtEndT /*Nonce*/) : G(&G) {}
  782. /// Get the post-order RefSCC at the given index of the postorder walk,
  783. /// populating it if necessary.
  784. static RefSCC *getRC(LazyCallGraph &G, int Index) {
  785. if (Index == (int)G.PostOrderRefSCCs.size())
  786. // We're at the end.
  787. return nullptr;
  788. return G.PostOrderRefSCCs[Index];
  789. }
  790. public:
  791. bool operator==(const postorder_ref_scc_iterator &Arg) const {
  792. return G == Arg.G && RC == Arg.RC;
  793. }
  794. reference operator*() const { return *RC; }
  795. using iterator_facade_base::operator++;
  796. postorder_ref_scc_iterator &operator++() {
  797. assert(RC && "Cannot increment the end iterator!");
  798. RC = getRC(*G, G->RefSCCIndices.find(RC)->second + 1);
  799. return *this;
  800. }
  801. };
  /// Construct a graph for the given module.
  ///
  /// This sets up the graph and computes all of the entry points of the
  /// graph. No function definitions are scanned until their nodes in the
  /// graph are requested during traversal.
  LazyCallGraph(Module &M,
                function_ref<TargetLibraryInfo &(Function &)> GetTLI);

  // The graph is movable but not copyable; moves must fix up back-pointers
  // into the graph (see updateGraphPtrs).
  LazyCallGraph(LazyCallGraph &&G);
  LazyCallGraph &operator=(LazyCallGraph &&RHS);

  /// Invalidation hook for the new pass manager's analysis caching.
  bool invalidate(Module &, const PreservedAnalyses &PA,
                  ModuleAnalysisManager::Invalidator &);

  // Iterate over the entry edges into the graph.
  EdgeSequence::iterator begin() { return EntryEdges.begin(); }
  EdgeSequence::iterator end() { return EntryEdges.end(); }

  /// Form all of the RefSCCs for this graph, populating the postorder
  /// sequence walked by the iterators below.
  void buildRefSCCs();

  postorder_ref_scc_iterator postorder_ref_scc_begin() {
    // If there are any entry edges at all, RefSCC formation must already have
    // run (an empty graph trivially has an empty, valid postorder).
    if (!EntryEdges.empty())
      assert(!PostOrderRefSCCs.empty() &&
             "Must form RefSCCs before iterating them!");
    return postorder_ref_scc_iterator(*this);
  }
  postorder_ref_scc_iterator postorder_ref_scc_end() {
    if (!EntryEdges.empty())
      assert(!PostOrderRefSCCs.empty() &&
             "Must form RefSCCs before iterating them!");
    return postorder_ref_scc_iterator(*this,
                                      postorder_ref_scc_iterator::IsAtEndT());
  }

  /// Range covering the post-order sequence of RefSCCs.
  iterator_range<postorder_ref_scc_iterator> postorder_ref_sccs() {
    return make_range(postorder_ref_scc_begin(), postorder_ref_scc_end());
  }
  832. /// Lookup a function in the graph which has already been scanned and added.
  833. Node *lookup(const Function &F) const { return NodeMap.lookup(&F); }
  834. /// Lookup a function's SCC in the graph.
  835. ///
  836. /// \returns null if the function hasn't been assigned an SCC via the RefSCC
  837. /// iterator walk.
  838. SCC *lookupSCC(Node &N) const { return SCCMap.lookup(&N); }
  839. /// Lookup a function's RefSCC in the graph.
  840. ///
  841. /// \returns null if the function hasn't been assigned a RefSCC via the
  842. /// RefSCC iterator walk.
  843. RefSCC *lookupRefSCC(Node &N) const {
  844. if (SCC *C = lookupSCC(N))
  845. return &C->getOuterRefSCC();
  846. return nullptr;
  847. }
  848. /// Get a graph node for a given function, scanning it to populate the graph
  849. /// data as necessary.
  850. Node &get(Function &F) {
  851. Node *&N = NodeMap[&F];
  852. if (N)
  853. return *N;
  854. return insertInto(F, N);
  855. }
  /// Get the sequence of known and defined library functions.
  ///
  /// These functions, because they are known to LLVM, can have calls
  /// introduced out of thin air from arbitrary IR.
  ArrayRef<Function *> getLibFunctions() const {
    return LibFunctions.getArrayRef();
  }

  /// Test whether a function is a known and defined library function tracked
  /// by the call graph.
  ///
  /// Because these functions are known to LLVM they are specially modeled in
  /// the call graph and even when all IR-level references have been removed
  /// remain active and reachable.
  bool isLibFunction(Function &F) const { return LibFunctions.count(&F); }
  ///@{
  /// \name Pre-SCC Mutation API
  ///
  /// These methods are only valid to call prior to forming any SCCs for this
  /// call graph. They can be used to update the core node-graph during
  /// a node-based inorder traversal that precedes any SCC-based traversal.
  ///
  /// Once you begin manipulating a call graph's SCCs, most mutation of the
  /// graph must be performed via a RefSCC method. There are some exceptions
  /// below.

  /// Update the call graph after inserting a new edge.
  void insertEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);

  /// Update the call graph after inserting a new edge.
  ///
  /// Convenience overload that scans the functions into nodes as needed.
  void insertEdge(Function &Source, Function &Target, Edge::Kind EK) {
    return insertEdge(get(Source), get(Target), EK);
  }

  /// Update the call graph after deleting an edge.
  void removeEdge(Node &SourceN, Node &TargetN);

  /// Update the call graph after deleting an edge.
  ///
  /// Convenience overload that scans the functions into nodes as needed.
  void removeEdge(Function &Source, Function &Target) {
    return removeEdge(get(Source), get(Target));
  }

  ///@}
  ///@{
  /// \name General Mutation API
  ///
  /// There are a very limited set of mutations allowed on the graph as a
  /// whole once SCCs have started to be formed. These routines have strict
  /// contracts but may be called at any point.

  /// Remove a dead function from the call graph (typically to delete it).
  ///
  /// Note that the function must have an empty use list, and the call graph
  /// must be up-to-date prior to calling this. That means it is by itself in
  /// a maximal SCC which is by itself in a maximal RefSCC, etc. No structural
  /// changes result from calling this routine other than potentially removing
  /// entry points into the call graph.
  ///
  /// If SCC formation has begun, this function must not be part of the
  /// current DFS in order to call this safely. Typically, the function will
  /// have been fully visited by the DFS prior to calling this routine.
  void removeDeadFunction(Function &F);

  /// Add a new function split/outlined from an existing function.
  ///
  /// The new function may only reference other functions that the original
  /// function did.
  ///
  /// The original function must reference (either directly or indirectly)
  /// the new function.
  ///
  /// The new function may also reference the original function.
  /// It may end up in a parent SCC in the case that the original function's
  /// edge to the new function is a ref edge, and the edge back is a call
  /// edge.
  void addSplitFunction(Function &OriginalFunction, Function &NewFunction);

  /// Add new ref-recursive functions split/outlined from an existing
  /// function.
  ///
  /// The new functions may only reference other functions that the original
  /// function did. The new functions may reference (not call) the original
  /// function.
  ///
  /// The original function must reference (not call) all new functions.
  /// All new functions must reference (not call) each other.
  void addSplitRefRecursiveFunctions(Function &OriginalFunction,
                                     ArrayRef<Function *> NewFunctions);

  ///@}
  934. ///@{
  935. /// \name Static helpers for code doing updates to the call graph.
  936. ///
  937. /// These helpers are used to implement parts of the call graph but are also
  938. /// useful to code doing updates or otherwise wanting to walk the IR in the
  939. /// same patterns as when we build the call graph.
  940. /// Recursively visits the defined functions whose address is reachable from
  941. /// every constant in the \p Worklist.
  942. ///
  943. /// Doesn't recurse through any constants already in the \p Visited set, and
  944. /// updates that set with every constant visited.
  945. ///
  946. /// For each defined function, calls \p Callback with that function.
  947. template <typename CallbackT>
  948. static void visitReferences(SmallVectorImpl<Constant *> &Worklist,
  949. SmallPtrSetImpl<Constant *> &Visited,
  950. CallbackT Callback) {
  951. while (!Worklist.empty()) {
  952. Constant *C = Worklist.pop_back_val();
  953. if (Function *F = dyn_cast<Function>(C)) {
  954. if (!F->isDeclaration())
  955. Callback(*F);
  956. continue;
  957. }
  958. // The blockaddress constant expression is a weird special case, we can't
  959. // generically walk its operands the way we do for all other constants.
  960. if (BlockAddress *BA = dyn_cast<BlockAddress>(C)) {
  961. // If we've already visited the function referred to by the block
  962. // address, we don't need to revisit it.
  963. if (Visited.count(BA->getFunction()))
  964. continue;
  965. // If all of the blockaddress' users are instructions within the
  966. // referred to function, we don't need to insert a cycle.
  967. if (llvm::all_of(BA->users(), [&](User *U) {
  968. if (Instruction *I = dyn_cast<Instruction>(U))
  969. return I->getFunction() == BA->getFunction();
  970. return false;
  971. }))
  972. continue;
  973. // Otherwise we should go visit the referred to function.
  974. Visited.insert(BA->getFunction());
  975. Worklist.push_back(BA->getFunction());
  976. continue;
  977. }
  978. for (Value *Op : C->operand_values())
  979. if (Visited.insert(cast<Constant>(Op)).second)
  980. Worklist.push_back(cast<Constant>(Op));
  981. }
  982. }
  983. ///@}
private:
  using node_stack_iterator = SmallVectorImpl<Node *>::reverse_iterator;
  using node_stack_range = iterator_range<node_stack_iterator>;

  /// Allocator that holds all the call graph nodes.
  SpecificBumpPtrAllocator<Node> BPA;

  /// Maps function->node for fast lookup.
  DenseMap<const Function *, Node *> NodeMap;

  /// The entry edges into the graph.
  ///
  /// These edges are from "external" sources. Put another way, they
  /// escape at the module scope.
  EdgeSequence EntryEdges;

  /// Allocator that holds all the call graph SCCs.
  SpecificBumpPtrAllocator<SCC> SCCBPA;

  /// Maps Node -> SCC for fast lookup.
  DenseMap<Node *, SCC *> SCCMap;

  /// Allocator that holds all the call graph RefSCCs.
  SpecificBumpPtrAllocator<RefSCC> RefSCCBPA;

  /// The post-order sequence of RefSCCs.
  ///
  /// This list is lazily formed the first time we walk the graph.
  SmallVector<RefSCC *, 16> PostOrderRefSCCs;

  /// A map from RefSCC to the index for it in the postorder sequence of
  /// RefSCCs.
  DenseMap<RefSCC *, int> RefSCCIndices;

  /// Defined functions that are also known library functions which the
  /// optimizer can reason about and therefore might introduce calls to out
  /// of thin air.
  SmallSetVector<Function *, 4> LibFunctions;

  /// Helper to insert a new function, with an already looked-up entry in
  /// the NodeMap.
  Node &insertInto(Function &F, Node *&MappedN);

  /// Helper to initialize a new node created outside of creating SCCs and
  /// add it to the NodeMap if necessary. For example, useful when a function
  /// is split.
  Node &initNode(Function &F);

  /// Helper to update pointers back to the graph object during moves.
  void updateGraphPtrs();

  /// Allocates an SCC and constructs it using the graph allocator.
  ///
  /// The arguments are forwarded to the constructor.
  template <typename... Ts> SCC *createSCC(Ts &&... Args) {
    return new (SCCBPA.Allocate()) SCC(std::forward<Ts>(Args)...);
  }

  /// Allocates a RefSCC and constructs it using the graph allocator.
  ///
  /// The arguments are forwarded to the constructor.
  template <typename... Ts> RefSCC *createRefSCC(Ts &&... Args) {
    return new (RefSCCBPA.Allocate()) RefSCC(std::forward<Ts>(Args)...);
  }

  /// Common logic for building SCCs from a sequence of roots.
  ///
  /// This is a very generic implementation of the depth-first walk and SCC
  /// formation algorithm. It uses a generic sequence of roots and generic
  /// callbacks for each step. This is designed to be used to implement both
  /// the RefSCC formation and SCC formation with shared logic.
  ///
  /// Currently this is a relatively naive implementation of Tarjan's DFS
  /// algorithm to form the SCCs.
  ///
  /// FIXME: We should consider newer variants such as Nuutila.
  template <typename RootsT, typename GetBeginT, typename GetEndT,
            typename GetNodeT, typename FormSCCCallbackT>
  static void buildGenericSCCs(RootsT &&Roots, GetBeginT &&GetBegin,
                               GetEndT &&GetEnd, GetNodeT &&GetNode,
                               FormSCCCallbackT &&FormSCC);

  /// Build the SCCs for a RefSCC out of a list of nodes.
  void buildSCCs(RefSCC &RC, node_stack_range Nodes);

  /// Get the index of a RefSCC within the postorder traversal.
  ///
  /// Requires that this RefSCC is a valid one in the (perhaps partial)
  /// postorder traversed part of the graph.
  int getRefSCCIndex(RefSCC &RC) {
    auto IndexIt = RefSCCIndices.find(&RC);
    assert(IndexIt != RefSCCIndices.end() && "RefSCC doesn't have an index!");
    assert(PostOrderRefSCCs[IndexIt->second] == &RC &&
           "Index does not point back at RC!");
    return IndexIt->second;
  }
};
  1064. inline LazyCallGraph::Edge::Edge() : Value() {}
  1065. inline LazyCallGraph::Edge::Edge(Node &N, Kind K) : Value(&N, K) {}
  1066. inline LazyCallGraph::Edge::operator bool() const {
  1067. return Value.getPointer() && !Value.getPointer()->isDead();
  1068. }
  1069. inline LazyCallGraph::Edge::Kind LazyCallGraph::Edge::getKind() const {
  1070. assert(*this && "Queried a null edge!");
  1071. return Value.getInt();
  1072. }
  1073. inline bool LazyCallGraph::Edge::isCall() const {
  1074. assert(*this && "Queried a null edge!");
  1075. return getKind() == Call;
  1076. }
  1077. inline LazyCallGraph::Node &LazyCallGraph::Edge::getNode() const {
  1078. assert(*this && "Queried a null edge!");
  1079. return *Value.getPointer();
  1080. }
  1081. inline Function &LazyCallGraph::Edge::getFunction() const {
  1082. assert(*this && "Queried a null edge!");
  1083. return getNode().getFunction();
  1084. }
// Provide GraphTraits specializations for call graphs so the generic graph
// algorithms can operate on them.
template <> struct GraphTraits<LazyCallGraph::Node *> {
  using NodeRef = LazyCallGraph::Node *;
  using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  // A node's children are the targets of its (already populated) edge
  // sequence.
  static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
};
template <> struct GraphTraits<LazyCallGraph *> {
  using NodeRef = LazyCallGraph::Node *;
  using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
};
  1100. /// An analysis pass which computes the call graph for a module.
  1101. class LazyCallGraphAnalysis : public AnalysisInfoMixin<LazyCallGraphAnalysis> {
  1102. friend AnalysisInfoMixin<LazyCallGraphAnalysis>;
  1103. static AnalysisKey Key;
  1104. public:
  1105. /// Inform generic clients of the result type.
  1106. using Result = LazyCallGraph;
  1107. /// Compute the \c LazyCallGraph for the module \c M.
  1108. ///
  1109. /// This just builds the set of entry points to the call graph. The rest is
  1110. /// built lazily as it is walked.
  1111. LazyCallGraph run(Module &M, ModuleAnalysisManager &AM) {
  1112. FunctionAnalysisManager &FAM =
  1113. AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  1114. auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
  1115. return FAM.getResult<TargetLibraryAnalysis>(F);
  1116. };
  1117. return LazyCallGraph(M, GetTLI);
  1118. }
  1119. };
/// A pass which prints the call graph to a \c raw_ostream.
///
/// This is primarily useful for testing the analysis.
class LazyCallGraphPrinterPass
    : public PassInfoMixin<LazyCallGraphPrinterPass> {
  raw_ostream &OS;

public:
  explicit LazyCallGraphPrinterPass(raw_ostream &OS);

  PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
};

/// A pass which prints the call graph as a DOT file to a \c raw_ostream.
///
/// This is primarily useful for visualization purposes.
class LazyCallGraphDOTPrinterPass
    : public PassInfoMixin<LazyCallGraphDOTPrinterPass> {
  raw_ostream &OS;

public:
  explicit LazyCallGraphDOTPrinterPass(raw_ostream &OS);

  PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
};
  1140. } // end namespace llvm
  1141. #endif // LLVM_ANALYSIS_LAZYCALLGRAPH_H
  1142. #ifdef __GNUC__
  1143. #pragma GCC diagnostic pop
  1144. #endif