// (Scrape residue removed: a "LazyCallGraph.h 50 KB" banner and a pasted
// line-number gutter from the page this file was copied from. Neither is
// part of the original source.)
#pragma once
// NOTE(review): This push/ignore pair silences -Wunused-parameter for
// *everything* that follows, including every header included below -- the
// suppression leaks into all translation units that include this file up to
// the matching pop. A "#pragma GCC diagnostic pop" is not visible in this
// chunk; confirm one exists at the end of the file. Consider scoping the
// suppression narrowly around the offending declarations instead.
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif
  6. //===- LazyCallGraph.h - Analysis of a Module's call graph ------*- C++ -*-===//
  7. //
  8. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  9. // See https://llvm.org/LICENSE.txt for license information.
  10. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  11. //
  12. //===----------------------------------------------------------------------===//
  13. /// \file
  14. ///
  15. /// Implements a lazy call graph analysis and related passes for the new pass
  16. /// manager.
  17. ///
  18. /// NB: This is *not* a traditional call graph! It is a graph which models both
  19. /// the current calls and potential calls. As a consequence there are many
  20. /// edges in this call graph that do not correspond to a 'call' or 'invoke'
  21. /// instruction.
  22. ///
  23. /// The primary use cases of this graph analysis is to facilitate iterating
  24. /// across the functions of a module in ways that ensure all callees are
  25. /// visited prior to a caller (given any SCC constraints), or vice versa. As
  26. /// such is it particularly well suited to organizing CGSCC optimizations such
  27. /// as inlining, outlining, argument promotion, etc. That is its primary use
  28. /// case and motivates the design. It may not be appropriate for other
  29. /// purposes. The use graph of functions or some other conservative analysis of
  30. /// call instructions may be interesting for optimizations and subsequent
  31. /// analyses which don't work in the context of an overly specified
  32. /// potential-call-edge graph.
  33. ///
  34. /// To understand the specific rules and nature of this call graph analysis,
  35. /// see the documentation of the \c LazyCallGraph below.
  36. ///
  37. //===----------------------------------------------------------------------===//
  38. #ifndef LLVM_ANALYSIS_LAZYCALLGRAPH_H
  39. #define LLVM_ANALYSIS_LAZYCALLGRAPH_H
  40. #include "llvm/ADT/ArrayRef.h"
  41. #include "llvm/ADT/DenseMap.h"
  42. #include "llvm/ADT/Optional.h"
  43. #include "llvm/ADT/PointerIntPair.h"
  44. #include "llvm/ADT/STLExtras.h"
  45. #include "llvm/ADT/SetVector.h"
  46. #include "llvm/ADT/SmallPtrSet.h"
  47. #include "llvm/ADT/SmallVector.h"
  48. #include "llvm/ADT/StringRef.h"
  49. #include "llvm/ADT/iterator.h"
  50. #include "llvm/ADT/iterator_range.h"
  51. #include "llvm/Analysis/TargetLibraryInfo.h"
  52. #include "llvm/IR/Constant.h"
  53. #include "llvm/IR/Constants.h"
  54. #include "llvm/IR/Function.h"
  55. #include "llvm/IR/PassManager.h"
  56. #include "llvm/Support/Allocator.h"
  57. #include "llvm/Support/Casting.h"
  58. #include "llvm/Support/raw_ostream.h"
  59. #include <cassert>
  60. #include <iterator>
  61. #include <string>
  62. #include <utility>
  63. namespace llvm {
  64. template <class GraphType> struct GraphTraits;
  65. class Module;
  66. class Value;
  67. /// A lazily constructed view of the call graph of a module.
  68. ///
  69. /// With the edges of this graph, the motivating constraint that we are
  70. /// attempting to maintain is that function-local optimization, CGSCC-local
  71. /// optimizations, and optimizations transforming a pair of functions connected
  72. /// by an edge in the graph, do not invalidate a bottom-up traversal of the SCC
  73. /// DAG. That is, no optimizations will delete, remove, or add an edge such
  74. /// that functions already visited in a bottom-up order of the SCC DAG are no
  75. /// longer valid to have visited, or such that functions not yet visited in
  76. /// a bottom-up order of the SCC DAG are not required to have already been
  77. /// visited.
  78. ///
  79. /// Within this constraint, the desire is to minimize the merge points of the
  80. /// SCC DAG. The greater the fanout of the SCC DAG and the fewer merge points
  81. /// in the SCC DAG, the more independence there is in optimizing within it.
  82. /// There is a strong desire to enable parallelization of optimizations over
  83. /// the call graph, and both limited fanout and merge points will (artificially
  84. /// in some cases) limit the scaling of such an effort.
  85. ///
  86. /// To this end, graph represents both direct and any potential resolution to
  87. /// an indirect call edge. Another way to think about it is that it represents
  88. /// both the direct call edges and any direct call edges that might be formed
  89. /// through static optimizations. Specifically, it considers taking the address
  90. /// of a function to be an edge in the call graph because this might be
  91. /// forwarded to become a direct call by some subsequent function-local
  92. /// optimization. The result is that the graph closely follows the use-def
  93. /// edges for functions. Walking "up" the graph can be done by looking at all
  94. /// of the uses of a function.
  95. ///
  96. /// The roots of the call graph are the external functions and functions
  97. /// escaped into global variables. Those functions can be called from outside
  98. /// of the module or via unknowable means in the IR -- we may not be able to
  99. /// form even a potential call edge from a function body which may dynamically
  100. /// load the function and call it.
  101. ///
  102. /// This analysis still requires updates to remain valid after optimizations
  103. /// which could potentially change the set of potential callees. The
  104. /// constraints it operates under only make the traversal order remain valid.
  105. ///
  106. /// The entire analysis must be re-computed if full interprocedural
  107. /// optimizations run at any point. For example, globalopt completely
  108. /// invalidates the information in this analysis.
  109. ///
  110. /// FIXME: This class is named LazyCallGraph in a lame attempt to distinguish
  111. /// it from the existing CallGraph. At some point, it is expected that this
  112. /// will be the only call graph and it will be renamed accordingly.
  113. class LazyCallGraph {
  114. public:
  115. class Node;
  116. class EdgeSequence;
  117. class SCC;
  118. class RefSCC;
/// A class used to represent edges in the call graph.
///
/// The lazy call graph models both *call* edges and *reference* edges. Call
/// edges are much what you would expect, and exist when there is a 'call' or
/// 'invoke' instruction of some function. Reference edges are also tracked
/// along side these, and exist whenever any instruction (transitively
/// through its operands) references a function. All call edges are
/// inherently reference edges, and so the reference graph forms a superset
/// of the formal call graph.
///
/// All of these forms of edges are fundamentally represented as outgoing
/// edges. The edges are stored in the source node and point at the target
/// node. This allows the edge structure itself to be a very compact data
/// structure: essentially a tagged pointer.
class Edge {
public:
  /// The kind of edge in the graph. Backed by `bool` so the kind fits into
  /// the single spare low bit of the tagged pointer below.
  enum Kind : bool { Ref = false, Call = true };

  Edge();
  explicit Edge(Node &N, Kind K);

  /// Test whether the edge is null.
  ///
  /// This happens when an edge has been deleted. We leave the edge objects
  /// around but clear them.
  explicit operator bool() const;

  /// Returns the \c Kind of the edge.
  Kind getKind() const;

  /// Test whether the edge represents a direct call to a function.
  ///
  /// This requires that the edge is not null.
  bool isCall() const;

  /// Get the call graph node referenced by this edge.
  ///
  /// This requires that the edge is not null.
  Node &getNode() const;

  /// Get the function referenced by this edge.
  ///
  /// This requires that the edge is not null.
  Function &getFunction() const;

private:
  friend class LazyCallGraph::EdgeSequence;
  friend class LazyCallGraph::RefSCC;

  // Target node pointer with the edge Kind packed into the pointer's low
  // bit: an Edge is exactly one pointer wide. A null pointer encodes the
  // deleted/null edge state tested by operator bool().
  PointerIntPair<Node *, 1, Kind> Value;

  // Only the graph-mutation friends above may retag an edge in place.
  void setKind(Kind K) { Value.setInt(K); }
};
  164. /// The edge sequence object.
  165. ///
  166. /// This typically exists entirely within the node but is exposed as
  167. /// a separate type because a node doesn't initially have edges. An explicit
  168. /// population step is required to produce this sequence at first and it is
  169. /// then cached in the node. It is also used to represent edges entering the
  170. /// graph from outside the module to model the graph's roots.
  171. ///
  172. /// The sequence itself both iterable and indexable. The indexes remain
  173. /// stable even as the sequence mutates (including removal).
  174. class EdgeSequence {
  175. friend class LazyCallGraph;
  176. friend class LazyCallGraph::Node;
  177. friend class LazyCallGraph::RefSCC;
  178. using VectorT = SmallVector<Edge, 4>;
  179. using VectorImplT = SmallVectorImpl<Edge>;
  180. public:
  181. /// An iterator used for the edges to both entry nodes and child nodes.
  182. class iterator
  183. : public iterator_adaptor_base<iterator, VectorImplT::iterator,
  184. std::forward_iterator_tag> {
  185. friend class LazyCallGraph;
  186. friend class LazyCallGraph::Node;
  187. VectorImplT::iterator E;
  188. // Build the iterator for a specific position in the edge list.
  189. iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  190. : iterator_adaptor_base(BaseI), E(E) {
  191. while (I != E && !*I)
  192. ++I;
  193. }
  194. public:
  195. iterator() = default;
  196. using iterator_adaptor_base::operator++;
  197. iterator &operator++() {
  198. do {
  199. ++I;
  200. } while (I != E && !*I);
  201. return *this;
  202. }
  203. };
  204. /// An iterator over specifically call edges.
  205. ///
  206. /// This has the same iteration properties as the \c iterator, but
  207. /// restricts itself to edges which represent actual calls.
  208. class call_iterator
  209. : public iterator_adaptor_base<call_iterator, VectorImplT::iterator,
  210. std::forward_iterator_tag> {
  211. friend class LazyCallGraph;
  212. friend class LazyCallGraph::Node;
  213. VectorImplT::iterator E;
  214. /// Advance the iterator to the next valid, call edge.
  215. void advanceToNextEdge() {
  216. while (I != E && (!*I || !I->isCall()))
  217. ++I;
  218. }
  219. // Build the iterator for a specific position in the edge list.
  220. call_iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  221. : iterator_adaptor_base(BaseI), E(E) {
  222. advanceToNextEdge();
  223. }
  224. public:
  225. call_iterator() = default;
  226. using iterator_adaptor_base::operator++;
  227. call_iterator &operator++() {
  228. ++I;
  229. advanceToNextEdge();
  230. return *this;
  231. }
  232. };
  233. iterator begin() { return iterator(Edges.begin(), Edges.end()); }
  234. iterator end() { return iterator(Edges.end(), Edges.end()); }
  235. Edge &operator[](Node &N) {
  236. assert(EdgeIndexMap.find(&N) != EdgeIndexMap.end() && "No such edge!");
  237. auto &E = Edges[EdgeIndexMap.find(&N)->second];
  238. assert(E && "Dead or null edge!");
  239. return E;
  240. }
  241. Edge *lookup(Node &N) {
  242. auto EI = EdgeIndexMap.find(&N);
  243. if (EI == EdgeIndexMap.end())
  244. return nullptr;
  245. auto &E = Edges[EI->second];
  246. return E ? &E : nullptr;
  247. }
  248. call_iterator call_begin() {
  249. return call_iterator(Edges.begin(), Edges.end());
  250. }
  251. call_iterator call_end() { return call_iterator(Edges.end(), Edges.end()); }
  252. iterator_range<call_iterator> calls() {
  253. return make_range(call_begin(), call_end());
  254. }
  255. bool empty() {
  256. for (auto &E : Edges)
  257. if (E)
  258. return false;
  259. return true;
  260. }
  261. private:
  262. VectorT Edges;
  263. DenseMap<Node *, int> EdgeIndexMap;
  264. EdgeSequence() = default;
  265. /// Internal helper to insert an edge to a node.
  266. void insertEdgeInternal(Node &ChildN, Edge::Kind EK);
  267. /// Internal helper to change an edge kind.
  268. void setEdgeKind(Node &ChildN, Edge::Kind EK);
  269. /// Internal helper to remove the edge to the given function.
  270. bool removeEdgeInternal(Node &ChildN);
  271. };
/// A node in the call graph.
///
/// This represents a single node. Its primary roles are to cache the list of
/// callees, de-duplicate and provide fast testing of whether a function is a
/// callee, and facilitate iteration of child nodes in the graph.
///
/// The node works much like an optional in order to lazily populate the
/// edges of each node. Until populated, there are no edges. Once populated,
/// you can access the edges by dereferencing the node or using the `->`
/// operator as if the node was an `Optional<EdgeSequence>`.
class Node {
  friend class LazyCallGraph;
  friend class LazyCallGraph::RefSCC;

public:
  LazyCallGraph &getGraph() const { return *G; }

  Function &getFunction() const { return *F; }

  StringRef getName() const { return F->getName(); }

  /// Equality is defined as address equality.
  bool operator==(const Node &N) const { return this == &N; }
  bool operator!=(const Node &N) const { return !operator==(N); }

  /// Tests whether the node has been populated with edges.
  bool isPopulated() const { return Edges.hasValue(); }

  /// Tests whether this is actually a dead node and no longer valid.
  ///
  /// Users rarely interact with nodes in this state and other methods are
  /// invalid. This is used to model a node in an edge list where the
  /// function has been completely removed.
  bool isDead() const {
    assert(!G == !F &&
           "Both graph and function pointers should be null or non-null.");
    return !G;
  }

  // We allow accessing the edges by dereferencing or using the arrow
  // operator, essentially wrapping the internal optional.
  EdgeSequence &operator*() const {
    // Rip const off because the node itself isn't changing here.
    return const_cast<EdgeSequence &>(*Edges);
  }
  EdgeSequence *operator->() const { return &**this; }

  /// Populate the edges of this node if necessary.
  ///
  /// The first time this is called it will populate the edges for this node
  /// in the graph. It does this by scanning the underlying function, so once
  /// this is done, any changes to that function must be explicitly reflected
  /// in updates to the graph.
  ///
  /// \returns the populated \c EdgeSequence to simplify walking it.
  ///
  /// This will not update or re-scan anything if called repeatedly. Instead,
  /// the edge sequence is cached and returned immediately on subsequent
  /// calls.
  EdgeSequence &populate() {
    // Fast path: edges already cached.
    if (Edges)
      return *Edges;

    return populateSlow();
  }

private:
  LazyCallGraph *G;
  Function *F;

  // We provide for the DFS numbering and Tarjan walk lowlink numbers to be
  // stored directly within the node. These are both '-1' when nodes are part
  // of an SCC (or RefSCC), or '0' when not yet reached in a DFS walk.
  int DFSNumber = 0;
  int LowLink = 0;

  // Lazily-populated edge cache; absent until populate() has run.
  Optional<EdgeSequence> Edges;

  /// Constructor merely records the owning graph and function; the scan of
  /// F into Edges and EdgeIndexMap is deferred to populateSlow().
  Node(LazyCallGraph &G, Function &F) : G(&G), F(&F) {}

  /// Implementation of the scan when populating.
  EdgeSequence &populateSlow();

  /// Internal helper to directly replace the function with a new one.
  ///
  /// This is used to facilitate transformations which need to replace the
  /// formal Function object but directly move the body and users from one to
  /// the other.
  void replaceFunction(Function &NewF);

  // Drop the cached edges, returning the node to the unpopulated state.
  void clear() { Edges.reset(); }

  /// Print the name of this node's function.
  friend raw_ostream &operator<<(raw_ostream &OS, const Node &N) {
    return OS << N.F->getName();
  }

  /// Dump the name of this node's function to stderr.
  void dump() const;
};
/// An SCC of the call graph.
///
/// This represents a Strongly Connected Component of the direct call graph
/// -- ignoring indirect calls and function references. It stores this as
/// a collection of call graph nodes. While the order of nodes in the SCC is
/// stable, it is not any particular order.
///
/// The SCCs are nested within a \c RefSCC, see below for details about that
/// outer structure. SCCs do not support mutation of the call graph, that
/// must be done through the containing \c RefSCC in order to fully reason
/// about the ordering and connections of the graph.
class LLVM_EXTERNAL_VISIBILITY SCC {
  friend class LazyCallGraph;
  friend class LazyCallGraph::Node;

  // Owning RefSCC; nulled by clear() when this SCC is merged away.
  RefSCC *OuterRefSCC;
  SmallVector<Node *, 1> Nodes;

  template <typename NodeRangeT>
  SCC(RefSCC &OuterRefSCC, NodeRangeT &&Nodes)
      : OuterRefSCC(&OuterRefSCC), Nodes(std::forward<NodeRangeT>(Nodes)) {}

  // Reset to the invalid/merged-away state; pointers to this SCC stay valid
  // but the SCC itself holds nothing.
  void clear() {
    OuterRefSCC = nullptr;
    Nodes.clear();
  }

  /// Print a short description useful for debugging or logging.
  ///
  /// We print the function names in the SCC wrapped in '()'s and skipping
  /// the middle functions if there are a large number.
  //
  // Note: this is defined inline to dodge issues with GCC's interpretation
  // of enclosing namespaces for friend function declarations.
  friend raw_ostream &operator<<(raw_ostream &OS, const SCC &C) {
    OS << '(';
    int i = 0;
    for (LazyCallGraph::Node &N : C) {
      if (i > 0)
        OS << ", ";
      // Elide the inner elements if there are too many: after 9 entries,
      // print "..." followed by the last node only.
      if (i > 8) {
        OS << "..., " << *C.Nodes.back();
        break;
      }
      OS << N;
      ++i;
    }
    OS << ')';
    return OS;
  }

  /// Dump a short description of this SCC to stderr.
  void dump() const;

#if !defined(NDEBUG) || defined(EXPENSIVE_CHECKS)
  /// Verify invariants about the SCC.
  ///
  /// This will attempt to validate all of the basic invariants within an
  /// SCC, but not that it is a strongly connected component per se.
  /// Primarily useful while building and updating the graph to check that
  /// basic properties are in place rather than having inexplicable crashes
  /// later.
  void verify();
#endif

public:
  using iterator = pointee_iterator<SmallVectorImpl<Node *>::const_iterator>;

  iterator begin() const { return Nodes.begin(); }
  iterator end() const { return Nodes.end(); }

  // NOTE(review): implicitly narrows size_t to int -- presumably safe for
  // realistic SCC sizes, but worth confirming against callers.
  int size() const { return Nodes.size(); }

  RefSCC &getOuterRefSCC() const { return *OuterRefSCC; }

  /// Test if this SCC is a parent of \a C.
  ///
  /// Note that this is linear in the number of edges departing the current
  /// SCC.
  bool isParentOf(const SCC &C) const;

  /// Test if this SCC is an ancestor of \a C.
  ///
  /// Note that in the worst case this is linear in the number of edges
  /// departing the current SCC and every SCC in the entire graph reachable
  /// from this SCC. Thus this very well may walk every edge in the entire
  /// call graph! Do not call this in a tight loop!
  bool isAncestorOf(const SCC &C) const;

  /// Test if this SCC is a child of \a C.
  ///
  /// See the comments for \c isParentOf for detailed notes about the
  /// complexity of this routine.
  bool isChildOf(const SCC &C) const { return C.isParentOf(*this); }

  /// Test if this SCC is a descendant of \a C.
  ///
  /// See the comments for \c isParentOf for detailed notes about the
  /// complexity of this routine.
  bool isDescendantOf(const SCC &C) const { return C.isAncestorOf(*this); }

  /// Provide a short name by printing this SCC to a std::string.
  ///
  /// This copes with the fact that we don't have a name per se for an SCC
  /// while still making the use of this in debugging and logging useful.
  std::string getName() const {
    std::string Name;
    raw_string_ostream OS(Name);
    OS << *this;
    OS.flush();
    return Name;
  }
};
  455. /// A RefSCC of the call graph.
  456. ///
  457. /// This models a Strongly Connected Component of function reference edges in
  458. /// the call graph. As opposed to actual SCCs, these can be used to scope
  459. /// subgraphs of the module which are independent from other subgraphs of the
  460. /// module because they do not reference it in any way. This is also the unit
  461. /// where we do mutation of the graph in order to restrict mutations to those
  462. /// which don't violate this independence.
  463. ///
  464. /// A RefSCC contains a DAG of actual SCCs. All the nodes within the RefSCC
  465. /// are necessarily within some actual SCC that nests within it. Since
  466. /// a direct call *is* a reference, there will always be at least one RefSCC
  467. /// around any SCC.
  468. class RefSCC {
  469. friend class LazyCallGraph;
  470. friend class LazyCallGraph::Node;
  471. LazyCallGraph *G;
  472. /// A postorder list of the inner SCCs.
  473. SmallVector<SCC *, 4> SCCs;
  474. /// A map from SCC to index in the postorder list.
  475. SmallDenseMap<SCC *, int, 4> SCCIndices;
  476. /// Fast-path constructor. RefSCCs should instead be constructed by calling
  477. /// formRefSCCFast on the graph itself.
  478. RefSCC(LazyCallGraph &G);
  479. void clear() {
  480. SCCs.clear();
  481. SCCIndices.clear();
  482. }
  483. /// Print a short description useful for debugging or logging.
  484. ///
  485. /// We print the SCCs wrapped in '[]'s and skipping the middle SCCs if
  486. /// there are a large number.
  487. //
  488. // Note: this is defined inline to dodge issues with GCC's interpretation
  489. // of enclosing namespaces for friend function declarations.
  490. friend raw_ostream &operator<<(raw_ostream &OS, const RefSCC &RC) {
  491. OS << '[';
  492. int i = 0;
  493. for (LazyCallGraph::SCC &C : RC) {
  494. if (i > 0)
  495. OS << ", ";
  496. // Elide the inner elements if there are too many.
  497. if (i > 4) {
  498. OS << "..., " << *RC.SCCs.back();
  499. break;
  500. }
  501. OS << C;
  502. ++i;
  503. }
  504. OS << ']';
  505. return OS;
  506. }
  507. /// Dump a short description of this RefSCC to stderr.
  508. void dump() const;
  509. #if !defined(NDEBUG) || defined(EXPENSIVE_CHECKS)
  510. /// Verify invariants about the RefSCC and all its SCCs.
  511. ///
  512. /// This will attempt to validate all of the invariants *within* the
  513. /// RefSCC, but not that it is a strongly connected component of the larger
  514. /// graph. This makes it useful even when partially through an update.
  515. ///
  516. /// Invariants checked:
  517. /// - SCCs and their indices match.
  518. /// - The SCCs list is in fact in post-order.
  519. void verify();
  520. #endif
  521. public:
  522. using iterator = pointee_iterator<SmallVectorImpl<SCC *>::const_iterator>;
  523. using range = iterator_range<iterator>;
  524. using parent_iterator =
  525. pointee_iterator<SmallPtrSetImpl<RefSCC *>::const_iterator>;
  526. iterator begin() const { return SCCs.begin(); }
  527. iterator end() const { return SCCs.end(); }
  528. ssize_t size() const { return SCCs.size(); }
  529. SCC &operator[](int Idx) { return *SCCs[Idx]; }
  530. iterator find(SCC &C) const {
  531. return SCCs.begin() + SCCIndices.find(&C)->second;
  532. }
  533. /// Test if this RefSCC is a parent of \a RC.
  534. ///
  535. /// CAUTION: This method walks every edge in the \c RefSCC, it can be very
  536. /// expensive.
  537. bool isParentOf(const RefSCC &RC) const;
  538. /// Test if this RefSCC is an ancestor of \a RC.
  539. ///
  540. /// CAUTION: This method walks the directed graph of edges as far as
  541. /// necessary to find a possible path to the argument. In the worst case
  542. /// this may walk the entire graph and can be extremely expensive.
  543. bool isAncestorOf(const RefSCC &RC) const;
  544. /// Test if this RefSCC is a child of \a RC.
  545. ///
  546. /// CAUTION: This method walks every edge in the argument \c RefSCC, it can
  547. /// be very expensive.
  548. bool isChildOf(const RefSCC &RC) const { return RC.isParentOf(*this); }
  549. /// Test if this RefSCC is a descendant of \a RC.
  550. ///
  551. /// CAUTION: This method walks the directed graph of edges as far as
  552. /// necessary to find a possible path from the argument. In the worst case
  553. /// this may walk the entire graph and can be extremely expensive.
  554. bool isDescendantOf(const RefSCC &RC) const {
  555. return RC.isAncestorOf(*this);
  556. }
  557. /// Provide a short name by printing this RefSCC to a std::string.
  558. ///
  559. /// This copes with the fact that we don't have a name per se for an RefSCC
  560. /// while still making the use of this in debugging and logging useful.
  561. std::string getName() const {
  562. std::string Name;
  563. raw_string_ostream OS(Name);
  564. OS << *this;
  565. OS.flush();
  566. return Name;
  567. }
  568. ///@{
  569. /// \name Mutation API
  570. ///
  571. /// These methods provide the core API for updating the call graph in the
  572. /// presence of (potentially still in-flight) DFS-found RefSCCs and SCCs.
  573. ///
  574. /// Note that these methods sometimes have complex runtimes, so be careful
  575. /// how you call them.
  576. /// Make an existing internal ref edge into a call edge.
  577. ///
  578. /// This may form a larger cycle and thus collapse SCCs into TargetN's SCC.
  579. /// If that happens, the optional callback \p MergedCB will be invoked (if
  580. /// provided) on the SCCs being merged away prior to actually performing
  581. /// the merge. Note that this will never include the target SCC as that
  582. /// will be the SCC functions are merged into to resolve the cycle. Once
  583. /// this function returns, these merged SCCs are not in a valid state but
  584. /// the pointers will remain valid until destruction of the parent graph
  585. /// instance for the purpose of clearing cached information. This function
  586. /// also returns 'true' if a cycle was formed and some SCCs merged away as
  587. /// a convenience.
  588. ///
  589. /// After this operation, both SourceN's SCC and TargetN's SCC may move
  590. /// position within this RefSCC's postorder list. Any SCCs merged are
  591. /// merged into the TargetN's SCC in order to preserve reachability analyses
  592. /// which took place on that SCC.
  593. bool switchInternalEdgeToCall(
  594. Node &SourceN, Node &TargetN,
  595. function_ref<void(ArrayRef<SCC *> MergedSCCs)> MergeCB = {});
    /// Make an existing internal call edge between separate SCCs into a ref
    /// edge.
    ///
    /// If SourceN and TargetN are in separate SCCs within this RefSCC,
    /// changing the call edge between them to a ref edge is a trivial
    /// operation that does not require any structural changes to the call
    /// graph.
    void switchTrivialInternalEdgeToRef(Node &SourceN, Node &TargetN);

    /// Make an existing internal call edge within a single SCC into a ref
    /// edge.
    ///
    /// Since SourceN and TargetN are part of a single SCC, this SCC may be
    /// split up due to breaking a cycle in the call edges that formed it. If
    /// that happens, then this routine will insert new SCCs into the postorder
    /// list *before* the SCC of TargetN (previously the SCC of both). This
    /// preserves postorder as the TargetN can reach all of the other nodes by
    /// definition of previously being in a single SCC formed by the cycle from
    /// SourceN to TargetN.
    ///
    /// The newly added SCCs are added *immediately* and contiguously
    /// prior to the TargetN SCC and the returned range covers the new SCCs in
    /// the RefSCC's postorder sequence. You can directly iterate the returned
    /// range to observe all of the new SCCs in postorder.
    ///
    /// Note that if SourceN and TargetN are in separate SCCs, the simpler
    /// routine `switchTrivialInternalEdgeToRef` should be used instead.
    iterator_range<iterator> switchInternalEdgeToRef(Node &SourceN,
                                                     Node &TargetN);
    /// Make an existing outgoing ref edge into a call edge.
    ///
    /// Note that this is trivial as there are no cyclic impacts and there
    /// remains a reference edge.
    void switchOutgoingEdgeToCall(Node &SourceN, Node &TargetN);

    /// Make an existing outgoing call edge into a ref edge.
    ///
    /// This is trivial as there are no cyclic impacts and there remains
    /// a reference edge.
    void switchOutgoingEdgeToRef(Node &SourceN, Node &TargetN);
    /// Insert a ref edge from one node in this RefSCC to another in this
    /// RefSCC.
    ///
    /// This is always a trivial operation as it doesn't change any part of the
    /// graph structure besides connecting the two nodes.
    ///
    /// Note that we don't support directly inserting internal *call* edges
    /// because that could change the graph structure and requires returning
    /// information about what became invalid. As a consequence, the pattern
    /// should be to first insert the necessary ref edge, and then to switch it
    /// to a call edge if needed and handle any invalidation that results. See
    /// the \c switchInternalEdgeToCall routine for details.
    void insertInternalRefEdge(Node &SourceN, Node &TargetN);

    /// Insert an edge whose parent is in this RefSCC and child is in some
    /// child RefSCC.
    ///
    /// There must be an existing path from the \p SourceN to the \p TargetN.
    /// This operation is inexpensive and does not change the set of SCCs and
    /// RefSCCs in the graph.
    void insertOutgoingEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);
    /// Insert an edge whose source is in a descendant RefSCC and target is in
    /// this RefSCC.
    ///
    /// There must be an existing path from the target to the source in this
    /// case.
    ///
    /// NB! This has the potential to be a very expensive function. It
    /// inherently forms a cycle in the prior RefSCC DAG and we have to merge
    /// RefSCCs to resolve that cycle. But finding all of the RefSCCs which
    /// participate in the cycle can in the worst case require traversing every
    /// RefSCC in the graph. Every attempt is made to avoid that, but passes
    /// must still exercise caution calling this routine repeatedly.
    ///
    /// Also note that this can only insert ref edges. In order to insert
    /// a call edge, first insert a ref edge and then switch it to a call edge.
    /// These are intentionally kept as separate interfaces because each step
    /// of the operation invalidates a different set of data structures.
    ///
    /// This returns all the RefSCCs which were merged into this RefSCC
    /// (the target's). This allows callers to invalidate any cached
    /// information.
    ///
    /// FIXME: We could possibly optimize this quite a bit for cases where the
    /// caller and callee are very nearby in the graph. See comments in the
    /// implementation for details, but that use case might impact users.
    SmallVector<RefSCC *, 1> insertIncomingRefEdge(Node &SourceN,
                                                   Node &TargetN);
    /// Remove an edge whose source is in this RefSCC and target is *not*.
    ///
    /// This removes an inter-RefSCC edge. All inter-RefSCC edges originating
    /// from this SCC have been fully explored by any in-flight DFS graph
    /// formation, so this is always safe to call once you have the source
    /// RefSCC.
    ///
    /// This operation does not change the cyclic structure of the graph and so
    /// is very inexpensive. It may change the connectivity graph of the SCCs
    /// though, so be careful calling this while iterating over them.
    void removeOutgoingEdge(Node &SourceN, Node &TargetN);
    /// Remove a list of ref edges which are entirely within this RefSCC.
    ///
    /// Both the \a SourceN and all of the \a TargetNs must be within this
    /// RefSCC. Removing these edges may break cycles that form this RefSCC and
    /// thus this operation may change the RefSCC graph significantly. In
    /// particular, this operation will re-form new RefSCCs based on the
    /// remaining connectivity of the graph. The following invariants are
    /// guaranteed to hold after calling this method:
    ///
    /// 1) If a ref-cycle remains after removal, it leaves this RefSCC intact
    ///    and in the graph. No new RefSCCs are built.
    /// 2) Otherwise, this RefSCC will be dead after this call and no longer in
    ///    the graph or the postorder traversal of the call graph. Any iterator
    ///    pointing at this RefSCC will become invalid.
    /// 3) All newly formed RefSCCs will be returned and the order of the
    ///    RefSCCs returned will be a valid postorder traversal of the new
    ///    RefSCCs.
    /// 4) No RefSCC other than this RefSCC has its member set changed (this is
    ///    inherent in the definition of removing such an edge).
    ///
    /// These invariants are very important to ensure that we can build
    /// optimization pipelines on top of the CGSCC pass manager which
    /// intelligently update the RefSCC graph without invalidating other parts
    /// of the RefSCC graph.
    ///
    /// Note that we provide no routine to remove a *call* edge. Instead, you
    /// must first switch it to a ref edge using \c switchInternalEdgeToRef.
    /// This split API is intentional as each of these two steps can invalidate
    /// a different aspect of the graph structure and needs to have the
    /// invalidation handled independently.
    ///
    /// The runtime complexity of this method is, in the worst case, O(V+E)
    /// where V is the number of nodes in this RefSCC and E is the number of
    /// edges leaving the nodes in this RefSCC. Note that E includes both edges
    /// within this RefSCC and edges from this RefSCC to child RefSCCs. Some
    /// effort has been made to minimize the overhead of common cases such as
    /// self-edges and edge removals which result in a spanning tree with no
    /// more cycles.
    SmallVector<RefSCC *, 1> removeInternalRefEdge(Node &SourceN,
                                                   ArrayRef<Node *> TargetNs);
    /// A convenience wrapper around the above to handle trivial cases of
    /// inserting a new call edge.
    ///
    /// This is trivial whenever the target is in the same SCC as the source or
    /// the edge is an outgoing edge to some descendant SCC. In these cases
    /// there is no change to the cyclic structure of SCCs or RefSCCs.
    ///
    /// To further make calling this convenient, it also handles inserting
    /// already existing edges.
    void insertTrivialCallEdge(Node &SourceN, Node &TargetN);

    /// A convenience wrapper around the above to handle trivial cases of
    /// inserting a new ref edge.
    ///
    /// This is trivial whenever the target is in the same RefSCC as the source
    /// or the edge is an outgoing edge to some descendant RefSCC. In these
    /// cases there is no change to the cyclic structure of the RefSCCs.
    ///
    /// To further make calling this convenient, it also handles inserting
    /// already existing edges.
    void insertTrivialRefEdge(Node &SourceN, Node &TargetN);
    /// Directly replace a node's function with a new function.
    ///
    /// This should be used when moving the body and users of a function to
    /// a new formal function object but not otherwise changing the call graph
    /// structure in any way.
    ///
    /// It requires that the old function in the provided node have zero uses
    /// and the new function must have calls and references to it establishing
    /// an equivalent graph.
    void replaceNodeFunction(Node &N, Function &NewF);

    ///@}
  };
  763. /// A post-order depth-first RefSCC iterator over the call graph.
  764. ///
  765. /// This iterator walks the cached post-order sequence of RefSCCs. However,
  766. /// it trades stability for flexibility. It is restricted to a forward
  767. /// iterator but will survive mutations which insert new RefSCCs and continue
  768. /// to point to the same RefSCC even if it moves in the post-order sequence.
  769. class postorder_ref_scc_iterator
  770. : public iterator_facade_base<postorder_ref_scc_iterator,
  771. std::forward_iterator_tag, RefSCC> {
  772. friend class LazyCallGraph;
  773. friend class LazyCallGraph::Node;
  774. /// Nonce type to select the constructor for the end iterator.
  775. struct IsAtEndT {};
  776. LazyCallGraph *G;
  777. RefSCC *RC = nullptr;
  778. /// Build the begin iterator for a node.
  779. postorder_ref_scc_iterator(LazyCallGraph &G) : G(&G), RC(getRC(G, 0)) {
  780. incrementUntilNonEmptyRefSCC();
  781. }
  782. /// Build the end iterator for a node. This is selected purely by overload.
  783. postorder_ref_scc_iterator(LazyCallGraph &G, IsAtEndT /*Nonce*/) : G(&G) {}
  784. /// Get the post-order RefSCC at the given index of the postorder walk,
  785. /// populating it if necessary.
  786. static RefSCC *getRC(LazyCallGraph &G, int Index) {
  787. if (Index == (int)G.PostOrderRefSCCs.size())
  788. // We're at the end.
  789. return nullptr;
  790. return G.PostOrderRefSCCs[Index];
  791. }
  792. // Keep incrementing until RC is non-empty (or null).
  793. void incrementUntilNonEmptyRefSCC() {
  794. while (RC && RC->size() == 0)
  795. increment();
  796. }
  797. void increment() {
  798. assert(RC && "Cannot increment the end iterator!");
  799. RC = getRC(*G, G->RefSCCIndices.find(RC)->second + 1);
  800. }
  801. public:
  802. bool operator==(const postorder_ref_scc_iterator &Arg) const {
  803. return G == Arg.G && RC == Arg.RC;
  804. }
  805. reference operator*() const { return *RC; }
  806. using iterator_facade_base::operator++;
  807. postorder_ref_scc_iterator &operator++() {
  808. increment();
  809. incrementUntilNonEmptyRefSCC();
  810. return *this;
  811. }
  812. };
  /// Construct a graph for the given module.
  ///
  /// This sets up the graph and computes all of the entry points of the graph.
  /// No function definitions are scanned until their nodes in the graph are
  /// requested during traversal.
  LazyCallGraph(Module &M,
                function_ref<TargetLibraryInfo &(Function &)> GetTLI);

  // Moves transfer ownership of the graph; see updateGraphPtrs for the
  // back-pointer fixup this requires.
  LazyCallGraph(LazyCallGraph &&G);
  LazyCallGraph &operator=(LazyCallGraph &&RHS);

  /// Handle invalidation events for this analysis result in the new pass
  /// manager.
  bool invalidate(Module &, const PreservedAnalyses &PA,
                  ModuleAnalysisManager::Invalidator &);

  // Iteration over the entry edges into the graph.
  EdgeSequence::iterator begin() { return EntryEdges.begin(); }
  EdgeSequence::iterator end() { return EntryEdges.end(); }

  /// Force the full (post-order) formation of the RefSCCs for this graph.
  void buildRefSCCs();

  postorder_ref_scc_iterator postorder_ref_scc_begin() {
    if (!EntryEdges.empty())
      assert(!PostOrderRefSCCs.empty() &&
             "Must form RefSCCs before iterating them!");
    return postorder_ref_scc_iterator(*this);
  }
  postorder_ref_scc_iterator postorder_ref_scc_end() {
    if (!EntryEdges.empty())
      assert(!PostOrderRefSCCs.empty() &&
             "Must form RefSCCs before iterating them!");
    return postorder_ref_scc_iterator(*this,
                                      postorder_ref_scc_iterator::IsAtEndT());
  }

  /// Range covering the post-order sequence of RefSCCs.
  iterator_range<postorder_ref_scc_iterator> postorder_ref_sccs() {
    return make_range(postorder_ref_scc_begin(), postorder_ref_scc_end());
  }
  843. /// Lookup a function in the graph which has already been scanned and added.
  844. Node *lookup(const Function &F) const { return NodeMap.lookup(&F); }
  845. /// Lookup a function's SCC in the graph.
  846. ///
  847. /// \returns null if the function hasn't been assigned an SCC via the RefSCC
  848. /// iterator walk.
  849. SCC *lookupSCC(Node &N) const { return SCCMap.lookup(&N); }
  850. /// Lookup a function's RefSCC in the graph.
  851. ///
  852. /// \returns null if the function hasn't been assigned a RefSCC via the
  853. /// RefSCC iterator walk.
  854. RefSCC *lookupRefSCC(Node &N) const {
  855. if (SCC *C = lookupSCC(N))
  856. return &C->getOuterRefSCC();
  857. return nullptr;
  858. }
  859. /// Get a graph node for a given function, scanning it to populate the graph
  860. /// data as necessary.
  861. Node &get(Function &F) {
  862. Node *&N = NodeMap[&F];
  863. if (N)
  864. return *N;
  865. return insertInto(F, N);
  866. }
  /// Get the sequence of known and defined library functions.
  ///
  /// These functions, because they are known to LLVM, can have calls
  /// introduced out of thin air from arbitrary IR.
  ArrayRef<Function *> getLibFunctions() const {
    return LibFunctions.getArrayRef();
  }

  /// Test whether a function is a known and defined library function tracked
  /// by the call graph.
  ///
  /// Because these functions are known to LLVM they are specially modeled in
  /// the call graph and even when all IR-level references have been removed
  /// remain active and reachable.
  bool isLibFunction(Function &F) const { return LibFunctions.count(&F); }
  ///@{
  /// \name Pre-SCC Mutation API
  ///
  /// These methods are only valid to call prior to forming any SCCs for this
  /// call graph. They can be used to update the core node-graph during
  /// a node-based inorder traversal that precedes any SCC-based traversal.
  ///
  /// Once you begin manipulating a call graph's SCCs, most mutation of the
  /// graph must be performed via a RefSCC method. There are some exceptions
  /// below.

  /// Update the call graph after inserting a new edge.
  void insertEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);

  /// Update the call graph after inserting a new edge.
  void insertEdge(Function &Source, Function &Target, Edge::Kind EK) {
    return insertEdge(get(Source), get(Target), EK);
  }

  /// Update the call graph after deleting an edge.
  void removeEdge(Node &SourceN, Node &TargetN);

  /// Update the call graph after deleting an edge.
  void removeEdge(Function &Source, Function &Target) {
    return removeEdge(get(Source), get(Target));
  }

  ///@}

  ///@{
  /// \name General Mutation API
  ///
  /// There are a very limited set of mutations allowed on the graph as a
  /// whole once SCCs have started to be formed. These routines have strict
  /// contracts but may be called at any point.

  /// Remove a dead function from the call graph (typically to delete it).
  ///
  /// Note that the function must have an empty use list, and the call graph
  /// must be up-to-date prior to calling this. That means it is by itself in
  /// a maximal SCC which is by itself in a maximal RefSCC, etc. No structural
  /// changes result from calling this routine other than potentially removing
  /// entry points into the call graph.
  ///
  /// If SCC formation has begun, this function must not be part of the current
  /// DFS in order to call this safely. Typically, the function will have been
  /// fully visited by the DFS prior to calling this routine.
  void removeDeadFunction(Function &F);

  /// Add a new function split/outlined from an existing function.
  ///
  /// The new function may only reference other functions that the original
  /// function did.
  ///
  /// The original function must reference (either directly or indirectly) the
  /// new function.
  ///
  /// The new function may also reference the original function.
  /// It may end up in a parent SCC in the case that the original function's
  /// edge to the new function is a ref edge, and the edge back is a call edge.
  void addSplitFunction(Function &OriginalFunction, Function &NewFunction);

  /// Add new ref-recursive functions split/outlined from an existing
  /// function.
  ///
  /// The new functions may only reference other functions that the original
  /// function did. The new functions may reference (not call) the original
  /// function.
  ///
  /// The original function must reference (not call) all new functions.
  /// All new functions must reference (not call) each other.
  void addSplitRefRecursiveFunctions(Function &OriginalFunction,
                                     ArrayRef<Function *> NewFunctions);

  ///@}

  ///@{
  /// \name Static helpers for code doing updates to the call graph.
  ///
  /// These helpers are used to implement parts of the call graph but are also
  /// useful to code doing updates or otherwise wanting to walk the IR in the
  /// same patterns as when we build the call graph.

  /// Recursively visits the defined functions whose address is reachable from
  /// every constant in the \p Worklist.
  ///
  /// Doesn't recurse through any constants already in the \p Visited set, and
  /// updates that set with every constant visited.
  ///
  /// For each defined function, calls \p Callback with that function.
  static void visitReferences(SmallVectorImpl<Constant *> &Worklist,
                              SmallPtrSetImpl<Constant *> &Visited,
                              function_ref<void(Function &)> Callback);

  ///@}
private:
  using node_stack_iterator = SmallVectorImpl<Node *>::reverse_iterator;
  using node_stack_range = iterator_range<node_stack_iterator>;

  /// Allocator that holds all the call graph nodes.
  SpecificBumpPtrAllocator<Node> BPA;

  /// Maps function->node for fast lookup.
  DenseMap<const Function *, Node *> NodeMap;

  /// The entry edges into the graph.
  ///
  /// These edges are from "external" sources. Put another way, they
  /// escape at the module scope.
  EdgeSequence EntryEdges;

  /// Allocator that holds all the call graph SCCs.
  SpecificBumpPtrAllocator<SCC> SCCBPA;

  /// Maps Node -> SCC for fast lookup.
  DenseMap<Node *, SCC *> SCCMap;

  /// Allocator that holds all the call graph RefSCCs.
  SpecificBumpPtrAllocator<RefSCC> RefSCCBPA;

  /// The post-order sequence of RefSCCs.
  ///
  /// This list is lazily formed the first time we walk the graph.
  SmallVector<RefSCC *, 16> PostOrderRefSCCs;

  /// A map from RefSCC to the index for it in the postorder sequence of
  /// RefSCCs.
  DenseMap<RefSCC *, int> RefSCCIndices;

  /// Defined functions that are also known library functions which the
  /// optimizer can reason about and therefore might introduce calls to out of
  /// thin air.
  SmallSetVector<Function *, 4> LibFunctions;

  /// Helper to insert a new function, with an already looked-up entry in
  /// the NodeMap.
  Node &insertInto(Function &F, Node *&MappedN);

  /// Helper to initialize a new node created outside of creating SCCs and add
  /// it to the NodeMap if necessary. For example, useful when a function is
  /// split.
  Node &initNode(Function &F);

  /// Helper to update pointers back to the graph object during moves.
  void updateGraphPtrs();

  /// Allocates an SCC and constructs it using the graph allocator.
  ///
  /// The arguments are forwarded to the constructor.
  template <typename... Ts> SCC *createSCC(Ts &&... Args) {
    return new (SCCBPA.Allocate()) SCC(std::forward<Ts>(Args)...);
  }

  /// Allocates a RefSCC and constructs it using the graph allocator.
  ///
  /// The arguments are forwarded to the constructor.
  template <typename... Ts> RefSCC *createRefSCC(Ts &&... Args) {
    return new (RefSCCBPA.Allocate()) RefSCC(std::forward<Ts>(Args)...);
  }

  /// Common logic for building SCCs from a sequence of roots.
  ///
  /// This is a very generic implementation of the depth-first walk and SCC
  /// formation algorithm. It uses a generic sequence of roots and generic
  /// callbacks for each step. This is designed to be used to implement both
  /// the RefSCC formation and SCC formation with shared logic.
  ///
  /// Currently this is a relatively naive implementation of Tarjan's DFS
  /// algorithm to form the SCCs.
  ///
  /// FIXME: We should consider newer variants such as Nuutila.
  template <typename RootsT, typename GetBeginT, typename GetEndT,
            typename GetNodeT, typename FormSCCCallbackT>
  static void buildGenericSCCs(RootsT &&Roots, GetBeginT &&GetBegin,
                               GetEndT &&GetEnd, GetNodeT &&GetNode,
                               FormSCCCallbackT &&FormSCC);

  /// Build the SCCs for a RefSCC out of a list of nodes.
  void buildSCCs(RefSCC &RC, node_stack_range Nodes);
  1030. /// Get the index of a RefSCC within the postorder traversal.
  1031. ///
  1032. /// Requires that this RefSCC is a valid one in the (perhaps partial)
  1033. /// postorder traversed part of the graph.
  1034. int getRefSCCIndex(RefSCC &RC) {
  1035. auto IndexIt = RefSCCIndices.find(&RC);
  1036. assert(IndexIt != RefSCCIndices.end() && "RefSCC doesn't have an index!");
  1037. assert(PostOrderRefSCCs[IndexIt->second] == &RC &&
  1038. "Index does not point back at RC!");
  1039. return IndexIt->second;
  1040. }
  1041. };
  1042. inline LazyCallGraph::Edge::Edge() = default;
  1043. inline LazyCallGraph::Edge::Edge(Node &N, Kind K) : Value(&N, K) {}
  1044. inline LazyCallGraph::Edge::operator bool() const {
  1045. return Value.getPointer() && !Value.getPointer()->isDead();
  1046. }
  1047. inline LazyCallGraph::Edge::Kind LazyCallGraph::Edge::getKind() const {
  1048. assert(*this && "Queried a null edge!");
  1049. return Value.getInt();
  1050. }
  1051. inline bool LazyCallGraph::Edge::isCall() const {
  1052. assert(*this && "Queried a null edge!");
  1053. return getKind() == Call;
  1054. }
  1055. inline LazyCallGraph::Node &LazyCallGraph::Edge::getNode() const {
  1056. assert(*this && "Queried a null edge!");
  1057. return *Value.getPointer();
  1058. }
  1059. inline Function &LazyCallGraph::Edge::getFunction() const {
  1060. assert(*this && "Queried a null edge!");
  1061. return getNode().getFunction();
  1062. }
// Provide GraphTraits specializations for call graphs so generic graph
// algorithms (DFS, SCC iteration, DOT printing, ...) can walk them.
template <> struct GraphTraits<LazyCallGraph::Node *> {
  using NodeRef = LazyCallGraph::Node *;
  using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  // Note: (*N) dereferences the node to its (already populated) edge
  // sequence.
  static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
};
template <> struct GraphTraits<LazyCallGraph *> {
  using NodeRef = LazyCallGraph::Node *;
  using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
};
  1078. /// An analysis pass which computes the call graph for a module.
  1079. class LazyCallGraphAnalysis : public AnalysisInfoMixin<LazyCallGraphAnalysis> {
  1080. friend AnalysisInfoMixin<LazyCallGraphAnalysis>;
  1081. static AnalysisKey Key;
  1082. public:
  1083. /// Inform generic clients of the result type.
  1084. using Result = LazyCallGraph;
  1085. /// Compute the \c LazyCallGraph for the module \c M.
  1086. ///
  1087. /// This just builds the set of entry points to the call graph. The rest is
  1088. /// built lazily as it is walked.
  1089. LazyCallGraph run(Module &M, ModuleAnalysisManager &AM) {
  1090. FunctionAnalysisManager &FAM =
  1091. AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  1092. auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
  1093. return FAM.getResult<TargetLibraryAnalysis>(F);
  1094. };
  1095. return LazyCallGraph(M, GetTLI);
  1096. }
  1097. };
/// A pass which prints the call graph to a \c raw_ostream.
///
/// This is primarily useful for testing the analysis.
class LazyCallGraphPrinterPass
    : public PassInfoMixin<LazyCallGraphPrinterPass> {
  raw_ostream &OS; // Destination stream for the printed graph.

public:
  explicit LazyCallGraphPrinterPass(raw_ostream &OS);

  PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
};
/// A pass which prints the call graph as a DOT file to a \c raw_ostream.
///
/// This is primarily useful for visualization purposes.
class LazyCallGraphDOTPrinterPass
    : public PassInfoMixin<LazyCallGraphDOTPrinterPass> {
  raw_ostream &OS; // Destination stream for the DOT output.

public:
  explicit LazyCallGraphDOTPrinterPass(raw_ostream &OS);

  PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
};
  1118. } // end namespace llvm
  1119. #endif // LLVM_ANALYSIS_LAZYCALLGRAPH_H
  1120. #ifdef __GNUC__
  1121. #pragma GCC diagnostic pop
  1122. #endif