// LazyCallGraph.h (extracted copy, ~50 KB)
  1. #pragma once
  2. #ifdef __GNUC__
  3. #pragma GCC diagnostic push
  4. #pragma GCC diagnostic ignored "-Wunused-parameter"
  5. #endif
  6. //===- LazyCallGraph.h - Analysis of a Module's call graph ------*- C++ -*-===//
  7. //
  8. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  9. // See https://llvm.org/LICENSE.txt for license information.
  10. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  11. //
  12. //===----------------------------------------------------------------------===//
  13. /// \file
  14. ///
  15. /// Implements a lazy call graph analysis and related passes for the new pass
  16. /// manager.
  17. ///
  18. /// NB: This is *not* a traditional call graph! It is a graph which models both
  19. /// the current calls and potential calls. As a consequence there are many
  20. /// edges in this call graph that do not correspond to a 'call' or 'invoke'
  21. /// instruction.
  22. ///
  23. /// The primary use cases of this graph analysis is to facilitate iterating
  24. /// across the functions of a module in ways that ensure all callees are
  25. /// visited prior to a caller (given any SCC constraints), or vice versa. As
  26. /// such is it particularly well suited to organizing CGSCC optimizations such
  27. /// as inlining, outlining, argument promotion, etc. That is its primary use
  28. /// case and motivates the design. It may not be appropriate for other
  29. /// purposes. The use graph of functions or some other conservative analysis of
  30. /// call instructions may be interesting for optimizations and subsequent
  31. /// analyses which don't work in the context of an overly specified
  32. /// potential-call-edge graph.
  33. ///
  34. /// To understand the specific rules and nature of this call graph analysis,
  35. /// see the documentation of the \c LazyCallGraph below.
  36. ///
  37. //===----------------------------------------------------------------------===//
  38. #ifndef LLVM_ANALYSIS_LAZYCALLGRAPH_H
  39. #define LLVM_ANALYSIS_LAZYCALLGRAPH_H
  40. #include "llvm/ADT/ArrayRef.h"
  41. #include "llvm/ADT/DenseMap.h"
  42. #include "llvm/ADT/PointerIntPair.h"
  43. #include "llvm/ADT/SetVector.h"
  44. #include "llvm/ADT/SmallVector.h"
  45. #include "llvm/ADT/StringRef.h"
  46. #include "llvm/ADT/iterator.h"
  47. #include "llvm/ADT/iterator_range.h"
  48. #include "llvm/Analysis/TargetLibraryInfo.h"
  49. #include "llvm/IR/PassManager.h"
  50. #include "llvm/Support/Allocator.h"
  51. #include "llvm/Support/raw_ostream.h"
  52. #include <cassert>
  53. #include <iterator>
  54. #include <optional>
  55. #include <string>
  56. #include <utility>
  57. namespace llvm {
  58. class Constant;
  59. class Function;
  60. template <class GraphType> struct GraphTraits;
  61. class Module;
  62. class TargetLibraryInfo;
  63. class Value;
  64. /// A lazily constructed view of the call graph of a module.
  65. ///
  66. /// With the edges of this graph, the motivating constraint that we are
  67. /// attempting to maintain is that function-local optimization, CGSCC-local
  68. /// optimizations, and optimizations transforming a pair of functions connected
  69. /// by an edge in the graph, do not invalidate a bottom-up traversal of the SCC
  70. /// DAG. That is, no optimizations will delete, remove, or add an edge such
  71. /// that functions already visited in a bottom-up order of the SCC DAG are no
  72. /// longer valid to have visited, or such that functions not yet visited in
  73. /// a bottom-up order of the SCC DAG are not required to have already been
  74. /// visited.
  75. ///
  76. /// Within this constraint, the desire is to minimize the merge points of the
  77. /// SCC DAG. The greater the fanout of the SCC DAG and the fewer merge points
  78. /// in the SCC DAG, the more independence there is in optimizing within it.
  79. /// There is a strong desire to enable parallelization of optimizations over
  80. /// the call graph, and both limited fanout and merge points will (artificially
  81. /// in some cases) limit the scaling of such an effort.
  82. ///
  83. /// To this end, graph represents both direct and any potential resolution to
  84. /// an indirect call edge. Another way to think about it is that it represents
  85. /// both the direct call edges and any direct call edges that might be formed
  86. /// through static optimizations. Specifically, it considers taking the address
  87. /// of a function to be an edge in the call graph because this might be
  88. /// forwarded to become a direct call by some subsequent function-local
  89. /// optimization. The result is that the graph closely follows the use-def
  90. /// edges for functions. Walking "up" the graph can be done by looking at all
  91. /// of the uses of a function.
  92. ///
  93. /// The roots of the call graph are the external functions and functions
  94. /// escaped into global variables. Those functions can be called from outside
  95. /// of the module or via unknowable means in the IR -- we may not be able to
  96. /// form even a potential call edge from a function body which may dynamically
  97. /// load the function and call it.
  98. ///
  99. /// This analysis still requires updates to remain valid after optimizations
  100. /// which could potentially change the set of potential callees. The
  101. /// constraints it operates under only make the traversal order remain valid.
  102. ///
  103. /// The entire analysis must be re-computed if full interprocedural
  104. /// optimizations run at any point. For example, globalopt completely
  105. /// invalidates the information in this analysis.
  106. ///
  107. /// FIXME: This class is named LazyCallGraph in a lame attempt to distinguish
  108. /// it from the existing CallGraph. At some point, it is expected that this
  109. /// will be the only call graph and it will be renamed accordingly.
  110. class LazyCallGraph {
  111. public:
  112. class Node;
  113. class EdgeSequence;
  114. class SCC;
  115. class RefSCC;
  116. /// A class used to represent edges in the call graph.
  117. ///
  118. /// The lazy call graph models both *call* edges and *reference* edges. Call
  119. /// edges are much what you would expect, and exist when there is a 'call' or
  120. /// 'invoke' instruction of some function. Reference edges are also tracked
  121. /// along side these, and exist whenever any instruction (transitively
  122. /// through its operands) references a function. All call edges are
  123. /// inherently reference edges, and so the reference graph forms a superset
  124. /// of the formal call graph.
  125. ///
  126. /// All of these forms of edges are fundamentally represented as outgoing
  127. /// edges. The edges are stored in the source node and point at the target
  128. /// node. This allows the edge structure itself to be a very compact data
  129. /// structure: essentially a tagged pointer.
  class Edge {
  public:
    /// The kind of edge in the graph.
    ///
    /// Backed by a single bool so that the kind can be packed into the low
    /// bit of the node pointer below.
    enum Kind : bool { Ref = false, Call = true };

    /// Construct a null (deleted/absent) edge.
    Edge();
    /// Construct an edge to \p N of kind \p K.
    explicit Edge(Node &N, Kind K);

    /// Test whether the edge is null.
    ///
    /// This happens when an edge has been deleted. We leave the edge objects
    /// around but clear them.
    explicit operator bool() const;

    /// Returns the \c Kind of the edge.
    Kind getKind() const;

    /// Test whether the edge represents a direct call to a function.
    ///
    /// This requires that the edge is not null.
    bool isCall() const;

    /// Get the call graph node referenced by this edge.
    ///
    /// This requires that the edge is not null.
    Node &getNode() const;

    /// Get the function referenced by this edge.
    ///
    /// This requires that the edge is not null.
    Function &getFunction() const;

  private:
    friend class LazyCallGraph::EdgeSequence;
    friend class LazyCallGraph::RefSCC;

    // Tagged pointer: the target node with the call/ref kind in the low bit.
    PointerIntPair<Node *, 1, Kind> Value;

    // Flip the call/ref tag without changing the target node. Only the
    // befriended graph-mutation machinery may do this.
    void setKind(Kind K) { Value.setInt(K); }
  };
  161. /// The edge sequence object.
  162. ///
  163. /// This typically exists entirely within the node but is exposed as
  164. /// a separate type because a node doesn't initially have edges. An explicit
  165. /// population step is required to produce this sequence at first and it is
  166. /// then cached in the node. It is also used to represent edges entering the
  167. /// graph from outside the module to model the graph's roots.
  168. ///
  169. /// The sequence itself both iterable and indexable. The indexes remain
  170. /// stable even as the sequence mutates (including removal).
  171. class EdgeSequence {
  172. friend class LazyCallGraph;
  173. friend class LazyCallGraph::Node;
  174. friend class LazyCallGraph::RefSCC;
  175. using VectorT = SmallVector<Edge, 4>;
  176. using VectorImplT = SmallVectorImpl<Edge>;
  177. public:
  178. /// An iterator used for the edges to both entry nodes and child nodes.
  179. class iterator
  180. : public iterator_adaptor_base<iterator, VectorImplT::iterator,
  181. std::forward_iterator_tag> {
  182. friend class LazyCallGraph;
  183. friend class LazyCallGraph::Node;
  184. VectorImplT::iterator E;
  185. // Build the iterator for a specific position in the edge list.
  186. iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  187. : iterator_adaptor_base(BaseI), E(E) {
  188. while (I != E && !*I)
  189. ++I;
  190. }
  191. public:
  192. iterator() = default;
  193. using iterator_adaptor_base::operator++;
  194. iterator &operator++() {
  195. do {
  196. ++I;
  197. } while (I != E && !*I);
  198. return *this;
  199. }
  200. };
  201. /// An iterator over specifically call edges.
  202. ///
  203. /// This has the same iteration properties as the \c iterator, but
  204. /// restricts itself to edges which represent actual calls.
  205. class call_iterator
  206. : public iterator_adaptor_base<call_iterator, VectorImplT::iterator,
  207. std::forward_iterator_tag> {
  208. friend class LazyCallGraph;
  209. friend class LazyCallGraph::Node;
  210. VectorImplT::iterator E;
  211. /// Advance the iterator to the next valid, call edge.
  212. void advanceToNextEdge() {
  213. while (I != E && (!*I || !I->isCall()))
  214. ++I;
  215. }
  216. // Build the iterator for a specific position in the edge list.
  217. call_iterator(VectorImplT::iterator BaseI, VectorImplT::iterator E)
  218. : iterator_adaptor_base(BaseI), E(E) {
  219. advanceToNextEdge();
  220. }
  221. public:
  222. call_iterator() = default;
  223. using iterator_adaptor_base::operator++;
  224. call_iterator &operator++() {
  225. ++I;
  226. advanceToNextEdge();
  227. return *this;
  228. }
  229. };
  230. iterator begin() { return iterator(Edges.begin(), Edges.end()); }
  231. iterator end() { return iterator(Edges.end(), Edges.end()); }
  232. Edge &operator[](Node &N) {
  233. assert(EdgeIndexMap.find(&N) != EdgeIndexMap.end() && "No such edge!");
  234. auto &E = Edges[EdgeIndexMap.find(&N)->second];
  235. assert(E && "Dead or null edge!");
  236. return E;
  237. }
  238. Edge *lookup(Node &N) {
  239. auto EI = EdgeIndexMap.find(&N);
  240. if (EI == EdgeIndexMap.end())
  241. return nullptr;
  242. auto &E = Edges[EI->second];
  243. return E ? &E : nullptr;
  244. }
  245. call_iterator call_begin() {
  246. return call_iterator(Edges.begin(), Edges.end());
  247. }
  248. call_iterator call_end() { return call_iterator(Edges.end(), Edges.end()); }
  249. iterator_range<call_iterator> calls() {
  250. return make_range(call_begin(), call_end());
  251. }
  252. bool empty() {
  253. for (auto &E : Edges)
  254. if (E)
  255. return false;
  256. return true;
  257. }
  258. private:
  259. VectorT Edges;
  260. DenseMap<Node *, int> EdgeIndexMap;
  261. EdgeSequence() = default;
  262. /// Internal helper to insert an edge to a node.
  263. void insertEdgeInternal(Node &ChildN, Edge::Kind EK);
  264. /// Internal helper to change an edge kind.
  265. void setEdgeKind(Node &ChildN, Edge::Kind EK);
  266. /// Internal helper to remove the edge to the given function.
  267. bool removeEdgeInternal(Node &ChildN);
  268. };
  269. /// A node in the call graph.
  270. ///
  271. /// This represents a single node. Its primary roles are to cache the list of
  272. /// callees, de-duplicate and provide fast testing of whether a function is a
  273. /// callee, and facilitate iteration of child nodes in the graph.
  274. ///
  275. /// The node works much like an optional in order to lazily populate the
  276. /// edges of each node. Until populated, there are no edges. Once populated,
  277. /// you can access the edges by dereferencing the node or using the `->`
  278. /// operator as if the node was an `std::optional<EdgeSequence>`.
  class Node {
    friend class LazyCallGraph;
    friend class LazyCallGraph::RefSCC;

  public:
    LazyCallGraph &getGraph() const { return *G; }

    Function &getFunction() const { return *F; }

    StringRef getName() const { return F->getName(); }

    /// Equality is defined as address equality.
    bool operator==(const Node &N) const { return this == &N; }
    bool operator!=(const Node &N) const { return !operator==(N); }

    /// Tests whether the node has been populated with edges.
    bool isPopulated() const { return Edges.has_value(); }

    /// Tests whether this is actually a dead node and no longer valid.
    ///
    /// Users rarely interact with nodes in this state and other methods are
    /// invalid. This is used to model a node in an edge list where the
    /// function has been completely removed.
    bool isDead() const {
      assert(!G == !F &&
             "Both graph and function pointers should be null or non-null.");
      return !G;
    }

    // We allow accessing the edges by dereferencing or using the arrow
    // operator, essentially wrapping the internal optional.
    EdgeSequence &operator*() const {
      // Rip const off because the node itself isn't changing here.
      return const_cast<EdgeSequence &>(*Edges);
    }
    EdgeSequence *operator->() const { return &**this; }

    /// Populate the edges of this node if necessary.
    ///
    /// The first time this is called it will populate the edges for this node
    /// in the graph. It does this by scanning the underlying function, so once
    /// this is done, any changes to that function must be explicitly reflected
    /// in updates to the graph.
    ///
    /// \returns the populated \c EdgeSequence to simplify walking it.
    ///
    /// This will not update or re-scan anything if called repeatedly. Instead,
    /// the edge sequence is cached and returned immediately on subsequent
    /// calls.
    EdgeSequence &populate() {
      // Fast path: already populated; the slow scan lives out of line.
      if (Edges)
        return *Edges;

      return populateSlow();
    }

  private:
    LazyCallGraph *G;
    Function *F;

    // We provide for the DFS numbering and Tarjan walk lowlink numbers to be
    // stored directly within the node. These are both '-1' when nodes are part
    // of an SCC (or RefSCC), or '0' when not yet reached in a DFS walk.
    int DFSNumber = 0;
    int LowLink = 0;

    // Lazily-populated edge sequence; disengaged until populate() runs.
    std::optional<EdgeSequence> Edges;

    /// Basic constructor implements the scanning of F into Edges and
    /// EdgeIndexMap.
    Node(LazyCallGraph &G, Function &F) : G(&G), F(&F) {}

    /// Implementation of the scan when populating.
    EdgeSequence &populateSlow();

    /// Internal helper to directly replace the function with a new one.
    ///
    /// This is used to facilitate transformations which need to replace the
    /// formal Function object but directly move the body and users from one to
    /// the other.
    void replaceFunction(Function &NewF);

    // Drop the cached edge sequence, returning the node to the unpopulated
    // state.
    void clear() { Edges.reset(); }

    /// Print the name of this node's function.
    friend raw_ostream &operator<<(raw_ostream &OS, const Node &N) {
      return OS << N.F->getName();
    }

    /// Dump the name of this node's function to stderr.
    void dump() const;
  };
  353. /// An SCC of the call graph.
  354. ///
  355. /// This represents a Strongly Connected Component of the direct call graph
  356. /// -- ignoring indirect calls and function references. It stores this as
  357. /// a collection of call graph nodes. While the order of nodes in the SCC is
  358. /// stable, it is not any particular order.
  359. ///
  360. /// The SCCs are nested within a \c RefSCC, see below for details about that
  361. /// outer structure. SCCs do not support mutation of the call graph, that
  362. /// must be done through the containing \c RefSCC in order to fully reason
  363. /// about the ordering and connections of the graph.
  class LLVM_EXTERNAL_VISIBILITY SCC {
    friend class LazyCallGraph;
    friend class LazyCallGraph::Node;

    // The RefSCC this SCC nests inside; null only after clear().
    RefSCC *OuterRefSCC;

    // The nodes of this SCC. Order is stable but otherwise unspecified.
    SmallVector<Node *, 1> Nodes;

    template <typename NodeRangeT>
    SCC(RefSCC &OuterRefSCC, NodeRangeT &&Nodes)
        : OuterRefSCC(&OuterRefSCC), Nodes(std::forward<NodeRangeT>(Nodes)) {}

    // Put the SCC into a dead state. The object itself stays allocated so
    // that outstanding pointers remain usable for clearing cached data.
    void clear() {
      OuterRefSCC = nullptr;
      Nodes.clear();
    }

    /// Print a short description useful for debugging or logging.
    ///
    /// We print the function names in the SCC wrapped in '()'s and skipping
    /// the middle functions if there are a large number.
    //
    // Note: this is defined inline to dodge issues with GCC's interpretation
    // of enclosing namespaces for friend function declarations.
    friend raw_ostream &operator<<(raw_ostream &OS, const SCC &C) {
      OS << '(';
      int I = 0;
      for (LazyCallGraph::Node &N : C) {
        if (I > 0)
          OS << ", ";
        // Elide the inner elements if there are too many.
        if (I > 8) {
          OS << "..., " << *C.Nodes.back();
          break;
        }
        OS << N;
        ++I;
      }
      OS << ')';
      return OS;
    }

    /// Dump a short description of this SCC to stderr.
    void dump() const;

#if !defined(NDEBUG) || defined(EXPENSIVE_CHECKS)
    /// Verify invariants about the SCC.
    ///
    /// This will attempt to validate all of the basic invariants within an
    /// SCC, but not that it is a strongly connected component per se.
    /// Primarily useful while building and updating the graph to check that
    /// basic properties are in place rather than having inexplicable crashes
    /// later.
    void verify();
#endif

  public:
    using iterator = pointee_iterator<SmallVectorImpl<Node *>::const_iterator>;

    iterator begin() const { return Nodes.begin(); }
    iterator end() const { return Nodes.end(); }

    int size() const { return Nodes.size(); }

    RefSCC &getOuterRefSCC() const { return *OuterRefSCC; }

    /// Test if this SCC is a parent of \a C.
    ///
    /// Note that this is linear in the number of edges departing the current
    /// SCC.
    bool isParentOf(const SCC &C) const;

    /// Test if this SCC is an ancestor of \a C.
    ///
    /// Note that in the worst case this is linear in the number of edges
    /// departing the current SCC and every SCC in the entire graph reachable
    /// from this SCC. Thus this very well may walk every edge in the entire
    /// call graph! Do not call this in a tight loop!
    bool isAncestorOf(const SCC &C) const;

    /// Test if this SCC is a child of \a C.
    ///
    /// See the comments for \c isParentOf for detailed notes about the
    /// complexity of this routine.
    bool isChildOf(const SCC &C) const { return C.isParentOf(*this); }

    /// Test if this SCC is a descendant of \a C.
    ///
    /// See the comments for \c isParentOf for detailed notes about the
    /// complexity of this routine.
    bool isDescendantOf(const SCC &C) const { return C.isAncestorOf(*this); }

    /// Provide a short name by printing this SCC to a std::string.
    ///
    /// This copes with the fact that we don't have a name per se for an SCC
    /// while still making the use of this in debugging and logging useful.
    std::string getName() const {
      std::string Name;
      raw_string_ostream OS(Name);
      OS << *this;
      OS.flush();
      return Name;
    }
  };
  452. /// A RefSCC of the call graph.
  453. ///
  454. /// This models a Strongly Connected Component of function reference edges in
  455. /// the call graph. As opposed to actual SCCs, these can be used to scope
  456. /// subgraphs of the module which are independent from other subgraphs of the
  457. /// module because they do not reference it in any way. This is also the unit
  458. /// where we do mutation of the graph in order to restrict mutations to those
  459. /// which don't violate this independence.
  460. ///
  461. /// A RefSCC contains a DAG of actual SCCs. All the nodes within the RefSCC
  462. /// are necessarily within some actual SCC that nests within it. Since
  463. /// a direct call *is* a reference, there will always be at least one RefSCC
  464. /// around any SCC.
  465. ///
  466. /// Spurious ref edges, meaning ref edges that still exist in the call graph
  467. /// even though the corresponding IR reference no longer exists, are allowed.
  468. /// This is mostly to support argument promotion, which can modify a caller to
  469. /// no longer pass a function. The only place that needs to specially handle
  470. /// this is deleting a dead function/node, otherwise the dead ref edges are
  471. /// automatically removed when visiting the function/node no longer containing
  472. /// the ref edge.
  473. class RefSCC {
  474. friend class LazyCallGraph;
  475. friend class LazyCallGraph::Node;
  476. LazyCallGraph *G;
  477. /// A postorder list of the inner SCCs.
  478. SmallVector<SCC *, 4> SCCs;
  479. /// A map from SCC to index in the postorder list.
  480. SmallDenseMap<SCC *, int, 4> SCCIndices;
  481. /// Fast-path constructor. RefSCCs should instead be constructed by calling
  482. /// formRefSCCFast on the graph itself.
  483. RefSCC(LazyCallGraph &G);
    // Put the RefSCC into a dead, empty state so that stale SCC pointers and
    // indices are not left behind when it is merged away or destroyed.
    void clear() {
      SCCs.clear();
      SCCIndices.clear();
    }
    /// Print a short description useful for debugging or logging.
    ///
    /// We print the SCCs wrapped in '[]'s and skipping the middle SCCs if
    /// there are a large number.
    //
    // Note: this is defined inline to dodge issues with GCC's interpretation
    // of enclosing namespaces for friend function declarations.
    friend raw_ostream &operator<<(raw_ostream &OS, const RefSCC &RC) {
      OS << '[';
      int I = 0;
      for (LazyCallGraph::SCC &C : RC) {
        if (I > 0)
          OS << ", ";
        // Elide the inner elements if there are too many.
        if (I > 4) {
          OS << "..., " << *RC.SCCs.back();
          break;
        }
        OS << C;
        ++I;
      }
      OS << ']';
      return OS;
    }
  512. /// Dump a short description of this RefSCC to stderr.
  513. void dump() const;
  514. #if !defined(NDEBUG) || defined(EXPENSIVE_CHECKS)
  515. /// Verify invariants about the RefSCC and all its SCCs.
  516. ///
  517. /// This will attempt to validate all of the invariants *within* the
  518. /// RefSCC, but not that it is a strongly connected component of the larger
  519. /// graph. This makes it useful even when partially through an update.
  520. ///
  521. /// Invariants checked:
  522. /// - SCCs and their indices match.
  523. /// - The SCCs list is in fact in post-order.
  524. void verify();
  525. #endif
  526. public:
  527. using iterator = pointee_iterator<SmallVectorImpl<SCC *>::const_iterator>;
  528. using range = iterator_range<iterator>;
  529. using parent_iterator =
  530. pointee_iterator<SmallPtrSetImpl<RefSCC *>::const_iterator>;
  531. iterator begin() const { return SCCs.begin(); }
  532. iterator end() const { return SCCs.end(); }
  533. ssize_t size() const { return SCCs.size(); }
  534. SCC &operator[](int Idx) { return *SCCs[Idx]; }
  535. iterator find(SCC &C) const {
  536. return SCCs.begin() + SCCIndices.find(&C)->second;
  537. }
  538. /// Test if this RefSCC is a parent of \a RC.
  539. ///
  540. /// CAUTION: This method walks every edge in the \c RefSCC, it can be very
  541. /// expensive.
  542. bool isParentOf(const RefSCC &RC) const;
  543. /// Test if this RefSCC is an ancestor of \a RC.
  544. ///
  545. /// CAUTION: This method walks the directed graph of edges as far as
  546. /// necessary to find a possible path to the argument. In the worst case
  547. /// this may walk the entire graph and can be extremely expensive.
  548. bool isAncestorOf(const RefSCC &RC) const;
  549. /// Test if this RefSCC is a child of \a RC.
  550. ///
  551. /// CAUTION: This method walks every edge in the argument \c RefSCC, it can
  552. /// be very expensive.
  553. bool isChildOf(const RefSCC &RC) const { return RC.isParentOf(*this); }
  554. /// Test if this RefSCC is a descendant of \a RC.
  555. ///
  556. /// CAUTION: This method walks the directed graph of edges as far as
  557. /// necessary to find a possible path from the argument. In the worst case
  558. /// this may walk the entire graph and can be extremely expensive.
  559. bool isDescendantOf(const RefSCC &RC) const {
  560. return RC.isAncestorOf(*this);
  561. }
    /// Provide a short name by printing this RefSCC to a std::string.
    ///
    /// This copes with the fact that we don't have a name per se for an RefSCC
    /// while still making the use of this in debugging and logging useful.
    std::string getName() const {
      std::string Name;
      raw_string_ostream OS(Name);
      OS << *this;
      OS.flush();
      return Name;
    }
  573. ///@{
  574. /// \name Mutation API
  575. ///
  576. /// These methods provide the core API for updating the call graph in the
  577. /// presence of (potentially still in-flight) DFS-found RefSCCs and SCCs.
  578. ///
  579. /// Note that these methods sometimes have complex runtimes, so be careful
  580. /// how you call them.
  581. /// Make an existing internal ref edge into a call edge.
  582. ///
  583. /// This may form a larger cycle and thus collapse SCCs into TargetN's SCC.
  584. /// If that happens, the optional callback \p MergedCB will be invoked (if
  585. /// provided) on the SCCs being merged away prior to actually performing
  586. /// the merge. Note that this will never include the target SCC as that
  587. /// will be the SCC functions are merged into to resolve the cycle. Once
  588. /// this function returns, these merged SCCs are not in a valid state but
  589. /// the pointers will remain valid until destruction of the parent graph
  590. /// instance for the purpose of clearing cached information. This function
  591. /// also returns 'true' if a cycle was formed and some SCCs merged away as
  592. /// a convenience.
  593. ///
  594. /// After this operation, both SourceN's SCC and TargetN's SCC may move
  595. /// position within this RefSCC's postorder list. Any SCCs merged are
  596. /// merged into the TargetN's SCC in order to preserve reachability analyses
  597. /// which took place on that SCC.
  598. bool switchInternalEdgeToCall(
  599. Node &SourceN, Node &TargetN,
  600. function_ref<void(ArrayRef<SCC *> MergedSCCs)> MergeCB = {});
  601. /// Make an existing internal call edge between separate SCCs into a ref
  602. /// edge.
  603. ///
/// If SourceN and TargetN are in separate SCCs within this RefSCC, changing
  605. /// the call edge between them to a ref edge is a trivial operation that
  606. /// does not require any structural changes to the call graph.
  607. void switchTrivialInternalEdgeToRef(Node &SourceN, Node &TargetN);
  608. /// Make an existing internal call edge within a single SCC into a ref
  609. /// edge.
  610. ///
  611. /// Since SourceN and TargetN are part of a single SCC, this SCC may be
  612. /// split up due to breaking a cycle in the call edges that formed it. If
  613. /// that happens, then this routine will insert new SCCs into the postorder
  614. /// list *before* the SCC of TargetN (previously the SCC of both). This
  615. /// preserves postorder as the TargetN can reach all of the other nodes by
  616. /// definition of previously being in a single SCC formed by the cycle from
  617. /// SourceN to TargetN.
  618. ///
/// The newly added SCCs are added *immediately* and contiguously
/// prior to the TargetN SCC, and the returned range covers exactly these new
/// SCCs in the RefSCC's postorder sequence. You can directly iterate the
/// returned range to observe all of the new SCCs in postorder.
  623. ///
  624. /// Note that if SourceN and TargetN are in separate SCCs, the simpler
  625. /// routine `switchTrivialInternalEdgeToRef` should be used instead.
  626. iterator_range<iterator> switchInternalEdgeToRef(Node &SourceN,
  627. Node &TargetN);
  628. /// Make an existing outgoing ref edge into a call edge.
  629. ///
  630. /// Note that this is trivial as there are no cyclic impacts and there
  631. /// remains a reference edge.
  632. void switchOutgoingEdgeToCall(Node &SourceN, Node &TargetN);
  633. /// Make an existing outgoing call edge into a ref edge.
  634. ///
  635. /// This is trivial as there are no cyclic impacts and there remains
  636. /// a reference edge.
  637. void switchOutgoingEdgeToRef(Node &SourceN, Node &TargetN);
  638. /// Insert a ref edge from one node in this RefSCC to another in this
  639. /// RefSCC.
  640. ///
  641. /// This is always a trivial operation as it doesn't change any part of the
  642. /// graph structure besides connecting the two nodes.
  643. ///
  644. /// Note that we don't support directly inserting internal *call* edges
  645. /// because that could change the graph structure and requires returning
  646. /// information about what became invalid. As a consequence, the pattern
  647. /// should be to first insert the necessary ref edge, and then to switch it
  648. /// to a call edge if needed and handle any invalidation that results. See
  649. /// the \c switchInternalEdgeToCall routine for details.
  650. void insertInternalRefEdge(Node &SourceN, Node &TargetN);
  651. /// Insert an edge whose parent is in this RefSCC and child is in some
  652. /// child RefSCC.
  653. ///
  654. /// There must be an existing path from the \p SourceN to the \p TargetN.
  655. /// This operation is inexpensive and does not change the set of SCCs and
  656. /// RefSCCs in the graph.
  657. void insertOutgoingEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);
  658. /// Insert an edge whose source is in a descendant RefSCC and target is in
  659. /// this RefSCC.
  660. ///
  661. /// There must be an existing path from the target to the source in this
  662. /// case.
  663. ///
/// NB! This has the potential to be a very expensive function. It
  665. /// inherently forms a cycle in the prior RefSCC DAG and we have to merge
  666. /// RefSCCs to resolve that cycle. But finding all of the RefSCCs which
  667. /// participate in the cycle can in the worst case require traversing every
  668. /// RefSCC in the graph. Every attempt is made to avoid that, but passes
  669. /// must still exercise caution calling this routine repeatedly.
  670. ///
  671. /// Also note that this can only insert ref edges. In order to insert
  672. /// a call edge, first insert a ref edge and then switch it to a call edge.
  673. /// These are intentionally kept as separate interfaces because each step
  674. /// of the operation invalidates a different set of data structures.
  675. ///
/// This returns all the RefSCCs which were merged into this RefSCC
  677. /// (the target's). This allows callers to invalidate any cached
  678. /// information.
  679. ///
  680. /// FIXME: We could possibly optimize this quite a bit for cases where the
  681. /// caller and callee are very nearby in the graph. See comments in the
  682. /// implementation for details, but that use case might impact users.
  683. SmallVector<RefSCC *, 1> insertIncomingRefEdge(Node &SourceN,
  684. Node &TargetN);
  685. /// Remove an edge whose source is in this RefSCC and target is *not*.
  686. ///
  687. /// This removes an inter-RefSCC edge. All inter-RefSCC edges originating
  688. /// from this SCC have been fully explored by any in-flight DFS graph
  689. /// formation, so this is always safe to call once you have the source
  690. /// RefSCC.
  691. ///
  692. /// This operation does not change the cyclic structure of the graph and so
  693. /// is very inexpensive. It may change the connectivity graph of the SCCs
  694. /// though, so be careful calling this while iterating over them.
  695. void removeOutgoingEdge(Node &SourceN, Node &TargetN);
  696. /// Remove a list of ref edges which are entirely within this RefSCC.
  697. ///
  698. /// Both the \a SourceN and all of the \a TargetNs must be within this
  699. /// RefSCC. Removing these edges may break cycles that form this RefSCC and
  700. /// thus this operation may change the RefSCC graph significantly. In
  701. /// particular, this operation will re-form new RefSCCs based on the
  702. /// remaining connectivity of the graph. The following invariants are
  703. /// guaranteed to hold after calling this method:
  704. ///
  705. /// 1) If a ref-cycle remains after removal, it leaves this RefSCC intact
  706. /// and in the graph. No new RefSCCs are built.
  707. /// 2) Otherwise, this RefSCC will be dead after this call and no longer in
  708. /// the graph or the postorder traversal of the call graph. Any iterator
  709. /// pointing at this RefSCC will become invalid.
  710. /// 3) All newly formed RefSCCs will be returned and the order of the
  711. /// RefSCCs returned will be a valid postorder traversal of the new
  712. /// RefSCCs.
  713. /// 4) No RefSCC other than this RefSCC has its member set changed (this is
  714. /// inherent in the definition of removing such an edge).
  715. ///
  716. /// These invariants are very important to ensure that we can build
  717. /// optimization pipelines on top of the CGSCC pass manager which
  718. /// intelligently update the RefSCC graph without invalidating other parts
  719. /// of the RefSCC graph.
  720. ///
  721. /// Note that we provide no routine to remove a *call* edge. Instead, you
  722. /// must first switch it to a ref edge using \c switchInternalEdgeToRef.
  723. /// This split API is intentional as each of these two steps can invalidate
  724. /// a different aspect of the graph structure and needs to have the
  725. /// invalidation handled independently.
  726. ///
  727. /// The runtime complexity of this method is, in the worst case, O(V+E)
  728. /// where V is the number of nodes in this RefSCC and E is the number of
  729. /// edges leaving the nodes in this RefSCC. Note that E includes both edges
  730. /// within this RefSCC and edges from this RefSCC to child RefSCCs. Some
  731. /// effort has been made to minimize the overhead of common cases such as
  732. /// self-edges and edge removals which result in a spanning tree with no
  733. /// more cycles.
  734. [[nodiscard]] SmallVector<RefSCC *, 1>
  735. removeInternalRefEdge(Node &SourceN, ArrayRef<Node *> TargetNs);
  736. /// A convenience wrapper around the above to handle trivial cases of
  737. /// inserting a new call edge.
  738. ///
  739. /// This is trivial whenever the target is in the same SCC as the source or
  740. /// the edge is an outgoing edge to some descendant SCC. In these cases
  741. /// there is no change to the cyclic structure of SCCs or RefSCCs.
  742. ///
  743. /// To further make calling this convenient, it also handles inserting
  744. /// already existing edges.
  745. void insertTrivialCallEdge(Node &SourceN, Node &TargetN);
  746. /// A convenience wrapper around the above to handle trivial cases of
  747. /// inserting a new ref edge.
  748. ///
  749. /// This is trivial whenever the target is in the same RefSCC as the source
  750. /// or the edge is an outgoing edge to some descendant RefSCC. In these
  751. /// cases there is no change to the cyclic structure of the RefSCCs.
  752. ///
  753. /// To further make calling this convenient, it also handles inserting
  754. /// already existing edges.
  755. void insertTrivialRefEdge(Node &SourceN, Node &TargetN);
  756. /// Directly replace a node's function with a new function.
  757. ///
  758. /// This should be used when moving the body and users of a function to
  759. /// a new formal function object but not otherwise changing the call graph
  760. /// structure in any way.
  761. ///
  762. /// It requires that the old function in the provided node have zero uses
  763. /// and the new function must have calls and references to it establishing
  764. /// an equivalent graph.
  765. void replaceNodeFunction(Node &N, Function &NewF);
  766. ///@}
  767. };
  768. /// A post-order depth-first RefSCC iterator over the call graph.
  769. ///
  770. /// This iterator walks the cached post-order sequence of RefSCCs. However,
  771. /// it trades stability for flexibility. It is restricted to a forward
  772. /// iterator but will survive mutations which insert new RefSCCs and continue
  773. /// to point to the same RefSCC even if it moves in the post-order sequence.
  class postorder_ref_scc_iterator
      : public iterator_facade_base<postorder_ref_scc_iterator,
                                    std::forward_iterator_tag, RefSCC> {
    // The graph (and Node) need access to the private constructors below to
    // build begin/end iterators.
    friend class LazyCallGraph;
    friend class LazyCallGraph::Node;

    /// Nonce type to select the constructor for the end iterator.
    struct IsAtEndT {};

    // Graph being walked; never null for a constructed iterator.
    LazyCallGraph *G;
    // Current RefSCC in the post-order sequence; null designates the end
    // iterator.
    RefSCC *RC = nullptr;

    /// Build the begin iterator for a node.
    postorder_ref_scc_iterator(LazyCallGraph &G) : G(&G), RC(getRC(G, 0)) {
      // Skip over any leading empty RefSCCs so dereferencing is always valid.
      incrementUntilNonEmptyRefSCC();
    }

    /// Build the end iterator for a node. This is selected purely by overload.
    postorder_ref_scc_iterator(LazyCallGraph &G, IsAtEndT /*Nonce*/) : G(&G) {}

    /// Get the post-order RefSCC at the given index of the postorder walk,
    /// populating it if necessary.
    static RefSCC *getRC(LazyCallGraph &G, int Index) {
      if (Index == (int)G.PostOrderRefSCCs.size())
        // We're at the end.
        return nullptr;

      return G.PostOrderRefSCCs[Index];
    }

    // Keep incrementing until RC is non-empty (or null). Empty RefSCCs can
    // appear in the sequence after graph mutations; they are skipped rather
    // than exposed to users of the iterator.
    void incrementUntilNonEmptyRefSCC() {
      while (RC && RC->size() == 0)
        increment();
    }

    // Advance to the RefSCC after RC in the post-order sequence. Looks up
    // RC's current index on every step so the iterator survives RefSCCs
    // moving within the sequence.
    void increment() {
      assert(RC && "Cannot increment the end iterator!");
      RC = getRC(*G, G->RefSCCIndices.find(RC)->second + 1);
    }

  public:
    // Two iterators are equal when they walk the same graph and sit on the
    // same RefSCC (both null for end).
    bool operator==(const postorder_ref_scc_iterator &Arg) const {
      return G == Arg.G && RC == Arg.RC;
    }

    reference operator*() const { return *RC; }

    using iterator_facade_base::operator++;
    postorder_ref_scc_iterator &operator++() {
      increment();
      // Maintain the invariant that we never rest on an empty RefSCC.
      incrementUntilNonEmptyRefSCC();
      return *this;
    }
  };
  818. /// Construct a graph for the given module.
  819. ///
  820. /// This sets up the graph and computes all of the entry points of the graph.
  821. /// No function definitions are scanned until their nodes in the graph are
  822. /// requested during traversal.
  823. LazyCallGraph(Module &M,
  824. function_ref<TargetLibraryInfo &(Function &)> GetTLI);
  825. LazyCallGraph(LazyCallGraph &&G);
  826. LazyCallGraph &operator=(LazyCallGraph &&RHS);
  827. bool invalidate(Module &, const PreservedAnalyses &PA,
  828. ModuleAnalysisManager::Invalidator &);
  /// Iterate over the entry edges into the call graph — the edges from
  /// "external" sources that escape at module scope.
  EdgeSequence::iterator begin() { return EntryEdges.begin(); }
  EdgeSequence::iterator end() { return EntryEdges.end(); }
  831. void buildRefSCCs();
  832. postorder_ref_scc_iterator postorder_ref_scc_begin() {
  833. if (!EntryEdges.empty())
  834. assert(!PostOrderRefSCCs.empty() &&
  835. "Must form RefSCCs before iterating them!");
  836. return postorder_ref_scc_iterator(*this);
  837. }
  838. postorder_ref_scc_iterator postorder_ref_scc_end() {
  839. if (!EntryEdges.empty())
  840. assert(!PostOrderRefSCCs.empty() &&
  841. "Must form RefSCCs before iterating them!");
  842. return postorder_ref_scc_iterator(*this,
  843. postorder_ref_scc_iterator::IsAtEndT());
  844. }
  845. iterator_range<postorder_ref_scc_iterator> postorder_ref_sccs() {
  846. return make_range(postorder_ref_scc_begin(), postorder_ref_scc_end());
  847. }
  /// Lookup a function in the graph which has already been scanned and added.
  ///
  /// Returns null if \p F has not (yet) been given a node in this graph; this
  /// never populates the graph.
  Node *lookup(const Function &F) const { return NodeMap.lookup(&F); }
  /// Lookup a function's SCC in the graph.
  ///
  /// \returns null if the function hasn't been assigned an SCC via the RefSCC
  /// iterator walk. (DenseMap::lookup yields a default-constructed — here
  /// null — value for missing keys.)
  SCC *lookupSCC(Node &N) const { return SCCMap.lookup(&N); }
  855. /// Lookup a function's RefSCC in the graph.
  856. ///
  857. /// \returns null if the function hasn't been assigned a RefSCC via the
  858. /// RefSCC iterator walk.
  859. RefSCC *lookupRefSCC(Node &N) const {
  860. if (SCC *C = lookupSCC(N))
  861. return &C->getOuterRefSCC();
  862. return nullptr;
  863. }
  864. /// Get a graph node for a given function, scanning it to populate the graph
  865. /// data as necessary.
  866. Node &get(Function &F) {
  867. Node *&N = NodeMap[&F];
  868. if (N)
  869. return *N;
  870. return insertInto(F, N);
  871. }
  /// Get the sequence of known and defined library functions.
  ///
  /// These functions, because they are known to LLVM, can have calls
  /// introduced out of thin air from arbitrary IR.
  ///
  /// NOTE(review): the returned ArrayRef is a view into graph-owned storage;
  /// presumably it must not outlive this graph — confirm before caching it.
  ArrayRef<Function *> getLibFunctions() const {
    return LibFunctions.getArrayRef();
  }
  /// Test whether a function is a known and defined library function tracked by
  /// the call graph.
  ///
  /// Because these functions are known to LLVM they are specially modeled in
  /// the call graph and even when all IR-level references have been removed
  /// remain active and reachable.
  // Pure membership test; count() on the set-like container is 0 or 1.
  bool isLibFunction(Function &F) const { return LibFunctions.count(&F); }
  886. ///@{
  887. /// \name Pre-SCC Mutation API
  888. ///
  889. /// These methods are only valid to call prior to forming any SCCs for this
  890. /// call graph. They can be used to update the core node-graph during
  891. /// a node-based inorder traversal that precedes any SCC-based traversal.
  892. ///
  893. /// Once you begin manipulating a call graph's SCCs, most mutation of the
  894. /// graph must be performed via a RefSCC method. There are some exceptions
  895. /// below.
  896. /// Update the call graph after inserting a new edge.
  897. void insertEdge(Node &SourceN, Node &TargetN, Edge::Kind EK);
  898. /// Update the call graph after inserting a new edge.
  899. void insertEdge(Function &Source, Function &Target, Edge::Kind EK) {
  900. return insertEdge(get(Source), get(Target), EK);
  901. }
  902. /// Update the call graph after deleting an edge.
  903. void removeEdge(Node &SourceN, Node &TargetN);
  904. /// Update the call graph after deleting an edge.
  905. void removeEdge(Function &Source, Function &Target) {
  906. return removeEdge(get(Source), get(Target));
  907. }
  908. ///@}
  909. ///@{
  910. /// \name General Mutation API
  911. ///
  912. /// There are a very limited set of mutations allowed on the graph as a whole
  913. /// once SCCs have started to be formed. These routines have strict contracts
  914. /// but may be called at any point.
  915. /// Remove a dead function from the call graph (typically to delete it).
  916. ///
  917. /// Note that the function must have an empty use list, and the call graph
  918. /// must be up-to-date prior to calling this. That means it is by itself in
  919. /// a maximal SCC which is by itself in a maximal RefSCC, etc. No structural
  920. /// changes result from calling this routine other than potentially removing
  921. /// entry points into the call graph.
  922. ///
  923. /// If SCC formation has begun, this function must not be part of the current
  924. /// DFS in order to call this safely. Typically, the function will have been
  925. /// fully visited by the DFS prior to calling this routine.
  926. void removeDeadFunction(Function &F);
  927. /// Add a new function split/outlined from an existing function.
  928. ///
  929. /// The new function may only reference other functions that the original
  930. /// function did.
  931. ///
  932. /// The original function must reference (either directly or indirectly) the
  933. /// new function.
  934. ///
  935. /// The new function may also reference the original function.
  936. /// It may end up in a parent SCC in the case that the original function's
  937. /// edge to the new function is a ref edge, and the edge back is a call edge.
  938. void addSplitFunction(Function &OriginalFunction, Function &NewFunction);
  939. /// Add new ref-recursive functions split/outlined from an existing function.
  940. ///
  941. /// The new functions may only reference other functions that the original
  942. /// function did. The new functions may reference (not call) the original
  943. /// function.
  944. ///
  945. /// The original function must reference (not call) all new functions.
  946. /// All new functions must reference (not call) each other.
  947. void addSplitRefRecursiveFunctions(Function &OriginalFunction,
  948. ArrayRef<Function *> NewFunctions);
  949. ///@}
  950. ///@{
  951. /// \name Static helpers for code doing updates to the call graph.
  952. ///
  953. /// These helpers are used to implement parts of the call graph but are also
  954. /// useful to code doing updates or otherwise wanting to walk the IR in the
  955. /// same patterns as when we build the call graph.
  956. /// Recursively visits the defined functions whose address is reachable from
  957. /// every constant in the \p Worklist.
  958. ///
  959. /// Doesn't recurse through any constants already in the \p Visited set, and
  960. /// updates that set with every constant visited.
  961. ///
  962. /// For each defined function, calls \p Callback with that function.
  963. static void visitReferences(SmallVectorImpl<Constant *> &Worklist,
  964. SmallPtrSetImpl<Constant *> &Visited,
  965. function_ref<void(Function &)> Callback);
  966. ///@}
  967. private:
  968. using node_stack_iterator = SmallVectorImpl<Node *>::reverse_iterator;
  969. using node_stack_range = iterator_range<node_stack_iterator>;
  970. /// Allocator that holds all the call graph nodes.
  971. SpecificBumpPtrAllocator<Node> BPA;
  972. /// Maps function->node for fast lookup.
  973. DenseMap<const Function *, Node *> NodeMap;
  974. /// The entry edges into the graph.
  975. ///
  976. /// These edges are from "external" sources. Put another way, they
  977. /// escape at the module scope.
  978. EdgeSequence EntryEdges;
  979. /// Allocator that holds all the call graph SCCs.
  980. SpecificBumpPtrAllocator<SCC> SCCBPA;
  981. /// Maps Function -> SCC for fast lookup.
  982. DenseMap<Node *, SCC *> SCCMap;
  983. /// Allocator that holds all the call graph RefSCCs.
  984. SpecificBumpPtrAllocator<RefSCC> RefSCCBPA;
  985. /// The post-order sequence of RefSCCs.
  986. ///
  987. /// This list is lazily formed the first time we walk the graph.
  988. SmallVector<RefSCC *, 16> PostOrderRefSCCs;
  989. /// A map from RefSCC to the index for it in the postorder sequence of
  990. /// RefSCCs.
  991. DenseMap<RefSCC *, int> RefSCCIndices;
  992. /// Defined functions that are also known library functions which the
  993. /// optimizer can reason about and therefore might introduce calls to out of
  994. /// thin air.
  995. SmallSetVector<Function *, 4> LibFunctions;
  996. /// Helper to insert a new function, with an already looked-up entry in
  997. /// the NodeMap.
  998. Node &insertInto(Function &F, Node *&MappedN);
  999. /// Helper to initialize a new node created outside of creating SCCs and add
  1000. /// it to the NodeMap if necessary. For example, useful when a function is
  1001. /// split.
  1002. Node &initNode(Function &F);
  1003. /// Helper to update pointers back to the graph object during moves.
  1004. void updateGraphPtrs();
  1005. /// Allocates an SCC and constructs it using the graph allocator.
  1006. ///
  1007. /// The arguments are forwarded to the constructor.
  1008. template <typename... Ts> SCC *createSCC(Ts &&...Args) {
  1009. return new (SCCBPA.Allocate()) SCC(std::forward<Ts>(Args)...);
  1010. }
  1011. /// Allocates a RefSCC and constructs it using the graph allocator.
  1012. ///
  1013. /// The arguments are forwarded to the constructor.
  1014. template <typename... Ts> RefSCC *createRefSCC(Ts &&...Args) {
  1015. return new (RefSCCBPA.Allocate()) RefSCC(std::forward<Ts>(Args)...);
  1016. }
  1017. /// Common logic for building SCCs from a sequence of roots.
  1018. ///
  1019. /// This is a very generic implementation of the depth-first walk and SCC
  1020. /// formation algorithm. It uses a generic sequence of roots and generic
  1021. /// callbacks for each step. This is designed to be used to implement both
  1022. /// the RefSCC formation and SCC formation with shared logic.
  1023. ///
  1024. /// Currently this is a relatively naive implementation of Tarjan's DFS
  1025. /// algorithm to form the SCCs.
  1026. ///
  1027. /// FIXME: We should consider newer variants such as Nuutila.
  1028. template <typename RootsT, typename GetBeginT, typename GetEndT,
  1029. typename GetNodeT, typename FormSCCCallbackT>
  1030. static void buildGenericSCCs(RootsT &&Roots, GetBeginT &&GetBegin,
  1031. GetEndT &&GetEnd, GetNodeT &&GetNode,
  1032. FormSCCCallbackT &&FormSCC);
  1033. /// Build the SCCs for a RefSCC out of a list of nodes.
  1034. void buildSCCs(RefSCC &RC, node_stack_range Nodes);
  1035. /// Get the index of a RefSCC within the postorder traversal.
  1036. ///
  1037. /// Requires that this RefSCC is a valid one in the (perhaps partial)
  1038. /// postorder traversed part of the graph.
  1039. int getRefSCCIndex(RefSCC &RC) {
  1040. auto IndexIt = RefSCCIndices.find(&RC);
  1041. assert(IndexIt != RefSCCIndices.end() && "RefSCC doesn't have an index!");
  1042. assert(PostOrderRefSCCs[IndexIt->second] == &RC &&
  1043. "Index does not point back at RC!");
  1044. return IndexIt->second;
  1045. }
  1046. };
  1047. inline LazyCallGraph::Edge::Edge() = default;
  1048. inline LazyCallGraph::Edge::Edge(Node &N, Kind K) : Value(&N, K) {}
  1049. inline LazyCallGraph::Edge::operator bool() const {
  1050. return Value.getPointer() && !Value.getPointer()->isDead();
  1051. }
  1052. inline LazyCallGraph::Edge::Kind LazyCallGraph::Edge::getKind() const {
  1053. assert(*this && "Queried a null edge!");
  1054. return Value.getInt();
  1055. }
  1056. inline bool LazyCallGraph::Edge::isCall() const {
  1057. assert(*this && "Queried a null edge!");
  1058. return getKind() == Call;
  1059. }
  1060. inline LazyCallGraph::Node &LazyCallGraph::Edge::getNode() const {
  1061. assert(*this && "Queried a null edge!");
  1062. return *Value.getPointer();
  1063. }
  1064. inline Function &LazyCallGraph::Edge::getFunction() const {
  1065. assert(*this && "Queried a null edge!");
  1066. return getNode().getFunction();
  1067. }
// Provide GraphTraits specializations for call graphs.
template <> struct GraphTraits<LazyCallGraph::Node *> {
  using NodeRef = LazyCallGraph::Node *;
  using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  // Children are the targets of N's edges; `(*N)->` goes through the node's
  // operator-> to reach its edge sequence — presumably requiring the node to
  // already be populated (confirm against Node's definition).
  static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
};
// Whole-graph traits; note that the entry node is still a Node pointer, and
// child iteration is identical to the per-node specialization.
template <> struct GraphTraits<LazyCallGraph *> {
  using NodeRef = LazyCallGraph::Node *;
  using ChildIteratorType = LazyCallGraph::EdgeSequence::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }
  static ChildIteratorType child_begin(NodeRef N) { return (*N)->begin(); }
  static ChildIteratorType child_end(NodeRef N) { return (*N)->end(); }
};
  1083. /// An analysis pass which computes the call graph for a module.
  1084. class LazyCallGraphAnalysis : public AnalysisInfoMixin<LazyCallGraphAnalysis> {
  1085. friend AnalysisInfoMixin<LazyCallGraphAnalysis>;
  1086. static AnalysisKey Key;
  1087. public:
  1088. /// Inform generic clients of the result type.
  1089. using Result = LazyCallGraph;
  1090. /// Compute the \c LazyCallGraph for the module \c M.
  1091. ///
  1092. /// This just builds the set of entry points to the call graph. The rest is
  1093. /// built lazily as it is walked.
  1094. LazyCallGraph run(Module &M, ModuleAnalysisManager &AM) {
  1095. FunctionAnalysisManager &FAM =
  1096. AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  1097. auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
  1098. return FAM.getResult<TargetLibraryAnalysis>(F);
  1099. };
  1100. return LazyCallGraph(M, GetTLI);
  1101. }
  1102. };
/// A pass which prints the call graph to a \c raw_ostream.
///
/// This is primarily useful for testing the analysis.
class LazyCallGraphPrinterPass
    : public PassInfoMixin<LazyCallGraphPrinterPass> {
  // Destination stream; the pass only borrows it, so the stream must outlive
  // the pass.
  raw_ostream &OS;

public:
  explicit LazyCallGraphPrinterPass(raw_ostream &OS);

  PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
};
/// A pass which prints the call graph as a DOT file to a \c raw_ostream.
///
/// This is primarily useful for visualization purposes.
class LazyCallGraphDOTPrinterPass
    : public PassInfoMixin<LazyCallGraphDOTPrinterPass> {
  // Destination stream; the pass only borrows it, so the stream must outlive
  // the pass.
  raw_ostream &OS;

public:
  explicit LazyCallGraphDOTPrinterPass(raw_ostream &OS);

  PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
};
  1123. } // end namespace llvm
  1124. #endif // LLVM_ANALYSIS_LAZYCALLGRAPH_H
  1125. #ifdef __GNUC__
  1126. #pragma GCC diagnostic pop
  1127. #endif