// LiveInterval.h (LLVM CodeGen header; rendered line-number gutter removed)
  1. #pragma once
  2. #ifdef __GNUC__
  3. #pragma GCC diagnostic push
  4. #pragma GCC diagnostic ignored "-Wunused-parameter"
  5. #endif
  6. //===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===//
  7. //
  8. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  9. // See https://llvm.org/LICENSE.txt for license information.
  10. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  11. //
  12. //===----------------------------------------------------------------------===//
  13. //
  14. // This file implements the LiveRange and LiveInterval classes. Given some
  15. // numbering of each the machine instructions an interval [i, j) is said to be a
  16. // live range for register v if there is no instruction with number j' >= j
  17. // such that v is live at j' and there is no instruction with number i' < i such
  18. // that v is live at i'. In this implementation ranges can have holes,
  19. // i.e. a range might look like [1,20), [50,65), [1000,1001). Each
  20. // individual segment is represented as an instance of LiveRange::Segment,
  21. // and the whole range is represented as an instance of LiveRange.
  22. //
  23. //===----------------------------------------------------------------------===//
  24. #ifndef LLVM_CODEGEN_LIVEINTERVAL_H
  25. #define LLVM_CODEGEN_LIVEINTERVAL_H
  26. #include "llvm/ADT/ArrayRef.h"
  27. #include "llvm/ADT/IntEqClasses.h"
  28. #include "llvm/ADT/STLExtras.h"
  29. #include "llvm/ADT/SmallVector.h"
  30. #include "llvm/ADT/iterator_range.h"
  31. #include "llvm/CodeGen/Register.h"
  32. #include "llvm/CodeGen/SlotIndexes.h"
  33. #include "llvm/MC/LaneBitmask.h"
  34. #include "llvm/Support/Allocator.h"
  35. #include "llvm/Support/MathExtras.h"
  36. #include <algorithm>
  37. #include <cassert>
  38. #include <cstddef>
  39. #include <functional>
  40. #include <memory>
  41. #include <set>
  42. #include <tuple>
  43. #include <utility>
  44. namespace llvm {
  45. class CoalescerPair;
  46. class LiveIntervals;
  47. class MachineRegisterInfo;
  48. class raw_ostream;
  49. /// VNInfo - Value Number Information.
  50. /// This class holds information about a machine level values, including
  51. /// definition and use points.
  52. ///
  53. class VNInfo {
  54. public:
  55. using Allocator = BumpPtrAllocator;
  56. /// The ID number of this value.
  57. unsigned id;
  58. /// The index of the defining instruction.
  59. SlotIndex def;
  60. /// VNInfo constructor.
  61. VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}
  62. /// VNInfo constructor, copies values from orig, except for the value number.
  63. VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}
  64. /// Copy from the parameter into this VNInfo.
  65. void copyFrom(VNInfo &src) {
  66. def = src.def;
  67. }
  68. /// Returns true if this value is defined by a PHI instruction (or was,
  69. /// PHI instructions may have been eliminated).
  70. /// PHI-defs begin at a block boundary, all other defs begin at register or
  71. /// EC slots.
  72. bool isPHIDef() const { return def.isBlock(); }
  73. /// Returns true if this value is unused.
  74. bool isUnused() const { return !def.isValid(); }
  75. /// Mark this value as unused.
  76. void markUnused() { def = SlotIndex(); }
  77. };
  78. /// Result of a LiveRange query. This class hides the implementation details
  79. /// of live ranges, and it should be used as the primary interface for
  80. /// examining live ranges around instructions.
  81. class LiveQueryResult {
  82. VNInfo *const EarlyVal;
  83. VNInfo *const LateVal;
  84. const SlotIndex EndPoint;
  85. const bool Kill;
  86. public:
  87. LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
  88. bool Kill)
  89. : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill)
  90. {}
  91. /// Return the value that is live-in to the instruction. This is the value
  92. /// that will be read by the instruction's use operands. Return NULL if no
  93. /// value is live-in.
  94. VNInfo *valueIn() const {
  95. return EarlyVal;
  96. }
  97. /// Return true if the live-in value is killed by this instruction. This
  98. /// means that either the live range ends at the instruction, or it changes
  99. /// value.
  100. bool isKill() const {
  101. return Kill;
  102. }
  103. /// Return true if this instruction has a dead def.
  104. bool isDeadDef() const {
  105. return EndPoint.isDead();
  106. }
  107. /// Return the value leaving the instruction, if any. This can be a
  108. /// live-through value, or a live def. A dead def returns NULL.
  109. VNInfo *valueOut() const {
  110. return isDeadDef() ? nullptr : LateVal;
  111. }
  112. /// Returns the value alive at the end of the instruction, if any. This can
  113. /// be a live-through value, a live def or a dead def.
  114. VNInfo *valueOutOrDead() const {
  115. return LateVal;
  116. }
  117. /// Return the value defined by this instruction, if any. This includes
  118. /// dead defs, it is the value created by the instruction's def operands.
  119. VNInfo *valueDefined() const {
  120. return EarlyVal == LateVal ? nullptr : LateVal;
  121. }
  122. /// Return the end point of the last live range segment to interact with
  123. /// the instruction, if any.
  124. ///
  125. /// The end point is an invalid SlotIndex only if the live range doesn't
  126. /// intersect the instruction at all.
  127. ///
  128. /// The end point may be at or past the end of the instruction's basic
  129. /// block. That means the value was live out of the block.
  130. SlotIndex endPoint() const {
  131. return EndPoint;
  132. }
  133. };
/// This class represents the liveness of a register, stack slot, etc.
/// It manages an ordered list of Segment objects.
/// The Segments are organized in a static single assignment form: At places
/// where a new value is defined or different values reach a CFG join a new
/// segment with a new value number is used.
class LiveRange {
public:
  /// This represents a simple continuous liveness interval for a value.
  /// The start point is inclusive, the end point exclusive. These intervals
  /// are rendered as [start,end).
  struct Segment {
    SlotIndex start;          // Start point of the interval (inclusive)
    SlotIndex end;            // End point of the interval (exclusive)
    VNInfo *valno = nullptr;  // identifier for the value contained in this
                              // segment.

    Segment() = default;

    Segment(SlotIndex S, SlotIndex E, VNInfo *V)
        : start(S), end(E), valno(V) {
      assert(S < E && "Cannot create empty or backwards segment");
    }

    /// Return true if the index is covered by this segment.
    bool contains(SlotIndex I) const {
      return start <= I && I < end;
    }

    /// Return true if the given interval, [S, E), is covered by this segment.
    bool containsInterval(SlotIndex S, SlotIndex E) const {
      assert((S < E) && "Backwards interval?");
      return (start <= S && S < end) && (start < E && E <= end);
    }

    // Segments are ordered lexicographically on (start, end) so the segment
    // list can be searched with the binary-search based find() below.
    bool operator<(const Segment &Other) const {
      return std::tie(start, end) < std::tie(Other.start, Other.end);
    }

    // NOTE: Equality compares only the [start,end) positions; the value
    // number (valno) is deliberately ignored.
    bool operator==(const Segment &Other) const {
      return start == Other.start && end == Other.end;
    }

    bool operator!=(const Segment &Other) const {
      return !(*this == Other);
    }

    void dump() const;
  };

  using Segments = SmallVector<Segment, 2>;
  using VNInfoList = SmallVector<VNInfo *, 2>;

  Segments segments;   // the liveness segments
  VNInfoList valnos;   // value#'s

  // The segment set is used temporarily to accelerate initial computation
  // of live ranges of physical registers in computeRegUnitRange.
  // After that the set is flushed to the segment vector and deleted.
  using SegmentSet = std::set<Segment>;
  std::unique_ptr<SegmentSet> segmentSet;

  using iterator = Segments::iterator;
  using const_iterator = Segments::const_iterator;

  // Iteration over the segment list.
  iterator begin() { return segments.begin(); }
  iterator end() { return segments.end(); }

  const_iterator begin() const { return segments.begin(); }
  const_iterator end() const { return segments.end(); }

  using vni_iterator = VNInfoList::iterator;
  using const_vni_iterator = VNInfoList::const_iterator;

  // Iteration over the value numbers.
  vni_iterator vni_begin() { return valnos.begin(); }
  vni_iterator vni_end() { return valnos.end(); }

  const_vni_iterator vni_begin() const { return valnos.begin(); }
  const_vni_iterator vni_end() const { return valnos.end(); }

  /// Constructs a new LiveRange object.  When \p UseSegmentSet is true, a
  /// std::set is used to stage segments during initial construction (see the
  /// segmentSet comment above); it must later be flushed via
  /// flushSegmentSet().
  LiveRange(bool UseSegmentSet = false)
      : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
                                 : nullptr) {}

  /// Constructs a new LiveRange object by copying segments and valnos from
  /// another LiveRange.
  LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
    assert(Other.segmentSet == nullptr &&
           "Copying of LiveRanges with active SegmentSets is not supported");
    assign(Other, Allocator);
  }

  /// Copies values numbers and live segments from \p Other into this range.
  void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
    if (this == &Other)
      return;

    assert(Other.segmentSet == nullptr &&
           "Copying of LiveRanges with active SegmentSets is not supported");
    // Duplicate valnos.
    for (const VNInfo *VNI : Other.valnos)
      createValueCopy(VNI, Allocator);
    // Now we can copy segments and remap their valnos.
    for (const Segment &S : Other.segments)
      segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
  }

  /// advanceTo - Advance the specified iterator to point to the Segment
  /// containing the specified position, or end() if the position is past the
  /// end of the range.  If no Segment contains this position, but the
  /// position is in a hole, this method returns an iterator pointing to the
  /// Segment immediately after the hole.
  iterator advanceTo(iterator I, SlotIndex Pos) {
    assert(I != end());
    if (Pos >= endIndex())
      return end();
    while (I->end <= Pos) ++I;
    return I;
  }

  const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
    assert(I != end());
    if (Pos >= endIndex())
      return end();
    while (I->end <= Pos) ++I;
    return I;
  }

  /// find - Return an iterator pointing to the first segment that ends after
  /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
  /// when searching large ranges.
  ///
  /// If Pos is contained in a Segment, that segment is returned.
  /// If Pos is in a hole, the following Segment is returned.
  /// If Pos is beyond endIndex, end() is returned.
  iterator find(SlotIndex Pos);

  const_iterator find(SlotIndex Pos) const {
    return const_cast<LiveRange*>(this)->find(Pos);
  }

  /// Drop all segments and value numbers.
  void clear() {
    valnos.clear();
    segments.clear();
  }

  /// Number of segments in this range.
  size_t size() const {
    return segments.size();
  }

  bool hasAtLeastOneValue() const { return !valnos.empty(); }

  bool containsOneValue() const { return valnos.size() == 1; }

  unsigned getNumValNums() const { return (unsigned)valnos.size(); }

  /// getValNumInfo - Returns pointer to the specified val#.
  ///
  inline VNInfo *getValNumInfo(unsigned ValNo) {
    return valnos[ValNo];
  }
  inline const VNInfo *getValNumInfo(unsigned ValNo) const {
    return valnos[ValNo];
  }

  /// containsValue - Returns true if VNI belongs to this range.
  bool containsValue(const VNInfo *VNI) const {
    return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
  }

  /// getNextValue - Create a new value number and return it.  MIIdx specifies
  /// the instruction that defines the value number.  The new value's id is
  /// the current size of the valnos list.
  VNInfo *getNextValue(SlotIndex def, VNInfo::Allocator &VNInfoAllocator) {
    VNInfo *VNI =
      new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), def);
    valnos.push_back(VNI);
    return VNI;
  }

  /// createDeadDef - Make sure the range has a value defined at Def.
  /// If one already exists, return it. Otherwise allocate a new value and
  /// add liveness for a dead def.
  VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);

  /// Create a def of value @p VNI. Return @p VNI. If there already exists
  /// a definition at VNI->def, the value defined there must be @p VNI.
  VNInfo *createDeadDef(VNInfo *VNI);

  /// Create a copy of the given value. The new value will be identical except
  /// for the Value number.
  VNInfo *createValueCopy(const VNInfo *orig,
                          VNInfo::Allocator &VNInfoAllocator) {
    VNInfo *VNI =
      new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
    valnos.push_back(VNI);
    return VNI;
  }

  /// RenumberValues - Renumber all values in order of appearance and remove
  /// unused values.
  void RenumberValues();

  /// MergeValueNumberInto - This method is called when two value numbers
  /// are found to be equivalent.  This eliminates V1, replacing all
  /// segments with the V1 value number with the V2 value number.  This can
  /// cause merging of V1/V2 values numbers and compaction of the value space.
  VNInfo* MergeValueNumberInto(VNInfo *V1, VNInfo *V2);

  /// Merge all of the live segments of a specific val# in RHS into this live
  /// range as the specified value number. The segments in RHS are allowed
  /// to overlap with segments in the current range, it will replace the
  /// value numbers of the overlaped live segments with the specified value
  /// number.
  void MergeSegmentsInAsValue(const LiveRange &RHS, VNInfo *LHSValNo);

  /// MergeValueInAsValue - Merge all of the segments of a specific val#
  /// in RHS into this live range as the specified value number.
  /// The segments in RHS are allowed to overlap with segments in the
  /// current range, but only if the overlapping segments have the
  /// specified value number.
  void MergeValueInAsValue(const LiveRange &RHS,
                           const VNInfo *RHSValNo, VNInfo *LHSValNo);

  bool empty() const { return segments.empty(); }

  /// beginIndex - Return the lowest numbered slot covered.
  SlotIndex beginIndex() const {
    assert(!empty() && "Call to beginIndex() on empty range.");
    return segments.front().start;
  }

  /// endNumber - return the maximum point of the range of the whole,
  /// exclusive.
  SlotIndex endIndex() const {
    assert(!empty() && "Call to endIndex() on empty range.");
    return segments.back().end;
  }

  /// Return true if \p index is at or past the end of this range.
  bool expiredAt(SlotIndex index) const {
    return index >= endIndex();
  }

  /// Return true if some segment of this range contains \p index.
  bool liveAt(SlotIndex index) const {
    const_iterator r = find(index);
    return r != end() && r->start <= index;
  }

  /// Return the segment that contains the specified index, or null if there
  /// is none.
  const Segment *getSegmentContaining(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : &*I;
  }

  /// Return the live segment that contains the specified index, or null if
  /// there is none.
  Segment *getSegmentContaining(SlotIndex Idx) {
    iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : &*I;
  }

  /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
  VNInfo *getVNInfoAt(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : I->valno;
  }

  /// getVNInfoBefore - Return the VNInfo that is live up to but not
  /// necessarilly including Idx, or NULL. Use this to find the reaching def
  /// used by an instruction at this SlotIndex position.
  VNInfo *getVNInfoBefore(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
    return I == end() ? nullptr : I->valno;
  }

  /// Return an iterator to the segment that contains the specified index, or
  /// end() if there is none.
  iterator FindSegmentContaining(SlotIndex Idx) {
    iterator I = find(Idx);
    return I != end() && I->start <= Idx ? I : end();
  }

  const_iterator FindSegmentContaining(SlotIndex Idx) const {
    const_iterator I = find(Idx);
    return I != end() && I->start <= Idx ? I : end();
  }

  /// overlaps - Return true if the intersection of the two live ranges is
  /// not empty.
  bool overlaps(const LiveRange &other) const {
    if (other.empty())
      return false;
    return overlapsFrom(other, other.begin());
  }

  /// overlaps - Return true if the two ranges have overlapping segments
  /// that are not coalescable according to CP.
  ///
  /// Overlapping segments where one range is defined by a coalescable
  /// copy are allowed.
  bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
                const SlotIndexes&) const;

  /// overlaps - Return true if the live range overlaps an interval specified
  /// by [Start, End).
  bool overlaps(SlotIndex Start, SlotIndex End) const;

  /// overlapsFrom - Return true if the intersection of the two live ranges
  /// is not empty.  The specified iterator is a hint that we can begin
  /// scanning the Other range starting at I.
  bool overlapsFrom(const LiveRange &Other, const_iterator StartPos) const;

  /// Returns true if all segments of the @p Other live range are completely
  /// covered by this live range.
  /// Adjacent live ranges do not affect the covering:the liverange
  /// [1,5](5,10] covers (3,7].
  bool covers(const LiveRange &Other) const;

  /// Add the specified Segment to this range, merging segments as
  /// appropriate.  This returns an iterator to the inserted segment (which
  /// may have grown since it was inserted).
  iterator addSegment(Segment S);

  /// Attempt to extend a value defined after @p StartIdx to include @p Use.
  /// Both @p StartIdx and @p Use should be in the same basic block. In case
  /// of subranges, an extension could be prevented by an explicit "undef"
  /// caused by a <def,read-undef> on a non-overlapping lane. The list of
  /// location of such "undefs" should be provided in @p Undefs.
  /// The return value is a pair: the first element is VNInfo of the value
  /// that was extended (possibly nullptr), the second is a boolean value
  /// indicating whether an "undef" was encountered.
  /// If this range is live before @p Use in the basic block that starts at
  /// @p StartIdx, and there is no intervening "undef", extend it to be live
  /// up to @p Use, and return the pair {value, false}. If there is no
  /// segment before @p Use and there is no "undef" between @p StartIdx and
  /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
  /// return {nullptr, true}.
  std::pair<VNInfo*,bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
                                        SlotIndex StartIdx, SlotIndex Kill);

  /// Simplified version of the above "extendInBlock", which assumes that
  /// no register lanes are undefined by <def,read-undef> operands.
  /// If this range is live before @p Use in the basic block that starts
  /// at @p StartIdx, extend it to be live up to @p Use, and return the
  /// value. If there is no segment before @p Use, return nullptr.
  VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Kill);

  /// join - Join two live ranges (this, and other) together.  This applies
  /// mappings to the value numbers in the LHS/RHS ranges as specified.  If
  /// the ranges are not joinable, this aborts.
  void join(LiveRange &Other,
            const int *ValNoAssignments,
            const int *RHSValNoAssignments,
            SmallVectorImpl<VNInfo *> &NewVNInfo);

  /// True iff this segment is a single segment that lies between the
  /// specified boundaries, exclusively. Vregs live across a backedge are not
  /// considered local. The boundaries are expected to lie within an extended
  /// basic block, so vregs that are not live out should contain no holes.
  bool isLocal(SlotIndex Start, SlotIndex End) const {
    return beginIndex() > Start.getBaseIndex() &&
           endIndex() < End.getBoundaryIndex();
  }

  /// Remove the specified segment from this range.  Note that the segment
  /// must be a single Segment in its entirety.
  void removeSegment(SlotIndex Start, SlotIndex End,
                     bool RemoveDeadValNo = false);

  void removeSegment(Segment S, bool RemoveDeadValNo = false) {
    removeSegment(S.start, S.end, RemoveDeadValNo);
  }

  /// Remove segment pointed to by iterator @p I from this range.
  iterator removeSegment(iterator I, bool RemoveDeadValNo = false);

  /// Mark \p ValNo for deletion if no segments in this range use it.
  void removeValNoIfDead(VNInfo *ValNo);

  /// Query Liveness at Idx.
  /// The sub-instruction slot of Idx doesn't matter, only the instruction
  /// it refers to is considered.
  LiveQueryResult Query(SlotIndex Idx) const {
    // Find the segment that enters the instruction.
    const_iterator I = find(Idx.getBaseIndex());
    const_iterator E = end();
    if (I == E)
      return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);

    // Is this an instruction live-in segment?
    // If Idx is the start index of a basic block, include live-in segments
    // that start at Idx.getBaseIndex().
    VNInfo *EarlyVal = nullptr;
    VNInfo *LateVal = nullptr;
    SlotIndex EndPoint;
    bool Kill = false;
    if (I->start <= Idx.getBaseIndex()) {
      EarlyVal = I->valno;
      EndPoint = I->end;
      // Move to the potentially live-out segment.
      if (SlotIndex::isSameInstr(Idx, I->end)) {
        Kill = true;
        if (++I == E)
          return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
      }
      // Special case: A PHIDef value can have its def in the middle of a
      // segment if the value happens to be live out of the layout
      // predecessor.
      // Such a value is not live-in.
      if (EarlyVal->def == Idx.getBaseIndex())
        EarlyVal = nullptr;
    }
    // I now points to the segment that may be live-through, or defined by
    // this instr. Ignore segments starting after the current instr.
    if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
      LateVal = I->valno;
      EndPoint = I->end;
    }
    return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
  }

  /// removeValNo - Remove all the segments defined by the specified value#.
  /// Also remove the value# from value# list.
  void removeValNo(VNInfo *ValNo);

  /// Returns true if the live range is zero length, i.e. no live segments
  /// span instructions. It doesn't pay to spill such a range.
  bool isZeroLength(SlotIndexes *Indexes) const {
    for (const Segment &S : segments)
      if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
          S.end.getBaseIndex())
        return false;
    return true;
  }

  // Returns true if any segment in the live range contains any of the
  // provided slot indexes.  Slots which occur in holes between
  // segments will not cause the function to return true.
  bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;

  // Ranges are ordered by their first covered slot.
  bool operator<(const LiveRange& other) const {
    const SlotIndex &thisIndex = beginIndex();
    const SlotIndex &otherIndex = other.beginIndex();
    return thisIndex < otherIndex;
  }

  /// Returns true if there is an explicit "undef" between @p Begin
  /// @p End.
  bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
                 SlotIndex End) const {
    return llvm::any_of(Undefs, [Begin, End](SlotIndex Idx) -> bool {
      return Begin <= Idx && Idx < End;
    });
  }

  /// Flush segment set into the regular segment vector.
  /// The method is to be called after the live range
  /// has been created, if use of the segment set was
  /// activated in the constructor of the live range.
  void flushSegmentSet();

  /// Stores indexes from the input index sequence R at which this LiveRange
  /// is live to the output O iterator.
  /// R is a range of _ascending sorted_ _random_ access iterators
  /// to the input indexes. Indexes stored at O are ascending sorted so it
  /// can be used directly in the subsequent search (for example for
  /// subranges). Returns true if found at least one index.
  template <typename Range, typename OutputIt>
  bool findIndexesLiveAt(Range &&R, OutputIt O) const {
    assert(llvm::is_sorted(R));
    auto Idx = R.begin(), EndIdx = R.end();
    auto Seg = segments.begin(), EndSeg = segments.end();
    bool Found = false;
    while (Idx != EndIdx && Seg != EndSeg) {
      // if the Seg is lower find first segment that is above Idx using binary
      // search
      if (Seg->end <= *Idx) {
        Seg = std::upper_bound(
            ++Seg, EndSeg, *Idx,
            [=](std::remove_reference_t<decltype(*Idx)> V,
                const std::remove_reference_t<decltype(*Seg)> &S) {
              return V < S.end;
            });
        if (Seg == EndSeg)
          break;
      }
      // Copy the run of indexes that fall inside [Seg->start, Seg->end).
      auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
      if (NotLessStart == EndIdx)
        break;
      auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
      if (NotLessEnd != NotLessStart) {
        Found = true;
        O = std::copy(NotLessStart, NotLessEnd, O);
      }
      Idx = NotLessEnd;
      ++Seg;
    }
    return Found;
  }

  void print(raw_ostream &OS) const;
  void dump() const;

  /// Walk the range and assert if any invariants fail to hold.
  ///
  /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
  void verify() const {}
#else
  void verify() const;
#endif

protected:
  /// Append a segment to the list of segments.
  void append(const LiveRange::Segment S);

private:
  friend class LiveRangeUpdater;
  // Insert \p S into the temporary segment set (initial-construction mode).
  void addSegmentToSet(Segment S);
  void markValNoForDeletion(VNInfo *V);
};
  577. inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
  578. LR.print(OS);
  579. return OS;
  580. }
/// LiveInterval - This class represents the liveness of a register,
/// or stack slot.
class LiveInterval : public LiveRange {
public:
  using super = LiveRange;

  /// A live range for subregisters. The LaneMask specifies which parts of the
  /// super register are covered by the interval.
  /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
  class SubRange : public LiveRange {
  public:
    SubRange *Next = nullptr; ///< Next node in the owning interval's singly
                              ///< linked list of subranges.
    LaneBitmask LaneMask;     ///< Lanes of the super register this range covers.

    /// Constructs a new SubRange object.
    SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}

    /// Constructs a new SubRange object by copying liveness from @p Other.
    SubRange(LaneBitmask LaneMask, const LiveRange &Other,
             BumpPtrAllocator &Allocator)
        : LiveRange(Other, Allocator), LaneMask(LaneMask) {}

    void print(raw_ostream &OS) const;
    void dump() const;
  };

private:
  SubRange *SubRanges = nullptr; ///< Single linked list of subregister live
                                 /// ranges.
  const Register Reg; // the register or stack slot of this interval.
  float Weight = 0.0; // weight of this interval

public:
  Register reg() const { return Reg; }
  float weight() const { return Weight; }
  void incrementWeight(float Inc) { Weight += Inc; }
  void setWeight(float Value) { Weight = Value; }

  // NOTE(review): the parameter is 'unsigned' while the member is 'Register';
  // this relies on Register being constructible from unsigned — confirm that
  // callers are expected to pass raw register numbers here.
  LiveInterval(unsigned Reg, float Weight) : Reg(Reg), Weight(Weight) {}

  ~LiveInterval() {
    clearSubRanges();
  }

  /// Minimal forward iterator over an intrusive singly linked list;
  /// advancing follows the node's Next pointer.
  template<typename T>
  class SingleLinkedListIterator {
    T *P;

  public:
    SingleLinkedListIterator(T *P) : P(P) {}

    SingleLinkedListIterator<T> &operator++() {
      P = P->Next;
      return *this;
    }
    SingleLinkedListIterator<T> operator++(int) {
      SingleLinkedListIterator res = *this;
      ++*this;
      return res;
    }
    bool operator!=(const SingleLinkedListIterator<T> &Other) const {
      return P != Other.operator->();
    }
    bool operator==(const SingleLinkedListIterator<T> &Other) const {
      return P == Other.operator->();
    }
    T &operator*() const {
      return *P;
    }
    T *operator->() const {
      return P;
    }
  };

  using subrange_iterator = SingleLinkedListIterator<SubRange>;
  using const_subrange_iterator = SingleLinkedListIterator<const SubRange>;

  subrange_iterator subrange_begin() {
    return subrange_iterator(SubRanges);
  }
  subrange_iterator subrange_end() {
    return subrange_iterator(nullptr);
  }

  const_subrange_iterator subrange_begin() const {
    return const_subrange_iterator(SubRanges);
  }
  const_subrange_iterator subrange_end() const {
    return const_subrange_iterator(nullptr);
  }

  /// Iterator range over all subregister live ranges of this interval.
  iterator_range<subrange_iterator> subranges() {
    return make_range(subrange_begin(), subrange_end());
  }

  iterator_range<const_subrange_iterator> subranges() const {
    return make_range(subrange_begin(), subrange_end());
  }

  /// Creates a new empty subregister live range. The range is added at the
  /// beginning of the subrange list; subrange iterators stay valid.
  SubRange *createSubRange(BumpPtrAllocator &Allocator,
                           LaneBitmask LaneMask) {
    SubRange *Range = new (Allocator) SubRange(LaneMask);
    appendSubRange(Range);
    return Range;
  }

  /// Like createSubRange() but the new range is filled with a copy of the
  /// liveness information in @p CopyFrom.
  SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator,
                               LaneBitmask LaneMask,
                               const LiveRange &CopyFrom) {
    SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator);
    appendSubRange(Range);
    return Range;
  }

  /// Returns true if subregister liveness information is available.
  bool hasSubRanges() const {
    return SubRanges != nullptr;
  }

  /// Removes all subregister liveness information.
  void clearSubRanges();

  /// Removes all subranges without any segments (subranges without segments
  /// are not considered valid and should only exist temporarily).
  void removeEmptySubRanges();

  /// getSize - Returns the sum of sizes of all the LiveRange's.
  ///
  unsigned getSize() const;

  /// isSpillable - Can this interval be spilled?
  /// (markNotSpillable() encodes "not spillable" as Weight == huge_valf.)
  bool isSpillable() const { return Weight != huge_valf; }

  /// markNotSpillable - Mark interval as not spillable
  void markNotSpillable() { Weight = huge_valf; }

  /// For a given lane mask @p LaneMask, compute indexes at which the
  /// lane is marked undefined by subregister <def,read-undef> definitions.
  void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
                             LaneBitmask LaneMask,
                             const MachineRegisterInfo &MRI,
                             const SlotIndexes &Indexes) const;

  /// Refines the subranges to support \p LaneMask. This may only be called
  /// for LI.hasSubrange()==true. Subregister ranges are split or created
  /// until \p LaneMask can be matched exactly. \p Mod is executed on the
  /// matching subranges.
  ///
  /// Example:
  ///    Given an interval with subranges with lanemasks L0F00, L00F0 and
  ///    L000F, refining for mask L0018. Will split the L00F0 lane into
  ///    L00E0 and L0010 and the L000F lane into L0007 and L0008. The Mod
  ///    function will be applied to the L0010 and L0008 subranges.
  ///
  /// \p Indexes and \p TRI are required to clean up the VNIs that
  /// don't define the related lane masks after they get shrunk. E.g.,
  /// when L000F gets split into L0007 and L0008 maybe only a subset
  /// of the VNIs that defined L000F defines L0007.
  ///
  /// The clean up of the VNIs need to look at the actual instructions
  /// to decide what is or is not live at a definition point. If the
  /// update of the subranges occurs while the IR does not reflect these
  /// changes, \p ComposeSubRegIdx can be used to specify how the
  /// definition are going to be rewritten.
  /// E.g., let say we want to merge:
  ///     V1.sub1:<2 x s32> = COPY V2.sub3:<4 x s32>
  /// We do that by choosing a class where sub1:<2 x s32> and sub3:<4 x s32>
  /// overlap, i.e., by choosing a class where we can find "offset + 1 == 3".
  /// Put differently we align V2's sub3 with V1's sub1:
  /// V2: sub0 sub1 sub2 sub3
  /// V1: <offset>  sub0 sub1
  ///
  /// This offset will look like a composed subregidx in the class:
  ///     V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
  /// =>  V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
  ///
  /// Now if we didn't rewrite the uses and def of V1, all the checks for V1
  /// need to account for this offset.
  /// This happens during coalescing where we update the live-ranges while
  /// still having the old IR around because updating the IR on-the-fly
  /// would actually clobber some information on how the live-ranges that
  /// are being updated look like.
  void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
                       std::function<void(LiveInterval::SubRange &)> Apply,
                       const SlotIndexes &Indexes,
                       const TargetRegisterInfo &TRI,
                       unsigned ComposeSubRegIdx = 0);

  /// Orders intervals by start index, breaking ties by register number.
  bool operator<(const LiveInterval& other) const {
    const SlotIndex &thisIndex = beginIndex();
    const SlotIndex &otherIndex = other.beginIndex();
    return std::tie(thisIndex, Reg) < std::tie(otherIndex, other.Reg);
  }

  void print(raw_ostream &OS) const;
  void dump() const;

  /// Walks the interval and assert if any invariants fail to hold.
  ///
  /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
  void verify(const MachineRegisterInfo *MRI = nullptr) const {}
#else
  void verify(const MachineRegisterInfo *MRI = nullptr) const;
#endif

private:
  /// Appends @p Range to SubRanges list.
  /// (The new range is pushed at the head of the singly linked list.)
  void appendSubRange(SubRange *Range) {
    Range->Next = SubRanges;
    SubRanges = Range;
  }

  /// Free memory held by SubRange.
  void freeSubRange(SubRange *S);
};
  770. inline raw_ostream &operator<<(raw_ostream &OS,
  771. const LiveInterval::SubRange &SR) {
  772. SR.print(OS);
  773. return OS;
  774. }
  775. inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
  776. LI.print(OS);
  777. return OS;
  778. }
  779. raw_ostream &operator<<(raw_ostream &OS, const LiveRange::Segment &S);
  780. inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
  781. return V < S.start;
  782. }
  783. inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
  784. return S.start < V;
  785. }
/// Helper class for performant LiveRange bulk updates.
///
/// Calling LiveRange::addSegment() repeatedly can be expensive on large
/// live ranges because segments after the insertion point may need to be
/// shifted. The LiveRangeUpdater class can defer the shifting when adding
/// many segments in order.
///
/// The LiveRange will be in an invalid state until flush() is called.
class LiveRangeUpdater {
  LiveRange *LR;      ///< Destination live range being updated.
  SlotIndex LastStart; ///< Start of the last added segment; its validity is
                       ///< what isDirty() reports.
  LiveRange::iterator WriteI;
  LiveRange::iterator ReadI;
  SmallVector<LiveRange::Segment, 16> Spills;
  void mergeSpills();

public:
  /// Create a LiveRangeUpdater for adding segments to LR.
  /// LR will temporarily be in an invalid state until flush() is called.
  LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}

  /// Flushes any pending segments so the destination is left valid.
  ~LiveRangeUpdater() { flush(); }

  /// Add a segment to LR and coalesce when possible, just like
  /// LR.addSegment(). Segments should be added in increasing start order for
  /// best performance.
  void add(LiveRange::Segment);

  /// Convenience overload building the Segment from its parts.
  void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
    add(LiveRange::Segment(Start, End, VNI));
  }

  /// Return true if the LR is currently in an invalid state, and flush()
  /// needs to be called.
  bool isDirty() const { return LastStart.isValid(); }

  /// Flush the updater state to LR so it is valid and contains all added
  /// segments.
  void flush();

  /// Select a different destination live range.
  /// Flushes first when switching away from a dirty destination.
  void setDest(LiveRange *lr) {
    if (LR != lr && isDirty())
      flush();
    LR = lr;
  }

  /// Get the current destination live range.
  LiveRange *getDest() const { return LR; }

  void dump() const;
  void print(raw_ostream&) const;
};
  830. inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) {
  831. X.print(OS);
  832. return OS;
  833. }
/// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a
/// LiveInterval into equivalence classes of connected components. A
/// LiveInterval that has multiple connected components can be broken into
/// multiple LiveIntervals.
///
/// Given a LiveInterval that may have multiple connected components, run:
///
///   unsigned numComps = ConEQ.Classify(LI);
///   if (numComps > 1) {
///     // allocate numComps-1 new LiveIntervals into LIV[1..]
///     ConEQ.Distribute(LI, LIV, MRI);
///   }
class ConnectedVNInfoEqClasses {
  LiveIntervals &LIS;
  IntEqClasses EqClass; ///< Maps VNInfo::id to its equivalence class number.

public:
  explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {}

  /// Classify the values in \p LR into connected components.
  /// Returns the number of connected components.
  unsigned Classify(const LiveRange &LR);

  /// getEqClass - Classify creates equivalence classes numbered 0..N. Return
  /// the equivalence class assigned the VNI.
  unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; }

  /// Distribute values in \p LI into a separate LiveIntervals
  /// for each connected component. LIV must have an empty LiveInterval for
  /// each additional connected component. The first connected component is
  /// left in \p LI.
  void Distribute(LiveInterval &LI, LiveInterval *LIV[],
                  MachineRegisterInfo &MRI);
};
  864. } // end namespace llvm
  865. #endif // LLVM_CODEGEN_LIVEINTERVAL_H
  866. #ifdef __GNUC__
  867. #pragma GCC diagnostic pop
  868. #endif