// NOTE: non-source artifacts (file-size banner and a concatenated
// line-number gutter left over from text extraction) were removed here.
//===- LiveIntervals.cpp - Live Interval Analysis -------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file This file implements the LiveInterval analysis pass which is used
/// by the Linear Scan Register allocator. This pass linearizes the
/// basic blocks of the function in DFS order and computes live intervals for
/// each virtual and physical register.
//
//===----------------------------------------------------------------------===//
// Own header first, then headers grouped by library, per LLVM convention.
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervalCalc.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBundle.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/Statepoint.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <tuple>
#include <utility>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

// Unique pass identity; the address of ID (not its value) identifies the pass.
char LiveIntervals::ID = 0;
char &llvm::LiveIntervalsID = LiveIntervals::ID;

// Legacy pass-manager registration, with the analyses this pass depends on.
INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals", "Live Interval Analysis",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
                    "Live Interval Analysis", false, false)

// Debug-only stress-testing knob; compiled out to a plain false in release
// builds so the option string does not exist there.
#ifndef NDEBUG
static cl::opt<bool> EnablePrecomputePhysRegs(
    "precompute-phys-liveness", cl::Hidden,
    cl::desc("Eagerly compute live intervals for all physreg units."));
#else
static bool EnablePrecomputePhysRegs = false;
#endif // NDEBUG

namespace llvm {

// Externally visible (declared in a header elsewhere) so other components can
// honor the same setting.
cl::opt<bool> UseSegmentSetForPhysRegs(
    "use-segment-set-for-physregs", cl::Hidden, cl::init(true),
    cl::desc(
        "Use segment set for the computation of the live ranges of physregs."));

} // end namespace llvm
/// Declare the analyses this pass requires and preserves.
void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  // Liveness computation never changes the CFG.
  AU.setPreservesCFG();
  AU.addPreserved<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  // Transitive requirements: clients of LiveIntervals may keep querying
  // dominators and slot indexes through this analysis after it has run.
  AU.addRequiredTransitiveID(MachineDominatorsID);
  AU.addPreservedID(MachineDominatorsID);
  AU.addPreserved<SlotIndexes>();
  AU.addRequiredTransitive<SlotIndexes>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
/// Construct the pass and register it with the global pass registry.
LiveIntervals::LiveIntervals() : MachineFunctionPass(ID) {
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
}
// LICalc is lazily allocated in runOnMachineFunction; free it here.
LiveIntervals::~LiveIntervals() { delete LICalc; }
  91. void LiveIntervals::releaseMemory() {
  92. // Free the live intervals themselves.
  93. for (unsigned i = 0, e = VirtRegIntervals.size(); i != e; ++i)
  94. delete VirtRegIntervals[Register::index2VirtReg(i)];
  95. VirtRegIntervals.clear();
  96. RegMaskSlots.clear();
  97. RegMaskBits.clear();
  98. RegMaskBlocks.clear();
  99. for (LiveRange *LR : RegUnitRanges)
  100. delete LR;
  101. RegUnitRanges.clear();
  102. // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
  103. VNInfoAllocator.Reset();
  104. }
/// Main entry point: cache the per-function objects, then compute live
/// intervals for all virtual registers, collect regmask slots, and seed the
/// regunit ranges that are live into ABI blocks.
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  // The calculator is allocated once and reused across functions.
  if (!LICalc)
    LICalc = new LiveIntervalCalc();

  // Allocate space for all virtual registers.
  VirtRegIntervals.resize(MRI->getNumVirtRegs());

  computeVirtRegs();
  computeRegMasks();
  computeLiveInRegUnits();

  if (EnablePrecomputePhysRegs) {
    // For stress testing, precompute live ranges of all physical register
    // units, including reserved registers.
    for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
      getRegUnit(i);
  }
  LLVM_DEBUG(dump());
  return false;
}
  128. void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
  129. OS << "********** INTERVALS **********\n";
  130. // Dump the regunits.
  131. for (unsigned Unit = 0, UnitE = RegUnitRanges.size(); Unit != UnitE; ++Unit)
  132. if (LiveRange *LR = RegUnitRanges[Unit])
  133. OS << printRegUnit(Unit, TRI) << ' ' << *LR << '\n';
  134. // Dump the virtregs.
  135. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  136. Register Reg = Register::index2VirtReg(i);
  137. if (hasInterval(Reg))
  138. OS << getInterval(Reg) << '\n';
  139. }
  140. OS << "RegMasks:";
  141. for (SlotIndex Idx : RegMaskSlots)
  142. OS << ' ' << Idx;
  143. OS << '\n';
  144. printInstrs(OS);
  145. }
/// Print the machine function with slot indexes interleaved.
void LiveIntervals::printInstrs(raw_ostream &OS) const {
  OS << "********** MACHINEINSTRS **********\n";
  MF->print(OS, Indexes);
}
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// Debugger-callable helper: dump the indexed instructions to the debug
/// stream. Only available in builds where dump methods exist.
LLVM_DUMP_METHOD void LiveIntervals::dumpInstrs() const {
  printInstrs(dbgs());
}
#endif
  155. LiveInterval *LiveIntervals::createInterval(Register reg) {
  156. float Weight = reg.isPhysical() ? huge_valf : 0.0F;
  157. return new LiveInterval(reg, Weight);
  158. }
/// Compute the live interval of a virtual register, based on defs and uses.
/// \returns true if dead values were removed (see computeDeadValues), in
/// which case the interval may have fallen apart into multiple connected
/// components that the caller should split.
bool LiveIntervals::computeVirtRegInterval(LiveInterval &LI) {
  assert(LICalc && "LICalc not initialized.");
  assert(LI.empty() && "Should only compute empty intervals.");
  LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  // Per-lane subranges are only computed when MRI enables subreg liveness
  // tracking for this register.
  LICalc->calculate(LI, MRI->shouldTrackSubRegLiveness(LI.reg()));
  return computeDeadValues(LI, nullptr);
}
  167. void LiveIntervals::computeVirtRegs() {
  168. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  169. Register Reg = Register::index2VirtReg(i);
  170. if (MRI->reg_nodbg_empty(Reg))
  171. continue;
  172. LiveInterval &LI = createEmptyInterval(Reg);
  173. bool NeedSplit = computeVirtRegInterval(LI);
  174. if (NeedSplit) {
  175. SmallVector<LiveInterval*, 8> SplitLIs;
  176. splitSeparateComponents(LI, SplitLIs);
  177. }
  178. }
  179. }
/// Record the slot index and clobber mask of every regmask in the function,
/// including the implicit masks at EH-pad/funclet boundaries, and note for
/// each block which slice of the RegMaskSlots/RegMaskBits arrays it owns.
void LiveIntervals::computeRegMasks() {
  RegMaskBlocks.resize(MF->getNumBlockIDs());

  // Find all instructions with regmask operands.
  for (const MachineBasicBlock &MBB : *MF) {
    // RMB records this block's entries as [first index, count) into the
    // parallel RegMaskSlots/RegMaskBits arrays.
    std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB.getNumber()];
    RMB.first = RegMaskSlots.size();

    // Some block starts, such as EH funclets, create masks.
    if (const uint32_t *Mask = MBB.getBeginClobberMask(TRI)) {
      RegMaskSlots.push_back(Indexes->getMBBStartIdx(&MBB));
      RegMaskBits.push_back(Mask);
    }

    // Unwinders may clobber additional registers.
    // FIXME: This functionality can possibly be merged into
    // MachineBasicBlock::getBeginClobberMask().
    if (MBB.isEHPad())
      if (auto *Mask = TRI->getCustomEHPadPreservedMask(*MBB.getParent())) {
        RegMaskSlots.push_back(Indexes->getMBBStartIdx(&MBB));
        RegMaskBits.push_back(Mask);
      }

    // One entry per regmask operand; an instruction may carry several.
    for (const MachineInstr &MI : MBB) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isRegMask())
          continue;
        RegMaskSlots.push_back(Indexes->getInstructionIndex(MI).getRegSlot());
        RegMaskBits.push_back(MO.getRegMask());
      }
    }

    // Some block ends, such as funclet returns, create masks. Put the mask on
    // the last instruction of the block, because MBB slot index intervals are
    // half-open.
    if (const uint32_t *Mask = MBB.getEndClobberMask(TRI)) {
      assert(!MBB.empty() && "empty return block?");
      RegMaskSlots.push_back(
          Indexes->getInstructionIndex(MBB.back()).getRegSlot());
      RegMaskBits.push_back(Mask);
    }

    // Compute the number of register mask instructions in this block.
    RMB.second = RegMaskSlots.size() - RMB.first;
  }
}
//===----------------------------------------------------------------------===//
//                           Register Unit Liveness
//===----------------------------------------------------------------------===//
//
// Fixed interference typically comes from ABI boundaries: Function arguments
// and return values are passed in fixed registers, and so are exception
// pointers entering landing pads. Certain instructions require values to be
// present in specific registers. That is also represented through fixed
// interference.
//

/// Compute the live range of a register unit, based on the uses and defs of
/// aliasing registers. The range should be empty, or contain only dead
/// phi-defs from ABI blocks.
void LiveIntervals::computeRegUnitRange(LiveRange &LR, unsigned Unit) {
  assert(LICalc && "LICalc not initialized.");
  LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());

  // The physregs aliasing Unit are the roots and their super-registers.
  // Create all values as dead defs before extending to uses. Note that roots
  // may share super-registers. That's OK because createDeadDefs() is
  // idempotent. It is very rare for a register unit to have multiple roots, so
  // uniquing super-registers is probably not worthwhile.
  bool IsReserved = false;
  for (MCRegUnitRootIterator Root(Unit, TRI); Root.isValid(); ++Root) {
    bool IsRootReserved = true;
    for (MCSuperRegIterator Super(*Root, TRI, /*IncludeSelf=*/true);
         Super.isValid(); ++Super) {
      MCRegister Reg = *Super;
      if (!MRI->reg_empty(Reg))
        LICalc->createDeadDefs(LR, Reg);
      // A register unit is considered reserved if all its roots and all their
      // super registers are reserved.
      if (!MRI->isReserved(Reg))
        IsRootReserved = false;
    }
    IsReserved |= IsRootReserved;
  }
  assert(IsReserved == MRI->isReservedRegUnit(Unit) &&
         "reserved computation mismatch");

  // Now extend LR to reach all uses.
  // Ignore uses of reserved registers. We only track defs of those.
  if (!IsReserved) {
    for (MCRegUnitRootIterator Root(Unit, TRI); Root.isValid(); ++Root) {
      for (MCSuperRegIterator Super(*Root, TRI, /*IncludeSelf=*/true);
           Super.isValid(); ++Super) {
        MCRegister Reg = *Super;
        if (!MRI->reg_empty(Reg))
          LICalc->extendToUses(LR, Reg);
      }
    }
  }

  // Flush the segment set to the segment vector.
  if (UseSegmentSetForPhysRegs)
    LR.flushSegmentSet();
}
/// Precompute the live ranges of any register units that are live-in to an ABI
/// block somewhere. Register values can appear without a corresponding def when
/// entering the entry block or a landing pad.
void LiveIntervals::computeLiveInRegUnits() {
  RegUnitRanges.resize(TRI->getNumRegUnits());
  LLVM_DEBUG(dbgs() << "Computing live-in reg-units in ABI blocks.\n");

  // Keep track of the live range sets allocated.
  SmallVector<unsigned, 8> NewRanges;

  // Check all basic blocks for live-ins.
  for (const MachineBasicBlock &MBB : *MF) {
    // We only care about ABI blocks: Entry + landing pads.
    if ((&MBB != &MF->front() && !MBB.isEHPad()) || MBB.livein_empty())
      continue;

    // Create phi-defs at Begin for all live-in registers.
    SlotIndex Begin = Indexes->getMBBStartIdx(&MBB);
    LLVM_DEBUG(dbgs() << Begin << "\t" << printMBBReference(MBB));
    for (const auto &LI : MBB.liveins()) {
      for (MCRegUnitIterator Units(LI.PhysReg, TRI); Units.isValid(); ++Units) {
        unsigned Unit = *Units;
        LiveRange *LR = RegUnitRanges[Unit];
        if (!LR) {
          // Use segment set to speed-up initial computation of the live range.
          LR = RegUnitRanges[Unit] = new LiveRange(UseSegmentSetForPhysRegs);
          NewRanges.push_back(Unit);
        }
        // Only a value number at the block entry is needed here; the dead def
        // is extended to real uses by computeRegUnitRange() below.
        VNInfo *VNI = LR->createDeadDef(Begin, getVNInfoAllocator());
        (void)VNI;
        LLVM_DEBUG(dbgs() << ' ' << printRegUnit(Unit, TRI) << '#' << VNI->id);
      }
    }
    LLVM_DEBUG(dbgs() << '\n');
  }
  LLVM_DEBUG(dbgs() << "Created " << NewRanges.size() << " new intervals.\n");

  // Compute the 'normal' part of the ranges.
  for (unsigned Unit : NewRanges)
    computeRegUnitRange(*RegUnitRanges[Unit], Unit);
}
  311. static void createSegmentsForValues(LiveRange &LR,
  312. iterator_range<LiveInterval::vni_iterator> VNIs) {
  313. for (VNInfo *VNI : VNIs) {
  314. if (VNI->isUnused())
  315. continue;
  316. SlotIndex Def = VNI->def;
  317. LR.addSegment(LiveRange::Segment(Def, Def.getDeadSlot(), VNI));
  318. }
  319. }
/// Extend the liveness in \p Segments so that every (use index, value) pair
/// on \p WorkList is covered, pulling liveness backwards across basic-block
/// boundaries through predecessors where needed. \p LaneMask selects which
/// subrange of \p Reg's interval supplies the pre-existing ("old") live-out
/// values; a none mask selects the main range.
void LiveIntervals::extendSegmentsToUses(LiveRange &Segments,
                                         ShrinkToUsesWorkList &WorkList,
                                         Register Reg, LaneBitmask LaneMask) {
  // Keep track of the PHIs that are in use.
  SmallPtrSet<VNInfo*, 8> UsedPHIs;
  // Blocks that have already been added to WorkList as live-out.
  SmallPtrSet<const MachineBasicBlock*, 16> LiveOut;

  // Find the subrange of I whose lane mask matches M exactly, or the main
  // range when M is none.
  auto getSubRange = [](const LiveInterval &I, LaneBitmask M)
        -> const LiveRange& {
    if (M.none())
      return I;
    for (const LiveInterval::SubRange &SR : I.subranges()) {
      if ((SR.LaneMask & M).any()) {
        assert(SR.LaneMask == M && "Expecting lane masks to match exactly");
        return SR;
      }
    }
    llvm_unreachable("Subrange for mask not found");
  };

  const LiveInterval &LI = getInterval(Reg);
  const LiveRange &OldRange = getSubRange(LI, LaneMask);

  // Extend intervals to reach all uses in WorkList.
  while (!WorkList.empty()) {
    SlotIndex Idx = WorkList.back().first;
    VNInfo *VNI = WorkList.back().second;
    WorkList.pop_back();
    // Idx is a use slot; the previous slot keeps the lookup inside the block
    // even when Idx is the block-start boundary index.
    const MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Idx.getPrevSlot());
    SlotIndex BlockStart = Indexes->getMBBStartIdx(MBB);

    // Extend the live range for VNI to be live at Idx.
    if (VNInfo *ExtVNI = Segments.extendInBlock(BlockStart, Idx)) {
      assert(ExtVNI == VNI && "Unexpected existing value number");
      (void)ExtVNI;
      // Is this a PHIDef we haven't seen before?
      if (!VNI->isPHIDef() || VNI->def != BlockStart ||
          !UsedPHIs.insert(VNI).second)
        continue;
      // The PHI is live, make sure the predecessors are live-out.
      for (const MachineBasicBlock *Pred : MBB->predecessors()) {
        if (!LiveOut.insert(Pred).second)
          continue;
        SlotIndex Stop = Indexes->getMBBEndIdx(Pred);
        // A predecessor is not required to have a live-out value for a PHI.
        if (VNInfo *PVNI = OldRange.getVNInfoBefore(Stop))
          WorkList.push_back(std::make_pair(Stop, PVNI));
      }
      continue;
    }

    // VNI is live-in to MBB.
    LLVM_DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    Segments.addSegment(LiveRange::Segment(BlockStart, Idx, VNI));

    // Make sure VNI is live-out from the predecessors.
    for (const MachineBasicBlock *Pred : MBB->predecessors()) {
      if (!LiveOut.insert(Pred).second)
        continue;
      SlotIndex Stop = Indexes->getMBBEndIdx(Pred);
      if (VNInfo *OldVNI = OldRange.getVNInfoBefore(Stop)) {
        assert(OldVNI == VNI && "Wrong value out of predecessor");
        (void)OldVNI;
        WorkList.push_back(std::make_pair(Stop, VNI));
      } else {
#ifndef NDEBUG
        // There was no old VNI. Verify that Stop is jointly dominated
        // by <undef>s for this live range.
        assert(LaneMask.any() &&
               "Missing value out of predecessor for main range");
        SmallVector<SlotIndex,8> Undefs;
        LI.computeSubRangeUndefs(Undefs, LaneMask, *MRI, *Indexes);
        assert(LiveRangeCalc::isJointlyDominated(Pred, Undefs, *Indexes) &&
               "Missing value out of predecessor for subrange");
#endif
      }
    }
  }
}
/// Recompute li's segments from the instructions that actually read the
/// register, dropping liveness no use requires (subranges included).
/// \returns true if dead values were found, meaning the interval may now
/// consist of separately-allocatable connected components. Instructions
/// whose defs all became dead are appended to \p dead when it is non-null.
bool LiveIntervals::shrinkToUses(LiveInterval *li,
                                 SmallVectorImpl<MachineInstr*> *dead) {
  LLVM_DEBUG(dbgs() << "Shrink: " << *li << '\n');
  assert(li->reg().isVirtual() && "Can only shrink virtual registers");

  // Shrink subregister live ranges.
  bool NeedsCleanup = false;
  for (LiveInterval::SubRange &S : li->subranges()) {
    shrinkToUses(S, li->reg());
    if (S.empty())
      NeedsCleanup = true;
  }
  if (NeedsCleanup)
    li->removeEmptySubRanges();

  // Find all the values used, including PHI kills.
  ShrinkToUsesWorkList WorkList;

  // Visit all instructions reading li->reg().
  Register Reg = li->reg();
  for (MachineInstr &UseMI : MRI->reg_instructions(Reg)) {
    if (UseMI.isDebugInstr() || !UseMI.readsVirtualRegister(Reg))
      continue;
    SlotIndex Idx = getInstructionIndex(UseMI).getRegSlot();
    LiveQueryResult LRQ = li->Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    if (!VNI) {
      // This shouldn't happen: readsVirtualRegister returns true, but there is
      // no live value. It is likely caused by a target getting <undef> flags
      // wrong.
      LLVM_DEBUG(
          dbgs() << Idx << '\t' << UseMI
                 << "Warning: Instr claims to read non-existent value in "
                 << *li << '\n');
      continue;
    }
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;

    WorkList.push_back(std::make_pair(Idx, VNI));
  }

  // Create new live ranges with only minimal live segments per def.
  LiveRange NewLR;
  createSegmentsForValues(NewLR, li->vnis());
  extendSegmentsToUses(NewLR, WorkList, Reg, LaneBitmask::getNone());

  // Move the trimmed segments back.
  li->segments.swap(NewLR.segments);

  // Handle dead values.
  bool CanSeparate = computeDeadValues(*li, dead);
  LLVM_DEBUG(dbgs() << "Shrunk: " << *li << '\n');
  return CanSeparate;
}
/// Identify value numbers in LI whose minimal segment (def to dead slot) was
/// never extended by a use. Dead PHI values are marked unused and their
/// segment removed; dead real defs get a dead flag on the defining
/// instruction. \returns true if anything died, i.e. the interval may have
/// separated into multiple connected components. When \p dead is non-null,
/// instructions with all defs dead are appended to it.
bool LiveIntervals::computeDeadValues(LiveInterval &LI,
                                      SmallVectorImpl<MachineInstr*> *dead) {
  bool MayHaveSplitComponents = false;
  for (VNInfo *VNI : LI.valnos) {
    if (VNI->isUnused())
      continue;
    SlotIndex Def = VNI->def;
    LiveRange::iterator I = LI.FindSegmentContaining(Def);
    assert(I != LI.end() && "Missing segment for VNI");

    // Is the register live before? Otherwise we may have to add a read-undef
    // flag for subregister defs.
    Register VReg = LI.reg();
    if (MRI->shouldTrackSubRegLiveness(VReg)) {
      if ((I == LI.begin() || std::prev(I)->end < Def) && !VNI->isPHIDef()) {
        MachineInstr *MI = getInstructionFromIndex(Def);
        MI->setRegisterDefReadUndef(VReg);
      }
    }

    // A segment ending at the dead slot means no use extended this value.
    if (I->end != Def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      VNI->markUnused();
      LI.removeSegment(I);
      LLVM_DEBUG(dbgs() << "Dead PHI at " << Def << " may separate interval\n");
    } else {
      // This is a dead def. Make sure the instruction knows.
      MachineInstr *MI = getInstructionFromIndex(Def);
      assert(MI && "No instruction defining live value");
      MI->addRegisterDead(LI.reg(), TRI);

      // Report fully dead instructions so the caller may delete them.
      if (dead && MI->allDefsAreDead()) {
        LLVM_DEBUG(dbgs() << "All defs dead: " << Def << '\t' << *MI);
        dead->push_back(MI);
      }
    }
    MayHaveSplitComponents = true;
  }
  return MayHaveSplitComponents;
}
/// Trim subrange \p SR of virtual register \p Reg down to the segments
/// required by reads of the lanes in SR.LaneMask, then remove PHI value
/// numbers that ended up dead.
void LiveIntervals::shrinkToUses(LiveInterval::SubRange &SR, Register Reg) {
  LLVM_DEBUG(dbgs() << "Shrink: " << SR << '\n');
  assert(Reg.isVirtual() && "Can only shrink virtual registers");

  // Find all the values used, including PHI kills.
  ShrinkToUsesWorkList WorkList;

  // Visit all instructions reading Reg.
  SlotIndex LastIdx;
  for (MachineOperand &MO : MRI->use_nodbg_operands(Reg)) {
    // Skip "undef" uses.
    if (!MO.readsReg())
      continue;
    // Maybe the operand is for a subregister we don't care about.
    unsigned SubReg = MO.getSubReg();
    if (SubReg != 0) {
      LaneBitmask LaneMask = TRI->getSubRegIndexLaneMask(SubReg);
      if ((LaneMask & SR.LaneMask).none())
        continue;
    }
    // We only need to visit each instruction once (operands on the same
    // instruction share its slot index).
    MachineInstr *UseMI = MO.getParent();
    SlotIndex Idx = getInstructionIndex(*UseMI).getRegSlot();
    if (Idx == LastIdx)
      continue;
    LastIdx = Idx;

    LiveQueryResult LRQ = SR.Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    // For Subranges it is possible that only undef values are left in that
    // part of the subregister, so there is no real liverange at the use.
    if (!VNI)
      continue;

    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;

    WorkList.push_back(std::make_pair(Idx, VNI));
  }

  // Create a new live range with only minimal live segments per def.
  LiveRange NewLR;
  createSegmentsForValues(NewLR, SR.vnis());
  extendSegmentsToUses(NewLR, WorkList, Reg, SR.LaneMask);

  // Move the trimmed ranges back.
  SR.segments.swap(NewLR.segments);

  // Remove dead PHI value numbers.
  for (VNInfo *VNI : SR.valnos) {
    if (VNI->isUnused())
      continue;
    const LiveRange::Segment *Segment = SR.getSegmentContaining(VNI->def);
    assert(Segment != nullptr && "Missing segment for VNI");
    // A segment ending at the dead slot means no use extended this value.
    if (Segment->end != VNI->def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      LLVM_DEBUG(dbgs() << "Dead PHI at " << VNI->def
                        << " may separate interval\n");
      VNI->markUnused();
      SR.removeSegment(*Segment);
    }
  }

  LLVM_DEBUG(dbgs() << "Shrunk: " << SR << '\n');
}
  543. void LiveIntervals::extendToIndices(LiveRange &LR,
  544. ArrayRef<SlotIndex> Indices,
  545. ArrayRef<SlotIndex> Undefs) {
  546. assert(LICalc && "LICalc not initialized.");
  547. LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  548. for (SlotIndex Idx : Indices)
  549. LICalc->extend(LR, Idx, /*PhysReg=*/0, Undefs);
  550. }
/// Remove liveness of the value live at \p Kill, from \p Kill through every
/// block reachable from it while that value remains live (via DFS over
/// successors). The former end point of each removed piece is appended to
/// \p EndPoints (when non-null) so the caller can re-extend the range later.
void LiveIntervals::pruneValue(LiveRange &LR, SlotIndex Kill,
                               SmallVectorImpl<SlotIndex> *EndPoints) {
  LiveQueryResult LRQ = LR.Query(Kill);
  VNInfo *VNI = LRQ.valueOutOrDead();
  if (!VNI)
    return;

  MachineBasicBlock *KillMBB = Indexes->getMBBFromIndex(Kill);
  SlotIndex MBBEnd = Indexes->getMBBEndIdx(KillMBB);

  // If VNI isn't live out from KillMBB, the value is trivially pruned.
  if (LRQ.endPoint() < MBBEnd) {
    LR.removeSegment(Kill, LRQ.endPoint());
    if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    return;
  }

  // VNI is live out of KillMBB.
  LR.removeSegment(Kill, MBBEnd);
  if (EndPoints) EndPoints->push_back(MBBEnd);

  // Find all blocks that are reachable from KillMBB without leaving VNI's live
  // range. It is possible that KillMBB itself is reachable, so start a DFS
  // from each successor.
  using VisitedTy = df_iterator_default_set<MachineBasicBlock*,9>;
  VisitedTy Visited;
  for (MachineBasicBlock *Succ : KillMBB->successors()) {
    for (df_ext_iterator<MachineBasicBlock*, VisitedTy>
         I = df_ext_begin(Succ, Visited), E = df_ext_end(Succ, Visited);
         I != E;) {
      MachineBasicBlock *MBB = *I;

      // Check if VNI is live in to MBB.
      SlotIndex MBBStart, MBBEnd;
      std::tie(MBBStart, MBBEnd) = Indexes->getMBBRange(MBB);
      LiveQueryResult LRQ = LR.Query(MBBStart);
      if (LRQ.valueIn() != VNI) {
        // This block isn't part of the VNI segment. Prune the search.
        I.skipChildren();
        continue;
      }

      // Prune the search if VNI is killed in MBB.
      if (LRQ.endPoint() < MBBEnd) {
        LR.removeSegment(MBBStart, LRQ.endPoint());
        if (EndPoints) EndPoints->push_back(LRQ.endPoint());
        I.skipChildren();
        continue;
      }

      // VNI is live through MBB.
      LR.removeSegment(MBBStart, MBBEnd);
      if (EndPoints) EndPoints->push_back(MBBEnd);
      ++I;
    }
  }
}
  601. //===----------------------------------------------------------------------===//
  602. // Register allocator hooks.
  603. //
/// Recompute kill flags for all virtual registers using the final VirtRegMap
/// assignments. A use gets a kill flag when it ends a live segment, unless
/// (a) the assigned physreg's regunits stay live past the use, or (b) subreg
/// liveness shows the read touches lanes this vreg never defined.
void LiveIntervals::addKillFlags(const VirtRegMap *VRM) {
  // Keep track of regunit ranges: for each regunit of the assigned physreg we
  // cache an iterator that is only advanced forward, so the whole interval is
  // scanned once per regunit instead of searched per segment.
  SmallVector<std::pair<const LiveRange*, LiveRange::const_iterator>, 8> RU;

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    Register Reg = Register::index2VirtReg(i);
    if (MRI->reg_nodbg_empty(Reg))
      continue;
    const LiveInterval &LI = getInterval(Reg);
    if (LI.empty())
      continue;

    // Target may have not allocated this yet.
    Register PhysReg = VRM->getPhys(Reg);
    if (!PhysReg)
      continue;

    // Find the regunit intervals for the assigned register. They may overlap
    // the virtual register live range, cancelling any kills.
    RU.clear();
    for (MCRegUnitIterator Unit(PhysReg, TRI); Unit.isValid();
         ++Unit) {
      const LiveRange &RURange = getRegUnit(*Unit);
      if (RURange.empty())
        continue;
      // Position the cached iterator at the first regunit segment that could
      // overlap LI.
      RU.push_back(std::make_pair(&RURange, RURange.find(LI.begin()->end)));
    }

    // Every instruction that kills Reg corresponds to a segment range end
    // point.
    for (LiveInterval::const_iterator RI = LI.begin(), RE = LI.end(); RI != RE;
         ++RI) {
      // A block index indicates an MBB edge.
      if (RI->end.isBlock())
        continue;
      MachineInstr *MI = getInstructionFromIndex(RI->end);
      if (!MI)
        continue;

      // Check if any of the regunits are live beyond the end of RI. That could
      // happen when a physreg is defined as a copy of a virtreg:
      //
      // %eax = COPY %5
      // FOO %5 <--- MI, cancel kill because %eax is live.
      // BAR killed %eax
      //
      // There should be no kill flag on FOO when %5 is rewritten as %eax.
      for (auto &RUP : RU) {
        const LiveRange &RURange = *RUP.first;
        LiveRange::const_iterator &I = RUP.second;
        if (I == RURange.end())
          continue;
        // Advance the cached cursor; it never moves backwards because RI->end
        // is monotonically increasing over the segment loop.
        I = RURange.advanceTo(I, RI->end);
        if (I == RURange.end() || I->start >= RI->end)
          continue;
        // I is overlapping RI.
        goto CancelKill;
      }

      if (MRI->subRegLivenessEnabled()) {
        // When reading a partial undefined value we must not add a kill flag.
        // The regalloc might have used the undef lane for something else.
        // Example:
        // %1 = ... ; R32: %1
        // %2:high16 = ... ; R64: %2
        // = read killed %2 ; R64: %2
        // = read %1 ; R32: %1
        // The <kill> flag is correct for %2, but the register allocator may
        // assign R0L to %1, and R0 to %2 because the low 32bits of R0
        // are actually never written by %2. After assignment the <kill>
        // flag at the read instruction is invalid.
        LaneBitmask DefinedLanesMask;
        if (LI.hasSubRanges()) {
          // Compute a mask of lanes that are defined.
          // A subrange whose segment ends exactly at RI->end contributes its
          // lanes; segments starting at or after RI->end cannot matter.
          DefinedLanesMask = LaneBitmask::getNone();
          for (const LiveInterval::SubRange &SR : LI.subranges())
            for (const LiveRange::Segment &Segment : SR.segments) {
              if (Segment.start >= RI->end)
                break;
              if (Segment.end == RI->end) {
                DefinedLanesMask |= SR.LaneMask;
                break;
              }
            }
        } else
          DefinedLanesMask = LaneBitmask::getAll();

        bool IsFullWrite = false;
        for (const MachineOperand &MO : MI->operands()) {
          if (!MO.isReg() || MO.getReg() != Reg)
            continue;
          if (MO.isUse()) {
            // Reading any undefined lanes?
            unsigned SubReg = MO.getSubReg();
            LaneBitmask UseMask = SubReg ? TRI->getSubRegIndexLaneMask(SubReg)
                                         : MRI->getMaxLaneMaskForVReg(Reg);
            if ((UseMask & ~DefinedLanesMask).any())
              goto CancelKill;
          } else if (MO.getSubReg() == 0) {
            // Writing to the full register?
            assert(MO.isDef());
            IsFullWrite = true;
          }
        }

        // If an instruction writes to a subregister, a new segment starts in
        // the LiveInterval. But as this is only overriding part of the register
        // adding kill-flags is not correct here after registers have been
        // assigned.
        if (!IsFullWrite) {
          // Next segment has to be adjacent in the subregister write case.
          LiveRange::const_iterator N = std::next(RI);
          if (N != LI.end() && N->start == RI->end)
            goto CancelKill;
        }
      }

      MI->addRegisterKilled(Reg, nullptr);
      continue;
CancelKill:
      MI->clearRegisterKills(Reg, nullptr);
    }
  }
}
  719. MachineBasicBlock*
  720. LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
  721. assert(!LI.empty() && "LiveInterval is empty.");
  722. // A local live range must be fully contained inside the block, meaning it is
  723. // defined and killed at instructions, not at block boundaries. It is not
  724. // live in or out of any block.
  725. //
  726. // It is technically possible to have a PHI-defined live range identical to a
  727. // single block, but we are going to return false in that case.
  728. SlotIndex Start = LI.beginIndex();
  729. if (Start.isBlock())
  730. return nullptr;
  731. SlotIndex Stop = LI.endIndex();
  732. if (Stop.isBlock())
  733. return nullptr;
  734. // getMBBFromIndex doesn't need to search the MBB table when both indexes
  735. // belong to proper instructions.
  736. MachineBasicBlock *MBB1 = Indexes->getMBBFromIndex(Start);
  737. MachineBasicBlock *MBB2 = Indexes->getMBBFromIndex(Stop);
  738. return MBB1 == MBB2 ? MBB1 : nullptr;
  739. }
  740. bool
  741. LiveIntervals::hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const {
  742. for (const VNInfo *PHI : LI.valnos) {
  743. if (PHI->isUnused() || !PHI->isPHIDef())
  744. continue;
  745. const MachineBasicBlock *PHIMBB = getMBBFromIndex(PHI->def);
  746. // Conservatively return true instead of scanning huge predecessor lists.
  747. if (PHIMBB->pred_size() > 100)
  748. return true;
  749. for (const MachineBasicBlock *Pred : PHIMBB->predecessors())
  750. if (VNI == LI.getVNInfoBefore(Indexes->getMBBEndIdx(Pred)))
  751. return true;
  752. }
  753. return false;
  754. }
  755. float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
  756. const MachineBlockFrequencyInfo *MBFI,
  757. const MachineInstr &MI) {
  758. return getSpillWeight(isDef, isUse, MBFI, MI.getParent());
  759. }
  760. float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
  761. const MachineBlockFrequencyInfo *MBFI,
  762. const MachineBasicBlock *MBB) {
  763. return (isDef + isUse) * MBFI->getBlockFreqRelativeToEntryBlock(MBB);
  764. }
  765. LiveRange::Segment
  766. LiveIntervals::addSegmentToEndOfBlock(Register Reg, MachineInstr &startInst) {
  767. LiveInterval &Interval = createEmptyInterval(Reg);
  768. VNInfo *VN = Interval.getNextValue(
  769. SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  770. getVNInfoAllocator());
  771. LiveRange::Segment S(SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  772. getMBBEndIdx(startInst.getParent()), VN);
  773. Interval.addSegment(S);
  774. return S;
  775. }
  776. //===----------------------------------------------------------------------===//
  777. // Register mask functions
  778. //===----------------------------------------------------------------------===//
  779. /// Check whether use of reg in MI is live-through. Live-through means that
  780. /// the value is alive on exit from Machine instruction. The example of such
  781. /// use is a deopt value in statepoint instruction.
  782. static bool hasLiveThroughUse(const MachineInstr *MI, Register Reg) {
  783. if (MI->getOpcode() != TargetOpcode::STATEPOINT)
  784. return false;
  785. StatepointOpers SO(MI);
  786. if (SO.getFlags() & (uint64_t)StatepointFlags::DeoptLiveIn)
  787. return false;
  788. for (unsigned Idx = SO.getNumDeoptArgsIdx(), E = SO.getNumGCPtrIdx(); Idx < E;
  789. ++Idx) {
  790. const MachineOperand &MO = MI->getOperand(Idx);
  791. if (MO.isReg() && MO.getReg() == Reg)
  792. return true;
  793. }
  794. return false;
  795. }
/// Compute which registers remain usable for LI given all regmask operands
/// (calls) the interval overlaps. Returns true and fills UsableRegs iff at
/// least one overlapping regmask was found; UsableRegs is untouched otherwise.
bool LiveIntervals::checkRegMaskInterference(const LiveInterval &LI,
                                             BitVector &UsableRegs) {
  if (LI.empty())
    return false;
  LiveInterval::const_iterator LiveI = LI.begin(), LiveE = LI.end();

  // Use a smaller arrays for local live ranges.
  ArrayRef<SlotIndex> Slots;
  ArrayRef<const uint32_t*> Bits;
  if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    Bits = getRegMaskBitsInBlock(MBB->getNumber());
  } else {
    Slots = getRegMaskSlots();
    Bits = getRegMaskBits();
  }

  // We are going to enumerate all the register mask slots contained in LI.
  // Start with a binary search of RegMaskSlots to find a starting point.
  ArrayRef<SlotIndex>::iterator SlotI = llvm::lower_bound(Slots, LiveI->start);
  ArrayRef<SlotIndex>::iterator SlotE = Slots.end();

  // No slots in range, LI begins after the last call.
  if (SlotI == SlotE)
    return false;

  bool Found = false;
  // Utility to union regmasks.
  auto unionBitMask = [&](unsigned Idx) {
    if (!Found) {
      // This is the first overlap. Initialize UsableRegs to all ones.
      UsableRegs.clear();
      UsableRegs.resize(TRI->getNumRegs(), true);
      Found = true;
    }
    // Remove usable registers clobbered by this mask.
    UsableRegs.clearBitsNotInMask(Bits[Idx]);
  };
  // Two-cursor sweep: SlotI walks the sorted regmask slots, LiveI walks LI's
  // segments; neither cursor ever moves backwards.
  while (true) {
    assert(*SlotI >= LiveI->start);
    // Loop over all slots overlapping this segment.
    while (*SlotI < LiveI->end) {
      // *SlotI overlaps LI. Collect mask bits.
      unionBitMask(SlotI - Slots.begin());
      if (++SlotI == SlotE)
        return Found;
    }
    // If segment ends with live-through use we need to collect its regmask.
    // Note the post-increment: the slot is consumed after being unioned.
    if (*SlotI == LiveI->end)
      if (MachineInstr *MI = getInstructionFromIndex(*SlotI))
        if (hasLiveThroughUse(MI, LI.reg()))
          unionBitMask(SlotI++ - Slots.begin());
    // *SlotI is beyond the current LI segment.
    // Special advance implementation to not miss next LiveI->end.
    if (++LiveI == LiveE || SlotI == SlotE || *SlotI > LI.endIndex())
      return Found;
    while (LiveI->end < *SlotI)
      ++LiveI;
    // Advance SlotI until it overlaps.
    while (*SlotI < LiveI->start)
      if (++SlotI == SlotE)
        return Found;
  }
}
  856. //===----------------------------------------------------------------------===//
  857. // IntervalUpdate class.
  858. //===----------------------------------------------------------------------===//
  859. /// Toolkit used by handleMove to trim or extend live intervals.
class LiveIntervals::HMEditor {
private:
  LiveIntervals& LIS;                 // Analysis being kept up to date.
  const MachineRegisterInfo& MRI;
  const TargetRegisterInfo& TRI;
  SlotIndex OldIdx;                   // Original slot of the moved instruction.
  SlotIndex NewIdx;                   // Destination slot.
  SmallPtrSet<LiveRange*, 8> Updated; // Ranges already visited; update once.
  bool UpdateFlags;                   // See the FIXME on getRegUnitLI below.

public:
  HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
           const TargetRegisterInfo& TRI,
           SlotIndex OldIdx, SlotIndex NewIdx, bool UpdateFlags)
      : LIS(LIS), MRI(MRI), TRI(TRI), OldIdx(OldIdx), NewIdx(NewIdx),
        UpdateFlags(UpdateFlags) {}

  // FIXME: UpdateFlags is a workaround that creates live intervals for all
  // physregs, even those that aren't needed for regalloc, in order to update
  // kill flags. This is wasteful. Eventually, LiveVariables will strip all kill
  // flags, and postRA passes will use a live register utility instead.
  LiveRange *getRegUnitLI(unsigned Unit) {
    if (UpdateFlags && !MRI.isReservedRegUnit(Unit))
      return &LIS.getRegUnit(Unit);
    return LIS.getCachedRegUnit(Unit);
  }

  /// Update all live ranges touched by MI, assuming a move from OldIdx to
  /// NewIdx.
  void updateAllRanges(MachineInstr *MI) {
    LLVM_DEBUG(dbgs() << "handleMove " << OldIdx << " -> " << NewIdx << ": "
                      << *MI);
    bool hasRegMask = false;
    for (MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        hasRegMask = true;
      if (!MO.isReg())
        continue;
      if (MO.isUse()) {
        if (!MO.readsReg())
          continue;
        // Aggressively clear all kill flags.
        // They are reinserted by VirtRegRewriter.
        MO.setIsKill(false);
      }

      Register Reg = MO.getReg();
      if (!Reg)
        continue;
      if (Reg.isVirtual()) {
        LiveInterval &LI = LIS.getInterval(Reg);
        if (LI.hasSubRanges()) {
          // Update every subrange the operand's lanes touch before the main
          // range.
          unsigned SubReg = MO.getSubReg();
          LaneBitmask LaneMask = SubReg ? TRI.getSubRegIndexLaneMask(SubReg)
                                        : MRI.getMaxLaneMaskForVReg(Reg);
          for (LiveInterval::SubRange &S : LI.subranges()) {
            if ((S.LaneMask & LaneMask).none())
              continue;
            updateRange(S, Reg, S.LaneMask);
          }
        }
        updateRange(LI, Reg, LaneBitmask::getNone());
        // If main range has a hole and we are moving a subrange use across
        // the hole updateRange() cannot properly handle it since it only
        // gets the LiveRange and not the whole LiveInterval. As a result
        // we may end up with a main range not covering all subranges.
        // This is extremely rare case, so let's check and reconstruct the
        // main range.
        if (LI.hasSubRanges()) {
          unsigned SubReg = MO.getSubReg();
          LaneBitmask LaneMask = SubReg ? TRI.getSubRegIndexLaneMask(SubReg)
                                        : MRI.getMaxLaneMaskForVReg(Reg);
          for (LiveInterval::SubRange &S : LI.subranges()) {
            if ((S.LaneMask & LaneMask).none() || LI.covers(S))
              continue;
            LI.clear();
            LIS.constructMainRangeFromSubranges(LI);
            break;
          }
        }

        continue;
      }

      // For physregs, only update the regunits that actually have a
      // precomputed live range.
      for (MCRegUnitIterator Units(Reg.asMCReg(), &TRI); Units.isValid();
           ++Units)
        if (LiveRange *LR = getRegUnitLI(*Units))
          updateRange(*LR, *Units, LaneBitmask::getNone());
    }
    if (hasRegMask)
      updateRegMaskSlots();
  }

private:
  /// Update a single live range, assuming an instruction has been moved from
  /// OldIdx to NewIdx.
  void updateRange(LiveRange &LR, Register Reg, LaneBitmask LaneMask) {
    // Each range is processed at most once per move.
    if (!Updated.insert(&LR).second)
      return;
    LLVM_DEBUG({
      dbgs() << " ";
      if (Reg.isVirtual()) {
        dbgs() << printReg(Reg);
        if (LaneMask.any())
          dbgs() << " L" << PrintLaneMask(LaneMask);
      } else {
        dbgs() << printRegUnit(Reg, &TRI);
      }
      dbgs() << ":\t" << LR << '\n';
    });
    // Dispatch on the direction of the move.
    if (SlotIndex::isEarlierInstr(OldIdx, NewIdx))
      handleMoveDown(LR);
    else
      handleMoveUp(LR, Reg, LaneMask);
    LLVM_DEBUG(dbgs() << " -->\t" << LR << '\n');
    LR.verify();
  }

  /// Update LR to reflect an instruction has been moved downwards from OldIdx
  /// to NewIdx (OldIdx < NewIdx).
  void handleMoveDown(LiveRange &LR) {
    LiveRange::iterator E = LR.end();
    // Segment going into OldIdx.
    LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());

    // No value live before or after OldIdx? Nothing to do.
    if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
      return;

    LiveRange::iterator OldIdxOut;
    // Do we have a value live-in to OldIdx?
    if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
      // If the live-in value already extends to NewIdx, there is nothing to do.
      if (SlotIndex::isEarlierEqualInstr(NewIdx, OldIdxIn->end))
        return;
      // Aggressively remove all kill flags from the old kill point.
      // Kill flags shouldn't be used while live intervals exist, they will be
      // reinserted by VirtRegRewriter.
      if (MachineInstr *KillMI = LIS.getInstructionFromIndex(OldIdxIn->end))
        for (MachineOperand &MOP : mi_bundle_ops(*KillMI))
          if (MOP.isReg() && MOP.isUse())
            MOP.setIsKill(false);

      // Is there a def before NewIdx which is not OldIdx?
      LiveRange::iterator Next = std::next(OldIdxIn);
      if (Next != E && !SlotIndex::isSameInstr(OldIdx, Next->start) &&
          SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
        // If we are here then OldIdx was just a use but not a def. We only have
        // to ensure liveness extends to NewIdx.
        LiveRange::iterator NewIdxIn =
          LR.advanceTo(Next, NewIdx.getBaseIndex());
        // Extend the segment before NewIdx if necessary.
        if (NewIdxIn == E ||
            !SlotIndex::isEarlierInstr(NewIdxIn->start, NewIdx)) {
          LiveRange::iterator Prev = std::prev(NewIdxIn);
          Prev->end = NewIdx.getRegSlot();
        }
        // Extend OldIdxIn.
        OldIdxIn->end = Next->start;
        return;
      }

      // Adjust OldIdxIn->end to reach NewIdx. This may temporarily make LR
      // invalid by overlapping ranges.
      bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
      OldIdxIn->end = NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber());
      // If this was not a kill, then there was no def and we're done.
      if (!isKill)
        return;

      // Did we have a Def at OldIdx?
      OldIdxOut = Next;
      if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
        return;
    } else {
      OldIdxOut = OldIdxIn;
    }

    // If we are here then there is a Definition at OldIdx. OldIdxOut points
    // to the segment starting there.
    assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
           "No def?");
    VNInfo *OldIdxVNI = OldIdxOut->valno;
    assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");

    // If the defined value extends beyond NewIdx, just move the beginning
    // of the segment to NewIdx.
    SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
    if (SlotIndex::isEarlierInstr(NewIdxDef, OldIdxOut->end)) {
      OldIdxVNI->def = NewIdxDef;
      OldIdxOut->start = OldIdxVNI->def;
      return;
    }

    // If we are here then we have a Definition at OldIdx which ends before
    // NewIdx.

    // Is there an existing Def at NewIdx?
    LiveRange::iterator AfterNewIdx
      = LR.advanceTo(OldIdxOut, NewIdx.getRegSlot());
    bool OldIdxDefIsDead = OldIdxOut->end.isDead();
    if (!OldIdxDefIsDead &&
        SlotIndex::isEarlierInstr(OldIdxOut->end, NewIdxDef)) {
      // OldIdx is not a dead def, and NewIdxDef is inside a new interval.
      VNInfo *DefVNI;
      if (OldIdxOut != LR.begin() &&
          !SlotIndex::isEarlierInstr(std::prev(OldIdxOut)->end,
                                     OldIdxOut->start)) {
        // There is no gap between OldIdxOut and its predecessor anymore,
        // merge them.
        LiveRange::iterator IPrev = std::prev(OldIdxOut);
        DefVNI = OldIdxVNI;
        IPrev->end = OldIdxOut->end;
      } else {
        // The value is live in to OldIdx
        LiveRange::iterator INext = std::next(OldIdxOut);
        assert(INext != E && "Must have following segment");
        // We merge OldIdxOut and its successor. As we're dealing with subreg
        // reordering, there is always a successor to OldIdxOut in the same BB
        // We don't need INext->valno anymore and will reuse for the new segment
        // we create later.
        DefVNI = OldIdxVNI;
        INext->start = OldIdxOut->end;
        INext->valno->def = INext->start;
      }

      // If NewIdx is behind the last segment, extend that and append a new one.
      if (AfterNewIdx == E) {
        // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
        // one position.
        // |- ?/OldIdxOut -| |- X0 -| ... |- Xn -| end
        // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS -| end
        std::copy(std::next(OldIdxOut), E, OldIdxOut);
        // The last segment is undefined now, reuse it for a dead def.
        LiveRange::iterator NewSegment = std::prev(E);
        *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                         DefVNI);
        DefVNI->def = NewIdxDef;

        LiveRange::iterator Prev = std::prev(NewSegment);
        Prev->end = NewIdxDef;
      } else {
        // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
        // one position.
        // |- ?/OldIdxOut -| |- X0 -| ... |- Xn/AfterNewIdx -| |- Next -|
        // => |- X0/OldIdxOut -| ... |- Xn -| |- Xn/AfterNewIdx -| |- Next -|
        std::copy(std::next(OldIdxOut), std::next(AfterNewIdx), OldIdxOut);
        LiveRange::iterator Prev = std::prev(AfterNewIdx);
        // We have two cases:
        if (SlotIndex::isEarlierInstr(Prev->start, NewIdxDef)) {
          // Case 1: NewIdx is inside a liverange. Split this liverange at
          // NewIdxDef into the segment "Prev" followed by "NewSegment".
          LiveRange::iterator NewSegment = AfterNewIdx;
          *NewSegment = LiveRange::Segment(NewIdxDef, Prev->end, Prev->valno);
          Prev->valno->def = NewIdxDef;

          *Prev = LiveRange::Segment(Prev->start, NewIdxDef, DefVNI);
          DefVNI->def = Prev->start;
        } else {
          // Case 2: NewIdx is in a lifetime hole. Keep AfterNewIdx as is and
          // turn Prev into a segment from NewIdx to AfterNewIdx->start.
          *Prev = LiveRange::Segment(NewIdxDef, AfterNewIdx->start, DefVNI);
          DefVNI->def = NewIdxDef;
          assert(DefVNI != AfterNewIdx->valno);
        }
      }
      return;
    }

    if (AfterNewIdx != E &&
        SlotIndex::isSameInstr(AfterNewIdx->start, NewIdxDef)) {
      // There is an existing def at NewIdx. The def at OldIdx is coalesced into
      // that value.
      assert(AfterNewIdx->valno != OldIdxVNI && "Multiple defs of value?");
      LR.removeValNo(OldIdxVNI);
    } else {
      // There was no existing def at NewIdx. We need to create a dead def
      // at NewIdx. Shift segments over the old OldIdxOut segment, this frees
      // a new segment at the place where we want to construct the dead def.
      // |- OldIdxOut -| |- X0 -| ... |- Xn -| |- AfterNewIdx -|
      // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS. -| |- AfterNewIdx -|
      assert(AfterNewIdx != OldIdxOut && "Inconsistent iterators");
      std::copy(std::next(OldIdxOut), AfterNewIdx, OldIdxOut);
      // We can reuse OldIdxVNI now.
      LiveRange::iterator NewSegment = std::prev(AfterNewIdx);
      VNInfo *NewSegmentVNI = OldIdxVNI;
      NewSegmentVNI->def = NewIdxDef;
      *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                       NewSegmentVNI);
    }
  }

  /// Update LR to reflect an instruction has been moved upwards from OldIdx
  /// to NewIdx (NewIdx < OldIdx).
  void handleMoveUp(LiveRange &LR, Register Reg, LaneBitmask LaneMask) {
    LiveRange::iterator E = LR.end();
    // Segment going into OldIdx.
    LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());

    // No value live before or after OldIdx? Nothing to do.
    if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
      return;

    LiveRange::iterator OldIdxOut;
    // Do we have a value live-in to OldIdx?
    if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
      // If the live-in value isn't killed here, then we have no Def at
      // OldIdx, moreover the value must be live at NewIdx so there is nothing
      // to do.
      bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
      if (!isKill)
        return;

      // At this point we have to move OldIdxIn->end back to the nearest
      // previous use or (dead-)def but no further than NewIdx.
      SlotIndex DefBeforeOldIdx
        = std::max(OldIdxIn->start.getDeadSlot(),
                   NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber()));
      OldIdxIn->end = findLastUseBefore(DefBeforeOldIdx, Reg, LaneMask);

      // Did we have a Def at OldIdx? If not we are done now.
      OldIdxOut = std::next(OldIdxIn);
      if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
        return;
    } else {
      OldIdxOut = OldIdxIn;
      OldIdxIn = OldIdxOut != LR.begin() ? std::prev(OldIdxOut) : E;
    }

    // If we are here then there is a Definition at OldIdx. OldIdxOut points
    // to the segment starting there.
    assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
           "No def?");
    VNInfo *OldIdxVNI = OldIdxOut->valno;
    assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");
    bool OldIdxDefIsDead = OldIdxOut->end.isDead();

    // Is there an existing def at NewIdx?
    SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
    LiveRange::iterator NewIdxOut = LR.find(NewIdx.getRegSlot());
    if (SlotIndex::isSameInstr(NewIdxOut->start, NewIdx)) {
      assert(NewIdxOut->valno != OldIdxVNI &&
             "Same value defined more than once?");
      // If OldIdx was a dead def remove it.
      if (!OldIdxDefIsDead) {
        // Remove segment starting at NewIdx and move begin of OldIdxOut to
        // NewIdx so it can take its place.
        OldIdxVNI->def = NewIdxDef;
        OldIdxOut->start = NewIdxDef;
        LR.removeValNo(NewIdxOut->valno);
      } else {
        // Simply remove the dead def at OldIdx.
        LR.removeValNo(OldIdxVNI);
      }
    } else {
      // Previously nothing was live after NewIdx, so all we have to do now is
      // move the begin of OldIdxOut to NewIdx.
      if (!OldIdxDefIsDead) {
        // Do we have any intermediate Defs between OldIdx and NewIdx?
        if (OldIdxIn != E &&
            SlotIndex::isEarlierInstr(NewIdxDef, OldIdxIn->start)) {
          // OldIdx is not a dead def and NewIdx is before predecessor start.
          LiveRange::iterator NewIdxIn = NewIdxOut;
          assert(NewIdxIn == LR.find(NewIdx.getBaseIndex()));
          const SlotIndex SplitPos = NewIdxDef;
          OldIdxVNI = OldIdxIn->valno;

          SlotIndex NewDefEndPoint = std::next(NewIdxIn)->end;
          LiveRange::iterator Prev = std::prev(OldIdxIn);
          if (OldIdxIn != LR.begin() &&
              SlotIndex::isEarlierInstr(NewIdx, Prev->end)) {
            // If the segment before OldIdx read a value defined earlier than
            // NewIdx, the moved instruction also reads and forwards that
            // value. Extend the lifetime of the new def point.

            // Extend to where the previous range started, unless there is
            // another redef first.
            NewDefEndPoint = std::min(OldIdxIn->start,
                                      std::next(NewIdxOut)->start);
          }

          // Merge the OldIdxIn and OldIdxOut segments into OldIdxOut.
          OldIdxOut->valno->def = OldIdxIn->start;
          *OldIdxOut = LiveRange::Segment(OldIdxIn->start, OldIdxOut->end,
                                          OldIdxOut->valno);
          // OldIdxIn and OldIdxVNI are now undef and can be overridden.
          // We Slide [NewIdxIn, OldIdxIn) down one position.
          // |- X0/NewIdxIn -| ... |- Xn-1 -||- Xn/OldIdxIn -||- OldIdxOut -|
          // => |- undef/NexIdxIn -| |- X0 -| ... |- Xn-1 -| |- Xn/OldIdxOut -|
          std::copy_backward(NewIdxIn, OldIdxIn, OldIdxOut);
          // NewIdxIn is now considered undef so we can reuse it for the moved
          // value.
          LiveRange::iterator NewSegment = NewIdxIn;
          LiveRange::iterator Next = std::next(NewSegment);
          if (SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
            // There is no gap between NewSegment and its predecessor.
            *NewSegment = LiveRange::Segment(Next->start, SplitPos,
                                             Next->valno);

            *Next = LiveRange::Segment(SplitPos, NewDefEndPoint, OldIdxVNI);
            Next->valno->def = SplitPos;
          } else {
            // There is a gap between NewSegment and its predecessor
            // Value becomes live in.
            *NewSegment = LiveRange::Segment(SplitPos, Next->start, OldIdxVNI);
            NewSegment->valno->def = SplitPos;
          }
        } else {
          // Leave the end point of a live def.
          OldIdxOut->start = NewIdxDef;
          OldIdxVNI->def = NewIdxDef;
          if (OldIdxIn != E && SlotIndex::isEarlierInstr(NewIdx, OldIdxIn->end))
            OldIdxIn->end = NewIdxDef;
        }
      } else if (OldIdxIn != E
                 && SlotIndex::isEarlierInstr(NewIdxOut->start, NewIdx)
                 && SlotIndex::isEarlierInstr(NewIdx, NewIdxOut->end)) {
        // OldIdxVNI is a dead def that has been moved into the middle of
        // another value in LR. That can happen when LR is a whole register,
        // but the dead def is a write to a subreg that is dead at NewIdx.
        // The dead def may have been moved across other values
        // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
        // down one position.
        // |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
        // => |- X0/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
        std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));

        // Modify the segment at NewIdxOut and the following segment to meet at
        // the point of the dead def, with the following segment getting
        // OldIdxVNI as its value number.
        *NewIdxOut = LiveRange::Segment(
            NewIdxOut->start, NewIdxDef.getRegSlot(), NewIdxOut->valno);
        *(NewIdxOut + 1) = LiveRange::Segment(
            NewIdxDef.getRegSlot(), (NewIdxOut + 1)->end, OldIdxVNI);
        OldIdxVNI->def = NewIdxDef;
        // Modify subsequent segments to be defined by the moved def OldIdxVNI.
        for (auto *Idx = NewIdxOut + 2; Idx <= OldIdxOut; ++Idx)
          Idx->valno = OldIdxVNI;
        // Aggressively remove all dead flags from the former dead definition.
        // Kill/dead flags shouldn't be used while live intervals exist; they
        // will be reinserted by VirtRegRewriter.
        if (MachineInstr *KillMI = LIS.getInstructionFromIndex(NewIdx))
          for (MIBundleOperands MO(*KillMI); MO.isValid(); ++MO)
            if (MO->isReg() && !MO->isUse())
              MO->setIsDead(false);
      } else {
        // OldIdxVNI is a dead def. It may have been moved across other values
        // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
        // down one position.
        // |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
        // => |- undef/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
        std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));

        // OldIdxVNI can be reused now to build a new dead def segment.
        LiveRange::iterator NewSegment = NewIdxOut;
        VNInfo *NewSegmentVNI = OldIdxVNI;
        *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                         NewSegmentVNI);
        NewSegmentVNI->def = NewIdxDef;
      }
    }
  }

  /// Keep the sorted RegMaskSlots array consistent after the regmask
  /// instruction moved from OldIdx to NewIdx. The move must not cross another
  /// regmask slot (asserted below).
  void updateRegMaskSlots() {
    SmallVectorImpl<SlotIndex>::iterator RI =
        llvm::lower_bound(LIS.RegMaskSlots, OldIdx);
    assert(RI != LIS.RegMaskSlots.end() && *RI == OldIdx.getRegSlot() &&
           "No RegMask at OldIdx.");
    *RI = NewIdx.getRegSlot();
    assert((RI == LIS.RegMaskSlots.begin() ||
            SlotIndex::isEarlierInstr(*std::prev(RI), *RI)) &&
           "Cannot move regmask instruction above another call");
    assert((std::next(RI) == LIS.RegMaskSlots.end() ||
            SlotIndex::isEarlierInstr(*RI, *std::next(RI))) &&
           "Cannot move regmask instruction below another call");
  }

  // Return the last use of reg between NewIdx and OldIdx.
  SlotIndex findLastUseBefore(SlotIndex Before, Register Reg,
                              LaneBitmask LaneMask) {
    if (Reg.isVirtual()) {
      // Virtual register: scan the (short) use list.
      SlotIndex LastUse = Before;
      for (MachineOperand &MO : MRI.use_nodbg_operands(Reg)) {
        if (MO.isUndef())
          continue;
        unsigned SubReg = MO.getSubReg();
        if (SubReg != 0 && LaneMask.any()
            && (TRI.getSubRegIndexLaneMask(SubReg) & LaneMask).none())
          continue;

        const MachineInstr &MI = *MO.getParent();
        SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
        if (InstSlot > LastUse && InstSlot < OldIdx)
          LastUse = InstSlot.getRegSlot();
      }
      return LastUse;
    }

    // This is a regunit interval, so scanning the use list could be very
    // expensive. Scan upwards from OldIdx instead.
    assert(Before < OldIdx && "Expected upwards move");
    SlotIndexes *Indexes = LIS.getSlotIndexes();
    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Before);

    // OldIdx may not correspond to an instruction any longer, so set MII to
    // point to the next instruction after OldIdx, or MBB->end().
    MachineBasicBlock::iterator MII = MBB->end();
    if (MachineInstr *MI = Indexes->getInstructionFromIndex(
            Indexes->getNextNonNullIndex(OldIdx)))
      if (MI->getParent() == MBB)
        MII = MI;

    MachineBasicBlock::iterator Begin = MBB->begin();
    while (MII != Begin) {
      if ((--MII)->isDebugOrPseudoInstr())
        continue;
      SlotIndex Idx = Indexes->getInstructionIndex(*MII);

      // Stop searching when Before is reached.
      if (!SlotIndex::isEarlierInstr(Before, Idx))
        return Before;

      // Check if MII uses Reg.
      for (MIBundleOperands MO(*MII); MO.isValid(); ++MO)
        if (MO->isReg() && !MO->isUndef() && MO->getReg().isPhysical() &&
            TRI.hasRegUnit(MO->getReg(), Reg))
          return Idx.getRegSlot();
    }
    // Didn't reach Before. It must be the first instruction in the block.
    return Before;
  }
};
  1352. void LiveIntervals::handleMove(MachineInstr &MI, bool UpdateFlags) {
  1353. // It is fine to move a bundle as a whole, but not an individual instruction
  1354. // inside it.
  1355. assert((!MI.isBundled() || MI.getOpcode() == TargetOpcode::BUNDLE) &&
  1356. "Cannot move instruction in bundle");
  1357. SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  1358. Indexes->removeMachineInstrFromMaps(MI);
  1359. SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(MI);
  1360. assert(getMBBStartIdx(MI.getParent()) <= OldIndex &&
  1361. OldIndex < getMBBEndIdx(MI.getParent()) &&
  1362. "Cannot handle moves across basic block boundaries.");
  1363. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  1364. HME.updateAllRanges(&MI);
  1365. }
  1366. void LiveIntervals::handleMoveIntoNewBundle(MachineInstr &BundleStart,
  1367. bool UpdateFlags) {
  1368. assert((BundleStart.getOpcode() == TargetOpcode::BUNDLE) &&
  1369. "Bundle start is not a bundle");
  1370. SmallVector<SlotIndex, 16> ToProcess;
  1371. const SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(BundleStart);
  1372. auto BundleEnd = getBundleEnd(BundleStart.getIterator());
  1373. auto I = BundleStart.getIterator();
  1374. I++;
  1375. while (I != BundleEnd) {
  1376. if (!Indexes->hasIndex(*I))
  1377. continue;
  1378. SlotIndex OldIndex = Indexes->getInstructionIndex(*I, true);
  1379. ToProcess.push_back(OldIndex);
  1380. Indexes->removeMachineInstrFromMaps(*I, true);
  1381. I++;
  1382. }
  1383. for (SlotIndex OldIndex : ToProcess) {
  1384. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  1385. HME.updateAllRanges(&BundleStart);
  1386. }
  1387. // Fix up dead defs
  1388. const SlotIndex Index = getInstructionIndex(BundleStart);
  1389. for (unsigned Idx = 0, E = BundleStart.getNumOperands(); Idx != E; ++Idx) {
  1390. MachineOperand &MO = BundleStart.getOperand(Idx);
  1391. if (!MO.isReg())
  1392. continue;
  1393. Register Reg = MO.getReg();
  1394. if (Reg.isVirtual() && hasInterval(Reg) && !MO.isUndef()) {
  1395. LiveInterval &LI = getInterval(Reg);
  1396. LiveQueryResult LRQ = LI.Query(Index);
  1397. if (LRQ.isDeadDef())
  1398. MO.setIsDead();
  1399. }
  1400. }
  1401. }
/// Repair live range \p LR for register \p Reg over the instruction range
/// [\p Begin, \p End) after instructions were removed/inserted there. The
/// range is walked bottom-up; segment starts/ends that no longer correspond
/// to real instructions are re-anchored at the defs/uses found in the region.
/// Only operands whose lanes overlap \p LaneMask are considered.
void LiveIntervals::repairOldRegInRange(const MachineBasicBlock::iterator Begin,
                                        const MachineBasicBlock::iterator End,
                                        const SlotIndex EndIdx, LiveRange &LR,
                                        const Register Reg,
                                        LaneBitmask LaneMask) {
  // Position LII at the segment covering (or nearest below) EndIdx;
  // lastUseIdx tracks the end point for segments we may have to rebuild.
  LiveInterval::iterator LII = LR.find(EndIdx);
  SlotIndex lastUseIdx;
  if (LII != LR.end() && LII->start < EndIdx) {
    lastUseIdx = LII->end;
  } else if (LII == LR.begin()) {
    // We may not have a liverange at all if this is a subregister untouched
    // between \p Begin and \p End.
  } else {
    --LII;
  }

  // Walk the region bottom-up so uses are seen before the defs that feed
  // them.
  for (MachineBasicBlock::iterator I = End; I != Begin;) {
    --I;
    MachineInstr &MI = *I;
    // Debug/pseudo instructions have no slot index; skip them.
    if (MI.isDebugOrPseudoInstr())
      continue;

    SlotIndex instrIdx = getInstructionIndex(MI);
    // A start/end that maps to no instruction is stale and needs repair.
    bool isStartValid = getInstructionFromIndex(LII->start);
    bool isEndValid = getInstructionFromIndex(LII->end);

    // FIXME: This doesn't currently handle early-clobber or multiple removed
    // defs inside of the region to repair.
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg() || MO.getReg() != Reg)
        continue;

      // Ignore operands whose lanes don't overlap the requested mask.
      unsigned SubReg = MO.getSubReg();
      LaneBitmask Mask = TRI->getSubRegIndexLaneMask(SubReg);
      if ((Mask & LaneMask).none())
        continue;

      if (MO.isDef()) {
        if (!isStartValid) {
          if (LII->end.isDead()) {
            // The whole segment was a stale dead def; drop it and step back
            // to the previous segment (if any) to continue repairing.
            LII = LR.removeSegment(LII, true);
            if (LII != LR.begin())
              --LII;
          } else {
            // Re-anchor the existing segment (and its value) at this def.
            LII->start = instrIdx.getRegSlot();
            LII->valno->def = instrIdx.getRegSlot();
            // A subregister def that isn't undef also reads the register,
            // so it acts as a use for segments further up.
            if (MO.getSubReg() && !MO.isUndef())
              lastUseIdx = instrIdx.getRegSlot();
            else
              lastUseIdx = SlotIndex();
            continue;
          }
        }

        if (!lastUseIdx.isValid()) {
          // No use seen below this def: create a dead-def segment.
          VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
          LiveRange::Segment S(instrIdx.getRegSlot(),
                               instrIdx.getDeadSlot(), VNI);
          LII = LR.addSegment(S);
        } else if (LII->start != instrIdx.getRegSlot()) {
          // Create a new segment from this def to the last use seen so far.
          VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
          LiveRange::Segment S(instrIdx.getRegSlot(), lastUseIdx, VNI);
          LII = LR.addSegment(S);
        }

        // See above: a live subregister def also counts as a read.
        if (MO.getSubReg() && !MO.isUndef())
          lastUseIdx = instrIdx.getRegSlot();
        else
          lastUseIdx = SlotIndex();
      } else if (MO.isUse()) {
        // FIXME: This should probably be handled outside of this branch,
        // either as part of the def case (for defs inside of the region) or
        // after the loop over the region.
        if (!isEndValid && !LII->end.isBlock())
          LII->end = instrIdx.getRegSlot();
        if (!lastUseIdx.isValid())
          lastUseIdx = instrIdx.getRegSlot();
      }
    }
  }

  // If the topmost segment still starts at a removed instruction and is
  // dead, no def in the region rescued it: drop it entirely.
  bool isStartValid = getInstructionFromIndex(LII->start);
  if (!isStartValid && LII->end.isDead())
    LR.removeSegment(*LII, true);
}
  1479. void
  1480. LiveIntervals::repairIntervalsInRange(MachineBasicBlock *MBB,
  1481. MachineBasicBlock::iterator Begin,
  1482. MachineBasicBlock::iterator End,
  1483. ArrayRef<Register> OrigRegs) {
  1484. // Find anchor points, which are at the beginning/end of blocks or at
  1485. // instructions that already have indexes.
  1486. while (Begin != MBB->begin() && !Indexes->hasIndex(*std::prev(Begin)))
  1487. --Begin;
  1488. while (End != MBB->end() && !Indexes->hasIndex(*End))
  1489. ++End;
  1490. SlotIndex EndIdx;
  1491. if (End == MBB->end())
  1492. EndIdx = getMBBEndIdx(MBB).getPrevSlot();
  1493. else
  1494. EndIdx = getInstructionIndex(*End);
  1495. Indexes->repairIndexesInRange(MBB, Begin, End);
  1496. // Make sure a live interval exists for all register operands in the range.
  1497. SmallVector<Register> RegsToRepair(OrigRegs.begin(), OrigRegs.end());
  1498. for (MachineBasicBlock::iterator I = End; I != Begin;) {
  1499. --I;
  1500. MachineInstr &MI = *I;
  1501. if (MI.isDebugOrPseudoInstr())
  1502. continue;
  1503. for (const MachineOperand &MO : MI.operands()) {
  1504. if (MO.isReg() && MO.getReg().isVirtual()) {
  1505. Register Reg = MO.getReg();
  1506. // If the new instructions refer to subregs but the old instructions did
  1507. // not, throw away any old live interval so it will be recomputed with
  1508. // subranges.
  1509. if (MO.getSubReg() && hasInterval(Reg) &&
  1510. !getInterval(Reg).hasSubRanges() &&
  1511. MRI->shouldTrackSubRegLiveness(Reg))
  1512. removeInterval(Reg);
  1513. if (!hasInterval(Reg)) {
  1514. createAndComputeVirtRegInterval(Reg);
  1515. // Don't bother to repair a freshly calculated live interval.
  1516. erase_value(RegsToRepair, Reg);
  1517. }
  1518. }
  1519. }
  1520. }
  1521. for (Register Reg : RegsToRepair) {
  1522. if (!Reg.isVirtual())
  1523. continue;
  1524. LiveInterval &LI = getInterval(Reg);
  1525. // FIXME: Should we support undefs that gain defs?
  1526. if (!LI.hasAtLeastOneValue())
  1527. continue;
  1528. for (LiveInterval::SubRange &S : LI.subranges())
  1529. repairOldRegInRange(Begin, End, EndIdx, S, Reg, S.LaneMask);
  1530. LI.removeEmptySubRanges();
  1531. repairOldRegInRange(Begin, End, EndIdx, LI, Reg);
  1532. }
  1533. }
  1534. void LiveIntervals::removePhysRegDefAt(MCRegister Reg, SlotIndex Pos) {
  1535. for (MCRegUnitIterator Unit(Reg, TRI); Unit.isValid(); ++Unit) {
  1536. if (LiveRange *LR = getCachedRegUnit(*Unit))
  1537. if (VNInfo *VNI = LR->getVNInfoAt(Pos))
  1538. LR->removeValNo(VNI);
  1539. }
  1540. }
  1541. void LiveIntervals::removeVRegDefAt(LiveInterval &LI, SlotIndex Pos) {
  1542. // LI may not have the main range computed yet, but its subranges may
  1543. // be present.
  1544. VNInfo *VNI = LI.getVNInfoAt(Pos);
  1545. if (VNI != nullptr) {
  1546. assert(VNI->def.getBaseIndex() == Pos.getBaseIndex());
  1547. LI.removeValNo(VNI);
  1548. }
  1549. // Also remove the value defined in subranges.
  1550. for (LiveInterval::SubRange &S : LI.subranges()) {
  1551. if (VNInfo *SVNI = S.getVNInfoAt(Pos))
  1552. if (SVNI->def.getBaseIndex() == Pos.getBaseIndex())
  1553. S.removeValNo(SVNI);
  1554. }
  1555. LI.removeEmptySubRanges();
  1556. }
  1557. void LiveIntervals::splitSeparateComponents(LiveInterval &LI,
  1558. SmallVectorImpl<LiveInterval*> &SplitLIs) {
  1559. ConnectedVNInfoEqClasses ConEQ(*this);
  1560. unsigned NumComp = ConEQ.Classify(LI);
  1561. if (NumComp <= 1)
  1562. return;
  1563. LLVM_DEBUG(dbgs() << " Split " << NumComp << " components: " << LI << '\n');
  1564. Register Reg = LI.reg();
  1565. for (unsigned I = 1; I < NumComp; ++I) {
  1566. Register NewVReg = MRI->cloneVirtualRegister(Reg);
  1567. LiveInterval &NewLI = createEmptyInterval(NewVReg);
  1568. SplitLIs.push_back(&NewLI);
  1569. }
  1570. ConEQ.Distribute(LI, SplitLIs.data(), *MRI);
  1571. }
  1572. void LiveIntervals::constructMainRangeFromSubranges(LiveInterval &LI) {
  1573. assert(LICalc && "LICalc not initialized.");
  1574. LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  1575. LICalc->constructMainRangeFromSubranges(LI);
  1576. }