LiveIntervals.cpp 66 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765
  1. //===- LiveIntervals.cpp - Live Interval Analysis -------------------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. /// \file This file implements the LiveInterval analysis pass which is used
  10. /// by the Linear Scan Register allocator. This pass linearizes the
  11. /// basic blocks of the function in DFS order and computes live intervals for
  12. /// each virtual and physical register.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #include "llvm/CodeGen/LiveIntervals.h"
  16. #include "llvm/ADT/ArrayRef.h"
  17. #include "llvm/ADT/DepthFirstIterator.h"
  18. #include "llvm/ADT/SmallPtrSet.h"
  19. #include "llvm/ADT/SmallVector.h"
  20. #include "llvm/ADT/iterator_range.h"
  21. #include "llvm/Analysis/AliasAnalysis.h"
  22. #include "llvm/CodeGen/LiveInterval.h"
  23. #include "llvm/CodeGen/LiveIntervalCalc.h"
  24. #include "llvm/CodeGen/LiveVariables.h"
  25. #include "llvm/CodeGen/MachineBasicBlock.h"
  26. #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
  27. #include "llvm/CodeGen/MachineDominators.h"
  28. #include "llvm/CodeGen/MachineFunction.h"
  29. #include "llvm/CodeGen/MachineInstr.h"
  30. #include "llvm/CodeGen/MachineInstrBundle.h"
  31. #include "llvm/CodeGen/MachineOperand.h"
  32. #include "llvm/CodeGen/MachineRegisterInfo.h"
  33. #include "llvm/CodeGen/Passes.h"
  34. #include "llvm/CodeGen/SlotIndexes.h"
  35. #include "llvm/CodeGen/TargetRegisterInfo.h"
  36. #include "llvm/CodeGen/TargetSubtargetInfo.h"
  37. #include "llvm/CodeGen/VirtRegMap.h"
  38. #include "llvm/Config/llvm-config.h"
  39. #include "llvm/IR/InstrTypes.h"
  40. #include "llvm/IR/Statepoint.h"
  41. #include "llvm/MC/LaneBitmask.h"
  42. #include "llvm/MC/MCRegisterInfo.h"
  43. #include "llvm/Pass.h"
  44. #include "llvm/Support/BlockFrequency.h"
  45. #include "llvm/Support/CommandLine.h"
  46. #include "llvm/Support/Compiler.h"
  47. #include "llvm/Support/Debug.h"
  48. #include "llvm/Support/MathExtras.h"
  49. #include "llvm/Support/raw_ostream.h"
  50. #include "llvm/CodeGen/StackMaps.h"
  51. #include <algorithm>
  52. #include <cassert>
  53. #include <cstdint>
  54. #include <iterator>
  55. #include <tuple>
  56. #include <utility>
  57. using namespace llvm;
  58. #define DEBUG_TYPE "regalloc"
// Pass identification: the *address* of ID is the unique key, not its value.
char LiveIntervals::ID = 0;
// Exported handle so other passes can name this pass by ID.
char &llvm::LiveIntervalsID = LiveIntervals::ID;
// Register the pass and its analysis dependencies with the pass registry.
INITIALIZE_PASS_BEGIN(LiveIntervals, "liveintervals",
                      "Live Interval Analysis", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_END(LiveIntervals, "liveintervals",
                    "Live Interval Analysis", false, false)
#ifndef NDEBUG
// Debug-only stress-testing knob: eagerly compute live ranges for every
// physical register unit (see runOnMachineFunction).
static cl::opt<bool> EnablePrecomputePhysRegs(
    "precompute-phys-liveness", cl::Hidden,
    cl::desc("Eagerly compute live intervals for all physreg units."));
#else
// In release builds the option collapses to a constant-false flag.
static bool EnablePrecomputePhysRegs = false;
#endif // NDEBUG
namespace llvm {
// When set (default), physreg live ranges are built via a segment set and
// flushed to the segment vector afterwards (see computeRegUnitRange), which
// speeds up the initial computation.
cl::opt<bool> UseSegmentSetForPhysRegs(
    "use-segment-set-for-physregs", cl::Hidden, cl::init(true),
    cl::desc(
        "Use segment set for the computation of the live ranges of physregs."));
} // end namespace llvm
/// Declare the analyses this pass requires and preserves.
void LiveIntervals::getAnalysisUsage(AnalysisUsage &AU) const {
  // Computing live intervals never changes the CFG.
  AU.setPreservesCFG();
  AU.addRequired<AAResultsWrapperPass>();
  AU.addPreserved<AAResultsWrapperPass>();
  AU.addPreserved<LiveVariables>();
  AU.addPreservedID(MachineLoopInfoID);
  // Transitive requirements: passes that depend on LiveIntervals may also
  // query the dominator tree and slot indexes through us.
  AU.addRequiredTransitiveID(MachineDominatorsID);
  AU.addPreservedID(MachineDominatorsID);
  AU.addPreserved<SlotIndexes>();
  AU.addRequiredTransitive<SlotIndexes>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
LiveIntervals::LiveIntervals() : MachineFunctionPass(ID) {
  initializeLiveIntervalsPass(*PassRegistry::getPassRegistry());
}

// LICalc is lazily allocated in runOnMachineFunction and reused across
// functions; it is only freed here.
LiveIntervals::~LiveIntervals() { delete LICalc; }
  97. void LiveIntervals::releaseMemory() {
  98. // Free the live intervals themselves.
  99. for (unsigned i = 0, e = VirtRegIntervals.size(); i != e; ++i)
  100. delete VirtRegIntervals[Register::index2VirtReg(i)];
  101. VirtRegIntervals.clear();
  102. RegMaskSlots.clear();
  103. RegMaskBits.clear();
  104. RegMaskBlocks.clear();
  105. for (LiveRange *LR : RegUnitRanges)
  106. delete LR;
  107. RegUnitRanges.clear();
  108. // Release VNInfo memory regions, VNInfo objects don't need to be dtor'd.
  109. VNInfoAllocator.Reset();
  110. }
/// Entry point: compute live intervals for every virtual register and the
/// register-mask/reg-unit information for \p fn. Always returns true because
/// kill/dead flags on instructions may be updated.
bool LiveIntervals::runOnMachineFunction(MachineFunction &fn) {
  // Cache the function-level objects and required analyses.
  MF = &fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  Indexes = &getAnalysis<SlotIndexes>();
  DomTree = &getAnalysis<MachineDominatorTree>();
  // The calculator object is reused across functions; allocate on first use.
  if (!LICalc)
    LICalc = new LiveIntervalCalc();
  // Allocate space for all virtual registers.
  VirtRegIntervals.resize(MRI->getNumVirtRegs());
  computeVirtRegs();
  computeRegMasks();
  computeLiveInRegUnits();
  if (EnablePrecomputePhysRegs) {
    // For stress testing, precompute live ranges of all physical register
    // units, including reserved registers.
    for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
      getRegUnit(i);
  }
  LLVM_DEBUG(dump());
  return true;
}
  135. void LiveIntervals::print(raw_ostream &OS, const Module* ) const {
  136. OS << "********** INTERVALS **********\n";
  137. // Dump the regunits.
  138. for (unsigned Unit = 0, UnitE = RegUnitRanges.size(); Unit != UnitE; ++Unit)
  139. if (LiveRange *LR = RegUnitRanges[Unit])
  140. OS << printRegUnit(Unit, TRI) << ' ' << *LR << '\n';
  141. // Dump the virtregs.
  142. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  143. Register Reg = Register::index2VirtReg(i);
  144. if (hasInterval(Reg))
  145. OS << getInterval(Reg) << '\n';
  146. }
  147. OS << "RegMasks:";
  148. for (SlotIndex Idx : RegMaskSlots)
  149. OS << ' ' << Idx;
  150. OS << '\n';
  151. printInstrs(OS);
  152. }
/// Print the machine function with slot-index annotations.
void LiveIntervals::printInstrs(raw_ostream &OS) const {
  OS << "********** MACHINEINSTRS **********\n";
  MF->print(OS, Indexes);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Debugger helper: dump annotated instructions to dbgs().
LLVM_DUMP_METHOD void LiveIntervals::dumpInstrs() const {
  printInstrs(dbgs());
}
#endif
  162. LiveInterval *LiveIntervals::createInterval(Register reg) {
  163. float Weight = Register::isPhysicalRegister(reg) ? huge_valf : 0.0F;
  164. return new LiveInterval(reg, Weight);
  165. }
/// Compute the live interval of a virtual register, based on defs and uses.
/// \returns true if the interval may consist of multiple connected
/// components (propagated from computeDeadValues).
bool LiveIntervals::computeVirtRegInterval(LiveInterval &LI) {
  assert(LICalc && "LICalc not initialized.");
  assert(LI.empty() && "Should only compute empty intervals.");
  LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  // Track subregister lane liveness only when MRI requests it for this reg.
  LICalc->calculate(LI, MRI->shouldTrackSubRegLiveness(LI.reg()));
  return computeDeadValues(LI, nullptr);
}
  174. void LiveIntervals::computeVirtRegs() {
  175. for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
  176. Register Reg = Register::index2VirtReg(i);
  177. if (MRI->reg_nodbg_empty(Reg))
  178. continue;
  179. LiveInterval &LI = createEmptyInterval(Reg);
  180. bool NeedSplit = computeVirtRegInterval(LI);
  181. if (NeedSplit) {
  182. SmallVector<LiveInterval*, 8> SplitLIs;
  183. splitSeparateComponents(LI, SplitLIs);
  184. }
  185. }
  186. }
/// Record the slot index and clobber mask of every regmask in the function.
/// RegMaskSlots/RegMaskBits are parallel arrays; RegMaskBlocks records, per
/// basic block, the (first index, count) slice of those arrays. Masks are
/// visited in block/instruction order, so the slot list stays sorted.
void LiveIntervals::computeRegMasks() {
  RegMaskBlocks.resize(MF->getNumBlockIDs());
  // Find all instructions with regmask operands.
  for (const MachineBasicBlock &MBB : *MF) {
    std::pair<unsigned, unsigned> &RMB = RegMaskBlocks[MBB.getNumber()];
    RMB.first = RegMaskSlots.size();
    // Some block starts, such as EH funclets, create masks.
    if (const uint32_t *Mask = MBB.getBeginClobberMask(TRI)) {
      RegMaskSlots.push_back(Indexes->getMBBStartIdx(&MBB));
      RegMaskBits.push_back(Mask);
    }
    // Unwinders may clobber additional registers.
    // FIXME: This functionality can possibly be merged into
    // MachineBasicBlock::getBeginClobberMask().
    if (MBB.isEHPad())
      if (auto *Mask = TRI->getCustomEHPadPreservedMask(*MBB.getParent())) {
        RegMaskSlots.push_back(Indexes->getMBBStartIdx(&MBB));
        RegMaskBits.push_back(Mask);
      }
    // Regmask operands on ordinary instructions (typically calls).
    for (const MachineInstr &MI : MBB) {
      for (const MachineOperand &MO : MI.operands()) {
        if (!MO.isRegMask())
          continue;
        RegMaskSlots.push_back(Indexes->getInstructionIndex(MI).getRegSlot());
        RegMaskBits.push_back(MO.getRegMask());
      }
    }
    // Some block ends, such as funclet returns, create masks. Put the mask on
    // the last instruction of the block, because MBB slot index intervals are
    // half-open.
    if (const uint32_t *Mask = MBB.getEndClobberMask(TRI)) {
      assert(!MBB.empty() && "empty return block?");
      RegMaskSlots.push_back(
          Indexes->getInstructionIndex(MBB.back()).getRegSlot());
      RegMaskBits.push_back(Mask);
    }
    // Compute the number of register mask instructions in this block.
    RMB.second = RegMaskSlots.size() - RMB.first;
  }
}
  227. //===----------------------------------------------------------------------===//
  228. // Register Unit Liveness
  229. //===----------------------------------------------------------------------===//
  230. //
  231. // Fixed interference typically comes from ABI boundaries: Function arguments
  232. // and return values are passed in fixed registers, and so are exception
  233. // pointers entering landing pads. Certain instructions require values to be
  234. // present in specific registers. That is also represented through fixed
  235. // interference.
  236. //
/// Compute the live range of a register unit, based on the uses and defs of
/// aliasing registers. The range should be empty, or contain only dead
/// phi-defs from ABI blocks.
void LiveIntervals::computeRegUnitRange(LiveRange &LR, unsigned Unit) {
  assert(LICalc && "LICalc not initialized.");
  LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  // The physregs aliasing Unit are the roots and their super-registers.
  // Create all values as dead defs before extending to uses. Note that roots
  // may share super-registers. That's OK because createDeadDefs() is
  // idempotent. It is very rare for a register unit to have multiple roots, so
  // uniquing super-registers is probably not worthwhile.
  bool IsReserved = false;
  for (MCRegUnitRootIterator Root(Unit, TRI); Root.isValid(); ++Root) {
    bool IsRootReserved = true;
    for (MCSuperRegIterator Super(*Root, TRI, /*IncludeSelf=*/true);
         Super.isValid(); ++Super) {
      MCRegister Reg = *Super;
      if (!MRI->reg_empty(Reg))
        LICalc->createDeadDefs(LR, Reg);
      // A register unit is considered reserved if all its roots and all their
      // super registers are reserved.
      if (!MRI->isReserved(Reg))
        IsRootReserved = false;
    }
    // The unit is reserved if *any* root (with all its supers) is reserved.
    IsReserved |= IsRootReserved;
  }
  // Cross-check our local computation against MRI's notion of reserved units.
  assert(IsReserved == MRI->isReservedRegUnit(Unit) &&
         "reserved computation mismatch");
  // Now extend LR to reach all uses.
  // Ignore uses of reserved registers. We only track defs of those.
  if (!IsReserved) {
    for (MCRegUnitRootIterator Root(Unit, TRI); Root.isValid(); ++Root) {
      for (MCSuperRegIterator Super(*Root, TRI, /*IncludeSelf=*/true);
           Super.isValid(); ++Super) {
        MCRegister Reg = *Super;
        if (!MRI->reg_empty(Reg))
          LICalc->extendToUses(LR, Reg);
      }
    }
  }
  // Flush the segment set to the segment vector.
  if (UseSegmentSetForPhysRegs)
    LR.flushSegmentSet();
}
/// Precompute the live ranges of any register units that are live-in to an ABI
/// block somewhere. Register values can appear without a corresponding def when
/// entering the entry block or a landing pad.
void LiveIntervals::computeLiveInRegUnits() {
  RegUnitRanges.resize(TRI->getNumRegUnits());
  LLVM_DEBUG(dbgs() << "Computing live-in reg-units in ABI blocks.\n");
  // Keep track of the live range sets allocated.
  SmallVector<unsigned, 8> NewRanges;
  // Check all basic blocks for live-ins.
  for (const MachineBasicBlock &MBB : *MF) {
    // We only care about ABI blocks: Entry + landing pads.
    if ((&MBB != &MF->front() && !MBB.isEHPad()) || MBB.livein_empty())
      continue;
    // Create phi-defs at Begin for all live-in registers.
    SlotIndex Begin = Indexes->getMBBStartIdx(&MBB);
    LLVM_DEBUG(dbgs() << Begin << "\t" << printMBBReference(MBB));
    for (const auto &LI : MBB.liveins()) {
      // Every live-in physreg maps to one or more register units.
      for (MCRegUnitIterator Units(LI.PhysReg, TRI); Units.isValid(); ++Units) {
        unsigned Unit = *Units;
        LiveRange *LR = RegUnitRanges[Unit];
        if (!LR) {
          // Use segment set to speed-up initial computation of the live range.
          LR = RegUnitRanges[Unit] = new LiveRange(UseSegmentSetForPhysRegs);
          NewRanges.push_back(Unit);
        }
        VNInfo *VNI = LR->createDeadDef(Begin, getVNInfoAllocator());
        (void)VNI;
        LLVM_DEBUG(dbgs() << ' ' << printRegUnit(Unit, TRI) << '#' << VNI->id);
      }
    }
    LLVM_DEBUG(dbgs() << '\n');
  }
  LLVM_DEBUG(dbgs() << "Created " << NewRanges.size() << " new intervals.\n");
  // Compute the 'normal' part of the ranges (defs and uses of aliasing regs)
  // only for the units we just seeded with dead phi-defs.
  for (unsigned Unit : NewRanges)
    computeRegUnitRange(*RegUnitRanges[Unit], Unit);
}
  318. static void createSegmentsForValues(LiveRange &LR,
  319. iterator_range<LiveInterval::vni_iterator> VNIs) {
  320. for (VNInfo *VNI : VNIs) {
  321. if (VNI->isUnused())
  322. continue;
  323. SlotIndex Def = VNI->def;
  324. LR.addSegment(LiveRange::Segment(Def, Def.getDeadSlot(), VNI));
  325. }
  326. }
/// Extend the minimal segments in \p Segments until each value is live at the
/// use points recorded in \p WorkList, walking backwards across basic block
/// boundaries (and through PHI predecessors) as needed. \p LaneMask selects
/// which subrange of Reg's interval supplies the old value numbers; a none
/// mask means the main range.
void LiveIntervals::extendSegmentsToUses(LiveRange &Segments,
                                         ShrinkToUsesWorkList &WorkList,
                                         Register Reg, LaneBitmask LaneMask) {
  // Keep track of the PHIs that are in use.
  SmallPtrSet<VNInfo*, 8> UsedPHIs;
  // Blocks that have already been added to WorkList as live-out.
  SmallPtrSet<const MachineBasicBlock*, 16> LiveOut;
  // Select the main range or the subrange matching LaneMask exactly.
  auto getSubRange = [](const LiveInterval &I, LaneBitmask M)
        -> const LiveRange& {
    if (M.none())
      return I;
    for (const LiveInterval::SubRange &SR : I.subranges()) {
      if ((SR.LaneMask & M).any()) {
        assert(SR.LaneMask == M && "Expecting lane masks to match exactly");
        return SR;
      }
    }
    llvm_unreachable("Subrange for mask not found");
  };
  const LiveInterval &LI = getInterval(Reg);
  const LiveRange &OldRange = getSubRange(LI, LaneMask);
  // Extend intervals to reach all uses in WorkList.
  while (!WorkList.empty()) {
    SlotIndex Idx = WorkList.back().first;
    VNInfo *VNI = WorkList.back().second;
    WorkList.pop_back();
    // Idx is a use slot; the instruction lives in the block containing the
    // previous slot.
    const MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Idx.getPrevSlot());
    SlotIndex BlockStart = Indexes->getMBBStartIdx(MBB);
    // Extend the live range for VNI to be live at Idx.
    if (VNInfo *ExtVNI = Segments.extendInBlock(BlockStart, Idx)) {
      // extendInBlock succeeded: the value was already live in this block.
      assert(ExtVNI == VNI && "Unexpected existing value number");
      (void)ExtVNI;
      // Is this a PHIDef we haven't seen before?
      if (!VNI->isPHIDef() || VNI->def != BlockStart ||
          !UsedPHIs.insert(VNI).second)
        continue;
      // The PHI is live, make sure the predecessors are live-out.
      for (const MachineBasicBlock *Pred : MBB->predecessors()) {
        if (!LiveOut.insert(Pred).second)
          continue;
        SlotIndex Stop = Indexes->getMBBEndIdx(Pred);
        // A predecessor is not required to have a live-out value for a PHI.
        if (VNInfo *PVNI = OldRange.getVNInfoBefore(Stop))
          WorkList.push_back(std::make_pair(Stop, PVNI));
      }
      continue;
    }
    // VNI is live-in to MBB.
    LLVM_DEBUG(dbgs() << " live-in at " << BlockStart << '\n');
    Segments.addSegment(LiveRange::Segment(BlockStart, Idx, VNI));
    // Make sure VNI is live-out from the predecessors.
    for (const MachineBasicBlock *Pred : MBB->predecessors()) {
      if (!LiveOut.insert(Pred).second)
        continue;
      SlotIndex Stop = Indexes->getMBBEndIdx(Pred);
      if (VNInfo *OldVNI = OldRange.getVNInfoBefore(Stop)) {
        assert(OldVNI == VNI && "Wrong value out of predecessor");
        (void)OldVNI;
        WorkList.push_back(std::make_pair(Stop, VNI));
      } else {
#ifndef NDEBUG
        // There was no old VNI. Verify that Stop is jointly dominated
        // by <undef>s for this live range.
        assert(LaneMask.any() &&
               "Missing value out of predecessor for main range");
        SmallVector<SlotIndex,8> Undefs;
        LI.computeSubRangeUndefs(Undefs, LaneMask, *MRI, *Indexes);
        assert(LiveRangeCalc::isJointlyDominated(Pred, Undefs, *Indexes) &&
               "Missing value out of predecessor for subrange");
#endif
      }
    }
  }
}
/// Shrink \p li to the minimal set of segments covering its actual uses.
/// Optionally collects fully-dead defining instructions in \p dead.
/// \returns true if the shrunk interval may have split into multiple
/// connected components (from computeDeadValues).
bool LiveIntervals::shrinkToUses(LiveInterval *li,
                                 SmallVectorImpl<MachineInstr*> *dead) {
  LLVM_DEBUG(dbgs() << "Shrink: " << *li << '\n');
  assert(Register::isVirtualRegister(li->reg()) &&
         "Can only shrink virtual registers");
  // Shrink subregister live ranges.
  bool NeedsCleanup = false;
  for (LiveInterval::SubRange &S : li->subranges()) {
    shrinkToUses(S, li->reg());
    if (S.empty())
      NeedsCleanup = true;
  }
  if (NeedsCleanup)
    li->removeEmptySubRanges();
  // Find all the values used, including PHI kills.
  ShrinkToUsesWorkList WorkList;
  // Visit all instructions reading li->reg().
  Register Reg = li->reg();
  for (MachineInstr &UseMI : MRI->reg_instructions(Reg)) {
    // Debug instructions and pure defs don't constitute reads.
    if (UseMI.isDebugInstr() || !UseMI.readsVirtualRegister(Reg))
      continue;
    SlotIndex Idx = getInstructionIndex(UseMI).getRegSlot();
    LiveQueryResult LRQ = li->Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    if (!VNI) {
      // This shouldn't happen: readsVirtualRegister returns true, but there is
      // no live value. It is likely caused by a target getting <undef> flags
      // wrong.
      LLVM_DEBUG(
          dbgs() << Idx << '\t' << UseMI
                 << "Warning: Instr claims to read non-existent value in "
                 << *li << '\n');
      continue;
    }
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;
    WorkList.push_back(std::make_pair(Idx, VNI));
  }
  // Create new live ranges with only minimal live segments per def.
  LiveRange NewLR;
  createSegmentsForValues(NewLR, make_range(li->vni_begin(), li->vni_end()));
  extendSegmentsToUses(NewLR, WorkList, Reg, LaneBitmask::getNone());
  // Move the trimmed segments back.
  li->segments.swap(NewLR.segments);
  // Handle dead values.
  bool CanSeparate = computeDeadValues(*li, dead);
  LLVM_DEBUG(dbgs() << "Shrunk: " << *li << '\n');
  return CanSeparate;
}
/// Walk the value numbers of \p LI, marking dead PHIs unused, flagging dead
/// defs on their defining instructions, and (optionally) collecting fully
/// dead instructions into \p dead.
/// \returns true when removed dead PHIs or multiple dead defs mean the
/// interval may now consist of separate connected components.
bool LiveIntervals::computeDeadValues(LiveInterval &LI,
                                      SmallVectorImpl<MachineInstr*> *dead) {
  bool MayHaveSplitComponents = false;
  bool HaveDeadDef = false;
  for (VNInfo *VNI : LI.valnos) {
    if (VNI->isUnused())
      continue;
    SlotIndex Def = VNI->def;
    LiveRange::iterator I = LI.FindSegmentContaining(Def);
    assert(I != LI.end() && "Missing segment for VNI");
    // Is the register live before? Otherwise we may have to add a read-undef
    // flag for subregister defs.
    Register VReg = LI.reg();
    if (MRI->shouldTrackSubRegLiveness(VReg)) {
      // No preceding segment ends at Def and it's not a PHI: nothing is
      // live-in, so subreg defs at Def must be marked read-undef.
      if ((I == LI.begin() || std::prev(I)->end < Def) && !VNI->isPHIDef()) {
        MachineInstr *MI = getInstructionFromIndex(Def);
        MI->setRegisterDefReadUndef(VReg);
      }
    }
    // Only dead values (segment ends at the dead slot of Def) need handling.
    if (I->end != Def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      VNI->markUnused();
      LI.removeSegment(I);
      LLVM_DEBUG(dbgs() << "Dead PHI at " << Def << " may separate interval\n");
      MayHaveSplitComponents = true;
    } else {
      // This is a dead def. Make sure the instruction knows.
      MachineInstr *MI = getInstructionFromIndex(Def);
      assert(MI && "No instruction defining live value");
      MI->addRegisterDead(LI.reg(), TRI);
      // A second dead def means the components around each def may no longer
      // be connected.
      if (HaveDeadDef)
        MayHaveSplitComponents = true;
      HaveDeadDef = true;
      if (dead && MI->allDefsAreDead()) {
        LLVM_DEBUG(dbgs() << "All defs dead: " << Def << '\t' << *MI);
        dead->push_back(MI);
      }
    }
  }
  return MayHaveSplitComponents;
}
/// Shrink the subrange \p SR of virtual register \p Reg to its actual uses,
/// considering only operands whose lane mask overlaps SR.LaneMask, then
/// remove any dead PHI value numbers left behind.
void LiveIntervals::shrinkToUses(LiveInterval::SubRange &SR, Register Reg) {
  LLVM_DEBUG(dbgs() << "Shrink: " << SR << '\n');
  assert(Register::isVirtualRegister(Reg) &&
         "Can only shrink virtual registers");
  // Find all the values used, including PHI kills.
  ShrinkToUsesWorkList WorkList;
  // Visit all instructions reading Reg.
  SlotIndex LastIdx;
  for (MachineOperand &MO : MRI->use_nodbg_operands(Reg)) {
    // Skip "undef" uses.
    if (!MO.readsReg())
      continue;
    // Maybe the operand is for a subregister we don't care about.
    unsigned SubReg = MO.getSubReg();
    if (SubReg != 0) {
      LaneBitmask LaneMask = TRI->getSubRegIndexLaneMask(SubReg);
      if ((LaneMask & SR.LaneMask).none())
        continue;
    }
    // We only need to visit each instruction once.
    MachineInstr *UseMI = MO.getParent();
    SlotIndex Idx = getInstructionIndex(*UseMI).getRegSlot();
    if (Idx == LastIdx)
      continue;
    LastIdx = Idx;
    LiveQueryResult LRQ = SR.Query(Idx);
    VNInfo *VNI = LRQ.valueIn();
    // For Subranges it is possible that only undef values are left in that
    // part of the subregister, so there is no real liverange at the use
    if (!VNI)
      continue;
    // Special case: An early-clobber tied operand reads and writes the
    // register one slot early.
    if (VNInfo *DefVNI = LRQ.valueDefined())
      Idx = DefVNI->def;
    WorkList.push_back(std::make_pair(Idx, VNI));
  }
  // Create a new live ranges with only minimal live segments per def.
  LiveRange NewLR;
  createSegmentsForValues(NewLR, make_range(SR.vni_begin(), SR.vni_end()));
  extendSegmentsToUses(NewLR, WorkList, Reg, SR.LaneMask);
  // Move the trimmed ranges back.
  SR.segments.swap(NewLR.segments);
  // Remove dead PHI value numbers
  for (VNInfo *VNI : SR.valnos) {
    if (VNI->isUnused())
      continue;
    const LiveRange::Segment *Segment = SR.getSegmentContaining(VNI->def);
    assert(Segment != nullptr && "Missing segment for VNI");
    // A segment that ends at the def's dead slot means the value is dead.
    if (Segment->end != VNI->def.getDeadSlot())
      continue;
    if (VNI->isPHIDef()) {
      // This is a dead PHI. Remove it.
      LLVM_DEBUG(dbgs() << "Dead PHI at " << VNI->def
                        << " may separate interval\n");
      VNI->markUnused();
      SR.removeSegment(*Segment);
    }
  }
  LLVM_DEBUG(dbgs() << "Shrunk: " << SR << '\n');
}
  556. void LiveIntervals::extendToIndices(LiveRange &LR,
  557. ArrayRef<SlotIndex> Indices,
  558. ArrayRef<SlotIndex> Undefs) {
  559. assert(LICalc && "LICalc not initialized.");
  560. LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  561. for (SlotIndex Idx : Indices)
  562. LICalc->extend(LR, Idx, /*PhysReg=*/0, Undefs);
  563. }
/// Remove the portion of \p LR's live value starting at \p Kill, following it
/// through successor blocks via DFS until the value dies. Each removed
/// segment's former end point is appended to \p EndPoints (if provided), so
/// callers can later re-extend the range.
void LiveIntervals::pruneValue(LiveRange &LR, SlotIndex Kill,
                               SmallVectorImpl<SlotIndex> *EndPoints) {
  LiveQueryResult LRQ = LR.Query(Kill);
  VNInfo *VNI = LRQ.valueOutOrDead();
  // Nothing live at Kill: nothing to prune.
  if (!VNI)
    return;
  MachineBasicBlock *KillMBB = Indexes->getMBBFromIndex(Kill);
  SlotIndex MBBEnd = Indexes->getMBBEndIdx(KillMBB);
  // If VNI isn't live out from KillMBB, the value is trivially pruned.
  if (LRQ.endPoint() < MBBEnd) {
    LR.removeSegment(Kill, LRQ.endPoint());
    if (EndPoints) EndPoints->push_back(LRQ.endPoint());
    return;
  }
  // VNI is live out of KillMBB.
  LR.removeSegment(Kill, MBBEnd);
  if (EndPoints) EndPoints->push_back(MBBEnd);
  // Find all blocks that are reachable from KillMBB without leaving VNI's live
  // range. It is possible that KillMBB itself is reachable, so start a DFS
  // from each successor.
  using VisitedTy = df_iterator_default_set<MachineBasicBlock*,9>;
  VisitedTy Visited;
  for (MachineBasicBlock *Succ : KillMBB->successors()) {
    for (df_ext_iterator<MachineBasicBlock*, VisitedTy>
         I = df_ext_begin(Succ, Visited), E = df_ext_end(Succ, Visited);
         I != E;) {
      MachineBasicBlock *MBB = *I;
      // Check if VNI is live in to MBB.
      SlotIndex MBBStart, MBBEnd;
      std::tie(MBBStart, MBBEnd) = Indexes->getMBBRange(MBB);
      LiveQueryResult LRQ = LR.Query(MBBStart);
      if (LRQ.valueIn() != VNI) {
        // This block isn't part of the VNI segment. Prune the search.
        I.skipChildren();
        continue;
      }
      // Prune the search if VNI is killed in MBB.
      if (LRQ.endPoint() < MBBEnd) {
        LR.removeSegment(MBBStart, LRQ.endPoint());
        if (EndPoints) EndPoints->push_back(LRQ.endPoint());
        I.skipChildren();
        continue;
      }
      // VNI is live through MBB.
      LR.removeSegment(MBBStart, MBBEnd);
      if (EndPoints) EndPoints->push_back(MBBEnd);
      ++I;
    }
  }
}
  614. //===----------------------------------------------------------------------===//
  615. // Register allocator hooks.
  616. //
/// Re-insert kill flags on the instruction ending each segment of every
/// allocated virtual register, unless liveness of the assigned physical
/// register (per VRM) or subregister lane liveness makes the kill invalid.
void LiveIntervals::addKillFlags(const VirtRegMap *VRM) {
  // Keep track of regunit ranges.
  SmallVector<std::pair<const LiveRange*, LiveRange::const_iterator>, 8> RU;

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    Register Reg = Register::index2VirtReg(i);
    if (MRI->reg_nodbg_empty(Reg))
      continue;
    const LiveInterval &LI = getInterval(Reg);
    if (LI.empty())
      continue;

    // Target may have not allocated this yet.
    Register PhysReg = VRM->getPhys(Reg);
    if (!PhysReg)
      continue;

    // Find the regunit intervals for the assigned register. They may overlap
    // the virtual register live range, cancelling any kills.
    RU.clear();
    // Each entry pairs a regunit range with a cursor positioned at the first
    // segment at or after LI's first segment end; cursors advance in lockstep
    // with the walk over LI below.
    for (MCRegUnitIterator Unit(PhysReg, TRI); Unit.isValid();
         ++Unit) {
      const LiveRange &RURange = getRegUnit(*Unit);
      if (RURange.empty())
        continue;
      RU.push_back(std::make_pair(&RURange, RURange.find(LI.begin()->end)));
    }

    // Every instruction that kills Reg corresponds to a segment range end
    // point.
    for (LiveInterval::const_iterator RI = LI.begin(), RE = LI.end(); RI != RE;
         ++RI) {
      // A block index indicates an MBB edge.
      if (RI->end.isBlock())
        continue;
      MachineInstr *MI = getInstructionFromIndex(RI->end);
      if (!MI)
        continue;

      // Check if any of the regunits are live beyond the end of RI. That could
      // happen when a physreg is defined as a copy of a virtreg:
      //
      //   %eax = COPY %5
      //   FOO %5             <--- MI, cancel kill because %eax is live.
      //   BAR killed %eax
      //
      // There should be no kill flag on FOO when %5 is rewritten as %eax.
      for (auto &RUP : RU) {
        const LiveRange &RURange = *RUP.first;
        LiveRange::const_iterator &I = RUP.second;
        if (I == RURange.end())
          continue;
        I = RURange.advanceTo(I, RI->end);
        if (I == RURange.end() || I->start >= RI->end)
          continue;
        // I is overlapping RI.
        goto CancelKill;
      }

      if (MRI->subRegLivenessEnabled()) {
        // When reading a partial undefined value we must not add a kill flag.
        // The regalloc might have used the undef lane for something else.
        // Example:
        //     %1 = ...                  ; R32: %1
        //     %2:high16 = ...           ; R64: %2
        //        = read killed %2       ; R64: %2
        //        = read %1              ; R32: %1
        // The <kill> flag is correct for %2, but the register allocator may
        // assign R0L to %1, and R0 to %2 because the low 32bits of R0
        // are actually never written by %2. After assignment the <kill>
        // flag at the read instruction is invalid.
        LaneBitmask DefinedLanesMask;
        if (LI.hasSubRanges()) {
          // Compute a mask of lanes that are defined.
          DefinedLanesMask = LaneBitmask::getNone();
          for (const LiveInterval::SubRange &SR : LI.subranges())
            for (const LiveRange::Segment &Segment : SR.segments) {
              if (Segment.start >= RI->end)
                break;
              if (Segment.end == RI->end) {
                // This subrange's lanes are live up to the kill point.
                DefinedLanesMask |= SR.LaneMask;
                break;
              }
            }
        } else
          DefinedLanesMask = LaneBitmask::getAll();

        bool IsFullWrite = false;
        for (const MachineOperand &MO : MI->operands()) {
          if (!MO.isReg() || MO.getReg() != Reg)
            continue;
          if (MO.isUse()) {
            // Reading any undefined lanes?
            unsigned SubReg = MO.getSubReg();
            LaneBitmask UseMask = SubReg ? TRI->getSubRegIndexLaneMask(SubReg)
                                         : MRI->getMaxLaneMaskForVReg(Reg);
            if ((UseMask & ~DefinedLanesMask).any())
              goto CancelKill;
          } else if (MO.getSubReg() == 0) {
            // Writing to the full register?
            assert(MO.isDef());
            IsFullWrite = true;
          }
        }

        // If an instruction writes to a subregister, a new segment starts in
        // the LiveInterval. But as this is only overriding part of the register
        // adding kill-flags is not correct here after registers have been
        // assigned.
        if (!IsFullWrite) {
          // Next segment has to be adjacent in the subregister write case.
          LiveRange::const_iterator N = std::next(RI);
          if (N != LI.end() && N->start == RI->end)
            goto CancelKill;
        }
      }

      MI->addRegisterKilled(Reg, nullptr);
      continue;
CancelKill:
      MI->clearRegisterKills(Reg, nullptr);
    }
  }
}
  732. MachineBasicBlock*
  733. LiveIntervals::intervalIsInOneMBB(const LiveInterval &LI) const {
  734. assert(!LI.empty() && "LiveInterval is empty.");
  735. // A local live range must be fully contained inside the block, meaning it is
  736. // defined and killed at instructions, not at block boundaries. It is not
  737. // live in or out of any block.
  738. //
  739. // It is technically possible to have a PHI-defined live range identical to a
  740. // single block, but we are going to return false in that case.
  741. SlotIndex Start = LI.beginIndex();
  742. if (Start.isBlock())
  743. return nullptr;
  744. SlotIndex Stop = LI.endIndex();
  745. if (Stop.isBlock())
  746. return nullptr;
  747. // getMBBFromIndex doesn't need to search the MBB table when both indexes
  748. // belong to proper instructions.
  749. MachineBasicBlock *MBB1 = Indexes->getMBBFromIndex(Start);
  750. MachineBasicBlock *MBB2 = Indexes->getMBBFromIndex(Stop);
  751. return MBB1 == MBB2 ? MBB1 : nullptr;
  752. }
  753. bool
  754. LiveIntervals::hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const {
  755. for (const VNInfo *PHI : LI.valnos) {
  756. if (PHI->isUnused() || !PHI->isPHIDef())
  757. continue;
  758. const MachineBasicBlock *PHIMBB = getMBBFromIndex(PHI->def);
  759. // Conservatively return true instead of scanning huge predecessor lists.
  760. if (PHIMBB->pred_size() > 100)
  761. return true;
  762. for (const MachineBasicBlock *Pred : PHIMBB->predecessors())
  763. if (VNI == LI.getVNInfoBefore(Indexes->getMBBEndIdx(Pred)))
  764. return true;
  765. }
  766. return false;
  767. }
  768. float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
  769. const MachineBlockFrequencyInfo *MBFI,
  770. const MachineInstr &MI) {
  771. return getSpillWeight(isDef, isUse, MBFI, MI.getParent());
  772. }
  773. float LiveIntervals::getSpillWeight(bool isDef, bool isUse,
  774. const MachineBlockFrequencyInfo *MBFI,
  775. const MachineBasicBlock *MBB) {
  776. return (isDef + isUse) * MBFI->getBlockFreqRelativeToEntryBlock(MBB);
  777. }
  778. LiveRange::Segment
  779. LiveIntervals::addSegmentToEndOfBlock(Register Reg, MachineInstr &startInst) {
  780. LiveInterval &Interval = createEmptyInterval(Reg);
  781. VNInfo *VN = Interval.getNextValue(
  782. SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  783. getVNInfoAllocator());
  784. LiveRange::Segment S(SlotIndex(getInstructionIndex(startInst).getRegSlot()),
  785. getMBBEndIdx(startInst.getParent()), VN);
  786. Interval.addSegment(S);
  787. return S;
  788. }
  789. //===----------------------------------------------------------------------===//
  790. // Register mask functions
  791. //===----------------------------------------------------------------------===//
  792. /// Check whether use of reg in MI is live-through. Live-through means that
  793. /// the value is alive on exit from Machine instruction. The example of such
  794. /// use is a deopt value in statepoint instruction.
  795. static bool hasLiveThroughUse(const MachineInstr *MI, Register Reg) {
  796. if (MI->getOpcode() != TargetOpcode::STATEPOINT)
  797. return false;
  798. StatepointOpers SO(MI);
  799. if (SO.getFlags() & (uint64_t)StatepointFlags::DeoptLiveIn)
  800. return false;
  801. for (unsigned Idx = SO.getNumDeoptArgsIdx(), E = SO.getNumGCPtrIdx(); Idx < E;
  802. ++Idx) {
  803. const MachineOperand &MO = MI->getOperand(Idx);
  804. if (MO.isReg() && MO.getReg() == Reg)
  805. return true;
  806. }
  807. return false;
  808. }
/// Check for regmask (e.g. call-clobber) interference with LI. Returns true
/// when LI overlaps at least one regmask slot; in that case UsableRegs is set
/// to the registers preserved by every overlapping mask.
bool LiveIntervals::checkRegMaskInterference(LiveInterval &LI,
                                             BitVector &UsableRegs) {
  if (LI.empty())
    return false;
  LiveInterval::iterator LiveI = LI.begin(), LiveE = LI.end();

  // Use a smaller arrays for local live ranges.
  ArrayRef<SlotIndex> Slots;
  ArrayRef<const uint32_t*> Bits;
  if (MachineBasicBlock *MBB = intervalIsInOneMBB(LI)) {
    Slots = getRegMaskSlotsInBlock(MBB->getNumber());
    Bits = getRegMaskBitsInBlock(MBB->getNumber());
  } else {
    Slots = getRegMaskSlots();
    Bits = getRegMaskBits();
  }

  // We are going to enumerate all the register mask slots contained in LI.
  // Start with a binary search of RegMaskSlots to find a starting point.
  ArrayRef<SlotIndex>::iterator SlotI = llvm::lower_bound(Slots, LiveI->start);
  ArrayRef<SlotIndex>::iterator SlotE = Slots.end();

  // No slots in range, LI begins after the last call.
  if (SlotI == SlotE)
    return false;

  bool Found = false;
  // Utility to union regmasks.
  auto unionBitMask = [&](unsigned Idx) {
    if (!Found) {
      // This is the first overlap. Initialize UsableRegs to all ones.
      UsableRegs.clear();
      UsableRegs.resize(TRI->getNumRegs(), true);
      Found = true;
    }
    // Remove usable registers clobbered by this mask.
    UsableRegs.clearBitsNotInMask(Bits[Idx]);
  };
  // Two-cursor walk: LiveI over LI's segments, SlotI over regmask slots.
  while (true) {
    assert(*SlotI >= LiveI->start);
    // Loop over all slots overlapping this segment.
    while (*SlotI < LiveI->end) {
      // *SlotI overlaps LI. Collect mask bits.
      unionBitMask(SlotI - Slots.begin());
      if (++SlotI == SlotE)
        return Found;
    }
    // If segment ends with live-through use we need to collect its regmask.
    if (*SlotI == LiveI->end)
      if (MachineInstr *MI = getInstructionFromIndex(*SlotI))
        if (hasLiveThroughUse(MI, LI.reg()))
          unionBitMask(SlotI++ - Slots.begin());
    // *SlotI is beyond the current LI segment.
    // Special advance implementation to not miss next LiveI->end.
    if (++LiveI == LiveE || SlotI == SlotE || *SlotI > LI.endIndex())
      return Found;
    while (LiveI->end < *SlotI)
      ++LiveI;
    // Advance SlotI until it overlaps.
    while (*SlotI < LiveI->start)
      if (++SlotI == SlotE)
        return Found;
  }
}
//===----------------------------------------------------------------------===//
// HMEditor class.
//===----------------------------------------------------------------------===//
  872. /// Toolkit used by handleMove to trim or extend live intervals.
  873. class LiveIntervals::HMEditor {
  874. private:
  875. LiveIntervals& LIS;
  876. const MachineRegisterInfo& MRI;
  877. const TargetRegisterInfo& TRI;
  878. SlotIndex OldIdx;
  879. SlotIndex NewIdx;
  880. SmallPtrSet<LiveRange*, 8> Updated;
  881. bool UpdateFlags;
  882. public:
  // Construct an editor describing a move of one instruction from OldIdx to
  // NewIdx. When UpdateFlags is set, regunit live ranges are computed on
  // demand so kill flags can be maintained (see the FIXME below).
  HMEditor(LiveIntervals& LIS, const MachineRegisterInfo& MRI,
           const TargetRegisterInfo& TRI,
           SlotIndex OldIdx, SlotIndex NewIdx, bool UpdateFlags)
      : LIS(LIS), MRI(MRI), TRI(TRI), OldIdx(OldIdx), NewIdx(NewIdx),
        UpdateFlags(UpdateFlags) {}
  888. // FIXME: UpdateFlags is a workaround that creates live intervals for all
  889. // physregs, even those that aren't needed for regalloc, in order to update
  890. // kill flags. This is wasteful. Eventually, LiveVariables will strip all kill
  891. // flags, and postRA passes will use a live register utility instead.
  892. LiveRange *getRegUnitLI(unsigned Unit) {
  893. if (UpdateFlags && !MRI.isReservedRegUnit(Unit))
  894. return &LIS.getRegUnit(Unit);
  895. return LIS.getCachedRegUnit(Unit);
  896. }
  897. /// Update all live ranges touched by MI, assuming a move from OldIdx to
  898. /// NewIdx.
  /// Update all live ranges touched by MI, assuming a move from OldIdx to
  /// NewIdx. Visits subranges before the main range for each virtual
  /// register operand, then any cached regunit ranges for physregs, and
  /// finally regmask slots if MI carries a regmask.
  void updateAllRanges(MachineInstr *MI) {
    LLVM_DEBUG(dbgs() << "handleMove " << OldIdx << " -> " << NewIdx << ": "
                      << *MI);
    bool hasRegMask = false;
    for (MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        hasRegMask = true;
      if (!MO.isReg())
        continue;
      if (MO.isUse()) {
        if (!MO.readsReg())
          continue;
        // Aggressively clear all kill flags.
        // They are reinserted by VirtRegRewriter.
        MO.setIsKill(false);
      }

      Register Reg = MO.getReg();
      if (!Reg)
        continue;
      if (Register::isVirtualRegister(Reg)) {
        LiveInterval &LI = LIS.getInterval(Reg);
        if (LI.hasSubRanges()) {
          unsigned SubReg = MO.getSubReg();
          // Only subranges whose lanes overlap this operand are affected.
          LaneBitmask LaneMask = SubReg ? TRI.getSubRegIndexLaneMask(SubReg)
                                        : MRI.getMaxLaneMaskForVReg(Reg);
          for (LiveInterval::SubRange &S : LI.subranges()) {
            if ((S.LaneMask & LaneMask).none())
              continue;
            updateRange(S, Reg, S.LaneMask);
          }
        }
        updateRange(LI, Reg, LaneBitmask::getNone());
        // If main range has a hole and we are moving a subrange use across
        // the hole updateRange() cannot properly handle it since it only
        // gets the LiveRange and not the whole LiveInterval. As a result
        // we may end up with a main range not covering all subranges.
        // This is extremely rare case, so let's check and reconstruct the
        // main range.
        for (LiveInterval::SubRange &S : LI.subranges()) {
          if (LI.covers(S))
            continue;
          LI.clear();
          LIS.constructMainRangeFromSubranges(LI);
          break;
        }

        continue;
      }

      // For physregs, only update the regunits that actually have a
      // precomputed live range.
      for (MCRegUnitIterator Units(Reg.asMCReg(), &TRI); Units.isValid();
           ++Units)
        if (LiveRange *LR = getRegUnitLI(*Units))
          updateRange(*LR, *Units, LaneBitmask::getNone());
    }
    if (hasRegMask)
      updateRegMaskSlots();
  }
  956. private:
  957. /// Update a single live range, assuming an instruction has been moved from
  958. /// OldIdx to NewIdx.
  959. void updateRange(LiveRange &LR, Register Reg, LaneBitmask LaneMask) {
  960. if (!Updated.insert(&LR).second)
  961. return;
  962. LLVM_DEBUG({
  963. dbgs() << " ";
  964. if (Register::isVirtualRegister(Reg)) {
  965. dbgs() << printReg(Reg);
  966. if (LaneMask.any())
  967. dbgs() << " L" << PrintLaneMask(LaneMask);
  968. } else {
  969. dbgs() << printRegUnit(Reg, &TRI);
  970. }
  971. dbgs() << ":\t" << LR << '\n';
  972. });
  973. if (SlotIndex::isEarlierInstr(OldIdx, NewIdx))
  974. handleMoveDown(LR);
  975. else
  976. handleMoveUp(LR, Reg, LaneMask);
  977. LLVM_DEBUG(dbgs() << " -->\t" << LR << '\n');
  978. LR.verify();
  979. }
  980. /// Update LR to reflect an instruction has been moved downwards from OldIdx
  981. /// to NewIdx (OldIdx < NewIdx).
  /// Update LR to reflect an instruction has been moved downwards from OldIdx
  /// to NewIdx (OldIdx < NewIdx). Handles, in order: a use-only OldIdx (just
  /// extend liveness), a def whose value reaches past NewIdx (slide segment
  /// start), and a def ending before NewIdx (merge/slide segments and rebuild
  /// a segment at NewIdx).
  void handleMoveDown(LiveRange &LR) {
    LiveRange::iterator E = LR.end();
    // Segment going into OldIdx.
    LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());

    // No value live before or after OldIdx? Nothing to do.
    if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
      return;

    LiveRange::iterator OldIdxOut;
    // Do we have a value live-in to OldIdx?
    if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
      // If the live-in value already extends to NewIdx, there is nothing to do.
      if (SlotIndex::isEarlierEqualInstr(NewIdx, OldIdxIn->end))
        return;
      // Aggressively remove all kill flags from the old kill point.
      // Kill flags shouldn't be used while live intervals exist, they will be
      // reinserted by VirtRegRewriter.
      if (MachineInstr *KillMI = LIS.getInstructionFromIndex(OldIdxIn->end))
        for (MachineOperand &MOP : mi_bundle_ops(*KillMI))
          if (MOP.isReg() && MOP.isUse())
            MOP.setIsKill(false);

      // Is there a def before NewIdx which is not OldIdx?
      LiveRange::iterator Next = std::next(OldIdxIn);
      if (Next != E && !SlotIndex::isSameInstr(OldIdx, Next->start) &&
          SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
        // If we are here then OldIdx was just a use but not a def. We only have
        // to ensure liveness extends to NewIdx.
        LiveRange::iterator NewIdxIn =
          LR.advanceTo(Next, NewIdx.getBaseIndex());
        // Extend the segment before NewIdx if necessary.
        if (NewIdxIn == E ||
            !SlotIndex::isEarlierInstr(NewIdxIn->start, NewIdx)) {
          LiveRange::iterator Prev = std::prev(NewIdxIn);
          Prev->end = NewIdx.getRegSlot();
        }
        // Extend OldIdxIn.
        OldIdxIn->end = Next->start;
        return;
      }

      // Adjust OldIdxIn->end to reach NewIdx. This may temporarily make LR
      // invalid by overlapping ranges.
      bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
      OldIdxIn->end = NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber());
      // If this was not a kill, then there was no def and we're done.
      if (!isKill)
        return;

      // Did we have a Def at OldIdx?
      OldIdxOut = Next;
      if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
        return;
    } else {
      OldIdxOut = OldIdxIn;
    }

    // If we are here then there is a Definition at OldIdx. OldIdxOut points
    // to the segment starting there.
    assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
           "No def?");
    VNInfo *OldIdxVNI = OldIdxOut->valno;
    assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");

    // If the defined value extends beyond NewIdx, just move the beginning
    // of the segment to NewIdx.
    SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
    if (SlotIndex::isEarlierInstr(NewIdxDef, OldIdxOut->end)) {
      OldIdxVNI->def = NewIdxDef;
      OldIdxOut->start = OldIdxVNI->def;
      return;
    }

    // If we are here then we have a Definition at OldIdx which ends before
    // NewIdx.

    // Is there an existing Def at NewIdx?
    LiveRange::iterator AfterNewIdx
      = LR.advanceTo(OldIdxOut, NewIdx.getRegSlot());
    bool OldIdxDefIsDead = OldIdxOut->end.isDead();
    if (!OldIdxDefIsDead &&
        SlotIndex::isEarlierInstr(OldIdxOut->end, NewIdxDef)) {
      // OldIdx is not a dead def, and NewIdxDef is inside a new interval.
      VNInfo *DefVNI;
      if (OldIdxOut != LR.begin() &&
          !SlotIndex::isEarlierInstr(std::prev(OldIdxOut)->end,
                                     OldIdxOut->start)) {
        // There is no gap between OldIdxOut and its predecessor anymore,
        // merge them.
        LiveRange::iterator IPrev = std::prev(OldIdxOut);
        DefVNI = OldIdxVNI;
        IPrev->end = OldIdxOut->end;
      } else {
        // The value is live in to OldIdx
        LiveRange::iterator INext = std::next(OldIdxOut);
        assert(INext != E && "Must have following segment");
        // We merge OldIdxOut and its successor. As we're dealing with subreg
        // reordering, there is always a successor to OldIdxOut in the same BB
        // We don't need INext->valno anymore and will reuse for the new segment
        // we create later.
        DefVNI = OldIdxVNI;
        INext->start = OldIdxOut->end;
        INext->valno->def = INext->start;
      }
      // If NewIdx is behind the last segment, extend that and append a new one.
      if (AfterNewIdx == E) {
        // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
        // one position.
        // |- ?/OldIdxOut -| |- X0 -| ... |- Xn -| end
        // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS -| end
        std::copy(std::next(OldIdxOut), E, OldIdxOut);
        // The last segment is undefined now, reuse it for a dead def.
        LiveRange::iterator NewSegment = std::prev(E);
        *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                         DefVNI);
        DefVNI->def = NewIdxDef;

        LiveRange::iterator Prev = std::prev(NewSegment);
        Prev->end = NewIdxDef;
      } else {
        // OldIdxOut is undef at this point, Slide (OldIdxOut;AfterNewIdx] up
        // one position.
        // |- ?/OldIdxOut -| |- X0 -| ... |- Xn/AfterNewIdx -| |- Next -|
        // => |- X0/OldIdxOut -| ... |- Xn -| |- Xn/AfterNewIdx -| |- Next -|
        std::copy(std::next(OldIdxOut), std::next(AfterNewIdx), OldIdxOut);
        LiveRange::iterator Prev = std::prev(AfterNewIdx);
        // We have two cases:
        if (SlotIndex::isEarlierInstr(Prev->start, NewIdxDef)) {
          // Case 1: NewIdx is inside a liverange. Split this liverange at
          // NewIdxDef into the segment "Prev" followed by "NewSegment".
          LiveRange::iterator NewSegment = AfterNewIdx;
          *NewSegment = LiveRange::Segment(NewIdxDef, Prev->end, Prev->valno);
          Prev->valno->def = NewIdxDef;

          *Prev = LiveRange::Segment(Prev->start, NewIdxDef, DefVNI);
          DefVNI->def = Prev->start;
        } else {
          // Case 2: NewIdx is in a lifetime hole. Keep AfterNewIdx as is and
          // turn Prev into a segment from NewIdx to AfterNewIdx->start.
          *Prev = LiveRange::Segment(NewIdxDef, AfterNewIdx->start, DefVNI);
          DefVNI->def = NewIdxDef;
          assert(DefVNI != AfterNewIdx->valno);
        }
      }
      return;
    }

    if (AfterNewIdx != E &&
        SlotIndex::isSameInstr(AfterNewIdx->start, NewIdxDef)) {
      // There is an existing def at NewIdx. The def at OldIdx is coalesced into
      // that value.
      assert(AfterNewIdx->valno != OldIdxVNI && "Multiple defs of value?");
      LR.removeValNo(OldIdxVNI);
    } else {
      // There was no existing def at NewIdx. We need to create a dead def
      // at NewIdx. Shift segments over the old OldIdxOut segment, this frees
      // a new segment at the place where we want to construct the dead def.
      // |- OldIdxOut -| |- X0 -| ... |- Xn -| |- AfterNewIdx -|
      // => |- X0/OldIdxOut -| ... |- Xn -| |- undef/NewS. -| |- AfterNewIdx -|
      assert(AfterNewIdx != OldIdxOut && "Inconsistent iterators");
      std::copy(std::next(OldIdxOut), AfterNewIdx, OldIdxOut);
      // We can reuse OldIdxVNI now.
      LiveRange::iterator NewSegment = std::prev(AfterNewIdx);
      VNInfo *NewSegmentVNI = OldIdxVNI;
      NewSegmentVNI->def = NewIdxDef;
      *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                       NewSegmentVNI);
    }
  }
  1140. /// Update LR to reflect an instruction has been moved upwards from OldIdx
  1141. /// to NewIdx (NewIdx < OldIdx).
  /// Update LR to reflect an instruction has been moved upwards from OldIdx
  /// to NewIdx (NewIdx < OldIdx). Handles, in order: a use-only OldIdx
  /// (shrink the incoming segment), a def colliding with an existing def at
  /// NewIdx (coalesce or remove), and a def moved into empty space or into
  /// the middle of another value (slide segments and rebuild at NewIdx).
  void handleMoveUp(LiveRange &LR, Register Reg, LaneBitmask LaneMask) {
    LiveRange::iterator E = LR.end();
    // Segment going into OldIdx.
    LiveRange::iterator OldIdxIn = LR.find(OldIdx.getBaseIndex());

    // No value live before or after OldIdx? Nothing to do.
    if (OldIdxIn == E || SlotIndex::isEarlierInstr(OldIdx, OldIdxIn->start))
      return;

    LiveRange::iterator OldIdxOut;
    // Do we have a value live-in to OldIdx?
    if (SlotIndex::isEarlierInstr(OldIdxIn->start, OldIdx)) {
      // If the live-in value isn't killed here, then we have no Def at
      // OldIdx, moreover the value must be live at NewIdx so there is nothing
      // to do.
      bool isKill = SlotIndex::isSameInstr(OldIdx, OldIdxIn->end);
      if (!isKill)
        return;

      // At this point we have to move OldIdxIn->end back to the nearest
      // previous use or (dead-)def but no further than NewIdx.
      SlotIndex DefBeforeOldIdx
        = std::max(OldIdxIn->start.getDeadSlot(),
                   NewIdx.getRegSlot(OldIdxIn->end.isEarlyClobber()));
      OldIdxIn->end = findLastUseBefore(DefBeforeOldIdx, Reg, LaneMask);

      // Did we have a Def at OldIdx? If not we are done now.
      OldIdxOut = std::next(OldIdxIn);
      if (OldIdxOut == E || !SlotIndex::isSameInstr(OldIdx, OldIdxOut->start))
        return;
    } else {
      OldIdxOut = OldIdxIn;
      OldIdxIn = OldIdxOut != LR.begin() ? std::prev(OldIdxOut) : E;
    }

    // If we are here then there is a Definition at OldIdx. OldIdxOut points
    // to the segment starting there.
    assert(OldIdxOut != E && SlotIndex::isSameInstr(OldIdx, OldIdxOut->start) &&
           "No def?");
    VNInfo *OldIdxVNI = OldIdxOut->valno;
    assert(OldIdxVNI->def == OldIdxOut->start && "Inconsistent def");
    bool OldIdxDefIsDead = OldIdxOut->end.isDead();

    // Is there an existing def at NewIdx?
    SlotIndex NewIdxDef = NewIdx.getRegSlot(OldIdxOut->start.isEarlyClobber());
    LiveRange::iterator NewIdxOut = LR.find(NewIdx.getRegSlot());
    if (SlotIndex::isSameInstr(NewIdxOut->start, NewIdx)) {
      assert(NewIdxOut->valno != OldIdxVNI &&
             "Same value defined more than once?");
      // If OldIdx was a dead def remove it.
      if (!OldIdxDefIsDead) {
        // Remove segment starting at NewIdx and move begin of OldIdxOut to
        // NewIdx so it can take its place.
        OldIdxVNI->def = NewIdxDef;
        OldIdxOut->start = NewIdxDef;
        LR.removeValNo(NewIdxOut->valno);
      } else {
        // Simply remove the dead def at OldIdx.
        LR.removeValNo(OldIdxVNI);
      }
    } else {
      // Previously nothing was live after NewIdx, so all we have to do now is
      // move the begin of OldIdxOut to NewIdx.
      if (!OldIdxDefIsDead) {
        // Do we have any intermediate Defs between OldIdx and NewIdx?
        if (OldIdxIn != E &&
            SlotIndex::isEarlierInstr(NewIdxDef, OldIdxIn->start)) {
          // OldIdx is not a dead def and NewIdx is before predecessor start.
          LiveRange::iterator NewIdxIn = NewIdxOut;
          assert(NewIdxIn == LR.find(NewIdx.getBaseIndex()));
          const SlotIndex SplitPos = NewIdxDef;
          OldIdxVNI = OldIdxIn->valno;

          SlotIndex NewDefEndPoint = std::next(NewIdxIn)->end;
          LiveRange::iterator Prev = std::prev(OldIdxIn);
          if (OldIdxIn != LR.begin() &&
              SlotIndex::isEarlierInstr(NewIdx, Prev->end)) {
            // If the segment before OldIdx read a value defined earlier than
            // NewIdx, the moved instruction also reads and forwards that
            // value. Extend the lifetime of the new def point.

            // Extend to where the previous range started, unless there is
            // another redef first.
            NewDefEndPoint = std::min(OldIdxIn->start,
                                      std::next(NewIdxOut)->start);
          }

          // Merge the OldIdxIn and OldIdxOut segments into OldIdxOut.
          OldIdxOut->valno->def = OldIdxIn->start;
          *OldIdxOut = LiveRange::Segment(OldIdxIn->start, OldIdxOut->end,
                                          OldIdxOut->valno);
          // OldIdxIn and OldIdxVNI are now undef and can be overridden.
          // We Slide [NewIdxIn, OldIdxIn) down one position.
          // |- X0/NewIdxIn -| ... |- Xn-1 -||- Xn/OldIdxIn -||- OldIdxOut -|
          // => |- undef/NexIdxIn -| |- X0 -| ... |- Xn-1 -| |- Xn/OldIdxOut -|
          std::copy_backward(NewIdxIn, OldIdxIn, OldIdxOut);
          // NewIdxIn is now considered undef so we can reuse it for the moved
          // value.
          LiveRange::iterator NewSegment = NewIdxIn;
          LiveRange::iterator Next = std::next(NewSegment);
          if (SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
            // There is no gap between NewSegment and its predecessor.
            *NewSegment = LiveRange::Segment(Next->start, SplitPos,
                                             Next->valno);
            *Next = LiveRange::Segment(SplitPos, NewDefEndPoint, OldIdxVNI);
            Next->valno->def = SplitPos;
          } else {
            // There is a gap between NewSegment and its predecessor
            // Value becomes live in.
            *NewSegment = LiveRange::Segment(SplitPos, Next->start, OldIdxVNI);
            NewSegment->valno->def = SplitPos;
          }
        } else {
          // Leave the end point of a live def.
          OldIdxOut->start = NewIdxDef;
          OldIdxVNI->def = NewIdxDef;
          if (OldIdxIn != E && SlotIndex::isEarlierInstr(NewIdx, OldIdxIn->end))
            OldIdxIn->end = NewIdxDef;
        }
      } else if (OldIdxIn != E
                 && SlotIndex::isEarlierInstr(NewIdxOut->start, NewIdx)
                 && SlotIndex::isEarlierInstr(NewIdx, NewIdxOut->end)) {
        // OldIdxVNI is a dead def that has been moved into the middle of
        // another value in LR. That can happen when LR is a whole register,
        // but the dead def is a write to a subreg that is dead at NewIdx.
        // The dead def may have been moved across other values
        // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
        // down one position.
        // |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
        // => |- X0/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
        std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));

        // Modify the segment at NewIdxOut and the following segment to meet at
        // the point of the dead def, with the following segment getting
        // OldIdxVNI as its value number.
        *NewIdxOut = LiveRange::Segment(
            NewIdxOut->start, NewIdxDef.getRegSlot(), NewIdxOut->valno);
        *(NewIdxOut + 1) = LiveRange::Segment(
            NewIdxDef.getRegSlot(), (NewIdxOut + 1)->end, OldIdxVNI);
        OldIdxVNI->def = NewIdxDef;
        // Modify subsequent segments to be defined by the moved def OldIdxVNI.
        for (auto Idx = NewIdxOut + 2; Idx <= OldIdxOut; ++Idx)
          Idx->valno = OldIdxVNI;
        // Aggressively remove all dead flags from the former dead definition.
        // Kill/dead flags shouldn't be used while live intervals exist; they
        // will be reinserted by VirtRegRewriter.
        if (MachineInstr *KillMI = LIS.getInstructionFromIndex(NewIdx))
          for (MIBundleOperands MO(*KillMI); MO.isValid(); ++MO)
            if (MO->isReg() && !MO->isUse())
              MO->setIsDead(false);
      } else {
        // OldIdxVNI is a dead def. It may have been moved across other values
        // in LR, so move OldIdxOut up to NewIdxOut. Slide [NewIdxOut;OldIdxOut)
        // down one position.
        // |- X0/NewIdxOut -| ... |- Xn-1 -| |- Xn/OldIdxOut -| |- next - |
        // => |- undef/NewIdxOut -| |- X0 -| ... |- Xn-1 -| |- next -|
        std::copy_backward(NewIdxOut, OldIdxOut, std::next(OldIdxOut));
        // OldIdxVNI can be reused now to build a new dead def segment.
        LiveRange::iterator NewSegment = NewIdxOut;
        VNInfo *NewSegmentVNI = OldIdxVNI;
        *NewSegment = LiveRange::Segment(NewIdxDef, NewIdxDef.getDeadSlot(),
                                         NewSegmentVNI);
        NewSegmentVNI->def = NewIdxDef;
      }
    }
  }
  /// Rewrite the RegMaskSlots entry for the moved instruction from OldIdx to
  /// NewIdx. The slot vector must remain sorted, so a regmask-carrying
  /// instruction (typically a call) cannot be moved past another one; the
  /// asserts below enforce that.
  void updateRegMaskSlots() {
    SmallVectorImpl<SlotIndex>::iterator RI =
        llvm::lower_bound(LIS.RegMaskSlots, OldIdx);
    assert(RI != LIS.RegMaskSlots.end() && *RI == OldIdx.getRegSlot() &&
           "No RegMask at OldIdx.");
    // Update the entry in place.
    *RI = NewIdx.getRegSlot();
    // The vector must still be sorted with respect to both neighbors.
    assert((RI == LIS.RegMaskSlots.begin() ||
            SlotIndex::isEarlierInstr(*std::prev(RI), *RI)) &&
           "Cannot move regmask instruction above another call");
    assert((std::next(RI) == LIS.RegMaskSlots.end() ||
            SlotIndex::isEarlierInstr(*RI, *std::next(RI))) &&
           "Cannot move regmask instruction below another call");
  }
  // Return the last use of reg between NewIdx and OldIdx.
  /// Find the slot of the last use of \p Reg in the interval (Before, OldIdx),
  /// or return \p Before when there is none. For a subregister use, the use is
  /// only counted when its lanes overlap \p LaneMask (if a mask is given).
  /// For virtual registers the use list is scanned; otherwise \p Reg is
  /// treated as a register unit and the block is scanned backwards from
  /// OldIdx, since regunit use lists would be too expensive to walk.
  SlotIndex findLastUseBefore(SlotIndex Before, Register Reg,
                              LaneBitmask LaneMask) {
    if (Register::isVirtualRegister(Reg)) {
      SlotIndex LastUse = Before;
      for (MachineOperand &MO : MRI.use_nodbg_operands(Reg)) {
        if (MO.isUndef())
          continue;
        unsigned SubReg = MO.getSubReg();
        // Ignore subregister uses that touch none of the requested lanes.
        if (SubReg != 0 && LaneMask.any()
            && (TRI.getSubRegIndexLaneMask(SubReg) & LaneMask).none())
          continue;
        const MachineInstr &MI = *MO.getParent();
        SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
        // Keep the latest use that is still strictly before OldIdx.
        if (InstSlot > LastUse && InstSlot < OldIdx)
          LastUse = InstSlot.getRegSlot();
      }
      return LastUse;
    }

    // This is a regunit interval, so scanning the use list could be very
    // expensive. Scan upwards from OldIdx instead.
    assert(Before < OldIdx && "Expected upwards move");
    SlotIndexes *Indexes = LIS.getSlotIndexes();
    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Before);

    // OldIdx may not correspond to an instruction any longer, so set MII to
    // point to the next instruction after OldIdx, or MBB->end().
    MachineBasicBlock::iterator MII = MBB->end();
    if (MachineInstr *MI = Indexes->getInstructionFromIndex(
            Indexes->getNextNonNullIndex(OldIdx)))
      if (MI->getParent() == MBB)
        MII = MI;

    MachineBasicBlock::iterator Begin = MBB->begin();
    while (MII != Begin) {
      if ((--MII)->isDebugOrPseudoInstr())
        continue;
      SlotIndex Idx = Indexes->getInstructionIndex(*MII);

      // Stop searching when Before is reached.
      if (!SlotIndex::isEarlierInstr(Before, Idx))
        return Before;

      // Check if MII uses Reg.
      for (MIBundleOperands MO(*MII); MO.isValid(); ++MO)
        if (MO->isReg() && !MO->isUndef() &&
            Register::isPhysicalRegister(MO->getReg()) &&
            TRI.hasRegUnit(MO->getReg(), Reg))
          return Idx.getRegSlot();
    }

    // Didn't reach Before. It must be the first instruction in the block.
    return Before;
  }
  1360. };
  1361. void LiveIntervals::handleMove(MachineInstr &MI, bool UpdateFlags) {
  1362. // It is fine to move a bundle as a whole, but not an individual instruction
  1363. // inside it.
  1364. assert((!MI.isBundled() || MI.getOpcode() == TargetOpcode::BUNDLE) &&
  1365. "Cannot move instruction in bundle");
  1366. SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  1367. Indexes->removeMachineInstrFromMaps(MI);
  1368. SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(MI);
  1369. assert(getMBBStartIdx(MI.getParent()) <= OldIndex &&
  1370. OldIndex < getMBBEndIdx(MI.getParent()) &&
  1371. "Cannot handle moves across basic block boundaries.");
  1372. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  1373. HME.updateAllRanges(&MI);
  1374. }
  1375. void LiveIntervals::handleMoveIntoNewBundle(MachineInstr &BundleStart,
  1376. bool UpdateFlags) {
  1377. assert((BundleStart.getOpcode() == TargetOpcode::BUNDLE) &&
  1378. "Bundle start is not a bundle");
  1379. SmallVector<SlotIndex, 16> ToProcess;
  1380. const SlotIndex NewIndex = Indexes->insertMachineInstrInMaps(BundleStart);
  1381. auto BundleEnd = getBundleEnd(BundleStart.getIterator());
  1382. auto I = BundleStart.getIterator();
  1383. I++;
  1384. while (I != BundleEnd) {
  1385. if (!Indexes->hasIndex(*I))
  1386. continue;
  1387. SlotIndex OldIndex = Indexes->getInstructionIndex(*I, true);
  1388. ToProcess.push_back(OldIndex);
  1389. Indexes->removeMachineInstrFromMaps(*I, true);
  1390. I++;
  1391. }
  1392. for (SlotIndex OldIndex : ToProcess) {
  1393. HMEditor HME(*this, *MRI, *TRI, OldIndex, NewIndex, UpdateFlags);
  1394. HME.updateAllRanges(&BundleStart);
  1395. }
  1396. // Fix up dead defs
  1397. const SlotIndex Index = getInstructionIndex(BundleStart);
  1398. for (unsigned Idx = 0, E = BundleStart.getNumOperands(); Idx != E; ++Idx) {
  1399. MachineOperand &MO = BundleStart.getOperand(Idx);
  1400. if (!MO.isReg())
  1401. continue;
  1402. Register Reg = MO.getReg();
  1403. if (Reg.isVirtual() && hasInterval(Reg) && !MO.isUndef()) {
  1404. LiveInterval &LI = getInterval(Reg);
  1405. LiveQueryResult LRQ = LI.Query(Index);
  1406. if (LRQ.isDeadDef())
  1407. MO.setIsDead();
  1408. }
  1409. }
  1410. }
/// Repair the live range \p LR of \p Reg (a main range, or a subrange limited
/// to \p LaneMask) over the instruction region [\p Begin, \p End) after
/// instructions in that region were added or removed. \p EndIdx is the slot
/// index that bounds the region on the high side.
void LiveIntervals::repairOldRegInRange(const MachineBasicBlock::iterator Begin,
                                        const MachineBasicBlock::iterator End,
                                        const SlotIndex EndIdx, LiveRange &LR,
                                        const Register Reg,
                                        LaneBitmask LaneMask) {
  // Position LII on the segment covering EndIdx, or the segment before it.
  LiveInterval::iterator LII = LR.find(EndIdx);
  SlotIndex lastUseIdx;
  if (LII != LR.end() && LII->start < EndIdx) {
    lastUseIdx = LII->end;
  } else if (LII == LR.begin()) {
    // We may not have a liverange at all if this is a subregister untouched
    // between \p Begin and \p End.
    // NOTE(review): if LR were empty, LII == LR.end() here and the
    // dereferences below would be invalid — presumably callers only pass
    // ranges with at least one segment; confirm.
  } else {
    --LII;
  }

  // Walk the region backwards, rebuilding segment boundaries from each def
  // and use of Reg encountered.
  for (MachineBasicBlock::iterator I = End; I != Begin;) {
    --I;
    MachineInstr &MI = *I;
    if (MI.isDebugOrPseudoInstr())
      continue;
    SlotIndex instrIdx = getInstructionIndex(MI);
    // An endpoint is "valid" when an instruction still exists at its index.
    bool isStartValid = getInstructionFromIndex(LII->start);
    bool isEndValid = getInstructionFromIndex(LII->end);

    // FIXME: This doesn't currently handle early-clobber or multiple removed
    // defs inside of the region to repair.
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg() || MO.getReg() != Reg)
        continue;
      unsigned SubReg = MO.getSubReg();
      LaneBitmask Mask = TRI->getSubRegIndexLaneMask(SubReg);
      // Skip operands whose lanes don't intersect the (sub)range's lanes.
      if ((Mask & LaneMask).none())
        continue;

      if (MO.isDef()) {
        if (!isStartValid) {
          if (LII->end.isDead()) {
            // The old defining instruction is gone and the segment was dead:
            // remove the segment entirely.
            LII = LR.removeSegment(LII, true);
            if (LII != LR.begin())
              --LII;
          } else {
            // Re-anchor the segment and its value at this def.
            LII->start = instrIdx.getRegSlot();
            LII->valno->def = instrIdx.getRegSlot();
            if (MO.getSubReg() && !MO.isUndef())
              lastUseIdx = instrIdx.getRegSlot();
            else
              lastUseIdx = SlotIndex();
            continue;
          }
        }

        if (!lastUseIdx.isValid()) {
          // No use seen above this def: create a dead-def segment.
          VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
          LiveRange::Segment S(instrIdx.getRegSlot(),
                               instrIdx.getDeadSlot(), VNI);
          LII = LR.addSegment(S);
        } else if (LII->start != instrIdx.getRegSlot()) {
          // Start a new value running from this def to the last use found.
          VNInfo *VNI = LR.getNextValue(instrIdx.getRegSlot(), VNInfoAllocator);
          LiveRange::Segment S(instrIdx.getRegSlot(), lastUseIdx, VNI);
          LII = LR.addSegment(S);
        }

        // A non-undef subregister def reads the other lanes, so it also
        // counts as a use; a full def kills the pending use index.
        if (MO.getSubReg() && !MO.isUndef())
          lastUseIdx = instrIdx.getRegSlot();
        else
          lastUseIdx = SlotIndex();
      } else if (MO.isUse()) {
        // FIXME: This should probably be handled outside of this branch,
        // either as part of the def case (for defs inside of the region) or
        // after the loop over the region.
        if (!isEndValid && !LII->end.isBlock())
          LII->end = instrIdx.getRegSlot();
        if (!lastUseIdx.isValid())
          lastUseIdx = instrIdx.getRegSlot();
      }
    }
  }

  // If the first remaining segment starts at a removed instruction and is
  // dead, drop it as well.
  bool isStartValid = getInstructionFromIndex(LII->start);
  if (!isStartValid && LII->end.isDead())
    LR.removeSegment(*LII, true);
}
/// Repair the live intervals of \p OrigRegs after the instructions in
/// [\p Begin, \p End) of \p MBB were modified: repair the slot indexes first,
/// compute missing intervals from scratch, then patch the surviving ones via
/// repairOldRegInRange.
void
LiveIntervals::repairIntervalsInRange(MachineBasicBlock *MBB,
                                      MachineBasicBlock::iterator Begin,
                                      MachineBasicBlock::iterator End,
                                      ArrayRef<Register> OrigRegs) {
  // Find anchor points, which are at the beginning/end of blocks or at
  // instructions that already have indexes.
  while (Begin != MBB->begin() && !Indexes->hasIndex(*Begin))
    --Begin;
  while (End != MBB->end() && !Indexes->hasIndex(*End))
    ++End;

  // The slot just past the repaired region.
  SlotIndex EndIdx;
  if (End == MBB->end())
    EndIdx = getMBBEndIdx(MBB).getPrevSlot();
  else
    EndIdx = getInstructionIndex(*End);

  Indexes->repairIndexesInRange(MBB, Begin, End);

  // Make sure a live interval exists for all register operands in the range.
  SmallVector<Register> RegsToRepair(OrigRegs.begin(), OrigRegs.end());
  for (MachineBasicBlock::iterator I = End; I != Begin;) {
    --I;
    MachineInstr &MI = *I;
    if (MI.isDebugOrPseudoInstr())
      continue;
    for (const MachineOperand &MO : MI.operands()) {
      if (MO.isReg() && MO.getReg().isVirtual()) {
        Register Reg = MO.getReg();
        // If the new instructions refer to subregs but the old instructions did
        // not, throw away any old live interval so it will be recomputed with
        // subranges.
        if (MO.getSubReg() && hasInterval(Reg) &&
            !getInterval(Reg).hasSubRanges() &&
            MRI->shouldTrackSubRegLiveness(Reg))
          removeInterval(Reg);
        if (!hasInterval(Reg)) {
          createAndComputeVirtRegInterval(Reg);
          // Don't bother to repair a freshly calculated live interval.
          erase_value(RegsToRepair, Reg);
        }
      }
    }
  }

  // Patch the remaining intervals: subranges first, then the main range.
  for (Register Reg : RegsToRepair) {
    if (!Reg.isVirtual())
      continue;

    LiveInterval &LI = getInterval(Reg);
    // FIXME: Should we support undefs that gain defs?
    if (!LI.hasAtLeastOneValue())
      continue;

    for (LiveInterval::SubRange &S : LI.subranges())
      repairOldRegInRange(Begin, End, EndIdx, S, Reg, S.LaneMask);
    LI.removeEmptySubRanges();

    repairOldRegInRange(Begin, End, EndIdx, LI, Reg);
  }
}
  1543. void LiveIntervals::removePhysRegDefAt(MCRegister Reg, SlotIndex Pos) {
  1544. for (MCRegUnitIterator Unit(Reg, TRI); Unit.isValid(); ++Unit) {
  1545. if (LiveRange *LR = getCachedRegUnit(*Unit))
  1546. if (VNInfo *VNI = LR->getVNInfoAt(Pos))
  1547. LR->removeValNo(VNI);
  1548. }
  1549. }
  1550. void LiveIntervals::removeVRegDefAt(LiveInterval &LI, SlotIndex Pos) {
  1551. // LI may not have the main range computed yet, but its subranges may
  1552. // be present.
  1553. VNInfo *VNI = LI.getVNInfoAt(Pos);
  1554. if (VNI != nullptr) {
  1555. assert(VNI->def.getBaseIndex() == Pos.getBaseIndex());
  1556. LI.removeValNo(VNI);
  1557. }
  1558. // Also remove the value defined in subranges.
  1559. for (LiveInterval::SubRange &S : LI.subranges()) {
  1560. if (VNInfo *SVNI = S.getVNInfoAt(Pos))
  1561. if (SVNI->def.getBaseIndex() == Pos.getBaseIndex())
  1562. S.removeValNo(SVNI);
  1563. }
  1564. LI.removeEmptySubRanges();
  1565. }
  1566. void LiveIntervals::splitSeparateComponents(LiveInterval &LI,
  1567. SmallVectorImpl<LiveInterval*> &SplitLIs) {
  1568. ConnectedVNInfoEqClasses ConEQ(*this);
  1569. unsigned NumComp = ConEQ.Classify(LI);
  1570. if (NumComp <= 1)
  1571. return;
  1572. LLVM_DEBUG(dbgs() << " Split " << NumComp << " components: " << LI << '\n');
  1573. Register Reg = LI.reg();
  1574. const TargetRegisterClass *RegClass = MRI->getRegClass(Reg);
  1575. for (unsigned I = 1; I < NumComp; ++I) {
  1576. Register NewVReg = MRI->createVirtualRegister(RegClass);
  1577. LiveInterval &NewLI = createEmptyInterval(NewVReg);
  1578. SplitLIs.push_back(&NewLI);
  1579. }
  1580. ConEQ.Distribute(LI, SplitLIs.data(), *MRI);
  1581. }
/// Rebuild LI's main range from its subranges by delegating to LICalc,
/// after resetting it with the current function, indexes, dominator tree,
/// and VNInfo allocator.
void LiveIntervals::constructMainRangeFromSubranges(LiveInterval &LI) {
  assert(LICalc && "LICalc not initialized.");
  LICalc->reset(MF, getSlotIndexes(), DomTree, &getVNInfoAllocator());
  LICalc->constructMainRangeFromSubranges(LI);
}