//===- SplitKit.cpp - Toolkit for splitting live ranges -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the SplitAnalysis class as well as mutator functions for
// live range splitting.
//
//===----------------------------------------------------------------------===//

#include "SplitKit.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveRangeEdit.h"
#include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/BlockFrequency.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>
#include <limits>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumFinished, "Number of splits finished");
STATISTIC(NumSimple,   "Number of splits that were simple");
STATISTIC(NumCopies,   "Number of copies inserted for splitting");
STATISTIC(NumRemats,   "Number of rematerialized defs for splitting");
STATISTIC(NumRepairs,  "Number of invalid live ranges repaired");

//===----------------------------------------------------------------------===//
//                     Last Insert Point Analysis
//===----------------------------------------------------------------------===//
InsertPointAnalysis::InsertPointAnalysis(const LiveIntervals &lis,
                                         unsigned BBNum)
    : LIS(lis), LastInsertPoint(BBNum) {}

SlotIndex
InsertPointAnalysis::computeLastInsertPoint(const LiveInterval &CurLI,
                                            const MachineBasicBlock &MBB) {
  unsigned Num = MBB.getNumber();
  std::pair<SlotIndex, SlotIndex> &LIP = LastInsertPoint[Num];
  SlotIndex MBBEnd = LIS.getMBBEndIdx(&MBB);

  SmallVector<const MachineBasicBlock *, 1> ExceptionalSuccessors;
  bool EHPadSuccessor = false;
  for (const MachineBasicBlock *SMBB : MBB.successors()) {
    if (SMBB->isEHPad()) {
      ExceptionalSuccessors.push_back(SMBB);
      EHPadSuccessor = true;
    } else if (SMBB->isInlineAsmBrIndirectTarget())
      ExceptionalSuccessors.push_back(SMBB);
  }

  // Compute insert points on the first call. The pair is independent of the
  // current live interval.
  if (!LIP.first.isValid()) {
    MachineBasicBlock::const_iterator FirstTerm = MBB.getFirstTerminator();
    if (FirstTerm == MBB.end())
      LIP.first = MBBEnd;
    else
      LIP.first = LIS.getInstructionIndex(*FirstTerm);

    // If there is a landing pad or inlineasm_br successor, also find the
    // instruction. If there is no such instruction, we don't need to do
    // anything special. We assume there cannot be multiple instructions that
    // are Calls with EHPad successors or INLINEASM_BR in a block. Further, we
    // assume that if there are any, they will be after any other call
    // instructions in the block.
    if (ExceptionalSuccessors.empty())
      return LIP.first;
    for (auto I = MBB.rbegin(), E = MBB.rend(); I != E; ++I) {
      if ((EHPadSuccessor && I->isCall()) ||
          I->getOpcode() == TargetOpcode::INLINEASM_BR) {
        LIP.second = LIS.getInstructionIndex(*I);
        break;
      }
    }
  }

  // If CurLI is live into a landing pad successor, move the last insert point
  // back to the call that may throw.
  if (!LIP.second)
    return LIP.first;

  if (none_of(ExceptionalSuccessors, [&](const MachineBasicBlock *EHPad) {
        return LIS.isLiveInToMBB(CurLI, EHPad);
      }))
    return LIP.first;

  // Find the value leaving MBB.
  const VNInfo *VNI = CurLI.getVNInfoBefore(MBBEnd);
  if (!VNI)
    return LIP.first;

  // If the value leaving MBB was defined after the call in MBB, it can't
  // really be live-in to the landing pad. This can happen if the landing pad
  // has a PHI, and this register is undef on the exceptional edge.
  // <rdar://problem/10664933>
  if (!SlotIndex::isEarlierInstr(VNI->def, LIP.second) && VNI->def < MBBEnd)
    return LIP.first;

  // Value is properly live-in to the landing pad.
  // Only allow inserts before the call.
  return LIP.second;
}

MachineBasicBlock::iterator
InsertPointAnalysis::getLastInsertPointIter(const LiveInterval &CurLI,
                                            MachineBasicBlock &MBB) {
  SlotIndex LIP = getLastInsertPoint(CurLI, MBB);
  if (LIP == LIS.getMBBEndIdx(&MBB))
    return MBB.end();
  return LIS.getInstructionFromIndex(LIP);
}
//===----------------------------------------------------------------------===//
//                                 Split Analysis
//===----------------------------------------------------------------------===//

SplitAnalysis::SplitAnalysis(const VirtRegMap &vrm, const LiveIntervals &lis,
                             const MachineLoopInfo &mli)
    : MF(vrm.getMachineFunction()), VRM(vrm), LIS(lis), Loops(mli),
      TII(*MF.getSubtarget().getInstrInfo()), IPA(lis, MF.getNumBlockIDs()) {}

void SplitAnalysis::clear() {
  UseSlots.clear();
  UseBlocks.clear();
  ThroughBlocks.clear();
  CurLI = nullptr;
  DidRepairRange = false;
}

/// analyzeUses - Count instructions, basic blocks, and loops using CurLI.
void SplitAnalysis::analyzeUses() {
  assert(UseSlots.empty() && "Call clear first");

  // First get all the defs from the interval values. This provides the correct
  // slots for early clobbers.
  for (const VNInfo *VNI : CurLI->valnos)
    if (!VNI->isPHIDef() && !VNI->isUnused())
      UseSlots.push_back(VNI->def);
  // Get use slots from the use-def chain.
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  for (MachineOperand &MO : MRI.use_nodbg_operands(CurLI->reg()))
    if (!MO.isUndef())
      UseSlots.push_back(LIS.getInstructionIndex(*MO.getParent()).getRegSlot());

  array_pod_sort(UseSlots.begin(), UseSlots.end());

  // Remove duplicates, keeping the smaller slot for each instruction.
  // That is what we want for early clobbers.
  UseSlots.erase(std::unique(UseSlots.begin(), UseSlots.end(),
                             SlotIndex::isSameInstr),
                 UseSlots.end());

  // Compute per-live block info.
  if (!calcLiveBlockInfo()) {
    // FIXME: calcLiveBlockInfo found inconsistencies in the live range.
    // I am looking at you, RegisterCoalescer!
    DidRepairRange = true;
    ++NumRepairs;
    LLVM_DEBUG(dbgs() << "*** Fixing inconsistent live interval! ***\n");
    const_cast<LiveIntervals&>(LIS)
        .shrinkToUses(const_cast<LiveInterval*>(CurLI));
    UseBlocks.clear();
    ThroughBlocks.clear();
    bool fixed = calcLiveBlockInfo();
    (void)fixed;
    assert(fixed && "Couldn't fix broken live interval");
  }

  LLVM_DEBUG(dbgs() << "Analyze counted " << UseSlots.size() << " instrs in "
                    << UseBlocks.size() << " blocks, through "
                    << NumThroughBlocks << " blocks.\n");
}
/// calcLiveBlockInfo - Fill the LiveBlocks array with information about blocks
/// where CurLI is live.
bool SplitAnalysis::calcLiveBlockInfo() {
  ThroughBlocks.resize(MF.getNumBlockIDs());
  NumThroughBlocks = NumGapBlocks = 0;
  if (CurLI->empty())
    return true;

  LiveInterval::const_iterator LVI = CurLI->begin();
  LiveInterval::const_iterator LVE = CurLI->end();

  SmallVectorImpl<SlotIndex>::const_iterator UseI, UseE;
  UseI = UseSlots.begin();
  UseE = UseSlots.end();

  // Loop over basic blocks where CurLI is live.
  MachineFunction::iterator MFI =
      LIS.getMBBFromIndex(LVI->start)->getIterator();
  while (true) {
    BlockInfo BI;
    BI.MBB = &*MFI;
    SlotIndex Start, Stop;
    std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

    // If the block contains no uses, the range must be live through. At one
    // point, RegisterCoalescer could create dangling ranges that ended
    // mid-block.
    if (UseI == UseE || *UseI >= Stop) {
      ++NumThroughBlocks;
      ThroughBlocks.set(BI.MBB->getNumber());
      // The range shouldn't end mid-block if there are no uses. This shouldn't
      // happen.
      if (LVI->end < Stop)
        return false;
    } else {
      // This block has uses. Find the first and last uses in the block.
      BI.FirstInstr = *UseI;
      assert(BI.FirstInstr >= Start);
      do ++UseI;
      while (UseI != UseE && *UseI < Stop);
      BI.LastInstr = UseI[-1];
      assert(BI.LastInstr < Stop);

      // LVI is the first live segment overlapping MBB.
      BI.LiveIn = LVI->start <= Start;

      // When not live in, the first use should be a def.
      if (!BI.LiveIn) {
        assert(LVI->start == LVI->valno->def && "Dangling Segment start");
        assert(LVI->start == BI.FirstInstr && "First instr should be a def");
        BI.FirstDef = BI.FirstInstr;
      }

      // Look for gaps in the live range.
      BI.LiveOut = true;
      while (LVI->end < Stop) {
        SlotIndex LastStop = LVI->end;
        if (++LVI == LVE || LVI->start >= Stop) {
          BI.LiveOut = false;
          BI.LastInstr = LastStop;
          break;
        }

        if (LastStop < LVI->start) {
          // There is a gap in the live range. Create duplicate entries for the
          // live-in snippet and the live-out snippet.
          ++NumGapBlocks;

          // Push the Live-in part.
          BI.LiveOut = false;
          UseBlocks.push_back(BI);
          UseBlocks.back().LastInstr = LastStop;

          // Set up BI for the live-out part.
          BI.LiveIn = false;
          BI.LiveOut = true;
          BI.FirstInstr = BI.FirstDef = LVI->start;
        }

        // A Segment that starts in the middle of the block must be a def.
        assert(LVI->start == LVI->valno->def && "Dangling Segment start");
        if (!BI.FirstDef)
          BI.FirstDef = LVI->start;
      }

      UseBlocks.push_back(BI);

      // LVI is now at LVE or LVI->end >= Stop.
      if (LVI == LVE)
        break;
    }

    // Live segment ends exactly at Stop. Move to the next segment.
    if (LVI->end == Stop && ++LVI == LVE)
      break;

    // Pick the next basic block.
    if (LVI->start < Stop)
      ++MFI;
    else
      MFI = LIS.getMBBFromIndex(LVI->start)->getIterator();
  }

  assert(getNumLiveBlocks() == countLiveBlocks(CurLI) && "Bad block count");
  return true;
}
unsigned SplitAnalysis::countLiveBlocks(const LiveInterval *cli) const {
  if (cli->empty())
    return 0;
  LiveInterval *li = const_cast<LiveInterval*>(cli);
  LiveInterval::iterator LVI = li->begin();
  LiveInterval::iterator LVE = li->end();
  unsigned Count = 0;

  // Loop over basic blocks where li is live.
  MachineFunction::const_iterator MFI =
      LIS.getMBBFromIndex(LVI->start)->getIterator();
  SlotIndex Stop = LIS.getMBBEndIdx(&*MFI);
  while (true) {
    ++Count;
    LVI = li->advanceTo(LVI, Stop);
    if (LVI == LVE)
      return Count;
    do {
      ++MFI;
      Stop = LIS.getMBBEndIdx(&*MFI);
    } while (Stop <= LVI->start);
  }
}

bool SplitAnalysis::isOriginalEndpoint(SlotIndex Idx) const {
  unsigned OrigReg = VRM.getOriginal(CurLI->reg());
  const LiveInterval &Orig = LIS.getInterval(OrigReg);
  assert(!Orig.empty() && "Splitting empty interval?");
  LiveInterval::const_iterator I = Orig.find(Idx);

  // Range containing Idx should begin at Idx.
  if (I != Orig.end() && I->start <= Idx)
    return I->start == Idx;

  // Range does not contain Idx, previous must end at Idx.
  return I != Orig.begin() && (--I)->end == Idx;
}

void SplitAnalysis::analyze(const LiveInterval *li) {
  clear();
  CurLI = li;
  analyzeUses();
}
//===----------------------------------------------------------------------===//
//                               Split Editor
//===----------------------------------------------------------------------===//

/// Create a new SplitEditor for editing the LiveInterval analyzed by SA.
SplitEditor::SplitEditor(SplitAnalysis &sa, AliasAnalysis &aa,
                         LiveIntervals &lis, VirtRegMap &vrm,
                         MachineDominatorTree &mdt,
                         MachineBlockFrequencyInfo &mbfi)
    : SA(sa), AA(aa), LIS(lis), VRM(vrm),
      MRI(vrm.getMachineFunction().getRegInfo()), MDT(mdt),
      TII(*vrm.getMachineFunction().getSubtarget().getInstrInfo()),
      TRI(*vrm.getMachineFunction().getSubtarget().getRegisterInfo()),
      MBFI(mbfi), RegAssign(Allocator) {}

void SplitEditor::reset(LiveRangeEdit &LRE, ComplementSpillMode SM) {
  Edit = &LRE;
  SpillMode = SM;
  OpenIdx = 0;
  RegAssign.clear();
  Values.clear();

  // Reset the LiveIntervalCalc instances needed for this spill mode.
  LICalc[0].reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                  &LIS.getVNInfoAllocator());
  if (SpillMode)
    LICalc[1].reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                    &LIS.getVNInfoAllocator());

  // We don't need an AliasAnalysis since we will only be performing
  // cheap-as-a-copy remats anyway.
  Edit->anyRematerializable(nullptr);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void SplitEditor::dump() const {
  if (RegAssign.empty()) {
    dbgs() << " empty\n";
    return;
  }

  for (RegAssignMap::const_iterator I = RegAssign.begin(); I.valid(); ++I)
    dbgs() << " [" << I.start() << ';' << I.stop() << "):" << I.value();
  dbgs() << '\n';
}
#endif
LiveInterval::SubRange &SplitEditor::getSubRangeForMaskExact(LaneBitmask LM,
                                                             LiveInterval &LI) {
  for (LiveInterval::SubRange &S : LI.subranges())
    if (S.LaneMask == LM)
      return S;
  llvm_unreachable("SubRange for this mask not found");
}

LiveInterval::SubRange &SplitEditor::getSubRangeForMask(LaneBitmask LM,
                                                        LiveInterval &LI) {
  for (LiveInterval::SubRange &S : LI.subranges())
    if ((S.LaneMask & LM) == LM)
      return S;
  llvm_unreachable("SubRange for this mask not found");
}

void SplitEditor::addDeadDef(LiveInterval &LI, VNInfo *VNI, bool Original) {
  if (!LI.hasSubRanges()) {
    LI.createDeadDef(VNI);
    return;
  }

  SlotIndex Def = VNI->def;
  if (Original) {
    // If we are transferring a def from the original interval, make sure
    // to only update the subranges for which the original subranges had
    // a def at this location.
    for (LiveInterval::SubRange &S : LI.subranges()) {
      auto &PS = getSubRangeForMask(S.LaneMask, Edit->getParent());
      VNInfo *PV = PS.getVNInfoAt(Def);
      if (PV != nullptr && PV->def == Def)
        S.createDeadDef(Def, LIS.getVNInfoAllocator());
    }
  } else {
    // This is a new def: either from rematerialization, or from an inserted
    // copy. Since rematerialization can regenerate a definition of a sub-
    // register, we need to check which subranges need to be updated.
    const MachineInstr *DefMI = LIS.getInstructionFromIndex(Def);
    assert(DefMI != nullptr);
    LaneBitmask LM;
    for (const MachineOperand &DefOp : DefMI->defs()) {
      Register R = DefOp.getReg();
      if (R != LI.reg())
        continue;
      if (unsigned SR = DefOp.getSubReg())
        LM |= TRI.getSubRegIndexLaneMask(SR);
      else {
        LM = MRI.getMaxLaneMaskForVReg(R);
        break;
      }
    }
    for (LiveInterval::SubRange &S : LI.subranges())
      if ((S.LaneMask & LM).any())
        S.createDeadDef(Def, LIS.getVNInfoAllocator());
  }
}
VNInfo *SplitEditor::defValue(unsigned RegIdx,
                              const VNInfo *ParentVNI,
                              SlotIndex Idx,
                              bool Original) {
  assert(ParentVNI && "Mapping NULL value");
  assert(Idx.isValid() && "Invalid SlotIndex");
  assert(Edit->getParent().getVNInfoAt(Idx) == ParentVNI && "Bad Parent VNI");
  LiveInterval *LI = &LIS.getInterval(Edit->get(RegIdx));

  // Create a new value.
  VNInfo *VNI = LI->getNextValue(Idx, LIS.getVNInfoAllocator());

  bool Force = LI->hasSubRanges();
  ValueForcePair FP(Force ? nullptr : VNI, Force);
  // Use insert for lookup, so we can add missing values with a second lookup.
  std::pair<ValueMap::iterator, bool> InsP =
      Values.insert(std::make_pair(std::make_pair(RegIdx, ParentVNI->id), FP));

  // This was the first time (RegIdx, ParentVNI) was mapped, and it is not
  // forced. Keep it as a simple def without any liveness.
  if (!Force && InsP.second)
    return VNI;

  // If the previous value was a simple mapping, add liveness for it now.
  if (VNInfo *OldVNI = InsP.first->second.getPointer()) {
    addDeadDef(*LI, OldVNI, Original);

    // No longer a simple mapping. Switch to a complex mapping. If the
    // interval has subranges, make it a forced mapping.
    InsP.first->second = ValueForcePair(nullptr, Force);
  }

  // This is a complex mapping, add liveness for VNI
  addDeadDef(*LI, VNI, Original);
  return VNI;
}

void SplitEditor::forceRecompute(unsigned RegIdx, const VNInfo &ParentVNI) {
  ValueForcePair &VFP = Values[std::make_pair(RegIdx, ParentVNI.id)];
  VNInfo *VNI = VFP.getPointer();

  // ParentVNI was either unmapped or already complex mapped. Either way, just
  // set the force bit.
  if (!VNI) {
    VFP.setInt(true);
    return;
  }

  // This was previously a single mapping. Make sure the old def is represented
  // by a trivial live range.
  addDeadDef(LIS.getInterval(Edit->get(RegIdx)), VNI, false);

  // Mark as complex mapped, forced.
  VFP = ValueForcePair(nullptr, true);
}
SlotIndex SplitEditor::buildSingleSubRegCopy(Register FromReg, Register ToReg,
    MachineBasicBlock &MBB, MachineBasicBlock::iterator InsertBefore,
    unsigned SubIdx, LiveInterval &DestLI, bool Late, SlotIndex Def) {
  const MCInstrDesc &Desc = TII.get(TargetOpcode::COPY);
  bool FirstCopy = !Def.isValid();
  MachineInstr *CopyMI = BuildMI(MBB, InsertBefore, DebugLoc(), Desc)
      .addReg(ToReg, RegState::Define | getUndefRegState(FirstCopy)
              | getInternalReadRegState(!FirstCopy), SubIdx)
      .addReg(FromReg, 0, SubIdx);

  BumpPtrAllocator &Allocator = LIS.getVNInfoAllocator();
  SlotIndexes &Indexes = *LIS.getSlotIndexes();
  if (FirstCopy) {
    Def = Indexes.insertMachineInstrInMaps(*CopyMI, Late).getRegSlot();
  } else {
    CopyMI->bundleWithPred();
  }
  LaneBitmask LaneMask = TRI.getSubRegIndexLaneMask(SubIdx);
  DestLI.refineSubRanges(Allocator, LaneMask,
                         [Def, &Allocator](LiveInterval::SubRange &SR) {
                           SR.createDeadDef(Def, Allocator);
                         },
                         Indexes, TRI);
  return Def;
}

SlotIndex SplitEditor::buildCopy(Register FromReg, Register ToReg,
    LaneBitmask LaneMask, MachineBasicBlock &MBB,
    MachineBasicBlock::iterator InsertBefore, bool Late, unsigned RegIdx) {
  const MCInstrDesc &Desc = TII.get(TargetOpcode::COPY);
  if (LaneMask.all() || LaneMask == MRI.getMaxLaneMaskForVReg(FromReg)) {
    // The full vreg is copied.
    MachineInstr *CopyMI =
        BuildMI(MBB, InsertBefore, DebugLoc(), Desc, ToReg).addReg(FromReg);
    SlotIndexes &Indexes = *LIS.getSlotIndexes();
    return Indexes.insertMachineInstrInMaps(*CopyMI, Late).getRegSlot();
  }

  // Only a subset of lanes needs to be copied. The following is a simple
  // heuristic to construct a sequence of COPYs. We could add a target
  // specific callback if this turns out to be suboptimal.
  LiveInterval &DestLI = LIS.getInterval(Edit->get(RegIdx));

  // First pass: Try to find a perfectly matching subregister index. If none
  // exists find the one covering the most lanemask bits.
  SmallVector<unsigned, 8> PossibleIndexes;
  unsigned BestIdx = 0;
  unsigned BestCover = 0;
  const TargetRegisterClass *RC = MRI.getRegClass(FromReg);
  assert(RC == MRI.getRegClass(ToReg) && "Should have same reg class");
  for (unsigned Idx = 1, E = TRI.getNumSubRegIndices(); Idx < E; ++Idx) {
    // Is this index even compatible with the given class?
    if (TRI.getSubClassWithSubReg(RC, Idx) != RC)
      continue;
    LaneBitmask SubRegMask = TRI.getSubRegIndexLaneMask(Idx);
    // Early exit if we found a perfect match.
    if (SubRegMask == LaneMask) {
      BestIdx = Idx;
      break;
    }

    // The index must not cover any lanes outside \p LaneMask.
    if ((SubRegMask & ~LaneMask).any())
      continue;

    unsigned PopCount = SubRegMask.getNumLanes();
    PossibleIndexes.push_back(Idx);
    if (PopCount > BestCover) {
      BestCover = PopCount;
      BestIdx = Idx;
    }
  }

  // Abort if we cannot possibly implement the COPY with the given indexes.
  if (BestIdx == 0)
    report_fatal_error("Impossible to implement partial COPY");

  SlotIndex Def = buildSingleSubRegCopy(FromReg, ToReg, MBB, InsertBefore,
                                        BestIdx, DestLI, Late, SlotIndex());

  // Greedy heuristic: Keep iterating keeping the best covering subreg index
  // each time.
  LaneBitmask LanesLeft = LaneMask & ~(TRI.getSubRegIndexLaneMask(BestIdx));
  while (LanesLeft.any()) {
    unsigned BestIdx = 0;
    int BestCover = std::numeric_limits<int>::min();
    for (unsigned Idx : PossibleIndexes) {
      LaneBitmask SubRegMask = TRI.getSubRegIndexLaneMask(Idx);
      // Early exit if we found a perfect match.
      if (SubRegMask == LanesLeft) {
        BestIdx = Idx;
        break;
      }

      // Try to cover as much of the remaining lanes as possible but
      // as few of the already covered lanes as possible.
      int Cover = (SubRegMask & LanesLeft).getNumLanes()
                  - (SubRegMask & ~LanesLeft).getNumLanes();
      if (Cover > BestCover) {
        BestCover = Cover;
        BestIdx = Idx;
      }
    }

    if (BestIdx == 0)
      report_fatal_error("Impossible to implement partial COPY");

    buildSingleSubRegCopy(FromReg, ToReg, MBB, InsertBefore, BestIdx,
                          DestLI, Late, Def);
    LanesLeft &= ~TRI.getSubRegIndexLaneMask(BestIdx);
  }

  return Def;
}
VNInfo *SplitEditor::defFromParent(unsigned RegIdx,
                                   VNInfo *ParentVNI,
                                   SlotIndex UseIdx,
                                   MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator I) {
  SlotIndex Def;
  LiveInterval *LI = &LIS.getInterval(Edit->get(RegIdx));

  // We may be trying to avoid interference that ends at a deleted instruction,
  // so always begin RegIdx 0 early and all others late.
  bool Late = RegIdx != 0;

  // Attempt cheap-as-a-copy rematerialization.
  unsigned Original = VRM.getOriginal(Edit->get(RegIdx));
  LiveInterval &OrigLI = LIS.getInterval(Original);
  VNInfo *OrigVNI = OrigLI.getVNInfoAt(UseIdx);

  Register Reg = LI->reg();
  bool DidRemat = false;
  if (OrigVNI) {
    LiveRangeEdit::Remat RM(ParentVNI);
    RM.OrigMI = LIS.getInstructionFromIndex(OrigVNI->def);
    if (Edit->canRematerializeAt(RM, OrigVNI, UseIdx, true)) {
      Def = Edit->rematerializeAt(MBB, I, Reg, RM, TRI, Late);
      ++NumRemats;
      DidRemat = true;
    }
  }
  if (!DidRemat) {
    LaneBitmask LaneMask;
    if (OrigLI.hasSubRanges()) {
      LaneMask = LaneBitmask::getNone();
      for (LiveInterval::SubRange &S : OrigLI.subranges()) {
        if (S.liveAt(UseIdx))
          LaneMask |= S.LaneMask;
      }
    } else {
      LaneMask = LaneBitmask::getAll();
    }

    if (LaneMask.none()) {
      const MCInstrDesc &Desc = TII.get(TargetOpcode::IMPLICIT_DEF);
      MachineInstr *ImplicitDef = BuildMI(MBB, I, DebugLoc(), Desc, Reg);
      SlotIndexes &Indexes = *LIS.getSlotIndexes();
      Def = Indexes.insertMachineInstrInMaps(*ImplicitDef, Late).getRegSlot();
    } else {
      ++NumCopies;
      Def = buildCopy(Edit->getReg(), Reg, LaneMask, MBB, I, Late, RegIdx);
    }
  }

  // Define the value in Reg.
  return defValue(RegIdx, ParentVNI, Def, false);
}

/// Create a new virtual register and live interval.
unsigned SplitEditor::openIntv() {
  // Create the complement as index 0.
  if (Edit->empty())
    Edit->createEmptyInterval();

  // Create the open interval.
  OpenIdx = Edit->size();
  Edit->createEmptyInterval();
  return OpenIdx;
}

void SplitEditor::selectIntv(unsigned Idx) {
  assert(Idx != 0 && "Cannot select the complement interval");
  assert(Idx < Edit->size() && "Can only select previously opened interval");
  LLVM_DEBUG(dbgs() << " selectIntv " << OpenIdx << " -> " << Idx << '\n');
  OpenIdx = Idx;
}
SlotIndex SplitEditor::enterIntvBefore(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before enterIntvBefore");
  LLVM_DEBUG(dbgs() << " enterIntvBefore " << Idx);
  Idx = Idx.getBaseIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << ": not live\n");
    return Idx;
  }
  LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  assert(MI && "enterIntvBefore called with invalid index");

  VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Idx, *MI->getParent(), MI);
  return VNI->def;
}

SlotIndex SplitEditor::enterIntvAfter(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before enterIntvAfter");
  LLVM_DEBUG(dbgs() << " enterIntvAfter " << Idx);
  Idx = Idx.getBoundaryIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << ": not live\n");
    return Idx;
  }
  LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  assert(MI && "enterIntvAfter called with invalid index");

  VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Idx, *MI->getParent(),
                              std::next(MachineBasicBlock::iterator(MI)));
  return VNI->def;
}

SlotIndex SplitEditor::enterIntvAtEnd(MachineBasicBlock &MBB) {
  assert(OpenIdx && "openIntv not called before enterIntvAtEnd");
  SlotIndex End = LIS.getMBBEndIdx(&MBB);
  SlotIndex Last = End.getPrevSlot();
  LLVM_DEBUG(dbgs() << " enterIntvAtEnd " << printMBBReference(MBB) << ", "
                    << Last);
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Last);
  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << ": not live\n");
    return End;
  }
  LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id);
  VNInfo *VNI = defFromParent(OpenIdx, ParentVNI, Last, MBB,
                              SA.getLastSplitPointIter(&MBB));
  RegAssign.insert(VNI->def, End, OpenIdx);
  LLVM_DEBUG(dump());
  return VNI->def;
}

/// useIntv - indicate that all instructions in MBB should use OpenLI.
void SplitEditor::useIntv(const MachineBasicBlock &MBB) {
  useIntv(LIS.getMBBStartIdx(&MBB), LIS.getMBBEndIdx(&MBB));
}

void SplitEditor::useIntv(SlotIndex Start, SlotIndex End) {
  assert(OpenIdx && "openIntv not called before useIntv");
  LLVM_DEBUG(dbgs() << " useIntv [" << Start << ';' << End << "):");
  RegAssign.insert(Start, End, OpenIdx);
  LLVM_DEBUG(dump());
}

SlotIndex SplitEditor::leaveIntvAfter(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before leaveIntvAfter");
  LLVM_DEBUG(dbgs() << " leaveIntvAfter " << Idx);

  // The interval must be live beyond the instruction at Idx.
  SlotIndex Boundary = Idx.getBoundaryIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Boundary);
  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << ": not live\n");
    return Boundary.getNextSlot();
  }
  LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');
  MachineInstr *MI = LIS.getInstructionFromIndex(Boundary);
  assert(MI && "No instruction at index");

  // In spill mode, make live ranges as short as possible by inserting the copy
  // before MI. This is only possible if that instruction doesn't redefine the
  // value. The inserted COPY is not a kill, and we don't need to recompute
  // the source live range. The spiller also won't try to hoist this copy.
  if (SpillMode && !SlotIndex::isSameInstr(ParentVNI->def, Idx) &&
      MI->readsVirtualRegister(Edit->getReg())) {
    forceRecompute(0, *ParentVNI);
    defFromParent(0, ParentVNI, Idx, *MI->getParent(), MI);
    return Idx;
  }

  VNInfo *VNI = defFromParent(0, ParentVNI, Boundary, *MI->getParent(),
                              std::next(MachineBasicBlock::iterator(MI)));
  return VNI->def;
}

SlotIndex SplitEditor::leaveIntvBefore(SlotIndex Idx) {
  assert(OpenIdx && "openIntv not called before leaveIntvBefore");
  LLVM_DEBUG(dbgs() << " leaveIntvBefore " << Idx);

  // The interval must be live into the instruction at Idx.
  Idx = Idx.getBaseIndex();
  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Idx);
  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << ": not live\n");
    return Idx.getNextSlot();
  }
  LLVM_DEBUG(dbgs() << ": valno " << ParentVNI->id << '\n');

  MachineInstr *MI = LIS.getInstructionFromIndex(Idx);
  assert(MI && "No instruction at index");
  VNInfo *VNI = defFromParent(0, ParentVNI, Idx, *MI->getParent(), MI);
  return VNI->def;
}

SlotIndex SplitEditor::leaveIntvAtTop(MachineBasicBlock &MBB) {
  assert(OpenIdx && "openIntv not called before leaveIntvAtTop");
  SlotIndex Start = LIS.getMBBStartIdx(&MBB);
  LLVM_DEBUG(dbgs() << " leaveIntvAtTop " << printMBBReference(MBB) << ", "
                    << Start);

  VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Start);
  if (!ParentVNI) {
    LLVM_DEBUG(dbgs() << ": not live\n");
    return Start;
  }

  VNInfo *VNI = defFromParent(0, ParentVNI, Start, MBB,
                              MBB.SkipPHIsLabelsAndDebug(MBB.begin()));
  RegAssign.insert(Start, VNI->def, OpenIdx);
  LLVM_DEBUG(dump());
  return VNI->def;
}

void SplitEditor::overlapIntv(SlotIndex Start, SlotIndex End) {
  assert(OpenIdx && "openIntv not called before overlapIntv");
  const VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(Start);
  assert(ParentVNI == Edit->getParent().getVNInfoBefore(End) &&
         "Parent changes value in extended range");
  assert(LIS.getMBBFromIndex(Start) == LIS.getMBBFromIndex(End) &&
         "Range cannot span basic blocks");

  // The complement interval will be extended as needed by LICalc.extend().
  if (ParentVNI)
    forceRecompute(0, *ParentVNI);
  LLVM_DEBUG(dbgs() << " overlapIntv [" << Start << ';' << End << "):");
  RegAssign.insert(Start, End, OpenIdx);
  LLVM_DEBUG(dump());
}
//===----------------------------------------------------------------------===//
//                                  Spill modes
//===----------------------------------------------------------------------===//

void SplitEditor::removeBackCopies(SmallVectorImpl<VNInfo*> &Copies) {
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LLVM_DEBUG(dbgs() << "Removing " << Copies.size() << " back-copies.\n");
  RegAssignMap::iterator AssignI;
  AssignI.setMap(RegAssign);

  for (unsigned i = 0, e = Copies.size(); i != e; ++i) {
    SlotIndex Def = Copies[i]->def;
    MachineInstr *MI = LIS.getInstructionFromIndex(Def);
    assert(MI && "No instruction for back-copy");

    MachineBasicBlock *MBB = MI->getParent();
    MachineBasicBlock::iterator MBBI(MI);
    bool AtBegin;
    do AtBegin = MBBI == MBB->begin();
    while (!AtBegin && (--MBBI)->isDebugInstr());

    LLVM_DEBUG(dbgs() << "Removing " << Def << '\t' << *MI);
    LIS.removeVRegDefAt(*LI, Def);
    LIS.RemoveMachineInstrFromMaps(*MI);
    MI->eraseFromParent();

    // Adjust RegAssign if a register assignment is killed at Def. We want to
    // avoid calculating the live range of the source register if possible.
    AssignI.find(Def.getPrevSlot());
    if (!AssignI.valid() || AssignI.start() >= Def)
      continue;
    // If MI doesn't kill the assigned register, just leave it.
    if (AssignI.stop() != Def)
      continue;
    unsigned RegIdx = AssignI.value();
    if (AtBegin || !MBBI->readsVirtualRegister(Edit->getReg())) {
      LLVM_DEBUG(dbgs() << " cannot find simple kill of RegIdx " << RegIdx
                        << '\n');
      forceRecompute(RegIdx, *Edit->getParent().getVNInfoAt(Def));
    } else {
      SlotIndex Kill = LIS.getInstructionIndex(*MBBI).getRegSlot();
      LLVM_DEBUG(dbgs() << " move kill to " << Kill << '\t' << *MBBI);
      AssignI.setStop(Kill);
    }
  }
}

MachineBasicBlock*
SplitEditor::findShallowDominator(MachineBasicBlock *MBB,
                                  MachineBasicBlock *DefMBB) {
  if (MBB == DefMBB)
    return MBB;
  assert(MDT.dominates(DefMBB, MBB) && "MBB must be dominated by the def.");

  const MachineLoopInfo &Loops = SA.Loops;
  const MachineLoop *DefLoop = Loops.getLoopFor(DefMBB);
  MachineDomTreeNode *DefDomNode = MDT[DefMBB];

  // Best candidate so far.
  MachineBasicBlock *BestMBB = MBB;
  unsigned BestDepth = std::numeric_limits<unsigned>::max();

  while (true) {
    const MachineLoop *Loop = Loops.getLoopFor(MBB);

    // MBB isn't in a loop, it doesn't get any better. All dominators have a
    // higher frequency by definition.
    if (!Loop) {
      LLVM_DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB)
                        << " dominates " << printMBBReference(*MBB)
                        << " at depth 0\n");
      return MBB;
    }

    // We'll never be able to exit the DefLoop.
    if (Loop == DefLoop) {
      LLVM_DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB)
                        << " dominates " << printMBBReference(*MBB)
                        << " in the same loop\n");
      return MBB;
    }

    // Least busy dominator seen so far.
    unsigned Depth = Loop->getLoopDepth();
    if (Depth < BestDepth) {
      BestMBB = MBB;
      BestDepth = Depth;
      LLVM_DEBUG(dbgs() << "Def in " << printMBBReference(*DefMBB)
                        << " dominates " << printMBBReference(*MBB)
                        << " at depth " << Depth << '\n');
    }

    // Leave loop by going to the immediate dominator of the loop header.
    // This is a bigger stride than simply walking up the dominator tree.
    MachineDomTreeNode *IDom = MDT[Loop->getHeader()]->getIDom();

    // Too far up the dominator tree?
    if (!IDom || !MDT.dominates(DefDomNode, IDom))
      return BestMBB;

    MBB = IDom->getBlock();
  }
}
void SplitEditor::computeRedundantBackCopies(
    DenseSet<unsigned> &NotToHoistSet, SmallVectorImpl<VNInfo *> &BackCopies) {
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LiveInterval *Parent = &Edit->getParent();
  SmallVector<SmallPtrSet<VNInfo *, 8>, 8> EqualVNs(Parent->getNumValNums());
  SmallPtrSet<VNInfo *, 8> DominatedVNIs;

  // Aggregate VNIs having the same value as ParentVNI.
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    EqualVNs[ParentVNI->id].insert(VNI);
  }

  // For VNI aggregation of each ParentVNI, collect dominated, i.e.,
  // redundant VNIs to BackCopies.
  for (unsigned i = 0, e = Parent->getNumValNums(); i != e; ++i) {
    VNInfo *ParentVNI = Parent->getValNumInfo(i);
    if (!NotToHoistSet.count(ParentVNI->id))
      continue;
    SmallPtrSetIterator<VNInfo *> It1 = EqualVNs[ParentVNI->id].begin();
    SmallPtrSetIterator<VNInfo *> It2 = It1;
    for (; It1 != EqualVNs[ParentVNI->id].end(); ++It1) {
      It2 = It1;
      for (++It2; It2 != EqualVNs[ParentVNI->id].end(); ++It2) {
        if (DominatedVNIs.count(*It1) || DominatedVNIs.count(*It2))
          continue;

        MachineBasicBlock *MBB1 = LIS.getMBBFromIndex((*It1)->def);
        MachineBasicBlock *MBB2 = LIS.getMBBFromIndex((*It2)->def);
        if (MBB1 == MBB2) {
          DominatedVNIs.insert((*It1)->def < (*It2)->def ? (*It2) : (*It1));
        } else if (MDT.dominates(MBB1, MBB2)) {
          DominatedVNIs.insert(*It2);
        } else if (MDT.dominates(MBB2, MBB1)) {
          DominatedVNIs.insert(*It1);
        }
      }
    }
    if (!DominatedVNIs.empty()) {
      forceRecompute(0, *ParentVNI);
      append_range(BackCopies, DominatedVNIs);
      DominatedVNIs.clear();
    }
  }
}

/// For SM_Size mode, find a common dominator for all the back-copies for
/// the same ParentVNI and hoist the backcopies to the dominator BB.
/// For SM_Speed mode, if the common dominator is hot and it is not beneficial
/// to do the hoisting, simply remove the dominated backcopies for the same
/// ParentVNI.
void SplitEditor::hoistCopies() {
  // Get the complement interval, always RegIdx 0.
  LiveInterval *LI = &LIS.getInterval(Edit->get(0));
  LiveInterval *Parent = &Edit->getParent();

  // Track the nearest common dominator for all back-copies for each ParentVNI,
  // indexed by ParentVNI->id.
  using DomPair = std::pair<MachineBasicBlock *, SlotIndex>;
  SmallVector<DomPair, 8> NearestDom(Parent->getNumValNums());
  // The total cost of all the back-copies for each ParentVNI.
  SmallVector<BlockFrequency, 8> Costs(Parent->getNumValNums());
  // The ParentVNI->id set for which hoisting back-copies are not beneficial
  // for Speed.
  DenseSet<unsigned> NotToHoistSet;

  // Find the nearest common dominator for parent values with multiple
  // back-copies. If a single back-copy dominates, put it in DomPair.second.
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    assert(ParentVNI && "Parent not live at complement def");

    // Don't hoist remats. The complement is probably going to disappear
    // completely anyway.
    if (Edit->didRematerialize(ParentVNI))
      continue;

    MachineBasicBlock *ValMBB = LIS.getMBBFromIndex(VNI->def);

    DomPair &Dom = NearestDom[ParentVNI->id];

    // Keep directly defined parent values. This is either a PHI or an
    // instruction in the complement range. All other copies of ParentVNI
    // should be eliminated.
    if (VNI->def == ParentVNI->def) {
      LLVM_DEBUG(dbgs() << "Direct complement def at " << VNI->def << '\n');
      Dom = DomPair(ValMBB, VNI->def);
      continue;
    }
    // Skip the singly mapped values. There is nothing to gain from hoisting a
    // single back-copy.
    if (Values.lookup(std::make_pair(0, ParentVNI->id)).getPointer()) {
      LLVM_DEBUG(dbgs() << "Single complement def at " << VNI->def << '\n');
      continue;
    }

    if (!Dom.first) {
      // First time we see ParentVNI. VNI dominates itself.
      Dom = DomPair(ValMBB, VNI->def);
    } else if (Dom.first == ValMBB) {
      // Two defs in the same block. Pick the earlier def.
      if (!Dom.second.isValid() || VNI->def < Dom.second)
        Dom.second = VNI->def;
    } else {
      // Different basic blocks. Check if one dominates.
      MachineBasicBlock *Near =
          MDT.findNearestCommonDominator(Dom.first, ValMBB);
      if (Near == ValMBB)
        // Def ValMBB dominates.
        Dom = DomPair(ValMBB, VNI->def);
      else if (Near != Dom.first)
        // None dominate. Hoist to common dominator, need new def.
        Dom = DomPair(Near, SlotIndex());
      Costs[ParentVNI->id] += MBFI.getBlockFreq(ValMBB);
    }

    LLVM_DEBUG(dbgs() << "Multi-mapped complement " << VNI->id << '@'
                      << VNI->def << " for parent " << ParentVNI->id << '@'
                      << ParentVNI->def << " hoist to "
                      << printMBBReference(*Dom.first) << ' ' << Dom.second
                      << '\n');
  }

  // Insert the hoisted copies.
  for (unsigned i = 0, e = Parent->getNumValNums(); i != e; ++i) {
    DomPair &Dom = NearestDom[i];
    if (!Dom.first || Dom.second.isValid())
      continue;
    // This value needs a hoisted copy inserted at the end of Dom.first.
    VNInfo *ParentVNI = Parent->getValNumInfo(i);
    MachineBasicBlock *DefMBB = LIS.getMBBFromIndex(ParentVNI->def);
    // Get a less loopy dominator than Dom.first.
    Dom.first = findShallowDominator(Dom.first, DefMBB);
    if (SpillMode == SM_Speed &&
        MBFI.getBlockFreq(Dom.first) > Costs[ParentVNI->id]) {
      NotToHoistSet.insert(ParentVNI->id);
      continue;
    }
    SlotIndex Last = LIS.getMBBEndIdx(Dom.first).getPrevSlot();
    Dom.second =
        defFromParent(0, ParentVNI, Last, *Dom.first,
                      SA.getLastSplitPointIter(Dom.first))->def;
  }

  // Remove redundant back-copies that are now known to be dominated by another
  // def with the same value.
  SmallVector<VNInfo*, 8> BackCopies;
  for (VNInfo *VNI : LI->valnos) {
    if (VNI->isUnused())
      continue;
    VNInfo *ParentVNI = Edit->getParent().getVNInfoAt(VNI->def);
    const DomPair &Dom = NearestDom[ParentVNI->id];
    if (!Dom.first || Dom.second == VNI->def ||
        NotToHoistSet.count(ParentVNI->id))
      continue;
    BackCopies.push_back(VNI);
    forceRecompute(0, *ParentVNI);
  }

  // If it is not beneficial to hoist all the BackCopies, simply remove
  // redundant BackCopies in speed mode.
  if (SpillMode == SM_Speed && !NotToHoistSet.empty())
    computeRedundantBackCopies(NotToHoistSet, BackCopies);
  removeBackCopies(BackCopies);
}
/// transferValues - Transfer all possible values to the new live ranges.
/// Values that were rematerialized are left alone, they need LICalc.extend().
bool SplitEditor::transferValues() {
  bool Skipped = false;
  RegAssignMap::const_iterator AssignI = RegAssign.begin();
  for (const LiveRange::Segment &S : Edit->getParent()) {
    LLVM_DEBUG(dbgs() << " blit " << S << ':');
    VNInfo *ParentVNI = S.valno;
    // RegAssign has holes where RegIdx 0 should be used.
    SlotIndex Start = S.start;
    AssignI.advanceTo(Start);
    do {
      unsigned RegIdx;
      SlotIndex End = S.end;
      if (!AssignI.valid()) {
        RegIdx = 0;
      } else if (AssignI.start() <= Start) {
        RegIdx = AssignI.value();
        if (AssignI.stop() < End) {
          End = AssignI.stop();
          ++AssignI;
        }
      } else {
        RegIdx = 0;
        End = std::min(End, AssignI.start());
      }

      // The interval [Start;End) is continuously mapped to RegIdx, ParentVNI.
      LLVM_DEBUG(dbgs() << " [" << Start << ';' << End << ")=" << RegIdx << '('
                        << printReg(Edit->get(RegIdx)) << ')');
      LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));

      // Check for a simply defined value that can be blitted directly.
      ValueForcePair VFP = Values.lookup(std::make_pair(RegIdx, ParentVNI->id));
      if (VNInfo *VNI = VFP.getPointer()) {
        LLVM_DEBUG(dbgs() << ':' << VNI->id);
        LI.addSegment(LiveInterval::Segment(Start, End, VNI));
        Start = End;
        continue;
      }

      // Skip values with forced recomputation.
      if (VFP.getInt()) {
        LLVM_DEBUG(dbgs() << "(recalc)");
        Skipped = true;
        Start = End;
        continue;
      }

      LiveIntervalCalc &LIC = getLICalc(RegIdx);

      // This value has multiple defs in RegIdx, but it wasn't rematerialized,
      // so the live range is accurate. Add live-in blocks in [Start;End) to the
      // LiveInBlocks.
      MachineFunction::iterator MBB = LIS.getMBBFromIndex(Start)->getIterator();
      SlotIndex BlockStart, BlockEnd;
      std::tie(BlockStart, BlockEnd) = LIS.getSlotIndexes()->getMBBRange(&*MBB);

      // The first block may be live-in, or it may have its own def.
      if (Start != BlockStart) {
        VNInfo *VNI = LI.extendInBlock(BlockStart, std::min(BlockEnd, End));
        assert(VNI && "Missing def for complex mapped value");
        LLVM_DEBUG(dbgs() << ':' << VNI->id << "*" << printMBBReference(*MBB));
        // MBB has its own def. Is it also live-out?
        if (BlockEnd <= End)
          LIC.setLiveOutValue(&*MBB, VNI);

        // Skip to the next block for live-in.
        ++MBB;
        BlockStart = BlockEnd;
      }

      // Handle the live-in blocks covered by [Start;End).
      assert(Start <= BlockStart && "Expected live-in block");
      while (BlockStart < End) {
        LLVM_DEBUG(dbgs() << ">" << printMBBReference(*MBB));
        BlockEnd = LIS.getMBBEndIdx(&*MBB);
        if (BlockStart == ParentVNI->def) {
          // This block has the def of a parent PHI, so it isn't live-in.
          assert(ParentVNI->isPHIDef() && "Non-phi defined at block start?");
          VNInfo *VNI = LI.extendInBlock(BlockStart, std::min(BlockEnd, End));
          assert(VNI && "Missing def for complex mapped parent PHI");
          if (End >= BlockEnd)
            LIC.setLiveOutValue(&*MBB, VNI); // Live-out as well.
        } else {
          // This block needs a live-in value. The last block covered may not
          // be live-out.
          if (End < BlockEnd)
            LIC.addLiveInBlock(LI, MDT[&*MBB], End);
          else {
            // Live-through, and we don't know the value.
            LIC.addLiveInBlock(LI, MDT[&*MBB]);
            LIC.setLiveOutValue(&*MBB, nullptr);
          }
        }
        BlockStart = BlockEnd;
        ++MBB;
      }
      Start = End;
    } while (Start != S.end);
    LLVM_DEBUG(dbgs() << '\n');
  }

  LICalc[0].calculateValues();
  if (SpillMode)
    LICalc[1].calculateValues();

  return Skipped;
}
static bool removeDeadSegment(SlotIndex Def, LiveRange &LR) {
  const LiveRange::Segment *Seg = LR.getSegmentContaining(Def);
  if (Seg == nullptr)
    return true;
  if (Seg->end != Def.getDeadSlot())
    return false;
  // This is a dead PHI. Remove it.
  LR.removeSegment(*Seg, true);
  return true;
}

void SplitEditor::extendPHIRange(MachineBasicBlock &B, LiveIntervalCalc &LIC,
                                 LiveRange &LR, LaneBitmask LM,
                                 ArrayRef<SlotIndex> Undefs) {
  for (MachineBasicBlock *P : B.predecessors()) {
    SlotIndex End = LIS.getMBBEndIdx(P);
    SlotIndex LastUse = End.getPrevSlot();
    // The predecessor may not have a live-out value. That is OK, like an
    // undef PHI operand.
    LiveInterval &PLI = Edit->getParent();
    // Need the cast because the inputs to ?: would otherwise be deemed
    // "incompatible": SubRange vs LiveInterval.
    LiveRange &PSR = !LM.all() ? getSubRangeForMaskExact(LM, PLI)
                               : static_cast<LiveRange &>(PLI);
    if (PSR.liveAt(LastUse))
      LIC.extend(LR, End, /*PhysReg=*/0, Undefs);
  }
}

void SplitEditor::extendPHIKillRanges() {
  // Extend live ranges to be live-out for successor PHI values.

  // Visit each PHI def slot in the parent live interval. If the def is dead,
  // remove it. Otherwise, extend the live interval to reach the end indexes
  // of all predecessor blocks.
  LiveInterval &ParentLI = Edit->getParent();
  for (const VNInfo *V : ParentLI.valnos) {
    if (V->isUnused() || !V->isPHIDef())
      continue;

    unsigned RegIdx = RegAssign.lookup(V->def);
    LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
    LiveIntervalCalc &LIC = getLICalc(RegIdx);
    MachineBasicBlock &B = *LIS.getMBBFromIndex(V->def);
    if (!removeDeadSegment(V->def, LI))
      extendPHIRange(B, LIC, LI, LaneBitmask::getAll(), /*Undefs=*/{});
  }

  SmallVector<SlotIndex, 4> Undefs;
  LiveIntervalCalc SubLIC;

  for (LiveInterval::SubRange &PS : ParentLI.subranges()) {
    for (const VNInfo *V : PS.valnos) {
      if (V->isUnused() || !V->isPHIDef())
        continue;
      unsigned RegIdx = RegAssign.lookup(V->def);
      LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
      LiveInterval::SubRange &S = getSubRangeForMaskExact(PS.LaneMask, LI);
      if (removeDeadSegment(V->def, S))
        continue;

      MachineBasicBlock &B = *LIS.getMBBFromIndex(V->def);
      SubLIC.reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                   &LIS.getVNInfoAllocator());
      Undefs.clear();
      LI.computeSubRangeUndefs(Undefs, PS.LaneMask, MRI, *LIS.getSlotIndexes());
      extendPHIRange(B, SubLIC, S, PS.LaneMask, Undefs);
    }
  }
}
/// rewriteAssigned - Rewrite all uses of Edit->getReg().
void SplitEditor::rewriteAssigned(bool ExtendRanges) {
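  // A deferred live range extension for an operand of a register that has
  // subranges. Extensions are recorded here and only applied after all defs
  // of the register have been rewritten, so the subrange undef points are
  // known first.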
  struct ExtPoint {
    ExtPoint(const MachineOperand &O, unsigned R, SlotIndex N)
      : MO(O), RegIdx(R), Next(N) {}
    MachineOperand MO;
    unsigned RegIdx;
    SlotIndex Next;
  };

  SmallVector<ExtPoint,4> ExtPoints;

  for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(Edit->getReg()),
       RE = MRI.reg_end(); RI != RE;) {
    MachineOperand &MO = *RI;
    MachineInstr *MI = MO.getParent();
    ++RI;
    // LiveDebugVariables should have handled all DBG_VALUE instructions.
    if (MI->isDebugValue()) {
      LLVM_DEBUG(dbgs() << "Zapping " << *MI);
      MO.setReg(0);
      continue;
    }

    // <undef> operands don't really read the register, so it doesn't matter
    // which register we choose. When the use operand is tied to a def, we must
    // use the same register as the def, so just do that always.
    SlotIndex Idx = LIS.getInstructionIndex(*MI);
    if (MO.isDef() || MO.isUndef())
      Idx = Idx.getRegSlot(MO.isEarlyClobber());

    // Rewrite to the mapped register at Idx.
    unsigned RegIdx = RegAssign.lookup(Idx);
    LiveInterval &LI = LIS.getInterval(Edit->get(RegIdx));
    MO.setReg(LI.reg());
    LLVM_DEBUG(dbgs() << " rewr " << printMBBReference(*MI->getParent())
                      << '\t' << Idx << ':' << RegIdx << '\t' << *MI);

    // Extend liveness to Idx if the instruction reads reg.
    if (!ExtendRanges || MO.isUndef())
      continue;

    // Skip instructions that don't read Reg.
    if (MO.isDef()) {
      if (!MO.getSubReg() && !MO.isEarlyClobber())
        continue;
      // We may want to extend a live range for a partial redef, or for a use
      // tied to an early clobber.
      Idx = Idx.getPrevSlot();
      if (!Edit->getParent().liveAt(Idx))
        continue;
    } else
      Idx = Idx.getRegSlot(true);

    SlotIndex Next = Idx.getNextSlot();
    if (LI.hasSubRanges()) {
      // We have to delay extending subranges until we have seen all operands
      // defining the register. This is because a <def,read-undef> operand
      // will create an "undef" point, and we cannot extend any subranges
      // until all of them have been accounted for.
      if (MO.isUse())
        ExtPoints.push_back(ExtPoint(MO, RegIdx, Next));
    } else {
      LiveIntervalCalc &LIC = getLICalc(RegIdx);
      LIC.extend(LI, Next, 0, ArrayRef<SlotIndex>());
    }
  }

  for (ExtPoint &EP : ExtPoints) {
    LiveInterval &LI = LIS.getInterval(Edit->get(EP.RegIdx));
    assert(LI.hasSubRanges());

    LiveIntervalCalc SubLIC;
    Register Reg = EP.MO.getReg(), Sub = EP.MO.getSubReg();
    LaneBitmask LM = Sub != 0 ? TRI.getSubRegIndexLaneMask(Sub)
                              : MRI.getMaxLaneMaskForVReg(Reg);
    for (LiveInterval::SubRange &S : LI.subranges()) {
      if ((S.LaneMask & LM).none())
        continue;
      // The problem here can be that the new register may have been created
      // for a partially defined original register. For example:
      //   %0:subreg_hireg<def,read-undef> = ...
      //   ...
      //   %1 = COPY %0
      if (S.empty())
        continue;
      SubLIC.reset(&VRM.getMachineFunction(), LIS.getSlotIndexes(), &MDT,
                   &LIS.getVNInfoAllocator());
      SmallVector<SlotIndex, 4> Undefs;
      LI.computeSubRangeUndefs(Undefs, S.LaneMask, MRI, *LIS.getSlotIndexes());
      SubLIC.extend(S, EP.Next, 0, Undefs);
    }
  }

  for (Register R : *Edit) {
    LiveInterval &LI = LIS.getInterval(R);
    if (!LI.hasSubRanges())
      continue;
    LI.clear();
    LI.removeEmptySubRanges();
    LIS.constructMainRangeFromSubranges(LI);
  }
}

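// Clean up after rematerialization: for each new interval, mark defs that end
// at their dead slot as dead on the defining instruction, and delete the
// instruction once all of its defs are dead.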
void SplitEditor::deleteRematVictims() {
  SmallVector<MachineInstr*, 8> Dead;
  for (LiveRangeEdit::iterator I = Edit->begin(), E = Edit->end(); I != E; ++I) {
    LiveInterval *LI = &LIS.getInterval(*I);
    for (const LiveRange::Segment &S : LI->segments) {
      // Dead defs end at the dead slot.
      if (S.end != S.valno->def.getDeadSlot())
        continue;
      if (S.valno->isPHIDef())
        continue;
      MachineInstr *MI = LIS.getInstructionFromIndex(S.valno->def);
      assert(MI && "Missing instruction for dead def");
      MI->addRegisterDead(LI->reg(), &TRI);

      if (!MI->allDefsAreDead())
        continue;

      LLVM_DEBUG(dbgs() << "All defs dead: " << *MI);
      Dead.push_back(MI);
    }
  }

  if (Dead.empty())
    return;

  Edit->eliminateDeadDefs(Dead, None, &AA);
}

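// Force every new interval to recompute its value for ParentVNI. When
// ParentVNI is a PHI def, the parent values reaching it from predecessor
// blocks are traced and forced as well.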
void SplitEditor::forceRecomputeVNI(const VNInfo &ParentVNI) {
  // Fast-path for common case.
  if (!ParentVNI.isPHIDef()) {
    for (unsigned I = 0, E = Edit->size(); I != E; ++I)
      forceRecompute(I, ParentVNI);
    return;
  }

  // Trace value through phis.
  SmallPtrSet<const VNInfo *, 8> Visited; ///< whether VNI was/is in worklist.
  SmallVector<const VNInfo *, 4> WorkList;
  Visited.insert(&ParentVNI);
  WorkList.push_back(&ParentVNI);

  const LiveInterval &ParentLI = Edit->getParent();
  const SlotIndexes &Indexes = *LIS.getSlotIndexes();
  do {
    const VNInfo &VNI = *WorkList.back();
    WorkList.pop_back();
    for (unsigned I = 0, E = Edit->size(); I != E; ++I)
      forceRecompute(I, VNI);
    if (!VNI.isPHIDef())
      continue;

    MachineBasicBlock &MBB = *Indexes.getMBBFromIndex(VNI.def);
    for (const MachineBasicBlock *Pred : MBB.predecessors()) {
      SlotIndex PredEnd = Indexes.getMBBEndIdx(Pred);
      VNInfo *PredVNI = ParentLI.getVNInfoBefore(PredEnd);
      assert(PredVNI && "Value available in PhiVNI predecessor");
      if (Visited.insert(PredVNI).second)
        WorkList.push_back(PredVNI);
    }
  } while (!WorkList.empty());
}

void SplitEditor::finish(SmallVectorImpl<unsigned> *LRMap) {
  ++NumFinished;

  // At this point, the live intervals in Edit contain VNInfos corresponding to
  // the inserted copies.

  // Add the original defs from the parent interval.
  for (const VNInfo *ParentVNI : Edit->getParent().valnos) {
    if (ParentVNI->isUnused())
      continue;
    unsigned RegIdx = RegAssign.lookup(ParentVNI->def);
    defValue(RegIdx, ParentVNI, ParentVNI->def, true);

    // Force rematted values to be recomputed everywhere.
    // The new live ranges may be truncated.
    if (Edit->didRematerialize(ParentVNI))
      forceRecomputeVNI(*ParentVNI);
  }

  // Hoist back-copies to the complement interval when in spill mode.
  switch (SpillMode) {
  case SM_Partition:
    // Leave all back-copies as is.
    break;
  case SM_Size:
  case SM_Speed:
    // hoistCopies will behave differently between size and speed.
    hoistCopies();
  }

  // Transfer the simply mapped values, check if any are skipped.
  bool Skipped = transferValues();

  // Rewrite virtual registers, possibly extending ranges.
  rewriteAssigned(Skipped);

  if (Skipped)
    extendPHIKillRanges();
  else
    ++NumSimple;

  // Delete defs that were rematted everywhere.
  if (Skipped)
    deleteRematVictims();

  // Get rid of unused values and set phi-kill flags.
  for (Register Reg : *Edit) {
    LiveInterval &LI = LIS.getInterval(Reg);
    LI.removeEmptySubRanges();
    LI.RenumberValues();
  }

  // Provide a reverse mapping from original indices to Edit ranges.
  if (LRMap) {
    LRMap->clear();
    for (unsigned i = 0, e = Edit->size(); i != e; ++i)
      LRMap->push_back(i);
  }

  // Now check if any registers were separated into multiple components.
  ConnectedVNInfoEqClasses ConEQ(LIS);
  for (unsigned i = 0, e = Edit->size(); i != e; ++i) {
    // Don't use iterators, they are invalidated by create() below.
    Register VReg = Edit->get(i);
    LiveInterval &LI = LIS.getInterval(VReg);
    SmallVector<LiveInterval*, 8> SplitLIs;
    LIS.splitSeparateComponents(LI, SplitLIs);
    Register Original = VRM.getOriginal(VReg);
    for (LiveInterval *SplitLI : SplitLIs)
      VRM.setIsSplitFromReg(SplitLI->reg(), Original);

    // The new intervals all map back to i.
    if (LRMap)
      LRMap->resize(Edit->size(), i);
  }

  // Calculate spill weight and allocation hints for new intervals.
  Edit->calculateRegClassAndHint(VRM.getMachineFunction(), SA.Loops, MBFI);

  assert(!LRMap || LRMap->size() == Edit->size());
}

//===----------------------------------------------------------------------===//
//                            Single Block Splitting
//===----------------------------------------------------------------------===//

bool SplitAnalysis::shouldSplitSingleBlock(const BlockInfo &BI,
                                           bool SingleInstrs) const {
  // Always split for multiple instructions.
  if (!BI.isOneInstr())
    return true;
  // Don't split for single instructions unless explicitly requested.
  if (!SingleInstrs)
    return false;
  // Splitting a live-through range always makes progress.
  if (BI.LiveIn && BI.LiveOut)
    return true;
  // No point in isolating a copy. It has no register class constraints.
  if (LIS.getInstructionFromIndex(BI.FirstInstr)->isCopyLike())
    return false;
  // Finally, don't isolate an end point that was created by earlier splits.
  return isOriginalEndpoint(BI.FirstInstr);
}

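// Isolate the uses in BI's block in a new interval: enter it before the first
// use and leave it after the last use. If the range is live-out and the last
// use falls after the last valid split point, leave before that point instead
// and overlap the remaining uses.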
void SplitEditor::splitSingleBlock(const SplitAnalysis::BlockInfo &BI) {
  openIntv();
  SlotIndex LastSplitPoint = SA.getLastSplitPoint(BI.MBB->getNumber());
  SlotIndex SegStart = enterIntvBefore(std::min(BI.FirstInstr,
    LastSplitPoint));
  if (!BI.LiveOut || BI.LastInstr < LastSplitPoint) {
    useIntv(SegStart, leaveIntvAfter(BI.LastInstr));
  } else {
    // The last use is after the last valid split point.
    SlotIndex SegStop = leaveIntvBefore(LastSplitPoint);
    useIntv(SegStart, SegStop);
    overlapIntv(SegStop, BI.LastInstr);
  }
}

//===----------------------------------------------------------------------===//
//                    Global Live Range Splitting Support
//===----------------------------------------------------------------------===//
// These methods support global live range splitting, where a global algorithm
// decides which intervals to use across CFG edges. They insert split points
// and color intervals within basic blocks while avoiding interference.
//
// Note that splitSingleBlock is also useful for blocks where both CFG edges
// are on the stack.

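// Split a block the register lives through. IntvIn is the interval used on
// entry and IntvOut the interval used on exit; a value of 0 means the value
// stays on the stack on that side. LeaveBefore and EnterAfter bound the
// interference inside the block: IntvIn must be left before LeaveBefore, and
// IntvOut entered after EnterAfter.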
void SplitEditor::splitLiveThroughBlock(unsigned MBBNum,
                                        unsigned IntvIn, SlotIndex LeaveBefore,
                                        unsigned IntvOut, SlotIndex EnterAfter){
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(MBBNum);

  LLVM_DEBUG(dbgs() << "%bb." << MBBNum << " [" << Start << ';' << Stop
                    << ") intf " << LeaveBefore << '-' << EnterAfter
                    << ", live-through " << IntvIn << " -> " << IntvOut);

  assert((IntvIn || IntvOut) && "Use splitSingleBlock for isolated blocks");

  assert((!LeaveBefore || LeaveBefore < Stop) && "Interference after block");
  assert((!IntvIn || !LeaveBefore || LeaveBefore > Start) && "Impossible intf");
  assert((!EnterAfter || EnterAfter >= Start) && "Interference before block");

  MachineBasicBlock *MBB = VRM.getMachineFunction().getBlockNumbered(MBBNum);

  if (!IntvOut) {
    LLVM_DEBUG(dbgs() << ", spill on entry.\n");
    //
    //        <<<<<<<<<    Possible LeaveBefore interference.
    //    |-----------|    Live through.
    //    -____________    Spill on entry.
    //
    selectIntv(IntvIn);
    SlotIndex Idx = leaveIntvAtTop(*MBB);
    assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    (void)Idx;
    return;
  }

  if (!IntvIn) {
    LLVM_DEBUG(dbgs() << ", reload on exit.\n");
    //
    //    >>>>>>>          Possible EnterAfter interference.
    //    |-----------|    Live through.
    //    ___________--    Reload on exit.
    //
    selectIntv(IntvOut);
    SlotIndex Idx = enterIntvAtEnd(*MBB);
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    (void)Idx;
    return;
  }

  if (IntvIn == IntvOut && !LeaveBefore && !EnterAfter) {
    LLVM_DEBUG(dbgs() << ", straight through.\n");
    //
    //    |-----------|    Live through.
    //    -------------    Straight through, same intv, no interference.
    //
    selectIntv(IntvOut);
    useIntv(Start, Stop);
    return;
  }

  // We cannot legally insert splits after LSP.
  SlotIndex LSP = SA.getLastSplitPoint(MBBNum);
  assert((!IntvOut || !EnterAfter || EnterAfter < LSP) && "Impossible intf");

  if (IntvIn != IntvOut && (!LeaveBefore || !EnterAfter ||
                  LeaveBefore.getBaseIndex() > EnterAfter.getBoundaryIndex())) {
    LLVM_DEBUG(dbgs() << ", switch avoiding interference.\n");
    //
    //    >>>>     <<<<    Non-overlapping EnterAfter/LeaveBefore interference.
    //    |-----------|    Live through.
    //    ------=======    Switch intervals between interference.
    //
    selectIntv(IntvOut);
    SlotIndex Idx;
    if (LeaveBefore && LeaveBefore < LSP) {
      Idx = enterIntvBefore(LeaveBefore);
      useIntv(Idx, Stop);
    } else {
      Idx = enterIntvAtEnd(*MBB);
    }
    selectIntv(IntvIn);
    useIntv(Start, Idx);
    assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    return;
  }

  LLVM_DEBUG(dbgs() << ", create local intv for interference.\n");
  //
  //    >>><><><><<<<    Overlapping EnterAfter/LeaveBefore interference.
  //    |-----------|    Live through.
  //    ==---------==    Switch intervals before/after interference.
  //
  assert(LeaveBefore <= EnterAfter && "Missed case");

  selectIntv(IntvOut);
  SlotIndex Idx = enterIntvAfter(EnterAfter);
  useIntv(Idx, Stop);
  assert((!EnterAfter || Idx >= EnterAfter) && "Interference");

  selectIntv(IntvIn);
  Idx = leaveIntvBefore(LeaveBefore);
  useIntv(Start, Idx);
  assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
}

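// Split a block that is live-in to IntvIn and either kills the register or
// leaves it live-out on the stack. LeaveBefore, when set, marks interference
// that IntvIn must be left before.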
void SplitEditor::splitRegInBlock(const SplitAnalysis::BlockInfo &BI,
                                  unsigned IntvIn, SlotIndex LeaveBefore) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

  LLVM_DEBUG(dbgs() << printMBBReference(*BI.MBB) << " [" << Start << ';'
                    << Stop << "), uses " << BI.FirstInstr << '-'
                    << BI.LastInstr << ", reg-in " << IntvIn
                    << ", leave before " << LeaveBefore
                    << (BI.LiveOut ? ", stack-out" : ", killed in block"));

  assert(IntvIn && "Must have register in");
  assert(BI.LiveIn && "Must be live-in");
  assert((!LeaveBefore || LeaveBefore > Start) && "Bad interference");

  if (!BI.LiveOut && (!LeaveBefore || LeaveBefore >= BI.LastInstr)) {
    LLVM_DEBUG(dbgs() << " before interference.\n");
    //
    //               <<<    Interference after kill.
    //     |---o---x   |    Killed in block.
    //     =========        Use IntvIn everywhere.
    //
    selectIntv(IntvIn);
    useIntv(Start, BI.LastInstr);
    return;
  }

  SlotIndex LSP = SA.getLastSplitPoint(BI.MBB->getNumber());

  if (!LeaveBefore || LeaveBefore > BI.LastInstr.getBoundaryIndex()) {
    //
    //               <<<    Possible interference after last use.
    //     |---o---o---|    Live-out on stack.
    //     =========____    Leave IntvIn after last use.
    //
    //                 <    Interference after last use.
    //     |---o---o--o|    Live-out on stack, late last use.
    //     ============     Copy to stack after LSP, overlap IntvIn.
    //            \_____    Stack interval is live-out.
    //
    if (BI.LastInstr < LSP) {
      LLVM_DEBUG(dbgs() << ", spill after last use before interference.\n");
      selectIntv(IntvIn);
      SlotIndex Idx = leaveIntvAfter(BI.LastInstr);
      useIntv(Start, Idx);
      assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    } else {
      LLVM_DEBUG(dbgs() << ", spill before last split point.\n");
      selectIntv(IntvIn);
      SlotIndex Idx = leaveIntvBefore(LSP);
      overlapIntv(Idx, BI.LastInstr);
      useIntv(Start, Idx);
      assert((!LeaveBefore || Idx <= LeaveBefore) && "Interference");
    }
    return;
  }

  // The interference is overlapping somewhere we wanted to use IntvIn. That
  // means we need to create a local interval that can be allocated a
  // different register.
  unsigned LocalIntv = openIntv();
  (void)LocalIntv;
  LLVM_DEBUG(dbgs() << ", creating local interval " << LocalIntv << ".\n");

  if (!BI.LiveOut || BI.LastInstr < LSP) {
    //
    //           <<<<<<<    Interference overlapping uses.
    //     |---o---o---|    Live-out on stack.
    //     =====----____    Leave IntvIn before interference, then spill.
    //
    SlotIndex To = leaveIntvAfter(BI.LastInstr);
    SlotIndex From = enterIntvBefore(LeaveBefore);
    useIntv(From, To);
    selectIntv(IntvIn);
    useIntv(Start, From);
    assert((!LeaveBefore || From <= LeaveBefore) && "Interference");
    return;
  }

  //           <<<<<<<    Interference overlapping uses.
  //     |---o---o--o|    Live-out on stack, late last use.
  //     =====-------     Copy to stack before LSP, overlap LocalIntv.
  //            \_____    Stack interval is live-out.
  //
  SlotIndex To = leaveIntvBefore(LSP);
  overlapIntv(To, BI.LastInstr);
  SlotIndex From = enterIntvBefore(std::min(To, LeaveBefore));
  useIntv(From, To);
  selectIntv(IntvIn);
  useIntv(Start, From);
  assert((!LeaveBefore || From <= LeaveBefore) && "Interference");
}

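// Split a block where the register is live-out in IntvOut and enters either
// on the stack or via a def inside the block. EnterAfter, when set, marks
// interference that IntvOut must be entered after.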
void SplitEditor::splitRegOutBlock(const SplitAnalysis::BlockInfo &BI,
                                   unsigned IntvOut, SlotIndex EnterAfter) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = LIS.getSlotIndexes()->getMBBRange(BI.MBB);

  LLVM_DEBUG(dbgs() << printMBBReference(*BI.MBB) << " [" << Start << ';'
                    << Stop << "), uses " << BI.FirstInstr << '-'
                    << BI.LastInstr << ", reg-out " << IntvOut
                    << ", enter after " << EnterAfter
                    << (BI.LiveIn ? ", stack-in" : ", defined in block"));

  SlotIndex LSP = SA.getLastSplitPoint(BI.MBB->getNumber());

  assert(IntvOut && "Must have register out");
  assert(BI.LiveOut && "Must be live-out");
  assert((!EnterAfter || EnterAfter < LSP) && "Bad interference");

  if (!BI.LiveIn && (!EnterAfter || EnterAfter <= BI.FirstInstr)) {
    LLVM_DEBUG(dbgs() << " after interference.\n");
    //
    //    >>>>             Interference before def.
    //    |   o---o---|    Defined in block.
    //        =========    Use IntvOut everywhere.
    //
    selectIntv(IntvOut);
    useIntv(BI.FirstInstr, Stop);
    return;
  }

  if (!EnterAfter || EnterAfter < BI.FirstInstr.getBaseIndex()) {
    LLVM_DEBUG(dbgs() << ", reload after interference.\n");
    //
    //    >>>>             Interference before def.
    //    |---o---o---|    Live-through, stack-in.
    //    ____=========    Enter IntvOut before first use.
    //
    selectIntv(IntvOut);
    SlotIndex Idx = enterIntvBefore(std::min(LSP, BI.FirstInstr));
    useIntv(Idx, Stop);
    assert((!EnterAfter || Idx >= EnterAfter) && "Interference");
    return;
  }

  // The interference is overlapping somewhere we wanted to use IntvOut. That
  // means we need to create a local interval that can be allocated a
  // different register.
  LLVM_DEBUG(dbgs() << ", interference overlaps uses.\n");
  //
  //    >>>>>>>          Interference overlapping uses.
  //    |---o---o---|    Live-through, stack-in.
  //    ____---======    Create local interval for interference range.
  //
  selectIntv(IntvOut);
  SlotIndex Idx = enterIntvAfter(EnterAfter);
  useIntv(Idx, Stop);
  assert((!EnterAfter || Idx >= EnterAfter) && "Interference");

  openIntv();
  SlotIndex From = enterIntvBefore(std::min(Idx, BI.FirstInstr));
  useIntv(From, Idx);
}