  1. //===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. /// \file
  10. /// This file implements the machine register scavenger. It can provide
  11. /// information, such as unused registers, at any point in a machine basic
  12. /// block. It also provides a mechanism to make registers available by evicting
  13. /// them to spill slots.
  14. //
  15. //===----------------------------------------------------------------------===//
  16. #include "llvm/CodeGen/RegisterScavenging.h"
  17. #include "llvm/ADT/ArrayRef.h"
  18. #include "llvm/ADT/BitVector.h"
  19. #include "llvm/ADT/SmallVector.h"
  20. #include "llvm/ADT/Statistic.h"
  21. #include "llvm/CodeGen/LiveRegUnits.h"
  22. #include "llvm/CodeGen/MachineBasicBlock.h"
  23. #include "llvm/CodeGen/MachineFrameInfo.h"
  24. #include "llvm/CodeGen/MachineFunction.h"
  25. #include "llvm/CodeGen/MachineFunctionPass.h"
  26. #include "llvm/CodeGen/MachineInstr.h"
  27. #include "llvm/CodeGen/MachineOperand.h"
  28. #include "llvm/CodeGen/MachineRegisterInfo.h"
  29. #include "llvm/CodeGen/TargetFrameLowering.h"
  30. #include "llvm/CodeGen/TargetInstrInfo.h"
  31. #include "llvm/CodeGen/TargetRegisterInfo.h"
  32. #include "llvm/CodeGen/TargetSubtargetInfo.h"
  33. #include "llvm/InitializePasses.h"
  34. #include "llvm/MC/MCRegisterInfo.h"
  35. #include "llvm/Pass.h"
  36. #include "llvm/Support/Debug.h"
  37. #include "llvm/Support/ErrorHandling.h"
  38. #include "llvm/Support/raw_ostream.h"
  39. #include <algorithm>
  40. #include <cassert>
  41. #include <iterator>
  42. #include <limits>
  43. #include <string>
  44. #include <utility>
  45. using namespace llvm;
  46. #define DEBUG_TYPE "reg-scavenging"
  47. STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
/// Mark the lanes in \p LaneMask of register \p Reg as live/used in the
/// tracked register-unit set.
void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
/// (Re)initialize the scavenger state for basic block \p MBB: cache target
/// info, size the register-unit bitvectors on first use, clear any previous
/// scavenge records, and stop tracking until the caller positions us.
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  // The number of register units must be stable across blocks of the same
  // function/target.
  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  // Forget scavenged registers/restore points from a previous block.
  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}
/// Enter basic block \p MBB for forward scanning: seed the live set with the
/// block's live-ins.
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
/// Enter basic block \p MBB for backward scanning: seed the live set with the
/// block's live-outs and position the iterator at the last instruction.
void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move internal iterator at the last instruction of the block.
  if (!MBB.empty()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}
  86. void RegScavenger::addRegUnits(BitVector &BV, MCRegister Reg) {
  87. for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
  88. BV.set(*RUI);
  89. }
  90. void RegScavenger::removeRegUnits(BitVector &BV, MCRegister Reg) {
  91. for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
  92. BV.reset(*RUI);
  93. }
/// Populate KillRegUnits and DefRegUnits from the operands of the instruction
/// the internal iterator currently points at.
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      // A register mask kills every unit that has at least one clobbered
      // root register.
      TmpRegUnits.reset();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    // Virtual and reserved registers are not tracked.
    if (!MO.getReg().isPhysical() || isReserved(MO.getReg()))
      continue;
    MCRegister Reg = MO.getReg().asMCReg();

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}
/// Advance the internal iterator by one instruction and update the tracked
/// live register units with that instruction's kills and defs.
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  // A scavenged register becomes available again once we reach its restore
  // point.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore != &MI)
      continue;

    I.Reg = 0;
    I.Restore = nullptr;
  }

  // Debug and pseudo instructions do not affect liveness.
  if (MI.isDebugOrPseudoInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (const MCPhysReg &SubReg : TRI->subregs(Reg))
          if (isRegUsed(SubReg)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
  212. void RegScavenger::backward() {
  213. assert(Tracking && "Must be tracking to determine kills and defs");
  214. const MachineInstr &MI = *MBBI;
  215. LiveUnits.stepBackward(MI);
  216. // Expire scavenge spill frameindex uses.
  217. for (ScavengedInfo &I : Scavenged) {
  218. if (I.Restore == &MI) {
  219. I.Reg = 0;
  220. I.Restore = nullptr;
  221. }
  222. }
  223. if (MBBI == MBB->begin()) {
  224. MBBI = MachineBasicBlock::iterator(nullptr);
  225. Tracking = false;
  226. } else
  227. --MBBI;
  228. }
  229. bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  230. if (isReserved(Reg))
  231. return includeReserved;
  232. return !LiveUnits.available(Reg);
  233. }
  234. Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  235. for (Register Reg : *RC) {
  236. if (!isRegUsed(Reg)) {
  237. LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
  238. << "\n");
  239. return Reg;
  240. }
  241. }
  242. return 0;
  243. }
  244. BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  245. BitVector Mask(TRI->getNumRegs());
  246. for (Register Reg : *RC)
  247. if (!isRegUsed(Reg))
  248. Mask.set(Reg);
  249. return Mask;
  250. }
/// Forward-scan heuristic used by scavengeRegister: starting after
/// \p StartMI, scan up to \p InstrLimit instructions, pruning from
/// \p Candidates every register touched along the way, and keep a "survivor"
/// candidate that stays untouched as long as possible. \p UseMI receives the
/// position before which a spilled survivor must be restored (chosen outside
/// any virtual-register live range so the restore cannot split one).
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugOrPseudoInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (Register::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      // Physical operand: drop the register and everything aliasing it.
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
  307. /// Given the bitvector \p Available of free register units at position
  308. /// \p From. Search backwards to find a register that is part of \p
  309. /// Candidates and not used/clobbered until the point \p To. If there is
  310. /// multiple candidates continue searching and pick the one that is not used/
  311. /// clobbered for the longest time.
  312. /// Returns the register and the earliest position we know it to be free or
  313. /// the position MBB.end() if no register is available.
  314. static std::pair<MCPhysReg, MachineBasicBlock::iterator>
  315. findSurvivorBackwards(const MachineRegisterInfo &MRI,
  316. MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
  317. const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
  318. bool RestoreAfter) {
  319. bool FoundTo = false;
  320. MCPhysReg Survivor = 0;
  321. MachineBasicBlock::iterator Pos;
  322. MachineBasicBlock &MBB = *From->getParent();
  323. unsigned InstrLimit = 25;
  324. unsigned InstrCountDown = InstrLimit;
  325. const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  326. LiveRegUnits Used(TRI);
  327. assert(From->getParent() == To->getParent() &&
  328. "Target instruction is in other than current basic block, use "
  329. "enterBasicBlockEnd first");
  330. for (MachineBasicBlock::iterator I = From;; --I) {
  331. const MachineInstr &MI = *I;
  332. Used.accumulate(MI);
  333. if (I == To) {
  334. // See if one of the registers in RC wasn't used so far.
  335. for (MCPhysReg Reg : AllocationOrder) {
  336. if (!MRI.isReserved(Reg) && Used.available(Reg) &&
  337. LiveOut.available(Reg))
  338. return std::make_pair(Reg, MBB.end());
  339. }
  340. // Otherwise we will continue up to InstrLimit instructions to find
  341. // the register which is not defined/used for the longest time.
  342. FoundTo = true;
  343. Pos = To;
  344. // Note: It was fine so far to start our search at From, however now that
  345. // we have to spill, and can only place the restore after From then
  346. // add the regs used/defed by std::next(From) to the set.
  347. if (RestoreAfter)
  348. Used.accumulate(*std::next(From));
  349. }
  350. if (FoundTo) {
  351. if (Survivor == 0 || !Used.available(Survivor)) {
  352. MCPhysReg AvilableReg = 0;
  353. for (MCPhysReg Reg : AllocationOrder) {
  354. if (!MRI.isReserved(Reg) && Used.available(Reg)) {
  355. AvilableReg = Reg;
  356. break;
  357. }
  358. }
  359. if (AvilableReg == 0)
  360. break;
  361. Survivor = AvilableReg;
  362. }
  363. if (--InstrCountDown == 0)
  364. break;
  365. // Keep searching when we find a vreg since the spilled register will
  366. // be usefull for this other vreg as well later.
  367. bool FoundVReg = false;
  368. for (const MachineOperand &MO : MI.operands()) {
  369. if (MO.isReg() && Register::isVirtualRegister(MO.getReg())) {
  370. FoundVReg = true;
  371. break;
  372. }
  373. }
  374. if (FoundVReg) {
  375. InstrCountDown = InstrLimit;
  376. Pos = I;
  377. }
  378. if (I == MBB.begin())
  379. break;
  380. }
  381. assert(I != MBB.begin() && "Did not find target instruction while "
  382. "iterating backwards");
  383. }
  384. return std::make_pair(Survivor, Pos);
  385. }
  386. static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  387. unsigned i = 0;
  388. while (!MI.getOperand(i).isFI()) {
  389. ++i;
  390. assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  391. }
  392. return i;
  393. }
/// Make register \p Reg of class \p RC usable by saving its current value:
/// either the target's saveScavengerRegister hook handles it, or the value is
/// spilled to the best-fitting emergency spill slot before \p Before and
/// reloaded before \p UseMI. Returns the ScavengedInfo record tracking the
/// scavenge. \p SPAdj is the current stack-pointer adjustment, forwarded to
/// eliminateFrameIndex for the inserted spill/reload instructions.
RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  Align NeedAlign = TRI->getSpillAlign(RC);

  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    // Skip slots whose register is still outstanding.
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    Align A = MFI.getObjectAlign(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S - NeedSize) + (A.value() - NeedAlign.value());
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      report_fatal_error(Twine("Error while trying to spill ") +
                         TRI->getName(Reg) + " from class " +
                         TRI->getRegClassName(&RC) +
                         ": Cannot scavenge register without an emergency "
                         "spill slot!");
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, FI, &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    // The just-inserted store references the spill slot; resolve its frame
    // index immediately.
    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, FI, &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
/// Scavenge a register of class \p RC that is usable at instruction \p I,
/// spilling one (before \p I, restored at its computed restore point) if no
/// register is free and \p AllowSpill is set. Returns the scavenged register,
/// or 0 when spilling is disallowed and nothing is free.
Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj, bool AllowSpill) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getMF();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !Register::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // If we have already scavenged some registers, remove them from the
  // candidates. If we end up recursively calling eliminateFrameIndex, we don't
  // want to be clobbering previously scavenged registers or their associated
  // stack slots.
  for (ScavengedInfo &SI : Scavenged) {
    if (SI.Reg) {
      if (isRegUsed(SI.Reg)) {
        LLVM_DEBUG(
          dbgs() << "Removing " << printReg(SI.Reg, TRI) <<
          " from scavenging candidates since it was already scavenged\n");
        for (MCRegAliasIterator AI(SI.Reg, TRI, true); AI.isValid(); ++AI)
          Candidates.reset(*AI);
      }
    }
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  Register SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
    return SReg;
  }

  if (!AllowSpill)
    return 0;

#ifndef NDEBUG
  // A live register must not already be backed by a scavenge slot.
  for (ScavengedInfo &SI : Scavenged) {
    assert(SI.Reg != SReg && "scavenged a previously scavenged register");
  }
#endif

  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
                    << printReg(SReg, TRI) << "\n");

  return SReg;
}
  516. Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
  517. MachineBasicBlock::iterator To,
  518. bool RestoreAfter, int SPAdj,
  519. bool AllowSpill) {
  520. const MachineBasicBlock &MBB = *To->getParent();
  521. const MachineFunction &MF = *MBB.getParent();
  522. // Find the register whose use is furthest away.
  523. MachineBasicBlock::iterator UseMI;
  524. ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  525. std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
  526. findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
  527. RestoreAfter);
  528. MCPhysReg Reg = P.first;
  529. MachineBasicBlock::iterator SpillBefore = P.second;
  530. // Found an available register?
  531. if (Reg != 0 && SpillBefore == MBB.end()) {
  532. LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
  533. << '\n');
  534. return Reg;
  535. }
  536. if (!AllowSpill)
  537. return 0;
  538. assert(Reg != 0 && "No register left to scavenge!");
  539. MachineBasicBlock::iterator ReloadAfter =
  540. RestoreAfter ? std::next(MBBI) : MBBI;
  541. MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
  542. if (ReloadBefore != MBB.end())
  543. LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  544. ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
  545. Scavenged.Restore = &*std::prev(SpillBefore);
  546. LiveUnits.removeReg(Reg);
  547. LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
  548. << " until " << *SpillBefore);
  549. return Reg;
  550. }
/// Allocate a register for the virtual register \p VReg. The last use of
/// \p VReg is around the current position of the register scavenger \p RS.
/// \p ReserveAfter controls whether the scavenged register needs to be reserved
/// after the current instruction, otherwise it will only be reserved before the
/// current instruction.
static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             Register VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      // A def that also reads VReg is a redefinition (two-address form), not
      // the real definition.
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef = llvm::find_if(
      MRI.def_operands(VReg), [VReg, &TRI](const MachineOperand &MO) {
        return !MO.getParent()->readsRegister(VReg, &TRI);
      });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  // Rewrite every occurrence of the vreg to the scavenged physreg.
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}
/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  // Vregs created during this scan (by target spill callbacks) are handled in
  // a later round, so remember the cutoff.
  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!Register::isVirtualRegister(Reg) ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        // The physreg's lifetime ends at this use.
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Register::isVirtualRegister(Reg) ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows us to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        // The def is immediately dead at this point of the backward scan.
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  // The first instruction of a block may not read a (pre-existing) vreg:
  // there would be no preceding position left to reserve the physreg at.
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !Register::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
/// Replace every remaining virtual register in \p MF with a scavenged
/// physical register, block by block, then mark the function as having no
/// vregs. Aborts compilation if a block still has unassigned vregs after a
/// second scavenging pass.
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex need a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
                        << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compiletime in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}
namespace {

/// This class runs register scavenging independent of the
/// PrologEpilogInserter. It is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;

    // Let's hope that calling those outside of PrologEpilogueInserter works
    // well enough to initialize the scavenger with some emergency spillslots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace

char ScavengerTest::ID;

INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)