//===- PrologEpilogInserter.cpp - Insert Prolog/Epilog code in function ---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass is responsible for finalizing the function's frame layout, saving
// callee saved registers, and for emitting prolog & epilog code for the
// function.
//
// This pass must be run after register allocation. After this pass is
// executed, it is illegal to construct MO_FrameIndex operands.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/WinEHFuncInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/CodeGen.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <functional>
#include <limits>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "prologepilog"

using MBBVector = SmallVector<MachineBasicBlock *, 4>;

STATISTIC(NumLeafFuncWithSpills, "Number of leaf functions with CSRs");
STATISTIC(NumFuncSeen, "Number of functions seen in PEI");

namespace {

class PEI : public MachineFunctionPass {
public:
  static char ID;

  PEI() : MachineFunctionPass(ID) {
    initializePEIPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  /// runOnMachineFunction - Insert prolog/epilog code and replace abstract
  /// frame indexes with appropriate references.
  bool runOnMachineFunction(MachineFunction &MF) override;

private:
  RegScavenger *RS;

  // MinCSFrameIndex, MaxCSFrameIndex - Keeps the range of callee saved
  // stack frame indexes.
  unsigned MinCSFrameIndex = std::numeric_limits<unsigned>::max();
  unsigned MaxCSFrameIndex = 0;

  // Save and Restore blocks of the current function. Typically there is a
  // single save block, unless Windows EH funclets are involved.
  MBBVector SaveBlocks;
  MBBVector RestoreBlocks;

  // Flag to control whether to use the register scavenger to resolve
  // frame index materialization registers. Set according to
  // TRI->requiresFrameIndexScavenging() for the current function.
  bool FrameIndexVirtualScavenging;

  // Flag to control whether the scavenger should be passed even though
  // FrameIndexVirtualScavenging is used.
  bool FrameIndexEliminationScavenging;

  // Emit remarks.
  MachineOptimizationRemarkEmitter *ORE = nullptr;

  void calculateCallFrameInfo(MachineFunction &MF);
  void calculateSaveRestoreBlocks(MachineFunction &MF);
  void spillCalleeSavedRegs(MachineFunction &MF);
  void calculateFrameObjectOffsets(MachineFunction &MF);
  void replaceFrameIndices(MachineFunction &MF);
  void replaceFrameIndices(MachineBasicBlock *BB, MachineFunction &MF,
                           int &SPAdj);
  // Frame indices in debug values are encoded in a target independent
  // way with simply the frame index and offset rather than any
  // target-specific addressing mode.
  bool replaceFrameIndexDebugInstr(MachineFunction &MF, MachineInstr &MI,
                                   unsigned OpIdx, int SPAdj = 0);
  // Does the same as replaceFrameIndices but uses a backward MIR walk and a
  // backward register scavenger walk. Does not yet support call sequence
  // processing.
  void replaceFrameIndicesBackward(MachineBasicBlock *BB, MachineFunction &MF,
                                   int &SPAdj);

  void insertPrologEpilogCode(MachineFunction &MF);
  void insertZeroCallUsedRegs(MachineFunction &MF);
};

} // end anonymous namespace

char PEI::ID = 0;

char &llvm::PrologEpilogCodeInserterID = PEI::ID;

INITIALIZE_PASS_BEGIN(PEI, DEBUG_TYPE, "Prologue/Epilogue Insertion", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(MachineLoopInfo)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_DEPENDENCY(MachineOptimizationRemarkEmitterPass)
INITIALIZE_PASS_END(PEI, DEBUG_TYPE,
                    "Prologue/Epilogue Insertion & Frame Finalization", false,
                    false)

MachineFunctionPass *llvm::createPrologEpilogInserterPass() {
  return new PEI();
}

STATISTIC(NumBytesStackSpace,
          "Number of bytes used for stack in all functions");

void PEI::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addPreserved<MachineLoopInfo>();
  AU.addPreserved<MachineDominatorTree>();
  AU.addRequired<MachineOptimizationRemarkEmitterPass>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

/// StackObjSet - A set of stack object indexes
using StackObjSet = SmallSetVector<int, 8>;

using SavedDbgValuesMap =
    SmallDenseMap<MachineBasicBlock *, SmallVector<MachineInstr *, 4>, 4>;

/// Stash DBG_VALUEs that describe parameters and which are placed at the start
/// of the block. Later on, after the prologue code has been emitted, the
/// stashed DBG_VALUEs will be reinserted at the start of the block.
static void stashEntryDbgValues(MachineBasicBlock &MBB,
                                SavedDbgValuesMap &EntryDbgValues) {
  SmallVector<const MachineInstr *, 4> FrameIndexValues;

  for (auto &MI : MBB) {
    if (!MI.isDebugInstr())
      break;
    if (!MI.isDebugValue() || !MI.getDebugVariable()->isParameter())
      continue;
    if (any_of(MI.debug_operands(),
               [](const MachineOperand &MO) { return MO.isFI(); })) {
      // We can only emit valid locations for frame indices after the frame
      // setup, so do not stash them away.
      FrameIndexValues.push_back(&MI);
      continue;
    }
    const DILocalVariable *Var = MI.getDebugVariable();
    const DIExpression *Expr = MI.getDebugExpression();
    auto Overlaps = [Var, Expr](const MachineInstr *DV) {
      return Var == DV->getDebugVariable() &&
             Expr->fragmentsOverlap(DV->getDebugExpression());
    };
    // See if the debug value overlaps with any preceding debug value that will
    // not be stashed. If that is the case, then we can't stash this value, as
    // we would then reorder the values at reinsertion.
    if (llvm::none_of(FrameIndexValues, Overlaps))
      EntryDbgValues[&MBB].push_back(&MI);
  }

  // Remove stashed debug values from the block.
  if (EntryDbgValues.count(&MBB))
    for (auto *MI : EntryDbgValues[&MBB])
      MI->removeFromParent();
}

/// runOnMachineFunction - Insert prolog/epilog code and replace abstract
/// frame indexes with appropriate references.
bool PEI::runOnMachineFunction(MachineFunction &MF) {
  NumFuncSeen++;
  const Function &F = MF.getFunction();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();

  RS = TRI->requiresRegisterScavenging(MF) ? new RegScavenger() : nullptr;
  FrameIndexVirtualScavenging = TRI->requiresFrameIndexScavenging(MF);
  ORE = &getAnalysis<MachineOptimizationRemarkEmitterPass>().getORE();

  // Calculate the MaxCallFrameSize and AdjustsStack variables for the
  // function's frame information. Also eliminates call frame pseudo
  // instructions.
  calculateCallFrameInfo(MF);

  // Determine placement of CSR spill/restore code and prolog/epilog code:
  // place all spills in the entry block, all restores in return blocks.
  calculateSaveRestoreBlocks(MF);

  // Stash away DBG_VALUEs that should not be moved by insertion of prolog code.
  SavedDbgValuesMap EntryDbgValues;
  for (MachineBasicBlock *SaveBlock : SaveBlocks)
    stashEntryDbgValues(*SaveBlock, EntryDbgValues);

  // Handle CSR spilling and restoring, for targets that need it.
  if (MF.getTarget().usesPhysRegsForValues())
    spillCalleeSavedRegs(MF);

  // Allow the target machine to make final modifications to the function
  // before the frame layout is finalized.
  TFI->processFunctionBeforeFrameFinalized(MF, RS);

  // Calculate actual frame offsets for all abstract stack objects...
  calculateFrameObjectOffsets(MF);

  // Add prolog and epilog code to the function. This function is required
  // to align the stack frame as necessary for any stack variables or
  // called functions. Because of this, calculateCalleeSavedRegisters()
  // must be called before this function in order to set the AdjustsStack
  // and MaxCallFrameSize variables.
  if (!F.hasFnAttribute(Attribute::Naked))
    insertPrologEpilogCode(MF);

  // Reinsert stashed debug values at the start of the entry blocks.
  for (auto &I : EntryDbgValues)
    I.first->insert(I.first->begin(), I.second.begin(), I.second.end());

  // Allow the target machine to make final modifications to the function
  // before the frame layout is finalized.
  TFI->processFunctionBeforeFrameIndicesReplaced(MF, RS);

  // Replace all MO_FrameIndex operands with physical register references
  // and actual offsets.
  replaceFrameIndices(MF);

  // If register scavenging is needed, as we've enabled doing it as a
  // post-pass, scavenge the virtual registers that frame index elimination
  // inserted.
  if (TRI->requiresRegisterScavenging(MF) && FrameIndexVirtualScavenging)
    scavengeFrameVirtualRegs(MF, *RS);

  // Warn on stack size when it exceeds the given limit.
  MachineFrameInfo &MFI = MF.getFrameInfo();
  uint64_t StackSize = MFI.getStackSize();

  unsigned Threshold = UINT_MAX;
  if (MF.getFunction().hasFnAttribute("warn-stack-size")) {
    bool Failed = MF.getFunction()
                      .getFnAttribute("warn-stack-size")
                      .getValueAsString()
                      .getAsInteger(10, Threshold);
    // Verifier should have caught this.
    assert(!Failed && "Invalid warn-stack-size fn attr value");
    (void)Failed;
  }
  uint64_t UnsafeStackSize = MFI.getUnsafeStackSize();
  if (MF.getFunction().hasFnAttribute(Attribute::SafeStack))
    StackSize += UnsafeStackSize;

  if (StackSize > Threshold) {
    DiagnosticInfoStackSize DiagStackSize(F, StackSize, Threshold, DS_Warning);
    F.getContext().diagnose(DiagStackSize);
    int64_t SpillSize = 0;
    for (int Idx = MFI.getObjectIndexBegin(), End = MFI.getObjectIndexEnd();
         Idx != End; ++Idx) {
      if (MFI.isSpillSlotObjectIndex(Idx))
        SpillSize += MFI.getObjectSize(Idx);
    }

    float SpillPct =
        static_cast<float>(SpillSize) / static_cast<float>(StackSize);
    float VarPct = 1.0f - SpillPct;
    int64_t VariableSize = StackSize - SpillSize;
    dbgs() << formatv("{0}/{1} ({3:P}) spills, {2}/{1} ({4:P}) variables",
                      SpillSize, StackSize, VariableSize, SpillPct, VarPct);
    if (UnsafeStackSize != 0) {
      float UnsafePct =
          static_cast<float>(UnsafeStackSize) / static_cast<float>(StackSize);
      dbgs() << formatv(", {0}/{2} ({1:P}) unsafe stack", UnsafeStackSize,
                        UnsafePct, StackSize);
    }
    dbgs() << "\n";
  }

  ORE->emit([&]() {
    return MachineOptimizationRemarkAnalysis(DEBUG_TYPE, "StackSize",
                                             MF.getFunction().getSubprogram(),
                                             &MF.front())
           << ore::NV("NumStackBytes", StackSize) << " stack bytes in function";
  });

  delete RS;
  SaveBlocks.clear();
  RestoreBlocks.clear();
  MFI.setSavePoint(nullptr);
  MFI.setRestorePoint(nullptr);
  return true;
}

/// Calculate the MaxCallFrameSize and AdjustsStack
/// variables for the function's frame information and eliminate call frame
/// pseudo instructions.
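/// The maximum call frame size is the largest operand seen on any call frame
/// setup/destroy pseudo; when the target reports that it can simplify these
/// pseudos (canSimplifyCallFramePseudos), they are eliminated here via
/// eliminateCallFramePseudoInstr.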
void PEI::calculateCallFrameInfo(MachineFunction &MF) {
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  MachineFrameInfo &MFI = MF.getFrameInfo();

  unsigned MaxCallFrameSize = 0;
  bool AdjustsStack = MFI.adjustsStack();

  // Get the function call frame set-up and tear-down instruction opcode
  unsigned FrameSetupOpcode = TII.getCallFrameSetupOpcode();
  unsigned FrameDestroyOpcode = TII.getCallFrameDestroyOpcode();

  // Early exit for targets which have no call frame setup/destroy pseudo
  // instructions.
  if (FrameSetupOpcode == ~0u && FrameDestroyOpcode == ~0u)
    return;

  std::vector<MachineBasicBlock::iterator> FrameSDOps;
  for (MachineBasicBlock &BB : MF)
    for (MachineBasicBlock::iterator I = BB.begin(); I != BB.end(); ++I)
      if (TII.isFrameInstr(*I)) {
        unsigned Size = TII.getFrameSize(*I);
        if (Size > MaxCallFrameSize) MaxCallFrameSize = Size;
        AdjustsStack = true;
        FrameSDOps.push_back(I);
      } else if (I->isInlineAsm()) {
        // Some inline asm's need a stack frame, as indicated by operand 1.
        unsigned ExtraInfo = I->getOperand(InlineAsm::MIOp_ExtraInfo).getImm();
        if (ExtraInfo & InlineAsm::Extra_IsAlignStack)
          AdjustsStack = true;
      }

  assert(!MFI.isMaxCallFrameSizeComputed() ||
         (MFI.getMaxCallFrameSize() == MaxCallFrameSize &&
          MFI.adjustsStack() == AdjustsStack));
  MFI.setAdjustsStack(AdjustsStack);
  MFI.setMaxCallFrameSize(MaxCallFrameSize);

  for (MachineBasicBlock::iterator I : FrameSDOps) {
    // If call frames are not being included as part of the stack frame, and
    // the target doesn't indicate otherwise, remove the call frame pseudos
    // here. The sub/add sp instruction pairs are still inserted, but we don't
    // need to track the SP adjustment for frame index elimination.
    if (TFI->canSimplifyCallFramePseudos(MF))
      TFI->eliminateCallFramePseudoInstr(MF, *I->getParent(), I);
  }
}

/// Compute the sets of entry and return blocks for saving and restoring
/// callee-saved registers, and placing prolog and epilog code.
void PEI::calculateSaveRestoreBlocks(MachineFunction &MF) {
  const MachineFrameInfo &MFI = MF.getFrameInfo();

  // Even when we do not change any CSR, we still want to insert the
  // prologue and epilogue of the function.
  // So set the save points for those.

  // Use the points found by shrink-wrapping, if any.
  if (MFI.getSavePoint()) {
    SaveBlocks.push_back(MFI.getSavePoint());
    assert(MFI.getRestorePoint() && "Both restore and save must be set");
    MachineBasicBlock *RestoreBlock = MFI.getRestorePoint();
    // If RestoreBlock does not have any successor and is not a return block
    // then the end point is unreachable and we do not need to insert any
    // epilogue.
    if (!RestoreBlock->succ_empty() || RestoreBlock->isReturnBlock())
      RestoreBlocks.push_back(RestoreBlock);
    return;
  }

  // Save refs to entry and return blocks.
  SaveBlocks.push_back(&MF.front());
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.isEHFuncletEntry())
      SaveBlocks.push_back(&MBB);
    if (MBB.isReturnBlock())
      RestoreBlocks.push_back(&MBB);
  }
}

static void assignCalleeSavedSpillSlots(MachineFunction &F,
                                        const BitVector &SavedRegs,
                                        unsigned &MinCSFrameIndex,
                                        unsigned &MaxCSFrameIndex) {
  if (SavedRegs.empty())
    return;

  const TargetRegisterInfo *RegInfo = F.getSubtarget().getRegisterInfo();
  const MCPhysReg *CSRegs = F.getRegInfo().getCalleeSavedRegs();
  BitVector CSMask(SavedRegs.size());
  for (unsigned i = 0; CSRegs[i]; ++i)
    CSMask.set(CSRegs[i]);

  std::vector<CalleeSavedInfo> CSI;
  for (unsigned i = 0; CSRegs[i]; ++i) {
    unsigned Reg = CSRegs[i];
    if (SavedRegs.test(Reg)) {
      bool SavedSuper = false;
      for (const MCPhysReg &SuperReg : RegInfo->superregs(Reg)) {
        // Some backends set all aliases for some registers as saved, such as
        // Mips's $fp, so they appear in SavedRegs but not CSRegs.
        if (SavedRegs.test(SuperReg) && CSMask.test(SuperReg)) {
          SavedSuper = true;
          break;
        }
      }

      if (!SavedSuper)
        CSI.push_back(CalleeSavedInfo(Reg));
    }
  }

  const TargetFrameLowering *TFI = F.getSubtarget().getFrameLowering();
  MachineFrameInfo &MFI = F.getFrameInfo();
  if (!TFI->assignCalleeSavedSpillSlots(F, RegInfo, CSI, MinCSFrameIndex,
                                        MaxCSFrameIndex)) {
    // If target doesn't implement this, use generic code.
    if (CSI.empty())
      return; // Early exit if no callee saved registers are modified!

    unsigned NumFixedSpillSlots;
    const TargetFrameLowering::SpillSlot *FixedSpillSlots =
        TFI->getCalleeSavedSpillSlots(NumFixedSpillSlots);

    // Now that we know which registers need to be saved and restored, allocate
    // stack slots for them.
    for (auto &CS : CSI) {
      // If the target has spilled this register to another register, we don't
      // need to allocate a stack slot.
      if (CS.isSpilledToReg())
        continue;

      unsigned Reg = CS.getReg();
      const TargetRegisterClass *RC = RegInfo->getMinimalPhysRegClass(Reg);

      int FrameIdx;
      if (RegInfo->hasReservedSpillSlot(F, Reg, FrameIdx)) {
        CS.setFrameIdx(FrameIdx);
        continue;
      }

      // Check to see if this physreg must be spilled to a particular stack
      // slot on this target.
      const TargetFrameLowering::SpillSlot *FixedSlot = FixedSpillSlots;
      while (FixedSlot != FixedSpillSlots + NumFixedSpillSlots &&
             FixedSlot->Reg != Reg)
        ++FixedSlot;

      unsigned Size = RegInfo->getSpillSize(*RC);
      if (FixedSlot == FixedSpillSlots + NumFixedSpillSlots) {
        // Nope, just spill it anywhere convenient.
        Align Alignment = RegInfo->getSpillAlign(*RC);
        // We may not be able to satisfy the desired alignment specification of
        // the TargetRegisterClass if the stack alignment is smaller. Use the
        // min.
        Alignment = std::min(Alignment, TFI->getStackAlign());
        FrameIdx = MFI.CreateStackObject(Size, Alignment, true);
        if ((unsigned)FrameIdx < MinCSFrameIndex) MinCSFrameIndex = FrameIdx;
        if ((unsigned)FrameIdx > MaxCSFrameIndex) MaxCSFrameIndex = FrameIdx;
      } else {
        // Spill it to the stack where we must.
        FrameIdx = MFI.CreateFixedSpillStackObject(Size, FixedSlot->Offset);
      }

      CS.setFrameIdx(FrameIdx);
    }
  }

  MFI.setCalleeSavedInfo(CSI);
}

/// Helper function to update the liveness information for the callee-saved
/// registers.
static void updateLiveness(MachineFunction &MF) {
  MachineFrameInfo &MFI = MF.getFrameInfo();
  // Visited will contain all the basic blocks that are in the region
  // where the callee saved registers are alive:
  // - Anything that is not Save or Restore -> LiveThrough.
  // - Save -> LiveIn.
  // - Restore -> LiveOut.
  // The live-out is not attached to the block, so no need to keep
  // Restore in this set.
  SmallPtrSet<MachineBasicBlock *, 8> Visited;
  SmallVector<MachineBasicBlock *, 8> WorkList;
  MachineBasicBlock *Entry = &MF.front();
  MachineBasicBlock *Save = MFI.getSavePoint();

  if (!Save)
    Save = Entry;

  if (Entry != Save) {
    WorkList.push_back(Entry);
    Visited.insert(Entry);
  }
  Visited.insert(Save);

  MachineBasicBlock *Restore = MFI.getRestorePoint();
  if (Restore)
    // By construction Restore cannot be visited, otherwise it
    // means there exists a path to Restore that does not go
    // through Save.
    WorkList.push_back(Restore);

  while (!WorkList.empty()) {
    const MachineBasicBlock *CurBB = WorkList.pop_back_val();
    // By construction, the region that is after the save point is
    // dominated by the Save and post-dominated by the Restore.
    if (CurBB == Save && Save != Restore)
      continue;
    // Enqueue all the successors not already visited.
    // Those are by construction either before Save or after Restore.
    for (MachineBasicBlock *SuccBB : CurBB->successors())
      if (Visited.insert(SuccBB).second)
        WorkList.push_back(SuccBB);
  }

  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  for (const CalleeSavedInfo &I : CSI) {
    for (MachineBasicBlock *MBB : Visited) {
      MCPhysReg Reg = I.getReg();
      // Add the callee-saved register as live-in.
      // It's killed at the spill.
      if (!MRI.isReserved(Reg) && !MBB->isLiveIn(Reg))
        MBB->addLiveIn(Reg);
    }
    // If a callee-saved register is spilled to another register rather than
    // spilled to the stack, the destination register has to be marked as live
    // for each MBB between the prologue and epilogue so that it is not
    // clobbered before it is reloaded in the epilogue. The Visited set
    // contains all blocks outside of the region delimited by
    // prologue/epilogue.
    if (I.isSpilledToReg()) {
      for (MachineBasicBlock &MBB : MF) {
        if (Visited.count(&MBB))
          continue;
        MCPhysReg DstReg = I.getDstReg();
        if (!MBB.isLiveIn(DstReg))
          MBB.addLiveIn(DstReg);
      }
    }
  }
}

/// Insert spill code for the callee-saved registers used in the function.
static void insertCSRSaves(MachineBasicBlock &SaveBlock,
                           ArrayRef<CalleeSavedInfo> CSI) {
  MachineFunction &MF = *SaveBlock.getParent();
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  MachineBasicBlock::iterator I = SaveBlock.begin();
  if (!TFI->spillCalleeSavedRegisters(SaveBlock, I, CSI, TRI)) {
    for (const CalleeSavedInfo &CS : CSI) {
      // Insert the spill to the stack frame.
      unsigned Reg = CS.getReg();

      if (CS.isSpilledToReg()) {
        BuildMI(SaveBlock, I, DebugLoc(),
                TII.get(TargetOpcode::COPY), CS.getDstReg())
            .addReg(Reg, getKillRegState(true));
      } else {
        const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(Reg);
        TII.storeRegToStackSlot(SaveBlock, I, Reg, true, CS.getFrameIdx(), RC,
                                TRI, Register());
      }
    }
  }
}

/// Insert restore code for the callee-saved registers used in the function.
static void insertCSRRestores(MachineBasicBlock &RestoreBlock,
                              std::vector<CalleeSavedInfo> &CSI) {
  MachineFunction &MF = *RestoreBlock.getParent();
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  // Restore all registers immediately before the return and any
  // terminators that precede it.
  MachineBasicBlock::iterator I = RestoreBlock.getFirstTerminator();

  if (!TFI->restoreCalleeSavedRegisters(RestoreBlock, I, CSI, TRI)) {
    for (const CalleeSavedInfo &CI : reverse(CSI)) {
      unsigned Reg = CI.getReg();
      if (CI.isSpilledToReg()) {
        BuildMI(RestoreBlock, I, DebugLoc(), TII.get(TargetOpcode::COPY), Reg)
            .addReg(CI.getDstReg(), getKillRegState(true));
      } else {
        const TargetRegisterClass *RC = TRI->getMinimalPhysRegClass(Reg);
        TII.loadRegFromStackSlot(RestoreBlock, I, Reg, CI.getFrameIdx(), RC,
                                 TRI, Register());
        assert(I != RestoreBlock.begin() &&
               "loadRegFromStackSlot didn't insert any code!");
        // Insert in reverse order. loadRegFromStackSlot can insert
        // multiple instructions.
      }
    }
  }
}

void PEI::spillCalleeSavedRegs(MachineFunction &MF) {
  // We can't list this requirement in getRequiredProperties because some
  // targets (WebAssembly) use virtual registers past this point, and the pass
  // pipeline is set up without giving the passes a chance to look at the
  // TargetMachine.
  // FIXME: Find a way to express this in getRequiredProperties.
  assert(MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::NoVRegs));

  const Function &F = MF.getFunction();
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  MinCSFrameIndex = std::numeric_limits<unsigned>::max();
  MaxCSFrameIndex = 0;

  // Determine which of the registers in the callee save list should be saved.
  BitVector SavedRegs;
  TFI->determineCalleeSaves(MF, SavedRegs, RS);

  // Assign stack slots for any callee-saved registers that must be spilled.
  assignCalleeSavedSpillSlots(MF, SavedRegs, MinCSFrameIndex, MaxCSFrameIndex);

  // Add the code to save and restore the callee saved registers.
  if (!F.hasFnAttribute(Attribute::Naked)) {
    MFI.setCalleeSavedInfoValid(true);

    std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
    if (!CSI.empty()) {
      if (!MFI.hasCalls())
        NumLeafFuncWithSpills++;

      for (MachineBasicBlock *SaveBlock : SaveBlocks)
        insertCSRSaves(*SaveBlock, CSI);

      // Update the live-in information of all the blocks up to the save point.
      updateLiveness(MF);

      for (MachineBasicBlock *RestoreBlock : RestoreBlocks)
        insertCSRRestores(*RestoreBlock, CSI);
    }
  }
}

/// AdjustStackOffset - Helper function used to adjust the stack frame offset.
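/// For a downward-growing stack this advances Offset past the object, rounds
/// it up to the object's alignment (honoring Skew), and records -Offset as
/// the object's offset; e.g. Offset = 12 and an 8-byte object with 8-byte
/// alignment yields Offset = 24 and an object offset of -24. MaxAlign is
/// raised to the object's alignment if needed.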
static inline void AdjustStackOffset(MachineFrameInfo &MFI, int FrameIdx,
                                     bool StackGrowsDown, int64_t &Offset,
                                     Align &MaxAlign, unsigned Skew) {
  // If the stack grows down, add the object size to find the lowest address.
  if (StackGrowsDown)
    Offset += MFI.getObjectSize(FrameIdx);

  Align Alignment = MFI.getObjectAlign(FrameIdx);

  // If the alignment of this object is greater than that of the stack, then
  // increase the stack alignment to match.
  MaxAlign = std::max(MaxAlign, Alignment);

  // Adjust to alignment boundary.
  Offset = alignTo(Offset, Alignment, Skew);

  if (StackGrowsDown) {
    LLVM_DEBUG(dbgs() << "alloc FI(" << FrameIdx << ") at SP[" << -Offset
                      << "]\n");
    MFI.setObjectOffset(FrameIdx, -Offset); // Set the computed offset
  } else {
    LLVM_DEBUG(dbgs() << "alloc FI(" << FrameIdx << ") at SP[" << Offset
                      << "]\n");
    MFI.setObjectOffset(FrameIdx, Offset);
    Offset += MFI.getObjectSize(FrameIdx);
  }
}

/// Compute which bytes of fixed and callee-save stack area are unused and keep
/// track of them in StackBytesFree.
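/// Bit N of StackBytesFree corresponds to the byte that is N bytes from the
/// local area base in the direction of stack growth, so for down-growing
/// stacks object offsets are negated before their bytes are marked as used.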
static inline void
computeFreeStackSlots(MachineFrameInfo &MFI, bool StackGrowsDown,
                      unsigned MinCSFrameIndex, unsigned MaxCSFrameIndex,
                      int64_t FixedCSEnd, BitVector &StackBytesFree) {
  // Avoid undefined int64_t -> int conversion below in extreme case.
  if (FixedCSEnd > std::numeric_limits<int>::max())
    return;

  StackBytesFree.resize(FixedCSEnd, true);

  SmallVector<int, 16> AllocatedFrameSlots;
  // Add fixed objects.
  for (int i = MFI.getObjectIndexBegin(); i != 0; ++i)
    // StackSlot scavenging is only implemented for the default stack.
    if (MFI.getStackID(i) == TargetStackID::Default)
      AllocatedFrameSlots.push_back(i);
  // Add callee-save objects if there are any.
  if (MinCSFrameIndex <= MaxCSFrameIndex) {
    for (int i = MinCSFrameIndex; i <= (int)MaxCSFrameIndex; ++i)
      if (MFI.getStackID(i) == TargetStackID::Default)
        AllocatedFrameSlots.push_back(i);
  }

  for (int i : AllocatedFrameSlots) {
    // These are converted from int64_t, but they should always fit in int
    // because of the FixedCSEnd check above.
    int ObjOffset = MFI.getObjectOffset(i);
    int ObjSize = MFI.getObjectSize(i);
    int ObjStart, ObjEnd;
    if (StackGrowsDown) {
      // ObjOffset is negative when StackGrowsDown is true.
      ObjStart = -ObjOffset - ObjSize;
      ObjEnd = -ObjOffset;
    } else {
      ObjStart = ObjOffset;
      ObjEnd = ObjOffset + ObjSize;
    }
    // Ignore fixed holes that are in the previous stack frame.
    if (ObjEnd > 0)
      StackBytesFree.reset(ObjStart, ObjEnd);
  }
}

/// Assign frame object to an unused portion of the stack in the fixed stack
/// object range. Return true if the allocation was successful.
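/// Candidate positions are taken from the free bits in StackBytesFree; a
/// position is accepted only if the resulting object offset satisfies the
/// object's alignment and every byte the object would occupy is free.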
static inline bool scavengeStackSlot(MachineFrameInfo &MFI, int FrameIdx,
                                     bool StackGrowsDown, Align MaxAlign,
                                     BitVector &StackBytesFree) {
  if (MFI.isVariableSizedObjectIndex(FrameIdx))
    return false;

  if (StackBytesFree.none()) {
    // clear it to speed up later scavengeStackSlot calls to
    // StackBytesFree.none()
    StackBytesFree.clear();
    return false;
  }

  Align ObjAlign = MFI.getObjectAlign(FrameIdx);
  if (ObjAlign > MaxAlign)
    return false;

  int64_t ObjSize = MFI.getObjectSize(FrameIdx);
  int FreeStart;
  for (FreeStart = StackBytesFree.find_first(); FreeStart != -1;
       FreeStart = StackBytesFree.find_next(FreeStart)) {

    // Check that free space has suitable alignment.
    unsigned ObjStart = StackGrowsDown ? FreeStart + ObjSize : FreeStart;
    if (alignTo(ObjStart, ObjAlign) != ObjStart)
      continue;

    if (FreeStart + ObjSize > StackBytesFree.size())
      return false;

    bool AllBytesFree = true;
    for (unsigned Byte = 0; Byte < ObjSize; ++Byte)
      if (!StackBytesFree.test(FreeStart + Byte)) {
        AllBytesFree = false;
        break;
      }
    if (AllBytesFree)
      break;
  }

  if (FreeStart == -1)
    return false;

  if (StackGrowsDown) {
    int ObjStart = -(FreeStart + ObjSize);
    LLVM_DEBUG(dbgs() << "alloc FI(" << FrameIdx << ") scavenged at SP["
                      << ObjStart << "]\n");
    MFI.setObjectOffset(FrameIdx, ObjStart);
  } else {
    LLVM_DEBUG(dbgs() << "alloc FI(" << FrameIdx << ") scavenged at SP["
                      << FreeStart << "]\n");
    MFI.setObjectOffset(FrameIdx, FreeStart);
  }

  StackBytesFree.reset(FreeStart, FreeStart + ObjSize);
  return true;
}

/// AssignProtectedObjSet - Helper function to assign large stack objects (i.e.,
/// those required to be close to the Stack Protector) to stack offsets.
static void AssignProtectedObjSet(const StackObjSet &UnassignedObjs,
                                  SmallSet<int, 16> &ProtectedObjs,
                                  MachineFrameInfo &MFI, bool StackGrowsDown,
                                  int64_t &Offset, Align &MaxAlign,
                                  unsigned Skew) {
  for (int i : UnassignedObjs) {
    AdjustStackOffset(MFI, i, StackGrowsDown, Offset, MaxAlign, Skew);
    ProtectedObjs.insert(i);
  }
}

/// calculateFrameObjectOffsets - Calculate actual frame offsets for all of the
/// abstract stack objects.
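/// Objects are laid out from the local area offset in roughly this order:
/// callee-saved spill slots, optionally the scavenging slots (when the target
/// wants them near the incoming SP), the pre-computed local allocation block,
/// the stack protector and the objects it protects, the remaining objects
/// (scavenged into fixed/callee-save holes when possible), the scavenging
/// slots otherwise, and finally the stack size is rounded up to the required
/// alignment.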
void PEI::calculateFrameObjectOffsets(MachineFunction &MF) {
  const TargetFrameLowering &TFI = *MF.getSubtarget().getFrameLowering();

  bool StackGrowsDown =
      TFI.getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown;

  // Loop over all of the stack objects, assigning sequential addresses...
  MachineFrameInfo &MFI = MF.getFrameInfo();

  // Start at the beginning of the local area.
  // The Offset is the distance from the stack top in the direction
  // of stack growth -- so it's always nonnegative.
  int LocalAreaOffset = TFI.getOffsetOfLocalArea();
  if (StackGrowsDown)
    LocalAreaOffset = -LocalAreaOffset;
  assert(LocalAreaOffset >= 0 &&
         "Local area offset should be in direction of stack growth");
  int64_t Offset = LocalAreaOffset;

  // Skew to be applied to alignment.
  unsigned Skew = TFI.getStackAlignmentSkew(MF);

#ifdef EXPENSIVE_CHECKS
  for (unsigned i = 0, e = MFI.getObjectIndexEnd(); i != e; ++i)
    if (!MFI.isDeadObjectIndex(i) &&
        MFI.getStackID(i) == TargetStackID::Default)
      assert(MFI.getObjectAlign(i) <= MFI.getMaxAlign() &&
             "MaxAlignment is invalid");
#endif

  // If there are fixed sized objects that are preallocated in the local area,
  // non-fixed objects can't be allocated right at the start of local area.
  // Adjust 'Offset' to point to the end of last fixed sized preallocated
  // object.
  for (int i = MFI.getObjectIndexBegin(); i != 0; ++i) {
    // Only allocate objects on the default stack.
    if (MFI.getStackID(i) != TargetStackID::Default)
      continue;

    int64_t FixedOff;
    if (StackGrowsDown) {
      // The maximum distance from the stack pointer is at lower address of
      // the object -- which is given by offset. For down growing stack
      // the offset is negative, so we negate the offset to get the distance.
      FixedOff = -MFI.getObjectOffset(i);
    } else {
      // The maximum distance from the start pointer is at the upper
      // address of the object.
      FixedOff = MFI.getObjectOffset(i) + MFI.getObjectSize(i);
    }
    if (FixedOff > Offset) Offset = FixedOff;
  }

  Align MaxAlign = MFI.getMaxAlign();
  // First assign frame offsets to stack objects that are used to spill
  // callee saved registers.
  if (MaxCSFrameIndex >= MinCSFrameIndex) {
    for (unsigned i = 0; i <= MaxCSFrameIndex - MinCSFrameIndex; ++i) {
      unsigned FrameIndex =
          StackGrowsDown ? MinCSFrameIndex + i : MaxCSFrameIndex - i;

      // Only allocate objects on the default stack.
      if (MFI.getStackID(FrameIndex) != TargetStackID::Default)
        continue;

      // TODO: should this just be if (MFI.isDeadObjectIndex(FrameIndex))
      if (!StackGrowsDown && MFI.isDeadObjectIndex(FrameIndex))
        continue;

      AdjustStackOffset(MFI, FrameIndex, StackGrowsDown, Offset, MaxAlign,
                        Skew);
    }
  }

  assert(MaxAlign == MFI.getMaxAlign() &&
         "MFI.getMaxAlign should already account for all callee-saved "
         "registers without a fixed stack slot");

  // FixedCSEnd is the stack offset to the end of the fixed and callee-save
  // stack area.
  int64_t FixedCSEnd = Offset;

  // Make sure the special register scavenging spill slot is closest to the
  // incoming stack pointer if a frame pointer is required and is closer
  // to the incoming rather than the final stack pointer.
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  bool EarlyScavengingSlots =
      TFI.allocateScavengingFrameIndexesNearIncomingSP(MF);
  if (RS && EarlyScavengingSlots) {
    SmallVector<int, 2> SFIs;
    RS->getScavengingFrameIndices(SFIs);
    for (int SFI : SFIs)
      AdjustStackOffset(MFI, SFI, StackGrowsDown, Offset, MaxAlign, Skew);
  }

  // FIXME: Once this is working, then enable flag will change to a target
  // check for whether the frame is large enough to want to use virtual
  // frame index registers. Functions which don't want/need this optimization
  // will continue to use the existing code path.
  if (MFI.getUseLocalStackAllocationBlock()) {
    Align Alignment = MFI.getLocalFrameMaxAlign();

    // Adjust to alignment boundary.
    Offset = alignTo(Offset, Alignment, Skew);

    LLVM_DEBUG(dbgs() << "Local frame base offset: " << Offset << "\n");

    // Resolve offsets for objects in the local block.
    for (unsigned i = 0, e = MFI.getLocalFrameObjectCount(); i != e; ++i) {
      std::pair<int, int64_t> Entry = MFI.getLocalFrameObjectMap(i);
      int64_t FIOffset = (StackGrowsDown ? -Offset : Offset) + Entry.second;
      LLVM_DEBUG(dbgs() << "alloc FI(" << Entry.first << ") at SP[" << FIOffset
                        << "]\n");
      MFI.setObjectOffset(Entry.first, FIOffset);
    }
    // Allocate the local block
    Offset += MFI.getLocalFrameSize();

    MaxAlign = std::max(Alignment, MaxAlign);
  }

  // Retrieve the Exception Handler registration node.
  int EHRegNodeFrameIndex = std::numeric_limits<int>::max();
  if (const WinEHFuncInfo *FuncInfo = MF.getWinEHFuncInfo())
    EHRegNodeFrameIndex = FuncInfo->EHRegNodeFrameIndex;

  // Make sure that the stack protector comes before the local variables on the
  // stack.
  SmallSet<int, 16> ProtectedObjs;
  if (MFI.hasStackProtectorIndex()) {
    int StackProtectorFI = MFI.getStackProtectorIndex();
    StackObjSet LargeArrayObjs;
    StackObjSet SmallArrayObjs;
    StackObjSet AddrOfObjs;

    // If we need a stack protector, we need to make sure that
    // LocalStackSlotPass didn't already allocate a slot for it.
    // If we are told to use the LocalStackAllocationBlock, the stack protector
    // is expected to be already pre-allocated.
    if (MFI.getStackID(StackProtectorFI) != TargetStackID::Default) {
      // If the stack protector isn't on the default stack then it's up to the
      // target to set the stack offset.
      assert(MFI.getObjectOffset(StackProtectorFI) != 0 &&
             "Offset of stack protector on non-default stack expected to be "
             "already set.");
      assert(!MFI.isObjectPreAllocated(MFI.getStackProtectorIndex()) &&
             "Stack protector on non-default stack expected to not be "
             "pre-allocated by LocalStackSlotPass.");
    } else if (!MFI.getUseLocalStackAllocationBlock()) {
      AdjustStackOffset(MFI, StackProtectorFI, StackGrowsDown, Offset, MaxAlign,
                        Skew);
    } else if (!MFI.isObjectPreAllocated(MFI.getStackProtectorIndex())) {
      llvm_unreachable(
          "Stack protector not pre-allocated by LocalStackSlotPass.");
    }

    // Assign large stack objects first.
    for (unsigned i = 0, e = MFI.getObjectIndexEnd(); i != e; ++i) {
      if (MFI.isObjectPreAllocated(i) && MFI.getUseLocalStackAllocationBlock())
        continue;
      if (i >= MinCSFrameIndex && i <= MaxCSFrameIndex)
        continue;
      if (RS && RS->isScavengingFrameIndex((int)i))
        continue;
      if (MFI.isDeadObjectIndex(i))
        continue;
      if (StackProtectorFI == (int)i || EHRegNodeFrameIndex == (int)i)
        continue;
      // Only allocate objects on the default stack.
      if (MFI.getStackID(i) != TargetStackID::Default)
        continue;

      switch (MFI.getObjectSSPLayout(i)) {
      case MachineFrameInfo::SSPLK_None:
        continue;
      case MachineFrameInfo::SSPLK_SmallArray:
        SmallArrayObjs.insert(i);
        continue;
      case MachineFrameInfo::SSPLK_AddrOf:
        AddrOfObjs.insert(i);
        continue;
      case MachineFrameInfo::SSPLK_LargeArray:
        LargeArrayObjs.insert(i);
        continue;
      }
      llvm_unreachable("Unexpected SSPLayoutKind.");
    }

    // We expect **all** the protected stack objects to be pre-allocated by
    // LocalStackSlotPass. If it turns out that PEI still has to allocate some
    // of them, we may end up messing up the expected order of the objects.
    if (MFI.getUseLocalStackAllocationBlock() &&
        !(LargeArrayObjs.empty() && SmallArrayObjs.empty() &&
          AddrOfObjs.empty()))
      llvm_unreachable("Found protected stack objects not pre-allocated by "
                       "LocalStackSlotPass.");

    AssignProtectedObjSet(LargeArrayObjs, ProtectedObjs, MFI, StackGrowsDown,
                          Offset, MaxAlign, Skew);
    AssignProtectedObjSet(SmallArrayObjs, ProtectedObjs, MFI, StackGrowsDown,
                          Offset, MaxAlign, Skew);
    AssignProtectedObjSet(AddrOfObjs, ProtectedObjs, MFI, StackGrowsDown,
                          Offset, MaxAlign, Skew);
  }

  SmallVector<int, 8> ObjectsToAllocate;

  // Then prepare to assign frame offsets to stack objects that are not used to
  // spill callee saved registers.
  for (unsigned i = 0, e = MFI.getObjectIndexEnd(); i != e; ++i) {
    if (MFI.isObjectPreAllocated(i) && MFI.getUseLocalStackAllocationBlock())
      continue;
    if (i >= MinCSFrameIndex && i <= MaxCSFrameIndex)
      continue;
    if (RS && RS->isScavengingFrameIndex((int)i))
      continue;
    if (MFI.isDeadObjectIndex(i))
      continue;
    if (MFI.getStackProtectorIndex() == (int)i || EHRegNodeFrameIndex == (int)i)
      continue;
    if (ProtectedObjs.count(i))
      continue;
    // Only allocate objects on the default stack.
    if (MFI.getStackID(i) != TargetStackID::Default)
      continue;

    // Add the objects that we need to allocate to our working set.
    ObjectsToAllocate.push_back(i);
  }

  // Allocate the EH registration node first if one is present.
  if (EHRegNodeFrameIndex != std::numeric_limits<int>::max())
    AdjustStackOffset(MFI, EHRegNodeFrameIndex, StackGrowsDown, Offset,
                      MaxAlign, Skew);

  // Give the targets a chance to order the objects the way they like it.
  if (MF.getTarget().getOptLevel() != CodeGenOpt::None &&
      MF.getTarget().Options.StackSymbolOrdering)
    TFI.orderFrameObjects(MF, ObjectsToAllocate);

  // Keep track of which bytes in the fixed and callee-save range are used so
  // we can use the holes when allocating later stack objects. Only do this if
  // stack protector isn't being used and the target requests it and we're
  // optimizing.
  BitVector StackBytesFree;
  if (!ObjectsToAllocate.empty() &&
      MF.getTarget().getOptLevel() != CodeGenOpt::None &&
      MFI.getStackProtectorIndex() < 0 && TFI.enableStackSlotScavenging(MF))
    computeFreeStackSlots(MFI, StackGrowsDown, MinCSFrameIndex, MaxCSFrameIndex,
                          FixedCSEnd, StackBytesFree);

  // Now walk the objects and actually assign base offsets to them.
  for (auto &Object : ObjectsToAllocate)
    if (!scavengeStackSlot(MFI, Object, StackGrowsDown, MaxAlign,
                           StackBytesFree))
      AdjustStackOffset(MFI, Object, StackGrowsDown, Offset, MaxAlign, Skew);

  // Make sure the special register scavenging spill slot is closest to the
  // stack pointer.
  if (RS && !EarlyScavengingSlots) {
    SmallVector<int, 2> SFIs;
    RS->getScavengingFrameIndices(SFIs);
    for (int SFI : SFIs)
      AdjustStackOffset(MFI, SFI, StackGrowsDown, Offset, MaxAlign, Skew);
  }

  if (!TFI.targetHandlesStackFrameRounding()) {
    // If we have reserved argument space for call sites in the function
    // immediately on entry to the current function, count it as part of the
    // overall stack size.
    if (MFI.adjustsStack() && TFI.hasReservedCallFrame(MF))
      Offset += MFI.getMaxCallFrameSize();

    // Round up the size to a multiple of the alignment. If the function has
    // any calls or alloca's, align to the target's StackAlignment value to
    // ensure that the callee's frame or the alloca data is suitably aligned;
    // otherwise, for leaf functions, align to the TransientStackAlignment
    // value.
    Align StackAlign;
    if (MFI.adjustsStack() || MFI.hasVarSizedObjects() ||
        (RegInfo->hasStackRealignment(MF) && MFI.getObjectIndexEnd() != 0))
      StackAlign = TFI.getStackAlign();
    else
      StackAlign = TFI.getTransientStackAlign();

    // If the frame pointer is eliminated, all frame offsets will be relative
    // to SP not FP. Align to MaxAlign so this works.
    StackAlign = std::max(StackAlign, MaxAlign);
    int64_t OffsetBeforeAlignment = Offset;
    Offset = alignTo(Offset, StackAlign, Skew);
    // If we have increased the offset to fulfill the alignment constraints,
    // then the scavenging spill slots may become harder to reach from the
    // stack pointer; float them so they stay close.
    if (StackGrowsDown && OffsetBeforeAlignment != Offset && RS &&
        !EarlyScavengingSlots) {
      SmallVector<int, 2> SFIs;
      RS->getScavengingFrameIndices(SFIs);
      LLVM_DEBUG(if (!SFIs.empty()) llvm::dbgs()
                     << "Adjusting emergency spill slots!\n";);
      int64_t Delta = Offset - OffsetBeforeAlignment;
      for (int SFI : SFIs) {
        LLVM_DEBUG(llvm::dbgs()
                   << "Adjusting offset of emergency spill slot #" << SFI
                   << " from " << MFI.getObjectOffset(SFI););
        MFI.setObjectOffset(SFI, MFI.getObjectOffset(SFI) - Delta);
        LLVM_DEBUG(llvm::dbgs() << " to " << MFI.getObjectOffset(SFI) << "\n";);
      }
    }
  }

  // Update frame info to pretend that this is part of the stack...
  int64_t StackSize = Offset - LocalAreaOffset;
  MFI.setStackSize(StackSize);
  NumBytesStackSpace += StackSize;
}

/// insertPrologEpilogCode - Scan the function for modified callee saved
/// registers, insert spill code for these callee saved registers, then add
/// prolog and epilog code to the function.
void PEI::insertPrologEpilogCode(MachineFunction &MF) {
  const TargetFrameLowering &TFI = *MF.getSubtarget().getFrameLowering();

  // Add prologue to the function...
  for (MachineBasicBlock *SaveBlock : SaveBlocks)
    TFI.emitPrologue(MF, *SaveBlock);

  // Add epilogue to restore the callee-save registers in each exiting block.
  for (MachineBasicBlock *RestoreBlock : RestoreBlocks)
    TFI.emitEpilogue(MF, *RestoreBlock);

  // Zero call used registers before restoring callee-saved registers.
  insertZeroCallUsedRegs(MF);

  for (MachineBasicBlock *SaveBlock : SaveBlocks)
    TFI.inlineStackProbe(MF, *SaveBlock);

  // Emit additional code that is required to support segmented stacks, if
  // we've been asked for it. This, when linked with a runtime with support
  // for segmented stacks (libgcc is one), will result in allocating stack
  // space in small chunks instead of one large contiguous block.
  if (MF.shouldSplitStack()) {
    for (MachineBasicBlock *SaveBlock : SaveBlocks)
      TFI.adjustForSegmentedStacks(MF, *SaveBlock);
  }

  // Emit additional code that is required to explicitly handle the stack in
  // HiPE native code (if needed) when loaded in the Erlang/OTP runtime. The
  // approach is rather similar to that of Segmented Stacks, but it uses a
  // different conditional check and another BIF for allocating more stack
  // space.
  if (MF.getFunction().getCallingConv() == CallingConv::HiPE)
    for (MachineBasicBlock *SaveBlock : SaveBlocks)
      TFI.adjustForHiPEPrologue(MF, *SaveBlock);
}

/// insertZeroCallUsedRegs - Zero out call used registers.
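/// The set of registers to zero is derived from the "zero-call-used-regs"
/// attribute, optionally restricted to general-purpose, used, and/or argument
/// registers, and then pruned of fixed registers, callee-saved registers, and
/// registers read or written by the return/terminator instructions; the
/// target emits the actual zeroing sequence in each return block.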
  1030. void PEI::insertZeroCallUsedRegs(MachineFunction &MF) {
  1031. const Function &F = MF.getFunction();
  1032. if (!F.hasFnAttribute("zero-call-used-regs"))
  1033. return;
  1034. using namespace ZeroCallUsedRegs;
  1035. ZeroCallUsedRegsKind ZeroRegsKind =
  1036. StringSwitch<ZeroCallUsedRegsKind>(
  1037. F.getFnAttribute("zero-call-used-regs").getValueAsString())
  1038. .Case("skip", ZeroCallUsedRegsKind::Skip)
  1039. .Case("used-gpr-arg", ZeroCallUsedRegsKind::UsedGPRArg)
  1040. .Case("used-gpr", ZeroCallUsedRegsKind::UsedGPR)
  1041. .Case("used-arg", ZeroCallUsedRegsKind::UsedArg)
  1042. .Case("used", ZeroCallUsedRegsKind::Used)
  1043. .Case("all-gpr-arg", ZeroCallUsedRegsKind::AllGPRArg)
  1044. .Case("all-gpr", ZeroCallUsedRegsKind::AllGPR)
  1045. .Case("all-arg", ZeroCallUsedRegsKind::AllArg)
  1046. .Case("all", ZeroCallUsedRegsKind::All);
  1047. if (ZeroRegsKind == ZeroCallUsedRegsKind::Skip)
  1048. return;
  1049. const bool OnlyGPR = static_cast<unsigned>(ZeroRegsKind) & ONLY_GPR;
  1050. const bool OnlyUsed = static_cast<unsigned>(ZeroRegsKind) & ONLY_USED;
  1051. const bool OnlyArg = static_cast<unsigned>(ZeroRegsKind) & ONLY_ARG;
  1052. const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();
  1053. const BitVector AllocatableSet(TRI.getAllocatableSet(MF));
  1054. // Mark all used registers.
  1055. BitVector UsedRegs(TRI.getNumRegs());
  1056. if (OnlyUsed)
  1057. for (const MachineBasicBlock &MBB : MF)
  1058. for (const MachineInstr &MI : MBB) {
  1059. // skip debug instructions
  1060. if (MI.isDebugInstr())
  1061. continue;
  1062. for (const MachineOperand &MO : MI.operands()) {
  1063. if (!MO.isReg())
  1064. continue;
  1065. MCRegister Reg = MO.getReg();
  1066. if (AllocatableSet[Reg] && !MO.isImplicit() &&
  1067. (MO.isDef() || MO.isUse()))
  1068. UsedRegs.set(Reg);
  1069. }
  1070. }
  1071. // Get a list of registers that are used.
  BitVector LiveIns(TRI.getNumRegs());
  for (const MachineBasicBlock::RegisterMaskPair &LI : MF.front().liveins())
    LiveIns.set(LI.PhysReg);

  BitVector RegsToZero(TRI.getNumRegs());
  for (MCRegister Reg : AllocatableSet.set_bits()) {
    // Skip over fixed registers.
    if (TRI.isFixedRegister(MF, Reg))
      continue;

    // Want only general purpose registers.
    if (OnlyGPR && !TRI.isGeneralPurposeRegister(MF, Reg))
      continue;

    // Want only used registers.
    if (OnlyUsed && !UsedRegs[Reg])
      continue;

    // Want only registers used for arguments.
    if (OnlyArg) {
      if (OnlyUsed) {
        if (!LiveIns[Reg])
          continue;
      } else if (!TRI.isArgumentRegister(MF, Reg)) {
        continue;
      }
    }

    RegsToZero.set(Reg);
  }

  // Don't clear registers that are live when leaving the function.
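  // For example, a value returned in $eax shows up as an implicit use on the
  // return instruction; zeroing $eax (or any register aliasing it) here would
  // clobber the return value.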
  for (const MachineBasicBlock &MBB : MF)
    for (const MachineInstr &MI : MBB.terminators()) {
      if (!MI.isReturn())
        continue;

      for (const auto &MO : MI.operands()) {
        if (!MO.isReg())
          continue;

        MCRegister Reg = MO.getReg();

        // This picks up sibling registers (e.g. %al -> %ah).
        for (MCRegUnitIterator Unit(Reg, &TRI); Unit.isValid(); ++Unit)
          RegsToZero.reset(*Unit);

        for (MCPhysReg SReg : TRI.sub_and_superregs_inclusive(Reg))
          RegsToZero.reset(SReg);
      }
    }

  // Don't need to clear registers that are used/clobbered by terminating
  // instructions.
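  // (A tail call, for instance, reads its argument registers in the
  // terminator, so zeroing them beforehand would break the call.)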
  for (const MachineBasicBlock &MBB : MF) {
    if (!MBB.isReturnBlock())
      continue;

    MachineBasicBlock::const_iterator MBBI = MBB.getFirstTerminator();
    for (MachineBasicBlock::const_iterator I = MBBI, E = MBB.end(); I != E;
         ++I) {
      for (const MachineOperand &MO : I->operands()) {
        if (!MO.isReg())
          continue;

        for (const MCPhysReg &Reg :
             TRI.sub_and_superregs_inclusive(MO.getReg()))
          RegsToZero.reset(Reg);
      }
    }
  }

  // Don't clear registers that must be preserved.
  for (const MCPhysReg *CSRegs = TRI.getCalleeSavedRegs(&MF);
       MCPhysReg CSReg = *CSRegs; ++CSRegs)
    for (MCRegister Reg : TRI.sub_and_superregs_inclusive(CSReg))
      RegsToZero.reset(Reg);

  const TargetFrameLowering &TFI = *MF.getSubtarget().getFrameLowering();
  for (MachineBasicBlock &MBB : MF)
    if (MBB.isReturnBlock())
      TFI.emitZeroCallUsedRegs(RegsToZero, MBB);
}

/// replaceFrameIndices - Replace all MO_FrameIndex operands with physical
/// register references and actual offsets.
void PEI::replaceFrameIndices(MachineFunction &MF) {
  const auto &ST = MF.getSubtarget();
  const TargetFrameLowering &TFI = *ST.getFrameLowering();
  if (!TFI.needsFrameIndexResolution(MF))
    return;

  const TargetRegisterInfo *TRI = ST.getRegisterInfo();

  // Allow the target to determine this after knowing the frame size.
  FrameIndexEliminationScavenging = (RS && !FrameIndexVirtualScavenging) ||
    TRI->requiresFrameIndexReplacementScavenging(MF);

  // Store SPAdj at exit of a basic block.
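  // Successor blocks start from that value, so an SP adjustment that is still
  // open at the end of a block (e.g. a call-frame setup whose matching
  // destroy lives in a later block) is carried into the blocks that follow.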
  SmallVector<int, 8> SPState;
  SPState.resize(MF.getNumBlockIDs());
  df_iterator_default_set<MachineBasicBlock*> Reachable;

  // Iterate over the reachable blocks in DFS order.
  for (auto DFI = df_ext_begin(&MF, Reachable), DFE = df_ext_end(&MF, Reachable);
       DFI != DFE; ++DFI) {
    int SPAdj = 0;
    // Check the exit state of the DFS stack predecessor.
    if (DFI.getPathLength() >= 2) {
      MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
      assert(Reachable.count(StackPred) &&
             "DFS stack predecessor is already visited.\n");
      SPAdj = SPState[StackPred->getNumber()];
    }
    MachineBasicBlock *BB = *DFI;
    replaceFrameIndices(BB, MF, SPAdj);
    SPState[BB->getNumber()] = SPAdj;
  }

  // Handle the unreachable blocks.
  for (auto &BB : MF) {
    if (Reachable.count(&BB))
      // Already handled in DFS traversal.
      continue;
    int SPAdj = 0;
    replaceFrameIndices(&BB, MF, SPAdj);
  }
}

bool PEI::replaceFrameIndexDebugInstr(MachineFunction &MF, MachineInstr &MI,
                                      unsigned OpIdx, int SPAdj) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();

  if (MI.isDebugValue()) {
    MachineOperand &Op = MI.getOperand(OpIdx);
    assert(MI.isDebugOperand(&Op) &&
           "Frame indices can only appear as a debug operand in a DBG_VALUE*"
           " machine instruction");

    Register Reg;
    unsigned FrameIdx = Op.getIndex();
    unsigned Size = MF.getFrameInfo().getObjectSize(FrameIdx);

    StackOffset Offset = TFI->getFrameIndexReference(MF, FrameIdx, Reg);
    Op.ChangeToRegister(Reg, false /*isDef*/);

    const DIExpression *DIExpr = MI.getDebugExpression();

    // If we have a direct DBG_VALUE, and its location expression isn't
    // currently complex, then adding an offset will morph it into a
    // complex location that is interpreted as being a memory address.
    // This changes a pointer-valued variable to dereference that pointer,
    // which is incorrect. Fix by adding DW_OP_stack_value.
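    // Illustrative sketch (frame register and offset are hypothetical): a
    // direct DBG_VALUE of %stack.0 with an empty !DIExpression() ends up as a
    // DBG_VALUE of the frame register with
    // !DIExpression(DW_OP_plus_uconst, <offset>, DW_OP_stack_value).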
    if (MI.isNonListDebugValue()) {
      unsigned PrependFlags = DIExpression::ApplyOffset;
      if (!MI.isIndirectDebugValue() && !DIExpr->isComplex())
        PrependFlags |= DIExpression::StackValue;

      // If we have a DBG_VALUE that is indirect and has an implicit location
      // expression, we need to insert a deref before prepending a memory
      // location expression. After doing this we also change the DBG_VALUE to
      // be direct.
      if (MI.isIndirectDebugValue() && DIExpr->isImplicit()) {
        SmallVector<uint64_t, 2> Ops = {dwarf::DW_OP_deref_size, Size};
        bool WithStackValue = true;
        DIExpr = DIExpression::prependOpcodes(DIExpr, Ops, WithStackValue);

        // Make the DBG_VALUE direct.
        MI.getDebugOffset().ChangeToRegister(0, false);
      }
      DIExpr = TRI.prependOffsetExpression(DIExpr, PrependFlags, Offset);
    } else {
      // The debug operand at DebugOpIndex was a frame index at offset
      // `Offset`; now that the operand has been replaced with the frame
      // register, we must add Offset with `register x, plus Offset`.
      unsigned DebugOpIndex = MI.getDebugOperandIndex(&Op);
      SmallVector<uint64_t, 3> Ops;
      TRI.getOffsetOpcodes(Offset, Ops);
      DIExpr = DIExpression::appendOpsToArg(DIExpr, Ops, DebugOpIndex);
    }

    MI.getDebugExpressionOp().setMetadata(DIExpr);
    return true;
  }

  if (MI.isDebugPHI()) {
    // Allow stack ref to continue onwards.
    return true;
  }

  // TODO: This code should be commoned with the code for
  // PATCHPOINT. There's no good reason for the difference in
  // implementation other than historical accident. The only
  // remaining difference is the unconditional use of the stack
  // pointer as the base register.
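  // A STATEPOINT references a spill slot as a frame-index operand followed by
  // an immediate offset; the code below folds the resolved frame-register
  // offset (plus any pending SP adjustment) into that immediate.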
  if (MI.getOpcode() == TargetOpcode::STATEPOINT) {
    assert((!MI.isDebugValue() || OpIdx == 0) &&
           "Frame indices can only appear as the first operand of a "
           "DBG_VALUE machine instruction");
    Register Reg;
    MachineOperand &Offset = MI.getOperand(OpIdx + 1);
    StackOffset refOffset = TFI->getFrameIndexReferencePreferSP(
        MF, MI.getOperand(OpIdx).getIndex(), Reg, /*IgnoreSPUpdates*/ false);
    assert(!refOffset.getScalable() &&
           "Frame offsets with a scalable component are not supported");
    Offset.setImm(Offset.getImm() + refOffset.getFixed() + SPAdj);
    MI.getOperand(OpIdx).ChangeToRegister(Reg, false /*isDef*/);
    return true;
  }

  return false;
}

void PEI::replaceFrameIndicesBackward(MachineBasicBlock *BB,
                                      MachineFunction &MF, int &SPAdj) {
  assert(MF.getSubtarget().getRegisterInfo() &&
         "getRegisterInfo() must be implemented!");

  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();

  RS->enterBasicBlockEnd(*BB);

  for (MachineInstr &MI : make_early_inc_range(reverse(*BB))) {
    // Register scavenger backward step
    MachineBasicBlock::iterator Step(MI);
    for (unsigned i = 0; i != MI.getNumOperands(); ++i) {
      if (!MI.getOperand(i).isFI())
        continue;

      if (replaceFrameIndexDebugInstr(MF, MI, i, SPAdj))
        continue;

      // If this instruction has a FrameIndex operand, we need to
      // use that target machine register info object to eliminate
      // it.

      // TRI.eliminateFrameIndex may lower the frame index to a sequence of
      // instructions. It also can remove/change instructions passed by the
      // iterator and invalidate the iterator. We have to take care of this. For
      // that we support two iterators: *Step* - points to the position up to
      // which the scavenger should scan by the next iteration to have liveness
      // information up to date. *Curr* - keeps track of the correct RS->MBBI -
      // the scan start point. It points to the currently processed instruction
      // right before the frame lowering.
      //
      // ITERATORS WORK AS FOLLOWS:
      // *Step* is shifted one step back right before the frame lowering and
      // one step forward right after it. No matter how many instructions were
      // inserted, *Step* will be right after the position which is going to be
      // processed in the next iteration, thus, in the correct position for the
      // scavenger to go up to.
      // *Curr* is shifted one step forward right before calling
      // TRI.eliminateFrameIndex and one step backward after. Thus, we make sure
      // it points right to the position that is the correct starting point for
      // the scavenger to scan.
      MachineBasicBlock::iterator Curr = ++RS->getCurrentPosition();

      // Shift back
      --Step;

      bool Removed = TRI.eliminateFrameIndex(MI, SPAdj, i, RS);
      // Restore to unify logic with the shift back that happens at the end of
      // the outer loop.
      ++Step;
      RS->skipTo(--Curr);

      if (Removed)
        break;
    }

    // Shift it to make RS collect reg info up to the current instruction.
    if (Step != BB->begin())
      Step--;

    // Update register states.
    RS->backward(Step);
  }
}

void PEI::replaceFrameIndices(MachineBasicBlock *BB, MachineFunction &MF,
                              int &SPAdj) {
  assert(MF.getSubtarget().getRegisterInfo() &&
         "getRegisterInfo() must be implemented!");
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();

  if (RS && TRI.supportsBackwardScavenger())
    return replaceFrameIndicesBackward(BB, MF, SPAdj);

  if (RS && FrameIndexEliminationScavenging)
    RS->enterBasicBlock(*BB);

  bool InsideCallSequence = false;

  for (MachineBasicBlock::iterator I = BB->begin(); I != BB->end(); ) {
    if (TII.isFrameInstr(*I)) {
      InsideCallSequence = TII.isFrameSetup(*I);
      SPAdj += TII.getSPAdjust(*I);
      I = TFI->eliminateCallFramePseudoInstr(MF, *BB, I);
      continue;
    }

    MachineInstr &MI = *I;
    bool DoIncr = true;
    bool DidFinishLoop = true;
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      if (!MI.getOperand(i).isFI())
        continue;

      if (replaceFrameIndexDebugInstr(MF, MI, i, SPAdj))
        continue;

      // Some instructions (e.g. inline asm instructions) can have
      // multiple frame indices and/or cause eliminateFrameIndex
      // to insert more than one instruction. We need the register
      // scavenger to go through all of these instructions so that
      // it can update its register information. We keep the
      // iterator at the point before insertion so that we can
      // revisit them in full.
      bool AtBeginning = (I == BB->begin());
      if (!AtBeginning) --I;

      // If this instruction has a FrameIndex operand, we need to
      // use that target machine register info object to eliminate
      // it.
      TRI.eliminateFrameIndex(MI, SPAdj, i,
                              FrameIndexEliminationScavenging ? RS : nullptr);

      // Reset the iterator if we were at the beginning of the BB.
      if (AtBeginning) {
        I = BB->begin();
        DoIncr = false;
      }

      DidFinishLoop = false;
      break;
    }

    // If we are looking at a call sequence, we need to keep track of
    // the SP adjustment made by each instruction in the sequence.
    // This includes both the frame setup/destroy pseudos (handled above),
    // as well as other instructions that have side effects w.r.t the SP.
    // Note that this must come after eliminateFrameIndex, because
    // if I itself referred to a frame index, we shouldn't count its own
    // adjustment.
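    // (For example, on targets that pass some arguments with push
    // instructions, each push between the call-frame setup and destroy
    // pseudos moves SP, and TII.getSPAdjust() reports that amount.)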
    if (DidFinishLoop && InsideCallSequence)
      SPAdj += TII.getSPAdjust(MI);

    if (DoIncr && I != BB->end()) ++I;

    // Update register states.
    if (RS && FrameIndexEliminationScavenging && DidFinishLoop)
      RS->forward(MI);
  }
}