//===- Thumb1FrameLowering.cpp - Thumb1 Frame Information -----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the Thumb1 implementation of TargetFrameLowering class.
//
//===----------------------------------------------------------------------===//

#include "Thumb1FrameLowering.h"
#include "ARMBaseInstrInfo.h"
#include "ARMBaseRegisterInfo.h"
#include "ARMMachineFunctionInfo.h"
#include "ARMSubtarget.h"
#include "Thumb1InstrInfo.h"
#include "ThumbRegisterInfo.h"
#include "Utils/ARMBaseInfo.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/LivePhysRegs.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCDwarf.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>
#include <bitset>
#include <cassert>
#include <iterator>
#include <set>
#include <vector>

using namespace llvm;

Thumb1FrameLowering::Thumb1FrameLowering(const ARMSubtarget &sti)
    : ARMFrameLowering(sti) {}

bool Thumb1FrameLowering::hasReservedCallFrame(const MachineFunction &MF) const{
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned CFSize = MFI.getMaxCallFrameSize();
  // It's not always a good idea to include the call frame as part of the
  // stack frame. ARM (especially Thumb) has only a small immediate offset
  // range for addressing the stack frame, so a large call frame can cause
  // poor codegen and may even make it impossible to scavenge a register.
  if (CFSize >= ((1 << 8) - 1) * 4 / 2) // Half of imm8 * 4
    return false;

  return !MFI.hasVarSizedObjects();
}
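
// Illustrative arithmetic for the check above (a sketch of the reasoning, not
// taken verbatim from the surrounding comments): Thumb1 SP-relative loads and
// stores encode an 8-bit immediate scaled by 4, so they reach at most
// (2^8 - 1) * 4 = 1020 bytes. The reserved call frame is therefore only
// accepted while it stays below half of that range (510 bytes), keeping the
// remaining encoding range available for addressing locals.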

static void
emitPrologueEpilogueSPUpdate(MachineBasicBlock &MBB,
                             MachineBasicBlock::iterator &MBBI,
                             const TargetInstrInfo &TII, const DebugLoc &dl,
                             const ThumbRegisterInfo &MRI, int NumBytes,
                             unsigned ScratchReg, unsigned MIFlags) {
  // If it would take more than three instructions to adjust the stack pointer
  // using tADDspi/tSUBspi, load an immediate instead.
  if (std::abs(NumBytes) > 508 * 3) {
    // We use a different codepath here from the normal
    // emitThumbRegPlusImmediate so we don't have to deal with register
    // scavenging. (Scavenging could try to use the emergency spill slot
    // before we've actually finished setting up the stack.)
    if (ScratchReg == ARM::NoRegister)
      report_fatal_error("Failed to emit Thumb1 stack adjustment");
    MachineFunction &MF = *MBB.getParent();
    const ARMSubtarget &ST = MF.getSubtarget<ARMSubtarget>();
    if (ST.genExecuteOnly()) {
      BuildMI(MBB, MBBI, dl, TII.get(ARM::t2MOVi32imm), ScratchReg)
          .addImm(NumBytes)
          .setMIFlags(MIFlags);
    } else {
      MRI.emitLoadConstPool(MBB, MBBI, dl, ScratchReg, 0, NumBytes, ARMCC::AL,
                            0, MIFlags);
    }
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tADDhirr), ARM::SP)
        .addReg(ARM::SP)
        .addReg(ScratchReg, RegState::Kill)
        .add(predOps(ARMCC::AL))
        .setMIFlags(MIFlags);
    return;
  }

  // FIXME: This is assuming the heuristics in emitThumbRegPlusImmediate
  // won't change.
  emitThumbRegPlusImmediate(MBB, MBBI, dl, ARM::SP, ARM::SP, NumBytes, TII,
                            MRI, MIFlags);
}
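
// For illustration: with NumBytes = -2048 (beyond the 3 * 508 = 1524 bytes
// reachable via tADDspi/tSUBspi), the path above emits roughly
//   ldr  <scratch>, <constant-pool slot holding -2048>   @ t2MOVi32imm under
//                                                        @ execute-only
//   add  sp, <scratch>                                   @ tADDhirr
// where <scratch> is whatever register the caller passed in; the constant
// pool label is hypothetical.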

static void emitCallSPUpdate(MachineBasicBlock &MBB,
                             MachineBasicBlock::iterator &MBBI,
                             const TargetInstrInfo &TII, const DebugLoc &dl,
                             const ThumbRegisterInfo &MRI, int NumBytes,
                             unsigned MIFlags = MachineInstr::NoFlags) {
  emitThumbRegPlusImmediate(MBB, MBBI, dl, ARM::SP, ARM::SP, NumBytes, TII,
                            MRI, MIFlags);
}

MachineBasicBlock::iterator Thumb1FrameLowering::
eliminateCallFramePseudoInstr(MachineFunction &MF, MachineBasicBlock &MBB,
                              MachineBasicBlock::iterator I) const {
  const Thumb1InstrInfo &TII =
      *static_cast<const Thumb1InstrInfo *>(STI.getInstrInfo());
  const ThumbRegisterInfo *RegInfo =
      static_cast<const ThumbRegisterInfo *>(STI.getRegisterInfo());
  if (!hasReservedCallFrame(MF)) {
    // If we have alloca, convert as follows:
    // ADJCALLSTACKDOWN -> sub, sp, sp, amount
    // ADJCALLSTACKUP   -> add, sp, sp, amount
    MachineInstr &Old = *I;
    DebugLoc dl = Old.getDebugLoc();
    unsigned Amount = TII.getFrameSize(Old);
    if (Amount != 0) {
      // We need to keep the stack aligned properly. To do this, we round the
      // amount of space needed for the outgoing arguments up to the next
      // alignment boundary.
      Amount = alignTo(Amount, getStackAlign());

      // Replace the pseudo instruction with a new instruction...
      unsigned Opc = Old.getOpcode();
      if (Opc == ARM::ADJCALLSTACKDOWN || Opc == ARM::tADJCALLSTACKDOWN) {
        emitCallSPUpdate(MBB, I, TII, dl, *RegInfo, -Amount);
      } else {
        assert(Opc == ARM::ADJCALLSTACKUP || Opc == ARM::tADJCALLSTACKUP);
        emitCallSPUpdate(MBB, I, TII, dl, *RegInfo, Amount);
      }
    }
  }
  return MBB.erase(I);
}
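
// Rough example of the lowering above when the call frame is not reserved
// (e.g. in a function with variable-sized objects), for an 8-byte outgoing
// argument area:
//   ADJCALLSTACKDOWN 8   ->   sub sp, #8
//   ...call...
//   ADJCALLSTACKUP 8     ->   add sp, #8
// with the amount first rounded up to the stack alignment.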

void Thumb1FrameLowering::emitPrologue(MachineFunction &MF,
                                       MachineBasicBlock &MBB) const {
  MachineBasicBlock::iterator MBBI = MBB.begin();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  ARMFunctionInfo *AFI = MF.getInfo<ARMFunctionInfo>();
  MachineModuleInfo &MMI = MF.getMMI();
  const MCRegisterInfo *MRI = MMI.getContext().getRegisterInfo();
  const ThumbRegisterInfo *RegInfo =
      static_cast<const ThumbRegisterInfo *>(STI.getRegisterInfo());
  const Thumb1InstrInfo &TII =
      *static_cast<const Thumb1InstrInfo *>(STI.getInstrInfo());

  unsigned ArgRegsSaveSize = AFI->getArgRegsSaveSize();
  unsigned NumBytes = MFI.getStackSize();
  assert(NumBytes >= ArgRegsSaveSize &&
         "ArgRegsSaveSize is included in NumBytes");
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();

  // Debug location must be unknown since the first debug location is used
  // to determine the end of the prologue.
  DebugLoc dl;

  Register FramePtr = RegInfo->getFrameRegister(MF);
  Register BasePtr = RegInfo->getBaseRegister();
  int CFAOffset = 0;

  // Thumb add/sub sp, imm8 instructions implicitly multiply the offset by 4.
  NumBytes = (NumBytes + 3) & ~3;
  MFI.setStackSize(NumBytes);

  // Determine the size of each callee-saved spill area and record which
  // frame index belongs to which area.
  unsigned FRSize = 0, GPRCS1Size = 0, GPRCS2Size = 0, DPRCSSize = 0;
  int FramePtrSpillFI = 0;

  if (ArgRegsSaveSize) {
    emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo, -ArgRegsSaveSize,
                                 ARM::NoRegister, MachineInstr::FrameSetup);
    CFAOffset += ArgRegsSaveSize;
    unsigned CFIIndex =
        MF.addFrameInst(MCCFIInstruction::cfiDefCfaOffset(nullptr, CFAOffset));
    BuildMI(MBB, MBBI, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
        .addCFIIndex(CFIIndex)
        .setMIFlags(MachineInstr::FrameSetup);
  }

  if (!AFI->hasStackFrame()) {
    if (NumBytes - ArgRegsSaveSize != 0) {
      emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo,
                                   -(NumBytes - ArgRegsSaveSize),
                                   ARM::NoRegister, MachineInstr::FrameSetup);
      CFAOffset += NumBytes - ArgRegsSaveSize;
      unsigned CFIIndex = MF.addFrameInst(
          MCCFIInstruction::cfiDefCfaOffset(nullptr, CFAOffset));
      BuildMI(MBB, MBBI, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
          .addCFIIndex(CFIIndex)
          .setMIFlags(MachineInstr::FrameSetup);
    }
    return;
  }

  bool HasFrameRecordArea = hasFP(MF) && ARM::hGPRRegClass.contains(FramePtr);

  for (const CalleeSavedInfo &I : CSI) {
    Register Reg = I.getReg();
    int FI = I.getFrameIdx();
    if (Reg == FramePtr)
      FramePtrSpillFI = FI;
    switch (Reg) {
    case ARM::R11:
      if (HasFrameRecordArea) {
        FRSize += 4;
        break;
      }
      [[fallthrough]];
    case ARM::R8:
    case ARM::R9:
    case ARM::R10:
      if (STI.splitFramePushPop(MF)) {
        GPRCS2Size += 4;
        break;
      }
      [[fallthrough]];
    case ARM::LR:
      if (HasFrameRecordArea) {
        FRSize += 4;
        break;
      }
      [[fallthrough]];
    case ARM::R4:
    case ARM::R5:
    case ARM::R6:
    case ARM::R7:
      GPRCS1Size += 4;
      break;
    default:
      DPRCSSize += 8;
    }
  }

  MachineBasicBlock::iterator FRPush, GPRCS1Push, GPRCS2Push;
  if (HasFrameRecordArea) {
    // Skip Frame Record setup:
    //   push {lr}
    //   mov  lr, r11
    //   push {lr}
    std::advance(MBBI, 2);
    FRPush = MBBI++;
  }

  if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tPUSH) {
    GPRCS1Push = MBBI;
    ++MBBI;
  }

  // Find the last push instruction for GPRCS2 - spilling of high registers
  // (r8-r11) could consist of multiple tPUSH and tMOVr instructions.
  while (true) {
    MachineBasicBlock::iterator OldMBBI = MBBI;
    // Skip a run of tMOVr instructions
    while (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tMOVr &&
           MBBI->getFlag(MachineInstr::FrameSetup))
      MBBI++;
    if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tPUSH &&
        MBBI->getFlag(MachineInstr::FrameSetup)) {
      GPRCS2Push = MBBI;
      MBBI++;
    } else {
      // We have reached an instruction which is not a push, so the previous
      // run of tMOVr instructions (which may have been empty) was not part of
      // the prologue. Reset MBBI back to the last PUSH of the prologue.
      MBBI = OldMBBI;
      break;
    }
  }

  // Determine starting offsets of spill areas.
  unsigned DPRCSOffset = NumBytes - ArgRegsSaveSize -
                         (FRSize + GPRCS1Size + GPRCS2Size + DPRCSSize);
  unsigned GPRCS2Offset = DPRCSOffset + DPRCSSize;
  unsigned GPRCS1Offset = GPRCS2Offset + GPRCS2Size;
  bool HasFP = hasFP(MF);
  if (HasFP)
    AFI->setFramePtrSpillOffset(MFI.getObjectOffset(FramePtrSpillFI) +
                                NumBytes);
  if (HasFrameRecordArea)
    AFI->setFrameRecordSavedAreaSize(FRSize);
  AFI->setGPRCalleeSavedArea1Offset(GPRCS1Offset);
  AFI->setGPRCalleeSavedArea2Offset(GPRCS2Offset);
  AFI->setDPRCalleeSavedAreaOffset(DPRCSOffset);
  NumBytes = DPRCSOffset;

  int FramePtrOffsetInBlock = 0;
  unsigned adjustedGPRCS1Size = GPRCS1Size;
  if (GPRCS1Size > 0 && GPRCS2Size == 0 &&
      tryFoldSPUpdateIntoPushPop(STI, MF, &*(GPRCS1Push), NumBytes)) {
    FramePtrOffsetInBlock = NumBytes;
    adjustedGPRCS1Size += NumBytes;
    NumBytes = 0;
  }
  CFAOffset += adjustedGPRCS1Size;

  // Adjust FP so it points to the stack slot that contains the previous FP.
  if (HasFP) {
    MachineBasicBlock::iterator AfterPush =
        HasFrameRecordArea ? std::next(FRPush) : std::next(GPRCS1Push);
    if (HasFrameRecordArea) {
      // We have just finished pushing the previous FP into the stack,
      // so simply capture the SP value as the new Frame Pointer.
      BuildMI(MBB, AfterPush, dl, TII.get(ARM::tMOVr), FramePtr)
          .addReg(ARM::SP)
          .setMIFlags(MachineInstr::FrameSetup)
          .add(predOps(ARMCC::AL));
    } else {
      FramePtrOffsetInBlock +=
          MFI.getObjectOffset(FramePtrSpillFI) + GPRCS1Size + ArgRegsSaveSize;
      BuildMI(MBB, AfterPush, dl, TII.get(ARM::tADDrSPi), FramePtr)
          .addReg(ARM::SP)
          .addImm(FramePtrOffsetInBlock / 4)
          .setMIFlags(MachineInstr::FrameSetup)
          .add(predOps(ARMCC::AL));
    }

    if (FramePtrOffsetInBlock) {
      unsigned CFIIndex = MF.addFrameInst(MCCFIInstruction::cfiDefCfa(
          nullptr, MRI->getDwarfRegNum(FramePtr, true),
          (CFAOffset - FramePtrOffsetInBlock)));
      BuildMI(MBB, AfterPush, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
          .addCFIIndex(CFIIndex)
          .setMIFlags(MachineInstr::FrameSetup);
    } else {
      unsigned CFIIndex =
          MF.addFrameInst(MCCFIInstruction::createDefCfaRegister(
              nullptr, MRI->getDwarfRegNum(FramePtr, true)));
      BuildMI(MBB, AfterPush, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
          .addCFIIndex(CFIIndex)
          .setMIFlags(MachineInstr::FrameSetup);
    }
    if (NumBytes > 508)
      // If the offset is > 508 then sp cannot be adjusted in a single
      // instruction, so try restoring from fp instead.
      AFI->setShouldRestoreSPFromFP(true);
  }
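
  // Illustrative shape of what has been emitted so far for a small frame with
  // a low frame pointer (r7) and no high-register saves; the register set and
  // offsets are only an example:
  //   push {r4, r5, r7, lr}     @ GPRCS1, includes the previous FP
  //   add  r7, sp, #8           @ tADDrSPi: FP -> slot holding the previous FP
  //   .cfi_def_cfa r7, 8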

  // Emit call frame information for the callee-saved low registers.
  if (GPRCS1Size > 0) {
    MachineBasicBlock::iterator Pos = std::next(GPRCS1Push);
    if (adjustedGPRCS1Size) {
      unsigned CFIIndex = MF.addFrameInst(
          MCCFIInstruction::cfiDefCfaOffset(nullptr, CFAOffset));
      BuildMI(MBB, Pos, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
          .addCFIIndex(CFIIndex)
          .setMIFlags(MachineInstr::FrameSetup);
    }
    for (const CalleeSavedInfo &I : CSI) {
      Register Reg = I.getReg();
      int FI = I.getFrameIdx();
      switch (Reg) {
      case ARM::R8:
      case ARM::R9:
      case ARM::R10:
      case ARM::R11:
      case ARM::R12:
        if (STI.splitFramePushPop(MF))
          break;
        [[fallthrough]];
      case ARM::R0:
      case ARM::R1:
      case ARM::R2:
      case ARM::R3:
      case ARM::R4:
      case ARM::R5:
      case ARM::R6:
      case ARM::R7:
      case ARM::LR:
        unsigned CFIIndex = MF.addFrameInst(MCCFIInstruction::createOffset(
            nullptr, MRI->getDwarfRegNum(Reg, true), MFI.getObjectOffset(FI)));
        BuildMI(MBB, Pos, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
            .addCFIIndex(CFIIndex)
            .setMIFlags(MachineInstr::FrameSetup);
        break;
      }
    }
  }

  // Emit call frame information for the callee-saved high registers.
  if (GPRCS2Size > 0) {
    MachineBasicBlock::iterator Pos = std::next(GPRCS2Push);
    for (auto &I : CSI) {
      Register Reg = I.getReg();
      int FI = I.getFrameIdx();
      switch (Reg) {
      case ARM::R8:
      case ARM::R9:
      case ARM::R10:
      case ARM::R11:
      case ARM::R12: {
        unsigned CFIIndex = MF.addFrameInst(MCCFIInstruction::createOffset(
            nullptr, MRI->getDwarfRegNum(Reg, true), MFI.getObjectOffset(FI)));
        BuildMI(MBB, Pos, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
            .addCFIIndex(CFIIndex)
            .setMIFlags(MachineInstr::FrameSetup);
        break;
      }
      default:
        break;
      }
    }
  }

  if (NumBytes) {
    // Insert it after all the callee-save spills.
    //
    // For a large stack frame, we might need a scratch register to store
    // the size of the frame. We know all callee-save registers are free
    // at this point in the prologue, so pick one.
    unsigned ScratchRegister = ARM::NoRegister;
    for (auto &I : CSI) {
      Register Reg = I.getReg();
      if (isARMLowRegister(Reg) && !(HasFP && Reg == FramePtr)) {
        ScratchRegister = Reg;
        break;
      }
    }
    emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo, -NumBytes,
                                 ScratchRegister, MachineInstr::FrameSetup);
    if (!HasFP) {
      CFAOffset += NumBytes;
      unsigned CFIIndex = MF.addFrameInst(
          MCCFIInstruction::cfiDefCfaOffset(nullptr, CFAOffset));
      BuildMI(MBB, MBBI, dl, TII.get(TargetOpcode::CFI_INSTRUCTION))
          .addCFIIndex(CFIIndex)
          .setMIFlags(MachineInstr::FrameSetup);
    }
  }

  if (STI.isTargetELF() && HasFP)
    MFI.setOffsetAdjustment(MFI.getOffsetAdjustment() -
                            AFI->getFramePtrSpillOffset());

  AFI->setGPRCalleeSavedArea1Size(GPRCS1Size);
  AFI->setGPRCalleeSavedArea2Size(GPRCS2Size);
  AFI->setDPRCalleeSavedAreaSize(DPRCSSize);

  if (RegInfo->hasStackRealignment(MF)) {
    const unsigned NrBitsToZero = Log2(MFI.getMaxAlign());
    // Emit the following sequence, using R4 as a temporary, since we cannot
    // use SP as a source or destination register for the shifts:
    //   mov  r4, sp
    //   lsrs r4, r4, #NrBitsToZero
    //   lsls r4, r4, #NrBitsToZero
    //   mov  sp, r4
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr), ARM::R4)
        .addReg(ARM::SP, RegState::Kill)
        .add(predOps(ARMCC::AL));
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tLSRri), ARM::R4)
        .addDef(ARM::CPSR)
        .addReg(ARM::R4, RegState::Kill)
        .addImm(NrBitsToZero)
        .add(predOps(ARMCC::AL));
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tLSLri), ARM::R4)
        .addDef(ARM::CPSR)
        .addReg(ARM::R4, RegState::Kill)
        .addImm(NrBitsToZero)
        .add(predOps(ARMCC::AL));
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr), ARM::SP)
        .addReg(ARM::R4, RegState::Kill)
        .add(predOps(ARMCC::AL));
    AFI->setShouldRestoreSPFromFP(true);
  }
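
  // Worked example: with MFI.getMaxAlign() == 16, NrBitsToZero is 4, so the
  // lsrs/lsls pair above clears the low four bits of the copied SP, rounding
  // it down to the next 16-byte boundary before it is moved back into SP.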

  // If we need a base pointer, set it up here. It's whatever the value
  // of the stack pointer is at this point. Any variable size objects
  // will be allocated after this, so we can still use the base pointer
  // to reference locals.
  if (RegInfo->hasBasePointer(MF))
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr), BasePtr)
        .addReg(ARM::SP)
        .add(predOps(ARMCC::AL));

  // If the frame has variable sized objects then the epilogue must restore
  // the sp from fp. We can assume there's an FP here since hasFP already
  // checks for hasVarSizedObjects.
  if (MFI.hasVarSizedObjects())
    AFI->setShouldRestoreSPFromFP(true);

  // In some cases, virtual registers have been introduced, e.g. by uses of
  // emitThumbRegPlusImmInReg.
  MF.getProperties().reset(MachineFunctionProperties::Property::NoVRegs);
}

void Thumb1FrameLowering::emitEpilogue(MachineFunction &MF,
                                       MachineBasicBlock &MBB) const {
  MachineBasicBlock::iterator MBBI = MBB.getFirstTerminator();
  DebugLoc dl = MBBI != MBB.end() ? MBBI->getDebugLoc() : DebugLoc();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  ARMFunctionInfo *AFI = MF.getInfo<ARMFunctionInfo>();
  const ThumbRegisterInfo *RegInfo =
      static_cast<const ThumbRegisterInfo *>(STI.getRegisterInfo());
  const Thumb1InstrInfo &TII =
      *static_cast<const Thumb1InstrInfo *>(STI.getInstrInfo());

  unsigned ArgRegsSaveSize = AFI->getArgRegsSaveSize();
  int NumBytes = (int)MFI.getStackSize();
  assert((unsigned)NumBytes >= ArgRegsSaveSize &&
         "ArgRegsSaveSize is included in NumBytes");
  Register FramePtr = RegInfo->getFrameRegister(MF);

  if (!AFI->hasStackFrame()) {
    if (NumBytes - ArgRegsSaveSize != 0)
      emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo,
                                   NumBytes - ArgRegsSaveSize, ARM::NoRegister,
                                   MachineInstr::FrameDestroy);
  } else {
    // Unwind MBBI to point to first LDR / VLDRD.
    if (MBBI != MBB.begin()) {
      do
        --MBBI;
      while (MBBI != MBB.begin() && MBBI->getFlag(MachineInstr::FrameDestroy));
      if (!MBBI->getFlag(MachineInstr::FrameDestroy))
        ++MBBI;
    }

    // Move SP to start of FP callee save spill area.
    NumBytes -= (AFI->getFrameRecordSavedAreaSize() +
                 AFI->getGPRCalleeSavedArea1Size() +
                 AFI->getGPRCalleeSavedArea2Size() +
                 AFI->getDPRCalleeSavedAreaSize() +
                 ArgRegsSaveSize);

    if (AFI->shouldRestoreSPFromFP()) {
      NumBytes = AFI->getFramePtrSpillOffset() - NumBytes;
      // Reset SP based on frame pointer only if the stack frame extends beyond
      // frame pointer stack slot, the target is ELF and the function has FP,
      // or the target uses var sized objects.
      if (NumBytes) {
        assert(!MFI.getPristineRegs(MF).test(ARM::R4) &&
               "No scratch register to restore SP from FP!");
        emitThumbRegPlusImmediate(MBB, MBBI, dl, ARM::R4, FramePtr, -NumBytes,
                                  TII, *RegInfo, MachineInstr::FrameDestroy);
        BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr), ARM::SP)
            .addReg(ARM::R4)
            .add(predOps(ARMCC::AL))
            .setMIFlag(MachineInstr::FrameDestroy);
      } else
        BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr), ARM::SP)
            .addReg(FramePtr)
            .add(predOps(ARMCC::AL))
            .setMIFlag(MachineInstr::FrameDestroy);
    } else {
      // For a large stack frame, we might need a scratch register to store
      // the size of the frame. We know all callee-save registers are free
      // at this point in the epilogue, so pick one.
      unsigned ScratchRegister = ARM::NoRegister;
      bool HasFP = hasFP(MF);
      for (auto &I : MFI.getCalleeSavedInfo()) {
        Register Reg = I.getReg();
        if (isARMLowRegister(Reg) && !(HasFP && Reg == FramePtr)) {
          ScratchRegister = Reg;
          break;
        }
      }
      if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tBX_RET &&
          &MBB.front() != &*MBBI && std::prev(MBBI)->getOpcode() == ARM::tPOP) {
        MachineBasicBlock::iterator PMBBI = std::prev(MBBI);
        if (!tryFoldSPUpdateIntoPushPop(STI, MF, &*PMBBI, NumBytes))
          emitPrologueEpilogueSPUpdate(MBB, PMBBI, TII, dl, *RegInfo, NumBytes,
                                       ScratchRegister,
                                       MachineInstr::FrameDestroy);
      } else if (!tryFoldSPUpdateIntoPushPop(STI, MF, &*MBBI, NumBytes))
        emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo, NumBytes,
                                     ScratchRegister,
                                     MachineInstr::FrameDestroy);
    }
  }

  if (needPopSpecialFixUp(MF)) {
    bool Done = emitPopSpecialFixUp(MBB, /* DoIt */ true);
    (void)Done;
    assert(Done && "Emission of the special fixup failed!?");
  }
}
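
// Illustrative epilogue for the common case (no high registers, no special
// LR fix-up needed); the exact registers depend on what was spilled:
//   add sp, #<locals>          @ or folded into the pop below
//   pop {r4, r5, r7, pc}       @ tPOP_RET, the saved LR returns via PC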

bool Thumb1FrameLowering::canUseAsEpilogue(const MachineBasicBlock &MBB) const {
  if (!needPopSpecialFixUp(*MBB.getParent()))
    return true;

  MachineBasicBlock *TmpMBB = const_cast<MachineBasicBlock *>(&MBB);
  return emitPopSpecialFixUp(*TmpMBB, /* DoIt */ false);
}

bool Thumb1FrameLowering::needPopSpecialFixUp(const MachineFunction &MF) const {
  ARMFunctionInfo *AFI =
      const_cast<MachineFunction *>(&MF)->getInfo<ARMFunctionInfo>();
  if (AFI->getArgRegsSaveSize())
    return true;

  // LR cannot be encoded as the destination of a Thumb1 POP, i.e., restoring
  // it requires a special fix-up.
  for (const CalleeSavedInfo &CSI : MF.getFrameInfo().getCalleeSavedInfo())
    if (CSI.getReg() == ARM::LR)
      return true;

  return false;
}
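
// Background for the fix-up implemented below (a sketch only): a Thumb1 POP
// can write PC but not LR, so when LR has to be materialised without
// returning immediately (for example when an argument-register save area
// must still be freed), the epilogue goes through a spare low register,
// roughly:
//   pop  {r3}                  @ r3 = saved LR value (register is an example)
//   add  sp, #<ArgRegsSaveSize>
//   mov  lr, r3
//   bx   lr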

static void findTemporariesForLR(const BitVector &GPRsNoLRSP,
                                 const BitVector &PopFriendly,
                                 const LivePhysRegs &UsedRegs, unsigned &PopReg,
                                 unsigned &TmpReg, MachineRegisterInfo &MRI) {
  PopReg = TmpReg = 0;
  for (auto Reg : GPRsNoLRSP.set_bits()) {
    if (UsedRegs.available(MRI, Reg)) {
      // Remember the first pop-friendly register and exit.
      if (PopFriendly.test(Reg)) {
        PopReg = Reg;
        TmpReg = 0;
        break;
      }
      // Otherwise, remember that the register will be available to
      // save a pop-friendly register.
      TmpReg = Reg;
    }
  }
}

bool Thumb1FrameLowering::emitPopSpecialFixUp(MachineBasicBlock &MBB,
                                              bool DoIt) const {
  MachineFunction &MF = *MBB.getParent();
  ARMFunctionInfo *AFI = MF.getInfo<ARMFunctionInfo>();
  unsigned ArgRegsSaveSize = AFI->getArgRegsSaveSize();
  const TargetInstrInfo &TII = *STI.getInstrInfo();
  const ThumbRegisterInfo *RegInfo =
      static_cast<const ThumbRegisterInfo *>(STI.getRegisterInfo());

  // If MBBI is a return instruction, or is a tPOP followed by a return
  // instruction in the successor BB, we may be able to directly restore
  // LR in the PC.
  // This is only possible with v5T ops (v4T can't change the Thumb bit via
  // a POP PC instruction), and only if we do not need to emit any SP update.
  // Otherwise, we need a temporary register to pop the value
  // and copy that value into LR.
  auto MBBI = MBB.getFirstTerminator();
  bool CanRestoreDirectly = STI.hasV5TOps() && !ArgRegsSaveSize;
  if (CanRestoreDirectly) {
    if (MBBI != MBB.end() && MBBI->getOpcode() != ARM::tB)
      CanRestoreDirectly = (MBBI->getOpcode() == ARM::tBX_RET ||
                            MBBI->getOpcode() == ARM::tPOP_RET);
    else {
      auto MBBI_prev = MBBI;
      MBBI_prev--;
      assert(MBBI_prev->getOpcode() == ARM::tPOP);
      assert(MBB.succ_size() == 1);
      if ((*MBB.succ_begin())->begin()->getOpcode() == ARM::tBX_RET)
        MBBI = MBBI_prev; // Replace the final tPOP with a tPOP_RET.
      else
        CanRestoreDirectly = false;
    }
  }

  if (CanRestoreDirectly) {
    if (!DoIt || MBBI->getOpcode() == ARM::tPOP_RET)
      return true;
    MachineInstrBuilder MIB =
        BuildMI(MBB, MBBI, MBBI->getDebugLoc(), TII.get(ARM::tPOP_RET))
            .add(predOps(ARMCC::AL))
            .setMIFlag(MachineInstr::FrameDestroy);
    // Copy implicit ops and popped registers, if any.
    for (auto MO : MBBI->operands())
      if (MO.isReg() && (MO.isImplicit() || MO.isDef()))
        MIB.add(MO);
    MIB.addReg(ARM::PC, RegState::Define);
    // Erase the old instruction (tBX_RET or tPOP).
    MBB.erase(MBBI);
    return true;
  }

  // Look for a temporary register to use.
  // First, compute the liveness information.
  const TargetRegisterInfo &TRI = *STI.getRegisterInfo();
  LivePhysRegs UsedRegs(TRI);
  UsedRegs.addLiveOuts(MBB);
  // The semantics of pristine registers changed recently: callee-saved
  // registers that are touched in the function are no longer part of the
  // pristine set. Add those callee-saved registers now.
  const MCPhysReg *CSRegs = TRI.getCalleeSavedRegs(&MF);
  for (unsigned i = 0; CSRegs[i]; ++i)
    UsedRegs.addReg(CSRegs[i]);

  DebugLoc dl = DebugLoc();
  if (MBBI != MBB.end()) {
    dl = MBBI->getDebugLoc();
    auto InstUpToMBBI = MBB.end();
    while (InstUpToMBBI != MBBI)
      // The pre-decrement is on purpose here.
      // We want to have the liveness right before MBBI.
      UsedRegs.stepBackward(*--InstUpToMBBI);
  }

  // Look for a register that can be directly used in the POP.
  unsigned PopReg = 0;
  // And some temporary register, just in case.
  unsigned TemporaryReg = 0;
  BitVector PopFriendly =
      TRI.getAllocatableSet(MF, TRI.getRegClass(ARM::tGPRRegClassID));
  // R7 may be used as a frame pointer, and hence marked as not generally
  // allocatable, but there's no reason not to use it as a temporary for
  // restoring LR.
  if (STI.getFramePointerReg() == ARM::R7)
    PopFriendly.set(ARM::R7);

  assert(PopFriendly.any() && "No allocatable pop-friendly register?!");
  // Rebuild the GPRs from the high registers because they are removed
  // from the GPR reg class for Thumb1.
  BitVector GPRsNoLRSP =
      TRI.getAllocatableSet(MF, TRI.getRegClass(ARM::hGPRRegClassID));
  GPRsNoLRSP |= PopFriendly;
  GPRsNoLRSP.reset(ARM::LR);
  GPRsNoLRSP.reset(ARM::SP);
  GPRsNoLRSP.reset(ARM::PC);
  findTemporariesForLR(GPRsNoLRSP, PopFriendly, UsedRegs, PopReg, TemporaryReg,
                       MF.getRegInfo());

  // If we couldn't find a pop-friendly register, try restoring LR before
  // popping the other callee-saved registers, so we can use one of them as a
  // temporary.
  bool UseLDRSP = false;
  if (!PopReg && MBBI != MBB.begin()) {
    auto PrevMBBI = MBBI;
    PrevMBBI--;
    if (PrevMBBI->getOpcode() == ARM::tPOP) {
      UsedRegs.stepBackward(*PrevMBBI);
      findTemporariesForLR(GPRsNoLRSP, PopFriendly, UsedRegs, PopReg,
                           TemporaryReg, MF.getRegInfo());
      if (PopReg) {
        MBBI = PrevMBBI;
        UseLDRSP = true;
      }
    }
  }

  if (!DoIt && !PopReg && !TemporaryReg)
    return false;

  assert((PopReg || TemporaryReg) && "Cannot get LR");

  if (UseLDRSP) {
    assert(PopReg && "Do not know how to get LR");
    // Load the LR via LDR tmp, [SP, #off]
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tLDRspi))
        .addReg(PopReg, RegState::Define)
        .addReg(ARM::SP)
        .addImm(MBBI->getNumExplicitOperands() - 2)
        .add(predOps(ARMCC::AL))
        .setMIFlag(MachineInstr::FrameDestroy);
    // Move from the temporary register to the LR.
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr))
        .addReg(ARM::LR, RegState::Define)
        .addReg(PopReg, RegState::Kill)
        .add(predOps(ARMCC::AL))
        .setMIFlag(MachineInstr::FrameDestroy);
    // Advance past the pop instruction.
    MBBI++;
    // Increment the SP.
    emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo,
                                 ArgRegsSaveSize + 4, ARM::NoRegister,
                                 MachineInstr::FrameDestroy);
    return true;
  }

  if (TemporaryReg) {
    assert(!PopReg && "Unnecessary MOV is about to be inserted");
    PopReg = PopFriendly.find_first();
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr))
        .addReg(TemporaryReg, RegState::Define)
        .addReg(PopReg, RegState::Kill)
        .add(predOps(ARMCC::AL))
        .setMIFlag(MachineInstr::FrameDestroy);
  }

  if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tPOP_RET) {
    // We couldn't use the direct restoration above, so
    // perform the opposite conversion: tPOP_RET to tPOP.
    MachineInstrBuilder MIB =
        BuildMI(MBB, MBBI, MBBI->getDebugLoc(), TII.get(ARM::tPOP))
            .add(predOps(ARMCC::AL))
            .setMIFlag(MachineInstr::FrameDestroy);
    bool Popped = false;
    for (auto MO : MBBI->operands())
      if (MO.isReg() && (MO.isImplicit() || MO.isDef()) &&
          MO.getReg() != ARM::PC) {
        MIB.add(MO);
        if (!MO.isImplicit())
          Popped = true;
      }
    // Is there anything left to pop?
    if (!Popped)
      MBB.erase(MIB.getInstr());
    // Erase the old instruction.
    MBB.erase(MBBI);
    MBBI = BuildMI(MBB, MBB.end(), dl, TII.get(ARM::tBX_RET))
               .add(predOps(ARMCC::AL))
               .setMIFlag(MachineInstr::FrameDestroy);
  }

  assert(PopReg && "Do not know how to get LR");
  BuildMI(MBB, MBBI, dl, TII.get(ARM::tPOP))
      .add(predOps(ARMCC::AL))
      .addReg(PopReg, RegState::Define)
      .setMIFlag(MachineInstr::FrameDestroy);

  emitPrologueEpilogueSPUpdate(MBB, MBBI, TII, dl, *RegInfo, ArgRegsSaveSize,
                               ARM::NoRegister, MachineInstr::FrameDestroy);

  BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr))
      .addReg(ARM::LR, RegState::Define)
      .addReg(PopReg, RegState::Kill)
      .add(predOps(ARMCC::AL))
      .setMIFlag(MachineInstr::FrameDestroy);

  if (TemporaryReg)
    BuildMI(MBB, MBBI, dl, TII.get(ARM::tMOVr))
        .addReg(PopReg, RegState::Define)
        .addReg(TemporaryReg, RegState::Kill)
        .add(predOps(ARMCC::AL))
        .setMIFlag(MachineInstr::FrameDestroy);

  return true;
}

static const SmallVector<Register> OrderedLowRegs = {ARM::R4, ARM::R5, ARM::R6,
                                                     ARM::R7, ARM::LR};
static const SmallVector<Register> OrderedHighRegs = {ARM::R8, ARM::R9,
                                                      ARM::R10, ARM::R11};
static const SmallVector<Register> OrderedCopyRegs = {
    ARM::R0, ARM::R1, ARM::R2, ARM::R3, ARM::R4,
    ARM::R5, ARM::R6, ARM::R7, ARM::LR};

static void splitLowAndHighRegs(const std::set<Register> &Regs,
                                std::set<Register> &LowRegs,
                                std::set<Register> &HighRegs) {
  for (Register Reg : Regs) {
    if (ARM::tGPRRegClass.contains(Reg) || Reg == ARM::LR) {
      LowRegs.insert(Reg);
    } else if (ARM::hGPRRegClass.contains(Reg) && Reg != ARM::LR) {
      HighRegs.insert(Reg);
    } else {
      llvm_unreachable("callee-saved register of unexpected class");
    }
  }
}

template <typename It>
It getNextOrderedReg(It OrderedStartIt, It OrderedEndIt,
                     const std::set<Register> &RegSet) {
  return std::find_if(OrderedStartIt, OrderedEndIt,
                      [&](Register Reg) { return RegSet.count(Reg); });
}
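
// Usage note: calling getNextOrderedReg with rbegin()/rend() of one of the
// ordered lists above yields the highest-numbered matching register first,
// which the push path below relies on; the forward iterators give ascending
// order for the pop path.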

static void pushRegsToStack(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MI,
                            const TargetInstrInfo &TII,
                            const std::set<Register> &RegsToSave,
                            const std::set<Register> &CopyRegs) {
  MachineFunction &MF = *MBB.getParent();
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  DebugLoc DL;

  std::set<Register> LowRegs, HighRegs;
  splitLowAndHighRegs(RegsToSave, LowRegs, HighRegs);

  // Push low regs first
  if (!LowRegs.empty()) {
    MachineInstrBuilder MIB =
        BuildMI(MBB, MI, DL, TII.get(ARM::tPUSH)).add(predOps(ARMCC::AL));
    for (unsigned Reg : OrderedLowRegs) {
      if (LowRegs.count(Reg)) {
        bool isKill = !MRI.isLiveIn(Reg);
        if (isKill && !MRI.isReserved(Reg))
          MBB.addLiveIn(Reg);
        MIB.addReg(Reg, getKillRegState(isKill));
      }
    }
    MIB.setMIFlags(MachineInstr::FrameSetup);
  }

  // Now push the high registers.
  // There are no store instructions that can access high registers directly,
  // so we have to move them to low registers, and push them.
  // This might take multiple pushes, as it is possible for there to
  // be fewer low registers available than high registers which need saving.

  // Find the first register to save.
  // Registers must be processed in reverse order so that in case we need to
  // use multiple PUSH instructions, the order of the registers on the stack
  // still matches the unwind info. They need to be switched back to ascending
  // order before adding to the PUSH instruction.
  auto HiRegToSave = getNextOrderedReg(OrderedHighRegs.rbegin(),
                                       OrderedHighRegs.rend(),
                                       HighRegs);

  while (HiRegToSave != OrderedHighRegs.rend()) {
    // Find the first low register to use.
    auto CopyRegIt = getNextOrderedReg(OrderedCopyRegs.rbegin(),
                                       OrderedCopyRegs.rend(),
                                       CopyRegs);

    // Create the PUSH, but don't insert it yet (the MOVs need to come first).
    MachineInstrBuilder PushMIB = BuildMI(MF, DL, TII.get(ARM::tPUSH))
                                      .add(predOps(ARMCC::AL))
                                      .setMIFlags(MachineInstr::FrameSetup);

    SmallVector<unsigned, 4> RegsToPush;
    while (HiRegToSave != OrderedHighRegs.rend() &&
           CopyRegIt != OrderedCopyRegs.rend()) {
      if (HighRegs.count(*HiRegToSave)) {
        bool isKill = !MRI.isLiveIn(*HiRegToSave);
        if (isKill && !MRI.isReserved(*HiRegToSave))
          MBB.addLiveIn(*HiRegToSave);

        // Emit a MOV from the high reg to the low reg.
        BuildMI(MBB, MI, DL, TII.get(ARM::tMOVr))
            .addReg(*CopyRegIt, RegState::Define)
            .addReg(*HiRegToSave, getKillRegState(isKill))
            .add(predOps(ARMCC::AL))
            .setMIFlags(MachineInstr::FrameSetup);

        // Record the register that must be added to the PUSH.
        RegsToPush.push_back(*CopyRegIt);

        CopyRegIt = getNextOrderedReg(std::next(CopyRegIt),
                                      OrderedCopyRegs.rend(),
                                      CopyRegs);
        HiRegToSave = getNextOrderedReg(std::next(HiRegToSave),
                                        OrderedHighRegs.rend(),
                                        HighRegs);
      }
    }

    // Add the low registers to the PUSH, in ascending order.
    for (unsigned Reg : llvm::reverse(RegsToPush))
      PushMIB.addReg(Reg, RegState::Kill);

    // Insert the PUSH instruction after the MOVs.
    MBB.insert(MI, PushMIB);
  }
}
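
// For illustration, pushing {r8-r11} when only r4 and r5 are usable as copy
// registers produces two groups, which keeps the stack image in ascending
// order (r8 ending up at the lowest address):
//   mov r5, r11 ; mov r4, r10 ; push {r4, r5}
//   mov r5, r9  ; mov r4, r8  ; push {r4, r5}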

static void popRegsFromStack(MachineBasicBlock &MBB,
                             MachineBasicBlock::iterator &MI,
                             const TargetInstrInfo &TII,
                             const std::set<Register> &RegsToRestore,
                             const std::set<Register> &AvailableCopyRegs,
                             bool IsVarArg, bool HasV5Ops) {
  if (RegsToRestore.empty())
    return;

  MachineFunction &MF = *MBB.getParent();
  ARMFunctionInfo *AFI = MF.getInfo<ARMFunctionInfo>();
  DebugLoc DL = MI != MBB.end() ? MI->getDebugLoc() : DebugLoc();

  std::set<Register> LowRegs, HighRegs;
  splitLowAndHighRegs(RegsToRestore, LowRegs, HighRegs);

  // Pop the high registers first.
  // There are no pop instructions that can write high registers directly,
  // so we have to pop into low registers and then move them to the high
  // registers. This might take multiple pops, as it is possible for there
  // to be fewer low registers available than high registers which need
  // restoring.

  // Find the first register to restore.
  auto HiRegToRestore = getNextOrderedReg(OrderedHighRegs.begin(),
                                          OrderedHighRegs.end(),
                                          HighRegs);

  std::set<Register> CopyRegs = AvailableCopyRegs;
  Register LowScratchReg;
  if (!HighRegs.empty() && CopyRegs.empty()) {
    // No copy regs are available to pop high regs. Let's make use of a return
    // register and the scratch register (IP/R12) to copy things around.
    LowScratchReg = ARM::R0;
    BuildMI(MBB, MI, DL, TII.get(ARM::tMOVr))
        .addReg(ARM::R12, RegState::Define)
        .addReg(LowScratchReg, RegState::Kill)
        .add(predOps(ARMCC::AL))
        .setMIFlag(MachineInstr::FrameDestroy);
    CopyRegs.insert(LowScratchReg);
  }

  while (HiRegToRestore != OrderedHighRegs.end()) {
    assert(!CopyRegs.empty());
    // Find the first low register to use.
    auto CopyReg = getNextOrderedReg(OrderedCopyRegs.begin(),
                                     OrderedCopyRegs.end(),
                                     CopyRegs);

    // Create the POP instruction.
    MachineInstrBuilder PopMIB = BuildMI(MBB, MI, DL, TII.get(ARM::tPOP))
                                     .add(predOps(ARMCC::AL))
                                     .setMIFlag(MachineInstr::FrameDestroy);

    while (HiRegToRestore != OrderedHighRegs.end() &&
           CopyReg != OrderedCopyRegs.end()) {
      // Add the low register to the POP.
      PopMIB.addReg(*CopyReg, RegState::Define);

      // Create the MOV from low to high register.
      BuildMI(MBB, MI, DL, TII.get(ARM::tMOVr))
          .addReg(*HiRegToRestore, RegState::Define)
          .addReg(*CopyReg, RegState::Kill)
          .add(predOps(ARMCC::AL))
          .setMIFlag(MachineInstr::FrameDestroy);

      CopyReg = getNextOrderedReg(std::next(CopyReg),
                                  OrderedCopyRegs.end(),
                                  CopyRegs);
      HiRegToRestore = getNextOrderedReg(std::next(HiRegToRestore),
                                         OrderedHighRegs.end(),
                                         HighRegs);
    }
  }

  // Restore the low register used as scratch if necessary.
  if (LowScratchReg.isValid()) {
    BuildMI(MBB, MI, DL, TII.get(ARM::tMOVr))
        .addReg(LowScratchReg, RegState::Define)
        .addReg(ARM::R12, RegState::Kill)
        .add(predOps(ARMCC::AL))
        .setMIFlag(MachineInstr::FrameDestroy);
  }

  // Now pop the low registers.
  if (!LowRegs.empty()) {
    MachineInstrBuilder MIB = BuildMI(MF, DL, TII.get(ARM::tPOP))
                                  .add(predOps(ARMCC::AL))
                                  .setMIFlag(MachineInstr::FrameDestroy);

    bool NeedsPop = false;
    for (Register Reg : OrderedLowRegs) {
      if (!LowRegs.count(Reg))
        continue;

      if (Reg == ARM::LR) {
        if (!MBB.succ_empty() ||
            MI->getOpcode() == ARM::TCRETURNdi ||
            MI->getOpcode() == ARM::TCRETURNri)
          // LR may only be popped into PC, as part of return sequence.
          // If this isn't the return sequence, we'll need emitPopSpecialFixUp
          // to restore LR the hard way.
          // FIXME: if we don't pass any stack arguments it would be actually
          // advantageous *and* correct to do the conversion to an ordinary
          // call instruction here.
          continue;
        // Special epilogue for vararg functions. See emitEpilogue
        if (IsVarArg)
          continue;
        // ARMv4T requires BX, see emitEpilogue
        if (!HasV5Ops)
          continue;
        // CMSE entry functions must return via BXNS, see emitEpilogue.
        if (AFI->isCmseNSEntryFunction())
          continue;
        // Pop LR into PC.
        Reg = ARM::PC;
        (*MIB).setDesc(TII.get(ARM::tPOP_RET));
        if (MI != MBB.end())
          MIB.copyImplicitOps(*MI);
        MI = MBB.erase(MI);
      }
      MIB.addReg(Reg, getDefRegState(true));
      NeedsPop = true;
    }

    // It's illegal to emit a pop instruction without operands.
    if (NeedsPop)
      MBB.insert(MI, &*MIB);
    else
      MF.deleteMachineInstr(MIB);
  }
}
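
// For illustration, restoring {r8, r9} when no copy register is free: the
// fallback above parks r0 in r12, pops through r0, then puts r0 back:
//   mov r12, r0
//   pop {r0}  ;  mov r8, r0
//   pop {r0}  ;  mov r9, r0
//   mov r0, r12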

bool Thumb1FrameLowering::spillCalleeSavedRegisters(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
    ArrayRef<CalleeSavedInfo> CSI, const TargetRegisterInfo *TRI) const {
  if (CSI.empty())
    return false;

  const TargetInstrInfo &TII = *STI.getInstrInfo();
  MachineFunction &MF = *MBB.getParent();
  const ARMBaseRegisterInfo *RegInfo = static_cast<const ARMBaseRegisterInfo *>(
      MF.getSubtarget().getRegisterInfo());
  Register FPReg = RegInfo->getFrameRegister(MF);

  // In case FP is a high reg, we need a separate push sequence to generate
  // a correct Frame Record.
  bool NeedsFrameRecordPush = hasFP(MF) && ARM::hGPRRegClass.contains(FPReg);

  std::set<Register> FrameRecord;
  std::set<Register> SpilledGPRs;
  for (const CalleeSavedInfo &I : CSI) {
    Register Reg = I.getReg();
    if (NeedsFrameRecordPush && (Reg == FPReg || Reg == ARM::LR))
      FrameRecord.insert(Reg);
    else
      SpilledGPRs.insert(Reg);
  }

  pushRegsToStack(MBB, MI, TII, FrameRecord, {ARM::LR});

  // Determine intermediate registers which can be used for pushing high regs:
  // - Spilled low regs
  // - Unused argument registers
  std::set<Register> CopyRegs;
  for (Register Reg : SpilledGPRs)
    if ((ARM::tGPRRegClass.contains(Reg) || Reg == ARM::LR) &&
        !MF.getRegInfo().isLiveIn(Reg) && !(hasFP(MF) && Reg == FPReg))
      CopyRegs.insert(Reg);
  for (unsigned ArgReg : {ARM::R0, ARM::R1, ARM::R2, ARM::R3})
    if (!MF.getRegInfo().isLiveIn(ArgReg))
      CopyRegs.insert(ArgReg);

  pushRegsToStack(MBB, MI, TII, SpilledGPRs, CopyRegs);

  return true;
}
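
// When the frame pointer is a high register (for example r11), the
// FrameRecord set above is pushed first with LR as the only copy register,
// producing exactly the sequence the prologue later skips over:
//   push {lr}
//   mov  lr, r11
//   push {lr}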

bool Thumb1FrameLowering::restoreCalleeSavedRegisters(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
    MutableArrayRef<CalleeSavedInfo> CSI, const TargetRegisterInfo *TRI) const {
  if (CSI.empty())
    return false;

  MachineFunction &MF = *MBB.getParent();
  ARMFunctionInfo *AFI = MF.getInfo<ARMFunctionInfo>();
  const TargetInstrInfo &TII = *STI.getInstrInfo();
  const ARMBaseRegisterInfo *RegInfo = static_cast<const ARMBaseRegisterInfo *>(
      MF.getSubtarget().getRegisterInfo());

  bool IsVarArg = AFI->getArgRegsSaveSize() > 0;
  Register FPReg = RegInfo->getFrameRegister(MF);

  // In case FP is a high reg, we need a separate pop sequence to generate
  // a correct Frame Record.
  bool NeedsFrameRecordPop = hasFP(MF) && ARM::hGPRRegClass.contains(FPReg);

  std::set<Register> FrameRecord;
  std::set<Register> SpilledGPRs;
  for (CalleeSavedInfo &I : CSI) {
    Register Reg = I.getReg();
    if (NeedsFrameRecordPop && (Reg == FPReg || Reg == ARM::LR))
      FrameRecord.insert(Reg);
    else
      SpilledGPRs.insert(Reg);

    if (Reg == ARM::LR)
      I.setRestored(false);
  }

  // Determine intermediate registers which can be used for popping high regs:
  // - Spilled low regs
  // - Unused return registers
  std::set<Register> CopyRegs;
  std::set<Register> UnusedReturnRegs;
  for (Register Reg : SpilledGPRs)
    if ((ARM::tGPRRegClass.contains(Reg)) && !(hasFP(MF) && Reg == FPReg))
      CopyRegs.insert(Reg);
  auto Terminator = MBB.getFirstTerminator();
  if (Terminator != MBB.end() && Terminator->getOpcode() == ARM::tBX_RET) {
    UnusedReturnRegs.insert(ARM::R0);
    UnusedReturnRegs.insert(ARM::R1);
    UnusedReturnRegs.insert(ARM::R2);
    UnusedReturnRegs.insert(ARM::R3);
    for (auto Op : Terminator->implicit_operands()) {
      if (Op.isReg())
        UnusedReturnRegs.erase(Op.getReg());
    }
  }
  CopyRegs.insert(UnusedReturnRegs.begin(), UnusedReturnRegs.end());

  // First pop regular spilled regs.
  popRegsFromStack(MBB, MI, TII, SpilledGPRs, CopyRegs, IsVarArg,
                   STI.hasV5TOps());

  // LR may only be popped into pc, as part of a return sequence.
  // Check that no other pop instructions are inserted after that.
  assert((!SpilledGPRs.count(ARM::LR) || FrameRecord.empty()) &&
         "Can't insert pop after return sequence");

  // Now pop Frame Record regs.
  // Only unused return registers can be used as copy regs at this point.
  popRegsFromStack(MBB, MI, TII, FrameRecord, UnusedReturnRegs, IsVarArg,
                   STI.hasV5TOps());

  return true;
}