//===- MachineFunction.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Collect native machine code information for a function. This allows
// target-specific information about the generated code to be stored with each
// function.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/WasmEHFuncInfo.h"
#include "llvm/CodeGen/WinEHFuncInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSlotTracker.h"
#include "llvm/IR/Value.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCSymbol.h"
#include "llvm/MC/SectionKind.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/DOTGraphTraits.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GraphWriter.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <string>
#include <type_traits>
#include <utility>
#include <vector>
#include "LiveDebugValues/LiveDebugValues.h"

using namespace llvm;

#define DEBUG_TYPE "codegen"

static cl::opt<unsigned> AlignAllFunctions(
    "align-all-functions",
    cl::desc("Force the alignment of all functions in log2 format (e.g. 4 "
             "means align on 16B boundaries)."),
    cl::init(0), cl::Hidden);
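
// For example, `llc -align-all-functions=4` forces every emitted function to
// start on a 16-byte boundary, since the value is interpreted as log2 of the
// requested alignment.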

static const char *getPropertyName(MachineFunctionProperties::Property Prop) {
  using P = MachineFunctionProperties::Property;
  // clang-format off
  switch(Prop) {
  case P::FailedISel: return "FailedISel";
  case P::IsSSA: return "IsSSA";
  case P::Legalized: return "Legalized";
  case P::NoPHIs: return "NoPHIs";
  case P::NoVRegs: return "NoVRegs";
  case P::RegBankSelected: return "RegBankSelected";
  case P::Selected: return "Selected";
  case P::TracksLiveness: return "TracksLiveness";
  case P::TiedOpsRewritten: return "TiedOpsRewritten";
  case P::FailsVerification: return "FailsVerification";
  case P::TracksDebugUserValues: return "TracksDebugUserValues";
  }
  // clang-format on
  llvm_unreachable("Invalid machine function property");
}

// Pin the vtable to this file.
void MachineFunction::Delegate::anchor() {}

void MachineFunctionProperties::print(raw_ostream &OS) const {
  const char *Separator = "";
  for (BitVector::size_type I = 0; I < Properties.size(); ++I) {
    if (!Properties[I])
      continue;
    OS << Separator << getPropertyName(static_cast<Property>(I));
    Separator = ", ";
  }
}

//===----------------------------------------------------------------------===//
// MachineFunction implementation
//===----------------------------------------------------------------------===//

// Out-of-line virtual method.
MachineFunctionInfo::~MachineFunctionInfo() = default;

void ilist_alloc_traits<MachineBasicBlock>::deleteNode(MachineBasicBlock *MBB) {
  MBB->getParent()->deleteMachineBasicBlock(MBB);
}

static inline unsigned getFnStackAlignment(const TargetSubtargetInfo *STI,
                                           const Function &F) {
  if (auto MA = F.getFnStackAlign())
    return MA->value();
  return STI->getFrameLowering()->getStackAlign().value();
}

MachineFunction::MachineFunction(Function &F, const LLVMTargetMachine &Target,
                                 const TargetSubtargetInfo &STI,
                                 unsigned FunctionNum, MachineModuleInfo &mmi)
    : F(F), Target(Target), STI(&STI), Ctx(mmi.getContext()), MMI(mmi) {
  FunctionNumber = FunctionNum;
  init();
}

void MachineFunction::handleInsertion(MachineInstr &MI) {
  if (TheDelegate)
    TheDelegate->MF_HandleInsertion(MI);
}

void MachineFunction::handleRemoval(MachineInstr &MI) {
  if (TheDelegate)
    TheDelegate->MF_HandleRemoval(MI);
}

void MachineFunction::init() {
  // Assume the function starts in SSA form with correct liveness.
  Properties.set(MachineFunctionProperties::Property::IsSSA);
  Properties.set(MachineFunctionProperties::Property::TracksLiveness);
  if (STI->getRegisterInfo())
    RegInfo = new (Allocator) MachineRegisterInfo(this);
  else
    RegInfo = nullptr;

  MFInfo = nullptr;

  // We can realign the stack if the target supports it and the user hasn't
  // explicitly asked us not to.
  bool CanRealignSP = STI->getFrameLowering()->isStackRealignable() &&
                      !F.hasFnAttribute("no-realign-stack");
  FrameInfo = new (Allocator) MachineFrameInfo(
      getFnStackAlignment(STI, F), /*StackRealignable=*/CanRealignSP,
      /*ForcedRealign=*/CanRealignSP &&
          F.hasFnAttribute(Attribute::StackAlignment));

  if (F.hasFnAttribute(Attribute::StackAlignment))
    FrameInfo->ensureMaxAlignment(*F.getFnStackAlign());

  ConstantPool = new (Allocator) MachineConstantPool(getDataLayout());
  Alignment = STI->getTargetLowering()->getMinFunctionAlignment();

  // FIXME: Shouldn't use pref alignment if explicit alignment is set on F.
  // FIXME: Use Function::hasOptSize().
  if (!F.hasFnAttribute(Attribute::OptimizeForSize))
    Alignment = std::max(Alignment,
                         STI->getTargetLowering()->getPrefFunctionAlignment());

  if (AlignAllFunctions)
    Alignment = Align(1ULL << AlignAllFunctions);

  JumpTableInfo = nullptr;

  if (isFuncletEHPersonality(classifyEHPersonality(
          F.hasPersonalityFn() ? F.getPersonalityFn() : nullptr))) {
    WinEHInfo = new (Allocator) WinEHFuncInfo();
  }

  if (isScopedEHPersonality(classifyEHPersonality(
          F.hasPersonalityFn() ? F.getPersonalityFn() : nullptr))) {
    WasmEHInfo = new (Allocator) WasmEHFuncInfo();
  }

  assert(Target.isCompatibleDataLayout(getDataLayout()) &&
         "Can't create a MachineFunction using a Module with a "
         "Target-incompatible DataLayout attached\n");

  PSVManager = std::make_unique<PseudoSourceValueManager>(
      *(getSubtarget().getInstrInfo()));
}

MachineFunction::~MachineFunction() {
  clear();
}

void MachineFunction::clear() {
  Properties.reset();
  // Don't call destructors on MachineInstr and MachineOperand. All of their
  // memory comes from the BumpPtrAllocator which is about to be purged.
  //
  // Do call MachineBasicBlock destructors; they contain std::vectors.
  for (iterator I = begin(), E = end(); I != E; I = BasicBlocks.erase(I))
    I->Insts.clearAndLeakNodesUnsafely();
  MBBNumbering.clear();

  InstructionRecycler.clear(Allocator);
  OperandRecycler.clear(Allocator);
  BasicBlockRecycler.clear(Allocator);
  CodeViewAnnotations.clear();
  VariableDbgInfos.clear();
  if (RegInfo) {
    RegInfo->~MachineRegisterInfo();
    Allocator.Deallocate(RegInfo);
  }
  if (MFInfo) {
    MFInfo->~MachineFunctionInfo();
    Allocator.Deallocate(MFInfo);
  }

  FrameInfo->~MachineFrameInfo();
  Allocator.Deallocate(FrameInfo);

  ConstantPool->~MachineConstantPool();
  Allocator.Deallocate(ConstantPool);

  if (JumpTableInfo) {
    JumpTableInfo->~MachineJumpTableInfo();
    Allocator.Deallocate(JumpTableInfo);
  }

  if (WinEHInfo) {
    WinEHInfo->~WinEHFuncInfo();
    Allocator.Deallocate(WinEHInfo);
  }

  if (WasmEHInfo) {
    WasmEHInfo->~WasmEHFuncInfo();
    Allocator.Deallocate(WasmEHInfo);
  }
}

const DataLayout &MachineFunction::getDataLayout() const {
  return F.getParent()->getDataLayout();
}

/// Get the JumpTableInfo for this function.
/// If it does not already exist, allocate one.
MachineJumpTableInfo *MachineFunction::
getOrCreateJumpTableInfo(unsigned EntryKind) {
  if (JumpTableInfo) return JumpTableInfo;

  JumpTableInfo = new (Allocator)
      MachineJumpTableInfo((MachineJumpTableInfo::JTEntryKind)EntryKind);
  return JumpTableInfo;
}
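
// Illustrative use during instruction selection (DestBBs is a hypothetical
// vector of case-target blocks):
//   MachineJumpTableInfo *JTI = MF.getOrCreateJumpTableInfo(
//       MachineJumpTableInfo::EK_LabelDifference32);
//   unsigned JTIdx = JTI->createJumpTableIndex(DestBBs);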

DenormalMode MachineFunction::getDenormalMode(const fltSemantics &FPType) const {
  return F.getDenormalMode(FPType);
}

/// Should we emit segmented-stack code for this function?
bool MachineFunction::shouldSplitStack() const {
  return getFunction().hasFnAttribute("split-stack");
}

LLVM_NODISCARD unsigned
MachineFunction::addFrameInst(const MCCFIInstruction &Inst) {
  FrameInstructions.push_back(Inst);
  return FrameInstructions.size() - 1;
}
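
// A sketch of how targets typically record CFI, assuming MF, MBB, MBBI, DL
// and TII are in scope (illustrative only):
//   unsigned CFIIndex =
//       MF.addFrameInst(MCCFIInstruction::cfiDefCfaOffset(nullptr, 16));
//   BuildMI(MBB, MBBI, DL, TII.get(TargetOpcode::CFI_INSTRUCTION))
//       .addCFIIndex(CFIIndex);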

/// This discards all of the MachineBasicBlock numbers and recomputes them.
/// This guarantees that the MBB numbers are sequential, dense, and match the
/// ordering of the blocks within the function. If a specific MachineBasicBlock
/// is specified, only that block and those after it are renumbered.
void MachineFunction::RenumberBlocks(MachineBasicBlock *MBB) {
  if (empty()) { MBBNumbering.clear(); return; }

  MachineFunction::iterator MBBI, E = end();
  if (MBB == nullptr)
    MBBI = begin();
  else
    MBBI = MBB->getIterator();

  // Figure out the block number this should have.
  unsigned BlockNo = 0;
  if (MBBI != begin())
    BlockNo = std::prev(MBBI)->getNumber() + 1;

  for (; MBBI != E; ++MBBI, ++BlockNo) {
    if (MBBI->getNumber() != (int)BlockNo) {
      // Remove use of the old number.
      if (MBBI->getNumber() != -1) {
        assert(MBBNumbering[MBBI->getNumber()] == &*MBBI &&
               "MBB number mismatch!");
        MBBNumbering[MBBI->getNumber()] = nullptr;
      }

      // If BlockNo is already taken, set that block's number to -1.
      if (MBBNumbering[BlockNo])
        MBBNumbering[BlockNo]->setNumber(-1);

      MBBNumbering[BlockNo] = &*MBBI;
      MBBI->setNumber(BlockNo);
    }
  }

  // Okay, all the blocks are renumbered. If we have compactified the block
  // numbering, shrink MBBNumbering now.
  assert(BlockNo <= MBBNumbering.size() && "Mismatch!");
  MBBNumbering.resize(BlockNo);
}

/// This method iterates over the basic blocks and assigns their IsBeginSection
/// and IsEndSection fields. This must be called after MBB layout is finalized
/// and the SectionID's are assigned to MBBs.
void MachineFunction::assignBeginEndSections() {
  front().setIsBeginSection();
  auto CurrentSectionID = front().getSectionID();
  for (auto MBBI = std::next(begin()), E = end(); MBBI != E; ++MBBI) {
    if (MBBI->getSectionID() == CurrentSectionID)
      continue;
    MBBI->setIsBeginSection();
    std::prev(MBBI)->setIsEndSection();
    CurrentSectionID = MBBI->getSectionID();
  }
  back().setIsEndSection();
}

/// Allocate a new MachineInstr. Use this instead of `new MachineInstr'.
MachineInstr *MachineFunction::CreateMachineInstr(const MCInstrDesc &MCID,
                                                  DebugLoc DL,
                                                  bool NoImplicit) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
      MachineInstr(*this, MCID, std::move(DL), NoImplicit);
}
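
// Most clients do not call CreateMachineInstr directly; the BuildMI() helpers
// wrap it, e.g. (illustrative, with MBB, InsertPt, DL, TII, DstReg and SrcReg
// assumed in scope):
//   BuildMI(MBB, InsertPt, DL, TII.get(TargetOpcode::COPY), DstReg)
//       .addReg(SrcReg);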

/// Create a new MachineInstr which is a copy of the 'Orig' instruction,
/// identical in all ways except the instruction has no parent, prev, or next.
MachineInstr *
MachineFunction::CloneMachineInstr(const MachineInstr *Orig) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
      MachineInstr(*this, *Orig);
}

MachineInstr &MachineFunction::cloneMachineInstrBundle(
    MachineBasicBlock &MBB, MachineBasicBlock::iterator InsertBefore,
    const MachineInstr &Orig) {
  MachineInstr *FirstClone = nullptr;
  MachineBasicBlock::const_instr_iterator I = Orig.getIterator();
  while (true) {
    MachineInstr *Cloned = CloneMachineInstr(&*I);
    MBB.insert(InsertBefore, Cloned);
    if (FirstClone == nullptr) {
      FirstClone = Cloned;
    } else {
      Cloned->bundleWithPred();
    }

    if (!I->isBundledWithSucc())
      break;
    ++I;
  }
  // Copy over call site info to the cloned instruction if needed. If Orig is
  // in a bundle, copyCallSiteInfo takes care of finding the call instruction
  // in the bundle.
  if (Orig.shouldUpdateCallSiteInfo())
    copyCallSiteInfo(&Orig, FirstClone);
  return *FirstClone;
}

/// Delete the given MachineInstr.
///
/// This function also serves as the MachineInstr destructor - the real
/// ~MachineInstr() destructor must be empty.
void MachineFunction::deleteMachineInstr(MachineInstr *MI) {
  // Verify that the call site info is in a valid state. This assertion should
  // be triggered during the implementation of support for the
  // call site info of a new architecture. If the assertion is triggered, the
  // backtrace will tell where to insert a call to updateCallSiteInfo().
  assert((!MI->isCandidateForCallSiteEntry() ||
          CallSitesInfo.find(MI) == CallSitesInfo.end()) &&
         "Call site info was not updated!");
  // Strip it for parts. The operand array and the MI object itself are
  // independently recyclable.
  if (MI->Operands)
    deallocateOperandArray(MI->CapOperands, MI->Operands);
  // Don't call ~MachineInstr() which must be trivial anyway because
  // ~MachineFunction drops whole lists of MachineInstrs without calling their
  // destructors.
  InstructionRecycler.Deallocate(Allocator, MI);
}

/// Allocate a new MachineBasicBlock. Use this instead of
/// `new MachineBasicBlock'.
MachineBasicBlock *
MachineFunction::CreateMachineBasicBlock(const BasicBlock *bb) {
  return new (BasicBlockRecycler.Allocate<MachineBasicBlock>(Allocator))
      MachineBasicBlock(*this, bb);
}

/// Delete the given MachineBasicBlock.
void MachineFunction::deleteMachineBasicBlock(MachineBasicBlock *MBB) {
  assert(MBB->getParent() == this && "MBB parent mismatch!");
  // Clean up any references to MBB in jump tables before deleting it.
  if (JumpTableInfo)
    JumpTableInfo->RemoveMBBFromJumpTables(MBB);
  MBB->~MachineBasicBlock();
  BasicBlockRecycler.Deallocate(Allocator, MBB);
}

MachineMemOperand *MachineFunction::getMachineMemOperand(
    MachinePointerInfo PtrInfo, MachineMemOperand::Flags f, uint64_t s,
    Align base_alignment, const AAMDNodes &AAInfo, const MDNode *Ranges,
    SyncScope::ID SSID, AtomicOrdering Ordering,
    AtomicOrdering FailureOrdering) {
  return new (Allocator)
      MachineMemOperand(PtrInfo, f, s, base_alignment, AAInfo, Ranges,
                        SSID, Ordering, FailureOrdering);
}

MachineMemOperand *MachineFunction::getMachineMemOperand(
    MachinePointerInfo PtrInfo, MachineMemOperand::Flags f, LLT MemTy,
    Align base_alignment, const AAMDNodes &AAInfo, const MDNode *Ranges,
    SyncScope::ID SSID, AtomicOrdering Ordering,
    AtomicOrdering FailureOrdering) {
  return new (Allocator)
      MachineMemOperand(PtrInfo, f, MemTy, base_alignment, AAInfo, Ranges, SSID,
                        Ordering, FailureOrdering);
}

MachineMemOperand *MachineFunction::getMachineMemOperand(
    const MachineMemOperand *MMO, const MachinePointerInfo &PtrInfo,
    uint64_t Size) {
  return new (Allocator)
      MachineMemOperand(PtrInfo, MMO->getFlags(), Size, MMO->getBaseAlign(),
                        AAMDNodes(), nullptr, MMO->getSyncScopeID(),
                        MMO->getSuccessOrdering(), MMO->getFailureOrdering());
}

MachineMemOperand *MachineFunction::getMachineMemOperand(
    const MachineMemOperand *MMO, const MachinePointerInfo &PtrInfo, LLT Ty) {
  return new (Allocator)
      MachineMemOperand(PtrInfo, MMO->getFlags(), Ty, MMO->getBaseAlign(),
                        AAMDNodes(), nullptr, MMO->getSyncScopeID(),
                        MMO->getSuccessOrdering(), MMO->getFailureOrdering());
}

MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      int64_t Offset, LLT Ty) {
  const MachinePointerInfo &PtrInfo = MMO->getPointerInfo();

  // If there is no pointer value, the offset isn't tracked so we need to
  // adjust the base alignment.
  Align Alignment = PtrInfo.V.isNull()
                        ? commonAlignment(MMO->getBaseAlign(), Offset)
                        : MMO->getBaseAlign();

  // Do not preserve ranges, since we don't necessarily know what the high bits
  // are anymore.
  return new (Allocator) MachineMemOperand(
      PtrInfo.getWithOffset(Offset), MMO->getFlags(), Ty, Alignment,
      MMO->getAAInfo(), nullptr, MMO->getSyncScopeID(),
      MMO->getSuccessOrdering(), MMO->getFailureOrdering());
}

MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      const AAMDNodes &AAInfo) {
  MachinePointerInfo MPI = MMO->getValue() ?
             MachinePointerInfo(MMO->getValue(), MMO->getOffset()) :
             MachinePointerInfo(MMO->getPseudoValue(), MMO->getOffset());

  return new (Allocator) MachineMemOperand(
      MPI, MMO->getFlags(), MMO->getSize(), MMO->getBaseAlign(), AAInfo,
      MMO->getRanges(), MMO->getSyncScopeID(), MMO->getSuccessOrdering(),
      MMO->getFailureOrdering());
}

MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      MachineMemOperand::Flags Flags) {
  return new (Allocator) MachineMemOperand(
      MMO->getPointerInfo(), Flags, MMO->getSize(), MMO->getBaseAlign(),
      MMO->getAAInfo(), MMO->getRanges(), MMO->getSyncScopeID(),
      MMO->getSuccessOrdering(), MMO->getFailureOrdering());
}

MachineInstr::ExtraInfo *MachineFunction::createMIExtraInfo(
    ArrayRef<MachineMemOperand *> MMOs, MCSymbol *PreInstrSymbol,
    MCSymbol *PostInstrSymbol, MDNode *HeapAllocMarker) {
  return MachineInstr::ExtraInfo::create(Allocator, MMOs, PreInstrSymbol,
                                         PostInstrSymbol, HeapAllocMarker);
}

const char *MachineFunction::createExternalSymbolName(StringRef Name) {
  char *Dest = Allocator.Allocate<char>(Name.size() + 1);
  llvm::copy(Name, Dest);
  Dest[Name.size()] = 0;
  return Dest;
}

uint32_t *MachineFunction::allocateRegMask() {
  unsigned NumRegs = getSubtarget().getRegisterInfo()->getNumRegs();
  unsigned Size = MachineOperand::getRegMaskSize(NumRegs);
  uint32_t *Mask = Allocator.Allocate<uint32_t>(Size);
  memset(Mask, 0, Size * sizeof(Mask[0]));
  return Mask;
}
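
// The returned mask is all-zero, i.e. "all registers clobbered": a set bit in
// a register mask means the register is preserved across the call. A sketch
// of marking one register preserved (Reg is a hypothetical physical register
// number):
//   uint32_t *Mask = MF.allocateRegMask();
//   Mask[Reg / 32] |= 1u << (Reg % 32);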

ArrayRef<int> MachineFunction::allocateShuffleMask(ArrayRef<int> Mask) {
  int *AllocMask = Allocator.Allocate<int>(Mask.size());
  copy(Mask, AllocMask);
  return {AllocMask, Mask.size()};
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void MachineFunction::dump() const {
  print(dbgs());
}
#endif

StringRef MachineFunction::getName() const {
  return getFunction().getName();
}

void MachineFunction::print(raw_ostream &OS, const SlotIndexes *Indexes) const {
  OS << "# Machine code for function " << getName() << ": ";
  getProperties().print(OS);
  OS << '\n';

  // Print Frame Information
  FrameInfo->print(*this, OS);

  // Print JumpTable Information
  if (JumpTableInfo)
    JumpTableInfo->print(OS);

  // Print Constant Pool
  ConstantPool->print(OS);

  const TargetRegisterInfo *TRI = getSubtarget().getRegisterInfo();

  if (RegInfo && !RegInfo->livein_empty()) {
    OS << "Function Live Ins: ";
    for (MachineRegisterInfo::livein_iterator
         I = RegInfo->livein_begin(), E = RegInfo->livein_end(); I != E; ++I) {
      OS << printReg(I->first, TRI);
      if (I->second)
        OS << " in " << printReg(I->second, TRI);
      if (std::next(I) != E)
        OS << ", ";
    }
    OS << '\n';
  }

  ModuleSlotTracker MST(getFunction().getParent());
  MST.incorporateFunction(getFunction());
  for (const auto &BB : *this) {
    OS << '\n';
    // If we print the whole function, print it at its most verbose level.
    BB.print(OS, MST, Indexes, /*IsStandalone=*/true);
  }

  OS << "\n# End machine code for function " << getName() << ".\n\n";
}

/// True if this function needs frame moves for debug or exceptions.
bool MachineFunction::needsFrameMoves() const {
  return getMMI().hasDebugInfo() ||
         getTarget().Options.ForceDwarfFrameSection ||
         F.needsUnwindTableEntry();
}

namespace llvm {

template<>
struct DOTGraphTraits<const MachineFunction*> : public DefaultDOTGraphTraits {
  DOTGraphTraits(bool isSimple = false) : DefaultDOTGraphTraits(isSimple) {}

  static std::string getGraphName(const MachineFunction *F) {
    return ("CFG for '" + F->getName() + "' function").str();
  }

  std::string getNodeLabel(const MachineBasicBlock *Node,
                           const MachineFunction *Graph) {
    std::string OutStr;
    {
      raw_string_ostream OSS(OutStr);

      if (isSimple()) {
        OSS << printMBBReference(*Node);
        if (const BasicBlock *BB = Node->getBasicBlock())
          OSS << ": " << BB->getName();
      } else
        Node->print(OSS);
    }

    if (OutStr[0] == '\n') OutStr.erase(OutStr.begin());

    // Process string output to make it nicer...
    for (unsigned i = 0; i != OutStr.length(); ++i)
      if (OutStr[i] == '\n') {  // Left justify
        OutStr[i] = '\\';
        OutStr.insert(OutStr.begin()+i+1, 'l');
      }
    return OutStr;
  }
};

} // end namespace llvm

void MachineFunction::viewCFG() const {
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName());
#else
  errs() << "MachineFunction::viewCFG is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

void MachineFunction::viewCFGOnly() const {
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName(), true);
#else
  errs() << "MachineFunction::viewCFGOnly is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

/// Add the specified physical register as a live-in value and
/// create a corresponding virtual register for it.
Register MachineFunction::addLiveIn(MCRegister PReg,
                                    const TargetRegisterClass *RC) {
  MachineRegisterInfo &MRI = getRegInfo();
  Register VReg = MRI.getLiveInVirtReg(PReg);
  if (VReg) {
    const TargetRegisterClass *VRegRC = MRI.getRegClass(VReg);
    (void)VRegRC;
    // A physical register can be added several times.
    // Between two calls, the register class of the related virtual register
    // may have been constrained to match some operation constraints.
    // In that case, check that the current register class includes the
    // physical register and is a subclass of the specified RC.
    assert((VRegRC == RC || (VRegRC->contains(PReg) &&
                             RC->hasSubClassEq(VRegRC))) &&
           "Register class mismatch!");
    return VReg;
  }
  VReg = MRI.createVirtualRegister(RC);
  MRI.addLiveIn(PReg, VReg);
  return VReg;
}
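
// Illustrative use when lowering formal arguments in SelectionDAG (the X86
// names are examples only):
//   Register VReg = MF.addLiveIn(X86::EDI, &X86::GR32RegClass);
//   SDValue Arg = DAG.getCopyFromReg(Chain, DL, VReg, MVT::i32);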

/// Return the MCSymbol for the specified non-empty jump table.
/// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
/// normal 'L' label is returned.
MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
                                        bool isLinkerPrivate) const {
  const DataLayout &DL = getDataLayout();
  assert(JumpTableInfo && "No jump tables");
  assert(JTI < JumpTableInfo->getJumpTables().size() && "Invalid JTI!");

  StringRef Prefix = isLinkerPrivate ? DL.getLinkerPrivateGlobalPrefix()
                                     : DL.getPrivateGlobalPrefix();
  SmallString<60> Name;
  raw_svector_ostream(Name)
      << Prefix << "JTI" << getFunctionNumber() << '_' << JTI;
  return Ctx.getOrCreateSymbol(Name);
}
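
// For example, on an ELF target whose private prefix is ".L", jump table 1 of
// function 3 is named ".LJTI3_1".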

/// Return a function-local symbol to represent the PIC base.
MCSymbol *MachineFunction::getPICBaseSymbol() const {
  const DataLayout &DL = getDataLayout();
  return Ctx.getOrCreateSymbol(Twine(DL.getPrivateGlobalPrefix()) +
                               Twine(getFunctionNumber()) + "$pb");
}
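
// E.g. on Darwin, where the private global prefix is "L", function 0's PIC
// base symbol is "L0$pb".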

/// \name Exception Handling
/// \{

LandingPadInfo &
MachineFunction::getOrCreateLandingPadInfo(MachineBasicBlock *LandingPad) {
  unsigned N = LandingPads.size();
  for (unsigned i = 0; i < N; ++i) {
    LandingPadInfo &LP = LandingPads[i];
    if (LP.LandingPadBlock == LandingPad)
      return LP;
  }

  LandingPads.push_back(LandingPadInfo(LandingPad));
  return LandingPads[N];
}

void MachineFunction::addInvoke(MachineBasicBlock *LandingPad,
                                MCSymbol *BeginLabel, MCSymbol *EndLabel) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  LP.BeginLabels.push_back(BeginLabel);
  LP.EndLabels.push_back(EndLabel);
}

MCSymbol *MachineFunction::addLandingPad(MachineBasicBlock *LandingPad) {
  MCSymbol *LandingPadLabel = Ctx.createTempSymbol();
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  LP.LandingPadLabel = LandingPadLabel;

  const Instruction *FirstI = LandingPad->getBasicBlock()->getFirstNonPHI();
  if (const auto *LPI = dyn_cast<LandingPadInst>(FirstI)) {
    if (const auto *PF =
            dyn_cast<Function>(F.getPersonalityFn()->stripPointerCasts()))
      getMMI().addPersonality(PF);

    if (LPI->isCleanup())
      addCleanup(LandingPad);

    // FIXME: New EH - Add the clauses in reverse order. This isn't 100%
    // correct, but we need to do it this way because of how the DWARF EH
    // emitter processes the clauses.
    for (unsigned I = LPI->getNumClauses(); I != 0; --I) {
      Value *Val = LPI->getClause(I - 1);
      if (LPI->isCatch(I - 1)) {
        addCatchTypeInfo(LandingPad,
                         dyn_cast<GlobalValue>(Val->stripPointerCasts()));
      } else {
        // Add filters in a list.
        auto *CVal = cast<Constant>(Val);
        SmallVector<const GlobalValue *, 4> FilterList;
        for (const Use &U : CVal->operands())
          FilterList.push_back(cast<GlobalValue>(U->stripPointerCasts()));

        addFilterTypeInfo(LandingPad, FilterList);
      }
    }

  } else if (const auto *CPI = dyn_cast<CatchPadInst>(FirstI)) {
    for (unsigned I = CPI->getNumArgOperands(); I != 0; --I) {
      Value *TypeInfo = CPI->getArgOperand(I - 1)->stripPointerCasts();
      addCatchTypeInfo(LandingPad, dyn_cast<GlobalValue>(TypeInfo));
    }

  } else {
    assert(isa<CleanupPadInst>(FirstI) && "Invalid landingpad!");
  }

  return LandingPadLabel;
}

void MachineFunction::addCatchTypeInfo(MachineBasicBlock *LandingPad,
                                       ArrayRef<const GlobalValue *> TyInfo) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  for (const GlobalValue *GV : llvm::reverse(TyInfo))
    LP.TypeIds.push_back(getTypeIDFor(GV));
}

void MachineFunction::addFilterTypeInfo(MachineBasicBlock *LandingPad,
                                        ArrayRef<const GlobalValue *> TyInfo) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  std::vector<unsigned> IdsInFilter(TyInfo.size());
  for (unsigned I = 0, E = TyInfo.size(); I != E; ++I)
    IdsInFilter[I] = getTypeIDFor(TyInfo[I]);
  LP.TypeIds.push_back(getFilterIDFor(IdsInFilter));
}

void MachineFunction::tidyLandingPads(DenseMap<MCSymbol *, uintptr_t> *LPMap,
                                      bool TidyIfNoBeginLabels) {
  for (unsigned i = 0; i != LandingPads.size(); ) {
    LandingPadInfo &LandingPad = LandingPads[i];
    if (LandingPad.LandingPadLabel &&
        !LandingPad.LandingPadLabel->isDefined() &&
        (!LPMap || (*LPMap)[LandingPad.LandingPadLabel] == 0))
      LandingPad.LandingPadLabel = nullptr;

    // Special case: we *should* emit LPs with null LP MBB. This indicates
    // the "nounwind" case.
    if (!LandingPad.LandingPadLabel && LandingPad.LandingPadBlock) {
      LandingPads.erase(LandingPads.begin() + i);
      continue;
    }

    if (TidyIfNoBeginLabels) {
      for (unsigned j = 0, e = LandingPads[i].BeginLabels.size(); j != e; ++j) {
        MCSymbol *BeginLabel = LandingPad.BeginLabels[j];
        MCSymbol *EndLabel = LandingPad.EndLabels[j];
        if ((BeginLabel->isDefined() || (LPMap && (*LPMap)[BeginLabel] != 0)) &&
            (EndLabel->isDefined() || (LPMap && (*LPMap)[EndLabel] != 0)))
          continue;

        LandingPad.BeginLabels.erase(LandingPad.BeginLabels.begin() + j);
        LandingPad.EndLabels.erase(LandingPad.EndLabels.begin() + j);
        --j;
        --e;
      }

      // Remove landing pads with no try-ranges.
      if (LandingPads[i].BeginLabels.empty()) {
        LandingPads.erase(LandingPads.begin() + i);
        continue;
      }
    }

    // If there is no landing pad, ensure that the list of typeids is empty.
    // If the only typeid is a cleanup, this is the same as having no typeids.
    if (!LandingPad.LandingPadBlock ||
        (LandingPad.TypeIds.size() == 1 && !LandingPad.TypeIds[0]))
      LandingPad.TypeIds.clear();

    ++i;
  }
}

void MachineFunction::addCleanup(MachineBasicBlock *LandingPad) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  LP.TypeIds.push_back(0);
}

void MachineFunction::addSEHCatchHandler(MachineBasicBlock *LandingPad,
                                         const Function *Filter,
                                         const BlockAddress *RecoverBA) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  SEHHandler Handler;
  Handler.FilterOrFinally = Filter;
  Handler.RecoverBA = RecoverBA;
  LP.SEHHandlers.push_back(Handler);
}

void MachineFunction::addSEHCleanupHandler(MachineBasicBlock *LandingPad,
                                           const Function *Cleanup) {
  LandingPadInfo &LP = getOrCreateLandingPadInfo(LandingPad);
  SEHHandler Handler;
  Handler.FilterOrFinally = Cleanup;
  Handler.RecoverBA = nullptr;
  LP.SEHHandlers.push_back(Handler);
}

void MachineFunction::setCallSiteLandingPad(MCSymbol *Sym,
                                            ArrayRef<unsigned> Sites) {
  LPadToCallSiteMap[Sym].append(Sites.begin(), Sites.end());
}

unsigned MachineFunction::getTypeIDFor(const GlobalValue *TI) {
  for (unsigned i = 0, N = TypeInfos.size(); i != N; ++i)
    if (TypeInfos[i] == TI) return i + 1;

  TypeInfos.push_back(TI);
  return TypeInfos.size();
}

int MachineFunction::getFilterIDFor(std::vector<unsigned> &TyIds) {
  // If the new filter coincides with the tail of an existing filter, then
  // re-use the existing filter. Folding filters more than this requires
  // re-ordering filters and/or their elements - probably not worth it.
  for (unsigned i : FilterEnds) {
    unsigned j = TyIds.size();

    while (i && j)
      if (FilterIds[--i] != TyIds[--j])
        goto try_next;

    if (!j)
      // The new filter coincides with range [i, end) of the existing filter.
      return -(1 + i);

  try_next:;
  }

  // Add the new filter.
  int FilterID = -(1 + FilterIds.size());
  FilterIds.reserve(FilterIds.size() + TyIds.size() + 1);
  llvm::append_range(FilterIds, TyIds);
  FilterEnds.push_back(FilterIds.size());
  FilterIds.push_back(0); // terminator
  return FilterID;
}
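
// Worked example: starting from empty tables, a filter of type ids {1, 2}
// gets FilterID -1, leaving FilterIds = {1, 2, 0} and FilterEnds = {2}. A
// later filter {2} matches the tail of the existing one at index 1 and is
// folded into it, returning -(1 + 1) = -2 rather than storing a new copy.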

MachineFunction::CallSiteInfoMap::iterator
MachineFunction::getCallSiteInfo(const MachineInstr *MI) {
  assert(MI->isCandidateForCallSiteEntry() &&
         "Call site info refers only to call (MI) candidates");

  if (!Target.Options.EmitCallSiteInfo)
    return CallSitesInfo.end();
  return CallSitesInfo.find(MI);
}

/// Return the call machine instruction, or find a call within a bundle.
static const MachineInstr *getCallInstr(const MachineInstr *MI) {
  if (!MI->isBundle())
    return MI;

  for (auto &BMI : make_range(getBundleStart(MI->getIterator()),
                              getBundleEnd(MI->getIterator())))
    if (BMI.isCandidateForCallSiteEntry())
      return &BMI;

  llvm_unreachable("Unexpected bundle without a call site candidate");
}

void MachineFunction::eraseCallSiteInfo(const MachineInstr *MI) {
  assert(MI->shouldUpdateCallSiteInfo() &&
         "Call site info refers only to call (MI) candidates or "
         "candidates inside bundles");

  const MachineInstr *CallMI = getCallInstr(MI);
  CallSiteInfoMap::iterator CSIt = getCallSiteInfo(CallMI);
  if (CSIt == CallSitesInfo.end())
    return;
  CallSitesInfo.erase(CSIt);
}

void MachineFunction::copyCallSiteInfo(const MachineInstr *Old,
                                       const MachineInstr *New) {
  assert(Old->shouldUpdateCallSiteInfo() &&
         "Call site info refers only to call (MI) candidates or "
         "candidates inside bundles");

  if (!New->isCandidateForCallSiteEntry())
    return eraseCallSiteInfo(Old);

  const MachineInstr *OldCallMI = getCallInstr(Old);
  CallSiteInfoMap::iterator CSIt = getCallSiteInfo(OldCallMI);
  if (CSIt == CallSitesInfo.end())
    return;

  CallSiteInfo CSInfo = CSIt->second;
  CallSitesInfo[New] = CSInfo;
}

void MachineFunction::moveCallSiteInfo(const MachineInstr *Old,
                                       const MachineInstr *New) {
  assert(Old->shouldUpdateCallSiteInfo() &&
         "Call site info refers only to call (MI) candidates or "
         "candidates inside bundles");

  if (!New->isCandidateForCallSiteEntry())
    return eraseCallSiteInfo(Old);

  const MachineInstr *OldCallMI = getCallInstr(Old);
  CallSiteInfoMap::iterator CSIt = getCallSiteInfo(OldCallMI);
  if (CSIt == CallSitesInfo.end())
    return;

  CallSiteInfo CSInfo = std::move(CSIt->second);
  CallSitesInfo.erase(CSIt);
  CallSitesInfo[New] = CSInfo;
}

void MachineFunction::setDebugInstrNumberingCount(unsigned Num) {
  DebugInstrNumberingCount = Num;
}

void MachineFunction::makeDebugValueSubstitution(DebugInstrOperandPair A,
                                                 DebugInstrOperandPair B,
                                                 unsigned Subreg) {
  // Catch any accidental self-loops.
  assert(A.first != B.first);
  // Don't allow any substitutions _from_ the memory operand number.
  assert(A.second != DebugOperandMemNumber);

  DebugValueSubstitutions.push_back({A, B, Subreg});
}
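
// For instance, when a pass replaces the instruction numbered 1 with one
// numbered 2 and the def stays at operand 0, it records
// makeDebugValueSubstitution({1, 0}, {2, 0}), so DBG_INSTR_REFs naming
// {1, 0} can later be resolved through {2, 0}.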

void MachineFunction::substituteDebugValuesForInst(const MachineInstr &Old,
                                                   MachineInstr &New,
                                                   unsigned MaxOperand) {
  // If the Old instruction wasn't tracked at all, there is no work to do.
  unsigned OldInstrNum = Old.peekDebugInstrNum();
  if (!OldInstrNum)
    return;

  // Iterate over all operands looking for defs to create substitutions for.
  // Avoid creating new instr numbers unless we create a new substitution.
  // While this has no functional effect, it risks confusing someone reading
  // MIR output.
  // Examine all the operands, or the first N specified by the caller.
  MaxOperand = std::min(MaxOperand, Old.getNumOperands());
  for (unsigned int I = 0; I < MaxOperand; ++I) {
    const auto &OldMO = Old.getOperand(I);
    auto &NewMO = New.getOperand(I);
    (void)NewMO;

    if (!OldMO.isReg() || !OldMO.isDef())
      continue;
    assert(NewMO.isDef());

    unsigned NewInstrNum = New.getDebugInstrNum();
    makeDebugValueSubstitution(std::make_pair(OldInstrNum, I),
                               std::make_pair(NewInstrNum, I));
  }
}

auto MachineFunction::salvageCopySSA(MachineInstr &MI)
    -> DebugInstrOperandPair {
  MachineRegisterInfo &MRI = getRegInfo();
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  const TargetInstrInfo &TII = *getSubtarget().getInstrInfo();

  // Chase the value read by a copy-like instruction back to the instruction
  // that ultimately _defines_ that value. This may pass:
  //  * Through multiple intermediate copies, including subregister moves /
  //    copies,
  //  * Copies from physical registers that must then be traced back to the
  //    defining instruction,
  //  * Or, physical registers may be live-in to (only) the entry block, which
  //    requires a DBG_PHI to be created.
  // We can pursue this problem in that order: trace back through copies,
  // optionally through a physical register, to a defining instruction. We
  // should never move from physreg to vreg. As we're still in SSA form, no
  // need to worry about partial definitions of registers.

  // Helper lambda to interpret a copy-like instruction. Takes instruction,
  // returns the register read and any subregister identifying which part is
  // read.
  auto GetRegAndSubreg =
      [&](const MachineInstr &Cpy) -> std::pair<Register, unsigned> {
    Register NewReg, OldReg;
    unsigned SubReg;
    if (Cpy.isCopy()) {
      OldReg = Cpy.getOperand(0).getReg();
      NewReg = Cpy.getOperand(1).getReg();
      SubReg = Cpy.getOperand(1).getSubReg();
    } else if (Cpy.isSubregToReg()) {
      OldReg = Cpy.getOperand(0).getReg();
      NewReg = Cpy.getOperand(2).getReg();
      SubReg = Cpy.getOperand(3).getImm();
    } else {
      auto CopyDetails = *TII.isCopyInstr(Cpy);
      const MachineOperand &Src = *CopyDetails.Source;
      const MachineOperand &Dest = *CopyDetails.Destination;
      OldReg = Dest.getReg();
      NewReg = Src.getReg();
      SubReg = Src.getSubReg();
    }

    return {NewReg, SubReg};
  };

  // First seek either the defining instruction, or a copy from a physreg.
  // During search, the current state is the current copy instruction, and
  // which register we've read. Accumulate qualifying subregisters into
  // SubregsSeen; deal with those later.
  auto State = GetRegAndSubreg(MI);
  auto CurInst = MI.getIterator();
  SmallVector<unsigned, 4> SubregsSeen;
  while (true) {
    // If we've found a copy from a physreg, first portion of search is over.
    if (!State.first.isVirtual())
      break;

    // Record any subregister qualifier.
    if (State.second)
      SubregsSeen.push_back(State.second);

    assert(MRI.hasOneDef(State.first));

    MachineInstr &Inst = *MRI.def_begin(State.first)->getParent();
    CurInst = Inst.getIterator();

    // Any non-copy instruction is the defining instruction we're seeking.
    if (!Inst.isCopyLike() && !TII.isCopyInstr(Inst))
      break;
    State = GetRegAndSubreg(Inst);
  }

  // Helper lambda to apply additional subregister substitutions to a known
  // instruction/operand pair. Adds new (fake) substitutions so that we can
  // record the subregister. FIXME: this isn't very space efficient if multiple
  // values are tracked back through the same copies; cache something later.
  auto ApplySubregisters =
      [&](DebugInstrOperandPair P) -> DebugInstrOperandPair {
    for (unsigned Subreg : reverse(SubregsSeen)) {
      // Fetch a new instruction number, not attached to an actual instruction.
      unsigned NewInstrNumber = getNewDebugInstrNum();
      // Add a substitution from the "new" number to the known one, with a
      // qualifying subreg.
      makeDebugValueSubstitution({NewInstrNumber, 0}, P, Subreg);
      // Return the new number; to find the underlying value, consumers need
      // to deal with the qualifying subreg.
      P = {NewInstrNumber, 0};
    }
    return P;
  };

  // If we managed to find the defining instruction after COPYs, return an
  // instruction / operand pair after adding subregister qualifiers.
  if (State.first.isVirtual()) {
    // Virtual register def -- we can just look up where this happens.
    MachineInstr *Inst = MRI.def_begin(State.first)->getParent();
    for (auto &MO : Inst->operands()) {
      if (!MO.isReg() || !MO.isDef() || MO.getReg() != State.first)
        continue;
      return ApplySubregisters(
          {Inst->getDebugInstrNum(), Inst->getOperandNo(&MO)});
    }

    llvm_unreachable("Vreg def with no corresponding operand?");
  }

  // Our search ended in a copy from a physreg: walk back up the function
  // looking for whatever defines the physreg.
  assert(CurInst->isCopyLike() || TII.isCopyInstr(*CurInst));
  State = GetRegAndSubreg(*CurInst);
  Register RegToSeek = State.first;

  auto RMII = CurInst->getReverseIterator();
  auto PrevInstrs = make_range(RMII, CurInst->getParent()->instr_rend());
  for (auto &ToExamine : PrevInstrs) {
    for (auto &MO : ToExamine.operands()) {
      // Test for operand that defines something aliasing RegToSeek.
      if (!MO.isReg() || !MO.isDef() ||
          !TRI.regsOverlap(RegToSeek, MO.getReg()))
        continue;

      return ApplySubregisters(
          {ToExamine.getDebugInstrNum(), ToExamine.getOperandNo(&MO)});
    }
  }

  MachineBasicBlock &InsertBB = *CurInst->getParent();

  // We reached the start of the block before finding a defining instruction.
  // It could be from a constant register, otherwise it must be an argument.
  if (TRI.isConstantPhysReg(State.first)) {
    // We can produce a DBG_PHI that identifies the constant physreg. Doesn't
    // matter where we put it, as it's constant valued.
    assert(CurInst->isCopy());
  } else if (State.first == TRI.getFrameRegister(*this)) {
    // LLVM IR is allowed to read the framepointer by calling a
    // llvm.frameaddress.* intrinsic. We can support this by emitting a
    // DBG_PHI $fp. This isn't ideal, because it extends the behaviours /
    // position that DBG_PHIs appear at, limiting what can be done later.
    // TODO: see if there's a better way of expressing these variable
    // locations.
    ;
  } else {
    // Assert that this is the entry block, or an EH pad. If it isn't, then
    // there is some code construct we don't recognise that deals with physregs
    // across blocks.
    assert(!State.first.isVirtual());
    assert(&*InsertBB.getParent()->begin() == &InsertBB || InsertBB.isEHPad());
  }

  // Create DBG_PHI for specified physreg.
  auto Builder = BuildMI(InsertBB, InsertBB.getFirstNonPHI(), DebugLoc(),
                         TII.get(TargetOpcode::DBG_PHI));
  Builder.addReg(State.first);
  unsigned NewNum = getNewDebugInstrNum();
  Builder.addImm(NewNum);
  return ApplySubregisters({NewNum, 0u});
}

void MachineFunction::finalizeDebugInstrRefs() {
  auto *TII = getSubtarget().getInstrInfo();

  auto MakeUndefDbgValue = [&](MachineInstr &MI) {
    const MCInstrDesc &RefII = TII->get(TargetOpcode::DBG_VALUE);
    MI.setDesc(RefII);
    MI.getOperand(0).setReg(0);
    MI.getOperand(1).ChangeToRegister(0, false);
  };

  for (auto &MBB : *this) {
    for (auto &MI : MBB) {
      if (!MI.isDebugRef() || !MI.getOperand(0).isReg())
        continue;

      Register Reg = MI.getOperand(0).getReg();

      // Some vregs can be deleted as redundant in the meantime. Mark those
      // as DBG_VALUE $noreg. Additionally, some normal instructions are
      // quickly deleted, leaving dangling references to vregs with no def.
      if (Reg == 0 || !RegInfo->hasOneDef(Reg)) {
        MakeUndefDbgValue(MI);
        continue;
      }

      assert(Reg.isVirtual());
      MachineInstr &DefMI = *RegInfo->def_instr_begin(Reg);

      // If we've found a copy-like instruction, follow it back to the
      // instruction that defines the source value, see salvageCopySSA docs
      // for why this is important.
      if (DefMI.isCopyLike() || TII->isCopyInstr(DefMI)) {
        auto Result = salvageCopySSA(DefMI);
        MI.getOperand(0).ChangeToImmediate(Result.first);
        MI.getOperand(1).setImm(Result.second);
      } else {
        // Otherwise, identify the operand number that the VReg refers to.
        unsigned OperandIdx = 0;
        for (const auto &MO : DefMI.operands()) {
          if (MO.isReg() && MO.isDef() && MO.getReg() == Reg)
            break;
          ++OperandIdx;
        }
        assert(OperandIdx < DefMI.getNumOperands());

        // Morph this instr ref to point at the given instruction and operand.
        unsigned ID = DefMI.getDebugInstrNum();
        MI.getOperand(0).ChangeToImmediate(ID);
        MI.getOperand(1).setImm(OperandIdx);
      }
    }
  }
}

bool MachineFunction::useDebugInstrRef() const {
  // Disable instr-ref at -O0: it's very slow (in compile time). We can still
  // have optimized code inlined into this unoptimized code, however with
  // fewer and less aggressive optimizations happening, coverage and accuracy
  // should not suffer.
  if (getTarget().getOptLevel() == CodeGenOpt::None)
    return false;

  // Don't use instr-ref if this function is marked optnone.
  if (F.hasFnAttribute(Attribute::OptimizeNone))
    return false;

  if (llvm::debuginfoShouldUseDebugInstrRef(getTarget().getTargetTriple()))
    return true;

  return false;
}

// Use one million as a high / reserved number.
const unsigned MachineFunction::DebugOperandMemNumber = 1000000;

/// \}

//===----------------------------------------------------------------------===//
// MachineJumpTableInfo implementation
//===----------------------------------------------------------------------===//

/// Return the size of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntrySize(const DataLayout &TD) const {
  // The size of a jump table entry is 4 bytes unless the entry is just the
  // address of a block, in which case it is the pointer size.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerSize();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return 8;
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return 4;
  case MachineJumpTableInfo::EK_Inline:
    return 0;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Return the alignment of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntryAlignment(const DataLayout &TD) const {
  // The alignment of a jump table entry is the alignment of int32 unless the
  // entry is just the address of a block, in which case it is the pointer
  // alignment.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerABIAlignment(0).value();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return TD.getABIIntegerTypeAlignment(64).value();
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return TD.getABIIntegerTypeAlignment(32).value();
  case MachineJumpTableInfo::EK_Inline:
    return 1;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Create a new jump table entry in the jump table info.
unsigned MachineJumpTableInfo::createJumpTableIndex(
                               const std::vector<MachineBasicBlock*> &DestBBs) {
  assert(!DestBBs.empty() && "Cannot create an empty jump table!");
  JumpTables.push_back(MachineJumpTableEntry(DestBBs));
  return JumpTables.size()-1;
}

/// If Old is the target of any jump tables, update the jump tables to branch
/// to New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTables(MachineBasicBlock *Old,
                                                  MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  // Accumulate the per-table result; otherwise this would always return false.
  for (size_t i = 0, e = JumpTables.size(); i != e; ++i)
    MadeChange |= ReplaceMBBInJumpTable(i, Old, New);
  return MadeChange;
}

/// If MBB is present in any jump tables, remove it.
bool MachineJumpTableInfo::RemoveMBBFromJumpTables(MachineBasicBlock *MBB) {
  bool MadeChange = false;
  for (MachineJumpTableEntry &JTE : JumpTables) {
    auto removeBeginItr = std::remove(JTE.MBBs.begin(), JTE.MBBs.end(), MBB);
    MadeChange |= (removeBeginItr != JTE.MBBs.end());
    JTE.MBBs.erase(removeBeginItr, JTE.MBBs.end());
  }
  return MadeChange;
}

/// If Old is a target of the jump tables, update the jump table to branch to
/// New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTable(unsigned Idx,
                                                 MachineBasicBlock *Old,
                                                 MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  MachineJumpTableEntry &JTE = JumpTables[Idx];
  for (MachineBasicBlock *&MBB : JTE.MBBs)
    if (MBB == Old) {
      MBB = New;
      MadeChange = true;
    }
  return MadeChange;
}

void MachineJumpTableInfo::print(raw_ostream &OS) const {
  if (JumpTables.empty()) return;

  OS << "Jump Tables:\n";

  for (unsigned i = 0, e = JumpTables.size(); i != e; ++i) {
    OS << printJumpTableEntryReference(i) << ':';
    for (const MachineBasicBlock *MBB : JumpTables[i].MBBs)
      OS << ' ' << printMBBReference(*MBB);
    if (i != e)
      OS << '\n';
  }

  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void MachineJumpTableInfo::dump() const { print(dbgs()); }
#endif

Printable llvm::printJumpTableEntryReference(unsigned Idx) {
  return Printable([Idx](raw_ostream &OS) { OS << "%jump-table." << Idx; });
}
  1172. //===----------------------------------------------------------------------===//
  1173. // MachineConstantPool implementation
  1174. //===----------------------------------------------------------------------===//
  1175. void MachineConstantPoolValue::anchor() {}
  1176. unsigned MachineConstantPoolValue::getSizeInBytes(const DataLayout &DL) const {
  1177. return DL.getTypeAllocSize(Ty);
  1178. }
  1179. unsigned MachineConstantPoolEntry::getSizeInBytes(const DataLayout &DL) const {
  1180. if (isMachineConstantPoolEntry())
  1181. return Val.MachineCPVal->getSizeInBytes(DL);
  1182. return DL.getTypeAllocSize(Val.ConstVal->getType());
  1183. }
  1184. bool MachineConstantPoolEntry::needsRelocation() const {
  1185. if (isMachineConstantPoolEntry())
  1186. return true;
  1187. return Val.ConstVal->needsDynamicRelocation();
  1188. }
  1189. SectionKind
  1190. MachineConstantPoolEntry::getSectionKind(const DataLayout *DL) const {
  1191. if (needsRelocation())
  1192. return SectionKind::getReadOnlyWithRel();
  1193. switch (getSizeInBytes(*DL)) {
  1194. case 4:
  1195. return SectionKind::getMergeableConst4();
  1196. case 8:
  1197. return SectionKind::getMergeableConst8();
  1198. case 16:
  1199. return SectionKind::getMergeableConst16();
  1200. case 32:
  1201. return SectionKind::getMergeableConst32();
  1202. default:
  1203. return SectionKind::getReadOnly();
  1204. }
  1205. }
  1206. MachineConstantPool::~MachineConstantPool() {
  1207. // A constant may be a member of both Constants and MachineCPVsSharingEntries,
  1208. // so keep track of which we've deleted to avoid double deletions.
  1209. DenseSet<MachineConstantPoolValue*> Deleted;
  1210. for (const MachineConstantPoolEntry &C : Constants)
  1211. if (C.isMachineConstantPoolEntry()) {
  1212. Deleted.insert(C.Val.MachineCPVal);
  1213. delete C.Val.MachineCPVal;
  1214. }
  1215. for (MachineConstantPoolValue *CPV : MachineCPVsSharingEntries) {
  1216. if (Deleted.count(CPV) == 0)
  1217. delete CPV;
  1218. }
  1219. }
  1220. /// Test whether the given two constants can be allocated the same constant pool
  1221. /// entry.
  1222. static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
  1223. const DataLayout &DL) {
  1224. // Handle the trivial case quickly.
  1225. if (A == B) return true;
  1226. // If they have the same type but weren't the same constant, quickly
  1227. // reject them.
  1228. if (A->getType() == B->getType()) return false;
  1229. // We can't handle structs or arrays.
  1230. if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
  1231. isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
  1232. return false;
  1233. // For now, only support constants with the same size.
  1234. uint64_t StoreSize = DL.getTypeStoreSize(A->getType());
  1235. if (StoreSize != DL.getTypeStoreSize(B->getType()) || StoreSize > 128)
  1236. return false;
  1237. Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);
  1238. // Try constant folding a bitcast of both instructions to an integer. If we
  1239. // get two identical ConstantInt's, then we are good to share them. We use
  1240. // the constant folding APIs to do this so that we get the benefit of
  1241. // DataLayout.
  1242. if (isa<PointerType>(A->getType()))
  1243. A = ConstantFoldCastOperand(Instruction::PtrToInt,
  1244. const_cast<Constant *>(A), IntTy, DL);
  1245. else if (A->getType() != IntTy)
  1246. A = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(A),
  1247. IntTy, DL);
  1248. if (isa<PointerType>(B->getType()))
  1249. B = ConstantFoldCastOperand(Instruction::PtrToInt,
  1250. const_cast<Constant *>(B), IntTy, DL);
  1251. else if (B->getType() != IntTy)
  1252. B = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(B),
  1253. IntTy, DL);
  1254. return A == B;
  1255. }
  1256. /// Create a new entry in the constant pool or return an existing one.
  1257. /// User must specify the log2 of the minimum required alignment for the object.
  1258. unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
  1259. Align Alignment) {
  1260. if (Alignment > PoolAlignment) PoolAlignment = Alignment;
  1261. // Check to see if we already have this constant.
  1262. //
  1263. // FIXME, this could be made much more efficient for large constant pools.
  1264. for (unsigned i = 0, e = Constants.size(); i != e; ++i)
  1265. if (!Constants[i].isMachineConstantPoolEntry() &&
  1266. CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C, DL)) {
  1267. if (Constants[i].getAlign() < Alignment)
  1268. Constants[i].Alignment = Alignment;
  1269. return i;
  1270. }
  1271. Constants.push_back(MachineConstantPoolEntry(C, Alignment));
  1272. return Constants.size()-1;
  1273. }
  1274. unsigned MachineConstantPool::getConstantPoolIndex(MachineConstantPoolValue *V,
  1275. Align Alignment) {
  1276. if (Alignment > PoolAlignment) PoolAlignment = Alignment;
  1277. // Check to see if we already have this constant.
  1278. //
  1279. // FIXME, this could be made much more efficient for large constant pools.
  1280. int Idx = V->getExistingMachineCPValue(this, Alignment);
  1281. if (Idx != -1) {
  1282. MachineCPVsSharingEntries.insert(V);
  1283. return (unsigned)Idx;
  1284. }
  1285. Constants.push_back(MachineConstantPoolEntry(V, Alignment));
  1286. return Constants.size()-1;
  1287. }
  1288. void MachineConstantPool::print(raw_ostream &OS) const {
  1289. if (Constants.empty()) return;
  1290. OS << "Constant Pool:\n";
  1291. for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
  1292. OS << " cp#" << i << ": ";
  1293. if (Constants[i].isMachineConstantPoolEntry())
  1294. Constants[i].Val.MachineCPVal->print(OS);
  1295. else
  1296. Constants[i].Val.ConstVal->printAsOperand(OS, /*PrintType=*/false);
  1297. OS << ", align=" << Constants[i].getAlign().value();
  1298. OS << "\n";
  1299. }
  1300. }
  1301. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  1302. LLVM_DUMP_METHOD void MachineConstantPool::dump() const { print(dbgs()); }
  1303. #endif