AArch64AsmPrinter.cpp

//===- AArch64AsmPrinter.cpp - AArch64 LLVM assembly writer ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains a printer that converts from our internal representation
// of machine-dependent LLVM code to the AArch64 assembly language.
//
//===----------------------------------------------------------------------===//

#include "AArch64.h"
#include "AArch64MCInstLower.h"
#include "AArch64MachineFunctionInfo.h"
#include "AArch64RegisterInfo.h"
#include "AArch64Subtarget.h"
#include "AArch64TargetObjectFile.h"
#include "MCTargetDesc/AArch64AddressingModes.h"
#include "MCTargetDesc/AArch64InstPrinter.h"
#include "MCTargetDesc/AArch64MCExpr.h"
#include "MCTargetDesc/AArch64MCTargetDesc.h"
#include "MCTargetDesc/AArch64TargetStreamer.h"
#include "TargetInfo/AArch64TargetInfo.h"
#include "Utils/AArch64BaseInfo.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/ADT/Twine.h"
#include "llvm/BinaryFormat/COFF.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/CodeGen/AsmPrinter.h"
#include "llvm/CodeGen/FaultMaps.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineModuleInfoImpls.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstBuilder.h"
#include "llvm/MC/MCSectionELF.h"
#include "llvm/MC/MCStreamer.h"
#include "llvm/MC/MCSymbol.h"
#include "llvm/MC/TargetRegistry.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Instrumentation/HWAddressSanitizer.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <map>
#include <memory>
using namespace llvm;

#define DEBUG_TYPE "asm-printer"

namespace {

class AArch64AsmPrinter : public AsmPrinter {
  AArch64MCInstLower MCInstLowering;
  FaultMaps FM;
  const AArch64Subtarget *STI;
  bool ShouldEmitWeakSwiftAsyncExtendedFramePointerFlags = false;

public:
  AArch64AsmPrinter(TargetMachine &TM, std::unique_ptr<MCStreamer> Streamer)
      : AsmPrinter(TM, std::move(Streamer)), MCInstLowering(OutContext, *this),
        FM(*this) {}

  StringRef getPassName() const override { return "AArch64 Assembly Printer"; }

  /// Wrapper for MCInstLowering.lowerOperand() for the
  /// tblgen'erated pseudo lowering.
  bool lowerOperand(const MachineOperand &MO, MCOperand &MCOp) const {
    return MCInstLowering.lowerOperand(MO, MCOp);
  }

  void emitStartOfAsmFile(Module &M) override;
  void emitJumpTableInfo() override;
  void emitFunctionEntryLabel() override;

  void LowerJumpTableDest(MCStreamer &OutStreamer, const MachineInstr &MI);

  void LowerMOPS(MCStreamer &OutStreamer, const MachineInstr &MI);

  void LowerSTACKMAP(MCStreamer &OutStreamer, StackMaps &SM,
                     const MachineInstr &MI);
  void LowerPATCHPOINT(MCStreamer &OutStreamer, StackMaps &SM,
                       const MachineInstr &MI);
  void LowerSTATEPOINT(MCStreamer &OutStreamer, StackMaps &SM,
                       const MachineInstr &MI);

  void LowerFAULTING_OP(const MachineInstr &MI);

  void LowerPATCHABLE_FUNCTION_ENTER(const MachineInstr &MI);
  void LowerPATCHABLE_FUNCTION_EXIT(const MachineInstr &MI);
  void LowerPATCHABLE_TAIL_CALL(const MachineInstr &MI);

  typedef std::tuple<unsigned, bool, uint32_t> HwasanMemaccessTuple;
  std::map<HwasanMemaccessTuple, MCSymbol *> HwasanMemaccessSymbols;
  void LowerKCFI_CHECK(const MachineInstr &MI);
  void LowerHWASAN_CHECK_MEMACCESS(const MachineInstr &MI);
  void emitHwasanMemaccessSymbols(Module &M);

  void emitSled(const MachineInstr &MI, SledKind Kind);

  /// tblgen'erated driver function for lowering simple MI->MC
  /// pseudo instructions.
  bool emitPseudoExpansionLowering(MCStreamer &OutStreamer,
                                   const MachineInstr *MI);

  void emitInstruction(const MachineInstr *MI) override;

  void emitFunctionHeaderComment() override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AsmPrinter::getAnalysisUsage(AU);
    AU.setPreservesAll();
  }

  bool runOnMachineFunction(MachineFunction &MF) override {
    AArch64FI = MF.getInfo<AArch64FunctionInfo>();
    STI = &MF.getSubtarget<AArch64Subtarget>();

    SetupMachineFunction(MF);

    if (STI->isTargetCOFF()) {
      bool Internal = MF.getFunction().hasInternalLinkage();
      COFF::SymbolStorageClass Scl = Internal ? COFF::IMAGE_SYM_CLASS_STATIC
                                              : COFF::IMAGE_SYM_CLASS_EXTERNAL;
      int Type =
          COFF::IMAGE_SYM_DTYPE_FUNCTION << COFF::SCT_COMPLEX_TYPE_SHIFT;

      OutStreamer->beginCOFFSymbolDef(CurrentFnSym);
      OutStreamer->emitCOFFSymbolStorageClass(Scl);
      OutStreamer->emitCOFFSymbolType(Type);
      OutStreamer->endCOFFSymbolDef();
    }

    // Emit the rest of the function body.
    emitFunctionBody();

    // Emit the XRay table for this function.
    emitXRayTable();

    // We didn't modify anything.
    return false;
  }

private:
  void printOperand(const MachineInstr *MI, unsigned OpNum, raw_ostream &O);
  bool printAsmMRegister(const MachineOperand &MO, char Mode, raw_ostream &O);
  bool printAsmRegInClass(const MachineOperand &MO,
                          const TargetRegisterClass *RC, unsigned AltName,
                          raw_ostream &O);
  bool PrintAsmOperand(const MachineInstr *MI, unsigned OpNum,
                       const char *ExtraCode, raw_ostream &O) override;
  bool PrintAsmMemoryOperand(const MachineInstr *MI, unsigned OpNum,
                             const char *ExtraCode, raw_ostream &O) override;

  void PrintDebugValueComment(const MachineInstr *MI, raw_ostream &OS);

  void emitFunctionBodyEnd() override;

  MCSymbol *GetCPISymbol(unsigned CPID) const override;
  void emitEndOfAsmFile(Module &M) override;

  AArch64FunctionInfo *AArch64FI = nullptr;

  /// Emit the LOHs contained in AArch64FI.
  void emitLOHs();

  /// Emit instruction to set float register to zero.
  void emitFMov0(const MachineInstr &MI);

  using MInstToMCSymbol = std::map<const MachineInstr *, MCSymbol *>;

  MInstToMCSymbol LOHInstToLabel;

  bool shouldEmitWeakSwiftAsyncExtendedFramePointerFlags() const override {
    return ShouldEmitWeakSwiftAsyncExtendedFramePointerFlags;
  }
};

} // end anonymous namespace
void AArch64AsmPrinter::emitStartOfAsmFile(Module &M) {
  const Triple &TT = TM.getTargetTriple();

  if (TT.isOSBinFormatCOFF()) {
    // Emit an absolute @feat.00 symbol
    MCSymbol *S = MMI->getContext().getOrCreateSymbol(StringRef("@feat.00"));
    OutStreamer->beginCOFFSymbolDef(S);
    OutStreamer->emitCOFFSymbolStorageClass(COFF::IMAGE_SYM_CLASS_STATIC);
    OutStreamer->emitCOFFSymbolType(COFF::IMAGE_SYM_DTYPE_NULL);
    OutStreamer->endCOFFSymbolDef();
    int64_t Feat00Value = 0;

    if (M.getModuleFlag("cfguard")) {
      // Object is CFG-aware.
      Feat00Value |= COFF::Feat00Flags::GuardCF;
    }

    if (M.getModuleFlag("ehcontguard")) {
      // Object also has EHCont.
      Feat00Value |= COFF::Feat00Flags::GuardEHCont;
    }

    if (M.getModuleFlag("ms-kernel")) {
      // Object is compiled with /kernel.
      Feat00Value |= COFF::Feat00Flags::Kernel;
    }

    OutStreamer->emitSymbolAttribute(S, MCSA_Global);
    OutStreamer->emitAssignment(
        S, MCConstantExpr::create(Feat00Value, MMI->getContext()));
  }

  if (!TT.isOSBinFormatELF())
    return;

  // Assemble feature flags that may require creation of a note section.
  unsigned Flags = 0;
  if (const auto *BTE = mdconst::extract_or_null<ConstantInt>(
          M.getModuleFlag("branch-target-enforcement")))
    if (BTE->getZExtValue())
      Flags |= ELF::GNU_PROPERTY_AARCH64_FEATURE_1_BTI;

  if (const auto *Sign = mdconst::extract_or_null<ConstantInt>(
          M.getModuleFlag("sign-return-address")))
    if (Sign->getZExtValue())
      Flags |= ELF::GNU_PROPERTY_AARCH64_FEATURE_1_PAC;

  if (Flags == 0)
    return;

  // Emit a .note.gnu.property section with the flags.
  auto *TS =
      static_cast<AArch64TargetStreamer *>(OutStreamer->getTargetStreamer());
  TS->emitNoteSection(Flags);
}
void AArch64AsmPrinter::emitFunctionHeaderComment() {
  const AArch64FunctionInfo *FI = MF->getInfo<AArch64FunctionInfo>();
  std::optional<std::string> OutlinerString = FI->getOutliningStyle();
  if (OutlinerString != std::nullopt)
    OutStreamer->getCommentOS() << ' ' << OutlinerString;
}

void AArch64AsmPrinter::LowerPATCHABLE_FUNCTION_ENTER(const MachineInstr &MI) {
  const Function &F = MF->getFunction();
  if (F.hasFnAttribute("patchable-function-entry")) {
    unsigned Num;
    if (F.getFnAttribute("patchable-function-entry")
            .getValueAsString()
            .getAsInteger(10, Num))
      return;
    emitNops(Num);
    return;
  }

  emitSled(MI, SledKind::FUNCTION_ENTER);
}

void AArch64AsmPrinter::LowerPATCHABLE_FUNCTION_EXIT(const MachineInstr &MI) {
  emitSled(MI, SledKind::FUNCTION_EXIT);
}

void AArch64AsmPrinter::LowerPATCHABLE_TAIL_CALL(const MachineInstr &MI) {
  emitSled(MI, SledKind::TAIL_CALL);
}
void AArch64AsmPrinter::emitSled(const MachineInstr &MI, SledKind Kind) {
  static const int8_t NoopsInSledCount = 7;
  // We want to emit the following pattern:
  //
  // .Lxray_sled_N:
  //   ALIGN
  //   B #32
  //   ; 7 NOP instructions (28 bytes)
  // .tmpN
  //
  // We need the 28 bytes (7 instructions) because at runtime, we'd be patching
  // over the full 32 bytes (8 instructions) with the following pattern:
  //
  //   STP X0, X30, [SP, #-16]! ; push X0 and the link register to the stack
  //   LDR W0, #12 ; W0 := function ID
  //   LDR X16,#12 ; X16 := addr of __xray_FunctionEntry or __xray_FunctionExit
  //   BLR X16 ; call the tracing trampoline
  //   ;DATA: 32 bits of function ID
  //   ;DATA: lower 32 bits of the address of the trampoline
  //   ;DATA: higher 32 bits of the address of the trampoline
  //   LDP X0, X30, [SP], #16 ; pop X0 and the link register from the stack
  //
  OutStreamer->emitCodeAlignment(Align(4), &getSubtargetInfo());
  auto CurSled = OutContext.createTempSymbol("xray_sled_", true);
  OutStreamer->emitLabel(CurSled);
  auto Target = OutContext.createTempSymbol();

  // Emit "B #32" instruction, which jumps over the next 28 bytes.
  // The operand has to be the number of 4-byte instructions to jump over,
  // including the current instruction.
  EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::B).addImm(8));

  for (int8_t I = 0; I < NoopsInSledCount; I++)
    EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::HINT).addImm(0));

  OutStreamer->emitLabel(Target);
  recordSled(CurSled, MI, Kind, 2);
}
void AArch64AsmPrinter::LowerKCFI_CHECK(const MachineInstr &MI) {
  Register AddrReg = MI.getOperand(0).getReg();
  assert(std::next(MI.getIterator())->isCall() &&
         "KCFI_CHECK not followed by a call instruction");
  assert(std::next(MI.getIterator())->getOperand(0).getReg() == AddrReg &&
         "KCFI_CHECK call target doesn't match call operand");

  // Default to using the intra-procedure-call temporary registers for
  // comparing the hashes.
  unsigned ScratchRegs[] = {AArch64::W16, AArch64::W17};
  if (AddrReg == AArch64::XZR) {
    // Checking XZR makes no sense. Instead of emitting a load, zero
    // ScratchRegs[0] and use it for the ESR AddrIndex below.
    AddrReg = getXRegFromWReg(ScratchRegs[0]);
    EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::ORRXrs)
                                     .addReg(AddrReg)
                                     .addReg(AArch64::XZR)
                                     .addReg(AArch64::XZR)
                                     .addImm(0));
  } else {
    // If one of the scratch registers is used for the call target (e.g.
    // with AArch64::TCRETURNriBTI), we can clobber another caller-saved
    // temporary register instead (in this case, AArch64::W9) as the check
    // is immediately followed by the call instruction.
    for (auto &Reg : ScratchRegs) {
      if (Reg == getWRegFromXReg(AddrReg)) {
        Reg = AArch64::W9;
        break;
      }
    }
    assert(ScratchRegs[0] != AddrReg && ScratchRegs[1] != AddrReg &&
           "Invalid scratch registers for KCFI_CHECK");

    // Adjust the offset for patchable-function-prefix. This assumes that
    // patchable-function-prefix is the same for all functions.
    int64_t PrefixNops = 0;
    (void)MI.getMF()
        ->getFunction()
        .getFnAttribute("patchable-function-prefix")
        .getValueAsString()
        .getAsInteger(10, PrefixNops);

    // Load the target function type hash.
    EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::LDURWi)
                                     .addReg(ScratchRegs[0])
                                     .addReg(AddrReg)
                                     .addImm(-(PrefixNops * 4 + 4)));
  }

  // Load the expected type hash.
  const int64_t Type = MI.getOperand(1).getImm();
  EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::MOVKWi)
                                   .addReg(ScratchRegs[1])
                                   .addReg(ScratchRegs[1])
                                   .addImm(Type & 0xFFFF)
                                   .addImm(0));
  EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::MOVKWi)
                                   .addReg(ScratchRegs[1])
                                   .addReg(ScratchRegs[1])
                                   .addImm((Type >> 16) & 0xFFFF)
                                   .addImm(16));

  // Compare the hashes and trap if there's a mismatch.
  EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::SUBSWrs)
                                   .addReg(AArch64::WZR)
                                   .addReg(ScratchRegs[0])
                                   .addReg(ScratchRegs[1])
                                   .addImm(0));

  MCSymbol *Pass = OutContext.createTempSymbol();
  EmitToStreamer(*OutStreamer,
                 MCInstBuilder(AArch64::Bcc)
                     .addImm(AArch64CC::EQ)
                     .addExpr(MCSymbolRefExpr::create(Pass, OutContext)));

  // The base ESR is 0x8000 and the register information is encoded in bits
  // 0-9 as follows:
  // - 0-4: n, where the register Xn contains the target address
  // - 5-9: m, where the register Wm contains the expected type hash
  // Where n, m are in [0, 30].
  unsigned TypeIndex = ScratchRegs[1] - AArch64::W0;
  unsigned AddrIndex;
  switch (AddrReg) {
  default:
    AddrIndex = AddrReg - AArch64::X0;
    break;
  case AArch64::FP:
    AddrIndex = 29;
    break;
  case AArch64::LR:
    AddrIndex = 30;
    break;
  }

  assert(AddrIndex < 31 && TypeIndex < 31);

  unsigned ESR = 0x8000 | ((TypeIndex & 31) << 5) | (AddrIndex & 31);
  EmitToStreamer(*OutStreamer, MCInstBuilder(AArch64::BRK).addImm(ESR));
  OutStreamer->emitLabel(Pass);
}
void AArch64AsmPrinter::LowerHWASAN_CHECK_MEMACCESS(const MachineInstr &MI) {
  Register Reg = MI.getOperand(0).getReg();
  bool IsShort =
      MI.getOpcode() == AArch64::HWASAN_CHECK_MEMACCESS_SHORTGRANULES;
  uint32_t AccessInfo = MI.getOperand(1).getImm();
  MCSymbol *&Sym =
      HwasanMemaccessSymbols[HwasanMemaccessTuple(Reg, IsShort, AccessInfo)];
  if (!Sym) {
    // FIXME: Make this work on non-ELF.
    if (!TM.getTargetTriple().isOSBinFormatELF())
      report_fatal_error("llvm.hwasan.check.memaccess only supported on ELF");

    std::string SymName = "__hwasan_check_x" + utostr(Reg - AArch64::X0) + "_" +
                          utostr(AccessInfo);
    if (IsShort)
      SymName += "_short_v2";
    Sym = OutContext.getOrCreateSymbol(SymName);
  }
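  // Call the outlined tag-check routine; its body is emitted lazily, once per
  // (register, short-granule, access-info) combination, in
  // emitHwasanMemaccessSymbols() at the end of the module.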
  EmitToStreamer(*OutStreamer,
                 MCInstBuilder(AArch64::BL)
                     .addExpr(MCSymbolRefExpr::create(Sym, OutContext)));
}

void AArch64AsmPrinter::emitHwasanMemaccessSymbols(Module &M) {
  if (HwasanMemaccessSymbols.empty())
    return;

  const Triple &TT = TM.getTargetTriple();
  assert(TT.isOSBinFormatELF());
  std::unique_ptr<MCSubtargetInfo> STI(
      TM.getTarget().createMCSubtargetInfo(TT.str(), "", ""));
  assert(STI && "Unable to create subtarget info");

  MCSymbol *HwasanTagMismatchV1Sym =
      OutContext.getOrCreateSymbol("__hwasan_tag_mismatch");
  MCSymbol *HwasanTagMismatchV2Sym =
      OutContext.getOrCreateSymbol("__hwasan_tag_mismatch_v2");

  const MCSymbolRefExpr *HwasanTagMismatchV1Ref =
      MCSymbolRefExpr::create(HwasanTagMismatchV1Sym, OutContext);
  const MCSymbolRefExpr *HwasanTagMismatchV2Ref =
      MCSymbolRefExpr::create(HwasanTagMismatchV2Sym, OutContext);

  for (auto &P : HwasanMemaccessSymbols) {
    unsigned Reg = std::get<0>(P.first);
    bool IsShort = std::get<1>(P.first);
    uint32_t AccessInfo = std::get<2>(P.first);
    const MCSymbolRefExpr *HwasanTagMismatchRef =
        IsShort ? HwasanTagMismatchV2Ref : HwasanTagMismatchV1Ref;
    MCSymbol *Sym = P.second;

    bool HasMatchAllTag =
        (AccessInfo >> HWASanAccessInfo::HasMatchAllShift) & 1;
    uint8_t MatchAllTag =
        (AccessInfo >> HWASanAccessInfo::MatchAllShift) & 0xff;
    unsigned Size =
        1 << ((AccessInfo >> HWASanAccessInfo::AccessSizeShift) & 0xf);
    bool CompileKernel =
        (AccessInfo >> HWASanAccessInfo::CompileKernelShift) & 1;

    OutStreamer->switchSection(OutContext.getELFSection(
        ".text.hot", ELF::SHT_PROGBITS,
        ELF::SHF_EXECINSTR | ELF::SHF_ALLOC | ELF::SHF_GROUP, 0, Sym->getName(),
        /*IsComdat=*/true));

    OutStreamer->emitSymbolAttribute(Sym, MCSA_ELF_TypeFunction);
    OutStreamer->emitSymbolAttribute(Sym, MCSA_Weak);
    OutStreamer->emitSymbolAttribute(Sym, MCSA_Hidden);
    OutStreamer->emitLabel(Sym);
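    // X16 = bits [55:4] of the pointer: the untagged address shifted right by
    // 4, i.e. the index of the 16-byte granule with the tag byte stripped off.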
    OutStreamer->emitInstruction(MCInstBuilder(AArch64::SBFMXri)
                                     .addReg(AArch64::X16)
                                     .addReg(Reg)
                                     .addImm(4)
                                     .addImm(55),
                                 *STI);
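    // Load the shadow tag byte for the granule; the shadow base is expected in
    // X20 for the short-granule (v2) ABI and in X9 otherwise.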
    OutStreamer->emitInstruction(
        MCInstBuilder(AArch64::LDRBBroX)
            .addReg(AArch64::W16)
            .addReg(IsShort ? AArch64::X20 : AArch64::X9)
            .addReg(AArch64::X16)
            .addImm(0)
            .addImm(0),
        *STI);
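    // Compare the shadow tag with the pointer's tag (its top byte); on a
    // match, fall through to the return.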
    OutStreamer->emitInstruction(
        MCInstBuilder(AArch64::SUBSXrs)
            .addReg(AArch64::XZR)
            .addReg(AArch64::X16)
            .addReg(Reg)
            .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSR, 56)),
        *STI);
    MCSymbol *HandleMismatchOrPartialSym = OutContext.createTempSymbol();
    OutStreamer->emitInstruction(
        MCInstBuilder(AArch64::Bcc)
            .addImm(AArch64CC::NE)
            .addExpr(MCSymbolRefExpr::create(HandleMismatchOrPartialSym,
                                             OutContext)),
        *STI);
    MCSymbol *ReturnSym = OutContext.createTempSymbol();
    OutStreamer->emitLabel(ReturnSym);
    OutStreamer->emitInstruction(
        MCInstBuilder(AArch64::RET).addReg(AArch64::LR), *STI);
    OutStreamer->emitLabel(HandleMismatchOrPartialSym);

    if (HasMatchAllTag) {
      OutStreamer->emitInstruction(MCInstBuilder(AArch64::UBFMXri)
                                       .addReg(AArch64::X16)
                                       .addReg(Reg)
                                       .addImm(56)
                                       .addImm(63),
                                   *STI);
      OutStreamer->emitInstruction(MCInstBuilder(AArch64::SUBSXri)
                                       .addReg(AArch64::XZR)
                                       .addReg(AArch64::X16)
                                       .addImm(MatchAllTag)
                                       .addImm(0),
                                   *STI);
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::Bcc)
              .addImm(AArch64CC::EQ)
              .addExpr(MCSymbolRefExpr::create(ReturnSym, OutContext)),
          *STI);
    }

    if (IsShort) {
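      // Shadow values 1..15 denote short granules: only the first N bytes of
      // the 16-byte granule are addressable and the real tag is stored in the
      // granule's last byte. Check that the access stays within the valid
      // prefix, then compare the pointer tag against that stored tag.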
      OutStreamer->emitInstruction(MCInstBuilder(AArch64::SUBSWri)
                                       .addReg(AArch64::WZR)
                                       .addReg(AArch64::W16)
                                       .addImm(15)
                                       .addImm(0),
                                   *STI);
      MCSymbol *HandleMismatchSym = OutContext.createTempSymbol();
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::Bcc)
              .addImm(AArch64CC::HI)
              .addExpr(MCSymbolRefExpr::create(HandleMismatchSym, OutContext)),
          *STI);

      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::ANDXri)
              .addReg(AArch64::X17)
              .addReg(Reg)
              .addImm(AArch64_AM::encodeLogicalImmediate(0xf, 64)),
          *STI);
      if (Size != 1)
        OutStreamer->emitInstruction(MCInstBuilder(AArch64::ADDXri)
                                         .addReg(AArch64::X17)
                                         .addReg(AArch64::X17)
                                         .addImm(Size - 1)
                                         .addImm(0),
                                     *STI);
      OutStreamer->emitInstruction(MCInstBuilder(AArch64::SUBSWrs)
                                       .addReg(AArch64::WZR)
                                       .addReg(AArch64::W16)
                                       .addReg(AArch64::W17)
                                       .addImm(0),
                                   *STI);
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::Bcc)
              .addImm(AArch64CC::LS)
              .addExpr(MCSymbolRefExpr::create(HandleMismatchSym, OutContext)),
          *STI);

      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::ORRXri)
              .addReg(AArch64::X16)
              .addReg(Reg)
              .addImm(AArch64_AM::encodeLogicalImmediate(0xf, 64)),
          *STI);
      OutStreamer->emitInstruction(MCInstBuilder(AArch64::LDRBBui)
                                       .addReg(AArch64::W16)
                                       .addReg(AArch64::X16)
                                       .addImm(0),
                                   *STI);
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::SUBSXrs)
              .addReg(AArch64::XZR)
              .addReg(AArch64::X16)
              .addReg(Reg)
              .addImm(AArch64_AM::getShifterImm(AArch64_AM::LSR, 56)),
          *STI);
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::Bcc)
              .addImm(AArch64CC::EQ)
              .addExpr(MCSymbolRefExpr::create(ReturnSym, OutContext)),
          *STI);

      OutStreamer->emitLabel(HandleMismatchSym);
    }
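    // The tags really mismatch: spill X0/X1 and FP/LR, put the faulting
    // address in X0 and the access info in X1, then transfer to the runtime's
    // tag-mismatch handler.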
    OutStreamer->emitInstruction(MCInstBuilder(AArch64::STPXpre)
                                     .addReg(AArch64::SP)
                                     .addReg(AArch64::X0)
                                     .addReg(AArch64::X1)
                                     .addReg(AArch64::SP)
                                     .addImm(-32),
                                 *STI);
    OutStreamer->emitInstruction(MCInstBuilder(AArch64::STPXi)
                                     .addReg(AArch64::FP)
                                     .addReg(AArch64::LR)
                                     .addReg(AArch64::SP)
                                     .addImm(29),
                                 *STI);

    if (Reg != AArch64::X0)
      OutStreamer->emitInstruction(MCInstBuilder(AArch64::ORRXrs)
                                       .addReg(AArch64::X0)
                                       .addReg(AArch64::XZR)
                                       .addReg(Reg)
                                       .addImm(0),
                                   *STI);
    OutStreamer->emitInstruction(
        MCInstBuilder(AArch64::MOVZXi)
            .addReg(AArch64::X1)
            .addImm(AccessInfo & HWASanAccessInfo::RuntimeMask)
            .addImm(0),
        *STI);

    if (CompileKernel) {
      // The Linux kernel's dynamic loader doesn't support GOT relative
      // relocations, but it doesn't support late binding either, so just call
      // the function directly.
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::B).addExpr(HwasanTagMismatchRef), *STI);
    } else {
      // Intentionally load the GOT entry and branch to it, rather than possibly
      // late binding the function, which may clobber the registers before we
      // have a chance to save them.
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::ADRP)
              .addReg(AArch64::X16)
              .addExpr(AArch64MCExpr::create(
                  HwasanTagMismatchRef, AArch64MCExpr::VariantKind::VK_GOT_PAGE,
                  OutContext)),
          *STI);
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::LDRXui)
              .addReg(AArch64::X16)
              .addReg(AArch64::X16)
              .addExpr(AArch64MCExpr::create(
                  HwasanTagMismatchRef, AArch64MCExpr::VariantKind::VK_GOT_LO12,
                  OutContext)),
          *STI);
      OutStreamer->emitInstruction(
          MCInstBuilder(AArch64::BR).addReg(AArch64::X16), *STI);
    }
  }
}

void AArch64AsmPrinter::emitEndOfAsmFile(Module &M) {
  emitHwasanMemaccessSymbols(M);

  const Triple &TT = TM.getTargetTriple();
  if (TT.isOSBinFormatMachO()) {
    // Funny Darwin hack: This flag tells the linker that no global symbols
    // contain code that falls through to other global symbols (e.g. the obvious
    // implementation of multiple entry points). If this doesn't occur, the
    // linker can safely perform dead code stripping. Since LLVM never
    // generates code that does this, it is always safe to set.
    OutStreamer->emitAssemblerFlag(MCAF_SubsectionsViaSymbols);
  }

  // Emit stack and fault map information.
  FM.serializeToFaultMapSection();
}
void AArch64AsmPrinter::emitLOHs() {
  SmallVector<MCSymbol *, 3> MCArgs;

  for (const auto &D : AArch64FI->getLOHContainer()) {
    for (const MachineInstr *MI : D.getArgs()) {
      MInstToMCSymbol::iterator LabelIt = LOHInstToLabel.find(MI);
      assert(LabelIt != LOHInstToLabel.end() &&
             "Label hasn't been inserted for LOH related instruction");
      MCArgs.push_back(LabelIt->second);
    }
    OutStreamer->emitLOHDirective(D.getKind(), MCArgs);
    MCArgs.clear();
  }
}

void AArch64AsmPrinter::emitFunctionBodyEnd() {
  if (!AArch64FI->getLOHRelated().empty())
    emitLOHs();
}

/// GetCPISymbol - Return the symbol for the specified constant pool entry.
MCSymbol *AArch64AsmPrinter::GetCPISymbol(unsigned CPID) const {
  // Darwin uses a linker-private symbol name for constant-pools (to
  // avoid addends on the relocation?), ELF has no such concept and
  // uses a normal private symbol.
  if (!getDataLayout().getLinkerPrivateGlobalPrefix().empty())
    return OutContext.getOrCreateSymbol(
        Twine(getDataLayout().getLinkerPrivateGlobalPrefix()) + "CPI" +
        Twine(getFunctionNumber()) + "_" + Twine(CPID));

  return AsmPrinter::GetCPISymbol(CPID);
}

void AArch64AsmPrinter::printOperand(const MachineInstr *MI, unsigned OpNum,
                                     raw_ostream &O) {
  const MachineOperand &MO = MI->getOperand(OpNum);
  switch (MO.getType()) {
  default:
    llvm_unreachable("<unknown operand type>");
  case MachineOperand::MO_Register: {
    Register Reg = MO.getReg();
    assert(Reg.isPhysical());
    assert(!MO.getSubReg() && "Subregs should be eliminated!");
    O << AArch64InstPrinter::getRegisterName(Reg);
    break;
  }
  case MachineOperand::MO_Immediate: {
    O << MO.getImm();
    break;
  }
  case MachineOperand::MO_GlobalAddress: {
    PrintSymbolOperand(MO, O);
    break;
  }
  case MachineOperand::MO_BlockAddress: {
    MCSymbol *Sym = GetBlockAddressSymbol(MO.getBlockAddress());
    Sym->print(O, MAI);
    break;
  }
  }
}
bool AArch64AsmPrinter::printAsmMRegister(const MachineOperand &MO, char Mode,
                                          raw_ostream &O) {
  Register Reg = MO.getReg();
  switch (Mode) {
  default:
    return true; // Unknown mode.
  case 'w':
    Reg = getWRegFromXReg(Reg);
    break;
  case 'x':
    Reg = getXRegFromWReg(Reg);
    break;
  case 't':
    Reg = getXRegFromXRegTuple(Reg);
    break;
  }

  O << AArch64InstPrinter::getRegisterName(Reg);
  return false;
}

// Prints the register in MO using class RC using the offset in the
// new register class. This should not be used for cross class
// printing.
bool AArch64AsmPrinter::printAsmRegInClass(const MachineOperand &MO,
                                           const TargetRegisterClass *RC,
                                           unsigned AltName, raw_ostream &O) {
  assert(MO.isReg() && "Should only get here with a register!");
  const TargetRegisterInfo *RI = STI->getRegisterInfo();
  Register Reg = MO.getReg();
  unsigned RegToPrint = RC->getRegister(RI->getEncodingValue(Reg));
  if (!RI->regsOverlap(RegToPrint, Reg))
    return true;
  O << AArch64InstPrinter::getRegisterName(RegToPrint, AltName);
  return false;
}

bool AArch64AsmPrinter::PrintAsmOperand(const MachineInstr *MI, unsigned OpNum,
                                        const char *ExtraCode, raw_ostream &O) {
  const MachineOperand &MO = MI->getOperand(OpNum);

  // First try the generic code, which knows about modifiers like 'c' and 'n'.
  if (!AsmPrinter::PrintAsmOperand(MI, OpNum, ExtraCode, O))
    return false;

  // Does this asm operand have a single letter operand modifier?
  if (ExtraCode && ExtraCode[0]) {
    if (ExtraCode[1] != 0)
      return true; // Unknown modifier.

    switch (ExtraCode[0]) {
    default:
      return true; // Unknown modifier.
    case 'w': // Print W register
    case 'x': // Print X register
      if (MO.isReg())
        return printAsmMRegister(MO, ExtraCode[0], O);
      if (MO.isImm() && MO.getImm() == 0) {
        unsigned Reg = ExtraCode[0] == 'w' ? AArch64::WZR : AArch64::XZR;
        O << AArch64InstPrinter::getRegisterName(Reg);
        return false;
      }
      printOperand(MI, OpNum, O);
      return false;
    case 'b': // Print B register.
    case 'h': // Print H register.
    case 's': // Print S register.
    case 'd': // Print D register.
    case 'q': // Print Q register.
    case 'z': // Print Z register.
      if (MO.isReg()) {
        const TargetRegisterClass *RC;
        switch (ExtraCode[0]) {
        case 'b':
          RC = &AArch64::FPR8RegClass;
          break;
        case 'h':
          RC = &AArch64::FPR16RegClass;
          break;
        case 's':
          RC = &AArch64::FPR32RegClass;
          break;
        case 'd':
          RC = &AArch64::FPR64RegClass;
          break;
        case 'q':
          RC = &AArch64::FPR128RegClass;
          break;
        case 'z':
          RC = &AArch64::ZPRRegClass;
          break;
        default:
          return true;
        }
        return printAsmRegInClass(MO, RC, AArch64::NoRegAltName, O);
      }
      printOperand(MI, OpNum, O);
      return false;
    }
  }

  // According to ARM, we should emit x and v registers unless we have a
  // modifier.
  if (MO.isReg()) {
    Register Reg = MO.getReg();

    // If this is a w or x register, print an x register.
    if (AArch64::GPR32allRegClass.contains(Reg) ||
        AArch64::GPR64allRegClass.contains(Reg))
      return printAsmMRegister(MO, 'x', O);

    // If this is an x register tuple, print an x register.
    if (AArch64::GPR64x8ClassRegClass.contains(Reg))
      return printAsmMRegister(MO, 't', O);

    unsigned AltName = AArch64::NoRegAltName;
    const TargetRegisterClass *RegClass;
    if (AArch64::ZPRRegClass.contains(Reg)) {
      RegClass = &AArch64::ZPRRegClass;
    } else if (AArch64::PPRRegClass.contains(Reg)) {
      RegClass = &AArch64::PPRRegClass;
    } else {
      RegClass = &AArch64::FPR128RegClass;
      AltName = AArch64::vreg;
    }

    // If this is a b, h, s, d, or q register, print it as a v register.
    return printAsmRegInClass(MO, RegClass, AltName, O);
  }

  printOperand(MI, OpNum, O);
  return false;
}
bool AArch64AsmPrinter::PrintAsmMemoryOperand(const MachineInstr *MI,
                                              unsigned OpNum,
                                              const char *ExtraCode,
                                              raw_ostream &O) {
  if (ExtraCode && ExtraCode[0] && ExtraCode[0] != 'a')
    return true; // Unknown modifier.

  const MachineOperand &MO = MI->getOperand(OpNum);
  assert(MO.isReg() && "unexpected inline asm memory operand");
  O << "[" << AArch64InstPrinter::getRegisterName(MO.getReg()) << "]";
  return false;
}

void AArch64AsmPrinter::PrintDebugValueComment(const MachineInstr *MI,
                                               raw_ostream &OS) {
  unsigned NOps = MI->getNumOperands();
  assert(NOps == 4);
  OS << '\t' << MAI->getCommentString() << "DEBUG_VALUE: ";
  // cast away const; DIetc do not take const operands for some reason.
  OS << MI->getDebugVariable()->getName();
  OS << " <- ";
  // Frame address. Currently handles register +- offset only.
  assert(MI->isIndirectDebugValue());
  OS << '[';
  for (unsigned I = 0, E = std::distance(MI->debug_operands().begin(),
                                         MI->debug_operands().end());
       I < E; ++I) {
    if (I != 0)
      OS << ", ";
    printOperand(MI, I, OS);
  }
  OS << ']';
  OS << "+";
  printOperand(MI, NOps - 2, OS);
}
void AArch64AsmPrinter::emitJumpTableInfo() {
  const MachineJumpTableInfo *MJTI = MF->getJumpTableInfo();
  if (!MJTI) return;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty()) return;

  const TargetLoweringObjectFile &TLOF = getObjFileLowering();
  MCSection *ReadOnlySec = TLOF.getSectionForJumpTable(MF->getFunction(), TM);
  OutStreamer->switchSection(ReadOnlySec);

  auto AFI = MF->getInfo<AArch64FunctionInfo>();
  for (unsigned JTI = 0, e = JT.size(); JTI != e; ++JTI) {
    const std::vector<MachineBasicBlock*> &JTBBs = JT[JTI].MBBs;

    // If this jump table was deleted, ignore it.
    if (JTBBs.empty()) continue;

    unsigned Size = AFI->getJumpTableEntrySize(JTI);
    emitAlignment(Align(Size));
    OutStreamer->emitLabel(GetJTISymbol(JTI));

    const MCSymbol *BaseSym = AArch64FI->getJumpTableEntryPCRelSymbol(JTI);
    const MCExpr *Base = MCSymbolRefExpr::create(BaseSym, OutContext);

    for (auto *JTBB : JTBBs) {
      const MCExpr *Value =
          MCSymbolRefExpr::create(JTBB->getSymbol(), OutContext);

      // Each entry is:
      //     .byte/.hword (LBB - Lbase)>>2
      // or plain:
      //     .word LBB - Lbase
      Value = MCBinaryExpr::createSub(Value, Base, OutContext);
      if (Size != 4)
        Value = MCBinaryExpr::createLShr(
            Value, MCConstantExpr::create(2, OutContext), OutContext);

      OutStreamer->emitValue(Value, Size);
    }
  }
}

void AArch64AsmPrinter::emitFunctionEntryLabel() {
  if (MF->getFunction().getCallingConv() == CallingConv::AArch64_VectorCall ||
      MF->getFunction().getCallingConv() ==
          CallingConv::AArch64_SVE_VectorCall ||
      MF->getInfo<AArch64FunctionInfo>()->isSVECC()) {
    auto *TS =
        static_cast<AArch64TargetStreamer *>(OutStreamer->getTargetStreamer());
    TS->emitDirectiveVariantPCS(CurrentFnSym);
  }

  return AsmPrinter::emitFunctionEntryLabel();
}
/// Small jump tables contain an unsigned byte or half, representing the offset
/// from the lowest-addressed possible destination to the desired basic
/// block. Since all instructions are 4-byte aligned, this is further compressed
/// by counting in instructions rather than bytes (i.e. divided by 4). So, to
/// materialize the correct destination we need:
///
///     adr xDest, .LBB0_0
///     ldrb wScratch, [xTable, xEntry]   (with "lsl #1" for ldrh).
///     add xDest, xDest, xScratch        (with "lsl #2" for smaller entries)
void AArch64AsmPrinter::LowerJumpTableDest(llvm::MCStreamer &OutStreamer,
                                           const llvm::MachineInstr &MI) {
  Register DestReg = MI.getOperand(0).getReg();
  Register ScratchReg = MI.getOperand(1).getReg();
  Register ScratchRegW =
      STI->getRegisterInfo()->getSubReg(ScratchReg, AArch64::sub_32);
  Register TableReg = MI.getOperand(2).getReg();
  Register EntryReg = MI.getOperand(3).getReg();
  int JTIdx = MI.getOperand(4).getIndex();
  int Size = AArch64FI->getJumpTableEntrySize(JTIdx);

  // This has to be first because the compression pass based its reachability
  // calculations on the start of the JumpTableDest instruction.
  auto Label =
      MF->getInfo<AArch64FunctionInfo>()->getJumpTableEntryPCRelSymbol(JTIdx);

  // If we don't already have a symbol to use as the base, use the ADR
  // instruction itself.
  if (!Label) {
    Label = MF->getContext().createTempSymbol();
    AArch64FI->setJumpTableEntryInfo(JTIdx, Size, Label);
    OutStreamer.emitLabel(Label);
  }

  auto LabelExpr = MCSymbolRefExpr::create(Label, MF->getContext());
  EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::ADR)
                                  .addReg(DestReg)
                                  .addExpr(LabelExpr));

  // Load the number of instruction-steps to offset from the label.
  unsigned LdrOpcode;
  switch (Size) {
  case 1: LdrOpcode = AArch64::LDRBBroX; break;
  case 2: LdrOpcode = AArch64::LDRHHroX; break;
  case 4: LdrOpcode = AArch64::LDRSWroX; break;
  default:
    llvm_unreachable("Unknown jump table size");
  }

  EmitToStreamer(OutStreamer, MCInstBuilder(LdrOpcode)
                                  .addReg(Size == 4 ? ScratchReg : ScratchRegW)
                                  .addReg(TableReg)
                                  .addReg(EntryReg)
                                  .addImm(0)
                                  .addImm(Size == 1 ? 0 : 1));

  // Add to the already materialized base label address, multiplying by 4 if
  // compressed.
  EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::ADDXrs)
                                  .addReg(DestReg)
                                  .addReg(DestReg)
                                  .addReg(ScratchReg)
                                  .addImm(Size == 4 ? 0 : 2));
}
void AArch64AsmPrinter::LowerMOPS(llvm::MCStreamer &OutStreamer,
                                  const llvm::MachineInstr &MI) {
  unsigned Opcode = MI.getOpcode();
  assert(STI->hasMOPS());
  assert(STI->hasMTE() || Opcode != AArch64::MOPSMemorySetTaggingPseudo);

  const auto Ops = [Opcode]() -> std::array<unsigned, 3> {
    if (Opcode == AArch64::MOPSMemoryCopyPseudo)
      return {AArch64::CPYFP, AArch64::CPYFM, AArch64::CPYFE};
    if (Opcode == AArch64::MOPSMemoryMovePseudo)
      return {AArch64::CPYP, AArch64::CPYM, AArch64::CPYE};
    if (Opcode == AArch64::MOPSMemorySetPseudo)
      return {AArch64::SETP, AArch64::SETM, AArch64::SETE};
    if (Opcode == AArch64::MOPSMemorySetTaggingPseudo)
      return {AArch64::SETGP, AArch64::SETGM, AArch64::MOPSSETGE};
    llvm_unreachable("Unhandled memory operation pseudo");
  }();

  const bool IsSet = Opcode == AArch64::MOPSMemorySetPseudo ||
                     Opcode == AArch64::MOPSMemorySetTaggingPseudo;
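  // Expand the pseudo into the architectural prologue/main/epilogue triple
  // (e.g. CPYFP/CPYFM/CPYFE); all three instructions share the same operands.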
  for (auto Op : Ops) {
    int i = 0;
    auto MCIB = MCInstBuilder(Op);
    // Destination registers
    MCIB.addReg(MI.getOperand(i++).getReg());
    MCIB.addReg(MI.getOperand(i++).getReg());
    if (!IsSet)
      MCIB.addReg(MI.getOperand(i++).getReg());
    // Input registers
    MCIB.addReg(MI.getOperand(i++).getReg());
    MCIB.addReg(MI.getOperand(i++).getReg());
    MCIB.addReg(MI.getOperand(i++).getReg());

    EmitToStreamer(OutStreamer, MCIB);
  }
}
void AArch64AsmPrinter::LowerSTACKMAP(MCStreamer &OutStreamer, StackMaps &SM,
                                      const MachineInstr &MI) {
  unsigned NumNOPBytes = StackMapOpers(&MI).getNumPatchBytes();

  auto &Ctx = OutStreamer.getContext();
  MCSymbol *MILabel = Ctx.createTempSymbol();
  OutStreamer.emitLabel(MILabel);

  SM.recordStackMap(*MILabel, MI);
  assert(NumNOPBytes % 4 == 0 && "Invalid number of NOP bytes requested!");

  // Scan ahead to trim the shadow.
  const MachineBasicBlock &MBB = *MI.getParent();
  MachineBasicBlock::const_iterator MII(MI);
  ++MII;
  while (NumNOPBytes > 0) {
    if (MII == MBB.end() || MII->isCall() ||
        MII->getOpcode() == AArch64::DBG_VALUE ||
        MII->getOpcode() == TargetOpcode::PATCHPOINT ||
        MII->getOpcode() == TargetOpcode::STACKMAP)
      break;
    ++MII;
    NumNOPBytes -= 4;
  }

  // Emit nops.
  for (unsigned i = 0; i < NumNOPBytes; i += 4)
    EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::HINT).addImm(0));
}

// Lower a patchpoint of the form:
// [<def>], <id>, <numBytes>, <target>, <numArgs>
void AArch64AsmPrinter::LowerPATCHPOINT(MCStreamer &OutStreamer, StackMaps &SM,
                                        const MachineInstr &MI) {
  auto &Ctx = OutStreamer.getContext();
  MCSymbol *MILabel = Ctx.createTempSymbol();
  OutStreamer.emitLabel(MILabel);
  SM.recordPatchPoint(*MILabel, MI);

  PatchPointOpers Opers(&MI);

  int64_t CallTarget = Opers.getCallTarget().getImm();
  unsigned EncodedBytes = 0;
  if (CallTarget) {
    assert((CallTarget & 0xFFFFFFFFFFFF) == CallTarget &&
           "High 16 bits of call target should be zero.");
    Register ScratchReg = MI.getOperand(Opers.getNextScratchIdx()).getReg();
    EncodedBytes = 16;
    // Materialize the jump address:
    EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::MOVZXi)
                                    .addReg(ScratchReg)
                                    .addImm((CallTarget >> 32) & 0xFFFF)
                                    .addImm(32));
    EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::MOVKXi)
                                    .addReg(ScratchReg)
                                    .addReg(ScratchReg)
                                    .addImm((CallTarget >> 16) & 0xFFFF)
                                    .addImm(16));
    EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::MOVKXi)
                                    .addReg(ScratchReg)
                                    .addReg(ScratchReg)
                                    .addImm(CallTarget & 0xFFFF)
                                    .addImm(0));
    EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::BLR).addReg(ScratchReg));
  }
  // Emit padding.
  unsigned NumBytes = Opers.getNumPatchBytes();
  assert(NumBytes >= EncodedBytes &&
         "Patchpoint can't request size less than the length of a call.");
  assert((NumBytes - EncodedBytes) % 4 == 0 &&
         "Invalid number of NOP bytes requested!");
  for (unsigned i = EncodedBytes; i < NumBytes; i += 4)
    EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::HINT).addImm(0));
}
void AArch64AsmPrinter::LowerSTATEPOINT(MCStreamer &OutStreamer, StackMaps &SM,
                                        const MachineInstr &MI) {
  StatepointOpers SOpers(&MI);
  if (unsigned PatchBytes = SOpers.getNumPatchBytes()) {
    assert(PatchBytes % 4 == 0 && "Invalid number of NOP bytes requested!");
    for (unsigned i = 0; i < PatchBytes; i += 4)
      EmitToStreamer(OutStreamer, MCInstBuilder(AArch64::HINT).addImm(0));
  } else {
    // Lower call target and choose correct opcode
    const MachineOperand &CallTarget = SOpers.getCallTarget();
    MCOperand CallTargetMCOp;
    unsigned CallOpcode;
    switch (CallTarget.getType()) {
    case MachineOperand::MO_GlobalAddress:
    case MachineOperand::MO_ExternalSymbol:
      MCInstLowering.lowerOperand(CallTarget, CallTargetMCOp);
      CallOpcode = AArch64::BL;
      break;
    case MachineOperand::MO_Immediate:
      CallTargetMCOp = MCOperand::createImm(CallTarget.getImm());
      CallOpcode = AArch64::BL;
      break;
    case MachineOperand::MO_Register:
      CallTargetMCOp = MCOperand::createReg(CallTarget.getReg());
      CallOpcode = AArch64::BLR;
      break;
    default:
      llvm_unreachable("Unsupported operand type in statepoint call target");
      break;
    }

    EmitToStreamer(OutStreamer,
                   MCInstBuilder(CallOpcode).addOperand(CallTargetMCOp));
  }

  auto &Ctx = OutStreamer.getContext();
  MCSymbol *MILabel = Ctx.createTempSymbol();
  OutStreamer.emitLabel(MILabel);
  SM.recordStatepoint(*MILabel, MI);
}
void AArch64AsmPrinter::LowerFAULTING_OP(const MachineInstr &FaultingMI) {
  // FAULTING_LOAD_OP <def>, <faulting type>, <MBB handler>,
  //                  <opcode>, <operands>
  Register DefRegister = FaultingMI.getOperand(0).getReg();
  FaultMaps::FaultKind FK =
      static_cast<FaultMaps::FaultKind>(FaultingMI.getOperand(1).getImm());
  MCSymbol *HandlerLabel = FaultingMI.getOperand(2).getMBB()->getSymbol();
  unsigned Opcode = FaultingMI.getOperand(3).getImm();
  unsigned OperandsBeginIdx = 4;

  auto &Ctx = OutStreamer->getContext();
  MCSymbol *FaultingLabel = Ctx.createTempSymbol();
  OutStreamer->emitLabel(FaultingLabel);

  assert(FK < FaultMaps::FaultKindMax && "Invalid Faulting Kind!");
  FM.recordFaultingOp(FK, FaultingLabel, HandlerLabel);

  MCInst MI;
  MI.setOpcode(Opcode);

  if (DefRegister != (Register)0)
    MI.addOperand(MCOperand::createReg(DefRegister));

  for (const MachineOperand &MO :
       llvm::drop_begin(FaultingMI.operands(), OperandsBeginIdx)) {
    MCOperand Dest;
    lowerOperand(MO, Dest);
    MI.addOperand(Dest);
  }

  OutStreamer->AddComment("on-fault: " + HandlerLabel->getName());
  OutStreamer->emitInstruction(MI, getSubtargetInfo());
}
void AArch64AsmPrinter::emitFMov0(const MachineInstr &MI) {
  Register DestReg = MI.getOperand(0).getReg();
  if (STI->hasZeroCycleZeroingFP() && !STI->hasZeroCycleZeroingFPWorkaround() &&
      STI->hasNEON()) {
    // Convert H/S register to corresponding D register
    if (AArch64::H0 <= DestReg && DestReg <= AArch64::H31)
      DestReg = AArch64::D0 + (DestReg - AArch64::H0);
    else if (AArch64::S0 <= DestReg && DestReg <= AArch64::S31)
      DestReg = AArch64::D0 + (DestReg - AArch64::S0);
    else
      assert(AArch64::D0 <= DestReg && DestReg <= AArch64::D31);

    MCInst MOVI;
    MOVI.setOpcode(AArch64::MOVID);
    MOVI.addOperand(MCOperand::createReg(DestReg));
    MOVI.addOperand(MCOperand::createImm(0));
    EmitToStreamer(*OutStreamer, MOVI);
  } else {
    MCInst FMov;
    switch (MI.getOpcode()) {
    default: llvm_unreachable("Unexpected opcode");
    case AArch64::FMOVH0:
      FMov.setOpcode(AArch64::FMOVWHr);
      FMov.addOperand(MCOperand::createReg(DestReg));
      FMov.addOperand(MCOperand::createReg(AArch64::WZR));
      break;
    case AArch64::FMOVS0:
      FMov.setOpcode(AArch64::FMOVWSr);
      FMov.addOperand(MCOperand::createReg(DestReg));
      FMov.addOperand(MCOperand::createReg(AArch64::WZR));
      break;
    case AArch64::FMOVD0:
      FMov.setOpcode(AArch64::FMOVXDr);
      FMov.addOperand(MCOperand::createReg(DestReg));
      FMov.addOperand(MCOperand::createReg(AArch64::XZR));
      break;
    }
    EmitToStreamer(*OutStreamer, FMov);
  }
}

// Simple pseudo-instructions have their lowering (with expansion to real
// instructions) auto-generated.
#include "AArch64GenMCPseudoLowering.inc"
void AArch64AsmPrinter::emitInstruction(const MachineInstr *MI) {
  AArch64_MC::verifyInstructionPredicates(MI->getOpcode(), STI->getFeatureBits());

  // Do any auto-generated pseudo lowerings.
  if (emitPseudoExpansionLowering(*OutStreamer, MI))
    return;

  if (MI->getOpcode() == AArch64::ADRP) {
    for (auto &Opd : MI->operands()) {
      if (Opd.isSymbol() && StringRef(Opd.getSymbolName()) ==
                                "swift_async_extendedFramePointerFlags") {
        ShouldEmitWeakSwiftAsyncExtendedFramePointerFlags = true;
      }
    }
  }

  if (AArch64FI->getLOHRelated().count(MI)) {
    // Generate a label for LOH related instruction
    MCSymbol *LOHLabel = createTempSymbol("loh");
    // Associate the instruction with the label
    LOHInstToLabel[MI] = LOHLabel;
    OutStreamer->emitLabel(LOHLabel);
  }

  AArch64TargetStreamer *TS =
      static_cast<AArch64TargetStreamer *>(OutStreamer->getTargetStreamer());
  // Do any manual lowerings.
  switch (MI->getOpcode()) {
  default:
    break;
  case AArch64::HINT: {
    // CurrentPatchableFunctionEntrySym can be CurrentFnBegin only for
    // -fpatchable-function-entry=N,0. The entry MBB is guaranteed to be
    // non-empty. If MI is the initial BTI, place the
    // __patchable_function_entries label after BTI.
    if (CurrentPatchableFunctionEntrySym &&
        CurrentPatchableFunctionEntrySym == CurrentFnBegin &&
        MI == &MF->front().front()) {
      int64_t Imm = MI->getOperand(0).getImm();
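      // BTI C/J/JC are HINT #34/#36/#38: bit 5 set plus a non-zero target
      // kind in bits 1-2.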
      if ((Imm & 32) && (Imm & 6)) {
        MCInst Inst;
        MCInstLowering.Lower(MI, Inst);
        EmitToStreamer(*OutStreamer, Inst);
        CurrentPatchableFunctionEntrySym = createTempSymbol("patch");
        OutStreamer->emitLabel(CurrentPatchableFunctionEntrySym);
        return;
      }
    }
    break;
  }
  case AArch64::MOVMCSym: {
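    // Materialize the symbol's absolute address with a MOVZ of bits 31:16
    // followed by a MOVK of bits 15:0.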
  1166. Register DestReg = MI->getOperand(0).getReg();
  1167. const MachineOperand &MO_Sym = MI->getOperand(1);
  1168. MachineOperand Hi_MOSym(MO_Sym), Lo_MOSym(MO_Sym);
  1169. MCOperand Hi_MCSym, Lo_MCSym;
  1170. Hi_MOSym.setTargetFlags(AArch64II::MO_G1 | AArch64II::MO_S);
  1171. Lo_MOSym.setTargetFlags(AArch64II::MO_G0 | AArch64II::MO_NC);
  1172. MCInstLowering.lowerOperand(Hi_MOSym, Hi_MCSym);
  1173. MCInstLowering.lowerOperand(Lo_MOSym, Lo_MCSym);
  1174. MCInst MovZ;
  1175. MovZ.setOpcode(AArch64::MOVZXi);
  1176. MovZ.addOperand(MCOperand::createReg(DestReg));
  1177. MovZ.addOperand(Hi_MCSym);
  1178. MovZ.addOperand(MCOperand::createImm(16));
  1179. EmitToStreamer(*OutStreamer, MovZ);
  1180. MCInst MovK;
  1181. MovK.setOpcode(AArch64::MOVKXi);
  1182. MovK.addOperand(MCOperand::createReg(DestReg));
  1183. MovK.addOperand(MCOperand::createReg(DestReg));
  1184. MovK.addOperand(Lo_MCSym);
  1185. MovK.addOperand(MCOperand::createImm(0));
  1186. EmitToStreamer(*OutStreamer, MovK);
  1187. return;
  1188. }
  1189. case AArch64::MOVIv2d_ns:
  1190. // If the target has <rdar://problem/16473581>, lower this
  1191. // instruction to movi.16b instead.
  1192. if (STI->hasZeroCycleZeroingFPWorkaround() &&
  1193. MI->getOperand(1).getImm() == 0) {
  1194. MCInst TmpInst;
  1195. TmpInst.setOpcode(AArch64::MOVIv16b_ns);
  1196. TmpInst.addOperand(MCOperand::createReg(MI->getOperand(0).getReg()));
  1197. TmpInst.addOperand(MCOperand::createImm(MI->getOperand(1).getImm()));
  1198. EmitToStreamer(*OutStreamer, TmpInst);
  1199. return;
  1200. }
  1201. break;
  1202. case AArch64::DBG_VALUE:
  1203. case AArch64::DBG_VALUE_LIST:
  1204. if (isVerbose() && OutStreamer->hasRawTextSupport()) {
  1205. SmallString<128> TmpStr;
  1206. raw_svector_ostream OS(TmpStr);
  1207. PrintDebugValueComment(MI, OS);
  1208. OutStreamer->emitRawText(StringRef(OS.str()));
  1209. }
  1210. return;
  case AArch64::EMITBKEY: {
    ExceptionHandling ExceptionHandlingType = MAI->getExceptionHandlingType();
    if (ExceptionHandlingType != ExceptionHandling::DwarfCFI &&
        ExceptionHandlingType != ExceptionHandling::ARM)
      return;

    if (getFunctionCFISectionType(*MF) == CFISection::None)
      return;

    OutStreamer->emitCFIBKeyFrame();
    return;
  }
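  // EMITMTETAGGED likewise emits no code and requests the
  // .cfi_mte_tagged_frame directive for MTE-tagged stack frames.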
  case AArch64::EMITMTETAGGED: {
    ExceptionHandling ExceptionHandlingType = MAI->getExceptionHandlingType();
    if (ExceptionHandlingType != ExceptionHandling::DwarfCFI &&
        ExceptionHandlingType != ExceptionHandling::ARM)
      return;

    if (getFunctionCFISectionType(*MF) != CFISection::None)
      OutStreamer->emitCFIMTETaggedFrame();
    return;
  }

  // Tail calls use pseudo instructions so they have the proper code-gen
  // attributes (isCall, isReturn, etc.). We lower them to the real
  // instruction here.
  case AArch64::TCRETURNri:
  case AArch64::TCRETURNriBTI:
  case AArch64::TCRETURNriALL: {
    MCInst TmpInst;
    TmpInst.setOpcode(AArch64::BR);
    TmpInst.addOperand(MCOperand::createReg(MI->getOperand(0).getReg()));
    EmitToStreamer(*OutStreamer, TmpInst);
    return;
  }
  case AArch64::TCRETURNdi: {
    MCOperand Dest;
    MCInstLowering.lowerOperand(MI->getOperand(0), Dest);
    MCInst TmpInst;
    TmpInst.setOpcode(AArch64::B);
    TmpInst.addOperand(Dest);
    EmitToStreamer(*OutStreamer, TmpInst);
    return;
  }
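  // Speculation-barrier pseudos end a basic block with an architectural
  // barrier so execution cannot speculate past it: SB where the extension is
  // available, otherwise DSB SYS followed by ISB.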
  case AArch64::SpeculationBarrierISBDSBEndBB: {
    // Print DSB SYS + ISB
    MCInst TmpInstDSB;
    TmpInstDSB.setOpcode(AArch64::DSB);
    TmpInstDSB.addOperand(MCOperand::createImm(0xf));
    EmitToStreamer(*OutStreamer, TmpInstDSB);
    MCInst TmpInstISB;
    TmpInstISB.setOpcode(AArch64::ISB);
    TmpInstISB.addOperand(MCOperand::createImm(0xf));
    EmitToStreamer(*OutStreamer, TmpInstISB);
    return;
  }
  case AArch64::SpeculationBarrierSBEndBB: {
    // Print SB
    MCInst TmpInstSB;
    TmpInstSB.setOpcode(AArch64::SB);
    EmitToStreamer(*OutStreamer, TmpInstSB);
    return;
  }
  case AArch64::TLSDESC_CALLSEQ: {
    /// lower this to:
    ///    adrp  x0, :tlsdesc:var
    ///    ldr   x1, [x0, #:tlsdesc_lo12:var]
    ///    add   x0, x0, #:tlsdesc_lo12:var
    ///    .tlsdesccall var
    ///    blr   x1
    ///    (TPIDR_EL0 offset now in x0)
    const MachineOperand &MO_Sym = MI->getOperand(0);
    MachineOperand MO_TLSDESC_LO12(MO_Sym), MO_TLSDESC(MO_Sym);
    MCOperand Sym, SymTLSDescLo12, SymTLSDesc;
    MO_TLSDESC_LO12.setTargetFlags(AArch64II::MO_TLS | AArch64II::MO_PAGEOFF);
    MO_TLSDESC.setTargetFlags(AArch64II::MO_TLS | AArch64II::MO_PAGE);
    MCInstLowering.lowerOperand(MO_Sym, Sym);
    MCInstLowering.lowerOperand(MO_TLSDESC_LO12, SymTLSDescLo12);
    MCInstLowering.lowerOperand(MO_TLSDESC, SymTLSDesc);

    MCInst Adrp;
    Adrp.setOpcode(AArch64::ADRP);
    Adrp.addOperand(MCOperand::createReg(AArch64::X0));
    Adrp.addOperand(SymTLSDesc);
    EmitToStreamer(*OutStreamer, Adrp);

    MCInst Ldr;
    if (STI->isTargetILP32()) {
      Ldr.setOpcode(AArch64::LDRWui);
      Ldr.addOperand(MCOperand::createReg(AArch64::W1));
    } else {
      Ldr.setOpcode(AArch64::LDRXui);
      Ldr.addOperand(MCOperand::createReg(AArch64::X1));
    }
    Ldr.addOperand(MCOperand::createReg(AArch64::X0));
    Ldr.addOperand(SymTLSDescLo12);
    Ldr.addOperand(MCOperand::createImm(0));
    EmitToStreamer(*OutStreamer, Ldr);

    MCInst Add;
    if (STI->isTargetILP32()) {
      Add.setOpcode(AArch64::ADDWri);
      Add.addOperand(MCOperand::createReg(AArch64::W0));
      Add.addOperand(MCOperand::createReg(AArch64::W0));
    } else {
      Add.setOpcode(AArch64::ADDXri);
      Add.addOperand(MCOperand::createReg(AArch64::X0));
      Add.addOperand(MCOperand::createReg(AArch64::X0));
    }
    Add.addOperand(SymTLSDescLo12);
    Add.addOperand(MCOperand::createImm(AArch64_AM::getShiftValue(0)));
    EmitToStreamer(*OutStreamer, Add);

    // Emit a relocation annotation. This expands to no code, but requests
    // that the following instruction get an R_AARCH64_TLSDESC_CALL
    // relocation.
    MCInst TLSDescCall;
    TLSDescCall.setOpcode(AArch64::TLSDESCCALL);
    TLSDescCall.addOperand(Sym);
    EmitToStreamer(*OutStreamer, TLSDescCall);

    MCInst Blr;
    Blr.setOpcode(AArch64::BLR);
    Blr.addOperand(MCOperand::createReg(AArch64::X1));
    EmitToStreamer(*OutStreamer, Blr);

    return;
  }
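  // Compressed jump-table dispatch pseudos (32/16/8-bit entries); the table
  // load and destination-address computation are expanded in
  // LowerJumpTableDest.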
  case AArch64::JumpTableDest32:
  case AArch64::JumpTableDest16:
  case AArch64::JumpTableDest8:
    LowerJumpTableDest(*OutStreamer, *MI);
    return;
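  // Zeroing FP moves; emitFMov0 picks the cheapest zeroing idiom for the
  // subtarget (typically a zeroing movi, or an fmov from wzr/xzr).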
  case AArch64::FMOVH0:
  case AArch64::FMOVS0:
  case AArch64::FMOVD0:
    emitFMov0(*MI);
    return;
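  // FEAT_MOPS memory copy/move/set pseudos; LowerMOPS expands each one into
  // the architectural prologue/main/epilogue instruction triple.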
  case AArch64::MOPSMemoryCopyPseudo:
  case AArch64::MOPSMemoryMovePseudo:
  case AArch64::MOPSMemorySetPseudo:
  case AArch64::MOPSMemorySetTaggingPseudo:
    LowerMOPS(*OutStreamer, *MI);
    return;
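  // Stackmap, patchpoint and statepoint pseudos are lowered through the
  // shared StackMaps machinery (SM).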
  case TargetOpcode::STACKMAP:
    return LowerSTACKMAP(*OutStreamer, SM, *MI);
  case TargetOpcode::PATCHPOINT:
    return LowerPATCHPOINT(*OutStreamer, SM, *MI);
  case TargetOpcode::STATEPOINT:
    return LowerSTATEPOINT(*OutStreamer, SM, *MI);
  case TargetOpcode::FAULTING_OP:
    return LowerFAULTING_OP(*MI);
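  // XRay and -fpatchable-function-entry instrumentation points.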
  case TargetOpcode::PATCHABLE_FUNCTION_ENTER:
    LowerPATCHABLE_FUNCTION_ENTER(*MI);
    return;
  case TargetOpcode::PATCHABLE_FUNCTION_EXIT:
    LowerPATCHABLE_FUNCTION_EXIT(*MI);
    return;
  case TargetOpcode::PATCHABLE_TAIL_CALL:
    LowerPATCHABLE_TAIL_CALL(*MI);
    return;
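  // KCFI indirect-call type check, emitted when the kcfi sanitizer is
  // enabled.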
  case AArch64::KCFI_CHECK:
    LowerKCFI_CHECK(*MI);
    return;
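  // HWASan tag-check pseudos, lowered to calls into outlined check routines.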
  case AArch64::HWASAN_CHECK_MEMACCESS:
  case AArch64::HWASAN_CHECK_MEMACCESS_SHORTGRANULES:
    LowerHWASAN_CHECK_MEMACCESS(*MI);
    return;
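  // The SEH_* pseudos below carry Windows ARM64 unwind information; each one
  // is forwarded to the corresponding .seh_* directive on the target
  // streamer.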
  case AArch64::SEH_StackAlloc:
    TS->emitARM64WinCFIAllocStack(MI->getOperand(0).getImm());
    return;
  case AArch64::SEH_SaveFPLR:
    TS->emitARM64WinCFISaveFPLR(MI->getOperand(0).getImm());
    return;
  case AArch64::SEH_SaveFPLR_X:
    assert(MI->getOperand(0).getImm() < 0 &&
           "Pre increment SEH opcode must have a negative offset");
    TS->emitARM64WinCFISaveFPLRX(-MI->getOperand(0).getImm());
    return;
  case AArch64::SEH_SaveReg:
    TS->emitARM64WinCFISaveReg(MI->getOperand(0).getImm(),
                               MI->getOperand(1).getImm());
    return;
  case AArch64::SEH_SaveReg_X:
    assert(MI->getOperand(1).getImm() < 0 &&
           "Pre increment SEH opcode must have a negative offset");
    TS->emitARM64WinCFISaveRegX(MI->getOperand(0).getImm(),
                                -MI->getOperand(1).getImm());
    return;
  case AArch64::SEH_SaveRegP:
    if (MI->getOperand(1).getImm() == 30 && MI->getOperand(0).getImm() >= 19 &&
        MI->getOperand(0).getImm() <= 28) {
      assert((MI->getOperand(0).getImm() - 19) % 2 == 0 &&
             "Register paired with LR must be odd");
      TS->emitARM64WinCFISaveLRPair(MI->getOperand(0).getImm(),
                                    MI->getOperand(2).getImm());
      return;
    }
    assert((MI->getOperand(1).getImm() - MI->getOperand(0).getImm() == 1) &&
           "Non-consecutive registers not allowed for save_regp");
    TS->emitARM64WinCFISaveRegP(MI->getOperand(0).getImm(),
                                MI->getOperand(2).getImm());
    return;
  case AArch64::SEH_SaveRegP_X:
    assert((MI->getOperand(1).getImm() - MI->getOperand(0).getImm() == 1) &&
           "Non-consecutive registers not allowed for save_regp_x");
    assert(MI->getOperand(2).getImm() < 0 &&
           "Pre increment SEH opcode must have a negative offset");
    TS->emitARM64WinCFISaveRegPX(MI->getOperand(0).getImm(),
                                 -MI->getOperand(2).getImm());
    return;
  case AArch64::SEH_SaveFReg:
    TS->emitARM64WinCFISaveFReg(MI->getOperand(0).getImm(),
                                MI->getOperand(1).getImm());
    return;
  case AArch64::SEH_SaveFReg_X:
    assert(MI->getOperand(1).getImm() < 0 &&
           "Pre increment SEH opcode must have a negative offset");
    TS->emitARM64WinCFISaveFRegX(MI->getOperand(0).getImm(),
                                 -MI->getOperand(1).getImm());
    return;
  case AArch64::SEH_SaveFRegP:
    assert((MI->getOperand(1).getImm() - MI->getOperand(0).getImm() == 1) &&
           "Non-consecutive registers not allowed for save_fregp");
    TS->emitARM64WinCFISaveFRegP(MI->getOperand(0).getImm(),
                                 MI->getOperand(2).getImm());
    return;
  case AArch64::SEH_SaveFRegP_X:
    assert((MI->getOperand(1).getImm() - MI->getOperand(0).getImm() == 1) &&
           "Non-consecutive registers not allowed for save_fregp_x");
    assert(MI->getOperand(2).getImm() < 0 &&
           "Pre increment SEH opcode must have a negative offset");
    TS->emitARM64WinCFISaveFRegPX(MI->getOperand(0).getImm(),
                                  -MI->getOperand(2).getImm());
    return;
  case AArch64::SEH_SetFP:
    TS->emitARM64WinCFISetFP();
    return;
  case AArch64::SEH_AddFP:
    TS->emitARM64WinCFIAddFP(MI->getOperand(0).getImm());
    return;
  case AArch64::SEH_Nop:
    TS->emitARM64WinCFINop();
    return;
  case AArch64::SEH_PrologEnd:
    TS->emitARM64WinCFIPrologEnd();
    return;
  case AArch64::SEH_EpilogStart:
    TS->emitARM64WinCFIEpilogStart();
    return;
  case AArch64::SEH_EpilogEnd:
    TS->emitARM64WinCFIEpilogEnd();
    return;
  case AArch64::SEH_PACSignLR:
    TS->emitARM64WinCFIPACSignLR();
    return;
  }

  // Finally, do the automated lowerings for everything else.
  MCInst TmpInst;
  MCInstLowering.Lower(MI, TmpInst);
  EmitToStreamer(*OutStreamer, TmpInst);
}

// Force static initialization.
extern "C" LLVM_EXTERNAL_VISIBILITY void LLVMInitializeAArch64AsmPrinter() {
  RegisterAsmPrinter<AArch64AsmPrinter> X(getTheAArch64leTarget());
  RegisterAsmPrinter<AArch64AsmPrinter> Y(getTheAArch64beTarget());
  RegisterAsmPrinter<AArch64AsmPrinter> Z(getTheARM64Target());
  RegisterAsmPrinter<AArch64AsmPrinter> W(getTheARM64_32Target());
  RegisterAsmPrinter<AArch64AsmPrinter> V(getTheAArch64_32Target());
}