X86MCTargetDesc.cpp 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924
  1. //===-- X86MCTargetDesc.cpp - X86 Target Descriptions ---------------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file provides X86 specific target descriptions.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "X86MCTargetDesc.h"
  13. #include "TargetInfo/X86TargetInfo.h"
  14. #include "X86ATTInstPrinter.h"
  15. #include "X86BaseInfo.h"
  16. #include "X86IntelInstPrinter.h"
  17. #include "X86MCAsmInfo.h"
  18. #include "X86TargetStreamer.h"
  19. #include "llvm/ADT/APInt.h"
  20. #include "llvm/ADT/Triple.h"
  21. #include "llvm/DebugInfo/CodeView/CodeView.h"
  22. #include "llvm/MC/MCDwarf.h"
  23. #include "llvm/MC/MCInstrAnalysis.h"
  24. #include "llvm/MC/MCInstrInfo.h"
  25. #include "llvm/MC/MCRegisterInfo.h"
  26. #include "llvm/MC/MCStreamer.h"
  27. #include "llvm/MC/MCSubtargetInfo.h"
  28. #include "llvm/MC/MachineLocation.h"
  29. #include "llvm/MC/TargetRegistry.h"
  30. #include "llvm/Support/ErrorHandling.h"
  31. #include "llvm/Support/Host.h"
  32. using namespace llvm;
  33. #define GET_REGINFO_MC_DESC
  34. #include "X86GenRegisterInfo.inc"
  35. #define GET_INSTRINFO_MC_DESC
  36. #define GET_INSTRINFO_MC_HELPERS
  37. #define ENABLE_INSTR_PREDICATE_VERIFIER
  38. #include "X86GenInstrInfo.inc"
  39. #define GET_SUBTARGETINFO_MC_DESC
  40. #include "X86GenSubtargetInfo.inc"
  41. std::string X86_MC::ParseX86Triple(const Triple &TT) {
  42. std::string FS;
  43. // SSE2 should default to enabled in 64-bit mode, but can be turned off
  44. // explicitly.
  45. if (TT.isArch64Bit())
  46. FS = "+64bit-mode,-32bit-mode,-16bit-mode,+sse2";
  47. else if (TT.getEnvironment() != Triple::CODE16)
  48. FS = "-64bit-mode,+32bit-mode,-16bit-mode";
  49. else
  50. FS = "-64bit-mode,-32bit-mode,+16bit-mode";
  51. return FS;
  52. }
  53. unsigned X86_MC::getDwarfRegFlavour(const Triple &TT, bool isEH) {
  54. if (TT.getArch() == Triple::x86_64)
  55. return DWARFFlavour::X86_64;
  56. if (TT.isOSDarwin())
  57. return isEH ? DWARFFlavour::X86_32_DarwinEH : DWARFFlavour::X86_32_Generic;
  58. if (TT.isOSCygMing())
  59. // Unsupported by now, just quick fallback
  60. return DWARFFlavour::X86_32_Generic;
  61. return DWARFFlavour::X86_32_Generic;
  62. }
  63. bool X86_MC::hasLockPrefix(const MCInst &MI) {
  64. return MI.getFlags() & X86::IP_HAS_LOCK;
  65. }
  66. static bool isMemOperand(const MCInst &MI, unsigned Op, unsigned RegClassID) {
  67. const MCOperand &Base = MI.getOperand(Op + X86::AddrBaseReg);
  68. const MCOperand &Index = MI.getOperand(Op + X86::AddrIndexReg);
  69. const MCRegisterClass &RC = X86MCRegisterClasses[RegClassID];
  70. return (Base.isReg() && Base.getReg() != 0 && RC.contains(Base.getReg())) ||
  71. (Index.isReg() && Index.getReg() != 0 && RC.contains(Index.getReg()));
  72. }
  73. bool X86_MC::is16BitMemOperand(const MCInst &MI, unsigned Op,
  74. const MCSubtargetInfo &STI) {
  75. const MCOperand &Base = MI.getOperand(Op + X86::AddrBaseReg);
  76. const MCOperand &Index = MI.getOperand(Op + X86::AddrIndexReg);
  77. if (STI.hasFeature(X86::Is16Bit) && Base.isReg() && Base.getReg() == 0 &&
  78. Index.isReg() && Index.getReg() == 0)
  79. return true;
  80. return isMemOperand(MI, Op, X86::GR16RegClassID);
  81. }
/// True if the memory operand starting at \p Op uses 32-bit addressing:
/// EIP-relative, an explicit EIZ index, or a 32-bit GPR base/index.
bool X86_MC::is32BitMemOperand(const MCInst &MI, unsigned Op) {
  const MCOperand &Base = MI.getOperand(Op + X86::AddrBaseReg);
  const MCOperand &Index = MI.getOperand(Op + X86::AddrIndexReg);

  // EIP-relative addresses are always 32-bit and may not carry an index.
  if (Base.isReg() && Base.getReg() == X86::EIP) {
    assert(Index.isReg() && Index.getReg() == 0 && "Invalid eip-based address");
    return true;
  }
  // The pseudo index register EIZ forces 32-bit addressing too.
  if (Index.isReg() && Index.getReg() == X86::EIZ)
    return true;
  return isMemOperand(MI, Op, X86::GR32RegClassID);
}
#ifndef NDEBUG
/// Debug-only helper: true if the memory operand starting at \p Op uses a
/// 64-bit GPR as base or index. Referenced only from assertions.
bool X86_MC::is64BitMemOperand(const MCInst &MI, unsigned Op) {
  return isMemOperand(MI, Op, X86::GR64RegClassID);
}
#endif
/// Decide whether \p MI needs the 0x67 address-size override prefix, given
/// the subtarget's current mode, the index of the first memory operand
/// (\p MemoryOperand, -1 if none) and the instruction's \p TSFlags.
bool X86_MC::needsAddressSizeOverride(const MCInst &MI,
                                      const MCSubtargetInfo &STI,
                                      int MemoryOperand, uint64_t TSFlags) {
  uint64_t AdSize = TSFlags & X86II::AdSizeMask;
  bool Is16BitMode = STI.hasFeature(X86::Is16Bit);
  bool Is32BitMode = STI.hasFeature(X86::Is32Bit);
  bool Is64BitMode = STI.hasFeature(X86::Is64Bit);
  // An explicit AdSize encoding that differs from the mode's default address
  // size always demands the prefix.
  if ((Is16BitMode && AdSize == X86II::AdSize32) ||
      (Is32BitMode && AdSize == X86II::AdSize16) ||
      (Is64BitMode && AdSize == X86II::AdSize32))
    return true;
  uint64_t Form = TSFlags & X86II::FormMask;
  switch (Form) {
  default:
    break;
  case X86II::RawFrmDstSrc: {
    // String ops with both SI and DI operands: the operand register width
    // picks the address size, and SI/DI widths must agree.
    unsigned siReg = MI.getOperand(1).getReg();
    assert(((siReg == X86::SI && MI.getOperand(0).getReg() == X86::DI) ||
            (siReg == X86::ESI && MI.getOperand(0).getReg() == X86::EDI) ||
            (siReg == X86::RSI && MI.getOperand(0).getReg() == X86::RDI)) &&
           "SI and DI register sizes do not match");
    // Override when the register width is not the mode's native address
    // size: ESI outside 32-bit mode, SI inside it.
    return (!Is32BitMode && siReg == X86::ESI) ||
           (Is32BitMode && siReg == X86::SI);
  }
  case X86II::RawFrmSrc: {
    unsigned siReg = MI.getOperand(0).getReg();
    return (!Is32BitMode && siReg == X86::ESI) ||
           (Is32BitMode && siReg == X86::SI);
  }
  case X86II::RawFrmDst: {
    unsigned siReg = MI.getOperand(0).getReg();
    return (!Is32BitMode && siReg == X86::EDI) ||
           (Is32BitMode && siReg == X86::DI);
  }
  }

  // Determine where the memory operand starts, if present.
  if (MemoryOperand < 0)
    return false;

  // Otherwise the prefix is needed exactly when the memory operand's register
  // width differs from the mode's default address size.
  if (STI.hasFeature(X86::Is64Bit)) {
    assert(!is16BitMemOperand(MI, MemoryOperand, STI));
    return is32BitMemOperand(MI, MemoryOperand);
  }
  if (STI.hasFeature(X86::Is32Bit)) {
    assert(!is64BitMemOperand(MI, MemoryOperand));
    return is16BitMemOperand(MI, MemoryOperand, STI);
  }
  assert(STI.hasFeature(X86::Is16Bit));
  assert(!is64BitMemOperand(MI, MemoryOperand));
  return !is16BitMemOperand(MI, MemoryOperand, STI);
}
/// Populate \p MRI with the LLVM-to-SEH and LLVM-to-CodeView register
/// mappings. SEH register numbers are simply the hardware encodings; the
/// CodeView mapping is a hand-maintained table below.
void X86_MC::initLLVMToSEHAndCVRegMapping(MCRegisterInfo *MRI) {
  // FIXME: TableGen these.
  for (unsigned Reg = X86::NoRegister + 1; Reg < X86::NUM_TARGET_REGS; ++Reg) {
    unsigned SEH = MRI->getEncodingValue(Reg);
    MRI->mapLLVMRegToSEHReg(Reg, SEH);
  }

  // Mapping from CodeView to MC register id.
  static const struct {
    codeview::RegisterId CVReg;
    MCPhysReg Reg;
  } RegMap[] = {
      // 8/16/32-bit GPRs and EFLAGS.
      {codeview::RegisterId::AL, X86::AL},
      {codeview::RegisterId::CL, X86::CL},
      {codeview::RegisterId::DL, X86::DL},
      {codeview::RegisterId::BL, X86::BL},
      {codeview::RegisterId::AH, X86::AH},
      {codeview::RegisterId::CH, X86::CH},
      {codeview::RegisterId::DH, X86::DH},
      {codeview::RegisterId::BH, X86::BH},
      {codeview::RegisterId::AX, X86::AX},
      {codeview::RegisterId::CX, X86::CX},
      {codeview::RegisterId::DX, X86::DX},
      {codeview::RegisterId::BX, X86::BX},
      {codeview::RegisterId::SP, X86::SP},
      {codeview::RegisterId::BP, X86::BP},
      {codeview::RegisterId::SI, X86::SI},
      {codeview::RegisterId::DI, X86::DI},
      {codeview::RegisterId::EAX, X86::EAX},
      {codeview::RegisterId::ECX, X86::ECX},
      {codeview::RegisterId::EDX, X86::EDX},
      {codeview::RegisterId::EBX, X86::EBX},
      {codeview::RegisterId::ESP, X86::ESP},
      {codeview::RegisterId::EBP, X86::EBP},
      {codeview::RegisterId::ESI, X86::ESI},
      {codeview::RegisterId::EDI, X86::EDI},
      {codeview::RegisterId::EFLAGS, X86::EFLAGS},
      // x87 stack registers; each CodeView STi id maps to both the stack
      // register STi and the pseudo FPi used before stackification.
      {codeview::RegisterId::ST0, X86::ST0},
      {codeview::RegisterId::ST1, X86::ST1},
      {codeview::RegisterId::ST2, X86::ST2},
      {codeview::RegisterId::ST3, X86::ST3},
      {codeview::RegisterId::ST4, X86::ST4},
      {codeview::RegisterId::ST5, X86::ST5},
      {codeview::RegisterId::ST6, X86::ST6},
      {codeview::RegisterId::ST7, X86::ST7},
      {codeview::RegisterId::ST0, X86::FP0},
      {codeview::RegisterId::ST1, X86::FP1},
      {codeview::RegisterId::ST2, X86::FP2},
      {codeview::RegisterId::ST3, X86::FP3},
      {codeview::RegisterId::ST4, X86::FP4},
      {codeview::RegisterId::ST5, X86::FP5},
      {codeview::RegisterId::ST6, X86::FP6},
      {codeview::RegisterId::ST7, X86::FP7},
      // MMX registers.
      {codeview::RegisterId::MM0, X86::MM0},
      {codeview::RegisterId::MM1, X86::MM1},
      {codeview::RegisterId::MM2, X86::MM2},
      {codeview::RegisterId::MM3, X86::MM3},
      {codeview::RegisterId::MM4, X86::MM4},
      {codeview::RegisterId::MM5, X86::MM5},
      {codeview::RegisterId::MM6, X86::MM6},
      {codeview::RegisterId::MM7, X86::MM7},
      // XMM0-15.
      {codeview::RegisterId::XMM0, X86::XMM0},
      {codeview::RegisterId::XMM1, X86::XMM1},
      {codeview::RegisterId::XMM2, X86::XMM2},
      {codeview::RegisterId::XMM3, X86::XMM3},
      {codeview::RegisterId::XMM4, X86::XMM4},
      {codeview::RegisterId::XMM5, X86::XMM5},
      {codeview::RegisterId::XMM6, X86::XMM6},
      {codeview::RegisterId::XMM7, X86::XMM7},
      {codeview::RegisterId::XMM8, X86::XMM8},
      {codeview::RegisterId::XMM9, X86::XMM9},
      {codeview::RegisterId::XMM10, X86::XMM10},
      {codeview::RegisterId::XMM11, X86::XMM11},
      {codeview::RegisterId::XMM12, X86::XMM12},
      {codeview::RegisterId::XMM13, X86::XMM13},
      {codeview::RegisterId::XMM14, X86::XMM14},
      {codeview::RegisterId::XMM15, X86::XMM15},
      // x86-64-only low-byte registers and 64-bit GPRs.
      {codeview::RegisterId::SIL, X86::SIL},
      {codeview::RegisterId::DIL, X86::DIL},
      {codeview::RegisterId::BPL, X86::BPL},
      {codeview::RegisterId::SPL, X86::SPL},
      {codeview::RegisterId::RAX, X86::RAX},
      {codeview::RegisterId::RBX, X86::RBX},
      {codeview::RegisterId::RCX, X86::RCX},
      {codeview::RegisterId::RDX, X86::RDX},
      {codeview::RegisterId::RSI, X86::RSI},
      {codeview::RegisterId::RDI, X86::RDI},
      {codeview::RegisterId::RBP, X86::RBP},
      {codeview::RegisterId::RSP, X86::RSP},
      {codeview::RegisterId::R8, X86::R8},
      {codeview::RegisterId::R9, X86::R9},
      {codeview::RegisterId::R10, X86::R10},
      {codeview::RegisterId::R11, X86::R11},
      {codeview::RegisterId::R12, X86::R12},
      {codeview::RegisterId::R13, X86::R13},
      {codeview::RegisterId::R14, X86::R14},
      {codeview::RegisterId::R15, X86::R15},
      {codeview::RegisterId::R8B, X86::R8B},
      {codeview::RegisterId::R9B, X86::R9B},
      {codeview::RegisterId::R10B, X86::R10B},
      {codeview::RegisterId::R11B, X86::R11B},
      {codeview::RegisterId::R12B, X86::R12B},
      {codeview::RegisterId::R13B, X86::R13B},
      {codeview::RegisterId::R14B, X86::R14B},
      {codeview::RegisterId::R15B, X86::R15B},
      {codeview::RegisterId::R8W, X86::R8W},
      {codeview::RegisterId::R9W, X86::R9W},
      {codeview::RegisterId::R10W, X86::R10W},
      {codeview::RegisterId::R11W, X86::R11W},
      {codeview::RegisterId::R12W, X86::R12W},
      {codeview::RegisterId::R13W, X86::R13W},
      {codeview::RegisterId::R14W, X86::R14W},
      {codeview::RegisterId::R15W, X86::R15W},
      {codeview::RegisterId::R8D, X86::R8D},
      {codeview::RegisterId::R9D, X86::R9D},
      {codeview::RegisterId::R10D, X86::R10D},
      {codeview::RegisterId::R11D, X86::R11D},
      {codeview::RegisterId::R12D, X86::R12D},
      {codeview::RegisterId::R13D, X86::R13D},
      {codeview::RegisterId::R14D, X86::R14D},
      {codeview::RegisterId::R15D, X86::R15D},
      // AVX/AVX-512 YMM registers.
      {codeview::RegisterId::AMD64_YMM0, X86::YMM0},
      {codeview::RegisterId::AMD64_YMM1, X86::YMM1},
      {codeview::RegisterId::AMD64_YMM2, X86::YMM2},
      {codeview::RegisterId::AMD64_YMM3, X86::YMM3},
      {codeview::RegisterId::AMD64_YMM4, X86::YMM4},
      {codeview::RegisterId::AMD64_YMM5, X86::YMM5},
      {codeview::RegisterId::AMD64_YMM6, X86::YMM6},
      {codeview::RegisterId::AMD64_YMM7, X86::YMM7},
      {codeview::RegisterId::AMD64_YMM8, X86::YMM8},
      {codeview::RegisterId::AMD64_YMM9, X86::YMM9},
      {codeview::RegisterId::AMD64_YMM10, X86::YMM10},
      {codeview::RegisterId::AMD64_YMM11, X86::YMM11},
      {codeview::RegisterId::AMD64_YMM12, X86::YMM12},
      {codeview::RegisterId::AMD64_YMM13, X86::YMM13},
      {codeview::RegisterId::AMD64_YMM14, X86::YMM14},
      {codeview::RegisterId::AMD64_YMM15, X86::YMM15},
      {codeview::RegisterId::AMD64_YMM16, X86::YMM16},
      {codeview::RegisterId::AMD64_YMM17, X86::YMM17},
      {codeview::RegisterId::AMD64_YMM18, X86::YMM18},
      {codeview::RegisterId::AMD64_YMM19, X86::YMM19},
      {codeview::RegisterId::AMD64_YMM20, X86::YMM20},
      {codeview::RegisterId::AMD64_YMM21, X86::YMM21},
      {codeview::RegisterId::AMD64_YMM22, X86::YMM22},
      {codeview::RegisterId::AMD64_YMM23, X86::YMM23},
      {codeview::RegisterId::AMD64_YMM24, X86::YMM24},
      {codeview::RegisterId::AMD64_YMM25, X86::YMM25},
      {codeview::RegisterId::AMD64_YMM26, X86::YMM26},
      {codeview::RegisterId::AMD64_YMM27, X86::YMM27},
      {codeview::RegisterId::AMD64_YMM28, X86::YMM28},
      {codeview::RegisterId::AMD64_YMM29, X86::YMM29},
      {codeview::RegisterId::AMD64_YMM30, X86::YMM30},
      {codeview::RegisterId::AMD64_YMM31, X86::YMM31},
      // AVX-512 ZMM registers.
      {codeview::RegisterId::AMD64_ZMM0, X86::ZMM0},
      {codeview::RegisterId::AMD64_ZMM1, X86::ZMM1},
      {codeview::RegisterId::AMD64_ZMM2, X86::ZMM2},
      {codeview::RegisterId::AMD64_ZMM3, X86::ZMM3},
      {codeview::RegisterId::AMD64_ZMM4, X86::ZMM4},
      {codeview::RegisterId::AMD64_ZMM5, X86::ZMM5},
      {codeview::RegisterId::AMD64_ZMM6, X86::ZMM6},
      {codeview::RegisterId::AMD64_ZMM7, X86::ZMM7},
      {codeview::RegisterId::AMD64_ZMM8, X86::ZMM8},
      {codeview::RegisterId::AMD64_ZMM9, X86::ZMM9},
      {codeview::RegisterId::AMD64_ZMM10, X86::ZMM10},
      {codeview::RegisterId::AMD64_ZMM11, X86::ZMM11},
      {codeview::RegisterId::AMD64_ZMM12, X86::ZMM12},
      {codeview::RegisterId::AMD64_ZMM13, X86::ZMM13},
      {codeview::RegisterId::AMD64_ZMM14, X86::ZMM14},
      {codeview::RegisterId::AMD64_ZMM15, X86::ZMM15},
      {codeview::RegisterId::AMD64_ZMM16, X86::ZMM16},
      {codeview::RegisterId::AMD64_ZMM17, X86::ZMM17},
      {codeview::RegisterId::AMD64_ZMM18, X86::ZMM18},
      {codeview::RegisterId::AMD64_ZMM19, X86::ZMM19},
      {codeview::RegisterId::AMD64_ZMM20, X86::ZMM20},
      {codeview::RegisterId::AMD64_ZMM21, X86::ZMM21},
      {codeview::RegisterId::AMD64_ZMM22, X86::ZMM22},
      {codeview::RegisterId::AMD64_ZMM23, X86::ZMM23},
      {codeview::RegisterId::AMD64_ZMM24, X86::ZMM24},
      {codeview::RegisterId::AMD64_ZMM25, X86::ZMM25},
      {codeview::RegisterId::AMD64_ZMM26, X86::ZMM26},
      {codeview::RegisterId::AMD64_ZMM27, X86::ZMM27},
      {codeview::RegisterId::AMD64_ZMM28, X86::ZMM28},
      {codeview::RegisterId::AMD64_ZMM29, X86::ZMM29},
      {codeview::RegisterId::AMD64_ZMM30, X86::ZMM30},
      {codeview::RegisterId::AMD64_ZMM31, X86::ZMM31},
      // AVX-512 mask registers.
      {codeview::RegisterId::AMD64_K0, X86::K0},
      {codeview::RegisterId::AMD64_K1, X86::K1},
      {codeview::RegisterId::AMD64_K2, X86::K2},
      {codeview::RegisterId::AMD64_K3, X86::K3},
      {codeview::RegisterId::AMD64_K4, X86::K4},
      {codeview::RegisterId::AMD64_K5, X86::K5},
      {codeview::RegisterId::AMD64_K6, X86::K6},
      {codeview::RegisterId::AMD64_K7, X86::K7},
      // AVX-512 extended XMM registers.
      {codeview::RegisterId::AMD64_XMM16, X86::XMM16},
      {codeview::RegisterId::AMD64_XMM17, X86::XMM17},
      {codeview::RegisterId::AMD64_XMM18, X86::XMM18},
      {codeview::RegisterId::AMD64_XMM19, X86::XMM19},
      {codeview::RegisterId::AMD64_XMM20, X86::XMM20},
      {codeview::RegisterId::AMD64_XMM21, X86::XMM21},
      {codeview::RegisterId::AMD64_XMM22, X86::XMM22},
      {codeview::RegisterId::AMD64_XMM23, X86::XMM23},
      {codeview::RegisterId::AMD64_XMM24, X86::XMM24},
      {codeview::RegisterId::AMD64_XMM25, X86::XMM25},
      {codeview::RegisterId::AMD64_XMM26, X86::XMM26},
      {codeview::RegisterId::AMD64_XMM27, X86::XMM27},
      {codeview::RegisterId::AMD64_XMM28, X86::XMM28},
      {codeview::RegisterId::AMD64_XMM29, X86::XMM29},
      {codeview::RegisterId::AMD64_XMM30, X86::XMM30},
      {codeview::RegisterId::AMD64_XMM31, X86::XMM31},
  };
  for (const auto &I : RegMap)
    MRI->mapLLVMRegToCVReg(I.Reg, static_cast<int>(I.CVReg));
}
  360. MCSubtargetInfo *X86_MC::createX86MCSubtargetInfo(const Triple &TT,
  361. StringRef CPU, StringRef FS) {
  362. std::string ArchFS = X86_MC::ParseX86Triple(TT);
  363. assert(!ArchFS.empty() && "Failed to parse X86 triple");
  364. if (!FS.empty())
  365. ArchFS = (Twine(ArchFS) + "," + FS).str();
  366. if (CPU.empty())
  367. CPU = "generic";
  368. return createX86MCSubtargetInfoImpl(TT, CPU, /*TuneCPU*/ CPU, ArchFS);
  369. }
  370. static MCInstrInfo *createX86MCInstrInfo() {
  371. MCInstrInfo *X = new MCInstrInfo();
  372. InitX86MCInstrInfo(X);
  373. return X;
  374. }
  375. static MCRegisterInfo *createX86MCRegisterInfo(const Triple &TT) {
  376. unsigned RA = (TT.getArch() == Triple::x86_64)
  377. ? X86::RIP // Should have dwarf #16.
  378. : X86::EIP; // Should have dwarf #8.
  379. MCRegisterInfo *X = new MCRegisterInfo();
  380. InitX86MCRegisterInfo(X, RA, X86_MC::getDwarfRegFlavour(TT, false),
  381. X86_MC::getDwarfRegFlavour(TT, true), RA);
  382. X86_MC::initLLVMToSEHAndCVRegMapping(X);
  383. return X;
  384. }
/// Create the MCAsmInfo variant matching the triple's object format and
/// environment, then install the initial CFI frame state (CFA at the stack
/// pointer and the return address stored at the CFA).
static MCAsmInfo *createX86MCAsmInfo(const MCRegisterInfo &MRI,
                                     const Triple &TheTriple,
                                     const MCTargetOptions &Options) {
  bool is64Bit = TheTriple.getArch() == Triple::x86_64;

  MCAsmInfo *MAI;
  if (TheTriple.isOSBinFormatMachO()) {
    if (is64Bit)
      MAI = new X86_64MCAsmInfoDarwin(TheTriple);
    else
      MAI = new X86MCAsmInfoDarwin(TheTriple);
  } else if (TheTriple.isOSBinFormatELF()) {
    // Force the use of an ELF container.
    MAI = new X86ELFMCAsmInfo(TheTriple);
  } else if (TheTriple.isWindowsMSVCEnvironment() ||
             TheTriple.isWindowsCoreCLREnvironment()) {
    // MSVC-style COFF; the assembly-language option selects MASM dialect.
    if (Options.getAssemblyLanguage().equals_insensitive("masm"))
      MAI = new X86MCAsmInfoMicrosoftMASM(TheTriple);
    else
      MAI = new X86MCAsmInfoMicrosoft(TheTriple);
  } else if (TheTriple.isOSCygMing() ||
             TheTriple.isWindowsItaniumEnvironment()) {
    // GNU-style COFF.
    MAI = new X86MCAsmInfoGNUCOFF(TheTriple);
  } else {
    // The default is ELF.
    MAI = new X86ELFMCAsmInfo(TheTriple);
  }

  // Initialize initial frame state.
  // Calculate amount of bytes used for return address storing
  int stackGrowth = is64Bit ? -8 : -4;

  // Initial state of the frame pointer is esp+stackGrowth.
  unsigned StackPtr = is64Bit ? X86::RSP : X86::ESP;
  MCCFIInstruction Inst = MCCFIInstruction::cfiDefCfa(
      nullptr, MRI.getDwarfRegNum(StackPtr, true), -stackGrowth);
  MAI->addInitialFrameState(Inst);

  // Add return address to move list
  unsigned InstPtr = is64Bit ? X86::RIP : X86::EIP;
  MCCFIInstruction Inst2 = MCCFIInstruction::createOffset(
      nullptr, MRI.getDwarfRegNum(InstPtr, true), stackGrowth);
  MAI->addInitialFrameState(Inst2);

  return MAI;
}
  426. static MCInstPrinter *createX86MCInstPrinter(const Triple &T,
  427. unsigned SyntaxVariant,
  428. const MCAsmInfo &MAI,
  429. const MCInstrInfo &MII,
  430. const MCRegisterInfo &MRI) {
  431. if (SyntaxVariant == 0)
  432. return new X86ATTInstPrinter(MAI, MII, MRI);
  433. if (SyntaxVariant == 1)
  434. return new X86IntelInstPrinter(MAI, MII, MRI);
  435. return nullptr;
  436. }
/// X86 needs no target-specific relocation handling; forward to the generic
/// MCRelocationInfo factory.
static MCRelocationInfo *createX86MCRelocationInfo(const Triple &TheTriple,
                                                   MCContext &Ctx) {
  // Default to the stock relocation info.
  return llvm::createMCRelocationInfo(TheTriple, Ctx);
}
namespace llvm {
namespace X86_MC {

/// X86-specific MCInstrAnalysis: super-register-clearing semantics for
/// 32-bit GPR and VEX/EVEX/XOP vector writes, PLT entry discovery, and
/// static evaluation of branch targets and RIP-relative memory addresses.
class X86MCInstrAnalysis : public MCInstrAnalysis {
  X86MCInstrAnalysis(const X86MCInstrAnalysis &) = delete;
  X86MCInstrAnalysis &operator=(const X86MCInstrAnalysis &) = delete;
  virtual ~X86MCInstrAnalysis() = default;

public:
  X86MCInstrAnalysis(const MCInstrInfo *MCII) : MCInstrAnalysis(MCII) {}

// Declarations of the tablegen'erated subtarget-feature predicates.
#define GET_STIPREDICATE_DECLS_FOR_MC_ANALYSIS
#include "X86GenSubtargetInfo.inc"

  bool clearsSuperRegisters(const MCRegisterInfo &MRI, const MCInst &Inst,
                            APInt &Mask) const override;
  std::vector<std::pair<uint64_t, uint64_t>>
  findPltEntries(uint64_t PltSectionVA, ArrayRef<uint8_t> PltContents,
                 uint64_t GotSectionVA,
                 const Triple &TargetTriple) const override;
  bool evaluateBranch(const MCInst &Inst, uint64_t Addr, uint64_t Size,
                      uint64_t &Target) const override;
  std::optional<uint64_t>
  evaluateMemoryOperandAddress(const MCInst &Inst, const MCSubtargetInfo *STI,
                               uint64_t Addr, uint64_t Size) const override;
  std::optional<uint64_t>
  getMemoryOperandRelocationOffset(const MCInst &Inst,
                                   uint64_t Size) const override;
};

// Definitions of the tablegen'erated subtarget-feature predicates.
#define GET_STIPREDICATE_DEFS_FOR_MC_ANALYSIS
#include "X86GenSubtargetInfo.inc"
/// Set a bit in \p Mask for each (explicit then implicit) def of \p Inst
/// whose write architecturally zeroes the rest of its super-register.
/// Returns true if any bit was set.
bool X86MCInstrAnalysis::clearsSuperRegisters(const MCRegisterInfo &MRI,
                                              const MCInst &Inst,
                                              APInt &Mask) const {
  const MCInstrDesc &Desc = Info->get(Inst.getOpcode());
  unsigned NumDefs = Desc.getNumDefs();
  unsigned NumImplicitDefs = Desc.implicit_defs().size();
  assert(Mask.getBitWidth() == NumDefs + NumImplicitDefs &&
         "Unexpected number of bits in the mask!");

  bool HasVEX = (Desc.TSFlags & X86II::EncodingMask) == X86II::VEX;
  bool HasEVEX = (Desc.TSFlags & X86II::EncodingMask) == X86II::EVEX;
  bool HasXOP = (Desc.TSFlags & X86II::EncodingMask) == X86II::XOP;

  const MCRegisterClass &GR32RC = MRI.getRegClass(X86::GR32RegClassID);
  const MCRegisterClass &VR128XRC = MRI.getRegClass(X86::VR128XRegClassID);
  const MCRegisterClass &VR256XRC = MRI.getRegClass(X86::VR256XRegClassID);

  auto ClearsSuperReg = [=](unsigned RegID) {
    // On X86-64, a general purpose integer register is viewed as a 64-bit
    // register internal to the processor.
    // An update to the lower 32 bits of a 64 bit integer register is
    // architecturally defined to zero extend the upper 32 bits.
    if (GR32RC.contains(RegID))
      return true;

    // Early exit if this instruction has no vex/evex/xop prefix.
    if (!HasEVEX && !HasVEX && !HasXOP)
      return false;

    // All VEX and EVEX encoded instructions are defined to zero the high bits
    // of the destination register up to VLMAX (i.e. the maximum vector register
    // width pertaining to the instruction).
    // We assume the same behavior for XOP instructions too.
    return VR128XRC.contains(RegID) || VR256XRC.contains(RegID);
  };

  Mask.clearAllBits();
  // Explicit defs occupy the low NumDefs operand slots (and mask bits).
  for (unsigned I = 0, E = NumDefs; I < E; ++I) {
    const MCOperand &Op = Inst.getOperand(I);
    if (ClearsSuperReg(Op.getReg()))
      Mask.setBit(I);
  }
  // Implicit defs follow the explicit ones in the mask.
  for (unsigned I = 0, E = NumImplicitDefs; I < E; ++I) {
    const MCPhysReg Reg = Desc.implicit_defs()[I];
    if (ClearsSuperReg(Reg))
      Mask.setBit(NumDefs + I);
  }

  return Mask.getBoolValue();
}
  512. static std::vector<std::pair<uint64_t, uint64_t>>
  513. findX86PltEntries(uint64_t PltSectionVA, ArrayRef<uint8_t> PltContents,
  514. uint64_t GotPltSectionVA) {
  515. // Do a lightweight parsing of PLT entries.
  516. std::vector<std::pair<uint64_t, uint64_t>> Result;
  517. for (uint64_t Byte = 0, End = PltContents.size(); Byte + 6 < End; ) {
  518. // Recognize a jmp.
  519. if (PltContents[Byte] == 0xff && PltContents[Byte + 1] == 0xa3) {
  520. // The jmp instruction at the beginning of each PLT entry jumps to the
  521. // address of the base of the .got.plt section plus the immediate.
  522. uint32_t Imm = support::endian::read32le(PltContents.data() + Byte + 2);
  523. Result.push_back(
  524. std::make_pair(PltSectionVA + Byte, GotPltSectionVA + Imm));
  525. Byte += 6;
  526. } else if (PltContents[Byte] == 0xff && PltContents[Byte + 1] == 0x25) {
  527. // The jmp instruction at the beginning of each PLT entry jumps to the
  528. // immediate.
  529. uint32_t Imm = support::endian::read32le(PltContents.data() + Byte + 2);
  530. Result.push_back(std::make_pair(PltSectionVA + Byte, Imm));
  531. Byte += 6;
  532. } else
  533. Byte++;
  534. }
  535. return Result;
  536. }
  537. static std::vector<std::pair<uint64_t, uint64_t>>
  538. findX86_64PltEntries(uint64_t PltSectionVA, ArrayRef<uint8_t> PltContents) {
  539. // Do a lightweight parsing of PLT entries.
  540. std::vector<std::pair<uint64_t, uint64_t>> Result;
  541. for (uint64_t Byte = 0, End = PltContents.size(); Byte + 6 < End; ) {
  542. // Recognize a jmp.
  543. if (PltContents[Byte] == 0xff && PltContents[Byte + 1] == 0x25) {
  544. // The jmp instruction at the beginning of each PLT entry jumps to the
  545. // address of the next instruction plus the immediate.
  546. uint32_t Imm = support::endian::read32le(PltContents.data() + Byte + 2);
  547. Result.push_back(
  548. std::make_pair(PltSectionVA + Byte, PltSectionVA + Byte + 6 + Imm));
  549. Byte += 6;
  550. } else
  551. Byte++;
  552. }
  553. return Result;
  554. }
  555. std::vector<std::pair<uint64_t, uint64_t>> X86MCInstrAnalysis::findPltEntries(
  556. uint64_t PltSectionVA, ArrayRef<uint8_t> PltContents,
  557. uint64_t GotPltSectionVA, const Triple &TargetTriple) const {
  558. switch (TargetTriple.getArch()) {
  559. case Triple::x86:
  560. return findX86PltEntries(PltSectionVA, PltContents, GotPltSectionVA);
  561. case Triple::x86_64:
  562. return findX86_64PltEntries(PltSectionVA, PltContents);
  563. default:
  564. return {};
  565. }
  566. }
  567. bool X86MCInstrAnalysis::evaluateBranch(const MCInst &Inst, uint64_t Addr,
  568. uint64_t Size, uint64_t &Target) const {
  569. if (Inst.getNumOperands() == 0 ||
  570. Info->get(Inst.getOpcode()).operands()[0].OperandType !=
  571. MCOI::OPERAND_PCREL)
  572. return false;
  573. Target = Addr + Size + Inst.getOperand(0).getImm();
  574. return true;
  575. }
/// Statically evaluate the address of \p Inst's memory operand. Only simple
/// RIP-relative forms (no segment, no index, scale 1, immediate
/// displacement) can be resolved; everything else yields std::nullopt.
std::optional<uint64_t> X86MCInstrAnalysis::evaluateMemoryOperandAddress(
    const MCInst &Inst, const MCSubtargetInfo *STI, uint64_t Addr,
    uint64_t Size) const {
  const MCInstrDesc &MCID = Info->get(Inst.getOpcode());
  int MemOpStart = X86II::getMemoryOperandNo(MCID.TSFlags);
  if (MemOpStart == -1)
    return std::nullopt;
  // Skip over tied/implicit leading operands to reach the address operands.
  MemOpStart += X86II::getOperandBias(MCID);

  const MCOperand &SegReg = Inst.getOperand(MemOpStart + X86::AddrSegmentReg);
  const MCOperand &BaseReg = Inst.getOperand(MemOpStart + X86::AddrBaseReg);
  const MCOperand &IndexReg = Inst.getOperand(MemOpStart + X86::AddrIndexReg);
  const MCOperand &ScaleAmt = Inst.getOperand(MemOpStart + X86::AddrScaleAmt);
  const MCOperand &Disp = Inst.getOperand(MemOpStart + X86::AddrDisp);
  if (SegReg.getReg() != 0 || IndexReg.getReg() != 0 || ScaleAmt.getImm() != 1 ||
      !Disp.isImm())
    return std::nullopt;

  // RIP-relative addressing.
  if (BaseReg.getReg() == X86::RIP)
    return Addr + Size + Disp.getImm();

  return std::nullopt;
}
/// Return the offset within \p Inst (of byte length \p Size) where the
/// relocation for its memory operand would be applied. Only RIP-relative
/// LEA64r is supported; the offset points at the trailing 32-bit immediate.
std::optional<uint64_t>
X86MCInstrAnalysis::getMemoryOperandRelocationOffset(const MCInst &Inst,
                                                     uint64_t Size) const {
  if (Inst.getOpcode() != X86::LEA64r)
    return std::nullopt;
  const MCInstrDesc &MCID = Info->get(Inst.getOpcode());
  int MemOpStart = X86II::getMemoryOperandNo(MCID.TSFlags);
  if (MemOpStart == -1)
    return std::nullopt;
  // Skip over tied/implicit leading operands to reach the address operands.
  MemOpStart += X86II::getOperandBias(MCID);
  const MCOperand &SegReg = Inst.getOperand(MemOpStart + X86::AddrSegmentReg);
  const MCOperand &BaseReg = Inst.getOperand(MemOpStart + X86::AddrBaseReg);
  const MCOperand &IndexReg = Inst.getOperand(MemOpStart + X86::AddrIndexReg);
  const MCOperand &ScaleAmt = Inst.getOperand(MemOpStart + X86::AddrScaleAmt);
  const MCOperand &Disp = Inst.getOperand(MemOpStart + X86::AddrDisp);
  // Must be a simple rip-relative address.
  if (BaseReg.getReg() != X86::RIP || SegReg.getReg() != 0 ||
      IndexReg.getReg() != 0 || ScaleAmt.getImm() != 1 || !Disp.isImm())
    return std::nullopt;
  // rip-relative ModR/M immediate is 32 bits.
  assert(Size > 4 && "invalid instruction size for rip-relative lea");
  return Size - 4;
}

} // end of namespace X86_MC

} // end of namespace llvm
/// Factory registered with the TargetRegistry below.
static MCInstrAnalysis *createX86MCInstrAnalysis(const MCInstrInfo *Info) {
  return new X86_MC::X86MCInstrAnalysis(Info);
}
  625. // Force static initialization.
  626. extern "C" LLVM_EXTERNAL_VISIBILITY void LLVMInitializeX86TargetMC() {
  627. for (Target *T : {&getTheX86_32Target(), &getTheX86_64Target()}) {
  628. // Register the MC asm info.
  629. RegisterMCAsmInfoFn X(*T, createX86MCAsmInfo);
  630. // Register the MC instruction info.
  631. TargetRegistry::RegisterMCInstrInfo(*T, createX86MCInstrInfo);
  632. // Register the MC register info.
  633. TargetRegistry::RegisterMCRegInfo(*T, createX86MCRegisterInfo);
  634. // Register the MC subtarget info.
  635. TargetRegistry::RegisterMCSubtargetInfo(*T,
  636. X86_MC::createX86MCSubtargetInfo);
  637. // Register the MC instruction analyzer.
  638. TargetRegistry::RegisterMCInstrAnalysis(*T, createX86MCInstrAnalysis);
  639. // Register the code emitter.
  640. TargetRegistry::RegisterMCCodeEmitter(*T, createX86MCCodeEmitter);
  641. // Register the obj target streamer.
  642. TargetRegistry::RegisterObjectTargetStreamer(*T,
  643. createX86ObjectTargetStreamer);
  644. // Register the asm target streamer.
  645. TargetRegistry::RegisterAsmTargetStreamer(*T, createX86AsmTargetStreamer);
  646. // Register the null streamer.
  647. TargetRegistry::RegisterNullTargetStreamer(*T, createX86NullTargetStreamer);
  648. TargetRegistry::RegisterCOFFStreamer(*T, createX86WinCOFFStreamer);
  649. // Register the MCInstPrinter.
  650. TargetRegistry::RegisterMCInstPrinter(*T, createX86MCInstPrinter);
  651. // Register the MC relocation info.
  652. TargetRegistry::RegisterMCRelocationInfo(*T, createX86MCRelocationInfo);
  653. }
  654. // Register the asm backend.
  655. TargetRegistry::RegisterMCAsmBackend(getTheX86_32Target(),
  656. createX86_32AsmBackend);
  657. TargetRegistry::RegisterMCAsmBackend(getTheX86_64Target(),
  658. createX86_64AsmBackend);
  659. }
  660. MCRegister llvm::getX86SubSuperRegisterOrZero(MCRegister Reg, unsigned Size,
  661. bool High) {
  662. switch (Size) {
  663. default: return X86::NoRegister;
  664. case 8:
  665. if (High) {
  666. switch (Reg.id()) {
  667. default: return getX86SubSuperRegisterOrZero(Reg, 64);
  668. case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
  669. return X86::SI;
  670. case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
  671. return X86::DI;
  672. case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
  673. return X86::BP;
  674. case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
  675. return X86::SP;
  676. case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
  677. return X86::AH;
  678. case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
  679. return X86::DH;
  680. case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
  681. return X86::CH;
  682. case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
  683. return X86::BH;
  684. }
  685. } else {
  686. switch (Reg.id()) {
  687. default: return X86::NoRegister;
  688. case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
  689. return X86::AL;
  690. case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
  691. return X86::DL;
  692. case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
  693. return X86::CL;
  694. case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
  695. return X86::BL;
  696. case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
  697. return X86::SIL;
  698. case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
  699. return X86::DIL;
  700. case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
  701. return X86::BPL;
  702. case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
  703. return X86::SPL;
  704. case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
  705. return X86::R8B;
  706. case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
  707. return X86::R9B;
  708. case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
  709. return X86::R10B;
  710. case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
  711. return X86::R11B;
  712. case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
  713. return X86::R12B;
  714. case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
  715. return X86::R13B;
  716. case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
  717. return X86::R14B;
  718. case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
  719. return X86::R15B;
  720. }
  721. }
  722. case 16:
  723. switch (Reg.id()) {
  724. default: return X86::NoRegister;
  725. case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
  726. return X86::AX;
  727. case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
  728. return X86::DX;
  729. case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
  730. return X86::CX;
  731. case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
  732. return X86::BX;
  733. case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
  734. return X86::SI;
  735. case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
  736. return X86::DI;
  737. case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
  738. return X86::BP;
  739. case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
  740. return X86::SP;
  741. case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
  742. return X86::R8W;
  743. case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
  744. return X86::R9W;
  745. case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
  746. return X86::R10W;
  747. case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
  748. return X86::R11W;
  749. case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
  750. return X86::R12W;
  751. case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
  752. return X86::R13W;
  753. case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
  754. return X86::R14W;
  755. case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
  756. return X86::R15W;
  757. }
  758. case 32:
  759. switch (Reg.id()) {
  760. default: return X86::NoRegister;
  761. case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
  762. return X86::EAX;
  763. case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
  764. return X86::EDX;
  765. case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
  766. return X86::ECX;
  767. case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
  768. return X86::EBX;
  769. case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
  770. return X86::ESI;
  771. case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
  772. return X86::EDI;
  773. case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
  774. return X86::EBP;
  775. case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
  776. return X86::ESP;
  777. case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
  778. return X86::R8D;
  779. case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
  780. return X86::R9D;
  781. case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
  782. return X86::R10D;
  783. case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
  784. return X86::R11D;
  785. case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
  786. return X86::R12D;
  787. case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
  788. return X86::R13D;
  789. case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
  790. return X86::R14D;
  791. case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
  792. return X86::R15D;
  793. }
  794. case 64:
  795. switch (Reg.id()) {
  796. default: return 0;
  797. case X86::AH: case X86::AL: case X86::AX: case X86::EAX: case X86::RAX:
  798. return X86::RAX;
  799. case X86::DH: case X86::DL: case X86::DX: case X86::EDX: case X86::RDX:
  800. return X86::RDX;
  801. case X86::CH: case X86::CL: case X86::CX: case X86::ECX: case X86::RCX:
  802. return X86::RCX;
  803. case X86::BH: case X86::BL: case X86::BX: case X86::EBX: case X86::RBX:
  804. return X86::RBX;
  805. case X86::SIL: case X86::SI: case X86::ESI: case X86::RSI:
  806. return X86::RSI;
  807. case X86::DIL: case X86::DI: case X86::EDI: case X86::RDI:
  808. return X86::RDI;
  809. case X86::BPL: case X86::BP: case X86::EBP: case X86::RBP:
  810. return X86::RBP;
  811. case X86::SPL: case X86::SP: case X86::ESP: case X86::RSP:
  812. return X86::RSP;
  813. case X86::R8B: case X86::R8W: case X86::R8D: case X86::R8:
  814. return X86::R8;
  815. case X86::R9B: case X86::R9W: case X86::R9D: case X86::R9:
  816. return X86::R9;
  817. case X86::R10B: case X86::R10W: case X86::R10D: case X86::R10:
  818. return X86::R10;
  819. case X86::R11B: case X86::R11W: case X86::R11D: case X86::R11:
  820. return X86::R11;
  821. case X86::R12B: case X86::R12W: case X86::R12D: case X86::R12:
  822. return X86::R12;
  823. case X86::R13B: case X86::R13W: case X86::R13D: case X86::R13:
  824. return X86::R13;
  825. case X86::R14B: case X86::R14W: case X86::R14D: case X86::R14:
  826. return X86::R14;
  827. case X86::R15B: case X86::R15W: case X86::R15D: case X86::R15:
  828. return X86::R15;
  829. }
  830. }
  831. }
  832. MCRegister llvm::getX86SubSuperRegister(MCRegister Reg, unsigned Size, bool High) {
  833. MCRegister Res = getX86SubSuperRegisterOrZero(Reg, Size, High);
  834. assert(Res != X86::NoRegister && "Unexpected register or VT");
  835. return Res;
  836. }