//===- X86Operand.h - Parsed X86 machine instruction ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_X86_ASMPARSER_X86OPERAND_H
#define LLVM_LIB_TARGET_X86_ASMPARSER_X86OPERAND_H

#include "MCTargetDesc/X86IntelInstPrinter.h"
#include "MCTargetDesc/X86MCTargetDesc.h"
#include "X86AsmParserCommon.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCParser/MCParsedAsmOperand.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/SMLoc.h"
#include <cassert>
#include <memory>

namespace llvm {

/// X86Operand - Instances of this class represent a parsed X86 machine
/// instruction.
struct X86Operand final : public MCParsedAsmOperand {
  enum KindTy { Token, Register, Immediate, Memory, Prefix, DXRegister } Kind;

  SMLoc StartLoc, EndLoc;
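
  // State used when parsing MS-style inline assembly: the location of the
  // "offset" operator (if any), plus the symbol name, frontend declaration,
  // and address-of flag associated with this operand.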
  SMLoc OffsetOfLoc;
  StringRef SymName;
  void *OpDecl;
  bool AddressOf;

  struct TokOp {
    const char *Data;
    unsigned Length;
  };

  struct RegOp {
    unsigned RegNo;
  };

  struct PrefOp {
    unsigned Prefixes;
  };

  struct ImmOp {
    const MCExpr *Val;
    bool LocalRef;
  };

  struct MemOp {
    unsigned SegReg;
    const MCExpr *Disp;
    unsigned BaseReg;
    unsigned DefaultBaseReg;
    unsigned IndexReg;
    unsigned Scale;
    unsigned Size;
    unsigned ModeSize;

    /// If the memory operand is unsized and there are multiple instruction
    /// matches, prefer the one with this size.
    unsigned FrontendSize;
  };
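
  // Payload for the operand; which member is active is determined by Kind.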
  union {
    struct TokOp Tok;
    struct RegOp Reg;
    struct ImmOp Imm;
    struct MemOp Mem;
    struct PrefOp Pref;
  };

  X86Operand(KindTy K, SMLoc Start, SMLoc End)
      : Kind(K), StartLoc(Start), EndLoc(End), OpDecl(nullptr),
        AddressOf(false) {}

  StringRef getSymName() override { return SymName; }
  void *getOpDecl() override { return OpDecl; }

  /// getStartLoc - Get the location of the first token of this operand.
  SMLoc getStartLoc() const override { return StartLoc; }

  /// getEndLoc - Get the location of the last token of this operand.
  SMLoc getEndLoc() const override { return EndLoc; }

  /// getLocRange - Get the range between the first and last token of this
  /// operand.
  SMRange getLocRange() const { return SMRange(StartLoc, EndLoc); }

  /// getOffsetOfLoc - Get the location of the offset operator.
  SMLoc getOffsetOfLoc() const override { return OffsetOfLoc; }

  void print(raw_ostream &OS) const override {
    auto PrintImmValue = [&](const MCExpr *Val, const char *VName) {
      if (Val->getKind() == MCExpr::Constant) {
        if (auto Imm = cast<MCConstantExpr>(Val)->getValue())
          OS << VName << Imm;
      } else if (Val->getKind() == MCExpr::SymbolRef) {
        if (auto *SRE = dyn_cast<MCSymbolRefExpr>(Val)) {
          const MCSymbol &Sym = SRE->getSymbol();
          if (const char *SymNameStr = Sym.getName().data())
            OS << VName << SymNameStr;
        }
      }
    };

    switch (Kind) {
    case Token:
      OS << Tok.Data;
      break;
    case Register:
      OS << "Reg:" << X86IntelInstPrinter::getRegisterName(Reg.RegNo);
      break;
    case DXRegister:
      OS << "DXReg";
      break;
    case Immediate:
      PrintImmValue(Imm.Val, "Imm:");
      break;
    case Prefix:
      OS << "Prefix:" << Pref.Prefixes;
      break;
    case Memory:
      OS << "Memory: ModeSize=" << Mem.ModeSize;
      if (Mem.Size)
        OS << ",Size=" << Mem.Size;
      if (Mem.BaseReg)
        OS << ",BaseReg=" << X86IntelInstPrinter::getRegisterName(Mem.BaseReg);
      if (Mem.IndexReg)
        OS << ",IndexReg="
           << X86IntelInstPrinter::getRegisterName(Mem.IndexReg);
      if (Mem.Scale)
        OS << ",Scale=" << Mem.Scale;
      if (Mem.Disp)
        PrintImmValue(Mem.Disp, ",Disp=");
      if (Mem.SegReg)
        OS << ",SegReg=" << X86IntelInstPrinter::getRegisterName(Mem.SegReg);
      break;
    }
  }

  StringRef getToken() const {
    assert(Kind == Token && "Invalid access!");
    return StringRef(Tok.Data, Tok.Length);
  }
  void setTokenValue(StringRef Value) {
    assert(Kind == Token && "Invalid access!");
    Tok.Data = Value.data();
    Tok.Length = Value.size();
  }

  unsigned getReg() const override {
    assert(Kind == Register && "Invalid access!");
    return Reg.RegNo;
  }

  unsigned getPrefix() const {
    assert(Kind == Prefix && "Invalid access!");
    return Pref.Prefixes;
  }

  const MCExpr *getImm() const {
    assert(Kind == Immediate && "Invalid access!");
    return Imm.Val;
  }

  const MCExpr *getMemDisp() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.Disp;
  }
  unsigned getMemSegReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.SegReg;
  }
  unsigned getMemBaseReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.BaseReg;
  }
  unsigned getMemDefaultBaseReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.DefaultBaseReg;
  }
  unsigned getMemIndexReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.IndexReg;
  }
  unsigned getMemScale() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.Scale;
  }
  unsigned getMemModeSize() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.ModeSize;
  }
  unsigned getMemFrontendSize() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.FrontendSize;
  }

  bool isToken() const override { return Kind == Token; }

  bool isImm() const override { return Kind == Immediate; }

  bool isImmSExti16i8() const {
    if (!isImm())
      return false;

    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;

    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti16i8Value(CE->getValue());
  }
  bool isImmSExti32i8() const {
    if (!isImm())
      return false;

    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;

    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti32i8Value(CE->getValue());
  }
  bool isImmSExti64i8() const {
    if (!isImm())
      return false;

    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;

    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti64i8Value(CE->getValue());
  }
  bool isImmSExti64i32() const {
    if (!isImm())
      return false;

    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;

    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti64i32Value(CE->getValue());
  }

  bool isImmUnsignedi4() const {
    if (!isImm()) return false;
    // If this isn't a constant expr, reject it. The immediate byte is shared
    // with a register encoding. We can't have it affected by a relocation.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE) return false;
    return isImmUnsignedi4Value(CE->getValue());
  }

  bool isImmUnsignedi8() const {
    if (!isImm()) return false;
    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE) return true;
    return isImmUnsignedi8Value(CE->getValue());
  }

  bool isOffsetOfLocal() const override { return isImm() && Imm.LocalRef; }

  bool isMemPlaceholder(const MCInstrDesc &Desc) const override {
    // Only MS InlineAsm uses global variables with registers rather than
    // rip/eip.
    return isMem() && !Mem.DefaultBaseReg && Mem.FrontendSize;
  }

  bool needAddressOf() const override { return AddressOf; }

  bool isMem() const override { return Kind == Memory; }
  bool isMemUnsized() const {
    return Kind == Memory && Mem.Size == 0;
  }
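
  // An unsized memory operand (Mem.Size == 0) is allowed to match any width,
  // so each of these predicates also accepts Size == 0.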
  bool isMem8() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 8);
  }
  bool isMem16() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 16);
  }
  bool isMem32() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 32);
  }
  bool isMem64() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 64);
  }
  bool isMem80() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 80);
  }
  bool isMem128() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 128);
  }
  bool isMem256() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 256);
  }
  bool isMem512() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 512);
  }

  bool isSibMem() const {
    return isMem() && Mem.BaseReg != X86::RIP && Mem.BaseReg != X86::EIP;
  }
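
  // Predicates for memory operands whose index register is a vector register
  // (vSIB addressing, as used by gather/scatter). The _RC suffix names the
  // required index register class.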
  bool isMemIndexReg(unsigned LowR, unsigned HighR) const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.IndexReg >= LowR && Mem.IndexReg <= HighR;
  }

  bool isMem64_RC128() const {
    return isMem64() && isMemIndexReg(X86::XMM0, X86::XMM15);
  }
  bool isMem128_RC128() const {
    return isMem128() && isMemIndexReg(X86::XMM0, X86::XMM15);
  }
  bool isMem128_RC256() const {
    return isMem128() && isMemIndexReg(X86::YMM0, X86::YMM15);
  }
  bool isMem256_RC128() const {
    return isMem256() && isMemIndexReg(X86::XMM0, X86::XMM15);
  }
  bool isMem256_RC256() const {
    return isMem256() && isMemIndexReg(X86::YMM0, X86::YMM15);
  }

  bool isMem64_RC128X() const {
    return isMem64() && isMemIndexReg(X86::XMM0, X86::XMM31);
  }
  bool isMem128_RC128X() const {
    return isMem128() && isMemIndexReg(X86::XMM0, X86::XMM31);
  }
  bool isMem128_RC256X() const {
    return isMem128() && isMemIndexReg(X86::YMM0, X86::YMM31);
  }
  bool isMem256_RC128X() const {
    return isMem256() && isMemIndexReg(X86::XMM0, X86::XMM31);
  }
  bool isMem256_RC256X() const {
    return isMem256() && isMemIndexReg(X86::YMM0, X86::YMM31);
  }
  bool isMem256_RC512() const {
    return isMem256() && isMemIndexReg(X86::ZMM0, X86::ZMM31);
  }
  bool isMem512_RC256X() const {
    return isMem512() && isMemIndexReg(X86::YMM0, X86::YMM31);
  }
  bool isMem512_RC512() const {
    return isMem512() && isMemIndexReg(X86::ZMM0, X86::ZMM31);
  }

  bool isAbsMem() const {
    return Kind == Memory && !getMemSegReg() && !getMemBaseReg() &&
           !getMemIndexReg() && getMemScale() == 1;
  }
  bool isAVX512RC() const {
    return isImm();
  }

  bool isAbsMem16() const {
    return isAbsMem() && Mem.ModeSize == 16;
  }
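
  // Implicit source operand of the string instructions: [si]/[esi]/[rsi] with
  // no index register, no scale, and a zero displacement.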
  bool isSrcIdx() const {
    return !getMemIndexReg() && getMemScale() == 1 &&
           (getMemBaseReg() == X86::RSI || getMemBaseReg() == X86::ESI ||
            getMemBaseReg() == X86::SI) &&
           isa<MCConstantExpr>(getMemDisp()) &&
           cast<MCConstantExpr>(getMemDisp())->getValue() == 0;
  }
  bool isSrcIdx8() const {
    return isMem8() && isSrcIdx();
  }
  bool isSrcIdx16() const {
    return isMem16() && isSrcIdx();
  }
  bool isSrcIdx32() const {
    return isMem32() && isSrcIdx();
  }
  bool isSrcIdx64() const {
    return isMem64() && isSrcIdx();
  }
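
  // Implicit destination operand of the string instructions: [di]/[edi]/[rdi]
  // with no index register, no scale, a zero displacement, and either no
  // segment override or ES.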
  bool isDstIdx() const {
    return !getMemIndexReg() && getMemScale() == 1 &&
           (getMemSegReg() == 0 || getMemSegReg() == X86::ES) &&
           (getMemBaseReg() == X86::RDI || getMemBaseReg() == X86::EDI ||
            getMemBaseReg() == X86::DI) &&
           isa<MCConstantExpr>(getMemDisp()) &&
           cast<MCConstantExpr>(getMemDisp())->getValue() == 0;
  }
  bool isDstIdx8() const {
    return isMem8() && isDstIdx();
  }
  bool isDstIdx16() const {
    return isMem16() && isDstIdx();
  }
  bool isDstIdx32() const {
    return isMem32() && isDstIdx();
  }
  bool isDstIdx64() const {
    return isMem64() && isDstIdx();
  }
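
  // Absolute-offset memory operands (no base or index register), as used by
  // the moffs forms of MOV. The suffixes give the address-mode size and the
  // operand size.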
  bool isMemOffs() const {
    return Kind == Memory && !getMemBaseReg() && !getMemIndexReg() &&
           getMemScale() == 1;
  }

  bool isMemOffs16_8() const {
    return isMemOffs() && Mem.ModeSize == 16 && (!Mem.Size || Mem.Size == 8);
  }
  bool isMemOffs16_16() const {
    return isMemOffs() && Mem.ModeSize == 16 && (!Mem.Size || Mem.Size == 16);
  }
  bool isMemOffs16_32() const {
    return isMemOffs() && Mem.ModeSize == 16 && (!Mem.Size || Mem.Size == 32);
  }
  bool isMemOffs32_8() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 8);
  }
  bool isMemOffs32_16() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 16);
  }
  bool isMemOffs32_32() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 32);
  }
  bool isMemOffs32_64() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 64);
  }
  bool isMemOffs64_8() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 8);
  }
  bool isMemOffs64_16() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 16);
  }
  bool isMemOffs64_32() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 32);
  }
  bool isMemOffs64_64() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 64);
  }

  bool isPrefix() const { return Kind == Prefix; }
  bool isReg() const override { return Kind == Register; }
  bool isDXReg() const { return Kind == DXRegister; }

  bool isGR32orGR64() const {
    return Kind == Register &&
           (X86MCRegisterClasses[X86::GR32RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::GR64RegClassID].contains(getReg()));
  }

  bool isGR16orGR32orGR64() const {
    return Kind == Register &&
           (X86MCRegisterClasses[X86::GR16RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::GR32RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::GR64RegClassID].contains(getReg()));
  }

  bool isVectorReg() const {
    return Kind == Register &&
           (X86MCRegisterClasses[X86::VR64RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::VR128XRegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::VR256XRegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::VR512RegClassID].contains(getReg()));
  }

  bool isVK1Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK1RegClassID].contains(getReg());
  }

  bool isVK2Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK2RegClassID].contains(getReg());
  }

  bool isVK4Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK4RegClassID].contains(getReg());
  }

  bool isVK8Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK8RegClassID].contains(getReg());
  }

  bool isVK16Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK16RegClassID].contains(getReg());
  }

  void addExpr(MCInst &Inst, const MCExpr *Expr) const {
    // Add as immediates when possible.
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(Expr))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(Expr));
  }

  void addRegOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getReg()));
  }

  void addGR32orGR64Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    MCRegister RegNo = getReg();
    if (X86MCRegisterClasses[X86::GR64RegClassID].contains(RegNo))
      RegNo = getX86SubSuperRegister(RegNo, 32);
    Inst.addOperand(MCOperand::createReg(RegNo));
  }

  void addGR16orGR32orGR64Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    MCRegister RegNo = getReg();
    if (X86MCRegisterClasses[X86::GR32RegClassID].contains(RegNo) ||
        X86MCRegisterClasses[X86::GR64RegClassID].contains(RegNo))
      RegNo = getX86SubSuperRegister(RegNo, 16);
    Inst.addOperand(MCOperand::createReg(RegNo));
  }

  void addAVX512RCOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    addExpr(Inst, getImm());
  }

  void addImmOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    addExpr(Inst, getImm());
  }

  void addMaskPairOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    unsigned Reg = getReg();
    switch (Reg) {
    case X86::K0:
    case X86::K1:
      Reg = X86::K0_K1;
      break;
    case X86::K2:
    case X86::K3:
      Reg = X86::K2_K3;
      break;
    case X86::K4:
    case X86::K5:
      Reg = X86::K4_K5;
      break;
    case X86::K6:
    case X86::K7:
      Reg = X86::K6_K7;
      break;
    }
    Inst.addOperand(MCOperand::createReg(Reg));
  }
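
  // A full memory reference is emitted as five MCOperands, in the order the
  // X86 MC layer expects: base register, scale, index register, displacement,
  // segment register.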
  void addMemOperands(MCInst &Inst, unsigned N) const {
    assert((N == 5) && "Invalid number of operands!");
    if (getMemBaseReg())
      Inst.addOperand(MCOperand::createReg(getMemBaseReg()));
    else
      Inst.addOperand(MCOperand::createReg(getMemDefaultBaseReg()));
    Inst.addOperand(MCOperand::createImm(getMemScale()));
    Inst.addOperand(MCOperand::createReg(getMemIndexReg()));
    addExpr(Inst, getMemDisp());
    Inst.addOperand(MCOperand::createReg(getMemSegReg()));
  }

  void addAbsMemOperands(MCInst &Inst, unsigned N) const {
    assert((N == 1) && "Invalid number of operands!");
    // Add as immediates when possible.
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getMemDisp()))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(getMemDisp()));
  }

  void addSrcIdxOperands(MCInst &Inst, unsigned N) const {
    assert((N == 2) && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getMemBaseReg()));
    Inst.addOperand(MCOperand::createReg(getMemSegReg()));
  }

  void addDstIdxOperands(MCInst &Inst, unsigned N) const {
    assert((N == 1) && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getMemBaseReg()));
  }

  void addMemOffsOperands(MCInst &Inst, unsigned N) const {
    assert((N == 2) && "Invalid number of operands!");
    // Add as immediates when possible.
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getMemDisp()))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(getMemDisp()));
    Inst.addOperand(MCOperand::createReg(getMemSegReg()));
  }
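
  // Factory methods used by the assembly parser to construct operands.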
  static std::unique_ptr<X86Operand> CreateToken(StringRef Str, SMLoc Loc) {
    SMLoc EndLoc = SMLoc::getFromPointer(Loc.getPointer() + Str.size());
    auto Res = std::make_unique<X86Operand>(Token, Loc, EndLoc);
    Res->Tok.Data = Str.data();
    Res->Tok.Length = Str.size();
    return Res;
  }

  static std::unique_ptr<X86Operand>
  CreateReg(unsigned RegNo, SMLoc StartLoc, SMLoc EndLoc,
            bool AddressOf = false, SMLoc OffsetOfLoc = SMLoc(),
            StringRef SymName = StringRef(), void *OpDecl = nullptr) {
    auto Res = std::make_unique<X86Operand>(Register, StartLoc, EndLoc);
    Res->Reg.RegNo = RegNo;
    Res->AddressOf = AddressOf;
    Res->OffsetOfLoc = OffsetOfLoc;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    return Res;
  }

  static std::unique_ptr<X86Operand>
  CreateDXReg(SMLoc StartLoc, SMLoc EndLoc) {
    return std::make_unique<X86Operand>(DXRegister, StartLoc, EndLoc);
  }

  static std::unique_ptr<X86Operand>
  CreatePrefix(unsigned Prefixes, SMLoc StartLoc, SMLoc EndLoc) {
    auto Res = std::make_unique<X86Operand>(Prefix, StartLoc, EndLoc);
    Res->Pref.Prefixes = Prefixes;
    return Res;
  }

  static std::unique_ptr<X86Operand> CreateImm(const MCExpr *Val,
                                               SMLoc StartLoc, SMLoc EndLoc,
                                               StringRef SymName = StringRef(),
                                               void *OpDecl = nullptr,
                                               bool GlobalRef = true) {
    auto Res = std::make_unique<X86Operand>(Immediate, StartLoc, EndLoc);
    Res->Imm.Val = Val;
    Res->Imm.LocalRef = !GlobalRef;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    Res->AddressOf = true;
    return Res;
  }

  /// Create an absolute memory operand.
  static std::unique_ptr<X86Operand>
  CreateMem(unsigned ModeSize, const MCExpr *Disp, SMLoc StartLoc,
            SMLoc EndLoc, unsigned Size = 0, StringRef SymName = StringRef(),
            void *OpDecl = nullptr, unsigned FrontendSize = 0) {
    auto Res = std::make_unique<X86Operand>(Memory, StartLoc, EndLoc);
    Res->Mem.SegReg = 0;
    Res->Mem.Disp = Disp;
    Res->Mem.BaseReg = 0;
    Res->Mem.DefaultBaseReg = 0;
    Res->Mem.IndexReg = 0;
    Res->Mem.Scale = 1;
    Res->Mem.Size = Size;
    Res->Mem.ModeSize = ModeSize;
    Res->Mem.FrontendSize = FrontendSize;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    Res->AddressOf = false;
    return Res;
  }

  /// Create a generalized memory operand.
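  ///
  /// For example, Intel-syntax "dword ptr [eax + 4*ebx + 16]" parsed in
  /// 32-bit mode corresponds (roughly; Disp, Start and End are placeholders
  /// for the parsed displacement expression and source locations) to:
  ///   X86Operand::CreateMem(/*ModeSize=*/32, /*SegReg=*/0, Disp, X86::EAX,
  ///                         X86::EBX, /*Scale=*/4, Start, End, /*Size=*/32);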
  static std::unique_ptr<X86Operand>
  CreateMem(unsigned ModeSize, unsigned SegReg, const MCExpr *Disp,
            unsigned BaseReg, unsigned IndexReg, unsigned Scale,
            SMLoc StartLoc, SMLoc EndLoc, unsigned Size = 0,
            unsigned DefaultBaseReg = X86::NoRegister,
            StringRef SymName = StringRef(), void *OpDecl = nullptr,
            unsigned FrontendSize = 0) {
    // We should never just have a displacement, that should be parsed as an
    // absolute memory operand.
    assert((SegReg || BaseReg || IndexReg || DefaultBaseReg) &&
           "Invalid memory operand!");

    // The scale should always be one of {1,2,4,8}.
    assert((Scale == 1 || Scale == 2 || Scale == 4 || Scale == 8) &&
           "Invalid scale!");
    auto Res = std::make_unique<X86Operand>(Memory, StartLoc, EndLoc);
    Res->Mem.SegReg = SegReg;
    Res->Mem.Disp = Disp;
    Res->Mem.BaseReg = BaseReg;
    Res->Mem.DefaultBaseReg = DefaultBaseReg;
    Res->Mem.IndexReg = IndexReg;
    Res->Mem.Scale = Scale;
    Res->Mem.Size = Size;
    Res->Mem.ModeSize = ModeSize;
    Res->Mem.FrontendSize = FrontendSize;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    Res->AddressOf = false;
    return Res;
  }
};

} // end namespace llvm

#endif // LLVM_LIB_TARGET_X86_ASMPARSER_X86OPERAND_H