//===- X86Operand.h - Parsed X86 machine instruction ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_X86_ASMPARSER_X86OPERAND_H
#define LLVM_LIB_TARGET_X86_ASMPARSER_X86OPERAND_H

#include "MCTargetDesc/X86IntelInstPrinter.h"
#include "MCTargetDesc/X86MCTargetDesc.h"
#include "X86AsmParserCommon.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCParser/MCParsedAsmOperand.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSymbol.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/SMLoc.h"
#include <cassert>
#include <memory>

namespace llvm {

/// X86Operand - Instances of this class represent a parsed X86 machine
/// instruction.
struct X86Operand final : public MCParsedAsmOperand {
  enum KindTy { Token, Register, Immediate, Memory, Prefix, DXRegister } Kind;

  SMLoc StartLoc, EndLoc;
  SMLoc OffsetOfLoc;
  StringRef SymName;
  void *OpDecl;
  bool AddressOf;
  /// This is used for inline asm, which may specify the base and index
  /// registers for a MemOp (e.g. ARR[eax + ecx*4]), so no extra register can
  /// be used for the MemOp.
  bool UseUpRegs = false;
  struct TokOp {
    const char *Data;
    unsigned Length;
  };

  struct RegOp {
    unsigned RegNo;
  };

  struct PrefOp {
    unsigned Prefixes;
  };

  struct ImmOp {
    const MCExpr *Val;
    bool LocalRef;
  };

  struct MemOp {
    unsigned SegReg;
    const MCExpr *Disp;
    unsigned BaseReg;
    unsigned DefaultBaseReg;
    unsigned IndexReg;
    unsigned Scale;
    unsigned Size;
    unsigned ModeSize;

    /// If the memory operand is unsized and there are multiple instruction
    /// matches, prefer the one with this size.
    unsigned FrontendSize;

    /// If false, then this operand must be a memory operand for an indirect
    /// branch instruction. Otherwise, this operand may belong to either a
    /// direct or indirect branch instruction.
    bool MaybeDirectBranchDest;
  };
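
  // Payload for the active Kind; exactly one member of this union is valid
  // at a time.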
  union {
    struct TokOp Tok;
    struct RegOp Reg;
    struct ImmOp Imm;
    struct MemOp Mem;
    struct PrefOp Pref;
  };

  X86Operand(KindTy K, SMLoc Start, SMLoc End)
      : Kind(K), StartLoc(Start), EndLoc(End), OpDecl(nullptr),
        AddressOf(false) {}

  StringRef getSymName() override { return SymName; }
  void *getOpDecl() override { return OpDecl; }

  /// getStartLoc - Get the location of the first token of this operand.
  SMLoc getStartLoc() const override { return StartLoc; }

  /// getEndLoc - Get the location of the last token of this operand.
  SMLoc getEndLoc() const override { return EndLoc; }

  /// getLocRange - Get the range between the first and last token of this
  /// operand.
  SMRange getLocRange() const { return SMRange(StartLoc, EndLoc); }

  /// getOffsetOfLoc - Get the location of the offset operator.
  SMLoc getOffsetOfLoc() const override { return OffsetOfLoc; }
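
  // Prints a human-readable summary of the operand, primarily useful when
  // dumping the parsed operand list for debugging.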
  void print(raw_ostream &OS) const override {
    auto PrintImmValue = [&](const MCExpr *Val, const char *VName) {
      if (Val->getKind() == MCExpr::Constant) {
        if (auto Imm = cast<MCConstantExpr>(Val)->getValue())
          OS << VName << Imm;
      } else if (Val->getKind() == MCExpr::SymbolRef) {
        if (auto *SRE = dyn_cast<MCSymbolRefExpr>(Val)) {
          const MCSymbol &Sym = SRE->getSymbol();
          if (const char *SymNameStr = Sym.getName().data())
            OS << VName << SymNameStr;
        }
      }
    };

    switch (Kind) {
    case Token:
      OS << Tok.Data;
      break;
    case Register:
      OS << "Reg:" << X86IntelInstPrinter::getRegisterName(Reg.RegNo);
      break;
    case DXRegister:
      OS << "DXReg";
      break;
    case Immediate:
      PrintImmValue(Imm.Val, "Imm:");
      break;
    case Prefix:
      OS << "Prefix:" << Pref.Prefixes;
      break;
    case Memory:
      OS << "Memory: ModeSize=" << Mem.ModeSize;
      if (Mem.Size)
        OS << ",Size=" << Mem.Size;
      if (Mem.BaseReg)
        OS << ",BaseReg=" << X86IntelInstPrinter::getRegisterName(Mem.BaseReg);
      if (Mem.IndexReg)
        OS << ",IndexReg="
           << X86IntelInstPrinter::getRegisterName(Mem.IndexReg);
      if (Mem.Scale)
        OS << ",Scale=" << Mem.Scale;
      if (Mem.Disp)
        PrintImmValue(Mem.Disp, ",Disp=");
      if (Mem.SegReg)
        OS << ",SegReg=" << X86IntelInstPrinter::getRegisterName(Mem.SegReg);
      break;
    }
  }

  StringRef getToken() const {
    assert(Kind == Token && "Invalid access!");
    return StringRef(Tok.Data, Tok.Length);
  }
  void setTokenValue(StringRef Value) {
    assert(Kind == Token && "Invalid access!");
    Tok.Data = Value.data();
    Tok.Length = Value.size();
  }

  unsigned getReg() const override {
    assert(Kind == Register && "Invalid access!");
    return Reg.RegNo;
  }

  unsigned getPrefix() const {
    assert(Kind == Prefix && "Invalid access!");
    return Pref.Prefixes;
  }

  const MCExpr *getImm() const {
    assert(Kind == Immediate && "Invalid access!");
    return Imm.Val;
  }

  const MCExpr *getMemDisp() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.Disp;
  }
  unsigned getMemSegReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.SegReg;
  }
  unsigned getMemBaseReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.BaseReg;
  }
  unsigned getMemDefaultBaseReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.DefaultBaseReg;
  }
  unsigned getMemIndexReg() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.IndexReg;
  }
  unsigned getMemScale() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.Scale;
  }
  unsigned getMemModeSize() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.ModeSize;
  }
  unsigned getMemFrontendSize() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.FrontendSize;
  }
  bool isMaybeDirectBranchDest() const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.MaybeDirectBranchDest;
  }

  bool isToken() const override { return Kind == Token; }

  bool isImm() const override { return Kind == Immediate; }
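
  // The isImmSExti*i* predicates below are queried by the generated assembly
  // matcher to decide whether an immediate fits the sign-extended i8/i32
  // encoding of a wider instruction. Non-constant expressions are accepted
  // optimistically and left for relaxation/fixups to resolve.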
  bool isImmSExti16i8() const {
    if (!isImm())
      return false;
    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;
    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti16i8Value(CE->getValue());
  }
  bool isImmSExti32i8() const {
    if (!isImm())
      return false;
    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;
    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti32i8Value(CE->getValue());
  }
  bool isImmSExti64i8() const {
    if (!isImm())
      return false;
    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;
    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti64i8Value(CE->getValue());
  }
  bool isImmSExti64i32() const {
    if (!isImm())
      return false;
    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE)
      return true;
    // Otherwise, check the value is in a range that makes sense for this
    // extension.
    return isImmSExti64i32Value(CE->getValue());
  }

  bool isImmUnsignedi4() const {
    if (!isImm()) return false;
    // If this isn't a constant expr, reject it. The immediate byte is shared
    // with a register encoding. We can't have it affected by a relocation.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE) return false;
    return isImmUnsignedi4Value(CE->getValue());
  }

  bool isImmUnsignedi8() const {
    if (!isImm()) return false;
    // If this isn't a constant expr, just assume it fits and let relaxation
    // handle it.
    const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
    if (!CE) return true;
    return isImmUnsignedi8Value(CE->getValue());
  }

  bool isOffsetOfLocal() const override { return isImm() && Imm.LocalRef; }
  bool needAddressOf() const override { return AddressOf; }

  bool isMem() const override { return Kind == Memory; }
  bool isMemUnsized() const {
    return Kind == Memory && Mem.Size == 0;
  }
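
  // An unsized memory operand (Mem.Size == 0) is accepted by every isMemN
  // predicate below; when several instructions then match, FrontendSize is
  // used to prefer one of them.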
  bool isMem8() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 8);
  }
  bool isMem16() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 16);
  }
  bool isMem32() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 32);
  }
  bool isMem64() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 64);
  }
  bool isMem80() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 80);
  }
  bool isMem128() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 128);
  }
  bool isMem256() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 256);
  }
  bool isMem512() const {
    return Kind == Memory && (!Mem.Size || Mem.Size == 512);
  }

  bool isSibMem() const {
    return isMem() && Mem.BaseReg != X86::RIP && Mem.BaseReg != X86::EIP;
  }

  bool isMemIndexReg(unsigned LowR, unsigned HighR) const {
    assert(Kind == Memory && "Invalid access!");
    return Mem.IndexReg >= LowR && Mem.IndexReg <= HighR;
  }

  bool isMem64_RC128() const {
    return isMem64() && isMemIndexReg(X86::XMM0, X86::XMM15);
  }
  bool isMem128_RC128() const {
    return isMem128() && isMemIndexReg(X86::XMM0, X86::XMM15);
  }
  bool isMem128_RC256() const {
    return isMem128() && isMemIndexReg(X86::YMM0, X86::YMM15);
  }
  bool isMem256_RC128() const {
    return isMem256() && isMemIndexReg(X86::XMM0, X86::XMM15);
  }
  bool isMem256_RC256() const {
    return isMem256() && isMemIndexReg(X86::YMM0, X86::YMM15);
  }

  bool isMem64_RC128X() const {
    return isMem64() && isMemIndexReg(X86::XMM0, X86::XMM31);
  }
  bool isMem128_RC128X() const {
    return isMem128() && isMemIndexReg(X86::XMM0, X86::XMM31);
  }
  bool isMem128_RC256X() const {
    return isMem128() && isMemIndexReg(X86::YMM0, X86::YMM31);
  }
  bool isMem256_RC128X() const {
    return isMem256() && isMemIndexReg(X86::XMM0, X86::XMM31);
  }
  bool isMem256_RC256X() const {
    return isMem256() && isMemIndexReg(X86::YMM0, X86::YMM31);
  }
  bool isMem256_RC512() const {
    return isMem256() && isMemIndexReg(X86::ZMM0, X86::ZMM31);
  }
  bool isMem512_RC256X() const {
    return isMem512() && isMemIndexReg(X86::YMM0, X86::YMM31);
  }
  bool isMem512_RC512() const {
    return isMem512() && isMemIndexReg(X86::ZMM0, X86::ZMM31);
  }

  bool isAbsMem() const {
    return Kind == Memory && !getMemSegReg() && !getMemBaseReg() &&
           !getMemIndexReg() && getMemScale() == 1 &&
           isMaybeDirectBranchDest();
  }

  bool isAVX512RC() const {
    return isImm();
  }

  bool isAbsMem16() const {
    return isAbsMem() && Mem.ModeSize == 16;
  }

  bool isMemUseUpRegs() const override { return UseUpRegs; }
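
  // isSrcIdx/isDstIdx match the implicit [SI]/[DI]-style memory operands of
  // the x86 string instructions: no index register, scale 1, a zero constant
  // displacement, and (for the destination form) at most an ES segment
  // override.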
  bool isSrcIdx() const {
    return !getMemIndexReg() && getMemScale() == 1 &&
           (getMemBaseReg() == X86::RSI || getMemBaseReg() == X86::ESI ||
            getMemBaseReg() == X86::SI) &&
           isa<MCConstantExpr>(getMemDisp()) &&
           cast<MCConstantExpr>(getMemDisp())->getValue() == 0;
  }
  bool isSrcIdx8() const {
    return isMem8() && isSrcIdx();
  }
  bool isSrcIdx16() const {
    return isMem16() && isSrcIdx();
  }
  bool isSrcIdx32() const {
    return isMem32() && isSrcIdx();
  }
  bool isSrcIdx64() const {
    return isMem64() && isSrcIdx();
  }

  bool isDstIdx() const {
    return !getMemIndexReg() && getMemScale() == 1 &&
           (getMemSegReg() == 0 || getMemSegReg() == X86::ES) &&
           (getMemBaseReg() == X86::RDI || getMemBaseReg() == X86::EDI ||
            getMemBaseReg() == X86::DI) &&
           isa<MCConstantExpr>(getMemDisp()) &&
           cast<MCConstantExpr>(getMemDisp())->getValue() == 0;
  }
  bool isDstIdx8() const {
    return isMem8() && isDstIdx();
  }
  bool isDstIdx16() const {
    return isMem16() && isDstIdx();
  }
  bool isDstIdx32() const {
    return isMem32() && isDstIdx();
  }
  bool isDstIdx64() const {
    return isMem64() && isDstIdx();
  }
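
  // isMemOffs* match moffs-style operands: a bare displacement with no base
  // or index register (a segment override is still allowed), split by
  // address-mode size and access size.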
  bool isMemOffs() const {
    return Kind == Memory && !getMemBaseReg() && !getMemIndexReg() &&
           getMemScale() == 1;
  }

  bool isMemOffs16_8() const {
    return isMemOffs() && Mem.ModeSize == 16 && (!Mem.Size || Mem.Size == 8);
  }
  bool isMemOffs16_16() const {
    return isMemOffs() && Mem.ModeSize == 16 && (!Mem.Size || Mem.Size == 16);
  }
  bool isMemOffs16_32() const {
    return isMemOffs() && Mem.ModeSize == 16 && (!Mem.Size || Mem.Size == 32);
  }
  bool isMemOffs32_8() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 8);
  }
  bool isMemOffs32_16() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 16);
  }
  bool isMemOffs32_32() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 32);
  }
  bool isMemOffs32_64() const {
    return isMemOffs() && Mem.ModeSize == 32 && (!Mem.Size || Mem.Size == 64);
  }
  bool isMemOffs64_8() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 8);
  }
  bool isMemOffs64_16() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 16);
  }
  bool isMemOffs64_32() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 32);
  }
  bool isMemOffs64_64() const {
    return isMemOffs() && Mem.ModeSize == 64 && (!Mem.Size || Mem.Size == 64);
  }

  bool isPrefix() const { return Kind == Prefix; }
  bool isReg() const override { return Kind == Register; }
  bool isDXReg() const { return Kind == DXRegister; }

  bool isGR32orGR64() const {
    return Kind == Register &&
           (X86MCRegisterClasses[X86::GR32RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::GR64RegClassID].contains(getReg()));
  }

  bool isGR16orGR32orGR64() const {
    return Kind == Register &&
           (X86MCRegisterClasses[X86::GR16RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::GR32RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::GR64RegClassID].contains(getReg()));
  }

  bool isVectorReg() const {
    return Kind == Register &&
           (X86MCRegisterClasses[X86::VR64RegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::VR128XRegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::VR256XRegClassID].contains(getReg()) ||
            X86MCRegisterClasses[X86::VR512RegClassID].contains(getReg()));
  }
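
  // The isVK*Pair predicates accept a single mask register; the matching
  // addMaskPairOperands below rewrites it into the corresponding pair
  // register (e.g. K0 or K1 becomes K0_K1).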
  bool isVK1Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK1RegClassID].contains(getReg());
  }

  bool isVK2Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK2RegClassID].contains(getReg());
  }

  bool isVK4Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK4RegClassID].contains(getReg());
  }

  bool isVK8Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK8RegClassID].contains(getReg());
  }

  bool isVK16Pair() const {
    return Kind == Register &&
           X86MCRegisterClasses[X86::VK16RegClassID].contains(getReg());
  }

  void addExpr(MCInst &Inst, const MCExpr *Expr) const {
    // Add as immediates when possible.
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(Expr))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(Expr));
  }

  void addRegOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getReg()));
  }

  void addGR32orGR64Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    MCRegister RegNo = getReg();
    if (X86MCRegisterClasses[X86::GR64RegClassID].contains(RegNo))
      RegNo = getX86SubSuperRegister(RegNo, 32);
    Inst.addOperand(MCOperand::createReg(RegNo));
  }

  void addGR16orGR32orGR64Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    MCRegister RegNo = getReg();
    if (X86MCRegisterClasses[X86::GR32RegClassID].contains(RegNo) ||
        X86MCRegisterClasses[X86::GR64RegClassID].contains(RegNo))
      RegNo = getX86SubSuperRegister(RegNo, 16);
    Inst.addOperand(MCOperand::createReg(RegNo));
  }

  void addAVX512RCOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    addExpr(Inst, getImm());
  }

  void addImmOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    addExpr(Inst, getImm());
  }
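
  // Fold the single parsed mask register into the register-pair class used
  // by instructions that operate on adjacent mask registers.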
  void addMaskPairOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    unsigned Reg = getReg();
    switch (Reg) {
    case X86::K0:
    case X86::K1:
      Reg = X86::K0_K1;
      break;
    case X86::K2:
    case X86::K3:
      Reg = X86::K2_K3;
      break;
    case X86::K4:
    case X86::K5:
      Reg = X86::K4_K5;
      break;
    case X86::K6:
    case X86::K7:
      Reg = X86::K6_K7;
      break;
    }
    Inst.addOperand(MCOperand::createReg(Reg));
  }
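
  // A full memory reference is emitted as five MCOperands, in the order
  // base register, scale, index register, displacement, segment register.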
  void addMemOperands(MCInst &Inst, unsigned N) const {
    assert((N == 5) && "Invalid number of operands!");
    if (getMemBaseReg())
      Inst.addOperand(MCOperand::createReg(getMemBaseReg()));
    else
      Inst.addOperand(MCOperand::createReg(getMemDefaultBaseReg()));
    Inst.addOperand(MCOperand::createImm(getMemScale()));
    Inst.addOperand(MCOperand::createReg(getMemIndexReg()));
    addExpr(Inst, getMemDisp());
    Inst.addOperand(MCOperand::createReg(getMemSegReg()));
  }

  void addAbsMemOperands(MCInst &Inst, unsigned N) const {
    assert((N == 1) && "Invalid number of operands!");
    // Add as immediates when possible.
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getMemDisp()))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(getMemDisp()));
  }

  void addSrcIdxOperands(MCInst &Inst, unsigned N) const {
    assert((N == 2) && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getMemBaseReg()));
    Inst.addOperand(MCOperand::createReg(getMemSegReg()));
  }

  void addDstIdxOperands(MCInst &Inst, unsigned N) const {
    assert((N == 1) && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getMemBaseReg()));
  }

  void addMemOffsOperands(MCInst &Inst, unsigned N) const {
    assert((N == 2) && "Invalid number of operands!");
    // Add as immediates when possible.
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getMemDisp()))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(getMemDisp()));
    Inst.addOperand(MCOperand::createReg(getMemSegReg()));
  }
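
  // Factory methods: each returns a heap-allocated X86Operand with the
  // payload fields for the corresponding Kind filled in.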
  static std::unique_ptr<X86Operand> CreateToken(StringRef Str, SMLoc Loc) {
    SMLoc EndLoc = SMLoc::getFromPointer(Loc.getPointer() + Str.size());
    auto Res = std::make_unique<X86Operand>(Token, Loc, EndLoc);
    Res->Tok.Data = Str.data();
    Res->Tok.Length = Str.size();
    return Res;
  }

  static std::unique_ptr<X86Operand>
  CreateReg(unsigned RegNo, SMLoc StartLoc, SMLoc EndLoc,
            bool AddressOf = false, SMLoc OffsetOfLoc = SMLoc(),
            StringRef SymName = StringRef(), void *OpDecl = nullptr) {
    auto Res = std::make_unique<X86Operand>(Register, StartLoc, EndLoc);
    Res->Reg.RegNo = RegNo;
    Res->AddressOf = AddressOf;
    Res->OffsetOfLoc = OffsetOfLoc;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    return Res;
  }

  static std::unique_ptr<X86Operand>
  CreateDXReg(SMLoc StartLoc, SMLoc EndLoc) {
    return std::make_unique<X86Operand>(DXRegister, StartLoc, EndLoc);
  }

  static std::unique_ptr<X86Operand>
  CreatePrefix(unsigned Prefixes, SMLoc StartLoc, SMLoc EndLoc) {
    auto Res = std::make_unique<X86Operand>(Prefix, StartLoc, EndLoc);
    Res->Pref.Prefixes = Prefixes;
    return Res;
  }

  static std::unique_ptr<X86Operand> CreateImm(const MCExpr *Val,
                                               SMLoc StartLoc, SMLoc EndLoc,
                                               StringRef SymName = StringRef(),
                                               void *OpDecl = nullptr,
                                               bool GlobalRef = true) {
    auto Res = std::make_unique<X86Operand>(Immediate, StartLoc, EndLoc);
    Res->Imm.Val = Val;
    Res->Imm.LocalRef = !GlobalRef;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    Res->AddressOf = true;
    return Res;
  }

  /// Create an absolute memory operand.
  static std::unique_ptr<X86Operand>
  CreateMem(unsigned ModeSize, const MCExpr *Disp, SMLoc StartLoc, SMLoc EndLoc,
            unsigned Size = 0, StringRef SymName = StringRef(),
            void *OpDecl = nullptr, unsigned FrontendSize = 0,
            bool UseUpRegs = false, bool MaybeDirectBranchDest = true) {
    auto Res = std::make_unique<X86Operand>(Memory, StartLoc, EndLoc);
    Res->Mem.SegReg = 0;
    Res->Mem.Disp = Disp;
    Res->Mem.BaseReg = 0;
    Res->Mem.DefaultBaseReg = 0;
    Res->Mem.IndexReg = 0;
    Res->Mem.Scale = 1;
    Res->Mem.Size = Size;
    Res->Mem.ModeSize = ModeSize;
    Res->Mem.FrontendSize = FrontendSize;
    Res->Mem.MaybeDirectBranchDest = MaybeDirectBranchDest;
    Res->UseUpRegs = UseUpRegs;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    Res->AddressOf = false;
    return Res;
  }

  /// Create a generalized memory operand.
  static std::unique_ptr<X86Operand>
  CreateMem(unsigned ModeSize, unsigned SegReg, const MCExpr *Disp,
            unsigned BaseReg, unsigned IndexReg, unsigned Scale, SMLoc StartLoc,
            SMLoc EndLoc, unsigned Size = 0,
            unsigned DefaultBaseReg = X86::NoRegister,
            StringRef SymName = StringRef(), void *OpDecl = nullptr,
            unsigned FrontendSize = 0, bool UseUpRegs = false,
            bool MaybeDirectBranchDest = true) {
    // We should never just have a displacement, that should be parsed as an
    // absolute memory operand.
    assert((SegReg || BaseReg || IndexReg || DefaultBaseReg) &&
           "Invalid memory operand!");
    // The scale should always be one of {1,2,4,8}.
    assert(((Scale == 1 || Scale == 2 || Scale == 4 || Scale == 8)) &&
           "Invalid scale!");
    auto Res = std::make_unique<X86Operand>(Memory, StartLoc, EndLoc);
    Res->Mem.SegReg = SegReg;
    Res->Mem.Disp = Disp;
    Res->Mem.BaseReg = BaseReg;
    Res->Mem.DefaultBaseReg = DefaultBaseReg;
    Res->Mem.IndexReg = IndexReg;
    Res->Mem.Scale = Scale;
    Res->Mem.Size = Size;
    Res->Mem.ModeSize = ModeSize;
    Res->Mem.FrontendSize = FrontendSize;
    Res->Mem.MaybeDirectBranchDest = MaybeDirectBranchDest;
    Res->UseUpRegs = UseUpRegs;
    Res->SymName = SymName;
    Res->OpDecl = OpDecl;
    Res->AddressOf = false;
    return Res;
  }
};

} // end namespace llvm

#endif // LLVM_LIB_TARGET_X86_ASMPARSER_X86OPERAND_H