//===-- BPFInstrInfo.td - Target Description for BPF Target ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file describes the BPF instructions in TableGen format.
//
//===----------------------------------------------------------------------===//

include "BPFInstrFormats.td"

// Instruction Operands and Patterns

// These are target-independent nodes, but have target-specific formats.
def SDT_BPFCallSeqStart : SDCallSeqStart<[SDTCisVT<0, iPTR>,
                                          SDTCisVT<1, iPTR>]>;
def SDT_BPFCallSeqEnd   : SDCallSeqEnd<[SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>;
def SDT_BPFCall         : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>;
def SDT_BPFSetFlag      : SDTypeProfile<0, 3, [SDTCisSameAs<0, 1>]>;
def SDT_BPFSelectCC     : SDTypeProfile<1, 5, [SDTCisSameAs<1, 2>,
                                               SDTCisSameAs<0, 4>,
                                               SDTCisSameAs<4, 5>]>;
def SDT_BPFBrCC         : SDTypeProfile<0, 4, [SDTCisSameAs<0, 1>,
                                               SDTCisVT<3, OtherVT>]>;
def SDT_BPFWrapper      : SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>,
                                               SDTCisPtrTy<0>]>;
def SDT_BPFMEMCPY       : SDTypeProfile<0, 4, [SDTCisVT<0, i64>,
                                               SDTCisVT<1, i64>,
                                               SDTCisVT<2, i64>,
                                               SDTCisVT<3, i64>]>;

def BPFcall          : SDNode<"BPFISD::CALL", SDT_BPFCall,
                              [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue,
                               SDNPVariadic]>;
def BPFretflag       : SDNode<"BPFISD::RET_FLAG", SDTNone,
                              [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>;
def BPFcallseq_start : SDNode<"ISD::CALLSEQ_START", SDT_BPFCallSeqStart,
                              [SDNPHasChain, SDNPOutGlue]>;
def BPFcallseq_end   : SDNode<"ISD::CALLSEQ_END", SDT_BPFCallSeqEnd,
                              [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;
def BPFbrcc          : SDNode<"BPFISD::BR_CC", SDT_BPFBrCC,
                              [SDNPHasChain, SDNPOutGlue, SDNPInGlue]>;
def BPFselectcc      : SDNode<"BPFISD::SELECT_CC", SDT_BPFSelectCC, [SDNPInGlue]>;
def BPFWrapper       : SDNode<"BPFISD::Wrapper", SDT_BPFWrapper>;
def BPFmemcpy        : SDNode<"BPFISD::MEMCPY", SDT_BPFMEMCPY,
                              [SDNPHasChain, SDNPInGlue, SDNPOutGlue,
                               SDNPMayStore, SDNPMayLoad]>;
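
// Selection predicates: the endianness of the target data layout, and whether
// the 32-bit ALU (ALU32 / w-register) subtarget feature is enabled.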
def BPFIsLittleEndian : Predicate<"CurDAG->getDataLayout().isLittleEndian()">;
def BPFIsBigEndian    : Predicate<"!CurDAG->getDataLayout().isLittleEndian()">;
def BPFHasALU32       : Predicate<"Subtarget->getHasAlu32()">;
def BPFNoALU32        : Predicate<"!Subtarget->getHasAlu32()">;

def brtarget : Operand<OtherVT> {
  let PrintMethod = "printBrTargetOperand";
}

def calltarget : Operand<i64>;

def u64imm : Operand<i64> {
  let PrintMethod = "printImm64Operand";
}

def i64immSExt32 : PatLeaf<(i64 imm),
                           [{return isInt<32>(N->getSExtValue()); }]>;
def i32immSExt32 : PatLeaf<(i32 imm),
                           [{return isInt<32>(N->getSExtValue()); }]>;

// Addressing modes.
def ADDRri : ComplexPattern<i64, 2, "SelectAddr", [], []>;
def FIri : ComplexPattern<i64, 2, "SelectFIAddr", [add, or], []>;

// Address operands
def MEMri : Operand<i64> {
  let PrintMethod = "printMemOperand";
  let EncoderMethod = "getMemoryOpValue";
  let DecoderMethod = "decodeMemoryOpValue";
  let MIOperandInfo = (ops GPR, i16imm);
}

// Conditional code predicates - used for pattern matching for jump instructions
def BPF_CC_EQ  : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETEQ);}]>;
def BPF_CC_NE  : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETNE);}]>;
def BPF_CC_GE  : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETGE);}]>;
def BPF_CC_GT  : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETGT);}]>;
def BPF_CC_GTU : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETUGT);}]>;
def BPF_CC_GEU : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETUGE);}]>;
def BPF_CC_LE  : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETLE);}]>;
def BPF_CC_LT  : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETLT);}]>;
def BPF_CC_LTU : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETULT);}]>;
def BPF_CC_LEU : PatLeaf<(i64 imm),
                         [{return (N->getZExtValue() == ISD::SETULE);}]>;
def BPF_CC_EQ_32  : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETEQ);}]>;
def BPF_CC_NE_32  : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETNE);}]>;
def BPF_CC_GE_32  : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETGE);}]>;
def BPF_CC_GT_32  : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETGT);}]>;
def BPF_CC_GTU_32 : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETUGT);}]>;
def BPF_CC_GEU_32 : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETUGE);}]>;
def BPF_CC_LE_32  : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETLE);}]>;
def BPF_CC_LT_32  : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETLT);}]>;
def BPF_CC_LTU_32 : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETULT);}]>;
def BPF_CC_LEU_32 : PatLeaf<(i32 imm),
                            [{return (N->getZExtValue() == ISD::SETULE);}]>;

// For arithmetic and jump instructions the 8-bit 'code'
// field is divided into three parts:
//
//   +----------------+--------+--------------------+
//   |   4 bits       |  1 bit |   3 bits           |
//   | operation code | source | instruction class  |
//   +----------------+--------+--------------------+
//  (MSB)                                      (LSB)
class TYPE_ALU_JMP<bits<4> op, bits<1> srctype,
                   dag outs, dag ins, string asmstr, list<dag> pattern>
    : InstBPF<outs, ins, asmstr, pattern> {
  let Inst{63-60} = op;
  let Inst{59} = srctype;
}

// For load and store instructions the 8-bit 'code' field is divided as:
//
//   +--------+--------+-------------------+
//   | 3 bits | 2 bits |   3 bits          |
//   |  mode  |  size  | instruction class |
//   +--------+--------+-------------------+
//  (MSB)                             (LSB)
class TYPE_LD_ST<bits<3> mode, bits<2> size,
                 dag outs, dag ins, string asmstr, list<dag> pattern>
    : InstBPF<outs, ins, asmstr, pattern> {
  let Inst{63-61} = mode;
  let Inst{60-59} = size;
}

// jump instructions
class JMP_RR<BPFJumpOp Opc, string OpcodeStr, PatLeaf Cond>
    : TYPE_ALU_JMP<Opc.Value, BPF_X.Value,
                   (outs),
                   (ins GPR:$dst, GPR:$src, brtarget:$BrDst),
                   "if $dst "#OpcodeStr#" $src goto $BrDst",
                   [(BPFbrcc i64:$dst, i64:$src, Cond, bb:$BrDst)]> {
  bits<4> dst;
  bits<4> src;
  bits<16> BrDst;

  let Inst{55-52} = src;
  let Inst{51-48} = dst;
  let Inst{47-32} = BrDst;
  let BPFClass = BPF_JMP;
}

class JMP_RI<BPFJumpOp Opc, string OpcodeStr, PatLeaf Cond>
    : TYPE_ALU_JMP<Opc.Value, BPF_K.Value,
                   (outs),
                   (ins GPR:$dst, i64imm:$imm, brtarget:$BrDst),
                   "if $dst "#OpcodeStr#" $imm goto $BrDst",
                   [(BPFbrcc i64:$dst, i64immSExt32:$imm, Cond, bb:$BrDst)]> {
  bits<4> dst;
  bits<16> BrDst;
  bits<32> imm;

  let Inst{51-48} = dst;
  let Inst{47-32} = BrDst;
  let Inst{31-0} = imm;
  let BPFClass = BPF_JMP;
}

class JMP_RR_32<BPFJumpOp Opc, string OpcodeStr, PatLeaf Cond>
    : TYPE_ALU_JMP<Opc.Value, BPF_X.Value,
                   (outs),
                   (ins GPR32:$dst, GPR32:$src, brtarget:$BrDst),
                   "if $dst "#OpcodeStr#" $src goto $BrDst",
                   [(BPFbrcc i32:$dst, i32:$src, Cond, bb:$BrDst)]> {
  bits<4> dst;
  bits<4> src;
  bits<16> BrDst;

  let Inst{55-52} = src;
  let Inst{51-48} = dst;
  let Inst{47-32} = BrDst;
  let BPFClass = BPF_JMP32;
}

class JMP_RI_32<BPFJumpOp Opc, string OpcodeStr, PatLeaf Cond>
    : TYPE_ALU_JMP<Opc.Value, BPF_K.Value,
                   (outs),
                   (ins GPR32:$dst, i32imm:$imm, brtarget:$BrDst),
                   "if $dst "#OpcodeStr#" $imm goto $BrDst",
                   [(BPFbrcc i32:$dst, i32immSExt32:$imm, Cond, bb:$BrDst)]> {
  bits<4> dst;
  bits<16> BrDst;
  bits<32> imm;

  let Inst{51-48} = dst;
  let Inst{47-32} = BrDst;
  let Inst{31-0} = imm;
  let BPFClass = BPF_JMP32;
}

multiclass J<BPFJumpOp Opc, string OpcodeStr, PatLeaf Cond, PatLeaf Cond32> {
  def _rr : JMP_RR<Opc, OpcodeStr, Cond>;
  def _ri : JMP_RI<Opc, OpcodeStr, Cond>;
  def _rr_32 : JMP_RR_32<Opc, OpcodeStr, Cond32>;
  def _ri_32 : JMP_RI_32<Opc, OpcodeStr, Cond32>;
}

let isBranch = 1, isTerminator = 1, hasDelaySlot=0 in {
// cmp+goto instructions
defm JEQ  : J<BPF_JEQ, "==", BPF_CC_EQ, BPF_CC_EQ_32>;
defm JUGT : J<BPF_JGT, ">", BPF_CC_GTU, BPF_CC_GTU_32>;
defm JUGE : J<BPF_JGE, ">=", BPF_CC_GEU, BPF_CC_GEU_32>;
defm JNE  : J<BPF_JNE, "!=", BPF_CC_NE, BPF_CC_NE_32>;
defm JSGT : J<BPF_JSGT, "s>", BPF_CC_GT, BPF_CC_GT_32>;
defm JSGE : J<BPF_JSGE, "s>=", BPF_CC_GE, BPF_CC_GE_32>;
defm JULT : J<BPF_JLT, "<", BPF_CC_LTU, BPF_CC_LTU_32>;
defm JULE : J<BPF_JLE, "<=", BPF_CC_LEU, BPF_CC_LEU_32>;
defm JSLT : J<BPF_JSLT, "s<", BPF_CC_LT, BPF_CC_LT_32>;
defm JSLE : J<BPF_JSLE, "s<=", BPF_CC_LE, BPF_CC_LE_32>;
}
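
// Example of the branch syntax produced by the J multiclass (illustrative
// only; register numbers and labels are arbitrary):
//   if r1 == r2 goto LBB0_2     // JEQ_rr
//   if r3 s> 10 goto LBB0_3     // JSGT_ri
//   if w4 < w5 goto LBB0_4      // JULT_rr_32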

// ALU instructions
class ALU_RI<BPFOpClass Class, BPFArithOp Opc,
             dag outs, dag ins, string asmstr, list<dag> pattern>
    : TYPE_ALU_JMP<Opc.Value, BPF_K.Value, outs, ins, asmstr, pattern> {
  bits<4> dst;
  bits<32> imm;

  let Inst{51-48} = dst;
  let Inst{31-0} = imm;
  let BPFClass = Class;
}

class ALU_RR<BPFOpClass Class, BPFArithOp Opc,
             dag outs, dag ins, string asmstr, list<dag> pattern>
    : TYPE_ALU_JMP<Opc.Value, BPF_X.Value, outs, ins, asmstr, pattern> {
  bits<4> dst;
  bits<4> src;

  let Inst{55-52} = src;
  let Inst{51-48} = dst;
  let BPFClass = Class;
}

multiclass ALU<BPFArithOp Opc, string OpcodeStr, SDNode OpNode> {
  def _rr : ALU_RR<BPF_ALU64, Opc,
                   (outs GPR:$dst),
                   (ins GPR:$src2, GPR:$src),
                   "$dst "#OpcodeStr#" $src",
                   [(set GPR:$dst, (OpNode i64:$src2, i64:$src))]>;
  def _ri : ALU_RI<BPF_ALU64, Opc,
                   (outs GPR:$dst),
                   (ins GPR:$src2, i64imm:$imm),
                   "$dst "#OpcodeStr#" $imm",
                   [(set GPR:$dst, (OpNode GPR:$src2, i64immSExt32:$imm))]>;
  def _rr_32 : ALU_RR<BPF_ALU, Opc,
                      (outs GPR32:$dst),
                      (ins GPR32:$src2, GPR32:$src),
                      "$dst "#OpcodeStr#" $src",
                      [(set GPR32:$dst, (OpNode i32:$src2, i32:$src))]>;
  def _ri_32 : ALU_RI<BPF_ALU, Opc,
                      (outs GPR32:$dst),
                      (ins GPR32:$src2, i32imm:$imm),
                      "$dst "#OpcodeStr#" $imm",
                      [(set GPR32:$dst, (OpNode GPR32:$src2, i32immSExt32:$imm))]>;
}

let Constraints = "$dst = $src2" in {
  let isAsCheapAsAMove = 1 in {
    defm ADD : ALU<BPF_ADD, "+=", add>;
    defm SUB : ALU<BPF_SUB, "-=", sub>;
    defm OR  : ALU<BPF_OR, "|=", or>;
    defm AND : ALU<BPF_AND, "&=", and>;
    defm SLL : ALU<BPF_LSH, "<<=", shl>;
    defm SRL : ALU<BPF_RSH, ">>=", srl>;
    defm XOR : ALU<BPF_XOR, "^=", xor>;
    defm SRA : ALU<BPF_ARSH, "s>>=", sra>;
  }
  defm MUL : ALU<BPF_MUL, "*=", mul>;
  defm DIV : ALU<BPF_DIV, "/=", udiv>;
  defm MOD : ALU<BPF_MOD, "%=", urem>;
}
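
// Example of the ALU syntax produced by the multiclass above (64-bit forms
// operate on r-registers, 32-bit forms on w-registers; register numbers are
// arbitrary):
//   r1 += r2      // ADD_rr
//   r1 *= 4       // MUL_ri
//   w3 s>>= 2     // SRA_ri_32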

class NEG_RR<BPFOpClass Class, BPFArithOp Opc,
             dag outs, dag ins, string asmstr, list<dag> pattern>
    : TYPE_ALU_JMP<Opc.Value, 0, outs, ins, asmstr, pattern> {
  bits<4> dst;

  let Inst{51-48} = dst;
  let BPFClass = Class;
}

let Constraints = "$dst = $src", isAsCheapAsAMove = 1 in {
  def NEG_64: NEG_RR<BPF_ALU64, BPF_NEG, (outs GPR:$dst), (ins GPR:$src),
                     "$dst = -$src",
                     [(set GPR:$dst, (ineg i64:$src))]>;
  def NEG_32: NEG_RR<BPF_ALU, BPF_NEG, (outs GPR32:$dst), (ins GPR32:$src),
                     "$dst = -$src",
                     [(set GPR32:$dst, (ineg i32:$src))]>;
}

class LD_IMM64<bits<4> Pseudo, string OpcodeStr>
    : TYPE_LD_ST<BPF_IMM.Value, BPF_DW.Value,
                 (outs GPR:$dst),
                 (ins u64imm:$imm),
                 "$dst "#OpcodeStr#" ${imm} ll",
                 [(set GPR:$dst, (i64 imm:$imm))]> {
  bits<4> dst;
  bits<64> imm;

  let Inst{51-48} = dst;
  let Inst{55-52} = Pseudo;
  let Inst{47-32} = 0;
  let Inst{31-0} = imm{31-0};
  let BPFClass = BPF_LD;
}

let isReMaterializable = 1, isAsCheapAsAMove = 1 in {
def LD_imm64 : LD_IMM64<0, "=">;
def MOV_rr : ALU_RR<BPF_ALU64, BPF_MOV,
                    (outs GPR:$dst),
                    (ins GPR:$src),
                    "$dst = $src",
                    []>;
def MOV_ri : ALU_RI<BPF_ALU64, BPF_MOV,
                    (outs GPR:$dst),
                    (ins i64imm:$imm),
                    "$dst = $imm",
                    [(set GPR:$dst, (i64 i64immSExt32:$imm))]>;
def MOV_rr_32 : ALU_RR<BPF_ALU, BPF_MOV,
                       (outs GPR32:$dst),
                       (ins GPR32:$src),
                       "$dst = $src",
                       []>;
def MOV_ri_32 : ALU_RI<BPF_ALU, BPF_MOV,
                       (outs GPR32:$dst),
                       (ins i32imm:$imm),
                       "$dst = $imm",
                       [(set GPR32:$dst, (i32 i32immSExt32:$imm))]>;
}

def FI_ri
    : TYPE_LD_ST<BPF_IMM.Value, BPF_DW.Value,
                 (outs GPR:$dst),
                 (ins MEMri:$addr),
                 "lea\t$dst, $addr",
                 [(set i64:$dst, FIri:$addr)]> {
  // This is a tentative instruction, and will be replaced
  // with MOV_rr and ADD_ri in PEI phase
  let Inst{51-48} = 0;
  let Inst{55-52} = 2;
  let Inst{47-32} = 0;
  let Inst{31-0} = 0;
  let BPFClass = BPF_LD;
  bit isPseudo = true;
}

def LD_pseudo
    : TYPE_LD_ST<BPF_IMM.Value, BPF_DW.Value,
                 (outs GPR:$dst),
                 (ins i64imm:$pseudo, u64imm:$imm),
                 "ld_pseudo\t$dst, $pseudo, $imm",
                 [(set GPR:$dst, (int_bpf_pseudo imm:$pseudo, imm:$imm))]> {
  bits<4> dst;
  bits<64> imm;
  bits<4> pseudo;

  let Inst{51-48} = dst;
  let Inst{55-52} = pseudo;
  let Inst{47-32} = 0;
  let Inst{31-0} = imm{31-0};
  let BPFClass = BPF_LD;
}

// STORE instructions
class STORE<BPFWidthModifer SizeOp, string OpcodeStr, list<dag> Pattern>
    : TYPE_LD_ST<BPF_MEM.Value, SizeOp.Value,
                 (outs),
                 (ins GPR:$src, MEMri:$addr),
                 "*("#OpcodeStr#" *)($addr) = $src",
                 Pattern> {
  bits<4> src;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = src;
  let Inst{47-32} = addr{15-0}; // offset
  let BPFClass = BPF_STX;
}

class STOREi64<BPFWidthModifer Opc, string OpcodeStr, PatFrag OpNode>
    : STORE<Opc, OpcodeStr, [(OpNode i64:$src, ADDRri:$addr)]>;

let Predicates = [BPFNoALU32] in {
  def STW : STOREi64<BPF_W, "u32", truncstorei32>;
  def STH : STOREi64<BPF_H, "u16", truncstorei16>;
  def STB : STOREi64<BPF_B, "u8", truncstorei8>;
}

def STD : STOREi64<BPF_DW, "u64", store>;

// LOAD instructions
class LOAD<BPFWidthModifer SizeOp, string OpcodeStr, list<dag> Pattern>
    : TYPE_LD_ST<BPF_MEM.Value, SizeOp.Value,
                 (outs GPR:$dst),
                 (ins MEMri:$addr),
                 "$dst = *("#OpcodeStr#" *)($addr)",
                 Pattern> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = dst;
  let Inst{55-52} = addr{19-16};
  let Inst{47-32} = addr{15-0};
  let BPFClass = BPF_LDX;
}

class LOADi64<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : LOAD<SizeOp, OpcodeStr, [(set i64:$dst, (OpNode ADDRri:$addr))]>;
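
// CO-RE (compile once - run everywhere) relocation placeholders. These
// isCodeGenOnly instructions stand in for patchable memory accesses and
// shifts and are rewritten into ordinary loads, stores and ALU operations by
// later BPF-specific machine passes (e.g. BPFMISimplifyPatchable).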
let isCodeGenOnly = 1 in {
  def CORE_MEM : TYPE_LD_ST<BPF_MEM.Value, BPF_W.Value,
                            (outs GPR:$dst),
                            (ins u64imm:$opcode, GPR:$src, u64imm:$offset),
                            "$dst = core_mem($opcode, $src, $offset)",
                            []>;
  def CORE_ALU32_MEM : TYPE_LD_ST<BPF_MEM.Value, BPF_W.Value,
                                  (outs GPR32:$dst),
                                  (ins u64imm:$opcode, GPR:$src, u64imm:$offset),
                                  "$dst = core_alu32_mem($opcode, $src, $offset)",
                                  []>;
  let Constraints = "$dst = $src" in {
    def CORE_SHIFT : ALU_RR<BPF_ALU64, BPF_LSH,
                            (outs GPR:$dst),
                            (ins u64imm:$opcode, GPR:$src, u64imm:$offset),
                            "$dst = core_shift($opcode, $src, $offset)",
                            []>;
  }
}

let Predicates = [BPFNoALU32] in {
  def LDW : LOADi64<BPF_W, "u32", zextloadi32>;
  def LDH : LOADi64<BPF_H, "u16", zextloadi16>;
  def LDB : LOADi64<BPF_B, "u8", zextloadi8>;
}

def LDD : LOADi64<BPF_DW, "u64", load>;
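
// Example of the load/store syntax defined above (MEMri prints as a base
// register plus signed 16-bit offset; register numbers are arbitrary):
//   *(u16 *)(r1 + 8) = r2       // STH
//   r3 = *(u64 *)(r10 - 16)     // LDD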

class BRANCH<BPFJumpOp Opc, string OpcodeStr, list<dag> Pattern>
    : TYPE_ALU_JMP<Opc.Value, BPF_K.Value,
                   (outs),
                   (ins brtarget:$BrDst),
                   !strconcat(OpcodeStr, " $BrDst"),
                   Pattern> {
  bits<16> BrDst;

  let Inst{47-32} = BrDst;
  let BPFClass = BPF_JMP;
}

class CALL<string OpcodeStr>
    : TYPE_ALU_JMP<BPF_CALL.Value, BPF_K.Value,
                   (outs),
                   (ins calltarget:$BrDst),
                   !strconcat(OpcodeStr, " $BrDst"),
                   []> {
  bits<32> BrDst;

  let Inst{31-0} = BrDst;
  let BPFClass = BPF_JMP;
}

class CALLX<string OpcodeStr>
    : TYPE_ALU_JMP<BPF_CALL.Value, BPF_X.Value,
                   (outs),
                   (ins GPR:$BrDst),
                   !strconcat(OpcodeStr, " $BrDst"),
                   []> {
  bits<32> BrDst;

  let Inst{31-0} = BrDst;
  let BPFClass = BPF_JMP;
}

// Jump always
let isBranch = 1, isTerminator = 1, hasDelaySlot=0, isBarrier = 1 in {
  def JMP : BRANCH<BPF_JA, "goto", [(br bb:$BrDst)]>;
}

// Jump and link
let isCall=1, hasDelaySlot=0, Uses = [R11],
    // Potentially clobbered registers
    Defs = [R0, R1, R2, R3, R4, R5] in {
  def JAL  : CALL<"call">;
  def JALX : CALLX<"callx">;
}

class NOP_I<string OpcodeStr>
    : TYPE_ALU_JMP<BPF_MOV.Value, BPF_X.Value,
                   (outs),
                   (ins i32imm:$imm),
                   !strconcat(OpcodeStr, "\t$imm"),
                   []> {
  // mov r0, r0 == nop
  let Inst{55-52} = 0;
  let Inst{51-48} = 0;
  let BPFClass = BPF_ALU64;
}

let hasSideEffects = 0, isCodeGenOnly = 1 in
  def NOP : NOP_I<"nop">;

class RET<string OpcodeStr>
    : TYPE_ALU_JMP<BPF_EXIT.Value, BPF_K.Value,
                   (outs),
                   (ins),
                   !strconcat(OpcodeStr, ""),
                   [(BPFretflag)]> {
  let Inst{31-0} = 0;
  let BPFClass = BPF_JMP;
}

let isReturn = 1, isTerminator = 1, hasDelaySlot=0, isBarrier = 1,
    isNotDuplicable = 1 in {
  def RET : RET<"exit">;
}

// ADJCALLSTACKDOWN/UP pseudo insns
let Defs = [R11], Uses = [R11], isCodeGenOnly = 1 in {
  def ADJCALLSTACKDOWN : Pseudo<(outs), (ins i64imm:$amt1, i64imm:$amt2),
                                "#ADJCALLSTACKDOWN $amt1 $amt2",
                                [(BPFcallseq_start timm:$amt1, timm:$amt2)]>;
  def ADJCALLSTACKUP : Pseudo<(outs), (ins i64imm:$amt1, i64imm:$amt2),
                              "#ADJCALLSTACKUP $amt1 $amt2",
                              [(BPFcallseq_end timm:$amt1, timm:$amt2)]>;
}
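
// Conditional-select pseudos. With usesCustomInserter = 1 each of these is
// expanded by the target's custom inserter into a compare-and-branch diamond;
// the _Ri / _32 / _64_32 / _32_64 variants cover register vs. immediate
// comparisons and the 32/64-bit width combinations of the compared operands
// and the selected values.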
let usesCustomInserter = 1, isCodeGenOnly = 1 in {
  def Select : Pseudo<(outs GPR:$dst),
                      (ins GPR:$lhs, GPR:$rhs, i64imm:$imm, GPR:$src, GPR:$src2),
                      "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                      [(set i64:$dst,
                       (BPFselectcc i64:$lhs, i64:$rhs, (i64 imm:$imm), i64:$src, i64:$src2))]>;
  def Select_Ri : Pseudo<(outs GPR:$dst),
                         (ins GPR:$lhs, i64imm:$rhs, i64imm:$imm, GPR:$src, GPR:$src2),
                         "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                         [(set i64:$dst,
                          (BPFselectcc i64:$lhs, (i64immSExt32:$rhs), (i64 imm:$imm), i64:$src, i64:$src2))]>;
  def Select_64_32 : Pseudo<(outs GPR32:$dst),
                            (ins GPR:$lhs, GPR:$rhs, i64imm:$imm, GPR32:$src, GPR32:$src2),
                            "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                            [(set i32:$dst,
                             (BPFselectcc i64:$lhs, i64:$rhs, (i64 imm:$imm), i32:$src, i32:$src2))]>;
  def Select_Ri_64_32 : Pseudo<(outs GPR32:$dst),
                               (ins GPR:$lhs, i64imm:$rhs, i64imm:$imm, GPR32:$src, GPR32:$src2),
                               "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                               [(set i32:$dst,
                                (BPFselectcc i64:$lhs, (i64immSExt32:$rhs), (i64 imm:$imm), i32:$src, i32:$src2))]>;
  def Select_32 : Pseudo<(outs GPR32:$dst),
                         (ins GPR32:$lhs, GPR32:$rhs, i32imm:$imm, GPR32:$src, GPR32:$src2),
                         "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                         [(set i32:$dst,
                          (BPFselectcc i32:$lhs, i32:$rhs, (i32 imm:$imm), i32:$src, i32:$src2))]>;
  def Select_Ri_32 : Pseudo<(outs GPR32:$dst),
                            (ins GPR32:$lhs, i32imm:$rhs, i32imm:$imm, GPR32:$src, GPR32:$src2),
                            "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                            [(set i32:$dst,
                             (BPFselectcc i32:$lhs, (i32immSExt32:$rhs), (i32 imm:$imm), i32:$src, i32:$src2))]>;
  def Select_32_64 : Pseudo<(outs GPR:$dst),
                            (ins GPR32:$lhs, GPR32:$rhs, i32imm:$imm, GPR:$src, GPR:$src2),
                            "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                            [(set i64:$dst,
                             (BPFselectcc i32:$lhs, i32:$rhs, (i32 imm:$imm), i64:$src, i64:$src2))]>;
  def Select_Ri_32_64 : Pseudo<(outs GPR:$dst),
                               (ins GPR32:$lhs, i32imm:$rhs, i32imm:$imm, GPR:$src, GPR:$src2),
                               "# Select PSEUDO $dst = $lhs $imm $rhs ? $src : $src2",
                               [(set i64:$dst,
                                (BPFselectcc i32:$lhs, (i32immSExt32:$rhs), (i32 imm:$imm), i64:$src, i64:$src2))]>;
}

// load 64-bit global addr into register
def : Pat<(BPFWrapper tglobaladdr:$in), (LD_imm64 tglobaladdr:$in)>;

// 0xffffFFFF doesn't fit into simm32, optimize common case
def : Pat<(i64 (and (i64 GPR:$src), 0xffffFFFF)),
          (SRL_ri (SLL_ri (i64 GPR:$src), 32), 32)>;

// Calls
def : Pat<(BPFcall tglobaladdr:$dst), (JAL tglobaladdr:$dst)>;
def : Pat<(BPFcall texternalsym:$dst), (JAL texternalsym:$dst)>;
def : Pat<(BPFcall imm:$dst), (JAL imm:$dst)>;
def : Pat<(BPFcall GPR:$dst), (JALX GPR:$dst)>;

// Loads
let Predicates = [BPFNoALU32] in {
  def : Pat<(i64 (extloadi8 ADDRri:$src)), (i64 (LDB ADDRri:$src))>;
  def : Pat<(i64 (extloadi16 ADDRri:$src)), (i64 (LDH ADDRri:$src))>;
  def : Pat<(i64 (extloadi32 ADDRri:$src)), (i64 (LDW ADDRri:$src))>;
}

// Atomic XADD for BPFNoALU32
class XADD<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs GPR:$dst),
                 (ins MEMri:$addr, GPR:$val),
                 "lock *("#OpcodeStr#" *)($addr) += $val",
                 [(set GPR:$dst, (OpNode ADDRri:$addr, GPR:$val))]> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = BPF_ADD.Value;
  let BPFClass = BPF_STX;
}

let Constraints = "$dst = $val" in {
  let Predicates = [BPFNoALU32] in {
    def XADDW : XADD<BPF_W, "u32", atomic_load_add_32>;
  }
}

// Atomic add, and, or, xor
class ATOMIC_NOFETCH<BPFArithOp Opc, string Opstr>
    : TYPE_LD_ST<BPF_ATOMIC.Value, BPF_DW.Value,
                 (outs GPR:$dst),
                 (ins MEMri:$addr, GPR:$val),
                 "lock *(u64 *)($addr) " #Opstr# "= $val",
                 []> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = Opc.Value;
  let BPFClass = BPF_STX;
}

class ATOMIC32_NOFETCH<BPFArithOp Opc, string Opstr>
    : TYPE_LD_ST<BPF_ATOMIC.Value, BPF_W.Value,
                 (outs GPR32:$dst),
                 (ins MEMri:$addr, GPR32:$val),
                 "lock *(u32 *)($addr) " #Opstr# "= $val",
                 []> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = Opc.Value;
  let BPFClass = BPF_STX;
}

let Constraints = "$dst = $val" in {
  let Predicates = [BPFHasALU32], DecoderNamespace = "BPFALU32" in {
    def XADDW32 : ATOMIC32_NOFETCH<BPF_ADD, "+">;
    def XANDW32 : ATOMIC32_NOFETCH<BPF_AND, "&">;
    def XORW32  : ATOMIC32_NOFETCH<BPF_OR, "|">;
    def XXORW32 : ATOMIC32_NOFETCH<BPF_XOR, "^">;
  }

  def XADDD : ATOMIC_NOFETCH<BPF_ADD, "+">;
  def XANDD : ATOMIC_NOFETCH<BPF_AND, "&">;
  def XORD  : ATOMIC_NOFETCH<BPF_OR, "|">;
  def XXORD : ATOMIC_NOFETCH<BPF_XOR, "^">;
}

// Atomic Fetch-and-<add, and, or, xor> operations
class XFALU64<BPFWidthModifer SizeOp, BPFArithOp Opc, string OpcodeStr,
              string OpcStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs GPR:$dst),
                 (ins MEMri:$addr, GPR:$val),
                 "$dst = atomic_fetch_"#OpcStr#"(("#OpcodeStr#" *)($addr), $val)",
                 [(set GPR:$dst, (OpNode ADDRri:$addr, GPR:$val))]> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = Opc.Value;
  let Inst{3-0} = BPF_FETCH.Value;
  let BPFClass = BPF_STX;
}

class XFALU32<BPFWidthModifer SizeOp, BPFArithOp Opc, string OpcodeStr,
              string OpcStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs GPR32:$dst),
                 (ins MEMri:$addr, GPR32:$val),
                 "$dst = atomic_fetch_"#OpcStr#"(("#OpcodeStr#" *)($addr), $val)",
                 [(set GPR32:$dst, (OpNode ADDRri:$addr, GPR32:$val))]> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = Opc.Value;
  let Inst{3-0} = BPF_FETCH.Value;
  let BPFClass = BPF_STX;
}

let Constraints = "$dst = $val" in {
  let Predicates = [BPFHasALU32], DecoderNamespace = "BPFALU32" in {
    def XFADDW32 : XFALU32<BPF_W, BPF_ADD, "u32", "add", atomic_load_add_32>;
    def XFANDW32 : XFALU32<BPF_W, BPF_AND, "u32", "and", atomic_load_and_32>;
    def XFORW32  : XFALU32<BPF_W, BPF_OR, "u32", "or", atomic_load_or_32>;
    def XFXORW32 : XFALU32<BPF_W, BPF_XOR, "u32", "xor", atomic_load_xor_32>;
  }

  def XFADDD : XFALU64<BPF_DW, BPF_ADD, "u64", "add", atomic_load_add_64>;
  def XFANDD : XFALU64<BPF_DW, BPF_AND, "u64", "and", atomic_load_and_64>;
  def XFORD  : XFALU64<BPF_DW, BPF_OR, "u64", "or", atomic_load_or_64>;
  def XFXORD : XFALU64<BPF_DW, BPF_XOR, "u64", "xor", atomic_load_xor_64>;
}

// atomic_load_sub can be represented as a neg followed
// by an atomic_load_add.
def : Pat<(atomic_load_sub_32 ADDRri:$addr, GPR32:$val),
          (XFADDW32 ADDRri:$addr, (NEG_32 GPR32:$val))>;
def : Pat<(atomic_load_sub_64 ADDRri:$addr, GPR:$val),
          (XFADDD ADDRri:$addr, (NEG_64 GPR:$val))>;

// Atomic Exchange
class XCHG<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs GPR:$dst),
                 (ins MEMri:$addr, GPR:$val),
                 "$dst = xchg_"#OpcodeStr#"($addr, $val)",
                 [(set GPR:$dst, (OpNode ADDRri:$addr,GPR:$val))]> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = BPF_XCHG.Value;
  let Inst{3-0} = BPF_FETCH.Value;
  let BPFClass = BPF_STX;
}

class XCHG32<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs GPR32:$dst),
                 (ins MEMri:$addr, GPR32:$val),
                 "$dst = xchg32_"#OpcodeStr#"($addr, $val)",
                 [(set GPR32:$dst, (OpNode ADDRri:$addr,GPR32:$val))]> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = dst;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = BPF_XCHG.Value;
  let Inst{3-0} = BPF_FETCH.Value;
  let BPFClass = BPF_STX;
}

let Constraints = "$dst = $val" in {
  let Predicates = [BPFHasALU32], DecoderNamespace = "BPFALU32" in {
    def XCHGW32 : XCHG32<BPF_W, "32", atomic_swap_32>;
  }

  def XCHGD : XCHG<BPF_DW, "64", atomic_swap_64>;
}

// Compare-And-Exchange
class CMPXCHG<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs),
                 (ins MEMri:$addr, GPR:$new),
                 "r0 = cmpxchg_"#OpcodeStr#"($addr, r0, $new)",
                 [(set R0, (OpNode ADDRri:$addr, R0, GPR:$new))]> {
  bits<4> new;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = new;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = BPF_CMPXCHG.Value;
  let Inst{3-0} = BPF_FETCH.Value;
  let BPFClass = BPF_STX;
}

class CMPXCHG32<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : TYPE_LD_ST<BPF_ATOMIC.Value, SizeOp.Value,
                 (outs),
                 (ins MEMri:$addr, GPR32:$new),
                 "w0 = cmpxchg32_"#OpcodeStr#"($addr, w0, $new)",
                 [(set W0, (OpNode ADDRri:$addr, W0, GPR32:$new))]> {
  bits<4> new;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = new;
  let Inst{47-32} = addr{15-0}; // offset
  let Inst{7-4} = BPF_CMPXCHG.Value;
  let Inst{3-0} = BPF_FETCH.Value;
  let BPFClass = BPF_STX;
}

let Predicates = [BPFHasALU32], Defs = [W0], Uses = [W0],
    DecoderNamespace = "BPFALU32" in {
  def CMPXCHGW32 : CMPXCHG32<BPF_W, "32", atomic_cmp_swap_32>;
}

let Defs = [R0], Uses = [R0] in {
  def CMPXCHGD : CMPXCHG<BPF_DW, "64", atomic_cmp_swap_64>;
}

// bswap16, bswap32, bswap64
class BSWAP<bits<32> SizeOp, string OpcodeStr, BPFSrcType SrcType, list<dag> Pattern>
    : TYPE_ALU_JMP<BPF_END.Value, SrcType.Value,
                   (outs GPR:$dst),
                   (ins GPR:$src),
                   "$dst = "#OpcodeStr#" $src",
                   Pattern> {
  bits<4> dst;

  let Inst{51-48} = dst;
  let Inst{31-0} = SizeOp;
  let BPFClass = BPF_ALU;
}
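
// On the SelectionDAG side, a byte swap of the low 16/32 bits of a 64-bit
// value appears as a full-register bswap followed by a right shift of 48/32,
// which is what the be16/be32 (le16/le32) patterns below match.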
let Constraints = "$dst = $src" in {
  let Predicates = [BPFIsLittleEndian] in {
    def BE16 : BSWAP<16, "be16", BPF_TO_BE, [(set GPR:$dst, (srl (bswap GPR:$src), (i64 48)))]>;
    def BE32 : BSWAP<32, "be32", BPF_TO_BE, [(set GPR:$dst, (srl (bswap GPR:$src), (i64 32)))]>;
    def BE64 : BSWAP<64, "be64", BPF_TO_BE, [(set GPR:$dst, (bswap GPR:$src))]>;
  }

  let Predicates = [BPFIsBigEndian] in {
    def LE16 : BSWAP<16, "le16", BPF_TO_LE, [(set GPR:$dst, (srl (bswap GPR:$src), (i64 48)))]>;
    def LE32 : BSWAP<32, "le32", BPF_TO_LE, [(set GPR:$dst, (srl (bswap GPR:$src), (i64 32)))]>;
    def LE64 : BSWAP<64, "le64", BPF_TO_LE, [(set GPR:$dst, (bswap GPR:$src))]>;
  }
}
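
// Legacy packet-access loads (BPF_ABS / BPF_IND modes): the skb pointer is
// expected in R6, the loaded value is returned in R0, and R0-R5 may be
// clobbered, as reflected in the Defs/Uses below.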
let Defs = [R0, R1, R2, R3, R4, R5], Uses = [R6], hasSideEffects = 1,
    hasExtraDefRegAllocReq = 1, hasExtraSrcRegAllocReq = 1, mayLoad = 1 in {
class LOAD_ABS<BPFWidthModifer SizeOp, string OpcodeStr, Intrinsic OpNode>
    : TYPE_LD_ST<BPF_ABS.Value, SizeOp.Value,
                 (outs),
                 (ins GPR:$skb, i64imm:$imm),
                 "r0 = *("#OpcodeStr#" *)skb[$imm]",
                 [(set R0, (OpNode GPR:$skb, i64immSExt32:$imm))]> {
  bits<32> imm;

  let Inst{31-0} = imm;
  let BPFClass = BPF_LD;
}

class LOAD_IND<BPFWidthModifer SizeOp, string OpcodeStr, Intrinsic OpNode>
    : TYPE_LD_ST<BPF_IND.Value, SizeOp.Value,
                 (outs),
                 (ins GPR:$skb, GPR:$val),
                 "r0 = *("#OpcodeStr#" *)skb[$val]",
                 [(set R0, (OpNode GPR:$skb, GPR:$val))]> {
  bits<4> val;

  let Inst{55-52} = val;
  let BPFClass = BPF_LD;
}
}

def LD_ABS_B : LOAD_ABS<BPF_B, "u8", int_bpf_load_byte>;
def LD_ABS_H : LOAD_ABS<BPF_H, "u16", int_bpf_load_half>;
def LD_ABS_W : LOAD_ABS<BPF_W, "u32", int_bpf_load_word>;

def LD_IND_B : LOAD_IND<BPF_B, "u8", int_bpf_load_byte>;
def LD_IND_H : LOAD_IND<BPF_H, "u16", int_bpf_load_half>;
def LD_IND_W : LOAD_IND<BPF_W, "u32", int_bpf_load_word>;

let isCodeGenOnly = 1 in {
  def MOV_32_64 : ALU_RR<BPF_ALU, BPF_MOV,
                         (outs GPR:$dst), (ins GPR32:$src),
                         "$dst = $src", []>;
}
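
// A 32-bit (ALU32) move clears the upper 32 bits of its 64-bit destination,
// so zero extension is just MOV_32_64, while sign extension needs an explicit
// shift-left/arithmetic-shift-right pair.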
def : Pat<(i64 (sext GPR32:$src)),
          (SRA_ri (SLL_ri (MOV_32_64 GPR32:$src), 32), 32)>;

def : Pat<(i64 (zext GPR32:$src)), (MOV_32_64 GPR32:$src)>;

// For i64 -> i32 truncation, use the 32-bit subregister directly.
def : Pat<(i32 (trunc GPR:$src)),
          (i32 (EXTRACT_SUBREG GPR:$src, sub_32))>;

// For i32 -> i64 anyext, we don't care about the high bits.
def : Pat<(i64 (anyext GPR32:$src)),
          (INSERT_SUBREG (i64 (IMPLICIT_DEF)), GPR32:$src, sub_32)>;

class STORE32<BPFWidthModifer SizeOp, string OpcodeStr, list<dag> Pattern>
    : TYPE_LD_ST<BPF_MEM.Value, SizeOp.Value,
                 (outs),
                 (ins GPR32:$src, MEMri:$addr),
                 "*("#OpcodeStr#" *)($addr) = $src",
                 Pattern> {
  bits<4> src;
  bits<20> addr;

  let Inst{51-48} = addr{19-16}; // base reg
  let Inst{55-52} = src;
  let Inst{47-32} = addr{15-0}; // offset
  let BPFClass = BPF_STX;
}

class STOREi32<BPFWidthModifer Opc, string OpcodeStr, PatFrag OpNode>
    : STORE32<Opc, OpcodeStr, [(OpNode i32:$src, ADDRri:$addr)]>;

let Predicates = [BPFHasALU32], DecoderNamespace = "BPFALU32" in {
  def STW32 : STOREi32<BPF_W, "u32", store>;
  def STH32 : STOREi32<BPF_H, "u16", truncstorei16>;
  def STB32 : STOREi32<BPF_B, "u8", truncstorei8>;
}

class LOAD32<BPFWidthModifer SizeOp, string OpcodeStr, list<dag> Pattern>
    : TYPE_LD_ST<BPF_MEM.Value, SizeOp.Value,
                 (outs GPR32:$dst),
                 (ins MEMri:$addr),
                 "$dst = *("#OpcodeStr#" *)($addr)",
                 Pattern> {
  bits<4> dst;
  bits<20> addr;

  let Inst{51-48} = dst;
  let Inst{55-52} = addr{19-16};
  let Inst{47-32} = addr{15-0};
  let BPFClass = BPF_LDX;
}

class LOADi32<BPFWidthModifer SizeOp, string OpcodeStr, PatFrag OpNode>
    : LOAD32<SizeOp, OpcodeStr, [(set i32:$dst, (OpNode ADDRri:$addr))]>;

let Predicates = [BPFHasALU32], DecoderNamespace = "BPFALU32" in {
  def LDW32 : LOADi32<BPF_W, "u32", load>;
  def LDH32 : LOADi32<BPF_H, "u16", zextloadi16>;
  def LDB32 : LOADi32<BPF_B, "u8", zextloadi8>;
}

let Predicates = [BPFHasALU32] in {
  def : Pat<(truncstorei8 GPR:$src, ADDRri:$dst),
            (STB32 (EXTRACT_SUBREG GPR:$src, sub_32), ADDRri:$dst)>;
  def : Pat<(truncstorei16 GPR:$src, ADDRri:$dst),
            (STH32 (EXTRACT_SUBREG GPR:$src, sub_32), ADDRri:$dst)>;
  def : Pat<(truncstorei32 GPR:$src, ADDRri:$dst),
            (STW32 (EXTRACT_SUBREG GPR:$src, sub_32), ADDRri:$dst)>;
  def : Pat<(i32 (extloadi8 ADDRri:$src)), (i32 (LDB32 ADDRri:$src))>;
  def : Pat<(i32 (extloadi16 ADDRri:$src)), (i32 (LDH32 ADDRri:$src))>;
  def : Pat<(i64 (zextloadi8 ADDRri:$src)),
            (SUBREG_TO_REG (i64 0), (LDB32 ADDRri:$src), sub_32)>;
  def : Pat<(i64 (zextloadi16 ADDRri:$src)),
            (SUBREG_TO_REG (i64 0), (LDH32 ADDRri:$src), sub_32)>;
  def : Pat<(i64 (zextloadi32 ADDRri:$src)),
            (SUBREG_TO_REG (i64 0), (LDW32 ADDRri:$src), sub_32)>;
  def : Pat<(i64 (extloadi8 ADDRri:$src)),
            (SUBREG_TO_REG (i64 0), (LDB32 ADDRri:$src), sub_32)>;
  def : Pat<(i64 (extloadi16 ADDRri:$src)),
            (SUBREG_TO_REG (i64 0), (LDH32 ADDRri:$src), sub_32)>;
  def : Pat<(i64 (extloadi32 ADDRri:$src)),
            (SUBREG_TO_REG (i64 0), (LDW32 ADDRri:$src), sub_32)>;
}
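
// Inline memcpy pseudo matched from the BPFISD::MEMCPY node; with
// usesCustomInserter = 1 it is expanded after selection by the target's
// custom inserter (presumably into an inline sequence of loads and stores).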
let usesCustomInserter = 1, isCodeGenOnly = 1 in {
  def MEMCPY : Pseudo<
                 (outs),
                 (ins GPR:$dst, GPR:$src, i64imm:$len, i64imm:$align, variable_ops),
                 "#memcpy dst: $dst, src: $src, len: $len, align: $align",
                 [(BPFmemcpy GPR:$dst, GPR:$src, imm:$len, imm:$align)]>;
}