
//===-- X86InstrFragmentsSIMD.td - x86 SIMD ISA ------------*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file provides pattern fragments useful for SIMD instructions.
//
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// MMX specific DAG Nodes.
//===----------------------------------------------------------------------===//

// Low word of MMX to GPR.
def MMX_X86movd2w : SDNode<"X86ISD::MMX_MOVD2W", SDTypeProfile<1, 1,
                           [SDTCisVT<0, i32>, SDTCisVT<1, x86mmx>]>>;
// GPR to low word of MMX.
def MMX_X86movw2d : SDNode<"X86ISD::MMX_MOVW2D", SDTypeProfile<1, 1,
                           [SDTCisVT<0, x86mmx>, SDTCisVT<1, i32>]>>;

//===----------------------------------------------------------------------===//
// MMX Pattern Fragments
//===----------------------------------------------------------------------===//

def load_mmx : PatFrag<(ops node:$ptr), (x86mmx (load node:$ptr))>;

//===----------------------------------------------------------------------===//
// SSE specific DAG Nodes.
//===----------------------------------------------------------------------===//

def SDTX86VFCMP : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                       SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>,
                                       SDTCisVT<3, i8>]>;

def X86fmin : SDNode<"X86ISD::FMIN", SDTFPBinOp>;
def X86fmax : SDNode<"X86ISD::FMAX", SDTFPBinOp>;
def X86fmins : SDNode<"X86ISD::FMINS", SDTFPBinOp>;
def X86fmaxs : SDNode<"X86ISD::FMAXS", SDTFPBinOp>;

// Commutative and Associative FMIN and FMAX.
def X86fminc : SDNode<"X86ISD::FMINC", SDTFPBinOp,
                      [SDNPCommutative, SDNPAssociative]>;
def X86fmaxc : SDNode<"X86ISD::FMAXC", SDTFPBinOp,
                      [SDNPCommutative, SDNPAssociative]>;

def X86fand : SDNode<"X86ISD::FAND", SDTFPBinOp,
                     [SDNPCommutative, SDNPAssociative]>;
def X86for : SDNode<"X86ISD::FOR", SDTFPBinOp,
                    [SDNPCommutative, SDNPAssociative]>;
def X86fxor : SDNode<"X86ISD::FXOR", SDTFPBinOp,
                     [SDNPCommutative, SDNPAssociative]>;
def X86fandn : SDNode<"X86ISD::FANDN", SDTFPBinOp>;
def X86frsqrt : SDNode<"X86ISD::FRSQRT", SDTFPUnaryOp>;
def X86frcp : SDNode<"X86ISD::FRCP", SDTFPUnaryOp>;
def X86fhadd : SDNode<"X86ISD::FHADD", SDTFPBinOp>;
def X86fhsub : SDNode<"X86ISD::FHSUB", SDTFPBinOp>;
def X86hadd : SDNode<"X86ISD::HADD", SDTIntBinOp>;
def X86hsub : SDNode<"X86ISD::HSUB", SDTIntBinOp>;
def X86comi : SDNode<"X86ISD::COMI", SDTX86FCmp>;
def X86ucomi : SDNode<"X86ISD::UCOMI", SDTX86FCmp>;

def SDTX86Cmps : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisSameAs<0, 1>,
                                      SDTCisSameAs<1, 2>, SDTCisVT<3, i8>]>;
def X86cmps : SDNode<"X86ISD::FSETCC", SDTX86Cmps>;

def X86pshufb : SDNode<"X86ISD::PSHUFB",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i8>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>]>>;
def X86psadbw : SDNode<"X86ISD::PSADBW",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                            SDTCVecEltisVT<1, i8>,
                                            SDTCisSameSizeAs<0,1>,
                                            SDTCisSameAs<1,2>]>, [SDNPCommutative]>;
def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW",
                         SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
                                              SDTCVecEltisVT<1, i8>,
                                              SDTCisSameSizeAs<0,1>,
                                              SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>>;
def X86andnp : SDNode<"X86ISD::ANDNP",
                      SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                           SDTCisSameAs<0,2>]>>;
def X86multishift : SDNode<"X86ISD::MULTISHIFT",
                           SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                                SDTCisSameAs<1,2>]>>;
def X86pextrb : SDNode<"X86ISD::PEXTRB",
                       SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v16i8>,
                                            SDTCisVT<2, i8>]>>;
def X86pextrw : SDNode<"X86ISD::PEXTRW",
                       SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v8i16>,
                                            SDTCisVT<2, i8>]>>;
def X86pinsrb : SDNode<"X86ISD::PINSRB",
                       SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                            SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
def X86pinsrw : SDNode<"X86ISD::PINSRW",
                       SDTypeProfile<1, 3, [SDTCisVT<0, v8i16>, SDTCisSameAs<0,1>,
                                            SDTCisVT<2, i32>, SDTCisVT<3, i8>]>>;
def X86insertps : SDNode<"X86ISD::INSERTPS",
                         SDTypeProfile<1, 3, [SDTCisVT<0, v4f32>, SDTCisSameAs<0,1>,
                                              SDTCisVT<2, v4f32>, SDTCisVT<3, i8>]>>;
def X86vzmovl : SDNode<"X86ISD::VZEXT_MOVL",
                       SDTypeProfile<1, 1, [SDTCisSameAs<0,1>]>>;

def X86vzld : SDNode<"X86ISD::VZEXT_LOAD", SDTLoad,
                     [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86vextractst : SDNode<"X86ISD::VEXTRACT_STORE", SDTStore,
                           [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86VBroadcastld : SDNode<"X86ISD::VBROADCAST_LOAD", SDTLoad,
                             [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;
def X86SubVBroadcastld : SDNode<"X86ISD::SUBV_BROADCAST_LOAD", SDTLoad,
                                [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;

def SDTVtrunc : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                     SDTCisInt<0>, SDTCisInt<1>,
                                     SDTCisOpSmallerThanOp<0, 1>]>;
def SDTVmtrunc : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisInt<0>, SDTCisInt<1>,
                                      SDTCisOpSmallerThanOp<0, 1>,
                                      SDTCisSameAs<0, 2>,
                                      SDTCVecEltisVT<3, i1>,
                                      SDTCisSameNumEltsAs<1, 3>]>;

def X86vtrunc : SDNode<"X86ISD::VTRUNC", SDTVtrunc>;
def X86vtruncs : SDNode<"X86ISD::VTRUNCS", SDTVtrunc>;
def X86vtruncus : SDNode<"X86ISD::VTRUNCUS", SDTVtrunc>;
def X86vmtrunc : SDNode<"X86ISD::VMTRUNC", SDTVmtrunc>;
def X86vmtruncs : SDNode<"X86ISD::VMTRUNCS", SDTVmtrunc>;
def X86vmtruncus : SDNode<"X86ISD::VMTRUNCUS", SDTVmtrunc>;

def X86vfpext : SDNode<"X86ISD::VFPEXT",
                       SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                            SDTCisFP<1>, SDTCisVec<1>]>>;
def X86strict_vfpext : SDNode<"X86ISD::STRICT_VFPEXT",
                              SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                                   SDTCisFP<1>, SDTCisVec<1>]>,
                              [SDNPHasChain]>;
def X86any_vfpext : PatFrags<(ops node:$src),
                             [(X86strict_vfpext node:$src),
                              (X86vfpext node:$src)]>;
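// NOTE: each X86any_* PatFrags below pairs a node with its STRICT_* variant
// (which carries a chain for constrained-FP semantics), so one instruction
// pattern can match both the normal and the strict lowering of the operation.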
def X86vfpround: SDNode<"X86ISD::VFPROUND",
                        SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                             SDTCisFP<1>, SDTCisVec<1>,
                                             SDTCisOpSmallerThanOp<0, 1>]>>;
def X86strict_vfpround: SDNode<"X86ISD::STRICT_VFPROUND",
                               SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                                    SDTCisFP<1>, SDTCisVec<1>,
                                                    SDTCisOpSmallerThanOp<0, 1>]>,
                               [SDNPHasChain]>;
def X86any_vfpround : PatFrags<(ops node:$src),
                               [(X86strict_vfpround node:$src),
                                (X86vfpround node:$src)]>;

def X86frounds : SDNode<"X86ISD::VFPROUNDS",
                        SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisFP<2>, SDTCisVec<2>,
                                             SDTCisSameSizeAs<0, 2>]>>;
def X86froundsRnd: SDNode<"X86ISD::VFPROUNDS_RND",
                          SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                               SDTCisSameAs<0, 1>,
                                               SDTCisFP<2>, SDTCisVec<2>,
                                               SDTCisSameSizeAs<0, 2>,
                                               SDTCisVT<3, i32>]>>;
def X86fpexts : SDNode<"X86ISD::VFPEXTS",
                       SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                            SDTCisSameAs<0, 1>,
                                            SDTCisFP<2>, SDTCisVec<2>,
                                            SDTCisSameSizeAs<0, 2>]>>;
def X86fpextsSAE : SDNode<"X86ISD::VFPEXTS_SAE",
                          SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                               SDTCisSameAs<0, 1>,
                                               SDTCisFP<2>, SDTCisVec<2>,
                                               SDTCisSameSizeAs<0, 2>]>>;
def X86vmfpround: SDNode<"X86ISD::VMFPROUND",
                         SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                              SDTCisFP<1>, SDTCisVec<1>,
                                              SDTCisSameAs<0, 2>,
                                              SDTCVecEltisVT<3, i1>,
                                              SDTCisSameNumEltsAs<1, 3>]>>;

def X86vshiftimm : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisVT<2, i8>, SDTCisInt<0>]>;

def X86vshldq : SDNode<"X86ISD::VSHLDQ", X86vshiftimm>;
def X86vshrdq : SDNode<"X86ISD::VSRLDQ", X86vshiftimm>;
def X86pcmpeq : SDNode<"X86ISD::PCMPEQ", SDTIntBinOp, [SDNPCommutative]>;
def X86pcmpgt : SDNode<"X86ISD::PCMPGT", SDTIntBinOp>;

def X86cmpp : SDNode<"X86ISD::CMPP", SDTX86VFCMP>;
def X86strict_cmpp : SDNode<"X86ISD::STRICT_CMPP", SDTX86VFCMP, [SDNPHasChain]>;
def X86any_cmpp : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                           [(X86strict_cmpp node:$src1, node:$src2, node:$src3),
                            (X86cmpp node:$src1, node:$src2, node:$src3)]>;

def X86CmpMaskCC :
      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>]>;
def X86MaskCmpMaskCC :
      SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>, SDTCisSameAs<4, 0>]>;
def X86CmpMaskCCScalar :
      SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisFP<1>, SDTCisSameAs<1, 2>,
                           SDTCisVT<3, i8>]>;

def X86cmpm : SDNode<"X86ISD::CMPM", X86CmpMaskCC>;
def X86cmpmm : SDNode<"X86ISD::CMPMM", X86MaskCmpMaskCC>;
def X86strict_cmpm : SDNode<"X86ISD::STRICT_CMPM", X86CmpMaskCC, [SDNPHasChain]>;
def X86any_cmpm : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                           [(X86strict_cmpm node:$src1, node:$src2, node:$src3),
                            (X86cmpm node:$src1, node:$src2, node:$src3)]>;
def X86cmpmmSAE : SDNode<"X86ISD::CMPMM_SAE", X86MaskCmpMaskCC>;
def X86cmpms : SDNode<"X86ISD::FSETCCM", X86CmpMaskCCScalar>;
def X86cmpmsSAE : SDNode<"X86ISD::FSETCCM_SAE", X86CmpMaskCCScalar>;

def X86phminpos: SDNode<"X86ISD::PHMINPOS",
                        SDTypeProfile<1, 1, [SDTCisVT<0, v8i16>, SDTCisVT<1, v8i16>]>>;

def X86vshiftuniform : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                            SDTCisVec<2>, SDTCisInt<0>,
                                            SDTCisInt<2>]>;

def X86vshl : SDNode<"X86ISD::VSHL", X86vshiftuniform>;
def X86vsrl : SDNode<"X86ISD::VSRL", X86vshiftuniform>;
def X86vsra : SDNode<"X86ISD::VSRA", X86vshiftuniform>;

def X86vshiftvariable : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>, SDTCisInt<0>]>;

def X86vshlv : SDNode<"X86ISD::VSHLV", X86vshiftvariable>;
def X86vsrlv : SDNode<"X86ISD::VSRLV", X86vshiftvariable>;
def X86vsrav : SDNode<"X86ISD::VSRAV", X86vshiftvariable>;

def X86vshli : SDNode<"X86ISD::VSHLI", X86vshiftimm>;
def X86vsrli : SDNode<"X86ISD::VSRLI", X86vshiftimm>;
def X86vsrai : SDNode<"X86ISD::VSRAI", X86vshiftimm>;

def X86kshiftl : SDNode<"X86ISD::KSHIFTL",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisVT<2, i8>]>>;
def X86kshiftr : SDNode<"X86ISD::KSHIFTR",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                             SDTCisSameAs<0, 1>,
                                             SDTCisVT<2, i8>]>>;

def X86kadd : SDNode<"X86ISD::KADD", SDTIntBinOp, [SDNPCommutative]>;

def X86vrotli : SDNode<"X86ISD::VROTLI", X86vshiftimm>;
def X86vrotri : SDNode<"X86ISD::VROTRI", X86vshiftimm>;

def X86vpshl : SDNode<"X86ISD::VPSHL", X86vshiftvariable>;
def X86vpsha : SDNode<"X86ISD::VPSHA", X86vshiftvariable>;

def X86vpcom : SDNode<"X86ISD::VPCOM",
                      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                           SDTCisSameAs<0,2>,
                                           SDTCisVT<3, i8>, SDTCisInt<0>]>>;
def X86vpcomu : SDNode<"X86ISD::VPCOMU",
                       SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisVT<3, i8>, SDTCisInt<0>]>>;
def X86vpermil2 : SDNode<"X86ISD::VPERMIL2",
                         SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                              SDTCisSameAs<0,2>,
                                              SDTCisFP<0>, SDTCisInt<3>,
                                              SDTCisSameNumEltsAs<0, 3>,
                                              SDTCisSameSizeAs<0,3>,
                                              SDTCisVT<4, i8>]>>;
def X86vpperm : SDNode<"X86ISD::VPPERM",
                       SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>, SDTCisSameAs<0, 3>]>>;

def SDTX86CmpPTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
                                          SDTCisVec<1>,
                                          SDTCisSameAs<2, 1>]>;

def X86mulhrs : SDNode<"X86ISD::MULHRS", SDTIntBinOp, [SDNPCommutative]>;
def X86ptest : SDNode<"X86ISD::PTEST", SDTX86CmpPTest>;
def X86testp : SDNode<"X86ISD::TESTP", SDTX86CmpPTest>;
def X86kortest : SDNode<"X86ISD::KORTEST", SDTX86CmpPTest>;
def X86ktest : SDNode<"X86ISD::KTEST", SDTX86CmpPTest>;

def X86movmsk : SDNode<"X86ISD::MOVMSK",
                       SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVec<1>]>>;

def X86selects : SDNode<"X86ISD::SELECTS",
                        SDTypeProfile<1, 3, [SDTCisVT<1, v1i1>,
                                             SDTCisSameAs<0, 2>,
                                             SDTCisSameAs<2, 3>]>>;

def X86pmuludq : SDNode<"X86ISD::PMULUDQ",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<1,2>]>,
                        [SDNPCommutative]>;
def X86pmuldq : SDNode<"X86ISD::PMULDQ",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<1,2>]>,
                       [SDNPCommutative]>;

def X86extrqi : SDNode<"X86ISD::EXTRQI",
                       SDTypeProfile<1, 3, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                            SDTCisVT<2, i8>, SDTCisVT<3, i8>]>>;
def X86insertqi : SDNode<"X86ISD::INSERTQI",
                         SDTypeProfile<1, 4, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                              SDTCisSameAs<1,2>, SDTCisVT<3, i8>,
                                              SDTCisVT<4, i8>]>>;

// Specific shuffle nodes - At some point ISD::VECTOR_SHUFFLE will always get
// translated into one of the target nodes below during lowering.
// Note: this is a work in progress...
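// For example, a v4f32 shuffle with mask <1,1,3,3> is lowered to X86Movshdup
// and one with mask <0,0,2,2> to X86Movsldup instead of staying a generic
// ISD::VECTOR_SHUFFLE.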
def SDTShuff1Op : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisSameAs<0,1>]>;
def SDTShuff2Op : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                       SDTCisSameAs<0,2>]>;
def SDTShuff2OpFP : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
                                         SDTCisSameAs<0,1>, SDTCisSameAs<0,2>]>;

def SDTShuff2OpM : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisFP<0>, SDTCisInt<2>,
                                        SDTCisSameNumEltsAs<0,2>,
                                        SDTCisSameSizeAs<0,2>]>;
def SDTShuff2OpI : SDTypeProfile<1, 2, [SDTCisVec<0>,
                                        SDTCisSameAs<0,1>, SDTCisVT<2, i8>]>;
def SDTShuff3OpI : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>, SDTCisVT<3, i8>]>;
def SDTFPBinOpImm: SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>,
                                        SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>,
                                        SDTCisVT<3, i32>]>;
def SDTFPTernaryOpImm: SDTypeProfile<1, 4, [SDTCisFP<0>, SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisInt<3>,
                                            SDTCisSameSizeAs<0, 3>,
                                            SDTCisSameNumEltsAs<0, 3>,
                                            SDTCisVT<4, i32>]>;
def SDTFPUnaryOpImm: SDTypeProfile<1, 2, [SDTCisFP<0>,
                                          SDTCisSameAs<0,1>,
                                          SDTCisVT<2, i32>]>;

def SDTVBroadcast : SDTypeProfile<1, 1, [SDTCisVec<0>]>;
def SDTVBroadcastm : SDTypeProfile<1, 1, [SDTCisVec<0>,
                                          SDTCisInt<0>, SDTCisInt<1>]>;

def SDTBlend : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                    SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;

def SDTTernlog : SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisVec<0>,
                                      SDTCisSameAs<0,1>, SDTCisSameAs<0,2>,
                                      SDTCisSameAs<0,3>, SDTCisVT<4, i8>]>;

def SDTFPBinOpRound : SDTypeProfile<1, 3, [ // fadd_round, fmul_round, etc.
  SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>, SDTCisVT<3, i32>]>;

def SDTFPUnaryOpRound : SDTypeProfile<1, 2, [ // fsqrt_round, fgetexp_round, etc.
  SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisVT<2, i32>]>;

def SDTFmaRound : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>,
                                       SDTCisSameAs<1,2>, SDTCisSameAs<1,3>,
                                       SDTCisFP<0>, SDTCisVT<4, i32>]>;

def X86PAlignr : SDNode<"X86ISD::PALIGNR",
                        SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i8>,
                                             SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>,
                                             SDTCisVT<3, i8>]>>;
def X86VAlign : SDNode<"X86ISD::VALIGN", SDTShuff3OpI>;

def X86VShld : SDNode<"X86ISD::VSHLD", SDTShuff3OpI>;
def X86VShrd : SDNode<"X86ISD::VSHRD", SDTShuff3OpI>;
def X86VShldv : SDNode<"X86ISD::VSHLDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisSameAs<0,3>]>>;
def X86VShrdv : SDNode<"X86ISD::VSHRDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>,
                                            SDTCisSameAs<0,1>,
                                            SDTCisSameAs<0,2>,
                                            SDTCisSameAs<0,3>]>>;

def X86Conflict : SDNode<"X86ISD::CONFLICT", SDTIntUnaryOp>;

def X86PShufd : SDNode<"X86ISD::PSHUFD", SDTShuff2OpI>;
def X86PShufhw : SDNode<"X86ISD::PSHUFHW", SDTShuff2OpI>;
def X86PShuflw : SDNode<"X86ISD::PSHUFLW", SDTShuff2OpI>;

def X86Shufp : SDNode<"X86ISD::SHUFP", SDTShuff3OpI>;
def X86Shuf128 : SDNode<"X86ISD::SHUF128", SDTShuff3OpI>;

def X86Movddup : SDNode<"X86ISD::MOVDDUP", SDTShuff1Op>;
def X86Movshdup : SDNode<"X86ISD::MOVSHDUP", SDTShuff1Op>;
def X86Movsldup : SDNode<"X86ISD::MOVSLDUP", SDTShuff1Op>;

def X86Movsd : SDNode<"X86ISD::MOVSD",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v2f64>,
                                           SDTCisVT<1, v2f64>,
                                           SDTCisVT<2, v2f64>]>>;
def X86Movss : SDNode<"X86ISD::MOVSS",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                           SDTCisVT<1, v4f32>,
                                           SDTCisVT<2, v4f32>]>>;
def X86Movsh : SDNode<"X86ISD::MOVSH",
                      SDTypeProfile<1, 2, [SDTCisVT<0, v8f16>,
                                           SDTCisVT<1, v8f16>,
                                           SDTCisVT<2, v8f16>]>>;

def X86Movlhps : SDNode<"X86ISD::MOVLHPS",
                        SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                             SDTCisVT<1, v4f32>,
                                             SDTCisVT<2, v4f32>]>>;
def X86Movhlps : SDNode<"X86ISD::MOVHLPS",
                        SDTypeProfile<1, 2, [SDTCisVT<0, v4f32>,
                                             SDTCisVT<1, v4f32>,
                                             SDTCisVT<2, v4f32>]>>;

def SDTPack : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<0>,
                                   SDTCisVec<1>, SDTCisInt<1>,
                                   SDTCisSameSizeAs<0,1>,
                                   SDTCisSameAs<1,2>,
                                   SDTCisOpSmallerThanOp<0, 1>]>;
def X86Packss : SDNode<"X86ISD::PACKSS", SDTPack>;
def X86Packus : SDNode<"X86ISD::PACKUS", SDTPack>;
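// e.g. X86Packss with two v8i16 sources produces one v16i8 result: both
// inputs are narrowed with signed saturation and concatenated, which is what
// the SDTCisSameSizeAs/SDTCisOpSmallerThanOp constraints above encode.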
def X86Unpckl : SDNode<"X86ISD::UNPCKL", SDTShuff2Op>;
def X86Unpckh : SDNode<"X86ISD::UNPCKH", SDTShuff2Op>;

def X86vpmaddubsw : SDNode<"X86ISD::VPMADDUBSW",
                           SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                                SDTCVecEltisVT<1, i8>,
                                                SDTCisSameSizeAs<0,1>,
                                                SDTCisSameAs<1,2>]>>;
def X86vpmaddwd : SDNode<"X86ISD::VPMADDWD",
                         SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i32>,
                                              SDTCVecEltisVT<1, i16>,
                                              SDTCisSameSizeAs<0,1>,
                                              SDTCisSameAs<1,2>]>,
                         [SDNPCommutative]>;

def X86VPermilpv : SDNode<"X86ISD::VPERMILPV", SDTShuff2OpM>;
def X86VPermilpi : SDNode<"X86ISD::VPERMILPI", SDTShuff2OpI>;
def X86VPermv : SDNode<"X86ISD::VPERMV",
                       SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<1>,
                                            SDTCisSameNumEltsAs<0,1>,
                                            SDTCisSameSizeAs<0,1>,
                                            SDTCisSameAs<0,2>]>>;
def X86VPermi : SDNode<"X86ISD::VPERMI", SDTShuff2OpI>;
def X86VPermt2 : SDNode<"X86ISD::VPERMV3",
                        SDTypeProfile<1, 3, [SDTCisVec<0>,
                                             SDTCisSameAs<0,1>, SDTCisInt<2>,
                                             SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>,
                                             SDTCisSameSizeAs<0,2>,
                                             SDTCisSameAs<0,3>]>, []>;

def X86vpternlog : SDNode<"X86ISD::VPTERNLOG", SDTTernlog>;

def X86VPerm2x128 : SDNode<"X86ISD::VPERM2X128", SDTShuff3OpI>;

def X86VFixupimm : SDNode<"X86ISD::VFIXUPIMM", SDTFPTernaryOpImm>;
def X86VFixupimmSAE : SDNode<"X86ISD::VFIXUPIMM_SAE", SDTFPTernaryOpImm>;
def X86VFixupimms : SDNode<"X86ISD::VFIXUPIMMS", SDTFPTernaryOpImm>;
def X86VFixupimmSAEs : SDNode<"X86ISD::VFIXUPIMMS_SAE", SDTFPTernaryOpImm>;
def X86VRange : SDNode<"X86ISD::VRANGE", SDTFPBinOpImm>;
def X86VRangeSAE : SDNode<"X86ISD::VRANGE_SAE", SDTFPBinOpImm>;
def X86VReduce : SDNode<"X86ISD::VREDUCE", SDTFPUnaryOpImm>;
def X86VReduceSAE : SDNode<"X86ISD::VREDUCE_SAE", SDTFPUnaryOpImm>;
def X86VRndScale : SDNode<"X86ISD::VRNDSCALE", SDTFPUnaryOpImm>;
def X86strict_VRndScale : SDNode<"X86ISD::STRICT_VRNDSCALE", SDTFPUnaryOpImm,
                                 [SDNPHasChain]>;
def X86any_VRndScale : PatFrags<(ops node:$src1, node:$src2),
                                [(X86strict_VRndScale node:$src1, node:$src2),
                                 (X86VRndScale node:$src1, node:$src2)]>;
def X86VRndScaleSAE: SDNode<"X86ISD::VRNDSCALE_SAE", SDTFPUnaryOpImm>;
def X86VGetMant : SDNode<"X86ISD::VGETMANT", SDTFPUnaryOpImm>;
def X86VGetMantSAE : SDNode<"X86ISD::VGETMANT_SAE", SDTFPUnaryOpImm>;
def X86Vfpclass : SDNode<"X86ISD::VFPCLASS",
                         SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                              SDTCisFP<1>,
                                              SDTCisSameNumEltsAs<0,1>,
                                              SDTCisVT<2, i32>]>, []>;
def X86Vfpclasss : SDNode<"X86ISD::VFPCLASSS",
                          SDTypeProfile<1, 2, [SDTCisVT<0, v1i1>,
                                               SDTCisFP<1>, SDTCisVT<2, i32>]>, []>;

def X86VBroadcast : SDNode<"X86ISD::VBROADCAST", SDTVBroadcast>;
def X86VBroadcastm : SDNode<"X86ISD::VBROADCASTM", SDTVBroadcastm>;

def X86Blendi : SDNode<"X86ISD::BLENDI", SDTBlend>;
def X86Blendv : SDNode<"X86ISD::BLENDV",
                       SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisInt<1>,
                                            SDTCisSameAs<0, 2>,
                                            SDTCisSameAs<2, 3>,
                                            SDTCisSameNumEltsAs<0, 1>,
                                            SDTCisSameSizeAs<0, 1>]>>;

def X86Addsub : SDNode<"X86ISD::ADDSUB", SDTFPBinOp>;

def X86faddRnd : SDNode<"X86ISD::FADD_RND", SDTFPBinOpRound>;
def X86fadds : SDNode<"X86ISD::FADDS", SDTFPBinOp>;
def X86faddRnds : SDNode<"X86ISD::FADDS_RND", SDTFPBinOpRound>;
def X86fsubRnd : SDNode<"X86ISD::FSUB_RND", SDTFPBinOpRound>;
def X86fsubs : SDNode<"X86ISD::FSUBS", SDTFPBinOp>;
def X86fsubRnds : SDNode<"X86ISD::FSUBS_RND", SDTFPBinOpRound>;
def X86fmulRnd : SDNode<"X86ISD::FMUL_RND", SDTFPBinOpRound>;
def X86fmuls : SDNode<"X86ISD::FMULS", SDTFPBinOp>;
def X86fmulRnds : SDNode<"X86ISD::FMULS_RND", SDTFPBinOpRound>;
def X86fdivRnd : SDNode<"X86ISD::FDIV_RND", SDTFPBinOpRound>;
def X86fdivs : SDNode<"X86ISD::FDIVS", SDTFPBinOp>;
def X86fdivRnds : SDNode<"X86ISD::FDIVS_RND", SDTFPBinOpRound>;
def X86fmaxSAE : SDNode<"X86ISD::FMAX_SAE", SDTFPBinOp>;
def X86fmaxSAEs : SDNode<"X86ISD::FMAXS_SAE", SDTFPBinOp>;
def X86fminSAE : SDNode<"X86ISD::FMIN_SAE", SDTFPBinOp>;
def X86fminSAEs : SDNode<"X86ISD::FMINS_SAE", SDTFPBinOp>;
def X86scalef : SDNode<"X86ISD::SCALEF", SDTFPBinOp>;
def X86scalefRnd : SDNode<"X86ISD::SCALEF_RND", SDTFPBinOpRound>;
def X86scalefs : SDNode<"X86ISD::SCALEFS", SDTFPBinOp>;
def X86scalefsRnd: SDNode<"X86ISD::SCALEFS_RND", SDTFPBinOpRound>;
def X86fsqrtRnd : SDNode<"X86ISD::FSQRT_RND", SDTFPUnaryOpRound>;
def X86fsqrts : SDNode<"X86ISD::FSQRTS", SDTFPBinOp>;
def X86fsqrtRnds : SDNode<"X86ISD::FSQRTS_RND", SDTFPBinOpRound>;
def X86fgetexp : SDNode<"X86ISD::FGETEXP", SDTFPUnaryOp>;
def X86fgetexpSAE : SDNode<"X86ISD::FGETEXP_SAE", SDTFPUnaryOp>;
def X86fgetexps : SDNode<"X86ISD::FGETEXPS", SDTFPBinOp>;
def X86fgetexpSAEs : SDNode<"X86ISD::FGETEXPS_SAE", SDTFPBinOp>;

def X86Fnmadd : SDNode<"X86ISD::FNMADD", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fnmadd : SDNode<"X86ISD::STRICT_FNMADD", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
def X86any_Fnmadd : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                             [(X86strict_Fnmadd node:$src1, node:$src2, node:$src3),
                              (X86Fnmadd node:$src1, node:$src2, node:$src3)]>;
def X86Fmsub : SDNode<"X86ISD::FMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fmsub : SDNode<"X86ISD::STRICT_FMSUB", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
def X86any_Fmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                            [(X86strict_Fmsub node:$src1, node:$src2, node:$src3),
                             (X86Fmsub node:$src1, node:$src2, node:$src3)]>;
def X86Fnmsub : SDNode<"X86ISD::FNMSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86strict_Fnmsub : SDNode<"X86ISD::STRICT_FNMSUB", SDTFPTernaryOp, [SDNPCommutative, SDNPHasChain]>;
def X86any_Fnmsub : PatFrags<(ops node:$src1, node:$src2, node:$src3),
                             [(X86strict_Fnmsub node:$src1, node:$src2, node:$src3),
                              (X86Fnmsub node:$src1, node:$src2, node:$src3)]>;
def X86Fmaddsub : SDNode<"X86ISD::FMADDSUB", SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fmsubadd : SDNode<"X86ISD::FMSUBADD", SDTFPTernaryOp, [SDNPCommutative]>;

def X86FmaddRnd : SDNode<"X86ISD::FMADD_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FnmaddRnd : SDNode<"X86ISD::FNMADD_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmsubRnd : SDNode<"X86ISD::FMSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FnmsubRnd : SDNode<"X86ISD::FNMSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmaddsubRnd : SDNode<"X86ISD::FMADDSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmsubaddRnd : SDNode<"X86ISD::FMSUBADD_RND", SDTFmaRound, [SDNPCommutative]>;

def X86vp2intersect : SDNode<"X86ISD::VP2INTERSECT",
                             SDTypeProfile<1, 2, [SDTCisVT<0, untyped>,
                                                  SDTCisVec<1>, SDTCisSameAs<1, 2>]>>;

def SDTIFma : SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def x86vpmadd52l : SDNode<"X86ISD::VPMADD52L", SDTIFma, [SDNPCommutative]>;
def x86vpmadd52h : SDNode<"X86ISD::VPMADD52H", SDTIFma, [SDNPCommutative]>;

def x86vfmaddc : SDNode<"X86ISD::VFMADDC", SDTFPTernaryOp, [SDNPCommutative]>;
def x86vfmaddcRnd : SDNode<"X86ISD::VFMADDC_RND", SDTFmaRound, [SDNPCommutative]>;
def x86vfcmaddc : SDNode<"X86ISD::VFCMADDC", SDTFPTernaryOp>;
def x86vfcmaddcRnd : SDNode<"X86ISD::VFCMADDC_RND", SDTFmaRound>;
def x86vfmulc : SDNode<"X86ISD::VFMULC", SDTFPBinOp, [SDNPCommutative]>;
def x86vfmulcRnd : SDNode<"X86ISD::VFMULC_RND", SDTFPBinOpRound, [SDNPCommutative]>;
def x86vfcmulc : SDNode<"X86ISD::VFCMULC", SDTFPBinOp>;
def x86vfcmulcRnd : SDNode<"X86ISD::VFCMULC_RND", SDTFPBinOpRound>;
def x86vfmaddcSh : SDNode<"X86ISD::VFMADDCSH", SDTFPTernaryOp, [SDNPCommutative]>;
def x86vfcmaddcSh : SDNode<"X86ISD::VFCMADDCSH", SDTFPTernaryOp>;
def x86vfmulcSh : SDNode<"X86ISD::VFMULCSH", SDTFPBinOp, [SDNPCommutative]>;
def x86vfcmulcSh : SDNode<"X86ISD::VFCMULCSH", SDTFPBinOp>;
def x86vfmaddcShRnd : SDNode<"X86ISD::VFMADDCSH_RND", SDTFmaRound, [SDNPCommutative]>;
def x86vfcmaddcShRnd : SDNode<"X86ISD::VFCMADDCSH_RND", SDTFmaRound>;
def x86vfmulcShRnd : SDNode<"X86ISD::VFMULCSH_RND", SDTFPBinOpRound, [SDNPCommutative]>;
def x86vfcmulcShRnd : SDNode<"X86ISD::VFCMULCSH_RND", SDTFPBinOpRound>;

def X86rsqrt14 : SDNode<"X86ISD::RSQRT14", SDTFPUnaryOp>;
def X86rcp14 : SDNode<"X86ISD::RCP14", SDTFPUnaryOp>;

// VNNI
def SDTVnni : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def X86Vpdpbusd : SDNode<"X86ISD::VPDPBUSD", SDTVnni>;
def X86Vpdpbusds : SDNode<"X86ISD::VPDPBUSDS", SDTVnni>;
def X86Vpdpwssd : SDNode<"X86ISD::VPDPWSSD", SDTVnni>;
def X86Vpdpwssds : SDNode<"X86ISD::VPDPWSSDS", SDTVnni>;

def X86rsqrt28 : SDNode<"X86ISD::RSQRT28", SDTFPUnaryOp>;
def X86rsqrt28SAE: SDNode<"X86ISD::RSQRT28_SAE", SDTFPUnaryOp>;
def X86rcp28 : SDNode<"X86ISD::RCP28", SDTFPUnaryOp>;
def X86rcp28SAE : SDNode<"X86ISD::RCP28_SAE", SDTFPUnaryOp>;
def X86exp2 : SDNode<"X86ISD::EXP2", SDTFPUnaryOp>;
def X86exp2SAE : SDNode<"X86ISD::EXP2_SAE", SDTFPUnaryOp>;

def X86rsqrt14s : SDNode<"X86ISD::RSQRT14S", SDTFPBinOp>;
def X86rcp14s : SDNode<"X86ISD::RCP14S", SDTFPBinOp>;
def X86rsqrt28s : SDNode<"X86ISD::RSQRT28S", SDTFPBinOp>;
def X86rsqrt28SAEs : SDNode<"X86ISD::RSQRT28S_SAE", SDTFPBinOp>;
def X86rcp28s : SDNode<"X86ISD::RCP28S", SDTFPBinOp>;
def X86rcp28SAEs : SDNode<"X86ISD::RCP28S_SAE", SDTFPBinOp>;
def X86Ranges : SDNode<"X86ISD::VRANGES", SDTFPBinOpImm>;
def X86RndScales : SDNode<"X86ISD::VRNDSCALES", SDTFPBinOpImm>;
def X86Reduces : SDNode<"X86ISD::VREDUCES", SDTFPBinOpImm>;
def X86GetMants : SDNode<"X86ISD::VGETMANTS", SDTFPBinOpImm>;
def X86RangesSAE : SDNode<"X86ISD::VRANGES_SAE", SDTFPBinOpImm>;
def X86RndScalesSAE : SDNode<"X86ISD::VRNDSCALES_SAE", SDTFPBinOpImm>;
def X86ReducesSAE : SDNode<"X86ISD::VREDUCES_SAE", SDTFPBinOpImm>;
def X86GetMantsSAE : SDNode<"X86ISD::VGETMANTS_SAE", SDTFPBinOpImm>;

def X86compress: SDNode<"X86ISD::COMPRESS", SDTypeProfile<1, 3,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>,
                         SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
                         SDTCisSameNumEltsAs<0, 3>]>, []>;
def X86expand : SDNode<"X86ISD::EXPAND", SDTypeProfile<1, 3,
                       [SDTCisSameAs<0, 1>, SDTCisVec<1>,
                        SDTCisSameAs<0, 2>, SDTCVecEltisVT<3, i1>,
                        SDTCisSameNumEltsAs<0, 3>]>, []>;

// vpshufbitqmb
def X86Vpshufbitqmb : SDNode<"X86ISD::VPSHUFBITQMB",
                             SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                                  SDTCisSameAs<1,2>,
                                                  SDTCVecEltisVT<0,i1>,
                                                  SDTCisSameNumEltsAs<0,1>]>>;

def SDTintToFP: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>,
                                     SDTCisSameAs<0,1>, SDTCisInt<2>]>;
def SDTintToFPRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisFP<0>,
                                          SDTCisSameAs<0,1>, SDTCisInt<2>,
                                          SDTCisVT<3, i32>]>;

def SDTFloatToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                        SDTCisInt<0>, SDTCisFP<1>]>;
def SDTFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisInt<0>, SDTCisFP<1>,
                                           SDTCisVT<2, i32>]>;
def SDTSFloatToInt: SDTypeProfile<1, 1, [SDTCisInt<0>, SDTCisFP<1>,
                                         SDTCisVec<1>]>;
def SDTSFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
                                            SDTCisVec<1>, SDTCisVT<2, i32>]>;

def SDTVintToFP: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisFP<0>, SDTCisInt<1>]>;
def SDTVintToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisFP<0>, SDTCisInt<1>,
                                           SDTCisVT<2, i32>]>;

// Scalar
def X86SintToFp : SDNode<"X86ISD::SCALAR_SINT_TO_FP", SDTintToFP>;
def X86SintToFpRnd : SDNode<"X86ISD::SCALAR_SINT_TO_FP_RND", SDTintToFPRound>;
def X86UintToFp : SDNode<"X86ISD::SCALAR_UINT_TO_FP", SDTintToFP>;
def X86UintToFpRnd : SDNode<"X86ISD::SCALAR_UINT_TO_FP_RND", SDTintToFPRound>;

def X86cvtts2Int : SDNode<"X86ISD::CVTTS2SI", SDTSFloatToInt>;
def X86cvtts2UInt : SDNode<"X86ISD::CVTTS2UI", SDTSFloatToInt>;
def X86cvtts2IntSAE : SDNode<"X86ISD::CVTTS2SI_SAE", SDTSFloatToInt>;
def X86cvtts2UIntSAE : SDNode<"X86ISD::CVTTS2UI_SAE", SDTSFloatToInt>;

def X86cvts2si : SDNode<"X86ISD::CVTS2SI", SDTSFloatToInt>;
def X86cvts2usi : SDNode<"X86ISD::CVTS2UI", SDTSFloatToInt>;
def X86cvts2siRnd : SDNode<"X86ISD::CVTS2SI_RND", SDTSFloatToIntRnd>;
def X86cvts2usiRnd : SDNode<"X86ISD::CVTS2UI_RND", SDTSFloatToIntRnd>;
// Vector with rounding mode

// cvtt fp-to-int stuff
def X86cvttp2siSAE : SDNode<"X86ISD::CVTTP2SI_SAE", SDTFloatToInt>;
def X86cvttp2uiSAE : SDNode<"X86ISD::CVTTP2UI_SAE", SDTFloatToInt>;

def X86VSintToFpRnd : SDNode<"X86ISD::SINT_TO_FP_RND", SDTVintToFPRound>;
def X86VUintToFpRnd : SDNode<"X86ISD::UINT_TO_FP_RND", SDTVintToFPRound>;

// cvt fp-to-int stuff
def X86cvtp2IntRnd : SDNode<"X86ISD::CVTP2SI_RND", SDTFloatToIntRnd>;
def X86cvtp2UIntRnd : SDNode<"X86ISD::CVTP2UI_RND", SDTFloatToIntRnd>;

// Vector without rounding mode

// cvtt fp-to-int stuff
def X86cvttp2si : SDNode<"X86ISD::CVTTP2SI", SDTFloatToInt>;
def X86cvttp2ui : SDNode<"X86ISD::CVTTP2UI", SDTFloatToInt>;
def X86strict_cvttp2si : SDNode<"X86ISD::STRICT_CVTTP2SI", SDTFloatToInt, [SDNPHasChain]>;
def X86strict_cvttp2ui : SDNode<"X86ISD::STRICT_CVTTP2UI", SDTFloatToInt, [SDNPHasChain]>;
def X86any_cvttp2si : PatFrags<(ops node:$src),
                               [(X86strict_cvttp2si node:$src),
                                (X86cvttp2si node:$src)]>;
def X86any_cvttp2ui : PatFrags<(ops node:$src),
                               [(X86strict_cvttp2ui node:$src),
                                (X86cvttp2ui node:$src)]>;

def X86VSintToFP : SDNode<"X86ISD::CVTSI2P", SDTVintToFP>;
def X86VUintToFP : SDNode<"X86ISD::CVTUI2P", SDTVintToFP>;
def X86strict_VSintToFP : SDNode<"X86ISD::STRICT_CVTSI2P", SDTVintToFP, [SDNPHasChain]>;
def X86strict_VUintToFP : SDNode<"X86ISD::STRICT_CVTUI2P", SDTVintToFP, [SDNPHasChain]>;
def X86any_VSintToFP : PatFrags<(ops node:$src),
                                [(X86strict_VSintToFP node:$src),
                                 (X86VSintToFP node:$src)]>;
def X86any_VUintToFP : PatFrags<(ops node:$src),
                                [(X86strict_VUintToFP node:$src),
                                 (X86VUintToFP node:$src)]>;

// cvt fp-to-int stuff (no rounding mode)
def X86cvtp2Int : SDNode<"X86ISD::CVTP2SI", SDTFloatToInt>;
def X86cvtp2UInt : SDNode<"X86ISD::CVTP2UI", SDTFloatToInt>;
// Masked versions of above
def SDTMVintToFP: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                       SDTCisFP<0>, SDTCisInt<1>,
                                       SDTCisSameAs<0, 2>,
                                       SDTCVecEltisVT<3, i1>,
                                       SDTCisSameNumEltsAs<1, 3>]>;
def SDTMFloatToInt: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisInt<0>, SDTCisFP<1>,
                                         SDTCisSameSizeAs<0, 1>,
                                         SDTCisSameAs<0, 2>,
                                         SDTCVecEltisVT<3, i1>,
                                         SDTCisSameNumEltsAs<1, 3>]>;

def X86VMSintToFP : SDNode<"X86ISD::MCVTSI2P", SDTMVintToFP>;
def X86VMUintToFP : SDNode<"X86ISD::MCVTUI2P", SDTMVintToFP>;

def X86mcvtp2Int : SDNode<"X86ISD::MCVTP2SI", SDTMFloatToInt>;
def X86mcvtp2UInt : SDNode<"X86ISD::MCVTP2UI", SDTMFloatToInt>;
def X86mcvttp2si : SDNode<"X86ISD::MCVTTP2SI", SDTMFloatToInt>;
def X86mcvttp2ui : SDNode<"X86ISD::MCVTTP2UI", SDTMFloatToInt>;

def SDTcvtph2ps : SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
                                       SDTCVecEltisVT<1, i16>]>;
def X86cvtph2ps : SDNode<"X86ISD::CVTPH2PS", SDTcvtph2ps>;
def X86strict_cvtph2ps : SDNode<"X86ISD::STRICT_CVTPH2PS", SDTcvtph2ps,
                                [SDNPHasChain]>;
def X86any_cvtph2ps : PatFrags<(ops node:$src),
                               [(X86strict_cvtph2ps node:$src),
                                (X86cvtph2ps node:$src)]>;
def X86cvtph2psSAE : SDNode<"X86ISD::CVTPH2PS_SAE", SDTcvtph2ps>;

def SDTcvtps2ph : SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                       SDTCVecEltisVT<1, f32>,
                                       SDTCisVT<2, i32>]>;
def X86cvtps2ph : SDNode<"X86ISD::CVTPS2PH", SDTcvtps2ph>;
def X86strict_cvtps2ph : SDNode<"X86ISD::STRICT_CVTPS2PH", SDTcvtps2ph,
                                [SDNPHasChain]>;
def X86any_cvtps2ph : PatFrags<(ops node:$src1, node:$src2),
                               [(X86strict_cvtps2ph node:$src1, node:$src2),
                                (X86cvtps2ph node:$src1, node:$src2)]>;
def X86cvtps2phSAE : SDNode<"X86ISD::CVTPS2PH_SAE", SDTcvtps2ph>;

def SDTmcvtps2ph : SDTypeProfile<1, 4, [SDTCVecEltisVT<0, i16>,
                                        SDTCVecEltisVT<1, f32>,
                                        SDTCisVT<2, i32>,
                                        SDTCisSameAs<0, 3>,
                                        SDTCVecEltisVT<4, i1>,
                                        SDTCisSameNumEltsAs<1, 4>]>;
def X86mcvtps2ph : SDNode<"X86ISD::MCVTPS2PH", SDTmcvtps2ph>;
def X86mcvtps2phSAE : SDNode<"X86ISD::MCVTPS2PH_SAE", SDTmcvtps2ph>;

def X86vfpextSAE : SDNode<"X86ISD::VFPEXT_SAE",
                          SDTypeProfile<1, 1, [SDTCisFP<0>, SDTCisVec<0>,
                                               SDTCisFP<1>, SDTCisVec<1>,
                                               SDTCisOpSmallerThanOp<1, 0>]>>;
def X86vfproundRnd: SDNode<"X86ISD::VFPROUND_RND",
                           SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>,
                                                SDTCisFP<1>, SDTCisVec<1>,
                                                SDTCisOpSmallerThanOp<0, 1>,
                                                SDTCisVT<2, i32>]>>;

// cvt fp to bfloat16
def X86cvtne2ps2bf16 : SDNode<"X86ISD::CVTNE2PS2BF16",
                              SDTypeProfile<1, 2, [SDTCVecEltisVT<0, bf16>,
                                                   SDTCVecEltisVT<1, f32>,
                                                   SDTCisSameSizeAs<0,1>,
                                                   SDTCisSameAs<1,2>]>>;
def X86mcvtneps2bf16 : SDNode<"X86ISD::MCVTNEPS2BF16",
                              SDTypeProfile<1, 3, [SDTCVecEltisVT<0, bf16>,
                                                   SDTCVecEltisVT<1, f32>,
                                                   SDTCisSameAs<0, 2>,
                                                   SDTCVecEltisVT<3, i1>,
                                                   SDTCisSameNumEltsAs<1, 3>]>>;
def X86cvtneps2bf16 : SDNode<"X86ISD::CVTNEPS2BF16",
                             SDTypeProfile<1, 1, [SDTCVecEltisVT<0, bf16>,
                                                  SDTCVecEltisVT<1, f32>]>>;
def X86dpbf16ps : SDNode<"X86ISD::DPBF16PS",
                         SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>,
                                              SDTCisSameAs<0,1>,
                                              SDTCVecEltisVT<2, bf16>,
                                              SDTCisSameAs<2,3>]>>;

// galois field arithmetic
def X86GF2P8affineinvqb : SDNode<"X86ISD::GF2P8AFFINEINVQB", SDTBlend>;
def X86GF2P8affineqb : SDNode<"X86ISD::GF2P8AFFINEQB", SDTBlend>;
def X86GF2P8mulb : SDNode<"X86ISD::GF2P8MULB", SDTIntBinOp>;

def SDTX86MaskedStore: SDTypeProfile<0, 3, [ // masked store
  SDTCisVec<0>, SDTCisPtrTy<1>, SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>
]>;

def X86vpdpbssd : SDNode<"X86ISD::VPDPBSSD", SDTVnni>;
def X86vpdpbssds : SDNode<"X86ISD::VPDPBSSDS", SDTVnni>;
def X86vpdpbsud : SDNode<"X86ISD::VPDPBSUD", SDTVnni>;
def X86vpdpbsuds : SDNode<"X86ISD::VPDPBSUDS", SDTVnni>;
def X86vpdpbuud : SDNode<"X86ISD::VPDPBUUD", SDTVnni>;
def X86vpdpbuuds : SDNode<"X86ISD::VPDPBUUDS", SDTVnni>;

//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//
// 128-bit load pattern fragments
def loadv8f16 : PatFrag<(ops node:$ptr), (v8f16 (load node:$ptr))>;
def loadv8bf16 : PatFrag<(ops node:$ptr), (v8bf16 (load node:$ptr))>;
def loadv4f32 : PatFrag<(ops node:$ptr), (v4f32 (load node:$ptr))>;
def loadv2f64 : PatFrag<(ops node:$ptr), (v2f64 (load node:$ptr))>;
def loadv2i64 : PatFrag<(ops node:$ptr), (v2i64 (load node:$ptr))>;
def loadv4i32 : PatFrag<(ops node:$ptr), (v4i32 (load node:$ptr))>;
def loadv8i16 : PatFrag<(ops node:$ptr), (v8i16 (load node:$ptr))>;
def loadv16i8 : PatFrag<(ops node:$ptr), (v16i8 (load node:$ptr))>;

// 256-bit load pattern fragments
def loadv16f16 : PatFrag<(ops node:$ptr), (v16f16 (load node:$ptr))>;
def loadv16bf16 : PatFrag<(ops node:$ptr), (v16bf16 (load node:$ptr))>;
def loadv8f32 : PatFrag<(ops node:$ptr), (v8f32 (load node:$ptr))>;
def loadv4f64 : PatFrag<(ops node:$ptr), (v4f64 (load node:$ptr))>;
def loadv4i64 : PatFrag<(ops node:$ptr), (v4i64 (load node:$ptr))>;
def loadv8i32 : PatFrag<(ops node:$ptr), (v8i32 (load node:$ptr))>;
def loadv16i16 : PatFrag<(ops node:$ptr), (v16i16 (load node:$ptr))>;
def loadv32i8 : PatFrag<(ops node:$ptr), (v32i8 (load node:$ptr))>;

// 512-bit load pattern fragments
def loadv32f16 : PatFrag<(ops node:$ptr), (v32f16 (load node:$ptr))>;
def loadv32bf16 : PatFrag<(ops node:$ptr), (v32bf16 (load node:$ptr))>;
def loadv16f32 : PatFrag<(ops node:$ptr), (v16f32 (load node:$ptr))>;
def loadv8f64 : PatFrag<(ops node:$ptr), (v8f64 (load node:$ptr))>;
def loadv8i64 : PatFrag<(ops node:$ptr), (v8i64 (load node:$ptr))>;
def loadv16i32 : PatFrag<(ops node:$ptr), (v16i32 (load node:$ptr))>;
def loadv32i16 : PatFrag<(ops node:$ptr), (v32i16 (load node:$ptr))>;
def loadv64i8 : PatFrag<(ops node:$ptr), (v64i8 (load node:$ptr))>;

// 128-/256-/512-bit extload pattern fragments
def extloadv2f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv4f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv8f32 : PatFrag<(ops node:$ptr), (extloadvf32 node:$ptr)>;
def extloadv2f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
def extloadv4f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
def extloadv8f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;
def extloadv16f16 : PatFrag<(ops node:$ptr), (extloadvf16 node:$ptr)>;

// Like 'store', but always requires vector size alignment.
def alignedstore : PatFrag<(ops node:$val, node:$ptr),
                           (store node:$val, node:$ptr), [{
  auto *St = cast<StoreSDNode>(N);
  return St->getAlign() >= St->getMemoryVT().getStoreSize();
}]>;

// Like 'load', but always requires vector size alignment.
def alignedload : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
}]>;
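// These aligned fragments are what let patterns elsewhere (e.g. the
// MOVAPS/MOVDQA load patterns in X86InstrSSE.td) pick the aligned move forms
// only when the load's alignment is at least the vector size; unaligned
// loads fall through to the MOVUPS/MOVDQU patterns instead.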
// 128-bit aligned load pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def alignedloadv8f16 : PatFrag<(ops node:$ptr),
                               (v8f16 (alignedload node:$ptr))>;
def alignedloadv8bf16 : PatFrag<(ops node:$ptr),
                                (v8bf16 (alignedload node:$ptr))>;
def alignedloadv4f32 : PatFrag<(ops node:$ptr),
                               (v4f32 (alignedload node:$ptr))>;
def alignedloadv2f64 : PatFrag<(ops node:$ptr),
                               (v2f64 (alignedload node:$ptr))>;
def alignedloadv2i64 : PatFrag<(ops node:$ptr),
                               (v2i64 (alignedload node:$ptr))>;
def alignedloadv4i32 : PatFrag<(ops node:$ptr),
                               (v4i32 (alignedload node:$ptr))>;
def alignedloadv8i16 : PatFrag<(ops node:$ptr),
                               (v8i16 (alignedload node:$ptr))>;
def alignedloadv16i8 : PatFrag<(ops node:$ptr),
                               (v16i8 (alignedload node:$ptr))>;

// 256-bit aligned load pattern fragments
// NOTE: all 256-bit integer vector loads are promoted to v4i64
def alignedloadv16f16 : PatFrag<(ops node:$ptr),
                                (v16f16 (alignedload node:$ptr))>;
def alignedloadv16bf16 : PatFrag<(ops node:$ptr),
                                 (v16bf16 (alignedload node:$ptr))>;
def alignedloadv8f32 : PatFrag<(ops node:$ptr),
                               (v8f32 (alignedload node:$ptr))>;
def alignedloadv4f64 : PatFrag<(ops node:$ptr),
                               (v4f64 (alignedload node:$ptr))>;
def alignedloadv4i64 : PatFrag<(ops node:$ptr),
                               (v4i64 (alignedload node:$ptr))>;
def alignedloadv8i32 : PatFrag<(ops node:$ptr),
                               (v8i32 (alignedload node:$ptr))>;
def alignedloadv16i16 : PatFrag<(ops node:$ptr),
                                (v16i16 (alignedload node:$ptr))>;
def alignedloadv32i8 : PatFrag<(ops node:$ptr),
                               (v32i8 (alignedload node:$ptr))>;

// 512-bit aligned load pattern fragments
def alignedloadv32f16 : PatFrag<(ops node:$ptr),
                                (v32f16 (alignedload node:$ptr))>;
def alignedloadv32bf16 : PatFrag<(ops node:$ptr),
                                 (v32bf16 (alignedload node:$ptr))>;
def alignedloadv16f32 : PatFrag<(ops node:$ptr),
                                (v16f32 (alignedload node:$ptr))>;
def alignedloadv8f64 : PatFrag<(ops node:$ptr),
                               (v8f64 (alignedload node:$ptr))>;
def alignedloadv8i64 : PatFrag<(ops node:$ptr),
                               (v8i64 (alignedload node:$ptr))>;
def alignedloadv16i32 : PatFrag<(ops node:$ptr),
                                (v16i32 (alignedload node:$ptr))>;
def alignedloadv32i16 : PatFrag<(ops node:$ptr),
                                (v32i16 (alignedload node:$ptr))>;
def alignedloadv64i8 : PatFrag<(ops node:$ptr),
                               (v64i8 (alignedload node:$ptr))>;

// Like 'load', but uses special alignment checks suitable for use in
// memory operands in most SSE instructions, which are required to
// be naturally aligned on some targets but not on others. If the subtarget
// allows unaligned accesses, match any load, though this may require
// setting a feature bit in the processor (on startup, for example).
// Opteron 10h and later implement such a feature.
def memop : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Subtarget->hasSSEUnalignedMem() ||
         Ld->getAlign() >= Ld->getMemoryVT().getStoreSize();
}]>;
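// memop is intended for folded memory operands of arithmetic/logic SSE
// instructions: on subtargets reporting hasSSEUnalignedMem() any load may be
// folded, otherwise the load must be aligned to the full vector size.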
// 128-bit memop pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def memopv4f32 : PatFrag<(ops node:$ptr), (v4f32 (memop node:$ptr))>;
def memopv2f64 : PatFrag<(ops node:$ptr), (v2f64 (memop node:$ptr))>;
def memopv2i64 : PatFrag<(ops node:$ptr), (v2i64 (memop node:$ptr))>;
def memopv4i32 : PatFrag<(ops node:$ptr), (v4i32 (memop node:$ptr))>;
def memopv8i16 : PatFrag<(ops node:$ptr), (v8i16 (memop node:$ptr))>;
def memopv16i8 : PatFrag<(ops node:$ptr), (v16i8 (memop node:$ptr))>;

// 128-bit bitconvert pattern fragments
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;

// 256-bit bitconvert pattern fragments
def bc_v32i8 : PatFrag<(ops node:$in), (v32i8 (bitconvert node:$in))>;
def bc_v16i16 : PatFrag<(ops node:$in), (v16i16 (bitconvert node:$in))>;
def bc_v8i32 : PatFrag<(ops node:$in), (v8i32 (bitconvert node:$in))>;
def bc_v4i64 : PatFrag<(ops node:$in), (v4i64 (bitconvert node:$in))>;
def bc_v8f32 : PatFrag<(ops node:$in), (v8f32 (bitconvert node:$in))>;
def bc_v4f64 : PatFrag<(ops node:$in), (v4f64 (bitconvert node:$in))>;

// 512-bit bitconvert pattern fragments
def bc_v64i8 : PatFrag<(ops node:$in), (v64i8 (bitconvert node:$in))>;
def bc_v32i16 : PatFrag<(ops node:$in), (v32i16 (bitconvert node:$in))>;
def bc_v16i32 : PatFrag<(ops node:$in), (v16i32 (bitconvert node:$in))>;
def bc_v8i64 : PatFrag<(ops node:$in), (v8i64 (bitconvert node:$in))>;
def bc_v8f64 : PatFrag<(ops node:$in), (v8f64 (bitconvert node:$in))>;
def bc_v16f32 : PatFrag<(ops node:$in), (v16f32 (bitconvert node:$in))>;

def X86vzload16 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
}]>;
def X86vzload32 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
}]>;
def X86vzload64 : PatFrag<(ops node:$src),
                          (X86vzld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;
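// The X86vzload{16,32,64} fragments distinguish VZEXT_LOAD nodes by the
// number of bytes actually loaded (the remaining vector lanes are zeroed),
// e.g. a 4-byte load as produced for a movss-style scalar load.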
def X86vextractstore64 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86vextractst node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

def X86VBroadcastld8 : PatFrag<(ops node:$src),
                               (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 1;
}]>;
def X86VBroadcastld16 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 2;
}]>;
def X86VBroadcastld32 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 4;
}]>;
def X86VBroadcastld64 : PatFrag<(ops node:$src),
                                (X86VBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 8;
}]>;

def X86SubVBroadcastld128 : PatFrag<(ops node:$src),
                                    (X86SubVBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 16;
}]>;
def X86SubVBroadcastld256 : PatFrag<(ops node:$src),
                                    (X86SubVBroadcastld node:$src), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getStoreSize() == 32;
}]>;
// Scalar SSE intrinsic fragments to match several different types of loads.
// Used by scalar SSE intrinsic instructions which have 128 bit types, but
// only load a single element.
// FIXME: We should add more canonicalizing in DAGCombine. Particularly removing
// the simple_load case.
def sse_load_f16 : PatFrags<(ops node:$ptr),
                            [(v8f16 (simple_load node:$ptr)),
                             (v8f16 (X86vzload16 node:$ptr)),
                             (v8f16 (scalar_to_vector (loadf16 node:$ptr)))]>;
def sse_load_f32 : PatFrags<(ops node:$ptr),
                            [(v4f32 (simple_load node:$ptr)),
                             (v4f32 (X86vzload32 node:$ptr)),
                             (v4f32 (scalar_to_vector (loadf32 node:$ptr)))]>;
def sse_load_f64 : PatFrags<(ops node:$ptr),
                            [(v2f64 (simple_load node:$ptr)),
                             (v2f64 (X86vzload64 node:$ptr)),
                             (v2f64 (scalar_to_vector (loadf64 node:$ptr)))]>;
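// Example: a scalar SSE intrinsic pattern can use sse_load_f32 for its memory
// form, so a full 16-byte safe load, a 4-byte zero-extending load, or a plain
// f32 load fed into scalar_to_vector all fold into the same instruction.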
def shmem : X86MemOperand<"printwordmem", X86Mem16AsmOperand>;
def ssmem : X86MemOperand<"printdwordmem", X86Mem32AsmOperand>;
def sdmem : X86MemOperand<"printqwordmem", X86Mem64AsmOperand>;

def fp16imm0 : PatLeaf<(f16 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;
def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;
def fp64imm0 : PatLeaf<(f64 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;
def fp128imm0 : PatLeaf<(f128 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

// EXTRACT_get_vextract128_imm xform function: convert extract_subvector index
// to VEXTRACTF128/VEXTRACTI128 imm.
def EXTRACT_get_vextract128_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 128, SDLoc(N));
}]>;

// INSERT_get_vinsert128_imm xform function: convert insert_subvector index to
// VINSERTF128/VINSERTI128 imm.
def INSERT_get_vinsert128_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 128, SDLoc(N));
}]>;

// INSERT_get_vperm2x128_imm xform function: convert insert_subvector index to
// commuted VPERM2F128/VPERM2I128 imm.
def INSERT_get_vperm2x128_commutedimm : SDNodeXForm<insert_subvector, [{
  return getPermuteVINSERTCommutedImmediate(N, 128, SDLoc(N));
}]>;

// EXTRACT_get_vextract256_imm xform function: convert extract_subvector index
// to VEXTRACTF64x4 imm.
def EXTRACT_get_vextract256_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 256, SDLoc(N));
}]>;

// INSERT_get_vinsert256_imm xform function: convert insert_subvector index to
// VINSERTF64x4 imm.
def INSERT_get_vinsert256_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 256, SDLoc(N));
}]>;

def vextract128_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  // Index 0 can be handled via extract_subreg.
  return !isNullConstant(N->getOperand(1));
}], EXTRACT_get_vextract128_imm>;
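// Worked example: extracting the upper half of a v8f32 is an
// extract_subvector with index 4; 4 x 32 bits = 128 bits, so
// EXTRACT_get_vextract128_imm turns that index into immediate 1 for
// VEXTRACTF128.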
def vinsert128_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert128_imm>;

def vextract256_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  // Index 0 can be handled via extract_subreg.
  return !isNullConstant(N->getOperand(1));
}], EXTRACT_get_vextract256_imm>;

def vinsert256_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert256_imm>;

def masked_load : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                          (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
  return !cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->getExtensionType() == ISD::NON_EXTLOAD &&
         cast<MaskedLoadSDNode>(N)->isUnindexed();
}]>;

def masked_load_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                  (masked_load node:$src1, node:$src2, node:$src3), [{
  // Use the node type to determine the size the alignment needs to match.
  // We can't use memory VT because type widening changes the node VT, but
  // not the memory VT.
  auto *Ld = cast<MaskedLoadSDNode>(N);
  return Ld->getAlign() >= Ld->getValueType(0).getStoreSize();
}]>;

def X86mExpandingLoad : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                (masked_ld node:$src1, undef, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->isUnindexed();
}]>;

// Masked store fragments.
// X86mstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def masked_store : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return !cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
         !cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;

def masked_store_aligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_store node:$src1, node:$src2, node:$src3), [{
  // Use the node type to determine the size the alignment needs to match.
  // We can't use memory VT because type widening changes the node VT, but
  // not the memory VT.
  auto *St = cast<MaskedStoreSDNode>(N);
  return St->getAlign() >= St->getOperand(1).getValueType().getStoreSize();
}]>;

def X86mCompressingStore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isCompressingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;
// Masked truncstore fragments.
// X86mtruncstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def X86mtruncstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                             (masked_st node:$src1, node:$src2, undef, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isTruncatingStore() &&
         cast<MaskedStoreSDNode>(N)->isUnindexed();
}]>;
def masked_truncstorevi8 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def masked_truncstorevi16 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def masked_truncstorevi32 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def X86TruncSStore : SDNode<"X86ISD::VTRUNCSTORES", SDTStore,
                            [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86TruncUSStore : SDNode<"X86ISD::VTRUNCSTOREUS", SDTStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86MTruncSStore : SDNode<"X86ISD::VMTRUNCSTORES", SDTX86MaskedStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;
def X86MTruncUSStore : SDNode<"X86ISD::VMTRUNCSTOREUS", SDTX86MaskedStore,
                              [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def truncstore_s_vi8 : PatFrag<(ops node:$val, node:$ptr),
                               (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def truncstore_us_vi8 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def truncstore_s_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def truncstore_us_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def truncstore_s_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;
def truncstore_us_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def masked_truncstore_s_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def masked_truncstore_us_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def masked_truncstore_s_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def masked_truncstore_us_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def masked_truncstore_s_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;
def masked_truncstore_us_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MemIntrinsicSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;