// (Extraction artifacts removed: a file-size banner and a concatenated
//  line-number gutter left over from the HTML/scrape source. No source
//  content was lost — the gutter contained only ordinal numbers.)
//===-- X86InstrInfo.td - Main X86 Instruction Definition --*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file describes the X86 instruction set, defining the instructions, and
// properties of the instructions which are needed for code generation, machine
// code emission, and analysis.
//
//===----------------------------------------------------------------------===//
  14. //===----------------------------------------------------------------------===//
  15. // X86 specific DAG Nodes.
  16. //
  17. def SDTX86CmpTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisInt<1>,
  18. SDTCisSameAs<1, 2>]>;
  19. def SDTX86FCmp : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisFP<1>,
  20. SDTCisSameAs<1, 2>]>;
  21. def SDTX86Cmov : SDTypeProfile<1, 4,
  22. [SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>,
  23. SDTCisVT<3, i8>, SDTCisVT<4, i32>]>;
  24. // Unary and binary operator instructions that set EFLAGS as a side-effect.
  25. def SDTUnaryArithWithFlags : SDTypeProfile<2, 1,
  26. [SDTCisSameAs<0, 2>,
  27. SDTCisInt<0>, SDTCisVT<1, i32>]>;
  28. def SDTBinaryArithWithFlags : SDTypeProfile<2, 2,
  29. [SDTCisSameAs<0, 2>,
  30. SDTCisSameAs<0, 3>,
  31. SDTCisInt<0>, SDTCisVT<1, i32>]>;
  32. // SDTBinaryArithWithFlagsInOut - RES1, EFLAGS = op LHS, RHS, EFLAGS
  33. def SDTBinaryArithWithFlagsInOut : SDTypeProfile<2, 3,
  34. [SDTCisSameAs<0, 2>,
  35. SDTCisSameAs<0, 3>,
  36. SDTCisInt<0>,
  37. SDTCisVT<1, i32>,
  38. SDTCisVT<4, i32>]>;
  39. // RES1, RES2, FLAGS = op LHS, RHS
  40. def SDT2ResultBinaryArithWithFlags : SDTypeProfile<3, 2,
  41. [SDTCisSameAs<0, 1>,
  42. SDTCisSameAs<0, 2>,
  43. SDTCisSameAs<0, 3>,
  44. SDTCisInt<0>, SDTCisVT<1, i32>]>;
  45. def SDTX86BrCond : SDTypeProfile<0, 3,
  46. [SDTCisVT<0, OtherVT>,
  47. SDTCisVT<1, i8>, SDTCisVT<2, i32>]>;
  48. def SDTX86SetCC : SDTypeProfile<1, 2,
  49. [SDTCisVT<0, i8>,
  50. SDTCisVT<1, i8>, SDTCisVT<2, i32>]>;
  51. def SDTX86SetCC_C : SDTypeProfile<1, 2,
  52. [SDTCisInt<0>,
  53. SDTCisVT<1, i8>, SDTCisVT<2, i32>]>;
  54. def SDTX86sahf : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVT<1, i8>]>;
  55. def SDTX86rdrand : SDTypeProfile<2, 0, [SDTCisInt<0>, SDTCisVT<1, i32>]>;
  56. def SDTX86rdpkru : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVT<1, i32>]>;
  57. def SDTX86wrpkru : SDTypeProfile<0, 3, [SDTCisVT<0, i32>, SDTCisVT<1, i32>,
  58. SDTCisVT<2, i32>]>;
  59. def SDTX86cas : SDTypeProfile<0, 3, [SDTCisPtrTy<0>, SDTCisInt<1>,
  60. SDTCisVT<2, i8>]>;
  61. def SDTX86cas8pair : SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>;
  62. def SDTX86cas16pair : SDTypeProfile<0, 2, [SDTCisPtrTy<0>, SDTCisVT<1, i64>]>;
  63. def SDTLockBinaryArithWithFlags : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
  64. SDTCisPtrTy<1>,
  65. SDTCisInt<2>]>;
  66. def SDTLockUnaryArithWithFlags : SDTypeProfile<1, 1, [SDTCisVT<0, i32>,
  67. SDTCisPtrTy<1>]>;
  68. def SDTX86Ret : SDTypeProfile<0, -1, [SDTCisVT<0, i32>]>;
  69. def SDT_X86CallSeqStart : SDCallSeqStart<[SDTCisVT<0, i32>,
  70. SDTCisVT<1, i32>]>;
  71. def SDT_X86CallSeqEnd : SDCallSeqEnd<[SDTCisVT<0, i32>,
  72. SDTCisVT<1, i32>]>;
  73. def SDT_X86Call : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>;
  74. def SDT_X86NtBrind : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>;
  75. def SDT_X86VASTART_SAVE_XMM_REGS : SDTypeProfile<0, -1, [SDTCisVT<0, i8>,
  76. SDTCisPtrTy<1>]>;
  77. def SDT_X86VAARG : SDTypeProfile<1, -1, [SDTCisPtrTy<0>,
  78. SDTCisPtrTy<1>,
  79. SDTCisVT<2, i32>,
  80. SDTCisVT<3, i8>,
  81. SDTCisVT<4, i32>]>;
  82. def SDTX86RepStr : SDTypeProfile<0, 1, [SDTCisVT<0, OtherVT>]>;
  83. def SDTX86Void : SDTypeProfile<0, 0, []>;
  84. def SDTX86Wrapper : SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>, SDTCisPtrTy<0>]>;
  85. def SDT_X86TLSADDR : SDTypeProfile<0, 1, [SDTCisInt<0>]>;
  86. def SDT_X86TLSBASEADDR : SDTypeProfile<0, 1, [SDTCisInt<0>]>;
  87. def SDT_X86TLSCALL : SDTypeProfile<0, 1, [SDTCisInt<0>]>;
  88. def SDT_X86DYN_ALLOCA : SDTypeProfile<0, 1, [SDTCisVT<0, iPTR>]>;
  89. def SDT_X86SEG_ALLOCA : SDTypeProfile<1, 1, [SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>;
  90. def SDT_X86PROBED_ALLOCA : SDTypeProfile<1, 1, [SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>;
  91. def SDT_X86EHRET : SDTypeProfile<0, 1, [SDTCisInt<0>]>;
  92. def SDT_X86TCRET : SDTypeProfile<0, 2, [SDTCisPtrTy<0>, SDTCisVT<1, i32>]>;
  93. def SDT_X86MEMBARRIER : SDTypeProfile<0, 0, []>;
  94. def SDT_X86ENQCMD : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
  95. SDTCisPtrTy<1>, SDTCisSameAs<1, 2>]>;
  96. def SDT_X86AESENCDECKL : SDTypeProfile<2, 2, [SDTCisVT<0, v2i64>,
  97. SDTCisVT<1, i32>,
  98. SDTCisVT<2, v2i64>,
  99. SDTCisPtrTy<3>]>;
  100. def X86MemBarrier : SDNode<"X86ISD::MEMBARRIER", SDT_X86MEMBARRIER,
  101. [SDNPHasChain,SDNPSideEffect]>;
  102. def X86MFence : SDNode<"X86ISD::MFENCE", SDT_X86MEMBARRIER,
  103. [SDNPHasChain]>;
  104. def X86bsf : SDNode<"X86ISD::BSF", SDTUnaryArithWithFlags>;
  105. def X86bsr : SDNode<"X86ISD::BSR", SDTUnaryArithWithFlags>;
  106. def X86fshl : SDNode<"X86ISD::FSHL", SDTIntShiftDOp>;
  107. def X86fshr : SDNode<"X86ISD::FSHR", SDTIntShiftDOp>;
  108. def X86cmp : SDNode<"X86ISD::CMP" , SDTX86CmpTest>;
  109. def X86fcmp : SDNode<"X86ISD::FCMP", SDTX86FCmp>;
  110. def X86strict_fcmp : SDNode<"X86ISD::STRICT_FCMP", SDTX86FCmp, [SDNPHasChain]>;
  111. def X86strict_fcmps : SDNode<"X86ISD::STRICT_FCMPS", SDTX86FCmp, [SDNPHasChain]>;
  112. def X86bt : SDNode<"X86ISD::BT", SDTX86CmpTest>;
  113. def X86cmov : SDNode<"X86ISD::CMOV", SDTX86Cmov>;
  114. def X86brcond : SDNode<"X86ISD::BRCOND", SDTX86BrCond,
  115. [SDNPHasChain]>;
  116. def X86setcc : SDNode<"X86ISD::SETCC", SDTX86SetCC>;
  117. def X86setcc_c : SDNode<"X86ISD::SETCC_CARRY", SDTX86SetCC_C>;
  118. def X86rdrand : SDNode<"X86ISD::RDRAND", SDTX86rdrand,
  119. [SDNPHasChain, SDNPSideEffect]>;
  120. def X86rdseed : SDNode<"X86ISD::RDSEED", SDTX86rdrand,
  121. [SDNPHasChain, SDNPSideEffect]>;
  122. def X86rdpkru : SDNode<"X86ISD::RDPKRU", SDTX86rdpkru,
  123. [SDNPHasChain, SDNPSideEffect]>;
  124. def X86wrpkru : SDNode<"X86ISD::WRPKRU", SDTX86wrpkru,
  125. [SDNPHasChain, SDNPSideEffect]>;
  126. def X86cas : SDNode<"X86ISD::LCMPXCHG_DAG", SDTX86cas,
  127. [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
  128. SDNPMayLoad, SDNPMemOperand]>;
  129. def X86cas8 : SDNode<"X86ISD::LCMPXCHG8_DAG", SDTX86cas8pair,
  130. [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
  131. SDNPMayLoad, SDNPMemOperand]>;
  132. def X86cas16 : SDNode<"X86ISD::LCMPXCHG16_DAG", SDTX86cas16pair,
  133. [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
  134. SDNPMayLoad, SDNPMemOperand]>;
  135. def X86retflag : SDNode<"X86ISD::RET_FLAG", SDTX86Ret,
  136. [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>;
  137. def X86iret : SDNode<"X86ISD::IRET", SDTX86Ret,
  138. [SDNPHasChain, SDNPOptInGlue]>;
  139. def X86vastart_save_xmm_regs :
  140. SDNode<"X86ISD::VASTART_SAVE_XMM_REGS",
  141. SDT_X86VASTART_SAVE_XMM_REGS,
  142. [SDNPHasChain, SDNPMayStore, SDNPMemOperand, SDNPVariadic]>;
  143. def X86vaarg64 :
  144. SDNode<"X86ISD::VAARG_64", SDT_X86VAARG,
  145. [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
  146. SDNPMemOperand]>;
  147. def X86vaargx32 :
  148. SDNode<"X86ISD::VAARG_X32", SDT_X86VAARG,
  149. [SDNPHasChain, SDNPMayLoad, SDNPMayStore,
  150. SDNPMemOperand]>;
  151. def X86callseq_start :
  152. SDNode<"ISD::CALLSEQ_START", SDT_X86CallSeqStart,
  153. [SDNPHasChain, SDNPOutGlue]>;
  154. def X86callseq_end :
  155. SDNode<"ISD::CALLSEQ_END", SDT_X86CallSeqEnd,
  156. [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;
  157. def X86call : SDNode<"X86ISD::CALL", SDT_X86Call,
  158. [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
  159. SDNPVariadic]>;
  160. def X86call_rvmarker : SDNode<"X86ISD::CALL_RVMARKER", SDT_X86Call,
  161. [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
  162. SDNPVariadic]>;
  163. def X86NoTrackCall : SDNode<"X86ISD::NT_CALL", SDT_X86Call,
  164. [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue,
  165. SDNPVariadic]>;
  166. def X86NoTrackBrind : SDNode<"X86ISD::NT_BRIND", SDT_X86NtBrind,
  167. [SDNPHasChain]>;
  168. def X86rep_stos: SDNode<"X86ISD::REP_STOS", SDTX86RepStr,
  169. [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore]>;
  170. def X86rep_movs: SDNode<"X86ISD::REP_MOVS", SDTX86RepStr,
  171. [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore,
  172. SDNPMayLoad]>;
  173. def X86Wrapper : SDNode<"X86ISD::Wrapper", SDTX86Wrapper>;
  174. def X86WrapperRIP : SDNode<"X86ISD::WrapperRIP", SDTX86Wrapper>;
  175. def X86RecoverFrameAlloc : SDNode<"ISD::LOCAL_RECOVER",
  176. SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>,
  177. SDTCisInt<1>]>>;
  178. def X86tlsaddr : SDNode<"X86ISD::TLSADDR", SDT_X86TLSADDR,
  179. [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;
  180. def X86tlsbaseaddr : SDNode<"X86ISD::TLSBASEADDR", SDT_X86TLSBASEADDR,
  181. [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;
  182. def X86ehret : SDNode<"X86ISD::EH_RETURN", SDT_X86EHRET,
  183. [SDNPHasChain]>;
  184. def X86eh_sjlj_setjmp : SDNode<"X86ISD::EH_SJLJ_SETJMP",
  185. SDTypeProfile<1, 1, [SDTCisInt<0>,
  186. SDTCisPtrTy<1>]>,
  187. [SDNPHasChain, SDNPSideEffect]>;
  188. def X86eh_sjlj_longjmp : SDNode<"X86ISD::EH_SJLJ_LONGJMP",
  189. SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>,
  190. [SDNPHasChain, SDNPSideEffect]>;
  191. def X86eh_sjlj_setup_dispatch : SDNode<"X86ISD::EH_SJLJ_SETUP_DISPATCH",
  192. SDTypeProfile<0, 0, []>,
  193. [SDNPHasChain, SDNPSideEffect]>;
  194. def X86tcret : SDNode<"X86ISD::TC_RETURN", SDT_X86TCRET,
  195. [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>;
  196. def X86add_flag : SDNode<"X86ISD::ADD", SDTBinaryArithWithFlags,
  197. [SDNPCommutative]>;
  198. def X86sub_flag : SDNode<"X86ISD::SUB", SDTBinaryArithWithFlags>;
  199. def X86smul_flag : SDNode<"X86ISD::SMUL", SDTBinaryArithWithFlags,
  200. [SDNPCommutative]>;
  201. def X86umul_flag : SDNode<"X86ISD::UMUL", SDT2ResultBinaryArithWithFlags,
  202. [SDNPCommutative]>;
  203. def X86adc_flag : SDNode<"X86ISD::ADC", SDTBinaryArithWithFlagsInOut>;
  204. def X86sbb_flag : SDNode<"X86ISD::SBB", SDTBinaryArithWithFlagsInOut>;
  205. def X86or_flag : SDNode<"X86ISD::OR", SDTBinaryArithWithFlags,
  206. [SDNPCommutative]>;
  207. def X86xor_flag : SDNode<"X86ISD::XOR", SDTBinaryArithWithFlags,
  208. [SDNPCommutative]>;
  209. def X86and_flag : SDNode<"X86ISD::AND", SDTBinaryArithWithFlags,
  210. [SDNPCommutative]>;
  211. def X86lock_add : SDNode<"X86ISD::LADD", SDTLockBinaryArithWithFlags,
  212. [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
  213. SDNPMemOperand]>;
  214. def X86lock_sub : SDNode<"X86ISD::LSUB", SDTLockBinaryArithWithFlags,
  215. [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
  216. SDNPMemOperand]>;
  217. def X86lock_or : SDNode<"X86ISD::LOR", SDTLockBinaryArithWithFlags,
  218. [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
  219. SDNPMemOperand]>;
  220. def X86lock_xor : SDNode<"X86ISD::LXOR", SDTLockBinaryArithWithFlags,
  221. [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
  222. SDNPMemOperand]>;
  223. def X86lock_and : SDNode<"X86ISD::LAND", SDTLockBinaryArithWithFlags,
  224. [SDNPHasChain, SDNPMayStore, SDNPMayLoad,
  225. SDNPMemOperand]>;
  226. def X86bextr : SDNode<"X86ISD::BEXTR", SDTIntBinOp>;
  227. def X86bextri : SDNode<"X86ISD::BEXTRI", SDTIntBinOp>;
  228. def X86bzhi : SDNode<"X86ISD::BZHI", SDTIntBinOp>;
  229. def X86pdep : SDNode<"X86ISD::PDEP", SDTIntBinOp>;
  230. def X86pext : SDNode<"X86ISD::PEXT", SDTIntBinOp>;
  231. def X86mul_imm : SDNode<"X86ISD::MUL_IMM", SDTIntBinOp>;
  232. def X86DynAlloca : SDNode<"X86ISD::DYN_ALLOCA", SDT_X86DYN_ALLOCA,
  233. [SDNPHasChain, SDNPOutGlue]>;
  234. def X86SegAlloca : SDNode<"X86ISD::SEG_ALLOCA", SDT_X86SEG_ALLOCA,
  235. [SDNPHasChain]>;
  236. def X86ProbedAlloca : SDNode<"X86ISD::PROBED_ALLOCA", SDT_X86PROBED_ALLOCA,
  237. [SDNPHasChain]>;
  238. def X86TLSCall : SDNode<"X86ISD::TLSCALL", SDT_X86TLSCALL,
  239. [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>;
  240. def X86lwpins : SDNode<"X86ISD::LWPINS",
  241. SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>,
  242. SDTCisVT<2, i32>, SDTCisVT<3, i32>]>,
  243. [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPSideEffect]>;
  244. def X86umwait : SDNode<"X86ISD::UMWAIT",
  245. SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>,
  246. SDTCisVT<2, i32>, SDTCisVT<3, i32>]>,
  247. [SDNPHasChain, SDNPSideEffect]>;
  248. def X86tpause : SDNode<"X86ISD::TPAUSE",
  249. SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>,
  250. SDTCisVT<2, i32>, SDTCisVT<3, i32>]>,
  251. [SDNPHasChain, SDNPSideEffect]>;
  252. def X86enqcmd : SDNode<"X86ISD::ENQCMD", SDT_X86ENQCMD,
  253. [SDNPHasChain, SDNPSideEffect]>;
  254. def X86enqcmds : SDNode<"X86ISD::ENQCMDS", SDT_X86ENQCMD,
  255. [SDNPHasChain, SDNPSideEffect]>;
  256. def X86testui : SDNode<"X86ISD::TESTUI",
  257. SDTypeProfile<1, 0, [SDTCisVT<0, i32>]>,
  258. [SDNPHasChain, SDNPSideEffect]>;
  259. def X86aesenc128kl : SDNode<"X86ISD::AESENC128KL", SDT_X86AESENCDECKL,
  260. [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
  261. SDNPMemOperand]>;
  262. def X86aesdec128kl : SDNode<"X86ISD::AESDEC128KL", SDT_X86AESENCDECKL,
  263. [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
  264. SDNPMemOperand]>;
  265. def X86aesenc256kl : SDNode<"X86ISD::AESENC256KL", SDT_X86AESENCDECKL,
  266. [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
  267. SDNPMemOperand]>;
  268. def X86aesdec256kl : SDNode<"X86ISD::AESDEC256KL", SDT_X86AESENCDECKL,
  269. [SDNPHasChain, SDNPMayLoad, SDNPSideEffect,
  270. SDNPMemOperand]>;
  271. //===----------------------------------------------------------------------===//
  272. // X86 Operand Definitions.
  273. //
  274. // A version of ptr_rc which excludes SP, ESP, and RSP. This is used for
  275. // the index operand of an address, to conform to x86 encoding restrictions.
  276. def ptr_rc_nosp : PointerLikeRegClass<1>;
  277. // *mem - Operand definitions for the funky X86 addressing mode operands.
  278. //
  279. def X86MemAsmOperand : AsmOperandClass {
  280. let Name = "Mem";
  281. }
  282. let RenderMethod = "addMemOperands", SuperClasses = [X86MemAsmOperand] in {
  283. def X86Mem8AsmOperand : AsmOperandClass { let Name = "Mem8"; }
  284. def X86Mem16AsmOperand : AsmOperandClass { let Name = "Mem16"; }
  285. def X86Mem32AsmOperand : AsmOperandClass { let Name = "Mem32"; }
  286. def X86Mem64AsmOperand : AsmOperandClass { let Name = "Mem64"; }
  287. def X86Mem80AsmOperand : AsmOperandClass { let Name = "Mem80"; }
  288. def X86Mem128AsmOperand : AsmOperandClass { let Name = "Mem128"; }
  289. def X86Mem256AsmOperand : AsmOperandClass { let Name = "Mem256"; }
  290. def X86Mem512AsmOperand : AsmOperandClass { let Name = "Mem512"; }
  291. // Gather mem operands
  292. def X86Mem64_RC128Operand : AsmOperandClass { let Name = "Mem64_RC128"; }
  293. def X86Mem128_RC128Operand : AsmOperandClass { let Name = "Mem128_RC128"; }
  294. def X86Mem256_RC128Operand : AsmOperandClass { let Name = "Mem256_RC128"; }
  295. def X86Mem128_RC256Operand : AsmOperandClass { let Name = "Mem128_RC256"; }
  296. def X86Mem256_RC256Operand : AsmOperandClass { let Name = "Mem256_RC256"; }
  297. def X86Mem64_RC128XOperand : AsmOperandClass { let Name = "Mem64_RC128X"; }
  298. def X86Mem128_RC128XOperand : AsmOperandClass { let Name = "Mem128_RC128X"; }
  299. def X86Mem256_RC128XOperand : AsmOperandClass { let Name = "Mem256_RC128X"; }
  300. def X86Mem128_RC256XOperand : AsmOperandClass { let Name = "Mem128_RC256X"; }
  301. def X86Mem256_RC256XOperand : AsmOperandClass { let Name = "Mem256_RC256X"; }
  302. def X86Mem512_RC256XOperand : AsmOperandClass { let Name = "Mem512_RC256X"; }
  303. def X86Mem256_RC512Operand : AsmOperandClass { let Name = "Mem256_RC512"; }
  304. def X86Mem512_RC512Operand : AsmOperandClass { let Name = "Mem512_RC512"; }
  305. def X86SibMemOperand : AsmOperandClass { let Name = "SibMem"; }
  306. }
  307. def X86AbsMemAsmOperand : AsmOperandClass {
  308. let Name = "AbsMem";
  309. let SuperClasses = [X86MemAsmOperand];
  310. }
  311. class X86MemOperand<string printMethod,
  312. AsmOperandClass parserMatchClass = X86MemAsmOperand> : Operand<iPTR> {
  313. let PrintMethod = printMethod;
  314. let MIOperandInfo = (ops ptr_rc, i8imm, ptr_rc_nosp, i32imm, SEGMENT_REG);
  315. let ParserMatchClass = parserMatchClass;
  316. let OperandType = "OPERAND_MEMORY";
  317. }
  318. // Gather mem operands
  319. class X86VMemOperand<RegisterClass RC, string printMethod,
  320. AsmOperandClass parserMatchClass>
  321. : X86MemOperand<printMethod, parserMatchClass> {
  322. let MIOperandInfo = (ops ptr_rc, i8imm, RC, i32imm, SEGMENT_REG);
  323. }
  324. def anymem : X86MemOperand<"printMemReference">;
  325. def X86any_fcmp : PatFrags<(ops node:$lhs, node:$rhs),
  326. [(X86strict_fcmp node:$lhs, node:$rhs),
  327. (X86fcmp node:$lhs, node:$rhs)]>;
// FIXME: Right now we allow any size during parsing, but we might want to
// restrict to only unsized memory.
def opaquemem : X86MemOperand<"printMemReference">;

// Memory reference that must be written with an explicit SIB form.
def sibmem: X86MemOperand<"printMemReference", X86SibMemOperand>;

// Sized integer memory operands, 8-bit through 512-bit. The parser class
// enforces the size; the print method emits the matching size directive.
def i8mem   : X86MemOperand<"printbytemem",     X86Mem8AsmOperand>;
def i16mem  : X86MemOperand<"printwordmem",     X86Mem16AsmOperand>;
def i32mem  : X86MemOperand<"printdwordmem",    X86Mem32AsmOperand>;
def i64mem  : X86MemOperand<"printqwordmem",    X86Mem64AsmOperand>;
def i128mem : X86MemOperand<"printxmmwordmem",  X86Mem128AsmOperand>;
def i256mem : X86MemOperand<"printymmwordmem",  X86Mem256AsmOperand>;
def i512mem : X86MemOperand<"printzmmwordmem",  X86Mem512AsmOperand>;

// Floating-point memory operands; each shares the parser/printer of the
// integer operand of the same width.
def f16mem  : X86MemOperand<"printwordmem",     X86Mem16AsmOperand>;
def f32mem  : X86MemOperand<"printdwordmem",    X86Mem32AsmOperand>;
def f64mem  : X86MemOperand<"printqwordmem",    X86Mem64AsmOperand>;
def f80mem  : X86MemOperand<"printtbytemem",    X86Mem80AsmOperand>;
def f128mem : X86MemOperand<"printxmmwordmem",  X86Mem128AsmOperand>;
def f256mem : X86MemOperand<"printymmwordmem",  X86Mem256AsmOperand>;
def f512mem : X86MemOperand<"printzmmwordmem",  X86Mem512AsmOperand>;
// Gather mem operands. Naming scheme: "v" + index-register width
// (x=128-bit, y=256-bit, z=512-bit) + memory access size; an extra "x"
// before "mem" marks the EVEX (VR...X / VR512) register-class variants.
def vx64mem   : X86VMemOperand<VR128,  "printqwordmem",   X86Mem64_RC128Operand>;
def vx128mem  : X86VMemOperand<VR128,  "printxmmwordmem", X86Mem128_RC128Operand>;
def vx256mem  : X86VMemOperand<VR128,  "printymmwordmem", X86Mem256_RC128Operand>;
def vy128mem  : X86VMemOperand<VR256,  "printxmmwordmem", X86Mem128_RC256Operand>;
def vy256mem  : X86VMemOperand<VR256,  "printymmwordmem", X86Mem256_RC256Operand>;

def vx64xmem  : X86VMemOperand<VR128X, "printqwordmem",   X86Mem64_RC128XOperand>;
def vx128xmem : X86VMemOperand<VR128X, "printxmmwordmem", X86Mem128_RC128XOperand>;
def vx256xmem : X86VMemOperand<VR128X, "printymmwordmem", X86Mem256_RC128XOperand>;
def vy128xmem : X86VMemOperand<VR256X, "printxmmwordmem", X86Mem128_RC256XOperand>;
def vy256xmem : X86VMemOperand<VR256X, "printymmwordmem", X86Mem256_RC256XOperand>;
def vy512xmem : X86VMemOperand<VR256X, "printzmmwordmem", X86Mem512_RC256XOperand>;
def vz256mem  : X86VMemOperand<VR512,  "printymmwordmem", X86Mem256_RC512Operand>;
def vz512mem  : X86VMemOperand<VR512,  "printzmmwordmem", X86Mem512_RC512Operand>;
// A version of i8mem for use on x86-64 and x32 that uses a NOREX GPR instead
// of a plain GPR, so that it doesn't potentially require a REX prefix.
def ptr_rc_norex : PointerLikeRegClass<2>;
def ptr_rc_norex_nosp : PointerLikeRegClass<3>;

def i8mem_NOREX : Operand<iPTR> {
  let PrintMethod = "printbytemem";
  // Same 5-operand memory tuple as X86MemOperand, but base/index registers
  // are drawn from the NOREX pointer classes declared above.
  let MIOperandInfo = (ops ptr_rc_norex, i8imm, ptr_rc_norex_nosp, i32imm,
                       SEGMENT_REG);
  let ParserMatchClass = X86Mem8AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}
// GPRs available for tailcall.
// It represents GR32_TC, GR64_TC or GR64_TCW64.
def ptr_rc_tailcall : PointerLikeRegClass<4>;

// Special i32mem for addresses of load folding tail calls. These are not
// allowed to use callee-saved registers since they must be scheduled
// after callee-saved registers are popped.
def i32mem_TC : Operand<i32> {
  let PrintMethod = "printdwordmem";
  let MIOperandInfo = (ops ptr_rc_tailcall, i8imm, ptr_rc_tailcall,
                       i32imm, SEGMENT_REG);
  let ParserMatchClass = X86Mem32AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}

// Special i64mem for addresses of load folding tail calls. These are not
// allowed to use callee-saved registers since they must be scheduled
// after callee-saved registers are popped.
def i64mem_TC : Operand<i64> {
  let PrintMethod = "printqwordmem";
  let MIOperandInfo = (ops ptr_rc_tailcall, i8imm,
                       ptr_rc_tailcall, i32imm, SEGMENT_REG);
  let ParserMatchClass = X86Mem64AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}
// Special parser to detect 16-bit mode to select 16-bit displacement.
// Rendered with the same method as other absolute-memory operands.
def X86AbsMem16AsmOperand : AsmOperandClass {
  let Name = "AbsMem16";
  let RenderMethod = "addAbsMemOperands";
  let SuperClasses = [X86AbsMemAsmOperand];
}
// Branch targets print as pc-relative values.
class BranchTargetOperand<ValueType ty> : Operand<ty> {
  let OperandType = "OPERAND_PCREL";
  let PrintMethod = "printPCRelImm";
  let ParserMatchClass = X86AbsMemAsmOperand;
}

def i32imm_brtarget : BranchTargetOperand<i32>;
def i16imm_brtarget : BranchTargetOperand<i16>;

// 64-bits but only 32 bits are significant, and those bits are treated as being
// pc relative.
def i64i32imm_brtarget : BranchTargetOperand<i64>;

def brtarget   : BranchTargetOperand<OtherVT>;
def brtarget8  : BranchTargetOperand<OtherVT>;
// 16-bit branch targets use the dedicated parser class above so that 16-bit
// mode can select a 16-bit displacement.
def brtarget16 : BranchTargetOperand<OtherVT> {
  let ParserMatchClass = X86AbsMem16AsmOperand;
}
def brtarget32 : BranchTargetOperand<OtherVT>;
// Asm operand classes for source-index memory operands (one per access
// size); all share the addSrcIdxOperands render method.
let RenderMethod = "addSrcIdxOperands" in {
  def X86SrcIdx8Operand : AsmOperandClass {
    let Name = "SrcIdx8";
    let SuperClasses = [X86Mem8AsmOperand];
  }
  def X86SrcIdx16Operand : AsmOperandClass {
    let Name = "SrcIdx16";
    let SuperClasses = [X86Mem16AsmOperand];
  }
  def X86SrcIdx32Operand : AsmOperandClass {
    let Name = "SrcIdx32";
    let SuperClasses = [X86Mem32AsmOperand];
  }
  def X86SrcIdx64Operand : AsmOperandClass {
    let Name = "SrcIdx64";
    let SuperClasses = [X86Mem64AsmOperand];
  }
} // RenderMethod = "addSrcIdxOperands"
// Asm operand classes for destination-index memory operands (one per access
// size); all share the addDstIdxOperands render method.
let RenderMethod = "addDstIdxOperands" in {
  def X86DstIdx8Operand : AsmOperandClass {
    let Name = "DstIdx8";
    let SuperClasses = [X86Mem8AsmOperand];
  }
  def X86DstIdx16Operand : AsmOperandClass {
    let Name = "DstIdx16";
    let SuperClasses = [X86Mem16AsmOperand];
  }
  def X86DstIdx32Operand : AsmOperandClass {
    let Name = "DstIdx32";
    let SuperClasses = [X86Mem32AsmOperand];
  }
  def X86DstIdx64Operand : AsmOperandClass {
    let Name = "DstIdx64";
    let SuperClasses = [X86Mem64AsmOperand];
  }
} // RenderMethod = "addDstIdxOperands"
// Asm operand classes for memory-offset operands. Naming scheme is
// MemOffs<address size>_<operand size>; each inherits from the plain memory
// class of the matching operand size and uses the addMemOffsOperands
// render method.
let RenderMethod = "addMemOffsOperands" in {
  def X86MemOffs16_8AsmOperand : AsmOperandClass {
    let Name = "MemOffs16_8";
    let SuperClasses = [X86Mem8AsmOperand];
  }
  def X86MemOffs16_16AsmOperand : AsmOperandClass {
    let Name = "MemOffs16_16";
    let SuperClasses = [X86Mem16AsmOperand];
  }
  def X86MemOffs16_32AsmOperand : AsmOperandClass {
    let Name = "MemOffs16_32";
    let SuperClasses = [X86Mem32AsmOperand];
  }
  def X86MemOffs32_8AsmOperand : AsmOperandClass {
    let Name = "MemOffs32_8";
    let SuperClasses = [X86Mem8AsmOperand];
  }
  def X86MemOffs32_16AsmOperand : AsmOperandClass {
    let Name = "MemOffs32_16";
    let SuperClasses = [X86Mem16AsmOperand];
  }
  def X86MemOffs32_32AsmOperand : AsmOperandClass {
    let Name = "MemOffs32_32";
    let SuperClasses = [X86Mem32AsmOperand];
  }
  def X86MemOffs32_64AsmOperand : AsmOperandClass {
    let Name = "MemOffs32_64";
    let SuperClasses = [X86Mem64AsmOperand];
  }
  def X86MemOffs64_8AsmOperand : AsmOperandClass {
    let Name = "MemOffs64_8";
    let SuperClasses = [X86Mem8AsmOperand];
  }
  def X86MemOffs64_16AsmOperand : AsmOperandClass {
    let Name = "MemOffs64_16";
    let SuperClasses = [X86Mem16AsmOperand];
  }
  def X86MemOffs64_32AsmOperand : AsmOperandClass {
    let Name = "MemOffs64_32";
    let SuperClasses = [X86Mem32AsmOperand];
  }
  def X86MemOffs64_64AsmOperand : AsmOperandClass {
    let Name = "MemOffs64_64";
    let SuperClasses = [X86Mem64AsmOperand];
  }
} // RenderMethod = "addMemOffsOperands"
// Source-index memory operand: only a pointer register plus a segment
// register (no scale/index/displacement).
class X86SrcIdxOperand<string printMethod, AsmOperandClass parserMatchClass>
    : X86MemOperand<printMethod, parserMatchClass> {
  let MIOperandInfo = (ops ptr_rc, SEGMENT_REG);
}

// Destination-index memory operand: carries only the pointer register — no
// segment operand at all.
class X86DstIdxOperand<string printMethod, AsmOperandClass parserMatchClass>
    : X86MemOperand<printMethod, parserMatchClass> {
  let MIOperandInfo = (ops ptr_rc);
}

def srcidx8  : X86SrcIdxOperand<"printSrcIdx8",  X86SrcIdx8Operand>;
def srcidx16 : X86SrcIdxOperand<"printSrcIdx16", X86SrcIdx16Operand>;
def srcidx32 : X86SrcIdxOperand<"printSrcIdx32", X86SrcIdx32Operand>;
def srcidx64 : X86SrcIdxOperand<"printSrcIdx64", X86SrcIdx64Operand>;
def dstidx8  : X86DstIdxOperand<"printDstIdx8",  X86DstIdx8Operand>;
def dstidx16 : X86DstIdxOperand<"printDstIdx16", X86DstIdx16Operand>;
def dstidx32 : X86DstIdxOperand<"printDstIdx32", X86DstIdx32Operand>;
def dstidx64 : X86DstIdxOperand<"printDstIdx64", X86DstIdx64Operand>;
// Memory-offset operand: an absolute offset immediate (immOperand sets the
// address size) plus a segment register. Instantiations below are named
// offset<address size>_<operand size>.
class X86MemOffsOperand<Operand immOperand, string printMethod,
                        AsmOperandClass parserMatchClass>
    : X86MemOperand<printMethod, parserMatchClass> {
  let MIOperandInfo = (ops immOperand, SEGMENT_REG);
}

def offset16_8  : X86MemOffsOperand<i16imm, "printMemOffs8",
                                    X86MemOffs16_8AsmOperand>;
def offset16_16 : X86MemOffsOperand<i16imm, "printMemOffs16",
                                    X86MemOffs16_16AsmOperand>;
def offset16_32 : X86MemOffsOperand<i16imm, "printMemOffs32",
                                    X86MemOffs16_32AsmOperand>;
def offset32_8  : X86MemOffsOperand<i32imm, "printMemOffs8",
                                    X86MemOffs32_8AsmOperand>;
def offset32_16 : X86MemOffsOperand<i32imm, "printMemOffs16",
                                    X86MemOffs32_16AsmOperand>;
def offset32_32 : X86MemOffsOperand<i32imm, "printMemOffs32",
                                    X86MemOffs32_32AsmOperand>;
def offset32_64 : X86MemOffsOperand<i32imm, "printMemOffs64",
                                    X86MemOffs32_64AsmOperand>;
def offset64_8  : X86MemOffsOperand<i64imm, "printMemOffs8",
                                    X86MemOffs64_8AsmOperand>;
def offset64_16 : X86MemOffsOperand<i64imm, "printMemOffs16",
                                    X86MemOffs64_16AsmOperand>;
def offset64_32 : X86MemOffsOperand<i64imm, "printMemOffs32",
                                    X86MemOffs64_32AsmOperand>;
def offset64_64 : X86MemOffsOperand<i64imm, "printMemOffs64",
                                    X86MemOffs64_64AsmOperand>;
// Condition-code operand (an i8 at the MI level, printed via printCondCode).
def ccode : Operand<i8> {
  let PrintMethod = "printCondCode";
  let OperandNamespace = "X86";
  let OperandType = "OPERAND_COND_CODE";
}

// Common base for the sign-extended immediate asm operand classes defined
// further below; all render as plain immediates.
class ImmSExtAsmOperandClass : AsmOperandClass {
  let SuperClasses = [ImmAsmOperand];
  let RenderMethod = "addImmOperands";
}
// Register operand that parses as either a 32- or 64-bit GPR but is
// represented as GR32.
def X86GR32orGR64AsmOperand : AsmOperandClass {
  let Name = "GR32orGR64";
}
def GR32orGR64 : RegisterOperand<GR32> {
  let ParserMatchClass = X86GR32orGR64AsmOperand;
}

// Register operand that parses as a 16-, 32-, or 64-bit GPR but is
// represented as GR16.
def X86GR16orGR32orGR64AsmOperand : AsmOperandClass {
  let Name = "GR16orGR32orGR64";
}
def GR16orGR32orGR64 : RegisterOperand<GR16> {
  let ParserMatchClass = X86GR16orGR32orGR64AsmOperand;
}
// AVX-512 static rounding-control operand (an i32 at the MI level, printed
// via printRoundingControl).
def AVX512RCOperand : AsmOperandClass {
  let Name = "AVX512RC";
}
def AVX512RC : Operand<i32> {
  let PrintMethod = "printRoundingControl";
  let OperandNamespace = "X86";
  let OperandType = "OPERAND_ROUNDING_CONTROL";
  let ParserMatchClass = AVX512RCOperand;
}
// Sign-extended immediate classes. We don't need to define the full lattice
// here because there is no instruction with an ambiguity between ImmSExti64i32
// and ImmSExti32i8.
//
// The strange ranges come from the fact that the assembler always works with
// 64-bit immediates, but for a 16-bit target value we want to accept both "-1"
// (which will be a -1ULL), and "0xFF" (-1 in 16-bits).

// [0, 0x7FFFFFFF] |
//   [0xFFFFFFFF80000000, 0xFFFFFFFFFFFFFFFF]
def ImmSExti64i32AsmOperand : ImmSExtAsmOperandClass {
  let Name = "ImmSExti64i32";
}

// [0, 0x0000007F] | [0x000000000000FF80, 0x000000000000FFFF] |
//   [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF]
def ImmSExti16i8AsmOperand : ImmSExtAsmOperandClass {
  let Name = "ImmSExti16i8";
  let SuperClasses = [ImmSExti64i32AsmOperand];
}

// [0, 0x0000007F] | [0x00000000FFFFFF80, 0x00000000FFFFFFFF] |
//   [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF]
def ImmSExti32i8AsmOperand : ImmSExtAsmOperandClass {
  let Name = "ImmSExti32i8";
}

// [0, 0x0000007F] |
//   [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF]
def ImmSExti64i8AsmOperand : ImmSExtAsmOperandClass {
  let Name = "ImmSExti64i8";
  // Strict subset of all three wider sign-extended classes.
  let SuperClasses = [ImmSExti16i8AsmOperand, ImmSExti32i8AsmOperand,
                      ImmSExti64i32AsmOperand];
}
// 4-bit immediate used by some XOP instructions
// [0, 0xF]
def ImmUnsignedi4AsmOperand : AsmOperandClass {
  let Name = "ImmUnsignedi4";
  let RenderMethod = "addImmOperands";
  // Dedicated diagnostic so out-of-range values get a specific error.
  let DiagnosticType = "InvalidImmUnsignedi4";
}

// Unsigned immediate used by SSE/AVX instructions
// [0, 0xFF]
// [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF]
def ImmUnsignedi8AsmOperand : AsmOperandClass {
  let Name = "ImmUnsignedi8";
  let RenderMethod = "addImmOperands";
}
// A couple of more descriptive operand definitions.

// 16-bits but only 8 bits are significant.
def i16i8imm : Operand<i16> {
  let ParserMatchClass = ImmSExti16i8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// 32-bits but only 8 bits are significant.
def i32i8imm : Operand<i32> {
  let ParserMatchClass = ImmSExti32i8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// 64-bits but only 32 bits are significant.
def i64i32imm : Operand<i64> {
  let ParserMatchClass = ImmSExti64i32AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// 64-bits but only 8 bits are significant.
def i64i8imm : Operand<i64> {
  let ParserMatchClass = ImmSExti64i8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}

// Unsigned 4-bit immediate used by some XOP instructions.
def u4imm : Operand<i8> {
  let PrintMethod = "printU8Imm";
  let ParserMatchClass = ImmUnsignedi4AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// Unsigned 8-bit immediate used by SSE/AVX instructions.
def u8imm : Operand<i8> {
  let PrintMethod = "printU8Imm";
  let ParserMatchClass = ImmUnsignedi8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// 16-bit immediate but only 8-bits are significant and they are unsigned.
// Used by BT instructions.
def i16u8imm : Operand<i16> {
  let PrintMethod = "printU8Imm";
  let ParserMatchClass = ImmUnsignedi8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// 32-bit immediate but only 8-bits are significant and they are unsigned.
// Used by some SSE/AVX instructions that use intrinsics.
def i32u8imm : Operand<i32> {
  let PrintMethod = "printU8Imm";
  let ParserMatchClass = ImmUnsignedi8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// 64-bit immediate but only 8-bits are significant and they are unsigned.
// Used by BT instructions.
def i64u8imm : Operand<i64> {
  let PrintMethod = "printU8Imm";
  let ParserMatchClass = ImmUnsignedi8AsmOperand;
  let OperandType = "OPERAND_IMMEDIATE";
}
// Memory operand for 32-bit LEA results that is nevertheless addressed with
// 64-bit base/index registers (GR64 / GR64_NOSP below).
def lea64_32mem : Operand<i32> {
  let PrintMethod = "printMemReference";
  let MIOperandInfo = (ops GR64, i8imm, GR64_NOSP, i32imm, SEGMENT_REG);
  let ParserMatchClass = X86MemAsmOperand;
}

// Memory operands that use 64-bit pointers in both ILP32 and LP64.
def lea64mem : Operand<i64> {
  let PrintMethod = "printMemReference";
  let MIOperandInfo = (ops GR64, i8imm, GR64_NOSP, i32imm, SEGMENT_REG);
  let ParserMatchClass = X86MemAsmOperand;
}
// Asm operand classes for mask-register pairs; all rendered via
// addMaskPairOperands.
let RenderMethod = "addMaskPairOperands" in {
  def VK1PairAsmOperand  : AsmOperandClass { let Name = "VK1Pair"; }
  def VK2PairAsmOperand  : AsmOperandClass { let Name = "VK2Pair"; }
  def VK4PairAsmOperand  : AsmOperandClass { let Name = "VK4Pair"; }
  def VK8PairAsmOperand  : AsmOperandClass { let Name = "VK8Pair"; }
  def VK16PairAsmOperand : AsmOperandClass { let Name = "VK16Pair"; }
}

// Mask-pair register operands, one per mask width, all printed with
// printVKPair.
def VK1Pair : RegisterOperand<VK1PAIR, "printVKPair"> {
  let ParserMatchClass = VK1PairAsmOperand;
}
def VK2Pair : RegisterOperand<VK2PAIR, "printVKPair"> {
  let ParserMatchClass = VK2PairAsmOperand;
}
def VK4Pair : RegisterOperand<VK4PAIR, "printVKPair"> {
  let ParserMatchClass = VK4PairAsmOperand;
}
def VK8Pair : RegisterOperand<VK8PAIR, "printVKPair"> {
  let ParserMatchClass = VK8PairAsmOperand;
}
def VK16Pair : RegisterOperand<VK16PAIR, "printVKPair"> {
  let ParserMatchClass = VK16PairAsmOperand;
}
//===----------------------------------------------------------------------===//
// X86 Complex Pattern Definitions.
//

// Define X86-specific addressing mode. All addressing-mode patterns select
// the 5-operand memory tuple (base, scale, index, disp, segment).
def addr      : ComplexPattern<iPTR, 5, "selectAddr", [], [SDNPWantParent]>;

// LEA address selection: can fold arithmetic (add/sub/mul/shl/or) and frame
// indices into the addressing mode.
def lea32addr : ComplexPattern<i32, 5, "selectLEAAddr",
                               [add, sub, mul, X86mul_imm, shl, or, frameindex],
                               []>;
// In 64-bit mode 32-bit LEAs can use RIP-relative addressing.
def lea64_32addr : ComplexPattern<i32, 5, "selectLEA64_32Addr",
                                  [add, sub, mul, X86mul_imm, shl, or,
                                   frameindex, X86WrapperRIP],
                                  []>;

// TLS address selection (32-bit).
def tls32addr     : ComplexPattern<i32, 5, "selectTLSADDRAddr",
                                   [tglobaltlsaddr], []>;
def tls32baseaddr : ComplexPattern<i32, 5, "selectTLSADDRAddr",
                                   [tglobaltlsaddr], []>;

def lea64addr : ComplexPattern<i64, 5, "selectLEAAddr",
                               [add, sub, mul, X86mul_imm, shl, or, frameindex,
                                X86WrapperRIP], []>;

// TLS address selection (64-bit).
def tls64addr     : ComplexPattern<i64, 5, "selectTLSADDRAddr",
                                   [tglobaltlsaddr], []>;
def tls64baseaddr : ComplexPattern<i64, 5, "selectTLSADDRAddr",
                                   [tglobaltlsaddr], []>;

// Address selection for gather/scatter (vector-index) memory accesses.
def vectoraddr : ComplexPattern<iPTR, 5, "selectVectorAddr", [],[SDNPWantParent]>;

// A relocatable immediate is an operand that can be relocated by the linker to
// an immediate, such as a regular symbol in non-PIC code.
def relocImm : ComplexPattern<iAny, 1, "selectRelocImm",
                              [X86Wrapper], [], 0>;
//===----------------------------------------------------------------------===//
// X86 Instruction Predicate Definitions.
//
// Convention visible below: "HasX"/"NoX" test a single subtarget feature;
// "UseX" additionally requires that a superseding feature (e.g. AVX over
// SSE, AVX-512 over AVX) is absent, so only one instruction form matches.
def TruePredicate : Predicate<"true">;

def HasCMov      : Predicate<"Subtarget->hasCMov()">;
def NoCMov       : Predicate<"!Subtarget->hasCMov()">;

def HasMMX       : Predicate<"Subtarget->hasMMX()">;
def Has3DNow     : Predicate<"Subtarget->has3DNow()">;
def Has3DNowA    : Predicate<"Subtarget->has3DNowA()">;
def HasSSE1      : Predicate<"Subtarget->hasSSE1()">;
def UseSSE1      : Predicate<"Subtarget->hasSSE1() && !Subtarget->hasAVX()">;
def HasSSE2      : Predicate<"Subtarget->hasSSE2()">;
def UseSSE2      : Predicate<"Subtarget->hasSSE2() && !Subtarget->hasAVX()">;
def HasSSE3      : Predicate<"Subtarget->hasSSE3()">;
def UseSSE3      : Predicate<"Subtarget->hasSSE3() && !Subtarget->hasAVX()">;
def HasSSSE3     : Predicate<"Subtarget->hasSSSE3()">;
def UseSSSE3     : Predicate<"Subtarget->hasSSSE3() && !Subtarget->hasAVX()">;
def HasSSE41     : Predicate<"Subtarget->hasSSE41()">;
def NoSSE41      : Predicate<"!Subtarget->hasSSE41()">;
def UseSSE41     : Predicate<"Subtarget->hasSSE41() && !Subtarget->hasAVX()">;
def HasSSE42     : Predicate<"Subtarget->hasSSE42()">;
def UseSSE42     : Predicate<"Subtarget->hasSSE42() && !Subtarget->hasAVX()">;
def HasSSE4A     : Predicate<"Subtarget->hasSSE4A()">;
def NoAVX        : Predicate<"!Subtarget->hasAVX()">;
def HasAVX       : Predicate<"Subtarget->hasAVX()">;
def HasAVX2      : Predicate<"Subtarget->hasAVX2()">;
def HasAVX1Only  : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX2()">;
def HasAVX512    : Predicate<"Subtarget->hasAVX512()">;
def UseAVX       : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX512()">;
def UseAVX2      : Predicate<"Subtarget->hasAVX2() && !Subtarget->hasAVX512()">;
def NoAVX512     : Predicate<"!Subtarget->hasAVX512()">;
def HasCDI       : Predicate<"Subtarget->hasCDI()">;
def HasVPOPCNTDQ : Predicate<"Subtarget->hasVPOPCNTDQ()">;
def HasPFI       : Predicate<"Subtarget->hasPFI()">;
def HasERI       : Predicate<"Subtarget->hasERI()">;
def HasDQI       : Predicate<"Subtarget->hasDQI()">;
def NoDQI        : Predicate<"!Subtarget->hasDQI()">;
def HasBWI       : Predicate<"Subtarget->hasBWI()">;
def NoBWI        : Predicate<"!Subtarget->hasBWI()">;
def HasVLX       : Predicate<"Subtarget->hasVLX()">;
def NoVLX        : Predicate<"!Subtarget->hasVLX()">;
def NoVLX_Or_NoBWI : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasBWI()">;
def NoVLX_Or_NoDQI : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasDQI()">;
def PKU          : Predicate<"Subtarget->hasPKU()">;
def HasVNNI      : Predicate<"Subtarget->hasVNNI()">;
def HasVP2INTERSECT : Predicate<"Subtarget->hasVP2INTERSECT()">;
def HasBF16      : Predicate<"Subtarget->hasBF16()">;
def HasFP16      : Predicate<"Subtarget->hasFP16()">;
def HasAVXVNNI   : Predicate <"Subtarget->hasAVXVNNI()">;
def NoVLX_Or_NoVNNI : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasVNNI()">;

def HasBITALG    : Predicate<"Subtarget->hasBITALG()">;
def HasPOPCNT    : Predicate<"Subtarget->hasPOPCNT()">;
def HasAES       : Predicate<"Subtarget->hasAES()">;
def HasVAES      : Predicate<"Subtarget->hasVAES()">;
def NoVLX_Or_NoVAES : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasVAES()">;
def HasFXSR      : Predicate<"Subtarget->hasFXSR()">;
def HasXSAVE     : Predicate<"Subtarget->hasXSAVE()">;
def HasXSAVEOPT  : Predicate<"Subtarget->hasXSAVEOPT()">;
def HasXSAVEC    : Predicate<"Subtarget->hasXSAVEC()">;
def HasXSAVES    : Predicate<"Subtarget->hasXSAVES()">;
def HasPCLMUL    : Predicate<"Subtarget->hasPCLMUL()">;
def NoVLX_Or_NoVPCLMULQDQ :
                   Predicate<"!Subtarget->hasVLX() || !Subtarget->hasVPCLMULQDQ()">;
def HasVPCLMULQDQ : Predicate<"Subtarget->hasVPCLMULQDQ()">;
def HasGFNI      : Predicate<"Subtarget->hasGFNI()">;
def HasFMA       : Predicate<"Subtarget->hasFMA()">;
def HasFMA4      : Predicate<"Subtarget->hasFMA4()">;
def NoFMA4       : Predicate<"!Subtarget->hasFMA4()">;
def HasXOP       : Predicate<"Subtarget->hasXOP()">;
def HasTBM       : Predicate<"Subtarget->hasTBM()">;
def NoTBM        : Predicate<"!Subtarget->hasTBM()">;
def HasLWP       : Predicate<"Subtarget->hasLWP()">;
def HasMOVBE     : Predicate<"Subtarget->hasMOVBE()">;
def HasRDRAND    : Predicate<"Subtarget->hasRDRAND()">;
def HasF16C      : Predicate<"Subtarget->hasF16C()">;
def HasFSGSBase  : Predicate<"Subtarget->hasFSGSBase()">;
def HasLZCNT     : Predicate<"Subtarget->hasLZCNT()">;
def HasBMI       : Predicate<"Subtarget->hasBMI()">;
def HasBMI2      : Predicate<"Subtarget->hasBMI2()">;
def NoBMI2       : Predicate<"!Subtarget->hasBMI2()">;
def HasVBMI      : Predicate<"Subtarget->hasVBMI()">;
def HasVBMI2     : Predicate<"Subtarget->hasVBMI2()">;
def HasIFMA      : Predicate<"Subtarget->hasIFMA()">;
def HasRTM       : Predicate<"Subtarget->hasRTM()">;
def HasADX       : Predicate<"Subtarget->hasADX()">;
def HasSHA       : Predicate<"Subtarget->hasSHA()">;
def HasSGX       : Predicate<"Subtarget->hasSGX()">;
def HasRDSEED    : Predicate<"Subtarget->hasRDSEED()">;
def HasSSEPrefetch : Predicate<"Subtarget->hasSSEPrefetch()">;
def NoSSEPrefetch  : Predicate<"!Subtarget->hasSSEPrefetch()">;
def HasPrefetchW   : Predicate<"Subtarget->hasPrefetchW()">;
def HasPREFETCHWT1 : Predicate<"Subtarget->hasPREFETCHWT1()">;
def HasLAHFSAHF  : Predicate<"Subtarget->hasLAHFSAHF()">;
def HasMWAITX    : Predicate<"Subtarget->hasMWAITX()">;
def HasCLZERO    : Predicate<"Subtarget->hasCLZERO()">;
def HasCLDEMOTE  : Predicate<"Subtarget->hasCLDEMOTE()">;
def HasMOVDIRI   : Predicate<"Subtarget->hasMOVDIRI()">;
def HasMOVDIR64B : Predicate<"Subtarget->hasMOVDIR64B()">;
def HasPTWRITE   : Predicate<"Subtarget->hasPTWRITE()">;
// x87 stack is used for f32/f64 only when the corresponding SSE level is
// unavailable.
def FPStackf32   : Predicate<"!Subtarget->hasSSE1()">;
def FPStackf64   : Predicate<"!Subtarget->hasSSE2()">;
def HasSHSTK     : Predicate<"Subtarget->hasSHSTK()">;
def HasCLFLUSHOPT : Predicate<"Subtarget->hasCLFLUSHOPT()">;
def HasCLWB      : Predicate<"Subtarget->hasCLWB()">;
def HasWBNOINVD  : Predicate<"Subtarget->hasWBNOINVD()">;
def HasRDPID     : Predicate<"Subtarget->hasRDPID()">;
def HasWAITPKG   : Predicate<"Subtarget->hasWAITPKG()">;
def HasINVPCID   : Predicate<"Subtarget->hasINVPCID()">;
def HasCmpxchg8b : Predicate<"Subtarget->hasCmpxchg8b()">;
def HasCmpxchg16b: Predicate<"Subtarget->hasCmpxchg16b()">;
def HasPCONFIG   : Predicate<"Subtarget->hasPCONFIG()">;
def HasENQCMD    : Predicate<"Subtarget->hasENQCMD()">;
def HasKL        : Predicate<"Subtarget->hasKL()">;
def HasWIDEKL    : Predicate<"Subtarget->hasWIDEKL()">;
def HasHRESET    : Predicate<"Subtarget->hasHRESET()">;
def HasSERIALIZE : Predicate<"Subtarget->hasSERIALIZE()">;
def HasTSXLDTRK  : Predicate<"Subtarget->hasTSXLDTRK()">;
def HasAMXTILE   : Predicate<"Subtarget->hasAMXTILE()">;
def HasAMXBF16   : Predicate<"Subtarget->hasAMXBF16()">;
def HasAMXINT8   : Predicate<"Subtarget->hasAMXINT8()">;
def HasUINTR     : Predicate<"Subtarget->hasUINTR()">;
def HasCRC32     : Predicate<"Subtarget->hasCRC32()">;
// Mode predicates: used both at ISel time (Subtarget check) and, where an
// AssemblerPredicate is attached, by the assembler/disassembler.
def Not64BitMode : Predicate<"!Subtarget->is64Bit()">,
                   AssemblerPredicate<(all_of (not Mode64Bit)), "Not 64-bit mode">;
def In64BitMode  : Predicate<"Subtarget->is64Bit()">,
                   AssemblerPredicate<(all_of Mode64Bit), "64-bit mode">;
def IsLP64       : Predicate<"Subtarget->isTarget64BitLP64()">;
def NotLP64      : Predicate<"!Subtarget->isTarget64BitLP64()">;
def In16BitMode  : Predicate<"Subtarget->is16Bit()">,
                   AssemblerPredicate<(all_of Mode16Bit), "16-bit mode">;
def Not16BitMode : Predicate<"!Subtarget->is16Bit()">,
                   AssemblerPredicate<(all_of (not Mode16Bit)), "Not 16-bit mode">;
def In32BitMode  : Predicate<"Subtarget->is32Bit()">,
                   AssemblerPredicate<(all_of Mode32Bit), "32-bit mode">;
def IsWin64      : Predicate<"Subtarget->isTargetWin64()">;
def NotWin64     : Predicate<"!Subtarget->isTargetWin64()">;
// Depends on per-function frame info, so must be re-evaluated per function.
def NotWin64WithoutFP : Predicate<"!Subtarget->isTargetWin64() ||"
                                  "Subtarget->getFrameLowering()->hasFP(*MF)"> {
  let RecomputePerFunction = 1;
}
def IsPS4        : Predicate<"Subtarget->isTargetPS4()">;
def NotPS4       : Predicate<"!Subtarget->isTargetPS4()">;
def IsNaCl       : Predicate<"Subtarget->isTargetNaCl()">;
def NotNaCl      : Predicate<"!Subtarget->isTargetNaCl()">;
def SmallCode    : Predicate<"TM.getCodeModel() == CodeModel::Small">;
def KernelCode   : Predicate<"TM.getCodeModel() == CodeModel::Kernel">;
def NearData     : Predicate<"TM.getCodeModel() == CodeModel::Small ||"
                             "TM.getCodeModel() == CodeModel::Kernel">;
def IsNotPIC     : Predicate<"!TM.isPositionIndependent()">;

// We could compute these on a per-module basis but doing so requires accessing
// the Function object through the <Target>Subtarget and objections were raised
// to that (see post-commit review comments for r301750).
let RecomputePerFunction = 1 in {
  def OptForSize   : Predicate<"shouldOptForSize(MF)">;
  def OptForMinSize : Predicate<"MF->getFunction().hasMinSize()">;
  def OptForSpeed  : Predicate<"!shouldOptForSize(MF)">;
  def UseIncDec : Predicate<"!Subtarget->slowIncDec() || "
                            "shouldOptForSize(MF)">;
  def NoSSE41_Or_OptForSize : Predicate<"shouldOptForSize(MF) || "
                                        "!Subtarget->hasSSE41()">;
}

// Tuning / target-shape predicates.
def CallImmAddr  : Predicate<"Subtarget->isLegalToCallImmediateAddr()">;
def FavorMemIndirectCall  : Predicate<"!Subtarget->slowTwoMemOps()">;
def HasFastMem32 : Predicate<"!Subtarget->isUnalignedMem32Slow()">;
def HasFastLZCNT : Predicate<"Subtarget->hasFastLZCNT()">;
def HasFastSHLDRotate : Predicate<"Subtarget->hasFastSHLDRotate()">;
def HasERMSB : Predicate<"Subtarget->hasERMSB()">;
def HasFSRM : Predicate<"Subtarget->hasFSRM()">;
def HasMFence    : Predicate<"Subtarget->hasMFence()">;
def UseIndirectThunkCalls : Predicate<"Subtarget->useIndirectThunkCalls()">;
def NotUseIndirectThunkCalls : Predicate<"!Subtarget->useIndirectThunkCalls()">;
  902. //===----------------------------------------------------------------------===//
  903. // X86 Instruction Format Definitions.
  904. //
  905. include "X86InstrFormats.td"
  906. //===----------------------------------------------------------------------===//
  907. // Pattern fragments.
  908. //
// X86 specific condition code. These correspond to CondCode in
// X86InstrInfo.h. They must be kept in sync.
def X86_COND_O   : PatLeaf<(i8 0)>;
def X86_COND_NO  : PatLeaf<(i8 1)>;
def X86_COND_B   : PatLeaf<(i8 2)>;  // alt. COND_C
def X86_COND_AE  : PatLeaf<(i8 3)>;  // alt. COND_NC
def X86_COND_E   : PatLeaf<(i8 4)>;  // alt. COND_Z
def X86_COND_NE  : PatLeaf<(i8 5)>;  // alt. COND_NZ
def X86_COND_BE  : PatLeaf<(i8 6)>;  // alt. COND_NA
def X86_COND_A   : PatLeaf<(i8 7)>;  // alt. COND_NBE
def X86_COND_S   : PatLeaf<(i8 8)>;
def X86_COND_NS  : PatLeaf<(i8 9)>;
def X86_COND_P   : PatLeaf<(i8 10)>; // alt. COND_PE
def X86_COND_NP  : PatLeaf<(i8 11)>; // alt. COND_PO
def X86_COND_L   : PatLeaf<(i8 12)>; // alt. COND_NGE
def X86_COND_GE  : PatLeaf<(i8 13)>; // alt. COND_NL
def X86_COND_LE  : PatLeaf<(i8 14)>; // alt. COND_NG
def X86_COND_G   : PatLeaf<(i8 15)>; // alt. COND_NLE
// Immediates that fit when sign-extended from a narrower width.
def i16immSExt8  : ImmLeaf<i16, [{ return isInt<8>(Imm); }]>;
def i32immSExt8  : ImmLeaf<i32, [{ return isInt<8>(Imm); }]>;
def i64immSExt8  : ImmLeaf<i64, [{ return isInt<8>(Imm); }]>;
def i64immSExt32 : ImmLeaf<i64, [{ return isInt<32>(Imm); }]>;
def i64timmSExt32 : TImmLeaf<i64, [{ return isInt<32>(Imm); }]>;

// Relocatable immediates whose absolute-symbol range proves they fit when
// sign-extended from the given width.
def i16relocImmSExt8 : PatLeaf<(i16 relocImm), [{
  return isSExtAbsoluteSymbolRef(8, N);
}]>;
def i32relocImmSExt8 : PatLeaf<(i32 relocImm), [{
  return isSExtAbsoluteSymbolRef(8, N);
}]>;
def i64relocImmSExt8 : PatLeaf<(i64 relocImm), [{
  return isSExtAbsoluteSymbolRef(8, N);
}]>;
def i64relocImmSExt32 : PatLeaf<(i64 relocImm), [{
  return isSExtAbsoluteSymbolRef(32, N);
}]>;
// If we have multiple users of an immediate, it's much smaller to reuse
// the register, rather than encode the immediate in every instruction.
// This has the risk of increasing register pressure from stretched live
// ranges, however, the immediates should be trivial to rematerialize by
// the RA in the event of high register pressure.
// TODO : This is currently enabled for stores and binary ops. There are more
// cases for which this can be enabled, though this catches the bulk of the
// issues.
// TODO2 : This should really also be enabled under O2, but there's currently
// an issue with RA where we don't pull the constants into their users
// when we rematerialize them. I'll follow-up on enabling O2 after we fix that
// issue.
// TODO3 : This is currently limited to single basic blocks (DAG creation
// pulls block immediates to the top and merges them if necessary).
// Eventually, it would be nice to allow ConstantHoisting to merge constants
// globally for potentially added savings.
//
// Each "_su" variant below wraps the corresponding plain leaf and adds the
// requirement that the size heuristic does not prefer keeping the immediate
// in a register (!shouldAvoidImmediateInstFormsForSize).
def imm_su : PatLeaf<(imm), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64immSExt32_su : PatLeaf<(i64immSExt32), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

def relocImm8_su : PatLeaf<(i8 relocImm), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def relocImm16_su : PatLeaf<(i16 relocImm), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def relocImm32_su : PatLeaf<(i32 relocImm), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

def i16relocImmSExt8_su : PatLeaf<(i16relocImmSExt8), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i32relocImmSExt8_su : PatLeaf<(i32relocImmSExt8), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64relocImmSExt8_su : PatLeaf<(i64relocImmSExt8), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64relocImmSExt32_su : PatLeaf<(i64relocImmSExt32), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;

def i16immSExt8_su : PatLeaf<(i16immSExt8), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i32immSExt8_su : PatLeaf<(i32immSExt8), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
def i64immSExt8_su : PatLeaf<(i64immSExt8), [{
  return !shouldAvoidImmediateInstFormsForSize(N);
}]>;
  997. // i64immZExt32 predicate - True if the 64-bit immediate fits in a 32-bit
  998. // unsigned field.
  999. def i64immZExt32 : ImmLeaf<i64, [{ return isUInt<32>(Imm); }]>;
  1000. def i64immZExt32SExt8 : ImmLeaf<i64, [{
  1001. return isUInt<32>(Imm) && isInt<8>(static_cast<int32_t>(Imm));
  1002. }]>;
// Helper fragments for loads.

// It's safe to fold a zextload/extload from i1 as a regular i8 load. The
// upper bits are guaranteed to be zero and we were going to emit a MOV8rm
// which might get folded during peephole anyway.
def loadi8 : PatFrag<(ops node:$ptr), (i8 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  return ExtType == ISD::NON_EXTLOAD || ExtType == ISD::EXTLOAD ||
         ExtType == ISD::ZEXTLOAD;
}]>;

// It's always safe to treat a anyext i16 load as a i32 load if the i16 is
// known to be 32-bit aligned or better. Ditto for i8 to i16.
def loadi16 : PatFrag<(ops node:$ptr), (i16 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  if (ExtType == ISD::NON_EXTLOAD)
    return true;
  // Widening an anyext load requires a simple (non-volatile, non-atomic)
  // load with sufficient alignment; gated on EnablePromoteAnyextLoad.
  if (ExtType == ISD::EXTLOAD && EnablePromoteAnyextLoad)
    return LD->getAlignment() >= 2 && LD->isSimple();
  return false;
}]>;

def loadi32 : PatFrag<(ops node:$ptr), (i32 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  if (ExtType == ISD::NON_EXTLOAD)
    return true;
  if (ExtType == ISD::EXTLOAD && EnablePromoteAnyextLoad)
    return LD->getAlignment() >= 4 && LD->isSimple();
  return false;
}]>;

// Plain full-width loads for each scalar type.
def loadi64  : PatFrag<(ops node:$ptr), (i64 (load node:$ptr))>;
def loadf16  : PatFrag<(ops node:$ptr), (f16 (load node:$ptr))>;
def loadf32  : PatFrag<(ops node:$ptr), (f32 (load node:$ptr))>;
def loadf64  : PatFrag<(ops node:$ptr), (f64 (load node:$ptr))>;
def loadf80  : PatFrag<(ops node:$ptr), (f80 (load node:$ptr))>;
def loadf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr))>;

// f128 load that is at least naturally aligned for its store size.
def alignedloadf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr)), [{
  LoadSDNode *Ld = cast<LoadSDNode>(N);
  return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
}]>;

// f128 load usable as a folded memory operand: either the subtarget tolerates
// unaligned SSE memory accesses, or the load is naturally aligned.
def memopf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr)), [{
  LoadSDNode *Ld = cast<LoadSDNode>(N);
  return Subtarget->hasSSEUnalignedMem() ||
         Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
}]>;

// Extending-load fragments, named <ext>load<DstVT><MemVT>.
def sextloadi16i8  : PatFrag<(ops node:$ptr), (i16 (sextloadi8 node:$ptr))>;
def sextloadi32i8  : PatFrag<(ops node:$ptr), (i32 (sextloadi8 node:$ptr))>;
def sextloadi32i16 : PatFrag<(ops node:$ptr), (i32 (sextloadi16 node:$ptr))>;
def sextloadi64i8  : PatFrag<(ops node:$ptr), (i64 (sextloadi8 node:$ptr))>;
def sextloadi64i16 : PatFrag<(ops node:$ptr), (i64 (sextloadi16 node:$ptr))>;
def sextloadi64i32 : PatFrag<(ops node:$ptr), (i64 (sextloadi32 node:$ptr))>;

def zextloadi8i1   : PatFrag<(ops node:$ptr), (i8 (zextloadi1 node:$ptr))>;
def zextloadi16i1  : PatFrag<(ops node:$ptr), (i16 (zextloadi1 node:$ptr))>;
def zextloadi32i1  : PatFrag<(ops node:$ptr), (i32 (zextloadi1 node:$ptr))>;
def zextloadi16i8  : PatFrag<(ops node:$ptr), (i16 (zextloadi8 node:$ptr))>;
def zextloadi32i8  : PatFrag<(ops node:$ptr), (i32 (zextloadi8 node:$ptr))>;
def zextloadi32i16 : PatFrag<(ops node:$ptr), (i32 (zextloadi16 node:$ptr))>;
def zextloadi64i1  : PatFrag<(ops node:$ptr), (i64 (zextloadi1 node:$ptr))>;
def zextloadi64i8  : PatFrag<(ops node:$ptr), (i64 (zextloadi8 node:$ptr))>;
def zextloadi64i16 : PatFrag<(ops node:$ptr), (i64 (zextloadi16 node:$ptr))>;
def zextloadi64i32 : PatFrag<(ops node:$ptr), (i64 (zextloadi32 node:$ptr))>;

def extloadi8i1    : PatFrag<(ops node:$ptr), (i8 (extloadi1 node:$ptr))>;
def extloadi16i1   : PatFrag<(ops node:$ptr), (i16 (extloadi1 node:$ptr))>;
def extloadi32i1   : PatFrag<(ops node:$ptr), (i32 (extloadi1 node:$ptr))>;
def extloadi16i8   : PatFrag<(ops node:$ptr), (i16 (extloadi8 node:$ptr))>;
def extloadi32i8   : PatFrag<(ops node:$ptr), (i32 (extloadi8 node:$ptr))>;
def extloadi32i16  : PatFrag<(ops node:$ptr), (i32 (extloadi16 node:$ptr))>;
def extloadi64i1   : PatFrag<(ops node:$ptr), (i64 (extloadi1 node:$ptr))>;
def extloadi64i8   : PatFrag<(ops node:$ptr), (i64 (extloadi8 node:$ptr))>;
def extloadi64i16  : PatFrag<(ops node:$ptr), (i64 (extloadi16 node:$ptr))>;

// We can treat an i8/i16 extending load to i64 as a 32 bit load if its known
// to be 4 byte aligned or better.
def extloadi64i32 : PatFrag<(ops node:$ptr), (i64 (unindexedload node:$ptr)), [{
  LoadSDNode *LD = cast<LoadSDNode>(N);
  ISD::LoadExtType ExtType = LD->getExtensionType();
  if (ExtType != ISD::EXTLOAD)
    return false;
  // A true i32 extload always qualifies.
  if (LD->getMemoryVT() == MVT::i32)
    return true;
  // Smaller memory types qualify only when simple and 4-byte aligned.
  return LD->getAlignment() >= 4 && LD->isSimple();
}]>;

// An 'and' node with a single use.
def and_su : PatFrag<(ops node:$lhs, node:$rhs), (and node:$lhs, node:$rhs), [{
  return N->hasOneUse();
}]>;
// An 'srl' node with a single use.
def srl_su : PatFrag<(ops node:$lhs, node:$rhs), (srl node:$lhs, node:$rhs), [{
  return N->hasOneUse();
}]>;
// A 'trunc' node with a single use.
def trunc_su : PatFrag<(ops node:$src), (trunc node:$src), [{
  return N->hasOneUse();
}]>;
//===----------------------------------------------------------------------===//
// Instruction list.
//

// Nop
let hasSideEffects = 0, SchedRW = [WriteNop] in {
def NOOP  : I<0x90, RawFrm, (outs), (ins), "nop", []>;
// Multi-byte NOPs (0F 1F /0) with a memory operand, in 16/32/64-bit operand
// sizes.
def NOOPW : I<0x1f, MRMXm, (outs), (ins i16mem:$zero),
              "nop{w}\t$zero", []>, TB, OpSize16, NotMemoryFoldable;
def NOOPL : I<0x1f, MRMXm, (outs), (ins i32mem:$zero),
              "nop{l}\t$zero", []>, TB, OpSize32, NotMemoryFoldable;
def NOOPQ : RI<0x1f, MRMXm, (outs), (ins i64mem:$zero),
               "nop{q}\t$zero", []>, TB, NotMemoryFoldable,
               Requires<[In64BitMode]>;
// Also allow register so we can assemble/disassemble
def NOOPWr : I<0x1f, MRMXr, (outs), (ins GR16:$zero),
               "nop{w}\t$zero", []>, TB, OpSize16, NotMemoryFoldable;
def NOOPLr : I<0x1f, MRMXr, (outs), (ins GR32:$zero),
               "nop{l}\t$zero", []>, TB, OpSize32, NotMemoryFoldable;
def NOOPQr : RI<0x1f, MRMXr, (outs), (ins GR64:$zero),
                "nop{q}\t$zero", []>, TB, NotMemoryFoldable,
                Requires<[In64BitMode]>;
}

// Constructing a stack frame.
def ENTER : Ii16<0xC8, RawFrmImm8, (outs), (ins i16imm:$len, i8imm:$lvl),
                 "enter\t$len, $lvl", []>, Sched<[WriteMicrocoded]>;

// LEAVE tears down the frame: mov esp/rsp <- ebp/rbp, then pop ebp/rbp.
let SchedRW = [WriteALU] in {
let Defs = [EBP, ESP], Uses = [EBP, ESP], mayLoad = 1, hasSideEffects=0 in
def LEAVE : I<0xC9, RawFrm, (outs), (ins), "leave", []>,
              Requires<[Not64BitMode]>;
let Defs = [RBP,RSP], Uses = [RBP,RSP], mayLoad = 1, hasSideEffects = 0 in
def LEAVE64 : I<0xC9, RawFrm, (outs), (ins), "leave", []>,
              Requires<[In64BitMode]>;
} // SchedRW
//===----------------------------------------------------------------------===//
// Miscellaneous Instructions.
//

// Pseudo marking the SjLj exception-handling dispatch setup point; expanded
// via a custom inserter.
let isBarrier = 1, hasSideEffects = 1, usesCustomInserter = 1,
    SchedRW = [WriteSystem] in
  def Int_eh_sjlj_setup_dispatch
    : PseudoI<(outs), (ins), [(X86eh_sjlj_setup_dispatch)]>;

// 16/32-bit PUSH/POP forms; all implicitly adjust ESP.
let Defs = [ESP], Uses = [ESP], hasSideEffects=0 in {
let mayLoad = 1, SchedRW = [WriteLoad] in {
def POP16r  : I<0x58, AddRegFrm, (outs GR16:$reg), (ins), "pop{w}\t$reg", []>,
                OpSize16;
def POP32r  : I<0x58, AddRegFrm, (outs GR32:$reg), (ins), "pop{l}\t$reg", []>,
                OpSize32, Requires<[Not64BitMode]>;
// Long form for the disassembler.
let isCodeGenOnly = 1, ForceDisassemble = 1 in {
def POP16rmr: I<0x8F, MRM0r, (outs GR16:$reg), (ins), "pop{w}\t$reg", []>,
                OpSize16, NotMemoryFoldable;
def POP32rmr: I<0x8F, MRM0r, (outs GR32:$reg), (ins), "pop{l}\t$reg", []>,
                OpSize32, Requires<[Not64BitMode]>, NotMemoryFoldable;
} // isCodeGenOnly = 1, ForceDisassemble = 1
} // mayLoad, SchedRW

// Pop directly to memory: loads from the stack and stores to $dst.
let mayStore = 1, mayLoad = 1, SchedRW = [WriteCopy] in {
def POP16rmm: I<0x8F, MRM0m, (outs), (ins i16mem:$dst), "pop{w}\t$dst", []>,
                OpSize16;
def POP32rmm: I<0x8F, MRM0m, (outs), (ins i32mem:$dst), "pop{l}\t$dst", []>,
                OpSize32, Requires<[Not64BitMode]>;
} // mayStore, mayLoad, SchedRW

let mayStore = 1, SchedRW = [WriteStore] in {
def PUSH16r  : I<0x50, AddRegFrm, (outs), (ins GR16:$reg), "push{w}\t$reg",[]>,
                 OpSize16;
def PUSH32r  : I<0x50, AddRegFrm, (outs), (ins GR32:$reg), "push{l}\t$reg",[]>,
                 OpSize32, Requires<[Not64BitMode]>;
// Long form for the disassembler.
let isCodeGenOnly = 1, ForceDisassemble = 1 in {
def PUSH16rmr: I<0xFF, MRM6r, (outs), (ins GR16:$reg), "push{w}\t$reg",[]>,
                 OpSize16, NotMemoryFoldable;
def PUSH32rmr: I<0xFF, MRM6r, (outs), (ins GR32:$reg), "push{l}\t$reg",[]>,
                 OpSize32, Requires<[Not64BitMode]>, NotMemoryFoldable;
} // isCodeGenOnly = 1, ForceDisassemble = 1

// Immediate pushes: 6A ib (sign-extended 8-bit) and 68 iw/id forms.
def PUSH16i8 : Ii8<0x6a, RawFrm, (outs), (ins i16i8imm:$imm),
                   "push{w}\t$imm", []>, OpSize16;
def PUSHi16  : Ii16<0x68, RawFrm, (outs), (ins i16imm:$imm),
                    "push{w}\t$imm", []>, OpSize16;
def PUSH32i8 : Ii8<0x6a, RawFrm, (outs), (ins i32i8imm:$imm),
                   "push{l}\t$imm", []>, OpSize32,
                   Requires<[Not64BitMode]>;
def PUSHi32  : Ii32<0x68, RawFrm, (outs), (ins i32imm:$imm),
                    "push{l}\t$imm", []>, OpSize32,
                    Requires<[Not64BitMode]>;
} // mayStore, SchedRW

// Push directly from memory: loads $src and stores to the stack.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteCopy] in {
def PUSH16rmm: I<0xFF, MRM6m, (outs), (ins i16mem:$src), "push{w}\t$src", []>,
                 OpSize16;
def PUSH32rmm: I<0xFF, MRM6m, (outs), (ins i32mem:$src), "push{l}\t$src", []>,
                 OpSize32, Requires<[Not64BitMode]>;
} // mayLoad, mayStore, SchedRW
}
// Pseudos to read the EFLAGS register via a pushf/pop sequence; expanded by a
// custom inserter.
let mayLoad = 1, mayStore = 1, usesCustomInserter = 1,
    SchedRW = [WriteRMW], Defs = [ESP] in {
  let Uses = [ESP] in
  def RDFLAGS32 : PseudoI<(outs GR32:$dst), (ins),
                   [(set GR32:$dst, (int_x86_flags_read_u32))]>,
                Requires<[Not64BitMode]>;

  let Uses = [RSP] in
  def RDFLAGS64 : PseudoI<(outs GR64:$dst), (ins),
                   [(set GR64:$dst, (int_x86_flags_read_u64))]>,
                Requires<[In64BitMode]>;
}

// Pseudos to write the EFLAGS register via a push/popf sequence; expanded by
// a custom inserter.
let mayLoad = 1, mayStore = 1, usesCustomInserter = 1,
    SchedRW = [WriteRMW] in {
  let Defs = [ESP, EFLAGS, DF], Uses = [ESP] in
  def WRFLAGS32 : PseudoI<(outs), (ins GR32:$src),
                   [(int_x86_flags_write_u32 GR32:$src)]>,
                Requires<[Not64BitMode]>;

  let Defs = [RSP, EFLAGS, DF], Uses = [RSP] in
  def WRFLAGS64 : PseudoI<(outs), (ins GR64:$src),
                   [(int_x86_flags_write_u64 GR64:$src)]>,
                Requires<[In64BitMode]>;
}

// POPF loads EFLAGS (including DF) from the stack.
let Defs = [ESP, EFLAGS, DF], Uses = [ESP], mayLoad = 1, hasSideEffects=0,
    SchedRW = [WriteLoad] in {
def POPF16   : I<0x9D, RawFrm, (outs), (ins), "popf{w}", []>, OpSize16;
def POPF32   : I<0x9D, RawFrm, (outs), (ins), "popf{l|d}", []>, OpSize32,
                 Requires<[Not64BitMode]>;
}

// PUSHF stores EFLAGS (including DF) to the stack.
let Defs = [ESP], Uses = [ESP, EFLAGS, DF], mayStore = 1, hasSideEffects=0,
    SchedRW = [WriteStore] in {
def PUSHF16  : I<0x9C, RawFrm, (outs), (ins), "pushf{w}", []>, OpSize16;
def PUSHF32  : I<0x9C, RawFrm, (outs), (ins), "pushf{l|d}", []>, OpSize32,
                 Requires<[Not64BitMode]>;
}
// 64-bit PUSH/POP forms; all implicitly adjust RSP. Note they carry OpSize32
// because 64-bit mode defaults push/pop to 64-bit operand size.
let Defs = [RSP], Uses = [RSP], hasSideEffects=0 in {
let mayLoad = 1, SchedRW = [WriteLoad] in {
def POP64r   : I<0x58, AddRegFrm, (outs GR64:$reg), (ins), "pop{q}\t$reg", []>,
                 OpSize32, Requires<[In64BitMode]>;
// Long form for the disassembler.
let isCodeGenOnly = 1, ForceDisassemble = 1 in {
def POP64rmr: I<0x8F, MRM0r, (outs GR64:$reg), (ins), "pop{q}\t$reg", []>,
                OpSize32, Requires<[In64BitMode]>, NotMemoryFoldable;
} // isCodeGenOnly = 1, ForceDisassemble = 1
} // mayLoad, SchedRW
// Pop directly to memory.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteCopy] in
def POP64rmm: I<0x8F, MRM0m, (outs), (ins i64mem:$dst), "pop{q}\t$dst", []>,
                OpSize32, Requires<[In64BitMode]>;
let mayStore = 1, SchedRW = [WriteStore] in {
def PUSH64r  : I<0x50, AddRegFrm, (outs), (ins GR64:$reg), "push{q}\t$reg", []>,
                 OpSize32, Requires<[In64BitMode]>;
// Long form for the disassembler.
let isCodeGenOnly = 1, ForceDisassemble = 1 in {
def PUSH64rmr: I<0xFF, MRM6r, (outs), (ins GR64:$reg), "push{q}\t$reg", []>,
                 OpSize32, Requires<[In64BitMode]>, NotMemoryFoldable;
} // isCodeGenOnly = 1, ForceDisassemble = 1
} // mayStore, SchedRW
// Push directly from memory.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteCopy] in {
def PUSH64rmm: I<0xFF, MRM6m, (outs), (ins i64mem:$src), "push{q}\t$src", []>,
                 OpSize32, Requires<[In64BitMode]>;
} // mayLoad, mayStore, SchedRW
}

// Immediate pushes in 64-bit mode (sign-extended to 64 bits).
let Defs = [RSP], Uses = [RSP], hasSideEffects = 0, mayStore = 1,
    SchedRW = [WriteStore] in {
def PUSH64i8   : Ii8<0x6a, RawFrm, (outs), (ins i64i8imm:$imm),
                     "push{q}\t$imm", []>, OpSize32,
                     Requires<[In64BitMode]>;
def PUSH64i32  : Ii32S<0x68, RawFrm, (outs), (ins i64i32imm:$imm),
                       "push{q}\t$imm", []>, OpSize32,
                       Requires<[In64BitMode]>;
}

// 64-bit flag push/pop.
let Defs = [RSP, EFLAGS, DF], Uses = [RSP], mayLoad = 1, hasSideEffects=0 in
def POPF64   : I<0x9D, RawFrm, (outs), (ins), "popfq", []>,
               OpSize32, Requires<[In64BitMode]>, Sched<[WriteLoad]>;
let Defs = [RSP], Uses = [RSP, EFLAGS, DF], mayStore = 1, hasSideEffects=0 in
def PUSHF64    : I<0x9C, RawFrm, (outs), (ins), "pushfq", []>,
                 OpSize32, Requires<[In64BitMode]>, Sched<[WriteStore]>;
// POPA/PUSHA: pop/push all eight general-purpose registers (32-bit modes
// only; invalid in 64-bit mode).
let Defs = [EDI, ESI, EBP, EBX, EDX, ECX, EAX, ESP], Uses = [ESP],
    mayLoad = 1, hasSideEffects = 0, SchedRW = [WriteLoad] in {
def POPA32   : I<0x61, RawFrm, (outs), (ins), "popal", []>,
               OpSize32, Requires<[Not64BitMode]>;
def POPA16   : I<0x61, RawFrm, (outs), (ins), "popaw", []>,
               OpSize16, Requires<[Not64BitMode]>;
}
let Defs = [ESP], Uses = [EDI, ESI, EBP, EBX, EDX, ECX, EAX, ESP],
    mayStore = 1, hasSideEffects = 0, SchedRW = [WriteStore] in {
def PUSHA32  : I<0x60, RawFrm, (outs), (ins), "pushal", []>,
               OpSize32, Requires<[Not64BitMode]>;
def PUSHA16  : I<0x60, RawFrm, (outs), (ins), "pushaw", []>,
               OpSize16, Requires<[Not64BitMode]>;
}

// Byte-swap instructions (read-modify-write on a single register).
let Constraints = "$src = $dst", SchedRW = [WriteBSWAP32] in {
// This instruction is a consequence of BSWAP32r observing operand size. The
// encoding is valid, but the behavior is undefined.
let isCodeGenOnly = 1, ForceDisassemble = 1, hasSideEffects = 0 in
def BSWAP16r_BAD : I<0xC8, AddRegFrm, (outs GR16:$dst), (ins GR16:$src),
                     "bswap{w}\t$dst", []>, OpSize16, TB;
// GR32 = bswap GR32
def BSWAP32r : I<0xC8, AddRegFrm, (outs GR32:$dst), (ins GR32:$src),
                 "bswap{l}\t$dst",
                 [(set GR32:$dst, (bswap GR32:$src))]>, OpSize32, TB;

let SchedRW = [WriteBSWAP64] in
def BSWAP64r : RI<0xC8, AddRegFrm, (outs GR64:$dst), (ins GR64:$src),
                  "bswap{q}\t$dst",
                  [(set GR64:$dst, (bswap GR64:$src))]>, TB;
} // Constraints = "$src = $dst", SchedRW
// Bit scan instructions.
// BSF scans forward (lowest set bit), BSR scans in reverse (highest set bit);
// both set ZF per their EFLAGS definition, reflected as the EFLAGS result.
let Defs = [EFLAGS] in {
def BSF16rr  : I<0xBC, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src),
                 "bsf{w}\t{$src, $dst|$dst, $src}",
                 [(set GR16:$dst, EFLAGS, (X86bsf GR16:$src))]>,
                  PS, OpSize16, Sched<[WriteBSF]>;
def BSF16rm  : I<0xBC, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                 "bsf{w}\t{$src, $dst|$dst, $src}",
                 [(set GR16:$dst, EFLAGS, (X86bsf (loadi16 addr:$src)))]>,
                  PS, OpSize16, Sched<[WriteBSFLd]>;
def BSF32rr  : I<0xBC, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src),
                 "bsf{l}\t{$src, $dst|$dst, $src}",
                 [(set GR32:$dst, EFLAGS, (X86bsf GR32:$src))]>,
                 PS, OpSize32, Sched<[WriteBSF]>;
def BSF32rm  : I<0xBC, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                 "bsf{l}\t{$src, $dst|$dst, $src}",
                 [(set GR32:$dst, EFLAGS, (X86bsf (loadi32 addr:$src)))]>,
                 PS, OpSize32, Sched<[WriteBSFLd]>;
def BSF64rr  : RI<0xBC, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src),
                  "bsf{q}\t{$src, $dst|$dst, $src}",
                  [(set GR64:$dst, EFLAGS, (X86bsf GR64:$src))]>,
                  PS, Sched<[WriteBSF]>;
def BSF64rm  : RI<0xBC, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src),
                  "bsf{q}\t{$src, $dst|$dst, $src}",
                  [(set GR64:$dst, EFLAGS, (X86bsf (loadi64 addr:$src)))]>,
                  PS, Sched<[WriteBSFLd]>;

def BSR16rr  : I<0xBD, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src),
                 "bsr{w}\t{$src, $dst|$dst, $src}",
                 [(set GR16:$dst, EFLAGS, (X86bsr GR16:$src))]>,
                 PS, OpSize16, Sched<[WriteBSR]>;
def BSR16rm  : I<0xBD, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                 "bsr{w}\t{$src, $dst|$dst, $src}",
                 [(set GR16:$dst, EFLAGS, (X86bsr (loadi16 addr:$src)))]>,
                 PS, OpSize16, Sched<[WriteBSRLd]>;
def BSR32rr  : I<0xBD, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src),
                 "bsr{l}\t{$src, $dst|$dst, $src}",
                 [(set GR32:$dst, EFLAGS, (X86bsr GR32:$src))]>,
                 PS, OpSize32, Sched<[WriteBSR]>;
def BSR32rm  : I<0xBD, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                 "bsr{l}\t{$src, $dst|$dst, $src}",
                 [(set GR32:$dst, EFLAGS, (X86bsr (loadi32 addr:$src)))]>,
                 PS, OpSize32, Sched<[WriteBSRLd]>;
def BSR64rr  : RI<0xBD, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src),
                  "bsr{q}\t{$src, $dst|$dst, $src}",
                  [(set GR64:$dst, EFLAGS, (X86bsr GR64:$src))]>,
                  PS, Sched<[WriteBSR]>;
def BSR64rm  : RI<0xBD, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src),
                  "bsr{q}\t{$src, $dst|$dst, $src}",
                  [(set GR64:$dst, EFLAGS, (X86bsr (loadi64 addr:$src)))]>,
                  PS, Sched<[WriteBSRLd]>;
} // Defs = [EFLAGS]
// String instructions. All are microcoded and use the implicit index
// registers (E/RSI, E/RDI) plus the direction flag DF.
let SchedRW = [WriteMicrocoded] in {
// String move: copies [SI] -> [DI], advancing both indices per DF.
let Defs = [EDI,ESI], Uses = [EDI,ESI,DF] in {
def MOVSB : I<0xA4, RawFrmDstSrc, (outs), (ins dstidx8:$dst, srcidx8:$src),
              "movsb\t{$src, $dst|$dst, $src}", []>;
def MOVSW : I<0xA5, RawFrmDstSrc, (outs), (ins dstidx16:$dst, srcidx16:$src),
              "movsw\t{$src, $dst|$dst, $src}", []>, OpSize16;
def MOVSL : I<0xA5, RawFrmDstSrc, (outs), (ins dstidx32:$dst, srcidx32:$src),
              "movs{l|d}\t{$src, $dst|$dst, $src}", []>, OpSize32;
def MOVSQ : RI<0xA5, RawFrmDstSrc, (outs), (ins dstidx64:$dst, srcidx64:$src),
               "movsq\t{$src, $dst|$dst, $src}", []>,
               Requires<[In64BitMode]>;
}

// String store: writes AL/AX/EAX/RAX to [DI], advancing DI per DF.
let Defs = [EDI], Uses = [AL,EDI,DF] in
def STOSB : I<0xAA, RawFrmDst, (outs), (ins dstidx8:$dst),
              "stosb\t{%al, $dst|$dst, al}", []>;
let Defs = [EDI], Uses = [AX,EDI,DF] in
def STOSW : I<0xAB, RawFrmDst, (outs), (ins dstidx16:$dst),
              "stosw\t{%ax, $dst|$dst, ax}", []>, OpSize16;
let Defs = [EDI], Uses = [EAX,EDI,DF] in
def STOSL : I<0xAB, RawFrmDst, (outs), (ins dstidx32:$dst),
              "stos{l|d}\t{%eax, $dst|$dst, eax}", []>, OpSize32;
let Defs = [RDI], Uses = [RAX,RDI,DF] in
def STOSQ : RI<0xAB, RawFrmDst, (outs), (ins dstidx64:$dst),
               "stosq\t{%rax, $dst|$dst, rax}", []>,
               Requires<[In64BitMode]>;

// String scan: compares the accumulator with [DI], setting EFLAGS.
let Defs = [EDI,EFLAGS], Uses = [AL,EDI,DF] in
def SCASB : I<0xAE, RawFrmDst, (outs), (ins dstidx8:$dst),
              "scasb\t{$dst, %al|al, $dst}", []>;
let Defs = [EDI,EFLAGS], Uses = [AX,EDI,DF] in
def SCASW : I<0xAF, RawFrmDst, (outs), (ins dstidx16:$dst),
              "scasw\t{$dst, %ax|ax, $dst}", []>, OpSize16;
let Defs = [EDI,EFLAGS], Uses = [EAX,EDI,DF] in
def SCASL : I<0xAF, RawFrmDst, (outs), (ins dstidx32:$dst),
              "scas{l|d}\t{$dst, %eax|eax, $dst}", []>, OpSize32;
let Defs = [EDI,EFLAGS], Uses = [RAX,EDI,DF] in
def SCASQ : RI<0xAF, RawFrmDst, (outs), (ins dstidx64:$dst),
               "scasq\t{$dst, %rax|rax, $dst}", []>,
               Requires<[In64BitMode]>;

// String compare: compares [SI] with [DI], setting EFLAGS.
let Defs = [EDI,ESI,EFLAGS], Uses = [EDI,ESI,DF] in {
def CMPSB : I<0xA6, RawFrmDstSrc, (outs), (ins dstidx8:$dst, srcidx8:$src),
              "cmpsb\t{$dst, $src|$src, $dst}", []>;
def CMPSW : I<0xA7, RawFrmDstSrc, (outs), (ins dstidx16:$dst, srcidx16:$src),
              "cmpsw\t{$dst, $src|$src, $dst}", []>, OpSize16;
def CMPSL : I<0xA7, RawFrmDstSrc, (outs), (ins dstidx32:$dst, srcidx32:$src),
              "cmps{l|d}\t{$dst, $src|$src, $dst}", []>, OpSize32;
def CMPSQ : RI<0xA7, RawFrmDstSrc, (outs), (ins dstidx64:$dst, srcidx64:$src),
               "cmpsq\t{$dst, $src|$src, $dst}", []>,
               Requires<[In64BitMode]>;
}
} // SchedRW
//===----------------------------------------------------------------------===//
// Move Instructions.
//
let SchedRW = [WriteMove] in {
// Register-to-register moves (no patterns; selected as copies).
let hasSideEffects = 0, isMoveReg = 1 in {
def MOV8rr  : I<0x88, MRMDestReg, (outs GR8 :$dst), (ins GR8 :$src),
                "mov{b}\t{$src, $dst|$dst, $src}", []>;
def MOV16rr : I<0x89, MRMDestReg, (outs GR16:$dst), (ins GR16:$src),
                "mov{w}\t{$src, $dst|$dst, $src}", []>, OpSize16;
def MOV32rr : I<0x89, MRMDestReg, (outs GR32:$dst), (ins GR32:$src),
                "mov{l}\t{$src, $dst|$dst, $src}", []>, OpSize32;
def MOV64rr : RI<0x89, MRMDestReg, (outs GR64:$dst), (ins GR64:$src),
                 "mov{q}\t{$src, $dst|$dst, $src}", []>;
}

// Immediate moves. MOV64ri32 handles 64-bit immediates that fit in a
// sign-extended 32-bit field via the C7 /0 encoding.
let isReMaterializable = 1, isAsCheapAsAMove = 1, isMoveImm = 1 in {
def MOV8ri  : Ii8 <0xB0, AddRegFrm, (outs GR8 :$dst), (ins i8imm :$src),
                   "mov{b}\t{$src, $dst|$dst, $src}",
                   [(set GR8:$dst, imm:$src)]>;
def MOV16ri : Ii16<0xB8, AddRegFrm, (outs GR16:$dst), (ins i16imm:$src),
                   "mov{w}\t{$src, $dst|$dst, $src}",
                   [(set GR16:$dst, imm:$src)]>, OpSize16;
def MOV32ri : Ii32<0xB8, AddRegFrm, (outs GR32:$dst), (ins i32imm:$src),
                   "mov{l}\t{$src, $dst|$dst, $src}",
                   [(set GR32:$dst, imm:$src)]>, OpSize32;
def MOV64ri32 : RIi32S<0xC7, MRM0r, (outs GR64:$dst), (ins i64i32imm:$src),
                       "mov{q}\t{$src, $dst|$dst, $src}",
                       [(set GR64:$dst, i64immSExt32:$src)]>;
}
// Full 64-bit immediate (movabs); rematerializable but not "as cheap as a
// move" due to the 10-byte encoding.
let isReMaterializable = 1, isMoveImm = 1 in {
def MOV64ri : RIi64<0xB8, AddRegFrm, (outs GR64:$dst), (ins i64imm:$src),
                    "movabs{q}\t{$src, $dst|$dst, $src}",
                    [(set GR64:$dst, imm:$src)]>;
}

// Longer forms that use a ModR/M byte. Needed for disassembler
let isCodeGenOnly = 1, ForceDisassemble = 1, hasSideEffects = 0 in {
def MOV8ri_alt  : Ii8 <0xC6, MRM0r, (outs GR8 :$dst), (ins i8imm :$src),
                   "mov{b}\t{$src, $dst|$dst, $src}", []>,
                   FoldGenData<"MOV8ri">;
def MOV16ri_alt : Ii16<0xC7, MRM0r, (outs GR16:$dst), (ins i16imm:$src),
                   "mov{w}\t{$src, $dst|$dst, $src}", []>, OpSize16,
                   FoldGenData<"MOV16ri">;
def MOV32ri_alt : Ii32<0xC7, MRM0r, (outs GR32:$dst), (ins i32imm:$src),
                   "mov{l}\t{$src, $dst|$dst, $src}", []>, OpSize32,
                   FoldGenData<"MOV32ri">;
}
} // SchedRW
// Immediate-to-memory moves. The "_su" leaves restrict matching when reusing
// a register would be smaller at minsize.
let SchedRW = [WriteStore] in {
def MOV8mi  : Ii8 <0xC6, MRM0m, (outs), (ins i8mem :$dst, i8imm :$src),
                   "mov{b}\t{$src, $dst|$dst, $src}",
                   [(store (i8 imm_su:$src), addr:$dst)]>;
def MOV16mi : Ii16<0xC7, MRM0m, (outs), (ins i16mem:$dst, i16imm:$src),
                   "mov{w}\t{$src, $dst|$dst, $src}",
                   [(store (i16 imm_su:$src), addr:$dst)]>, OpSize16;
def MOV32mi : Ii32<0xC7, MRM0m, (outs), (ins i32mem:$dst, i32imm:$src),
                   "mov{l}\t{$src, $dst|$dst, $src}",
                   [(store (i32 imm_su:$src), addr:$dst)]>, OpSize32;
// 64-bit store of a sign-extended 32-bit immediate.
def MOV64mi32 : RIi32S<0xC7, MRM0m, (outs), (ins i64mem:$dst, i64i32imm:$src),
                       "mov{q}\t{$src, $dst|$dst, $src}",
                       [(store i64immSExt32_su:$src, addr:$dst)]>,
                       Requires<[In64BitMode]>;
} // SchedRW
  1453. def : Pat<(i32 relocImm:$src), (MOV32ri relocImm:$src)>;
  1454. def : Pat<(i64 relocImm:$src), (MOV64ri relocImm:$src)>;
  1455. def : Pat<(store (i8 relocImm8_su:$src), addr:$dst),
  1456. (MOV8mi addr:$dst, relocImm8_su:$src)>;
  1457. def : Pat<(store (i16 relocImm16_su:$src), addr:$dst),
  1458. (MOV16mi addr:$dst, relocImm16_su:$src)>;
  1459. def : Pat<(store (i32 relocImm32_su:$src), addr:$dst),
  1460. (MOV32mi addr:$dst, relocImm32_su:$src)>;
  1461. def : Pat<(store (i64 i64relocImmSExt32_su:$src), addr:$dst),
  1462. (MOV64mi32 addr:$dst, i64immSExt32_su:$src)>;
let hasSideEffects = 0 in {

/// Memory offset versions of moves. The immediate is an address mode sized
/// offset from the segment base.
let SchedRW = [WriteALU] in {
// Loads into the accumulator (A0/A1) with 32-bit or 16-bit address size.
let mayLoad = 1 in {
let Defs = [AL] in
def MOV8ao32 : Ii32<0xA0, RawFrmMemOffs, (outs), (ins offset32_8:$src),
                    "mov{b}\t{$src, %al|al, $src}", []>,
                    AdSize32;
let Defs = [AX] in
def MOV16ao32 : Ii32<0xA1, RawFrmMemOffs, (outs), (ins offset32_16:$src),
                     "mov{w}\t{$src, %ax|ax, $src}", []>,
                     OpSize16, AdSize32;
let Defs = [EAX] in
def MOV32ao32 : Ii32<0xA1, RawFrmMemOffs, (outs), (ins offset32_32:$src),
                     "mov{l}\t{$src, %eax|eax, $src}", []>,
                     OpSize32, AdSize32;
let Defs = [RAX] in
def MOV64ao32 : RIi32<0xA1, RawFrmMemOffs, (outs), (ins offset32_64:$src),
                      "mov{q}\t{$src, %rax|rax, $src}", []>,
                      AdSize32;

let Defs = [AL] in
def MOV8ao16 : Ii16<0xA0, RawFrmMemOffs, (outs), (ins offset16_8:$src),
                    "mov{b}\t{$src, %al|al, $src}", []>, AdSize16;
let Defs = [AX] in
def MOV16ao16 : Ii16<0xA1, RawFrmMemOffs, (outs), (ins offset16_16:$src),
                     "mov{w}\t{$src, %ax|ax, $src}", []>,
                     OpSize16, AdSize16;
let Defs = [EAX] in
def MOV32ao16 : Ii16<0xA1, RawFrmMemOffs, (outs), (ins offset16_32:$src),
                     "mov{l}\t{$src, %eax|eax, $src}", []>,
                     AdSize16, OpSize32;
} // mayLoad
// Stores from the accumulator (A2/A3).
let mayStore = 1 in {
let Uses = [AL] in
def MOV8o32a : Ii32<0xA2, RawFrmMemOffs, (outs), (ins offset32_8:$dst),
                    "mov{b}\t{%al, $dst|$dst, al}", []>, AdSize32;
let Uses = [AX] in
def MOV16o32a : Ii32<0xA3, RawFrmMemOffs, (outs), (ins offset32_16:$dst),
                     "mov{w}\t{%ax, $dst|$dst, ax}", []>,
                     OpSize16, AdSize32;
let Uses = [EAX] in
def MOV32o32a : Ii32<0xA3, RawFrmMemOffs, (outs), (ins offset32_32:$dst),
                     "mov{l}\t{%eax, $dst|$dst, eax}", []>,
                     OpSize32, AdSize32;
let Uses = [RAX] in
def MOV64o32a : RIi32<0xA3, RawFrmMemOffs, (outs), (ins offset32_64:$dst),
                      "mov{q}\t{%rax, $dst|$dst, rax}", []>,
                      AdSize32;

let Uses = [AL] in
def MOV8o16a : Ii16<0xA2, RawFrmMemOffs, (outs), (ins offset16_8:$dst),
                    "mov{b}\t{%al, $dst|$dst, al}", []>, AdSize16;
let Uses = [AX] in
def MOV16o16a : Ii16<0xA3, RawFrmMemOffs, (outs), (ins offset16_16:$dst),
                     "mov{w}\t{%ax, $dst|$dst, ax}", []>,
                     OpSize16, AdSize16;
let Uses = [EAX] in
def MOV32o16a : Ii16<0xA3, RawFrmMemOffs, (outs), (ins offset16_32:$dst),
                     "mov{l}\t{%eax, $dst|$dst, eax}", []>,
                     OpSize32, AdSize16;
} // mayStore

// These forms all have full 64-bit absolute addresses in their instructions
// and use the movabs mnemonic to indicate this specific form.
let mayLoad = 1 in {
let Defs = [AL] in
def MOV8ao64 : Ii64<0xA0, RawFrmMemOffs, (outs), (ins offset64_8:$src),
                    "movabs{b}\t{$src, %al|al, $src}", []>,
                    AdSize64;
let Defs = [AX] in
def MOV16ao64 : Ii64<0xA1, RawFrmMemOffs, (outs), (ins offset64_16:$src),
                     "movabs{w}\t{$src, %ax|ax, $src}", []>,
                     OpSize16, AdSize64;
let Defs = [EAX] in
def MOV32ao64 : Ii64<0xA1, RawFrmMemOffs, (outs), (ins offset64_32:$src),
                     "movabs{l}\t{$src, %eax|eax, $src}", []>,
                     OpSize32, AdSize64;
let Defs = [RAX] in
def MOV64ao64 : RIi64<0xA1, RawFrmMemOffs, (outs), (ins offset64_64:$src),
                      "movabs{q}\t{$src, %rax|rax, $src}", []>,
                      AdSize64;
} // mayLoad

let mayStore = 1 in {
let Uses = [AL] in
def MOV8o64a : Ii64<0xA2, RawFrmMemOffs, (outs), (ins offset64_8:$dst),
                    "movabs{b}\t{%al, $dst|$dst, al}", []>,
                    AdSize64;
let Uses = [AX] in
def MOV16o64a : Ii64<0xA3, RawFrmMemOffs, (outs), (ins offset64_16:$dst),
                     "movabs{w}\t{%ax, $dst|$dst, ax}", []>,
                     OpSize16, AdSize64;
let Uses = [EAX] in
def MOV32o64a : Ii64<0xA3, RawFrmMemOffs, (outs), (ins offset64_32:$dst),
                     "movabs{l}\t{%eax, $dst|$dst, eax}", []>,
                     OpSize32, AdSize64;
let Uses = [RAX] in
def MOV64o64a : RIi64<0xA3, RawFrmMemOffs, (outs), (ins offset64_64:$dst),
                      "movabs{q}\t{%rax, $dst|$dst, rax}", []>,
                      AdSize64;
} // mayStore
} // SchedRW
} // hasSideEffects = 0
// Alternate ("reversed") encodings of register-to-register MOV: opcodes
// 0x8A/0x8B with MRMSrcReg instead of the canonical 0x88/0x89 MRMDestReg
// forms. isCodeGenOnly + ForceDisassemble keeps them out of the assembler
// match tables while still letting the disassembler recognize the byte
// patterns; FoldGenData ties each to its canonical twin.
let isCodeGenOnly = 1, ForceDisassemble = 1, hasSideEffects = 0,
    SchedRW = [WriteMove], isMoveReg = 1 in {
def MOV8rr_REV : I<0x8A, MRMSrcReg, (outs GR8:$dst), (ins GR8:$src),
                   "mov{b}\t{$src, $dst|$dst, $src}", []>,
                 FoldGenData<"MOV8rr">;
def MOV16rr_REV : I<0x8B, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src),
                    "mov{w}\t{$src, $dst|$dst, $src}", []>, OpSize16,
                  FoldGenData<"MOV16rr">;
def MOV32rr_REV : I<0x8B, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src),
                    "mov{l}\t{$src, $dst|$dst, $src}", []>, OpSize32,
                  FoldGenData<"MOV32rr">;
def MOV64rr_REV : RI<0x8B, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src),
                     "mov{q}\t{$src, $dst|$dst, $src}", []>,
                  FoldGenData<"MOV64rr">;
}
// Reversed version with ".s" suffix for GAS compatibility: each alias maps
// the ".s" spelling onto the _REV encoding above. Trailing 0 = never used
// for printing; the suffix-less "mov.s" forms are AT&T-syntax only.
def : InstAlias<"mov{b}.s\t{$src, $dst|$dst, $src}",
                (MOV8rr_REV GR8:$dst, GR8:$src), 0>;
def : InstAlias<"mov{w}.s\t{$src, $dst|$dst, $src}",
                (MOV16rr_REV GR16:$dst, GR16:$src), 0>;
def : InstAlias<"mov{l}.s\t{$src, $dst|$dst, $src}",
                (MOV32rr_REV GR32:$dst, GR32:$src), 0>;
def : InstAlias<"mov{q}.s\t{$src, $dst|$dst, $src}",
                (MOV64rr_REV GR64:$dst, GR64:$src), 0>;
def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}",
                (MOV8rr_REV GR8:$dst, GR8:$src), 0, "att">;
def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}",
                (MOV16rr_REV GR16:$dst, GR16:$src), 0, "att">;
def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}",
                (MOV32rr_REV GR32:$dst, GR32:$src), 0, "att">;
def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}",
                (MOV64rr_REV GR64:$dst, GR64:$src), 0, "att">;
// Memory-to-register MOV loads with ISel patterns. canFoldAsLoad and
// isReMaterializable let the register allocator fold/rematerialize them.
let canFoldAsLoad = 1, isReMaterializable = 1, SchedRW = [WriteLoad] in {
def MOV8rm : I<0x8A, MRMSrcMem, (outs GR8 :$dst), (ins i8mem :$src),
               "mov{b}\t{$src, $dst|$dst, $src}",
               [(set GR8:$dst, (loadi8 addr:$src))]>;
def MOV16rm : I<0x8B, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                "mov{w}\t{$src, $dst|$dst, $src}",
                [(set GR16:$dst, (loadi16 addr:$src))]>, OpSize16;
def MOV32rm : I<0x8B, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                "mov{l}\t{$src, $dst|$dst, $src}",
                [(set GR32:$dst, (loadi32 addr:$src))]>, OpSize32;
def MOV64rm : RI<0x8B, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src),
                 "mov{q}\t{$src, $dst|$dst, $src}",
                 [(set GR64:$dst, (load addr:$src))]>;
}
// Register-to-memory MOV stores with ISel patterns.
let SchedRW = [WriteStore] in {
def MOV8mr : I<0x88, MRMDestMem, (outs), (ins i8mem :$dst, GR8 :$src),
               "mov{b}\t{$src, $dst|$dst, $src}",
               [(store GR8:$src, addr:$dst)]>;
def MOV16mr : I<0x89, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src),
                "mov{w}\t{$src, $dst|$dst, $src}",
                [(store GR16:$src, addr:$dst)]>, OpSize16;
def MOV32mr : I<0x89, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src),
                "mov{l}\t{$src, $dst|$dst, $src}",
                [(store GR32:$src, addr:$dst)]>, OpSize32;
def MOV64mr : RI<0x89, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src),
                 "mov{q}\t{$src, $dst|$dst, $src}",
                 [(store GR64:$src, addr:$dst)]>;
} // SchedRW
// Versions of MOV8rr, MOV8mr, and MOV8rm that use i8mem_NOREX and GR8_NOREX so
// that they can be used for copying and storing h registers, which can't be
// encoded when a REX prefix is present. CodeGen-only: the assembler never
// needs these distinct names.
let isCodeGenOnly = 1 in {
let hasSideEffects = 0, isMoveReg = 1 in
def MOV8rr_NOREX : I<0x88, MRMDestReg,
                     (outs GR8_NOREX:$dst), (ins GR8_NOREX:$src),
                     "mov{b}\t{$src, $dst|$dst, $src}", []>,
                   Sched<[WriteMove]>;
let mayStore = 1, hasSideEffects = 0 in
def MOV8mr_NOREX : I<0x88, MRMDestMem,
                     (outs), (ins i8mem_NOREX:$dst, GR8_NOREX:$src),
                     "mov{b}\t{$src, $dst|$dst, $src}", []>,
                   Sched<[WriteStore]>;
let mayLoad = 1, hasSideEffects = 0,
    canFoldAsLoad = 1, isReMaterializable = 1 in
def MOV8rm_NOREX : I<0x8A, MRMSrcMem,
                     (outs GR8_NOREX:$dst), (ins i8mem_NOREX:$src),
                     "mov{b}\t{$src, $dst|$dst, $src}", []>,
                   Sched<[WriteLoad]>;
}
// Condition code ops, incl. set if equal/not equal/...
// SAHF/LAHF move flags between AH and EFLAGS; gated on the LAHFSAHF
// feature (not universally available in 64-bit mode).
let SchedRW = [WriteLAHFSAHF] in {
let Defs = [EFLAGS], Uses = [AH], hasSideEffects = 0 in
def SAHF : I<0x9E, RawFrm, (outs), (ins), "sahf", []>,  // flags = AH
           Requires<[HasLAHFSAHF]>;
let Defs = [AH], Uses = [EFLAGS], hasSideEffects = 0 in
def LAHF : I<0x9F, RawFrm, (outs), (ins), "lahf", []>,  // AH = flags
           Requires<[HasLAHFSAHF]>;
} // SchedRW
  1654. //===----------------------------------------------------------------------===//
  1655. // Bit tests instructions: BT, BTS, BTR, BTC.
  1656. let Defs = [EFLAGS] in {
// BT reg,reg: CF = bit $src2 (mod operand size) of $src1; has ISel
// patterns via the X86bt node.
let SchedRW = [WriteBitTest] in {
def BT16rr : I<0xA3, MRMDestReg, (outs), (ins GR16:$src1, GR16:$src2),
               "bt{w}\t{$src2, $src1|$src1, $src2}",
               [(set EFLAGS, (X86bt GR16:$src1, GR16:$src2))]>,
             OpSize16, TB, NotMemoryFoldable;
def BT32rr : I<0xA3, MRMDestReg, (outs), (ins GR32:$src1, GR32:$src2),
               "bt{l}\t{$src2, $src1|$src1, $src2}",
               [(set EFLAGS, (X86bt GR32:$src1, GR32:$src2))]>,
             OpSize32, TB, NotMemoryFoldable;
def BT64rr : RI<0xA3, MRMDestReg, (outs), (ins GR64:$src1, GR64:$src2),
                "bt{q}\t{$src2, $src1|$src1, $src2}",
                [(set EFLAGS, (X86bt GR64:$src1, GR64:$src2))]>, TB,
             NotMemoryFoldable;
} // SchedRW

// Unlike with the register+register form, the memory+register form of the
// bt instruction does not ignore the high bits of the index. From ISel's
// perspective, this is pretty bizarre. Make these instructions disassembly
// only for now. These instructions are also slow on modern CPUs so that's
// another reason to avoid generating them.
let mayLoad = 1, hasSideEffects = 0, SchedRW = [WriteBitTestRegLd] in {
def BT16mr : I<0xA3, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2),
               "bt{w}\t{$src2, $src1|$src1, $src2}",
               []>, OpSize16, TB, NotMemoryFoldable;
def BT32mr : I<0xA3, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2),
               "bt{l}\t{$src2, $src1|$src1, $src2}",
               []>, OpSize32, TB, NotMemoryFoldable;
def BT64mr : RI<0xA3, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2),
                "bt{q}\t{$src2, $src1|$src1, $src2}",
                []>, TB, NotMemoryFoldable;
}

// BT reg,imm8 (opcode 0F BA /4): bit index given as an unsigned 8-bit
// immediate; selectable via the X86bt patterns.
let SchedRW = [WriteBitTest] in {
def BT16ri8 : Ii8<0xBA, MRM4r, (outs), (ins GR16:$src1, i16u8imm:$src2),
                  "bt{w}\t{$src2, $src1|$src1, $src2}",
                  [(set EFLAGS, (X86bt GR16:$src1, imm:$src2))]>,
              OpSize16, TB;
def BT32ri8 : Ii8<0xBA, MRM4r, (outs), (ins GR32:$src1, i32u8imm:$src2),
                  "bt{l}\t{$src2, $src1|$src1, $src2}",
                  [(set EFLAGS, (X86bt GR32:$src1, imm:$src2))]>,
              OpSize32, TB;
def BT64ri8 : RIi8<0xBA, MRM4r, (outs), (ins GR64:$src1, i64u8imm:$src2),
                   "bt{q}\t{$src2, $src1|$src1, $src2}",
                   [(set EFLAGS, (X86bt GR64:$src1, imm:$src2))]>, TB;
} // SchedRW

// Note that these instructions aren't slow because that only applies when the
// other operand is in a register. When it's an immediate, bt is still fast.
let SchedRW = [WriteBitTestImmLd] in {
def BT16mi8 : Ii8<0xBA, MRM4m, (outs), (ins i16mem:$src1, i16u8imm:$src2),
                  "bt{w}\t{$src2, $src1|$src1, $src2}",
                  [(set EFLAGS, (X86bt (loadi16 addr:$src1),
                                       imm:$src2))]>,
              OpSize16, TB;
def BT32mi8 : Ii8<0xBA, MRM4m, (outs), (ins i32mem:$src1, i32u8imm:$src2),
                  "bt{l}\t{$src2, $src1|$src1, $src2}",
                  [(set EFLAGS, (X86bt (loadi32 addr:$src1),
                                       imm:$src2))]>,
              OpSize32, TB;
def BT64mi8 : RIi8<0xBA, MRM4m, (outs), (ins i64mem:$src1, i64u8imm:$src2),
                   "bt{q}\t{$src2, $src1|$src1, $src2}",
                   [(set EFLAGS, (X86bt (loadi64 addr:$src1),
                                        imm:$src2))]>, TB,
              Requires<[In64BitMode]>;
} // SchedRW
  1719. let hasSideEffects = 0 in {
// BTC (bit test and complement). No ISel patterns in any form; the
// reg/reg and reg/imm forms tie $src1 to $dst, the memory forms are RMW.
let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in {
def BTC16rr : I<0xBB, MRMDestReg, (outs GR16:$dst), (ins GR16:$src1, GR16:$src2),
                "btc{w}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize16, TB, NotMemoryFoldable;
def BTC32rr : I<0xBB, MRMDestReg, (outs GR32:$dst), (ins GR32:$src1, GR32:$src2),
                "btc{l}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize32, TB, NotMemoryFoldable;
def BTC64rr : RI<0xBB, MRMDestReg, (outs GR64:$dst), (ins GR64:$src1, GR64:$src2),
                 "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
              NotMemoryFoldable;
} // SchedRW

// Memory + register-index forms (RMW; like BT, the index is not masked).
let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetRegRMW] in {
def BTC16mr : I<0xBB, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2),
                "btc{w}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize16, TB, NotMemoryFoldable;
def BTC32mr : I<0xBB, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2),
                "btc{l}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize32, TB, NotMemoryFoldable;
def BTC64mr : RI<0xBB, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2),
                 "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
              NotMemoryFoldable;
}

// Immediate-index forms (opcode 0F BA /7).
let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in {
def BTC16ri8 : Ii8<0xBA, MRM7r, (outs GR16:$dst), (ins GR16:$src1, i16u8imm:$src2),
                   "btc{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB;
def BTC32ri8 : Ii8<0xBA, MRM7r, (outs GR32:$dst), (ins GR32:$src1, i32u8imm:$src2),
                   "btc{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB;
def BTC64ri8 : RIi8<0xBA, MRM7r, (outs GR64:$dst), (ins GR64:$src1, i64u8imm:$src2),
                    "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB;
} // SchedRW

// Memory + immediate-index RMW forms.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetImmRMW] in {
def BTC16mi8 : Ii8<0xBA, MRM7m, (outs), (ins i16mem:$src1, i16u8imm:$src2),
                   "btc{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB;
def BTC32mi8 : Ii8<0xBA, MRM7m, (outs), (ins i32mem:$src1, i32u8imm:$src2),
                   "btc{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB;
def BTC64mi8 : RIi8<0xBA, MRM7m, (outs), (ins i64mem:$src1, i64u8imm:$src2),
                    "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
               Requires<[In64BitMode]>;
}
// BTR (bit test and reset). Same structure as BTC above: tied reg forms,
// RMW memory forms, immediate-index forms under 0F BA /6.
let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in {
def BTR16rr : I<0xB3, MRMDestReg, (outs GR16:$dst), (ins GR16:$src1, GR16:$src2),
                "btr{w}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize16, TB, NotMemoryFoldable;
def BTR32rr : I<0xB3, MRMDestReg, (outs GR32:$dst), (ins GR32:$src1, GR32:$src2),
                "btr{l}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize32, TB, NotMemoryFoldable;
def BTR64rr : RI<0xB3, MRMDestReg, (outs GR64:$dst), (ins GR64:$src1, GR64:$src2),
                 "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
              NotMemoryFoldable;
} // SchedRW

// Memory + register-index RMW forms.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetRegRMW] in {
def BTR16mr : I<0xB3, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2),
                "btr{w}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize16, TB, NotMemoryFoldable;
def BTR32mr : I<0xB3, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2),
                "btr{l}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize32, TB, NotMemoryFoldable;
def BTR64mr : RI<0xB3, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2),
                 "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
              NotMemoryFoldable;
}

// Immediate-index forms (opcode 0F BA /6).
let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in {
def BTR16ri8 : Ii8<0xBA, MRM6r, (outs GR16:$dst), (ins GR16:$src1, i16u8imm:$src2),
                   "btr{w}\t{$src2, $src1|$src1, $src2}", []>,
               OpSize16, TB;
def BTR32ri8 : Ii8<0xBA, MRM6r, (outs GR32:$dst), (ins GR32:$src1, i32u8imm:$src2),
                   "btr{l}\t{$src2, $src1|$src1, $src2}", []>,
               OpSize32, TB;
def BTR64ri8 : RIi8<0xBA, MRM6r, (outs GR64:$dst), (ins GR64:$src1, i64u8imm:$src2),
                    "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB;
} // SchedRW

// Memory + immediate-index RMW forms.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetImmRMW] in {
def BTR16mi8 : Ii8<0xBA, MRM6m, (outs), (ins i16mem:$src1, i16u8imm:$src2),
                   "btr{w}\t{$src2, $src1|$src1, $src2}", []>,
               OpSize16, TB;
def BTR32mi8 : Ii8<0xBA, MRM6m, (outs), (ins i32mem:$src1, i32u8imm:$src2),
                   "btr{l}\t{$src2, $src1|$src1, $src2}", []>,
               OpSize32, TB;
def BTR64mi8 : RIi8<0xBA, MRM6m, (outs), (ins i64mem:$src1, i64u8imm:$src2),
                    "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
               Requires<[In64BitMode]>;
}
// BTS (bit test and set). Same structure as BTC/BTR: tied reg forms, RMW
// memory forms, immediate-index forms under 0F BA /5.
let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in {
def BTS16rr : I<0xAB, MRMDestReg, (outs GR16:$dst), (ins GR16:$src1, GR16:$src2),
                "bts{w}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize16, TB, NotMemoryFoldable;
def BTS32rr : I<0xAB, MRMDestReg, (outs GR32:$dst), (ins GR32:$src1, GR32:$src2),
                "bts{l}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize32, TB, NotMemoryFoldable;
def BTS64rr : RI<0xAB, MRMDestReg, (outs GR64:$dst), (ins GR64:$src1, GR64:$src2),
                 "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
              NotMemoryFoldable;
} // SchedRW

// Memory + register-index RMW forms.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetRegRMW] in {
def BTS16mr : I<0xAB, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2),
                "bts{w}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize16, TB, NotMemoryFoldable;
def BTS32mr : I<0xAB, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2),
                "bts{l}\t{$src2, $src1|$src1, $src2}", []>,
              OpSize32, TB, NotMemoryFoldable;
def BTS64mr : RI<0xAB, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2),
                 "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
              NotMemoryFoldable;
}

// Immediate-index forms (opcode 0F BA /5).
let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in {
def BTS16ri8 : Ii8<0xBA, MRM5r, (outs GR16:$dst), (ins GR16:$src1, i16u8imm:$src2),
                   "bts{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB;
def BTS32ri8 : Ii8<0xBA, MRM5r, (outs GR32:$dst), (ins GR32:$src1, i32u8imm:$src2),
                   "bts{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB;
def BTS64ri8 : RIi8<0xBA, MRM5r, (outs GR64:$dst), (ins GR64:$src1, i64u8imm:$src2),
                    "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB;
} // SchedRW

// Memory + immediate-index RMW forms.
let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetImmRMW] in {
def BTS16mi8 : Ii8<0xBA, MRM5m, (outs), (ins i16mem:$src1, i16u8imm:$src2),
                   "bts{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB;
def BTS32mi8 : Ii8<0xBA, MRM5m, (outs), (ins i32mem:$src1, i32u8imm:$src2),
                   "bts{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB;
def BTS64mi8 : RIi8<0xBA, MRM5m, (outs), (ins i64mem:$src1, i64u8imm:$src2),
                    "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB,
               Requires<[In64BitMode]>;
}
  1841. } // hasSideEffects = 0
  1842. } // Defs = [EFLAGS]
  1843. //===----------------------------------------------------------------------===//
  1844. // Atomic support
  1845. //
  1846. // Atomic swap. These are just normal xchg instructions. But since a memory
  1847. // operand is referenced, the atomicity is ensured.
// Expands to the four register-size variants of an atomic swap-style
// instruction (8-bit opcode opc8, wider sizes opc). `frag` names the
// PatFrag family ("frag_8" .. "frag_64") used for the ISel patterns; the
// value operand is tied to the destination register.
multiclass ATOMIC_SWAP<bits<8> opc8, bits<8> opc, string mnemonic, string frag> {
  let Constraints = "$val = $dst", SchedRW = [WriteALULd, WriteRMW] in {
    def NAME#8rm : I<opc8, MRMSrcMem, (outs GR8:$dst),
                     (ins GR8:$val, i8mem:$ptr),
                     !strconcat(mnemonic, "{b}\t{$val, $ptr|$ptr, $val}"),
                     [(set
                        GR8:$dst,
                        (!cast<PatFrag>(frag # "_8") addr:$ptr, GR8:$val))]>;
    def NAME#16rm : I<opc, MRMSrcMem, (outs GR16:$dst),
                      (ins GR16:$val, i16mem:$ptr),
                      !strconcat(mnemonic, "{w}\t{$val, $ptr|$ptr, $val}"),
                      [(set
                         GR16:$dst,
                         (!cast<PatFrag>(frag # "_16") addr:$ptr, GR16:$val))]>,
                    OpSize16;
    def NAME#32rm : I<opc, MRMSrcMem, (outs GR32:$dst),
                      (ins GR32:$val, i32mem:$ptr),
                      !strconcat(mnemonic, "{l}\t{$val, $ptr|$ptr, $val}"),
                      [(set
                         GR32:$dst,
                         (!cast<PatFrag>(frag # "_32") addr:$ptr, GR32:$val))]>,
                    OpSize32;
    def NAME#64rm : RI<opc, MRMSrcMem, (outs GR64:$dst),
                       (ins GR64:$val, i64mem:$ptr),
                       !strconcat(mnemonic, "{q}\t{$val, $ptr|$ptr, $val}"),
                       [(set
                          GR64:$dst,
                          (!cast<PatFrag>(frag # "_64") addr:$ptr, GR64:$val))]>;
  }
}
// XCHG with a memory operand, selected for atomic_swap (see the comment
// at the top of the Atomic support section).
defm XCHG : ATOMIC_SWAP<0x86, 0x87, "xchg", "atomic_swap">, NotMemoryFoldable;

// Swap between registers.
let SchedRW = [WriteXCHG] in {
// Both operands are read-modify-write, so both are tied outputs.
let Constraints = "$src1 = $dst1, $src2 = $dst2", hasSideEffects = 0 in {
def XCHG8rr : I<0x86, MRMSrcReg, (outs GR8:$dst1, GR8:$dst2),
                (ins GR8:$src1, GR8:$src2),
                "xchg{b}\t{$src2, $src1|$src1, $src2}", []>, NotMemoryFoldable;
def XCHG16rr : I<0x87, MRMSrcReg, (outs GR16:$dst1, GR16:$dst2),
                 (ins GR16:$src1, GR16:$src2),
                 "xchg{w}\t{$src2, $src1|$src1, $src2}", []>,
               OpSize16, NotMemoryFoldable;
def XCHG32rr : I<0x87, MRMSrcReg, (outs GR32:$dst1, GR32:$dst2),
                 (ins GR32:$src1, GR32:$src2),
                 "xchg{l}\t{$src2, $src1|$src1, $src2}", []>,
               OpSize32, NotMemoryFoldable;
def XCHG64rr : RI<0x87, MRMSrcReg, (outs GR64:$dst1, GR64:$dst2),
                  (ins GR64:$src1, GR64:$src2),
                  "xchg{q}\t{$src2, $src1|$src1, $src2}", []>, NotMemoryFoldable;
}

// Swap between EAX and other registers (short 0x90+rd encodings).
let Constraints = "$src = $dst", hasSideEffects = 0 in {
let Uses = [AX], Defs = [AX] in
def XCHG16ar : I<0x90, AddRegFrm, (outs GR16:$dst), (ins GR16:$src),
                 "xchg{w}\t{$src, %ax|ax, $src}", []>, OpSize16;
let Uses = [EAX], Defs = [EAX] in
def XCHG32ar : I<0x90, AddRegFrm, (outs GR32:$dst), (ins GR32:$src),
                 "xchg{l}\t{$src, %eax|eax, $src}", []>, OpSize32;
let Uses = [RAX], Defs = [RAX] in
def XCHG64ar : RI<0x90, AddRegFrm, (outs GR64:$dst), (ins GR64:$src),
                  "xchg{q}\t{$src, %rax|rax, $src}", []>;
}
} // SchedRW
// XADD reg,reg: exchange-and-add; both register operands are tied
// read-modify-write outputs, EFLAGS is set. No ISel patterns.
let hasSideEffects = 0, Constraints = "$src1 = $dst1, $src2 = $dst2",
    Defs = [EFLAGS], SchedRW = [WriteXCHG] in {
def XADD8rr : I<0xC0, MRMDestReg, (outs GR8:$dst1, GR8:$dst2),
                (ins GR8:$src1, GR8:$src2),
                "xadd{b}\t{$src2, $src1|$src1, $src2}", []>, TB;
def XADD16rr : I<0xC1, MRMDestReg, (outs GR16:$dst1, GR16:$dst2),
                 (ins GR16:$src1, GR16:$src2),
                 "xadd{w}\t{$src2, $src1|$src1, $src2}", []>, TB, OpSize16;
def XADD32rr : I<0xC1, MRMDestReg, (outs GR32:$dst1, GR32:$dst2),
                 (ins GR32:$src1, GR32:$src2),
                 "xadd{l}\t{$src2, $src1|$src1, $src2}", []>, TB, OpSize32;
def XADD64rr : RI<0xC1, MRMDestReg, (outs GR64:$dst1, GR64:$dst2),
                  (ins GR64:$src1, GR64:$src2),
                  "xadd{q}\t{$src2, $src1|$src1, $src2}", []>, TB;
} // SchedRW

// XADD reg,mem: memory RMW form; the register value operand is tied to
// the destination register.
let mayLoad = 1, mayStore = 1, hasSideEffects = 0, Constraints = "$val = $dst",
    Defs = [EFLAGS], SchedRW = [WriteALULd, WriteRMW] in {
def XADD8rm : I<0xC0, MRMSrcMem, (outs GR8:$dst),
                (ins GR8:$val, i8mem:$ptr),
                "xadd{b}\t{$val, $ptr|$ptr, $val}", []>, TB;
def XADD16rm : I<0xC1, MRMSrcMem, (outs GR16:$dst),
                 (ins GR16:$val, i16mem:$ptr),
                 "xadd{w}\t{$val, $ptr|$ptr, $val}", []>, TB,
               OpSize16;
def XADD32rm : I<0xC1, MRMSrcMem, (outs GR32:$dst),
                 (ins GR32:$val, i32mem:$ptr),
                 "xadd{l}\t{$val, $ptr|$ptr, $val}", []>, TB,
               OpSize32;
def XADD64rm : RI<0xC1, MRMSrcMem, (outs GR64:$dst),
                  (ins GR64:$val, i64mem:$ptr),
                  "xadd{q}\t{$val, $ptr|$ptr, $val}", []>, TB;
}
// CMPXCHG: compare the accumulator with $dst, conditionally store $src.
// The implicit accumulator use/def is modeled per size; no ISel patterns.
let SchedRW = [WriteCMPXCHG], hasSideEffects = 0 in {
let Defs = [AL, EFLAGS], Uses = [AL] in
def CMPXCHG8rr : I<0xB0, MRMDestReg, (outs GR8:$dst), (ins GR8:$src),
                   "cmpxchg{b}\t{$src, $dst|$dst, $src}", []>, TB,
                 NotMemoryFoldable;
let Defs = [AX, EFLAGS], Uses = [AX] in
def CMPXCHG16rr : I<0xB1, MRMDestReg, (outs GR16:$dst), (ins GR16:$src),
                    "cmpxchg{w}\t{$src, $dst|$dst, $src}", []>, TB, OpSize16,
                  NotMemoryFoldable;
let Defs = [EAX, EFLAGS], Uses = [EAX] in
def CMPXCHG32rr : I<0xB1, MRMDestReg, (outs GR32:$dst), (ins GR32:$src),
                    "cmpxchg{l}\t{$src, $dst|$dst, $src}", []>, TB, OpSize32,
                  NotMemoryFoldable;
let Defs = [RAX, EFLAGS], Uses = [RAX] in
def CMPXCHG64rr : RI<0xB1, MRMDestReg, (outs GR64:$dst), (ins GR64:$src),
                     "cmpxchg{q}\t{$src, $dst|$dst, $src}", []>, TB,
                  NotMemoryFoldable;
} // SchedRW, hasSideEffects

// Memory-destination forms, plus the 8-/16-byte CMPXCHG8B/CMPXCHG16B
// which compare/exchange a register pair against memory.
let SchedRW = [WriteCMPXCHGRMW], mayLoad = 1, mayStore = 1,
    hasSideEffects = 0 in {
let Defs = [AL, EFLAGS], Uses = [AL] in
def CMPXCHG8rm : I<0xB0, MRMDestMem, (outs), (ins i8mem:$dst, GR8:$src),
                   "cmpxchg{b}\t{$src, $dst|$dst, $src}", []>, TB,
                 NotMemoryFoldable;
let Defs = [AX, EFLAGS], Uses = [AX] in
def CMPXCHG16rm : I<0xB1, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src),
                    "cmpxchg{w}\t{$src, $dst|$dst, $src}", []>, TB, OpSize16,
                  NotMemoryFoldable;
let Defs = [EAX, EFLAGS], Uses = [EAX] in
def CMPXCHG32rm : I<0xB1, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src),
                    "cmpxchg{l}\t{$src, $dst|$dst, $src}", []>, TB, OpSize32,
                  NotMemoryFoldable;
let Defs = [RAX, EFLAGS], Uses = [RAX] in
def CMPXCHG64rm : RI<0xB1, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src),
                     "cmpxchg{q}\t{$src, $dst|$dst, $src}", []>, TB,
                  NotMemoryFoldable;

let Defs = [EAX, EDX, EFLAGS], Uses = [EAX, EBX, ECX, EDX] in
def CMPXCHG8B : I<0xC7, MRM1m, (outs), (ins i64mem:$dst),
                  "cmpxchg8b\t$dst", []>, TB, Requires<[HasCmpxchg8b]>;

let Defs = [RAX, RDX, EFLAGS], Uses = [RAX, RBX, RCX, RDX] in
// NOTE: In64BitMode check needed for the AssemblerPredicate.
def CMPXCHG16B : RI<0xC7, MRM1m, (outs), (ins i128mem:$dst),
                    "cmpxchg16b\t$dst", []>,
                 TB, Requires<[HasCmpxchg16b,In64BitMode]>;
} // SchedRW, mayLoad, mayStore, hasSideEffects
// Lock instruction prefix
let SchedRW = [WriteMicrocoded] in
def LOCK_PREFIX : I<0xF0, PrefixByte, (outs), (ins), "lock", []>;

let SchedRW = [WriteNop] in {
// Rex64 instruction prefix
def REX64_PREFIX : I<0x48, PrefixByte, (outs), (ins), "rex64", []>,
                   Requires<[In64BitMode]>;

// Data16 instruction prefix
def DATA16_PREFIX : I<0x66, PrefixByte, (outs), (ins), "data16", []>;
} // SchedRW

// Repeat string operation instruction prefixes.
// The implicit ECX use/def models the repeat count.
let Defs = [ECX], Uses = [ECX,DF], SchedRW = [WriteMicrocoded] in {
// Repeat (used with INS, OUTS, MOVS, LODS and STOS)
def REP_PREFIX : I<0xF3, PrefixByte, (outs), (ins), "rep", []>;
// Repeat while not equal (used with CMPS and SCAS)
def REPNE_PREFIX : I<0xF2, PrefixByte, (outs), (ins), "repne", []>;
}
// String manipulation instructions

// LODS: load from [ESI/RSI] into the accumulator, advancing ESI per DF.
let SchedRW = [WriteMicrocoded] in {
let Defs = [AL,ESI], Uses = [ESI,DF] in
def LODSB : I<0xAC, RawFrmSrc, (outs), (ins srcidx8:$src),
              "lodsb\t{$src, %al|al, $src}", []>;
let Defs = [AX,ESI], Uses = [ESI,DF] in
def LODSW : I<0xAD, RawFrmSrc, (outs), (ins srcidx16:$src),
              "lodsw\t{$src, %ax|ax, $src}", []>, OpSize16;
let Defs = [EAX,ESI], Uses = [ESI,DF] in
def LODSL : I<0xAD, RawFrmSrc, (outs), (ins srcidx32:$src),
              "lods{l|d}\t{$src, %eax|eax, $src}", []>, OpSize32;
let Defs = [RAX,ESI], Uses = [ESI,DF] in
def LODSQ : RI<0xAD, RawFrmSrc, (outs), (ins srcidx64:$src),
               "lodsq\t{$src, %rax|rax, $src}", []>,
            Requires<[In64BitMode]>;
}

let SchedRW = [WriteSystem] in {
// OUTS: write [ESI] to the I/O port in DX, advancing ESI per DF.
let Defs = [ESI], Uses = [DX,ESI,DF] in {
def OUTSB : I<0x6E, RawFrmSrc, (outs), (ins srcidx8:$src),
              "outsb\t{$src, %dx|dx, $src}", []>;
def OUTSW : I<0x6F, RawFrmSrc, (outs), (ins srcidx16:$src),
              "outsw\t{$src, %dx|dx, $src}", []>, OpSize16;
def OUTSL : I<0x6F, RawFrmSrc, (outs), (ins srcidx32:$src),
              "outs{l|d}\t{$src, %dx|dx, $src}", []>, OpSize32;
}

// INS: read the I/O port in DX into [EDI], advancing EDI per DF.
let Defs = [EDI], Uses = [DX,EDI,DF] in {
def INSB : I<0x6C, RawFrmDst, (outs), (ins dstidx8:$dst),
             "insb\t{%dx, $dst|$dst, dx}", []>;
def INSW : I<0x6D, RawFrmDst, (outs), (ins dstidx16:$dst),
             "insw\t{%dx, $dst|$dst, dx}", []>, OpSize16;
def INSL : I<0x6D, RawFrmDst, (outs), (ins dstidx32:$dst),
             "ins{l|d}\t{%dx, $dst|$dst, dx}", []>, OpSize32;
}
}
// EFLAGS management instructions: clear/set/complement the carry flag.
let SchedRW = [WriteALU], Defs = [EFLAGS], Uses = [EFLAGS] in {
def CLC : I<0xF8, RawFrm, (outs), (ins), "clc", []>;
def STC : I<0xF9, RawFrm, (outs), (ins), "stc", []>;
def CMC : I<0xF5, RawFrm, (outs), (ins), "cmc", []>;
}

// DF management instructions (direction flag for string ops).
let SchedRW = [WriteALU], Defs = [DF] in {
def CLD : I<0xFC, RawFrm, (outs), (ins), "cld", []>;
def STD : I<0xFD, RawFrm, (outs), (ins), "std", []>;
}

// Table lookup instructions: AL = [EBX + AL].
let Uses = [AL,EBX], Defs = [AL], hasSideEffects = 0, mayLoad = 1 in
def XLAT : I<0xD7, RawFrm, (outs), (ins), "xlatb", []>, Sched<[WriteLoad]>;
// BCD/ASCII adjust instructions. All are invalid in 64-bit mode
// (Requires<[Not64BitMode]>) and have no ISel patterns.
let SchedRW = [WriteMicrocoded] in {
// ASCII Adjust After Addition
let Uses = [AL,EFLAGS], Defs = [AX,EFLAGS], hasSideEffects = 0 in
def AAA : I<0x37, RawFrm, (outs), (ins), "aaa", []>,
          Requires<[Not64BitMode]>;

// ASCII Adjust AX Before Division
let Uses = [AX], Defs = [AX,EFLAGS], hasSideEffects = 0 in
def AAD8i8 : Ii8<0xD5, RawFrm, (outs), (ins i8imm:$src),
                 "aad\t$src", []>, Requires<[Not64BitMode]>;

// ASCII Adjust AX After Multiply
let Uses = [AL], Defs = [AX,EFLAGS], hasSideEffects = 0 in
def AAM8i8 : Ii8<0xD4, RawFrm, (outs), (ins i8imm:$src),
                 "aam\t$src", []>, Requires<[Not64BitMode]>;

// ASCII Adjust AL After Subtraction - sets AX and EFLAGS
let Uses = [AL,EFLAGS], Defs = [AX,EFLAGS], hasSideEffects = 0 in
def AAS : I<0x3F, RawFrm, (outs), (ins), "aas", []>,
          Requires<[Not64BitMode]>;

// Decimal Adjust AL after Addition
let Uses = [AL,EFLAGS], Defs = [AL,EFLAGS], hasSideEffects = 0 in
def DAA : I<0x27, RawFrm, (outs), (ins), "daa", []>,
          Requires<[Not64BitMode]>;

// Decimal Adjust AL after Subtraction
let Uses = [AL,EFLAGS], Defs = [AL,EFLAGS], hasSideEffects = 0 in
def DAS : I<0x2F, RawFrm, (outs), (ins), "das", []>,
          Requires<[Not64BitMode]>;
} // SchedRW
// Legacy 32-bit-mode-only system-ish instructions: BOUND and ARPL.
let SchedRW = [WriteSystem] in {
// Check Array Index Against Bounds
// Note: "bound" does not have reversed operands in at&t syntax.
def BOUNDS16rm : I<0x62, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                   "bound\t$dst, $src", []>, OpSize16,
                 Requires<[Not64BitMode]>;
def BOUNDS32rm : I<0x62, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                   "bound\t$dst, $src", []>, OpSize32,
                 Requires<[Not64BitMode]>;

// Adjust RPL Field of Segment Selector
def ARPL16rr : I<0x63, MRMDestReg, (outs GR16:$dst), (ins GR16:$src),
                 "arpl\t{$src, $dst|$dst, $src}", []>,
               Requires<[Not64BitMode]>, NotMemoryFoldable;
let mayStore = 1 in
def ARPL16mr : I<0x63, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src),
                 "arpl\t{$src, $dst|$dst, $src}", []>,
               Requires<[Not64BitMode]>, NotMemoryFoldable;
} // SchedRW
//===----------------------------------------------------------------------===//
// MOVBE Instructions
//
// Byte-swapping loads/stores, selected as (bswap (load)) / store(bswap).
let Predicates = [HasMOVBE] in {
  let SchedRW = [WriteALULd] in {
  def MOVBE16rm : I<0xF0, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                    "movbe{w}\t{$src, $dst|$dst, $src}",
                    [(set GR16:$dst, (bswap (loadi16 addr:$src)))]>,
                  OpSize16, T8PS;
  def MOVBE32rm : I<0xF0, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                    "movbe{l}\t{$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (bswap (loadi32 addr:$src)))]>,
                  OpSize32, T8PS;
  def MOVBE64rm : RI<0xF0, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src),
                     "movbe{q}\t{$src, $dst|$dst, $src}",
                     [(set GR64:$dst, (bswap (loadi64 addr:$src)))]>,
                  T8PS;
  }
  let SchedRW = [WriteStore] in {
  def MOVBE16mr : I<0xF1, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src),
                    "movbe{w}\t{$src, $dst|$dst, $src}",
                    [(store (bswap GR16:$src), addr:$dst)]>,
                  OpSize16, T8PS;
  def MOVBE32mr : I<0xF1, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src),
                    "movbe{l}\t{$src, $dst|$dst, $src}",
                    [(store (bswap GR32:$src), addr:$dst)]>,
                  OpSize32, T8PS;
  def MOVBE64mr : RI<0xF1, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src),
                     "movbe{q}\t{$src, $dst|$dst, $src}",
                     [(store (bswap GR64:$src), addr:$dst)]>,
                  T8PS;
  }
}
//===----------------------------------------------------------------------===//
// RDRAND Instruction
//
// Hardware random number into a GPR; CF (in EFLAGS) reports success,
// modeled by the two-result X86rdrand node.
let Predicates = [HasRDRAND], Defs = [EFLAGS], SchedRW = [WriteSystem] in {
  def RDRAND16r : I<0xC7, MRM6r, (outs GR16:$dst), (ins),
                    "rdrand{w}\t$dst", [(set GR16:$dst, EFLAGS, (X86rdrand))]>,
                  OpSize16, PS;
  def RDRAND32r : I<0xC7, MRM6r, (outs GR32:$dst), (ins),
                    "rdrand{l}\t$dst", [(set GR32:$dst, EFLAGS, (X86rdrand))]>,
                  OpSize32, PS;
  def RDRAND64r : RI<0xC7, MRM6r, (outs GR64:$dst), (ins),
                     "rdrand{q}\t$dst", [(set GR64:$dst, EFLAGS, (X86rdrand))]>,
                  PS;
}

//===----------------------------------------------------------------------===//
// RDSEED Instruction
//
// Like RDRAND but via the X86rdseed node (0F C7 /7).
let Predicates = [HasRDSEED], Defs = [EFLAGS], SchedRW = [WriteSystem] in {
  def RDSEED16r : I<0xC7, MRM7r, (outs GR16:$dst), (ins), "rdseed{w}\t$dst",
                    [(set GR16:$dst, EFLAGS, (X86rdseed))]>, OpSize16, PS;
  def RDSEED32r : I<0xC7, MRM7r, (outs GR32:$dst), (ins), "rdseed{l}\t$dst",
                    [(set GR32:$dst, EFLAGS, (X86rdseed))]>, OpSize32, PS;
  def RDSEED64r : RI<0xC7, MRM7r, (outs GR64:$dst), (ins), "rdseed{q}\t$dst",
                     [(set GR64:$dst, EFLAGS, (X86rdseed))]>, PS;
}
//===----------------------------------------------------------------------===//
// LZCNT Instruction
//
// Count leading zeros; selected for ctlz (defined on zero input, unlike
// BSR), with EFLAGS as an implicit extra result.
let Predicates = [HasLZCNT], Defs = [EFLAGS] in {
  def LZCNT16rr : I<0xBD, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src),
                    "lzcnt{w}\t{$src, $dst|$dst, $src}",
                    [(set GR16:$dst, (ctlz GR16:$src)), (implicit EFLAGS)]>,
                  XS, OpSize16, Sched<[WriteLZCNT]>;
  def LZCNT16rm : I<0xBD, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                    "lzcnt{w}\t{$src, $dst|$dst, $src}",
                    [(set GR16:$dst, (ctlz (loadi16 addr:$src))),
                     (implicit EFLAGS)]>, XS, OpSize16, Sched<[WriteLZCNTLd]>;

  def LZCNT32rr : I<0xBD, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src),
                    "lzcnt{l}\t{$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (ctlz GR32:$src)), (implicit EFLAGS)]>,
                  XS, OpSize32, Sched<[WriteLZCNT]>;
  def LZCNT32rm : I<0xBD, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                    "lzcnt{l}\t{$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (ctlz (loadi32 addr:$src))),
                     (implicit EFLAGS)]>, XS, OpSize32, Sched<[WriteLZCNTLd]>;

  def LZCNT64rr : RI<0xBD, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src),
                     "lzcnt{q}\t{$src, $dst|$dst, $src}",
                     [(set GR64:$dst, (ctlz GR64:$src)), (implicit EFLAGS)]>,
                  XS, Sched<[WriteLZCNT]>;
  def LZCNT64rm : RI<0xBD, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src),
                     "lzcnt{q}\t{$src, $dst|$dst, $src}",
                     [(set GR64:$dst, (ctlz (loadi64 addr:$src))),
                      (implicit EFLAGS)]>, XS, Sched<[WriteLZCNTLd]>;
}
  2183. //===----------------------------------------------------------------------===//
  2184. // BMI Instructions
  2185. //
// TZCNT (BMI1): count trailing zero bits (selected from the generic cttz
// node). Encoding 0xBC mirrors the LZCNT definitions above.
let Predicates = [HasBMI], Defs = [EFLAGS] in {
  def TZCNT16rr : I<0xBC, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src),
                    "tzcnt{w}\t{$src, $dst|$dst, $src}",
                    [(set GR16:$dst, (cttz GR16:$src)), (implicit EFLAGS)]>,
                    XS, OpSize16, Sched<[WriteTZCNT]>;
  def TZCNT16rm : I<0xBC, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src),
                    "tzcnt{w}\t{$src, $dst|$dst, $src}",
                    [(set GR16:$dst, (cttz (loadi16 addr:$src))),
                     (implicit EFLAGS)]>, XS, OpSize16, Sched<[WriteTZCNTLd]>;
  def TZCNT32rr : I<0xBC, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src),
                    "tzcnt{l}\t{$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (cttz GR32:$src)), (implicit EFLAGS)]>,
                    XS, OpSize32, Sched<[WriteTZCNT]>;
  def TZCNT32rm : I<0xBC, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src),
                    "tzcnt{l}\t{$src, $dst|$dst, $src}",
                    [(set GR32:$dst, (cttz (loadi32 addr:$src))),
                     (implicit EFLAGS)]>, XS, OpSize32, Sched<[WriteTZCNTLd]>;
  def TZCNT64rr : RI<0xBC, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src),
                     "tzcnt{q}\t{$src, $dst|$dst, $src}",
                     [(set GR64:$dst, (cttz GR64:$src)), (implicit EFLAGS)]>,
                     XS, Sched<[WriteTZCNT]>;
  def TZCNT64rm : RI<0xBC, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src),
                     "tzcnt{q}\t{$src, $dst|$dst, $src}",
                     [(set GR64:$dst, (cttz (loadi64 addr:$src))),
                      (implicit EFLAGS)]>, XS, Sched<[WriteTZCNTLd]>;
}
// Shared shell for the BMI1 single-source bit instructions (BLSR/BLSMSK/BLSI).
// Emits a register form and a load-folded form; no patterns are attached
// here (selection happens via the explicit Pat records below), so the defs
// are marked hasSideEffects = 0.
multiclass bmi_bls<string mnemonic, Format RegMRM, Format MemMRM,
                   RegisterClass RC, X86MemOperand x86memop,
                   X86FoldableSchedWrite sched> {
  let hasSideEffects = 0 in {
    def rr : I<0xF3, RegMRM, (outs RC:$dst), (ins RC:$src),
               !strconcat(mnemonic, "\t{$src, $dst|$dst, $src}"), []>,
               T8PS, VEX_4V, Sched<[sched]>;
    let mayLoad = 1 in
    def rm : I<0xF3, MemMRM, (outs RC:$dst), (ins x86memop:$src),
               !strconcat(mnemonic, "\t{$src, $dst|$dst, $src}"), []>,
               T8PS, VEX_4V, Sched<[sched.Folded]>;
  }
}
// BMI1 BLSR/BLSMSK/BLSI instantiations. The ModRM /reg field (MRM1..MRM3)
// selects the operation; 64-bit variants add VEX_W.
let Predicates = [HasBMI], Defs = [EFLAGS] in {
  defm BLSR32 : bmi_bls<"blsr{l}", MRM1r, MRM1m, GR32, i32mem, WriteBLS>;
  defm BLSR64 : bmi_bls<"blsr{q}", MRM1r, MRM1m, GR64, i64mem, WriteBLS>, VEX_W;
  defm BLSMSK32 : bmi_bls<"blsmsk{l}", MRM2r, MRM2m, GR32, i32mem, WriteBLS>;
  defm BLSMSK64 : bmi_bls<"blsmsk{q}", MRM2r, MRM2m, GR64, i64mem, WriteBLS>, VEX_W;
  defm BLSI32 : bmi_bls<"blsi{l}", MRM3r, MRM3m, GR32, i32mem, WriteBLS>;
  defm BLSI64 : bmi_bls<"blsi{q}", MRM3r, MRM3m, GR64, i64mem, WriteBLS>, VEX_W;
}
  2233. //===----------------------------------------------------------------------===//
  2234. // Pattern fragments to auto generate BMI instructions.
  2235. //===----------------------------------------------------------------------===//
// Flag-producing logic-op fragments that only match when the carry flag
// result is unused (hasNoCarryFlagUses on result #1, the EFLAGS value).
// BMI/TBM bit instructions set CF differently from plain AND/OR/XOR, so
// they may only replace the flag-producing ops under this condition.
def or_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                           (X86or_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;
def xor_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                            (X86xor_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;
def and_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs),
                            (X86and_flag node:$lhs, node:$rhs), [{
  return hasNoCarryFlagUses(SDValue(N, 1));
}]>;
// Selection patterns for the BMI1 bit-manipulation identities:
//   BLSR   = x & (x - 1)   (reset lowest set bit)
//   BLSMSK = x ^ (x - 1)   (mask up to lowest set bit)
//   BLSI   = x & -x        (isolate lowest set bit)
let Predicates = [HasBMI] in {
  // FIXME: patterns for the load versions are not implemented
  def : Pat<(and GR32:$src, (add GR32:$src, -1)),
            (BLSR32rr GR32:$src)>;
  def : Pat<(and GR64:$src, (add GR64:$src, -1)),
            (BLSR64rr GR64:$src)>;

  def : Pat<(xor GR32:$src, (add GR32:$src, -1)),
            (BLSMSK32rr GR32:$src)>;
  def : Pat<(xor GR64:$src, (add GR64:$src, -1)),
            (BLSMSK64rr GR64:$src)>;

  def : Pat<(and GR32:$src, (ineg GR32:$src)),
            (BLSI32rr GR32:$src)>;
  def : Pat<(and GR64:$src, (ineg GR64:$src)),
            (BLSI64rr GR64:$src)>;

  // Versions to match flag producing ops (only when CF is unused; see the
  // *_flag_nocf fragments above).
  def : Pat<(and_flag_nocf GR32:$src, (add GR32:$src, -1)),
            (BLSR32rr GR32:$src)>;
  def : Pat<(and_flag_nocf GR64:$src, (add GR64:$src, -1)),
            (BLSR64rr GR64:$src)>;

  def : Pat<(xor_flag_nocf GR32:$src, (add GR32:$src, -1)),
            (BLSMSK32rr GR32:$src)>;
  def : Pat<(xor_flag_nocf GR64:$src, (add GR64:$src, -1)),
            (BLSMSK64rr GR64:$src)>;

  def : Pat<(and_flag_nocf GR32:$src, (ineg GR32:$src)),
            (BLSI32rr GR32:$src)>;
  def : Pat<(and_flag_nocf GR64:$src, (ineg GR64:$src)),
            (BLSI64rr GR64:$src)>;
}
// BEXTR shell: bit-field extract with start/length control in $src2.
// The memory form's Sched list spells out the five address-operand reads
// (ReadDefault) before the register operand's ReadAfterFold.
multiclass bmi_bextr<bits<8> opc, string mnemonic, RegisterClass RC,
                     X86MemOperand x86memop, SDNode OpNode,
                     PatFrag ld_frag, X86FoldableSchedWrite Sched> {
  def rr : I<opc, MRMSrcReg4VOp3, (outs RC:$dst), (ins RC:$src1, RC:$src2),
             !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
             [(set RC:$dst, (OpNode RC:$src1, RC:$src2)), (implicit EFLAGS)]>,
             T8PS, VEX, Sched<[Sched]>;
  def rm : I<opc, MRMSrcMem4VOp3, (outs RC:$dst), (ins x86memop:$src1, RC:$src2),
             !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
             [(set RC:$dst, (OpNode (ld_frag addr:$src1), RC:$src2)),
              (implicit EFLAGS)]>, T8PS, VEX,
             Sched<[Sched.Folded,
                    // x86memop:$src1
                    ReadDefault, ReadDefault, ReadDefault, ReadDefault,
                    ReadDefault,
                    // RC:$src2
                    Sched.ReadAfterFold]>;
}
// BEXTR instantiations (BMI1); 64-bit form adds VEX_W.
let Predicates = [HasBMI], Defs = [EFLAGS] in {
  defm BEXTR32 : bmi_bextr<0xF7, "bextr{l}", GR32, i32mem,
                           X86bextr, loadi32, WriteBEXTR>;
  defm BEXTR64 : bmi_bextr<0xF7, "bextr{q}", GR64, i64mem,
                           X86bextr, loadi64, WriteBEXTR>, VEX_W;
}
// BZHI shell: zero the high bits of $src1 starting at the index in $src2.
// Structurally identical to bmi_bextr aside from the selected node.
multiclass bmi_bzhi<bits<8> opc, string mnemonic, RegisterClass RC,
                    X86MemOperand x86memop, SDNode Int,
                    PatFrag ld_frag, X86FoldableSchedWrite Sched> {
  def rr : I<opc, MRMSrcReg4VOp3, (outs RC:$dst), (ins RC:$src1, RC:$src2),
             !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
             [(set RC:$dst, (Int RC:$src1, RC:$src2)), (implicit EFLAGS)]>,
             T8PS, VEX, Sched<[Sched]>;
  def rm : I<opc, MRMSrcMem4VOp3, (outs RC:$dst), (ins x86memop:$src1, RC:$src2),
             !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
             [(set RC:$dst, (Int (ld_frag addr:$src1), RC:$src2)),
              (implicit EFLAGS)]>, T8PS, VEX,
             Sched<[Sched.Folded,
                    // x86memop:$src1
                    ReadDefault, ReadDefault, ReadDefault, ReadDefault,
                    ReadDefault,
                    // RC:$src2
                    Sched.ReadAfterFold]>;
}
// BZHI instantiations (BMI2); 64-bit form adds VEX_W.
let Predicates = [HasBMI2], Defs = [EFLAGS] in {
  defm BZHI32 : bmi_bzhi<0xF5, "bzhi{l}", GR32, i32mem,
                         X86bzhi, loadi32, WriteBZHI>;
  defm BZHI64 : bmi_bzhi<0xF5, "bzhi{q}", GR64, i64mem,
                         X86bzhi, loadi64, WriteBZHI>, VEX_W;
}
// Immediate transforms used to turn a contiguous low-bit mask into the
// control operand of BZHI (bit count) or BEXTR/BEXTRI (length << 8, start 0).
def CountTrailingOnes : SDNodeXForm<imm, [{
  // Count the trailing ones in the immediate.
  return getI8Imm(countTrailingOnes(N->getZExtValue()), SDLoc(N));
}]>;

def BEXTRMaskXForm : SDNodeXForm<imm, [{
  unsigned Length = countTrailingOnes(N->getZExtValue());
  return getI32Imm(Length << 8, SDLoc(N));
}]>;

// A 64-bit mask of contiguous low bits that does not fit in 32 bits (a
// 32-bit-representable mask is handled by ordinary AND-immediate forms).
def AndMask64 : ImmLeaf<i64, [{
  return isMask_64(Imm) && !isUInt<32>(Imm);
}]>;
  2335. // Use BEXTR for 64-bit 'and' with large immediate 'mask'.
  2336. let Predicates = [HasBMI, NoBMI2, NoTBM] in {
  2337. def : Pat<(and GR64:$src, AndMask64:$mask),
  2338. (BEXTR64rr GR64:$src,
  2339. (SUBREG_TO_REG (i64 0),
  2340. (MOV32ri (BEXTRMaskXForm imm:$mask)), sub_32bit))>;
  2341. def : Pat<(and (loadi64 addr:$src), AndMask64:$mask),
  2342. (BEXTR64rm addr:$src,
  2343. (SUBREG_TO_REG (i64 0),
  2344. (MOV32ri (BEXTRMaskXForm imm:$mask)), sub_32bit))>;
  2345. }
  2346. // Use BZHI for 64-bit 'and' with large immediate 'mask'.
  2347. let Predicates = [HasBMI2, NoTBM] in {
  2348. def : Pat<(and GR64:$src, AndMask64:$mask),
  2349. (BZHI64rr GR64:$src,
  2350. (INSERT_SUBREG (i64 (IMPLICIT_DEF)),
  2351. (MOV8ri (CountTrailingOnes imm:$mask)), sub_8bit))>;
  2352. def : Pat<(and (loadi64 addr:$src), AndMask64:$mask),
  2353. (BZHI64rm addr:$src,
  2354. (INSERT_SUBREG (i64 (IMPLICIT_DEF)),
  2355. (MOV8ri (CountTrailingOnes imm:$mask)), sub_8bit))>;
  2356. }
// Shared shell for BMI2 parallel bit deposit/extract (PDEP/PEXT).
// Prefix bytes (T8XD vs T8XS) are appended at the instantiation site.
multiclass bmi_pdep_pext<string mnemonic, RegisterClass RC,
                         X86MemOperand x86memop, SDNode OpNode,
                         PatFrag ld_frag> {
  def rr : I<0xF5, MRMSrcReg, (outs RC:$dst), (ins RC:$src1, RC:$src2),
             !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
             [(set RC:$dst, (OpNode RC:$src1, RC:$src2))]>,
             VEX_4V, Sched<[WriteALU]>;
  def rm : I<0xF5, MRMSrcMem, (outs RC:$dst), (ins RC:$src1, x86memop:$src2),
             !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"),
             [(set RC:$dst, (OpNode RC:$src1, (ld_frag addr:$src2)))]>,
             VEX_4V, Sched<[WriteALU.Folded, WriteALU.ReadAfterFold]>;
}
// PDEP (F2/T8XD prefix) and PEXT (F3/T8XS prefix) instantiations (BMI2).
let Predicates = [HasBMI2] in {
  defm PDEP32 : bmi_pdep_pext<"pdep{l}", GR32, i32mem,
                              X86pdep, loadi32>, T8XD;
  defm PDEP64 : bmi_pdep_pext<"pdep{q}", GR64, i64mem,
                              X86pdep, loadi64>, T8XD, VEX_W;
  defm PEXT32 : bmi_pdep_pext<"pext{l}", GR32, i32mem,
                              X86pext, loadi32>, T8XS;
  defm PEXT64 : bmi_pdep_pext<"pext{q}", GR64, i64mem,
                              X86pext, loadi64>, T8XS, VEX_W;
}
  2379. //===----------------------------------------------------------------------===//
  2380. // TBM Instructions
  2381. //
// AMD TBM (trailing bit manipulation) instructions. BEXTRI takes an
// immediate control; the remaining ops are single-source and are selected
// via the /reg ModRM field (see the Pat records near the end of the file).
let Predicates = [HasTBM], Defs = [EFLAGS] in {

multiclass tbm_bextri<bits<8> opc, RegisterClass RC, string OpcodeStr,
                      X86MemOperand x86memop, PatFrag ld_frag,
                      SDNode OpNode, Operand immtype,
                      SDPatternOperator immoperator,
                      X86FoldableSchedWrite Sched> {
  def ri : Ii32<opc, MRMSrcReg, (outs RC:$dst), (ins RC:$src1, immtype:$cntl),
                !strconcat(OpcodeStr,
                           "\t{$cntl, $src1, $dst|$dst, $src1, $cntl}"),
                [(set RC:$dst, (OpNode RC:$src1, immoperator:$cntl))]>,
                XOP, XOPA, Sched<[Sched]>;
  def mi : Ii32<opc, MRMSrcMem, (outs RC:$dst),
                (ins x86memop:$src1, immtype:$cntl),
                !strconcat(OpcodeStr,
                           "\t{$cntl, $src1, $dst|$dst, $src1, $cntl}"),
                [(set RC:$dst, (OpNode (ld_frag addr:$src1), immoperator:$cntl))]>,
                XOP, XOPA, Sched<[Sched.Folded]>;
}

defm BEXTRI32 : tbm_bextri<0x10, GR32, "bextr{l}", i32mem, loadi32,
                           X86bextri, i32imm, timm, WriteBEXTR>;
// 64-bit BEXTRI still carries a 32-bit immediate (sign-extended).
let ImmT = Imm32S in
defm BEXTRI64 : tbm_bextri<0x10, GR64, "bextr{q}", i64mem, loadi64,
                           X86bextri, i64i32imm,
                           i64timmSExt32, WriteBEXTR>, VEX_W;

// Shell for the single-source TBM ops; no patterns attached here, so the
// defs are marked hasSideEffects = 0.
multiclass tbm_binary_rm<bits<8> opc, Format FormReg, Format FormMem,
                         RegisterClass RC, string OpcodeStr,
                         X86MemOperand x86memop, X86FoldableSchedWrite Sched> {
let hasSideEffects = 0 in {
  def rr : I<opc, FormReg, (outs RC:$dst), (ins RC:$src),
             !strconcat(OpcodeStr,"\t{$src, $dst|$dst, $src}"), []>,
             XOP_4V, XOP9, Sched<[Sched]>;
  let mayLoad = 1 in
  def rm : I<opc, FormMem, (outs RC:$dst), (ins x86memop:$src),
             !strconcat(OpcodeStr,"\t{$src, $dst|$dst, $src}"), []>,
             XOP_4V, XOP9, Sched<[Sched.Folded]>;
}
}

// Instantiates 32- and 64-bit variants of one single-source TBM op.
multiclass tbm_binary_intr<bits<8> opc, string OpcodeStr,
                           X86FoldableSchedWrite Sched,
                           Format FormReg, Format FormMem> {
  defm NAME#32 : tbm_binary_rm<opc, FormReg, FormMem, GR32, OpcodeStr#"{l}",
                               i32mem, Sched>;
  defm NAME#64 : tbm_binary_rm<opc, FormReg, FormMem, GR64, OpcodeStr#"{q}",
                               i64mem, Sched>, VEX_W;
}

defm BLCFILL : tbm_binary_intr<0x01, "blcfill", WriteALU, MRM1r, MRM1m>;
defm BLCI    : tbm_binary_intr<0x02, "blci", WriteALU, MRM6r, MRM6m>;
defm BLCIC   : tbm_binary_intr<0x01, "blcic", WriteALU, MRM5r, MRM5m>;
defm BLCMSK  : tbm_binary_intr<0x02, "blcmsk", WriteALU, MRM1r, MRM1m>;
defm BLCS    : tbm_binary_intr<0x01, "blcs", WriteALU, MRM3r, MRM3m>;
defm BLSFILL : tbm_binary_intr<0x01, "blsfill", WriteALU, MRM2r, MRM2m>;
defm BLSIC   : tbm_binary_intr<0x01, "blsic", WriteALU, MRM6r, MRM6m>;
defm T1MSKC  : tbm_binary_intr<0x01, "t1mskc", WriteALU, MRM7r, MRM7m>;
defm TZMSK   : tbm_binary_intr<0x01, "tzmsk", WriteALU, MRM4r, MRM4m>;
} // HasTBM, EFLAGS
  2437. // Use BEXTRI for 64-bit 'and' with large immediate 'mask'.
  2438. let Predicates = [HasTBM] in {
  2439. def : Pat<(and GR64:$src, AndMask64:$mask),
  2440. (BEXTRI64ri GR64:$src, (BEXTRMaskXForm imm:$mask))>;
  2441. def : Pat<(and (loadi64 addr:$src), AndMask64:$mask),
  2442. (BEXTRI64mi addr:$src, (BEXTRMaskXForm imm:$mask))>;
  2443. }
  2444. //===----------------------------------------------------------------------===//
  2445. // Lightweight Profiling Instructions
// AMD Lightweight Profiling. LLWPCB/SLWPCB load/store the LWP control-block
// pointer; LWPINS inserts an event (reporting success in EFLAGS); LWPVAL
// inserts a value sample.
let Predicates = [HasLWP], SchedRW = [WriteSystem] in {

def LLWPCB : I<0x12, MRM0r, (outs), (ins GR32:$src), "llwpcb\t$src",
               [(int_x86_llwpcb GR32:$src)]>, XOP, XOP9;
def SLWPCB : I<0x12, MRM1r, (outs GR32:$dst), (ins), "slwpcb\t$dst",
               [(set GR32:$dst, (int_x86_slwpcb))]>, XOP, XOP9;

def LLWPCB64 : I<0x12, MRM0r, (outs), (ins GR64:$src), "llwpcb\t$src",
                 [(int_x86_llwpcb GR64:$src)]>, XOP, XOP9, VEX_W;
def SLWPCB64 : I<0x12, MRM1r, (outs GR64:$dst), (ins), "slwpcb\t$dst",
                 [(set GR64:$dst, (int_x86_slwpcb))]>, XOP, XOP9, VEX_W;

multiclass lwpins_intr<RegisterClass RC> {
  def rri : Ii32<0x12, MRM0r, (outs), (ins RC:$src0, GR32:$src1, i32imm:$cntl),
                 "lwpins\t{$cntl, $src1, $src0|$src0, $src1, $cntl}",
                 [(set EFLAGS, (X86lwpins RC:$src0, GR32:$src1, timm:$cntl))]>,
                 XOP_4V, XOPA;
  let mayLoad = 1 in
  def rmi : Ii32<0x12, MRM0m, (outs), (ins RC:$src0, i32mem:$src1, i32imm:$cntl),
                 "lwpins\t{$cntl, $src1, $src0|$src0, $src1, $cntl}",
                 [(set EFLAGS, (X86lwpins RC:$src0, (loadi32 addr:$src1), timm:$cntl))]>,
                 XOP_4V, XOPA;
}

let Defs = [EFLAGS] in {
  defm LWPINS32 : lwpins_intr<GR32>;
  defm LWPINS64 : lwpins_intr<GR64>, VEX_W;
} // EFLAGS

multiclass lwpval_intr<RegisterClass RC, Intrinsic Int> {
  def rri : Ii32<0x12, MRM1r, (outs), (ins RC:$src0, GR32:$src1, i32imm:$cntl),
                 "lwpval\t{$cntl, $src1, $src0|$src0, $src1, $cntl}",
                 [(Int RC:$src0, GR32:$src1, timm:$cntl)]>, XOP_4V, XOPA;
  let mayLoad = 1 in
  def rmi : Ii32<0x12, MRM1m, (outs), (ins RC:$src0, i32mem:$src1, i32imm:$cntl),
                 "lwpval\t{$cntl, $src1, $src0|$src0, $src1, $cntl}",
                 [(Int RC:$src0, (loadi32 addr:$src1), timm:$cntl)]>,
                 XOP_4V, XOPA;
}

defm LWPVAL32 : lwpval_intr<GR32, int_x86_lwpval32>;
defm LWPVAL64 : lwpval_intr<GR64, int_x86_lwpval64>, VEX_W;

} // HasLWP, SchedRW
  2483. //===----------------------------------------------------------------------===//
  2484. // MONITORX/MWAITX Instructions
  2485. //
// MONITORX/MWAITX. Implicit operands: MONITORX takes the address in
// eAX/rAX (mode-dependent) with extensions/hints in ECX/EDX; MWAITX takes
// hints/extensions in ECX/EAX and a timer value in EBX.
let SchedRW = [ WriteSystem ] in {
  let Uses = [ EAX, ECX, EDX ] in
  def MONITORX32rrr : I<0x01, MRM_FA, (outs), (ins), "monitorx", []>,
                      TB, Requires<[ HasMWAITX, Not64BitMode ]>;
  let Uses = [ RAX, ECX, EDX ] in
  def MONITORX64rrr : I<0x01, MRM_FA, (outs), (ins), "monitorx", []>,
                      TB, Requires<[ HasMWAITX, In64BitMode ]>;

  let Uses = [ ECX, EAX, EBX ] in {
    def MWAITXrrr : I<0x01, MRM_FB, (outs), (ins), "mwaitx",
                      []>, TB, Requires<[ HasMWAITX ]>;
  }
} // SchedRW

// Aliases accepting the explicit (implicit-register) operand spellings.
def : InstAlias<"mwaitx\t{%eax, %ecx, %ebx|ebx, ecx, eax}", (MWAITXrrr)>,
      Requires<[ Not64BitMode ]>;
def : InstAlias<"mwaitx\t{%rax, %rcx, %rbx|rbx, rcx, rax}", (MWAITXrrr)>,
      Requires<[ In64BitMode ]>;
def : InstAlias<"monitorx\t{%eax, %ecx, %edx|edx, ecx, eax}", (MONITORX32rrr)>,
      Requires<[ Not64BitMode ]>;
def : InstAlias<"monitorx\t{%rax, %rcx, %rdx|rdx, rcx, rax}", (MONITORX64rrr)>,
      Requires<[ In64BitMode ]>;
  2506. //===----------------------------------------------------------------------===//
  2507. // WAITPKG Instructions
  2508. //
// WAITPKG: UMONITOR arms an address monitor (address size selects the
// register width); UMWAIT/TPAUSE wait with a deadline in EDX:EAX and report
// via EFLAGS (CF).
let SchedRW = [WriteSystem] in {
  def UMONITOR16 : I<0xAE, MRM6r, (outs), (ins GR16:$src),
                     "umonitor\t$src", [(int_x86_umonitor GR16:$src)]>,
                     XS, AdSize16, Requires<[HasWAITPKG, Not64BitMode]>;
  def UMONITOR32 : I<0xAE, MRM6r, (outs), (ins GR32:$src),
                     "umonitor\t$src", [(int_x86_umonitor GR32:$src)]>,
                     XS, AdSize32, Requires<[HasWAITPKG]>;
  def UMONITOR64 : I<0xAE, MRM6r, (outs), (ins GR64:$src),
                     "umonitor\t$src", [(int_x86_umonitor GR64:$src)]>,
                     XS, AdSize64, Requires<[HasWAITPKG, In64BitMode]>;

  let Uses = [EAX, EDX], Defs = [EFLAGS] in {
    def UMWAIT : I<0xAE, MRM6r,
                   (outs), (ins GR32orGR64:$src), "umwait\t$src",
                   [(set EFLAGS, (X86umwait GR32orGR64:$src, EDX, EAX))]>,
                   XD, Requires<[HasWAITPKG]>;
    def TPAUSE : I<0xAE, MRM6r,
                   (outs), (ins GR32orGR64:$src), "tpause\t$src",
                   [(set EFLAGS, (X86tpause GR32orGR64:$src, EDX, EAX))]>,
                   PD, Requires<[HasWAITPKG]>, NotMemoryFoldable;
  }
} // SchedRW
  2530. //===----------------------------------------------------------------------===//
  2531. // MOVDIRI - Move doubleword/quadword as direct store
  2532. //
// MOVDIRI: direct (write-combining, non-temporal-style) doubleword/quadword
// store, exposed via the directstore intrinsics.
let SchedRW = [WriteStore] in {
  def MOVDIRI32 : I<0xF9, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src),
                    "movdiri\t{$src, $dst|$dst, $src}",
                    [(int_x86_directstore32 addr:$dst, GR32:$src)]>,
                    T8PS, Requires<[HasMOVDIRI]>;
  def MOVDIRI64 : RI<0xF9, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src),
                     "movdiri\t{$src, $dst|$dst, $src}",
                     [(int_x86_directstore64 addr:$dst, GR64:$src)]>,
                     T8PS, Requires<[In64BitMode, HasMOVDIRI]>;
} // SchedRW
  2543. //===----------------------------------------------------------------------===//
  2544. // MOVDIR64B - Move 64 bytes as direct store
  2545. //
// MOVDIR64B: 64-byte direct store from a 64-byte source. The destination
// register is really an address, so the address-size prefix (AdSize*)
// selects the register width; the 16-bit form has no intrinsic pattern.
let SchedRW = [WriteStore] in {
  def MOVDIR64B16 : I<0xF8, MRMSrcMem, (outs), (ins GR16:$dst, i512mem:$src),
                      "movdir64b\t{$src, $dst|$dst, $src}", []>,
                      T8PD, AdSize16, Requires<[HasMOVDIR64B, Not64BitMode]>;
  def MOVDIR64B32 : I<0xF8, MRMSrcMem, (outs), (ins GR32:$dst, i512mem:$src),
                      "movdir64b\t{$src, $dst|$dst, $src}",
                      [(int_x86_movdir64b GR32:$dst, addr:$src)]>,
                      T8PD, AdSize32, Requires<[HasMOVDIR64B]>;
  def MOVDIR64B64 : I<0xF8, MRMSrcMem, (outs), (ins GR64:$dst, i512mem:$src),
                      "movdir64b\t{$src, $dst|$dst, $src}",
                      [(int_x86_movdir64b GR64:$dst, addr:$src)]>,
                      T8PD, AdSize64, Requires<[HasMOVDIR64B, In64BitMode]>;
} // SchedRW
  2559. //===----------------------------------------------------------------------===//
  2560. // ENQCMD/S - Enqueue 64-byte command as user with 64-byte write atomicity
  2561. //
// ENQCMD (user, F2 prefix) / ENQCMDS (supervisor, F3 prefix): enqueue a
// 64-byte command with 64-byte write atomicity; success is reported through
// EFLAGS. As with MOVDIR64B, AdSize* selects the destination-register width.
let SchedRW = [WriteStore], Defs = [EFLAGS] in {
  def ENQCMD16 : I<0xF8, MRMSrcMem, (outs), (ins GR16:$dst, i512mem:$src),
                   "enqcmd\t{$src, $dst|$dst, $src}",
                   [(set EFLAGS, (X86enqcmd GR16:$dst, addr:$src))]>,
                   T8XD, AdSize16, Requires<[HasENQCMD, Not64BitMode]>;
  def ENQCMD32 : I<0xF8, MRMSrcMem, (outs), (ins GR32:$dst, i512mem:$src),
                   "enqcmd\t{$src, $dst|$dst, $src}",
                   [(set EFLAGS, (X86enqcmd GR32:$dst, addr:$src))]>,
                   T8XD, AdSize32, Requires<[HasENQCMD]>;
  def ENQCMD64 : I<0xF8, MRMSrcMem, (outs), (ins GR64:$dst, i512mem:$src),
                   "enqcmd\t{$src, $dst|$dst, $src}",
                   [(set EFLAGS, (X86enqcmd GR64:$dst, addr:$src))]>,
                   T8XD, AdSize64, Requires<[HasENQCMD, In64BitMode]>;

  def ENQCMDS16 : I<0xF8, MRMSrcMem, (outs), (ins GR16:$dst, i512mem:$src),
                    "enqcmds\t{$src, $dst|$dst, $src}",
                    [(set EFLAGS, (X86enqcmds GR16:$dst, addr:$src))]>,
                    T8XS, AdSize16, Requires<[HasENQCMD, Not64BitMode]>;
  def ENQCMDS32 : I<0xF8, MRMSrcMem, (outs), (ins GR32:$dst, i512mem:$src),
                    "enqcmds\t{$src, $dst|$dst, $src}",
                    [(set EFLAGS, (X86enqcmds GR32:$dst, addr:$src))]>,
                    T8XS, AdSize32, Requires<[HasENQCMD]>;
  def ENQCMDS64 : I<0xF8, MRMSrcMem, (outs), (ins GR64:$dst, i512mem:$src),
                    "enqcmds\t{$src, $dst|$dst, $src}",
                    [(set EFLAGS, (X86enqcmds GR64:$dst, addr:$src))]>,
                    T8XS, AdSize64, Requires<[HasENQCMD, In64BitMode]>;
}
  2588. //===----------------------------------------------------------------------===//
  2589. // CLZERO Instruction
  2590. //
// CLZERO: zero the cache line addressed by eAX/rAX (implicit operand).
let SchedRW = [WriteLoad] in {
  let Uses = [EAX] in
  def CLZERO32r : I<0x01, MRM_FC, (outs), (ins), "clzero", []>,
                  TB, Requires<[HasCLZERO, Not64BitMode]>;
  let Uses = [RAX] in
  def CLZERO64r : I<0x01, MRM_FC, (outs), (ins), "clzero", []>,
                  TB, Requires<[HasCLZERO, In64BitMode]>;
} // SchedRW

// Aliases accepting the explicit implicit-register spelling.
def : InstAlias<"clzero\t{%eax|eax}", (CLZERO32r)>, Requires<[Not64BitMode]>;
def : InstAlias<"clzero\t{%rax|rax}", (CLZERO64r)>, Requires<[In64BitMode]>;
  2601. //===----------------------------------------------------------------------===//
  2602. // INVLPGB Instruction
  2603. // OPCODE 0F 01 FE
  2604. //
  2605. let SchedRW = [WriteSystem] in {
  2606. let Uses = [EAX, EDX] in
  2607. def INVLPGB32 : I<0x01, MRM_FE, (outs), (ins),
  2608. "invlpgb}", []>,
  2609. PS, Requires<[Not64BitMode]>;
  2610. let Uses = [RAX, EDX] in
  2611. def INVLPGB64 : I<0x01, MRM_FE, (outs), (ins),
  2612. "invlpgb", []>,
  2613. PS, Requires<[In64BitMode]>;
  2614. } // SchedRW
  2615. def : InstAlias<"invlpgb\t{%eax, %edx|eax, edx}", (INVLPGB32)>, Requires<[Not64BitMode]>;
  2616. def : InstAlias<"invlpgb\t{%rax, %edx|rax, edx}", (INVLPGB64)>, Requires<[In64BitMode]>;
  2617. //===----------------------------------------------------------------------===//
  2618. // TLBSYNC Instruction
  2619. // OPCODE 0F 01 FF
  2620. //
// TLBSYNC: wait for completion of prior INVLPGB broadcasts.
// NOTE(review): Requires<[]> is empty — no CPUID feature gate here; verify
// this matches upstream intent before tightening.
let SchedRW = [WriteSystem] in {
  def TLBSYNC : I<0x01, MRM_FF, (outs), (ins),
                  "tlbsync", []>,
                  PS, Requires<[]>;
} // SchedRW
  2626. //===----------------------------------------------------------------------===//
  2627. // HRESET Instruction
  2628. //
// HRESET: history reset; the 8-bit immediate selects what to reset, with
// additional selection in EAX (implicit use).
let Uses = [EAX], SchedRW = [WriteSystem] in
def HRESET : Ii8<0xF0, MRM_C0, (outs), (ins i32u8imm:$imm), "hreset\t$imm", []>,
             Requires<[HasHRESET]>, TAXS;

//===----------------------------------------------------------------------===//
// SERIALIZE Instruction
//
// SERIALIZE: architectural instruction-stream serialization point.
let SchedRW = [WriteSystem] in
def SERIALIZE : I<0x01, MRM_E8, (outs), (ins), "serialize",
                  [(int_x86_serialize)]>, PS,
                  Requires<[HasSERIALIZE]>;
  2639. //===----------------------------------------------------------------------===//
  2640. // TSXLDTRK - TSX Suspend Load Address Tracking
  2641. //
// TSXLDTRK: suspend (XSUSLDTRK) / resume (XRESLDTRK) load-address tracking
// inside a TSX transaction.
let Predicates = [HasTSXLDTRK], SchedRW = [WriteSystem] in {
  def XSUSLDTRK : I<0x01, MRM_E8, (outs), (ins), "xsusldtrk",
                    [(int_x86_xsusldtrk)]>, XD;
  def XRESLDTRK : I<0x01, MRM_E9, (outs), (ins), "xresldtrk",
                    [(int_x86_xresldtrk)]>, XD;
}
  2648. //===----------------------------------------------------------------------===//
  2649. // UINTR Instructions
  2650. //
// UINTR (user interrupts, 64-bit only): UIRET returns from a user-interrupt
// handler; CLUI/STUI clear/set the user interrupt flag; SENDUIPI sends a
// user IPI; TESTUI reads the user interrupt flag into EFLAGS.
let Predicates = [HasUINTR, In64BitMode], SchedRW = [WriteSystem] in {
  def UIRET : I<0x01, MRM_EC, (outs), (ins), "uiret",
                []>, XS;
  def CLUI : I<0x01, MRM_EE, (outs), (ins), "clui",
               [(int_x86_clui)]>, XS;
  def STUI : I<0x01, MRM_EF, (outs), (ins), "stui",
               [(int_x86_stui)]>, XS;

  def SENDUIPI : I<0xC7, MRM6r, (outs), (ins GR64:$arg), "senduipi\t$arg",
                   [(int_x86_senduipi GR64:$arg)]>, XS;

  let Defs = [EFLAGS] in
  def TESTUI : I<0x01, MRM_ED, (outs), (ins), "testui",
                 [(set EFLAGS, (X86testui))]>, XS;
}
  2664. //===----------------------------------------------------------------------===//
  2665. // Pattern fragments to auto generate TBM instructions.
  2666. //===----------------------------------------------------------------------===//
// Selection patterns for the single-source TBM identities:
//   BLCFILL = x & (x+1)    BLCI  = x | ~(x+1)   BLCIC  = ~x & (x+1)
//   BLCMSK  = x ^ (x+1)    BLCS  = x | (x+1)    BLSFILL = x | (x-1)
//   BLSIC   = ~x | (x-1)   T1MSKC = ~x | (x+1)  TZMSK   = ~x & (x-1)
let Predicates = [HasTBM] in {
  // FIXME: patterns for the load versions are not implemented
  def : Pat<(and GR32:$src, (add GR32:$src, 1)),
            (BLCFILL32rr GR32:$src)>;
  def : Pat<(and GR64:$src, (add GR64:$src, 1)),
            (BLCFILL64rr GR64:$src)>;

  def : Pat<(or GR32:$src, (not (add GR32:$src, 1))),
            (BLCI32rr GR32:$src)>;
  def : Pat<(or GR64:$src, (not (add GR64:$src, 1))),
            (BLCI64rr GR64:$src)>;

  // Extra patterns because opt can optimize the above patterns to this
  // (~(x+1) folds to (-2 - x)).
  def : Pat<(or GR32:$src, (sub -2, GR32:$src)),
            (BLCI32rr GR32:$src)>;
  def : Pat<(or GR64:$src, (sub -2, GR64:$src)),
            (BLCI64rr GR64:$src)>;

  def : Pat<(and (not GR32:$src), (add GR32:$src, 1)),
            (BLCIC32rr GR32:$src)>;
  def : Pat<(and (not GR64:$src), (add GR64:$src, 1)),
            (BLCIC64rr GR64:$src)>;

  def : Pat<(xor GR32:$src, (add GR32:$src, 1)),
            (BLCMSK32rr GR32:$src)>;
  def : Pat<(xor GR64:$src, (add GR64:$src, 1)),
            (BLCMSK64rr GR64:$src)>;

  def : Pat<(or GR32:$src, (add GR32:$src, 1)),
            (BLCS32rr GR32:$src)>;
  def : Pat<(or GR64:$src, (add GR64:$src, 1)),
            (BLCS64rr GR64:$src)>;

  def : Pat<(or GR32:$src, (add GR32:$src, -1)),
            (BLSFILL32rr GR32:$src)>;
  def : Pat<(or GR64:$src, (add GR64:$src, -1)),
            (BLSFILL64rr GR64:$src)>;

  def : Pat<(or (not GR32:$src), (add GR32:$src, -1)),
            (BLSIC32rr GR32:$src)>;
  def : Pat<(or (not GR64:$src), (add GR64:$src, -1)),
            (BLSIC64rr GR64:$src)>;

  def : Pat<(or (not GR32:$src), (add GR32:$src, 1)),
            (T1MSKC32rr GR32:$src)>;
  def : Pat<(or (not GR64:$src), (add GR64:$src, 1)),
            (T1MSKC64rr GR64:$src)>;

  def : Pat<(and (not GR32:$src), (add GR32:$src, -1)),
            (TZMSK32rr GR32:$src)>;
  def : Pat<(and (not GR64:$src), (add GR64:$src, -1)),
            (TZMSK64rr GR64:$src)>;

  // Patterns to match flag producing ops (only when CF is unused; see the
  // *_flag_nocf fragments defined earlier).
  def : Pat<(and_flag_nocf GR32:$src, (add GR32:$src, 1)),
            (BLCFILL32rr GR32:$src)>;
  def : Pat<(and_flag_nocf GR64:$src, (add GR64:$src, 1)),
            (BLCFILL64rr GR64:$src)>;

  def : Pat<(or_flag_nocf GR32:$src, (not (add GR32:$src, 1))),
            (BLCI32rr GR32:$src)>;
  def : Pat<(or_flag_nocf GR64:$src, (not (add GR64:$src, 1))),
            (BLCI64rr GR64:$src)>;

  // Extra patterns because opt can optimize the above patterns to this.
  def : Pat<(or_flag_nocf GR32:$src, (sub -2, GR32:$src)),
            (BLCI32rr GR32:$src)>;
  def : Pat<(or_flag_nocf GR64:$src, (sub -2, GR64:$src)),
            (BLCI64rr GR64:$src)>;

  def : Pat<(and_flag_nocf (not GR32:$src), (add GR32:$src, 1)),
            (BLCIC32rr GR32:$src)>;
  def : Pat<(and_flag_nocf (not GR64:$src), (add GR64:$src, 1)),
            (BLCIC64rr GR64:$src)>;

  def : Pat<(xor_flag_nocf GR32:$src, (add GR32:$src, 1)),
            (BLCMSK32rr GR32:$src)>;
  def : Pat<(xor_flag_nocf GR64:$src, (add GR64:$src, 1)),
            (BLCMSK64rr GR64:$src)>;

  def : Pat<(or_flag_nocf GR32:$src, (add GR32:$src, 1)),
            (BLCS32rr GR32:$src)>;
  def : Pat<(or_flag_nocf GR64:$src, (add GR64:$src, 1)),
            (BLCS64rr GR64:$src)>;

  def : Pat<(or_flag_nocf GR32:$src, (add GR32:$src, -1)),
            (BLSFILL32rr GR32:$src)>;
  def : Pat<(or_flag_nocf GR64:$src, (add GR64:$src, -1)),
            (BLSFILL64rr GR64:$src)>;

  def : Pat<(or_flag_nocf (not GR32:$src), (add GR32:$src, -1)),
            (BLSIC32rr GR32:$src)>;
  def : Pat<(or_flag_nocf (not GR64:$src), (add GR64:$src, -1)),
            (BLSIC64rr GR64:$src)>;

  def : Pat<(or_flag_nocf (not GR32:$src), (add GR32:$src, 1)),
            (T1MSKC32rr GR32:$src)>;
  def : Pat<(or_flag_nocf (not GR64:$src), (add GR64:$src, 1)),
            (T1MSKC64rr GR64:$src)>;

  def : Pat<(and_flag_nocf (not GR32:$src), (add GR32:$src, -1)),
            (TZMSK32rr GR32:$src)>;
  def : Pat<(and_flag_nocf (not GR64:$src), (add GR64:$src, -1)),
            (TZMSK64rr GR64:$src)>;
} // HasTBM
  2753. //===----------------------------------------------------------------------===//
  2754. // Memory Instructions
  2755. //
// Cache-line maintenance instructions: optimized flush (CLFLUSHOPT),
// write-back without invalidate (CLWB), and demote to a farther cache
// level (CLDEMOTE). All take a byte-granular memory operand.
let Predicates = [HasCLFLUSHOPT], SchedRW = [WriteLoad] in
def CLFLUSHOPT : I<0xAE, MRM7m, (outs), (ins i8mem:$src),
                   "clflushopt\t$src", [(int_x86_clflushopt addr:$src)]>, PD;

let Predicates = [HasCLWB], SchedRW = [WriteLoad] in
def CLWB : I<0xAE, MRM6m, (outs), (ins i8mem:$src), "clwb\t$src",
             [(int_x86_clwb addr:$src)]>, PD, NotMemoryFoldable;

let Predicates = [HasCLDEMOTE], SchedRW = [WriteLoad] in
def CLDEMOTE : I<0x1C, MRM0m, (outs), (ins i8mem:$src), "cldemote\t$src",
                 [(int_x86_cldemote addr:$src)]>, PS;
  2765. //===----------------------------------------------------------------------===//
  2766. // Subsystems.
  2767. //===----------------------------------------------------------------------===//
  2768. include "X86InstrArithmetic.td"
  2769. include "X86InstrCMovSetCC.td"
  2770. include "X86InstrExtension.td"
  2771. include "X86InstrControl.td"
  2772. include "X86InstrShiftRotate.td"
  2773. // X87 Floating Point Stack.
  2774. include "X86InstrFPStack.td"
  2775. // SIMD support (SSE, MMX and AVX)
  2776. include "X86InstrFragmentsSIMD.td"
  2777. // FMA - Fused Multiply-Add support (requires FMA)
  2778. include "X86InstrFMA.td"
  2779. // XOP
  2780. include "X86InstrXOP.td"
  2781. // SSE, MMX and 3DNow! vector support.
  2782. include "X86InstrSSE.td"
  2783. include "X86InstrAVX512.td"
  2784. include "X86InstrMMX.td"
  2785. include "X86Instr3DNow.td"
  2786. include "X86InstrVMX.td"
  2787. include "X86InstrSVM.td"
  2788. include "X86InstrSNP.td"
  2789. include "X86InstrTSX.td"
  2790. include "X86InstrSGX.td"
  2791. include "X86InstrTDX.td"
  2792. // Key Locker instructions
  2793. include "X86InstrKL.td"
  2794. // AMX instructions
  2795. include "X86InstrAMX.td"
  2796. // System instructions.
  2797. include "X86InstrSystem.td"
  2798. // Compiler Pseudo Instructions and Pat Patterns
  2799. include "X86InstrCompiler.td"
  2800. include "X86InstrVecCompiler.td"
//===----------------------------------------------------------------------===//
// Assembler Mnemonic Aliases
//===----------------------------------------------------------------------===//
// In AT&T syntax, suffix-less "call" picks the word/long/quad form that
// matches the current mode.
def : MnemonicAlias<"call", "callw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"call", "calll", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"call", "callq", "att">, Requires<[In64BitMode]>;
// Intel-style sign-extension mnemonics map onto the AT&T spellings.
def : MnemonicAlias<"cbw", "cbtw", "att">;
def : MnemonicAlias<"cwde", "cwtl", "att">;
def : MnemonicAlias<"cwd", "cwtd", "att">;
def : MnemonicAlias<"cdq", "cltd", "att">;
def : MnemonicAlias<"cdqe", "cltq", "att">;
def : MnemonicAlias<"cqo", "cqto", "att">;
// In 64-bit mode lret maps to lretl; it is not ambiguous with lretq.
def : MnemonicAlias<"lret", "lretw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"lret", "lretl", "att">, Requires<[Not16BitMode]>;
// Suffixed spellings of "leave" fold into the plain mnemonic.
def : MnemonicAlias<"leavel", "leave", "att">, Requires<[Not64BitMode]>;
def : MnemonicAlias<"leaveq", "leave", "att">, Requires<[In64BitMode]>;
// loopz/loopnz are synonyms for loope/loopne (no variant given: all syntaxes).
def : MnemonicAlias<"loopz", "loope">;
def : MnemonicAlias<"loopnz", "loopne">;
// Suffix-less pop/popf pick the operand size implied by the current mode.
def : MnemonicAlias<"pop", "popw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"pop", "popl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"pop", "popq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"popf", "popfw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"popf", "popfl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"popf", "popfq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"popf", "popfq", "intel">, Requires<[In64BitMode]>;
def : MnemonicAlias<"popfd", "popfl", "att">;
def : MnemonicAlias<"popfw", "popf", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"popfw", "popf", "intel">, Requires<[In64BitMode]>;
// FIXME: This is wrong for "push reg". "push %bx" should turn into pushw in
// all modes. However: "push (addr)" and "push $42" should default to
// pushl/pushq depending on the current mode. Similar for "pop %bx"
def : MnemonicAlias<"push", "pushw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"push", "pushl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"push", "pushq", "att">, Requires<[In64BitMode]>;
// Suffix-less pushf picks the operand size implied by the current mode.
def : MnemonicAlias<"pushf", "pushfw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"pushf", "pushfl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"pushf", "pushfq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"pushf", "pushfq", "intel">, Requires<[In64BitMode]>;
def : MnemonicAlias<"pushfd", "pushfl", "att">;
def : MnemonicAlias<"pushfw", "pushf", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"pushfw", "pushf", "intel">, Requires<[In64BitMode]>;
// pusha/popa spellings; only defined outside 64-bit mode per the Requires.
def : MnemonicAlias<"popad", "popal", "intel">, Requires<[Not64BitMode]>;
def : MnemonicAlias<"pushad", "pushal", "intel">, Requires<[Not64BitMode]>;
def : MnemonicAlias<"popa", "popaw", "intel">, Requires<[In16BitMode]>;
def : MnemonicAlias<"pusha", "pushaw", "intel">, Requires<[In16BitMode]>;
def : MnemonicAlias<"popa", "popal", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"pusha", "pushal", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"popa", "popaw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"pusha", "pushaw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"popa", "popal", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"pusha", "pushal", "att">, Requires<[In32BitMode]>;
// rep-prefix synonyms.
def : MnemonicAlias<"repe", "rep">;
def : MnemonicAlias<"repz", "rep">;
def : MnemonicAlias<"repnz", "repne">;
// Suffix-less "ret" picks the operand size implied by the current mode.
def : MnemonicAlias<"ret", "retw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"ret", "retl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"ret", "retq", "att">, Requires<[In64BitMode]>;
// Apply 'ret' behavior to 'retn'
def : MnemonicAlias<"retn", "retw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"retn", "retl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"retn", "retq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"retn", "ret", "intel">;
// "sal" (and its suffixed forms) is a synonym for "shl".
def : MnemonicAlias<"sal", "shl", "intel">;
def : MnemonicAlias<"salb", "shlb", "att">;
def : MnemonicAlias<"salw", "shlw", "att">;
def : MnemonicAlias<"sall", "shll", "att">;
def : MnemonicAlias<"salq", "shlq", "att">;
// Alternate "smov" spellings of the movs string instructions.
def : MnemonicAlias<"smovb", "movsb", "att">;
def : MnemonicAlias<"smovw", "movsw", "att">;
def : MnemonicAlias<"smovl", "movsl", "att">;
def : MnemonicAlias<"smovq", "movsq", "att">;
// ud2a / ud2b{w,l,q} spellings of ud2 / ud1{w,l,q}.
def : MnemonicAlias<"ud2a", "ud2", "att">;
def : MnemonicAlias<"ud2bw", "ud1w", "att">;
def : MnemonicAlias<"ud2bl", "ud1l", "att">;
def : MnemonicAlias<"ud2bq", "ud1q", "att">;
def : MnemonicAlias<"verrw", "verr", "att">;
// MS recognizes 'xacquire'/'xrelease' as 'acquire'/'release'
def : MnemonicAlias<"acquire", "xacquire", "intel">;
def : MnemonicAlias<"release", "xrelease", "intel">;
// System instruction aliases.
// Suffix-less iret follows the current mode; sysret/sysexit default to the
// 'l' form.
def : MnemonicAlias<"iret", "iretw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"iret", "iretl", "att">, Requires<[Not16BitMode]>;
def : MnemonicAlias<"sysret", "sysretl", "att">;
def : MnemonicAlias<"sysexit", "sysexitl", "att">;
// Descriptor-table instructions: AT&T suffix-less forms pick the w/l/q
// variant matching the current mode.
def : MnemonicAlias<"lgdt", "lgdtw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"lgdt", "lgdtl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"lgdt", "lgdtq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"lidt", "lidtw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"lidt", "lidtl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"lidt", "lidtq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"sgdt", "sgdtw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"sgdt", "sgdtl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"sgdt", "sgdtq", "att">, Requires<[In64BitMode]>;
def : MnemonicAlias<"sidt", "sidtw", "att">, Requires<[In16BitMode]>;
def : MnemonicAlias<"sidt", "sidtl", "att">, Requires<[In32BitMode]>;
def : MnemonicAlias<"sidt", "sidtq", "att">, Requires<[In64BitMode]>;
// Intel syntax uses the 'w'/'d' suffixed forms in 16-/32-bit mode.
def : MnemonicAlias<"lgdt", "lgdtw", "intel">, Requires<[In16BitMode]>;
def : MnemonicAlias<"lgdt", "lgdtd", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"lidt", "lidtw", "intel">, Requires<[In16BitMode]>;
def : MnemonicAlias<"lidt", "lidtd", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"sgdt", "sgdtw", "intel">, Requires<[In16BitMode]>;
def : MnemonicAlias<"sgdt", "sgdtd", "intel">, Requires<[In32BitMode]>;
def : MnemonicAlias<"sidt", "sidtw", "intel">, Requires<[In16BitMode]>;
def : MnemonicAlias<"sidt", "sidtd", "intel">, Requires<[In32BitMode]>;
// Floating point stack aliases.
// Alternate condition-code spellings of fcmov.
def : MnemonicAlias<"fcmovz", "fcmove", "att">;
def : MnemonicAlias<"fcmova", "fcmovnbe", "att">;
def : MnemonicAlias<"fcmovnae", "fcmovb", "att">;
def : MnemonicAlias<"fcmovna", "fcmovbe", "att">;
def : MnemonicAlias<"fcmovae", "fcmovnb", "att">;
def : MnemonicAlias<"fcomip", "fcompi">;
// 'q'-suffixed spellings of the 64-bit integer load/store forms.
def : MnemonicAlias<"fildq", "fildll", "att">;
def : MnemonicAlias<"fistpq", "fistpll", "att">;
def : MnemonicAlias<"fisttpq", "fisttpll", "att">;
// Redundant 'w' suffix on the control/status-word instructions.
def : MnemonicAlias<"fldcww", "fldcw", "att">;
def : MnemonicAlias<"fnstcww", "fnstcw", "att">;
def : MnemonicAlias<"fnstsww", "fnstsw", "att">;
def : MnemonicAlias<"fucomip", "fucompi">;
def : MnemonicAlias<"fwait", "wait">;
// 'q'-suffixed spellings of the 64-bit state save/restore instructions.
def : MnemonicAlias<"fxsaveq", "fxsave64", "att">;
def : MnemonicAlias<"fxrstorq", "fxrstor64", "att">;
def : MnemonicAlias<"xsaveq", "xsave64", "att">;
def : MnemonicAlias<"xrstorq", "xrstor64", "att">;
def : MnemonicAlias<"xsaveoptq", "xsaveopt64", "att">;
def : MnemonicAlias<"xrstorsq", "xrstors64", "att">;
def : MnemonicAlias<"xsavecq", "xsavec64", "att">;
def : MnemonicAlias<"xsavesq", "xsaves64", "att">;
/// CondCodeAlias - A MnemonicAlias that rewrites only the condition-code
/// portion of a mnemonic (OldCond -> NewCond), keeping the surrounding
/// Prefix/Suffix text intact. VariantName is forwarded to MnemonicAlias.
class CondCodeAlias<string Prefix,string Suffix, string OldCond, string NewCond,
string VariantName>
: MnemonicAlias<!strconcat(Prefix, OldCond, Suffix),
!strconcat(Prefix, NewCond, Suffix), VariantName>;
/// IntegerCondCodeMnemonicAlias - This multiclass defines a bunch of
/// MnemonicAlias's that canonicalize the condition code in a mnemonic, for
/// example "setz" -> "sete".
/// V is the assembler-variant name passed through to MnemonicAlias.
multiclass IntegerCondCodeMnemonicAlias<string Prefix, string Suffix,
string V = ""> {
def C : CondCodeAlias<Prefix, Suffix, "c", "b", V>; // setc -> setb
def Z : CondCodeAlias<Prefix, Suffix, "z" , "e", V>; // setz -> sete
def NA : CondCodeAlias<Prefix, Suffix, "na", "be", V>; // setna -> setbe
def NB : CondCodeAlias<Prefix, Suffix, "nb", "ae", V>; // setnb -> setae
def NC : CondCodeAlias<Prefix, Suffix, "nc", "ae", V>; // setnc -> setae
def NG : CondCodeAlias<Prefix, Suffix, "ng", "le", V>; // setng -> setle
def NL : CondCodeAlias<Prefix, Suffix, "nl", "ge", V>; // setnl -> setge
def NZ : CondCodeAlias<Prefix, Suffix, "nz", "ne", V>; // setnz -> setne
def PE : CondCodeAlias<Prefix, Suffix, "pe", "p", V>; // setpe -> setp
def PO : CondCodeAlias<Prefix, Suffix, "po", "np", V>; // setpo -> setnp
def NAE : CondCodeAlias<Prefix, Suffix, "nae", "b", V>; // setnae -> setb
def NBE : CondCodeAlias<Prefix, Suffix, "nbe", "a", V>; // setnbe -> seta
def NGE : CondCodeAlias<Prefix, Suffix, "nge", "l", V>; // setnge -> setl
def NLE : CondCodeAlias<Prefix, Suffix, "nle", "g", V>; // setnle -> setg
}
// Aliases for set<CC>
defm : IntegerCondCodeMnemonicAlias<"set", "">;
// Aliases for j<CC>
defm : IntegerCondCodeMnemonicAlias<"j", "">;
// Aliases for cmov<CC>{w,l,q}
defm : IntegerCondCodeMnemonicAlias<"cmov", "w", "att">;
defm : IntegerCondCodeMnemonicAlias<"cmov", "l", "att">;
defm : IntegerCondCodeMnemonicAlias<"cmov", "q", "att">;
// No size suffix for intel-style asm.
defm : IntegerCondCodeMnemonicAlias<"cmov", "", "intel">;
//===----------------------------------------------------------------------===//
// Assembler Instruction Aliases
//===----------------------------------------------------------------------===//
// aad/aam default to base 10 if no operand is specified.
def : InstAlias<"aad", (AAD8i8 10)>, Requires<[Not64BitMode]>;
def : InstAlias<"aam", (AAM8i8 10)>, Requires<[Not64BitMode]>;
// Disambiguate the mem/imm form of bt-without-a-suffix as btl.
// Likewise for btc/btr/bts.
def : InstAlias<"bt\t{$imm, $mem|$mem, $imm}",
(BT32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">;
def : InstAlias<"btc\t{$imm, $mem|$mem, $imm}",
(BTC32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">;
def : InstAlias<"btr\t{$imm, $mem|$mem, $imm}",
(BTR32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">;
def : InstAlias<"bts\t{$imm, $mem|$mem, $imm}",
(BTS32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">;
// clr aliases. "clr <reg>" assembles as a self-xor of the register.
def : InstAlias<"clr{b}\t$reg", (XOR8rr GR8 :$reg, GR8 :$reg), 0>;
def : InstAlias<"clr{w}\t$reg", (XOR16rr GR16:$reg, GR16:$reg), 0>;
def : InstAlias<"clr{l}\t$reg", (XOR32rr GR32:$reg, GR32:$reg), 0>;
def : InstAlias<"clr{q}\t$reg", (XOR64rr GR64:$reg, GR64:$reg), 0>;
// lods aliases. Accept the destination being omitted because it's implicit
// in the mnemonic, or the mnemonic suffix being omitted because it's implicit
// in the destination.
def : InstAlias<"lodsb\t$src", (LODSB srcidx8:$src), 0>;
def : InstAlias<"lodsw\t$src", (LODSW srcidx16:$src), 0>;
def : InstAlias<"lods{l|d}\t$src", (LODSL srcidx32:$src), 0>;
def : InstAlias<"lodsq\t$src", (LODSQ srcidx64:$src), 0>, Requires<[In64BitMode]>;
def : InstAlias<"lods\t{$src, %al|al, $src}", (LODSB srcidx8:$src), 0>;
def : InstAlias<"lods\t{$src, %ax|ax, $src}", (LODSW srcidx16:$src), 0>;
def : InstAlias<"lods\t{$src, %eax|eax, $src}", (LODSL srcidx32:$src), 0>;
def : InstAlias<"lods\t{$src, %rax|rax, $src}", (LODSQ srcidx64:$src), 0>, Requires<[In64BitMode]>;
// Intel syntax: bare "lods <mem>" selects the size from the memory operand.
def : InstAlias<"lods\t$src", (LODSB srcidx8:$src), 0, "intel">;
def : InstAlias<"lods\t$src", (LODSW srcidx16:$src), 0, "intel">;
def : InstAlias<"lods\t$src", (LODSL srcidx32:$src), 0, "intel">;
def : InstAlias<"lods\t$src", (LODSQ srcidx64:$src), 0, "intel">, Requires<[In64BitMode]>;
// stos aliases. Accept the source being omitted because it's implicit in
// the mnemonic, or the mnemonic suffix being omitted because it's implicit
// in the source.
def : InstAlias<"stosb\t$dst", (STOSB dstidx8:$dst), 0>;
def : InstAlias<"stosw\t$dst", (STOSW dstidx16:$dst), 0>;
def : InstAlias<"stos{l|d}\t$dst", (STOSL dstidx32:$dst), 0>;
def : InstAlias<"stosq\t$dst", (STOSQ dstidx64:$dst), 0>, Requires<[In64BitMode]>;
def : InstAlias<"stos\t{%al, $dst|$dst, al}", (STOSB dstidx8:$dst), 0>;
def : InstAlias<"stos\t{%ax, $dst|$dst, ax}", (STOSW dstidx16:$dst), 0>;
def : InstAlias<"stos\t{%eax, $dst|$dst, eax}", (STOSL dstidx32:$dst), 0>;
def : InstAlias<"stos\t{%rax, $dst|$dst, rax}", (STOSQ dstidx64:$dst), 0>, Requires<[In64BitMode]>;
// Intel syntax: bare "stos <mem>" selects the size from the memory operand.
def : InstAlias<"stos\t$dst", (STOSB dstidx8:$dst), 0, "intel">;
def : InstAlias<"stos\t$dst", (STOSW dstidx16:$dst), 0, "intel">;
def : InstAlias<"stos\t$dst", (STOSL dstidx32:$dst), 0, "intel">;
def : InstAlias<"stos\t$dst", (STOSQ dstidx64:$dst), 0, "intel">, Requires<[In64BitMode]>;
// scas aliases. Accept the destination being omitted because it's implicit
// in the mnemonic, or the mnemonic suffix being omitted because it's implicit
// in the destination.
def : InstAlias<"scasb\t$dst", (SCASB dstidx8:$dst), 0>;
def : InstAlias<"scasw\t$dst", (SCASW dstidx16:$dst), 0>;
def : InstAlias<"scas{l|d}\t$dst", (SCASL dstidx32:$dst), 0>;
def : InstAlias<"scasq\t$dst", (SCASQ dstidx64:$dst), 0>, Requires<[In64BitMode]>;
def : InstAlias<"scas\t{$dst, %al|al, $dst}", (SCASB dstidx8:$dst), 0>;
def : InstAlias<"scas\t{$dst, %ax|ax, $dst}", (SCASW dstidx16:$dst), 0>;
def : InstAlias<"scas\t{$dst, %eax|eax, $dst}", (SCASL dstidx32:$dst), 0>;
def : InstAlias<"scas\t{$dst, %rax|rax, $dst}", (SCASQ dstidx64:$dst), 0>, Requires<[In64BitMode]>;
// Intel syntax: bare "scas <mem>" selects the size from the memory operand.
def : InstAlias<"scas\t$dst", (SCASB dstidx8:$dst), 0, "intel">;
def : InstAlias<"scas\t$dst", (SCASW dstidx16:$dst), 0, "intel">;
def : InstAlias<"scas\t$dst", (SCASL dstidx32:$dst), 0, "intel">;
def : InstAlias<"scas\t$dst", (SCASQ dstidx64:$dst), 0, "intel">, Requires<[In64BitMode]>;
// cmps aliases. Mnemonic suffix being omitted because it's implicit
// in the destination.
def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSB dstidx8:$dst, srcidx8:$src), 0, "intel">;
def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSW dstidx16:$dst, srcidx16:$src), 0, "intel">;
def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSL dstidx32:$dst, srcidx32:$src), 0, "intel">;
def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSQ dstidx64:$dst, srcidx64:$src), 0, "intel">, Requires<[In64BitMode]>;
// movs aliases. Mnemonic suffix being omitted because it's implicit
// in the destination.
def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSB dstidx8:$dst, srcidx8:$src), 0, "intel">;
def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSW dstidx16:$dst, srcidx16:$src), 0, "intel">;
def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSL dstidx32:$dst, srcidx32:$src), 0, "intel">;
def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSQ dstidx64:$dst, srcidx64:$src), 0, "intel">, Requires<[In64BitMode]>;
// div and idiv aliases for explicit A register.
// The accumulator operand is implicit in the instruction; accept it being
// written out explicitly, for both register and memory divisors.
def : InstAlias<"div{b}\t{$src, %al|al, $src}", (DIV8r GR8 :$src)>;
def : InstAlias<"div{w}\t{$src, %ax|ax, $src}", (DIV16r GR16:$src)>;
def : InstAlias<"div{l}\t{$src, %eax|eax, $src}", (DIV32r GR32:$src)>;
def : InstAlias<"div{q}\t{$src, %rax|rax, $src}", (DIV64r GR64:$src)>;
def : InstAlias<"div{b}\t{$src, %al|al, $src}", (DIV8m i8mem :$src)>;
def : InstAlias<"div{w}\t{$src, %ax|ax, $src}", (DIV16m i16mem:$src)>;
def : InstAlias<"div{l}\t{$src, %eax|eax, $src}", (DIV32m i32mem:$src)>;
def : InstAlias<"div{q}\t{$src, %rax|rax, $src}", (DIV64m i64mem:$src)>;
def : InstAlias<"idiv{b}\t{$src, %al|al, $src}", (IDIV8r GR8 :$src)>;
def : InstAlias<"idiv{w}\t{$src, %ax|ax, $src}", (IDIV16r GR16:$src)>;
def : InstAlias<"idiv{l}\t{$src, %eax|eax, $src}", (IDIV32r GR32:$src)>;
def : InstAlias<"idiv{q}\t{$src, %rax|rax, $src}", (IDIV64r GR64:$src)>;
def : InstAlias<"idiv{b}\t{$src, %al|al, $src}", (IDIV8m i8mem :$src)>;
def : InstAlias<"idiv{w}\t{$src, %ax|ax, $src}", (IDIV16m i16mem:$src)>;
def : InstAlias<"idiv{l}\t{$src, %eax|eax, $src}", (IDIV32m i32mem:$src)>;
def : InstAlias<"idiv{q}\t{$src, %rax|rax, $src}", (IDIV64m i64mem:$src)>;
  3058. // Various unary fpstack operations default to operating on ST1.
  3059. // For example, "fxch" -> "fxch %st(1)"
  3060. def : InstAlias<"faddp", (ADD_FPrST0 ST1), 0>;
  3061. def: InstAlias<"fadd", (ADD_FPrST0 ST1), 0>;
  3062. def : InstAlias<"fsub{|r}p", (SUBR_FPrST0 ST1), 0>;
  3063. def : InstAlias<"fsub{r|}p", (SUB_FPrST0 ST1), 0>;
  3064. def : InstAlias<"fmul", (MUL_FPrST0 ST1), 0>;
  3065. def : InstAlias<"fmulp", (MUL_FPrST0 ST1), 0>;
  3066. def : InstAlias<"fdiv{|r}p", (DIVR_FPrST0 ST1), 0>;
  3067. def : InstAlias<"fdiv{r|}p", (DIV_FPrST0 ST1), 0>;
  3068. def : InstAlias<"fxch", (XCH_F ST1), 0>;
  3069. def : InstAlias<"fcom", (COM_FST0r ST1), 0>;
  3070. def : InstAlias<"fcomp", (COMP_FST0r ST1), 0>;
  3071. def : InstAlias<"fcomi", (COM_FIr ST1), 0>;
  3072. def : InstAlias<"fcompi", (COM_FIPr ST1), 0>;
  3073. def : InstAlias<"fucom", (UCOM_Fr ST1), 0>;
  3074. def : InstAlias<"fucomp", (UCOM_FPr ST1), 0>;
  3075. def : InstAlias<"fucomi", (UCOM_FIr ST1), 0>;
  3076. def : InstAlias<"fucompi", (UCOM_FIPr ST1), 0>;
// Handle fmul/fadd/fsub/fdiv instructions with explicitly written st(0) op.
// For example, "fadd %st(4), %st(0)" -> "fadd %st(4)". We also disambiguate
// instructions like "fadd %st(0), %st(0)" as "fadd %st(0)" for consistency with
// gas.
/// FpUnaryAlias - Accepts "<Mnemonic> $op" and the explicit two-operand
/// "<Mnemonic> %st, %st" spelling for the given instruction. EmitAlias
/// controls whether the alias is used when printing.
multiclass FpUnaryAlias<string Mnemonic, Instruction Inst, bit EmitAlias = 1> {
def : InstAlias<!strconcat(Mnemonic, "\t$op"),
(Inst RSTi:$op), EmitAlias>;
def : InstAlias<!strconcat(Mnemonic, "\t{%st, %st|st, st}"),
(Inst ST0), EmitAlias>;
}
// Instantiations of FpUnaryAlias for the FP arithmetic and compare ops.
// All pass EmitAlias = 0: these are accepted by the parser, never printed.
defm : FpUnaryAlias<"fadd", ADD_FST0r, 0>;
defm : FpUnaryAlias<"faddp", ADD_FPrST0, 0>;
defm : FpUnaryAlias<"fsub", SUB_FST0r, 0>;
defm : FpUnaryAlias<"fsub{|r}p", SUBR_FPrST0, 0>;
defm : FpUnaryAlias<"fsubr", SUBR_FST0r, 0>;
defm : FpUnaryAlias<"fsub{r|}p", SUB_FPrST0, 0>;
defm : FpUnaryAlias<"fmul", MUL_FST0r, 0>;
defm : FpUnaryAlias<"fmulp", MUL_FPrST0, 0>;
defm : FpUnaryAlias<"fdiv", DIV_FST0r, 0>;
defm : FpUnaryAlias<"fdiv{|r}p", DIVR_FPrST0, 0>;
defm : FpUnaryAlias<"fdivr", DIVR_FST0r, 0>;
defm : FpUnaryAlias<"fdiv{r|}p", DIV_FPrST0, 0>;
defm : FpUnaryAlias<"fcomi", COM_FIr, 0>;
defm : FpUnaryAlias<"fucomi", UCOM_FIr, 0>;
defm : FpUnaryAlias<"fcompi", COM_FIPr, 0>;
defm : FpUnaryAlias<"fucompi", UCOM_FIPr, 0>;
// Handle "f{mulp,addp} $op, %st(0)" the same as "f{mulp,addp} $op", since they
// commute. We also allow fdiv[r]p/fsubrp even though they don't commute,
// solely because gas supports it.
def : InstAlias<"faddp\t{$op, %st|st, $op}", (ADD_FPrST0 RSTi:$op), 0>;
def : InstAlias<"fmulp\t{$op, %st|st, $op}", (MUL_FPrST0 RSTi:$op), 0>;
def : InstAlias<"fsub{|r}p\t{$op, %st|st, $op}", (SUBR_FPrST0 RSTi:$op), 0>;
def : InstAlias<"fsub{r|}p\t{$op, %st|st, $op}", (SUB_FPrST0 RSTi:$op), 0>;
def : InstAlias<"fdiv{|r}p\t{$op, %st|st, $op}", (DIVR_FPrST0 RSTi:$op), 0>;
def : InstAlias<"fdiv{r|}p\t{$op, %st|st, $op}", (DIV_FPrST0 RSTi:$op), 0>;
// Bare "fnstsw" maps to the register form.
def : InstAlias<"fnstsw" , (FNSTSW16r), 0>;
// lcall and ljmp aliases. This seems to be an odd mapping in 64-bit mode, but
// this is compatible with what GAS does.
def : InstAlias<"lcall\t$seg, $off", (FARCALL32i i32imm:$off, i16imm:$seg), 0>, Requires<[In32BitMode]>;
def : InstAlias<"ljmp\t$seg, $off", (FARJMP32i i32imm:$off, i16imm:$seg), 0>, Requires<[In32BitMode]>;
def : InstAlias<"lcall\t{*}$dst", (FARCALL32m opaquemem:$dst), 0>, Requires<[Not16BitMode]>;
def : InstAlias<"ljmp\t{*}$dst", (FARJMP32m opaquemem:$dst), 0>, Requires<[Not16BitMode]>;
def : InstAlias<"lcall\t$seg, $off", (FARCALL16i i16imm:$off, i16imm:$seg), 0>, Requires<[In16BitMode]>;
def : InstAlias<"ljmp\t$seg, $off", (FARJMP16i i16imm:$off, i16imm:$seg), 0>, Requires<[In16BitMode]>;
def : InstAlias<"lcall\t{*}$dst", (FARCALL16m opaquemem:$dst), 0>, Requires<[In16BitMode]>;
def : InstAlias<"ljmp\t{*}$dst", (FARJMP16m opaquemem:$dst), 0>, Requires<[In16BitMode]>;
// Indirect near jmp through memory: select the form matching the mode.
def : InstAlias<"jmp\t{*}$dst", (JMP64m i64mem:$dst), 0, "att">, Requires<[In64BitMode]>;
def : InstAlias<"jmp\t{*}$dst", (JMP32m i32mem:$dst), 0, "att">, Requires<[In32BitMode]>;
def : InstAlias<"jmp\t{*}$dst", (JMP16m i16mem:$dst), 0, "att">, Requires<[In16BitMode]>;
// "imul <imm>, B" is an alias for "imul <imm>, B, B".
def : InstAlias<"imul{w}\t{$imm, $r|$r, $imm}", (IMUL16rri GR16:$r, GR16:$r, i16imm:$imm), 0>;
def : InstAlias<"imul{w}\t{$imm, $r|$r, $imm}", (IMUL16rri8 GR16:$r, GR16:$r, i16i8imm:$imm), 0>;
def : InstAlias<"imul{l}\t{$imm, $r|$r, $imm}", (IMUL32rri GR32:$r, GR32:$r, i32imm:$imm), 0>;
def : InstAlias<"imul{l}\t{$imm, $r|$r, $imm}", (IMUL32rri8 GR32:$r, GR32:$r, i32i8imm:$imm), 0>;
def : InstAlias<"imul{q}\t{$imm, $r|$r, $imm}", (IMUL64rri32 GR64:$r, GR64:$r, i64i32imm:$imm), 0>;
def : InstAlias<"imul{q}\t{$imm, $r|$r, $imm}", (IMUL64rri8 GR64:$r, GR64:$r, i64i8imm:$imm), 0>;
// ins aliases. Accept the mnemonic suffix being omitted because it's implicit
// in the destination.
def : InstAlias<"ins\t{%dx, $dst|$dst, dx}", (INSB dstidx8:$dst), 0, "intel">;
def : InstAlias<"ins\t{%dx, $dst|$dst, dx}", (INSW dstidx16:$dst), 0, "intel">;
def : InstAlias<"ins\t{%dx, $dst|$dst, dx}", (INSL dstidx32:$dst), 0, "intel">;
// outs aliases. Accept the mnemonic suffix being omitted because it's implicit
// in the source.
def : InstAlias<"outs\t{$src, %dx|dx, $src}", (OUTSB srcidx8:$src), 0, "intel">;
def : InstAlias<"outs\t{$src, %dx|dx, $src}", (OUTSW srcidx16:$src), 0, "intel">;
def : InstAlias<"outs\t{$src, %dx|dx, $src}", (OUTSL srcidx32:$src), 0, "intel">;
// inb %dx -> inb %al, %dx
def : InstAlias<"inb\t{%dx|dx}", (IN8rr), 0>;
def : InstAlias<"inw\t{%dx|dx}", (IN16rr), 0>;
def : InstAlias<"inl\t{%dx|dx}", (IN32rr), 0>;
// Immediate-port forms of in.
def : InstAlias<"inb\t$port", (IN8ri u8imm:$port), 0>;
def : InstAlias<"inw\t$port", (IN16ri u8imm:$port), 0>;
def : InstAlias<"inl\t$port", (IN32ri u8imm:$port), 0>;
// jmp and call aliases for lcall and ljmp. jmp $42,$5 -> ljmp
def : InstAlias<"call\t$seg, $off", (FARCALL16i i16imm:$off, i16imm:$seg)>, Requires<[In16BitMode]>;
def : InstAlias<"jmp\t$seg, $off", (FARJMP16i i16imm:$off, i16imm:$seg)>, Requires<[In16BitMode]>;
def : InstAlias<"call\t$seg, $off", (FARCALL32i i32imm:$off, i16imm:$seg)>, Requires<[In32BitMode]>;
def : InstAlias<"jmp\t$seg, $off", (FARJMP32i i32imm:$off, i16imm:$seg)>, Requires<[In32BitMode]>;
def : InstAlias<"callw\t$seg, $off", (FARCALL16i i16imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>;
def : InstAlias<"jmpw\t$seg, $off", (FARJMP16i i16imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>;
def : InstAlias<"calll\t$seg, $off", (FARCALL32i i32imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>;
def : InstAlias<"jmpl\t$seg, $off", (FARJMP32i i32imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>;
// Match 'movq <largeimm>, <reg>' as an alias for movabsq.
def : InstAlias<"mov{q}\t{$imm, $reg|$reg, $imm}", (MOV64ri GR64:$reg, i64imm:$imm), 0>;
// Match 'movd GR64, MMX' as an alias for movq to be compatible with gas,
// which supports this due to an old AMD documentation bug when 64-bit mode was
// created.
def : InstAlias<"movd\t{$src, $dst|$dst, $src}",
(MMX_MOVD64to64rr VR64:$dst, GR64:$src), 0>;
def : InstAlias<"movd\t{$src, $dst|$dst, $src}",
(MMX_MOVD64from64rr GR64:$dst, VR64:$src), 0>;
// movsx aliases
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX16rr8 GR16:$dst, GR8:$src), 0, "att">;
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX16rm8 GR16:$dst, i8mem:$src), 0, "att">;
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX32rr8 GR32:$dst, GR8:$src), 0, "att">;
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX32rr16 GR32:$dst, GR16:$src), 0, "att">;
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX64rr8 GR64:$dst, GR8:$src), 0, "att">;
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX64rr16 GR64:$dst, GR16:$src), 0, "att">;
def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX64rr32 GR64:$dst, GR32:$src), 0, "att">;
// movzx aliases
def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX16rr8 GR16:$dst, GR8:$src), 0, "att">;
def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX16rm8 GR16:$dst, i8mem:$src), 0, "att">;
def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX32rr8 GR32:$dst, GR8:$src), 0, "att">;
def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX32rr16 GR32:$dst, GR16:$src), 0, "att">;
def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX64rr8 GR64:$dst, GR8:$src), 0, "att">;
def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX64rr16 GR64:$dst, GR16:$src), 0, "att">;
// Note: No GR32->GR64 movzx form.
// outb %dx -> outb %al, %dx
def : InstAlias<"outb\t{%dx|dx}", (OUT8rr), 0>;
def : InstAlias<"outw\t{%dx|dx}", (OUT16rr), 0>;
def : InstAlias<"outl\t{%dx|dx}", (OUT32rr), 0>;
// Immediate-port forms of out.
def : InstAlias<"outb\t$port", (OUT8ir u8imm:$port), 0>;
def : InstAlias<"outw\t$port", (OUT16ir u8imm:$port), 0>;
def : InstAlias<"outl\t$port", (OUT32ir u8imm:$port), 0>;
// 'sldt <mem>' can be encoded with either sldtw or sldtq with the same
// effect (both store to a 16-bit mem). Force to sldtw to avoid ambiguity
// errors, since its encoding is the most compact.
def : InstAlias<"sldt $mem", (SLDT16m i16mem:$mem), 0>;
// shld/shrd op,op -> shld op, op, CL
def : InstAlias<"shld{w}\t{$r2, $r1|$r1, $r2}", (SHLD16rrCL GR16:$r1, GR16:$r2), 0>;
def : InstAlias<"shld{l}\t{$r2, $r1|$r1, $r2}", (SHLD32rrCL GR32:$r1, GR32:$r2), 0>;
def : InstAlias<"shld{q}\t{$r2, $r1|$r1, $r2}", (SHLD64rrCL GR64:$r1, GR64:$r2), 0>;
def : InstAlias<"shrd{w}\t{$r2, $r1|$r1, $r2}", (SHRD16rrCL GR16:$r1, GR16:$r2), 0>;
def : InstAlias<"shrd{l}\t{$r2, $r1|$r1, $r2}", (SHRD32rrCL GR32:$r1, GR32:$r2), 0>;
def : InstAlias<"shrd{q}\t{$r2, $r1|$r1, $r2}", (SHRD64rrCL GR64:$r1, GR64:$r2), 0>;
// Same, with a memory destination.
def : InstAlias<"shld{w}\t{$reg, $mem|$mem, $reg}", (SHLD16mrCL i16mem:$mem, GR16:$reg), 0>;
def : InstAlias<"shld{l}\t{$reg, $mem|$mem, $reg}", (SHLD32mrCL i32mem:$mem, GR32:$reg), 0>;
def : InstAlias<"shld{q}\t{$reg, $mem|$mem, $reg}", (SHLD64mrCL i64mem:$mem, GR64:$reg), 0>;
def : InstAlias<"shrd{w}\t{$reg, $mem|$mem, $reg}", (SHRD16mrCL i16mem:$mem, GR16:$reg), 0>;
def : InstAlias<"shrd{l}\t{$reg, $mem|$mem, $reg}", (SHRD32mrCL i32mem:$mem, GR32:$reg), 0>;
def : InstAlias<"shrd{q}\t{$reg, $mem|$mem, $reg}", (SHRD64mrCL i64mem:$mem, GR64:$reg), 0>;
  3208. /* FIXME: This is disabled because the asm matcher is currently incapable of
  3209. * matching a fixed immediate like $1.
  3210. // "shl X, $1" is an alias for "shl X".
  3211. multiclass ShiftRotateByOneAlias<string Mnemonic, string Opc> {
  3212. def : InstAlias<!strconcat(Mnemonic, "b $op, $$1"),
  3213. (!cast<Instruction>(!strconcat(Opc, "8r1")) GR8:$op)>;
  3214. def : InstAlias<!strconcat(Mnemonic, "w $op, $$1"),
  3215. (!cast<Instruction>(!strconcat(Opc, "16r1")) GR16:$op)>;
  3216. def : InstAlias<!strconcat(Mnemonic, "l $op, $$1"),
  3217. (!cast<Instruction>(!strconcat(Opc, "32r1")) GR32:$op)>;
  3218. def : InstAlias<!strconcat(Mnemonic, "q $op, $$1"),
  3219. (!cast<Instruction>(!strconcat(Opc, "64r1")) GR64:$op)>;
  3220. def : InstAlias<!strconcat(Mnemonic, "b $op, $$1"),
  3221. (!cast<Instruction>(!strconcat(Opc, "8m1")) i8mem:$op)>;
  3222. def : InstAlias<!strconcat(Mnemonic, "w $op, $$1"),
  3223. (!cast<Instruction>(!strconcat(Opc, "16m1")) i16mem:$op)>;
  3224. def : InstAlias<!strconcat(Mnemonic, "l $op, $$1"),
  3225. (!cast<Instruction>(!strconcat(Opc, "32m1")) i32mem:$op)>;
  3226. def : InstAlias<!strconcat(Mnemonic, "q $op, $$1"),
  3227. (!cast<Instruction>(!strconcat(Opc, "64m1")) i64mem:$op)>;
  3228. }
  3229. defm : ShiftRotateByOneAlias<"rcl", "RCL">;
  3230. defm : ShiftRotateByOneAlias<"rcr", "RCR">;
  3231. defm : ShiftRotateByOneAlias<"rol", "ROL">;
  3232. defm : ShiftRotateByOneAlias<"ror", "ROR">;
  3233. FIXME */
// test: We accept "testX <reg>, <mem>" and "testX <mem>, <reg>" as synonyms.
def : InstAlias<"test{b}\t{$mem, $val|$val, $mem}",
(TEST8mr i8mem :$mem, GR8 :$val), 0>;
def : InstAlias<"test{w}\t{$mem, $val|$val, $mem}",
(TEST16mr i16mem:$mem, GR16:$val), 0>;
def : InstAlias<"test{l}\t{$mem, $val|$val, $mem}",
(TEST32mr i32mem:$mem, GR32:$val), 0>;
def : InstAlias<"test{q}\t{$mem, $val|$val, $mem}",
(TEST64mr i64mem:$mem, GR64:$val), 0>;
// xchg: We accept "xchgX <reg>, <mem>" and "xchgX <mem>, <reg>" as synonyms.
def : InstAlias<"xchg{b}\t{$mem, $val|$val, $mem}",
(XCHG8rm GR8 :$val, i8mem :$mem), 0>;
def : InstAlias<"xchg{w}\t{$mem, $val|$val, $mem}",
(XCHG16rm GR16:$val, i16mem:$mem), 0>;
def : InstAlias<"xchg{l}\t{$mem, $val|$val, $mem}",
(XCHG32rm GR32:$val, i32mem:$mem), 0>;
def : InstAlias<"xchg{q}\t{$mem, $val|$val, $mem}",
(XCHG64rm GR64:$val, i64mem:$mem), 0>;
// xchg: We accept "xchgX <reg>, %eax" and "xchgX %eax, <reg>" as synonyms.
def : InstAlias<"xchg{w}\t{%ax, $src|$src, ax}", (XCHG16ar GR16:$src), 0>;
def : InstAlias<"xchg{l}\t{%eax, $src|$src, eax}", (XCHG32ar GR32:$src), 0>;
def : InstAlias<"xchg{q}\t{%rax, $src|$src, rax}", (XCHG64ar GR64:$src), 0>;
// In 64-bit mode, xchg %eax, %eax can't be encoded with the 0x90 opcode we
// would get by default because it's defined as NOP. But xchg %eax, %eax implies
// implicit zeroing of the upper 32 bits. So alias to the longer encoding.
def : InstAlias<"xchg{l}\t{%eax, %eax|eax, eax}",
(XCHG32rr EAX, EAX), 0>, Requires<[In64BitMode]>;
// xchg %rax, %rax is a nop in x86-64 and can be encoded as such. Without this
// we emit an unneeded REX.w prefix.
def : InstAlias<"xchg{q}\t{%rax, %rax|rax, rax}", (NOOP), 0>;
// These aliases exist to get the parser to prioritize matching 8-bit
// immediate encodings over matching the implicit ax/eax/rax encodings. By
// explicitly mentioning the A register here, these entries will be ordered
// first due to the more explicit immediate type.
// 16-bit forms (AX).
def : InstAlias<"adc{w}\t{$imm, %ax|ax, $imm}", (ADC16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"add{w}\t{$imm, %ax|ax, $imm}", (ADD16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"and{w}\t{$imm, %ax|ax, $imm}", (AND16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"cmp{w}\t{$imm, %ax|ax, $imm}", (CMP16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"or{w}\t{$imm, %ax|ax, $imm}", (OR16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"sbb{w}\t{$imm, %ax|ax, $imm}", (SBB16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"sub{w}\t{$imm, %ax|ax, $imm}", (SUB16ri8 AX, i16i8imm:$imm), 0>;
def : InstAlias<"xor{w}\t{$imm, %ax|ax, $imm}", (XOR16ri8 AX, i16i8imm:$imm), 0>;
// 32-bit forms (EAX).
def : InstAlias<"adc{l}\t{$imm, %eax|eax, $imm}", (ADC32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"add{l}\t{$imm, %eax|eax, $imm}", (ADD32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"and{l}\t{$imm, %eax|eax, $imm}", (AND32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"cmp{l}\t{$imm, %eax|eax, $imm}", (CMP32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"or{l}\t{$imm, %eax|eax, $imm}", (OR32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"sbb{l}\t{$imm, %eax|eax, $imm}", (SBB32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"sub{l}\t{$imm, %eax|eax, $imm}", (SUB32ri8 EAX, i32i8imm:$imm), 0>;
def : InstAlias<"xor{l}\t{$imm, %eax|eax, $imm}", (XOR32ri8 EAX, i32i8imm:$imm), 0>;
// 64-bit forms (RAX).
def : InstAlias<"adc{q}\t{$imm, %rax|rax, $imm}", (ADC64ri8 RAX, i64i8imm:$imm), 0>;
def : InstAlias<"add{q}\t{$imm, %rax|rax, $imm}", (ADD64ri8 RAX, i64i8imm:$imm), 0>;
def : InstAlias<"and{q}\t{$imm, %rax|rax, $imm}", (AND64ri8 RAX, i64i8imm:$imm), 0>;
def : InstAlias<"cmp{q}\t{$imm, %rax|rax, $imm}", (CMP64ri8 RAX, i64i8imm:$imm), 0>;
def : InstAlias<"or{q}\t{$imm, %rax|rax, $imm}", (OR64ri8 RAX, i64i8imm:$imm), 0>;
def : InstAlias<"sbb{q}\t{$imm, %rax|rax, $imm}", (SBB64ri8 RAX, i64i8imm:$imm), 0>;
  3290. def : InstAlias<"sub{q}\t{$imm, %rax|rax, $imm}", (SUB64ri8 RAX, i64i8imm:$imm), 0>;
  3291. def : InstAlias<"xor{q}\t{$imm, %rax|rax, $imm}", (XOR64ri8 RAX, i64i8imm:$imm), 0>;