// NOTE(review): this span was extraction residue — the line-number gutter
// ("1…7955") and size banner of a source-viewer page for AArch64AsmParser.cpp,
// pasted as content. Removed as non-content; no program text was lost here.
//==- AArch64AsmParser.cpp - Parse AArch64 assembly to MCInst instructions -==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
  8. #include "AArch64InstrInfo.h"
  9. #include "MCTargetDesc/AArch64AddressingModes.h"
  10. #include "MCTargetDesc/AArch64InstPrinter.h"
  11. #include "MCTargetDesc/AArch64MCExpr.h"
  12. #include "MCTargetDesc/AArch64MCTargetDesc.h"
  13. #include "MCTargetDesc/AArch64TargetStreamer.h"
  14. #include "TargetInfo/AArch64TargetInfo.h"
  15. #include "Utils/AArch64BaseInfo.h"
  16. #include "llvm/ADT/APFloat.h"
  17. #include "llvm/ADT/APInt.h"
  18. #include "llvm/ADT/ArrayRef.h"
  19. #include "llvm/ADT/STLExtras.h"
  20. #include "llvm/ADT/SmallSet.h"
  21. #include "llvm/ADT/SmallVector.h"
  22. #include "llvm/ADT/StringExtras.h"
  23. #include "llvm/ADT/StringMap.h"
  24. #include "llvm/ADT/StringRef.h"
  25. #include "llvm/ADT/StringSwitch.h"
  26. #include "llvm/ADT/Twine.h"
  27. #include "llvm/MC/MCContext.h"
  28. #include "llvm/MC/MCExpr.h"
  29. #include "llvm/MC/MCInst.h"
  30. #include "llvm/MC/MCLinkerOptimizationHint.h"
  31. #include "llvm/MC/MCObjectFileInfo.h"
  32. #include "llvm/MC/MCParser/MCAsmLexer.h"
  33. #include "llvm/MC/MCParser/MCAsmParser.h"
  34. #include "llvm/MC/MCParser/MCAsmParserExtension.h"
  35. #include "llvm/MC/MCParser/MCParsedAsmOperand.h"
  36. #include "llvm/MC/MCParser/MCTargetAsmParser.h"
  37. #include "llvm/MC/MCRegisterInfo.h"
  38. #include "llvm/MC/MCStreamer.h"
  39. #include "llvm/MC/MCSubtargetInfo.h"
  40. #include "llvm/MC/MCSymbol.h"
  41. #include "llvm/MC/MCTargetOptions.h"
  42. #include "llvm/MC/MCValue.h"
  43. #include "llvm/MC/SubtargetFeature.h"
  44. #include "llvm/MC/TargetRegistry.h"
  45. #include "llvm/Support/Casting.h"
  46. #include "llvm/Support/Compiler.h"
  47. #include "llvm/Support/ErrorHandling.h"
  48. #include "llvm/Support/MathExtras.h"
  49. #include "llvm/Support/SMLoc.h"
  50. #include "llvm/Support/AArch64TargetParser.h"
  51. #include "llvm/Support/TargetParser.h"
  52. #include "llvm/Support/raw_ostream.h"
  53. #include <cassert>
  54. #include <cctype>
  55. #include <cstdint>
  56. #include <cstdio>
  57. #include <optional>
  58. #include <string>
  59. #include <tuple>
  60. #include <utility>
  61. #include <vector>
  62. using namespace llvm;
  63. namespace {
/// The broad families of registers the parser distinguishes while matching
/// register names and vector suffixes.
enum class RegKind {
  Scalar,                // General-purpose / FP scalar registers.
  NeonVector,            // NEON V-registers (v0-v31).
  SVEDataVector,         // SVE Z-registers.
  SVEPredicateAsCounter, // SVE predicate used as a counter (PN form).
  SVEPredicateVector,    // SVE P-registers.
  Matrix,                // SME ZA storage.
  LookupTable            // SME2 lookup-table register (ZT).
};
/// How an SME matrix operand addresses ZA: the whole array, a tile, or a
/// single row/column slice.
enum class MatrixKind { Array, Tile, Row, Col };
/// Describes how a parsed register must relate to the register class the
/// instruction expects: exactly equal, or equal to its super-/sub-register
/// (e.g. for GPR64as32 / GPR32as64 operands).
enum RegConstraintEqualityTy {
  EqualsReg,
  EqualsSuperReg,
  EqualsSubReg
};
/// Target-specific assembly parser for AArch64. Parses instructions,
/// registers, operands and AArch64-specific directives (.arch, .req,
/// SEH/CFI unwind directives, etc.) into MCInsts.
class AArch64AsmParser : public MCTargetAsmParser {
private:
  StringRef Mnemonic; ///< Instruction mnemonic.

  // Map of register aliases registers via the .req directive.
  StringMap<std::pair<RegKind, unsigned>> RegisterReqs;

  /// Captures the state established by a MOVPRFX instruction (destination
  /// register and, for the predicated forms, governing predicate and
  /// destructive element size) so the following instruction can be
  /// validated against it.
  class PrefixInfo {
  public:
    /// Build a PrefixInfo from \p Inst. Only MOVPRFX opcodes produce an
    /// active prefix; any other opcode yields an inactive one.
    static PrefixInfo CreateFromInst(const MCInst &Inst, uint64_t TSFlags) {
      PrefixInfo Prefix;
      switch (Inst.getOpcode()) {
      case AArch64::MOVPRFX_ZZ:
        // Unpredicated movprfx: only the destination is recorded.
        Prefix.Active = true;
        Prefix.Dst = Inst.getOperand(0).getReg();
        break;
      case AArch64::MOVPRFX_ZPmZ_B:
      case AArch64::MOVPRFX_ZPmZ_H:
      case AArch64::MOVPRFX_ZPmZ_S:
      case AArch64::MOVPRFX_ZPmZ_D:
        // Merging predicated movprfx: predicate is operand 2.
        Prefix.Active = true;
        Prefix.Predicated = true;
        Prefix.ElementSize = TSFlags & AArch64::ElementSizeMask;
        assert(Prefix.ElementSize != AArch64::ElementSizeNone &&
               "No destructive element size set for movprfx");
        Prefix.Dst = Inst.getOperand(0).getReg();
        Prefix.Pg = Inst.getOperand(2).getReg();
        break;
      case AArch64::MOVPRFX_ZPzZ_B:
      case AArch64::MOVPRFX_ZPzZ_H:
      case AArch64::MOVPRFX_ZPzZ_S:
      case AArch64::MOVPRFX_ZPzZ_D:
        // Zeroing predicated movprfx: predicate is operand 1 here.
        Prefix.Active = true;
        Prefix.Predicated = true;
        Prefix.ElementSize = TSFlags & AArch64::ElementSizeMask;
        assert(Prefix.ElementSize != AArch64::ElementSizeNone &&
               "No destructive element size set for movprfx");
        Prefix.Dst = Inst.getOperand(0).getReg();
        Prefix.Pg = Inst.getOperand(1).getReg();
        break;
      default:
        break;
      }
      return Prefix;
    }

    PrefixInfo() = default;
    bool isActive() const { return Active; }
    bool isPredicated() const { return Predicated; }
    // ElementSize/Pg are only meaningful for the predicated forms.
    unsigned getElementSize() const {
      assert(Predicated);
      return ElementSize;
    }
    unsigned getDstReg() const { return Dst; }
    unsigned getPgReg() const {
      assert(Predicated);
      return Pg;
    }

  private:
    bool Active = false;
    bool Predicated = false;
    unsigned ElementSize;
    unsigned Dst;
    unsigned Pg;
  } NextPrefix;

  AArch64TargetStreamer &getTargetStreamer() {
    MCTargetStreamer &TS = *getParser().getStreamer().getTargetStreamer();
    return static_cast<AArch64TargetStreamer &>(TS);
  }

  /// Location of the current lexer token.
  SMLoc getLoc() const { return getParser().getTok().getLoc(); }

  // Instruction-alias and operand parsing helpers.
  bool parseSysAlias(StringRef Name, SMLoc NameLoc, OperandVector &Operands);
  bool parseSyspAlias(StringRef Name, SMLoc NameLoc, OperandVector &Operands);
  void createSysAlias(uint16_t Encoding, OperandVector &Operands, SMLoc S);
  AArch64CC::CondCode parseCondCodeString(StringRef Cond,
                                          std::string &Suggestion);
  bool parseCondCode(OperandVector &Operands, bool invertCondCode);
  unsigned matchRegisterNameAlias(StringRef Name, RegKind Kind);
  bool parseRegister(OperandVector &Operands);
  bool parseSymbolicImmVal(const MCExpr *&ImmVal);
  bool parseNeonVectorList(OperandVector &Operands);
  bool parseOptionalMulOperand(OperandVector &Operands);
  bool parseOptionalVGOperand(OperandVector &Operands, StringRef &VecGroup);
  bool parseKeywordOperand(OperandVector &Operands);
  bool parseOperand(OperandVector &Operands, bool isCondCode,
                    bool invertCondCode);
  bool parseImmExpr(int64_t &Out);
  bool parseComma();
  bool parseRegisterInRange(unsigned &Out, unsigned Base, unsigned First,
                            unsigned Last);
  bool showMatchError(SMLoc Loc, unsigned ErrCode, uint64_t ErrorInfo,
                      OperandVector &Operands);

  // Directive parsing (.arch, .cpu, .inst, LOH, .req/.unreq, CFI and
  // Windows SEH unwind directives).
  bool parseDirectiveArch(SMLoc L);
  bool parseDirectiveArchExtension(SMLoc L);
  bool parseDirectiveCPU(SMLoc L);
  bool parseDirectiveInst(SMLoc L);

  bool parseDirectiveTLSDescCall(SMLoc L);

  bool parseDirectiveLOH(StringRef LOH, SMLoc L);
  bool parseDirectiveLtorg(SMLoc L);

  bool parseDirectiveReq(StringRef Name, SMLoc L);
  bool parseDirectiveUnreq(SMLoc L);
  bool parseDirectiveCFINegateRAState();
  bool parseDirectiveCFIBKeyFrame();
  bool parseDirectiveCFIMTETaggedFrame();

  bool parseDirectiveVariantPCS(SMLoc L);

  bool parseDirectiveSEHAllocStack(SMLoc L);
  bool parseDirectiveSEHPrologEnd(SMLoc L);
  bool parseDirectiveSEHSaveR19R20X(SMLoc L);
  bool parseDirectiveSEHSaveFPLR(SMLoc L);
  bool parseDirectiveSEHSaveFPLRX(SMLoc L);
  bool parseDirectiveSEHSaveReg(SMLoc L);
  bool parseDirectiveSEHSaveRegX(SMLoc L);
  bool parseDirectiveSEHSaveRegP(SMLoc L);
  bool parseDirectiveSEHSaveRegPX(SMLoc L);
  bool parseDirectiveSEHSaveLRPair(SMLoc L);
  bool parseDirectiveSEHSaveFReg(SMLoc L);
  bool parseDirectiveSEHSaveFRegX(SMLoc L);
  bool parseDirectiveSEHSaveFRegP(SMLoc L);
  bool parseDirectiveSEHSaveFRegPX(SMLoc L);
  bool parseDirectiveSEHSetFP(SMLoc L);
  bool parseDirectiveSEHAddFP(SMLoc L);
  bool parseDirectiveSEHNop(SMLoc L);
  bool parseDirectiveSEHSaveNext(SMLoc L);
  bool parseDirectiveSEHEpilogStart(SMLoc L);
  bool parseDirectiveSEHEpilogEnd(SMLoc L);
  bool parseDirectiveSEHTrapFrame(SMLoc L);
  bool parseDirectiveSEHMachineFrame(SMLoc L);
  bool parseDirectiveSEHContext(SMLoc L);
  bool parseDirectiveSEHClearUnwoundToCall(SMLoc L);
  bool parseDirectiveSEHPACSignLR(SMLoc L);
  bool parseDirectiveSEHSaveAnyReg(SMLoc L, bool Paired, bool Writeback);

  bool validateInstruction(MCInst &Inst, SMLoc &IDLoc,
                           SmallVectorImpl<SMLoc> &Loc);
  unsigned getNumRegsForRegKind(RegKind K);
  bool MatchAndEmitInstruction(SMLoc IDLoc, unsigned &Opcode,
                               OperandVector &Operands, MCStreamer &Out,
                               uint64_t &ErrorInfo,
                               bool MatchingInlineAsm) override;

  /// @name Auto-generated Match Functions
  /// {

#define GET_ASSEMBLER_HEADER
#include "AArch64GenAsmMatcher.inc"

  /// }

  // tryParse* helpers: custom operand parsers referenced from the
  // auto-generated matcher tables.
  OperandMatchResultTy tryParseScalarRegister(MCRegister &Reg);
  OperandMatchResultTy tryParseVectorRegister(MCRegister &Reg, StringRef &Kind,
                                              RegKind MatchKind);
  OperandMatchResultTy tryParseMatrixRegister(OperandVector &Operands);
  OperandMatchResultTy tryParseSVCR(OperandVector &Operands);
  OperandMatchResultTy tryParseOptionalShiftExtend(OperandVector &Operands);
  OperandMatchResultTy tryParseBarrierOperand(OperandVector &Operands);
  OperandMatchResultTy tryParseBarriernXSOperand(OperandVector &Operands);
  OperandMatchResultTy tryParseMRSSystemRegister(OperandVector &Operands);
  OperandMatchResultTy tryParseSysReg(OperandVector &Operands);
  OperandMatchResultTy tryParseSysCROperand(OperandVector &Operands);
  template <bool IsSVEPrefetch = false>
  OperandMatchResultTy tryParsePrefetch(OperandVector &Operands);
  OperandMatchResultTy tryParseRPRFMOperand(OperandVector &Operands);
  OperandMatchResultTy tryParsePSBHint(OperandVector &Operands);
  OperandMatchResultTy tryParseBTIHint(OperandVector &Operands);
  OperandMatchResultTy tryParseAdrpLabel(OperandVector &Operands);
  OperandMatchResultTy tryParseAdrLabel(OperandVector &Operands);
  template<bool AddFPZeroAsLiteral>
  OperandMatchResultTy tryParseFPImm(OperandVector &Operands);
  OperandMatchResultTy tryParseImmWithOptionalShift(OperandVector &Operands);
  OperandMatchResultTy tryParseGPR64sp0Operand(OperandVector &Operands);
  bool tryParseNeonVectorRegister(OperandVector &Operands);
  OperandMatchResultTy tryParseVectorIndex(OperandVector &Operands);
  OperandMatchResultTy tryParseGPRSeqPair(OperandVector &Operands);
  OperandMatchResultTy tryParseSyspXzrPair(OperandVector &Operands);
  template <bool ParseShiftExtend,
            RegConstraintEqualityTy EqTy = RegConstraintEqualityTy::EqualsReg>
  OperandMatchResultTy tryParseGPROperand(OperandVector &Operands);
  OperandMatchResultTy tryParseZTOperand(OperandVector &Operands);
  template <bool ParseShiftExtend, bool ParseSuffix>
  OperandMatchResultTy tryParseSVEDataVector(OperandVector &Operands);
  template <RegKind RK>
  OperandMatchResultTy tryParseSVEPredicateVector(OperandVector &Operands);
  template <RegKind VectorKind>
  OperandMatchResultTy tryParseVectorList(OperandVector &Operands,
                                          bool ExpectMatch = false);
  OperandMatchResultTy tryParseMatrixTileList(OperandVector &Operands);
  OperandMatchResultTy tryParseSVEPattern(OperandVector &Operands);
  OperandMatchResultTy tryParseSVEVecLenSpecifier(OperandVector &Operands);
  OperandMatchResultTy tryParseGPR64x8(OperandVector &Operands);
  OperandMatchResultTy tryParseImmRange(OperandVector &Operands);

public:
  enum AArch64MatchResultTy {
    Match_InvalidSuffix = FIRST_TARGET_MATCH_RESULT_TY,
#define GET_OPERAND_DIAGNOSTIC_TYPES
#include "AArch64GenAsmMatcher.inc"
  };
  // True for the AArch64 ILP32 (GNUILP32) environment; set from the triple.
  bool IsILP32;

  AArch64AsmParser(const MCSubtargetInfo &STI, MCAsmParser &Parser,
                   const MCInstrInfo &MII, const MCTargetOptions &Options)
    : MCTargetAsmParser(Options, STI, MII) {
    IsILP32 = STI.getTargetTriple().getEnvironment() == Triple::GNUILP32;
    MCAsmParserExtension::Initialize(Parser);
    MCStreamer &S = getParser().getStreamer();
    if (S.getTargetStreamer() == nullptr)
      new AArch64TargetStreamer(S);

    // Alias .hword/.word/.[dx]word to the target-independent
    // .2byte/.4byte/.8byte directives as they have the same form and
    // semantics:
    ///  ::= (.hword | .word | .dword | .xword ) [ expression (, expression)* ]
    Parser.addAliasForDirective(".hword", ".2byte");
    Parser.addAliasForDirective(".word", ".4byte");
    Parser.addAliasForDirective(".dword", ".8byte");
    Parser.addAliasForDirective(".xword", ".8byte");

    // Initialize the set of available features.
    setAvailableFeatures(ComputeAvailableFeatures(getSTI().getFeatureBits()));
  }

  bool areEqualRegs(const MCParsedAsmOperand &Op1,
                    const MCParsedAsmOperand &Op2) const override;
  bool ParseInstruction(ParseInstructionInfo &Info, StringRef Name,
                        SMLoc NameLoc, OperandVector &Operands) override;
  bool parseRegister(MCRegister &RegNo, SMLoc &StartLoc,
                     SMLoc &EndLoc) override;
  OperandMatchResultTy tryParseRegister(MCRegister &RegNo, SMLoc &StartLoc,
                                        SMLoc &EndLoc) override;
  bool ParseDirective(AsmToken DirectiveID) override;
  unsigned validateTargetOperandClass(MCParsedAsmOperand &Op,
                                      unsigned Kind) override;

  /// Classify \p Expr into an ELF/Darwin relocation specifier plus addend.
  static bool classifySymbolRef(const MCExpr *Expr,
                                AArch64MCExpr::VariantKind &ELFRefKind,
                                MCSymbolRefExpr::VariantKind &DarwinRefKind,
                                int64_t &Addend);
};
  302. /// AArch64Operand - Instances of this class represent a parsed AArch64 machine
  303. /// instruction.
  304. class AArch64Operand : public MCParsedAsmOperand {
  305. private:
  /// Discriminator for the payload union below: exactly one union member is
  /// live, selected by this tag.
  enum KindTy {
    k_Immediate,
    k_ShiftedImm,
    k_ImmRange,
    k_CondCode,
    k_Register,
    k_MatrixRegister,
    k_MatrixTileList,
    k_SVCR,
    k_VectorList,
    k_VectorIndex,
    k_Token,
    k_SysReg,
    k_SysCR,
    k_Prefetch,
    k_ShiftExtend,
    k_FPImm,
    k_Barrier,
    k_PSBHint,
    k_BTIHint,
  } Kind;

  // Source range of the operand text, reported by get{Start,End}Loc().
  SMLoc StartLoc, EndLoc;
  // A raw token operand; Data/Length reference the source buffer.
  struct TokOp {
    const char *Data;
    unsigned Length;
    bool IsSuffix; // Is the operand actually a suffix on the mnemonic.
  };
  // Separate shift/extend operand.
  struct ShiftExtendOp {
    AArch64_AM::ShiftExtendType Type;
    unsigned Amount;
    bool HasExplicitAmount; // False when the amount defaulted (no "#n" given).
  };
  // A register operand, optionally carrying an attached shift/extend.
  struct RegOp {
    unsigned RegNum;
    RegKind Kind;
    int ElementWidth;

    // The register may be allowed as a different register class,
    // e.g. for GPR64as32 or GPR32as64.
    RegConstraintEqualityTy EqualityTy;

    // In some cases the shift/extend needs to be explicitly parsed together
    // with the register, rather than as a separate operand. This is needed
    // for addressing modes where the instruction as a whole dictates the
    // scaling/extend, rather than specific bits in the instruction.
    // By parsing them as a single operand, we avoid the need to pass an
    // extra operand in all CodeGen patterns (because all operands need to
    // have an associated value), and we avoid the need to update TableGen to
    // accept operands that have no associated bits in the instruction.
    //
    // An added benefit of parsing them together is that the assembler
    // can give a sensible diagnostic if the scaling is not correct.
    //
    // The default is 'lsl #0' (HasExplicitAmount = false) if no
    // ShiftExtend is specified.
    ShiftExtendOp ShiftExtend;
  };
  // An SME matrix (ZA) register reference.
  struct MatrixRegOp {
    unsigned RegNum;
    unsigned ElementWidth;
    MatrixKind Kind;
  };

  // Bitmask of ZA tiles named in a matrix tile list.
  struct MatrixTileListOp {
    unsigned RegMask = 0;
  };

  // A vector register list; RegNum is the first register, with Count
  // registers separated by Stride.
  struct VectorListOp {
    unsigned RegNum;
    unsigned Count;
    unsigned Stride;
    unsigned NumElements;
    unsigned ElementWidth;
    RegKind RegisterKind;
  };
  // Lane index for a vector element reference.
  struct VectorIndexOp {
    int Val;
  };

  // A plain immediate expression.
  struct ImmOp {
    const MCExpr *Val;
  };

  // An immediate with an explicit left-shift amount (e.g. "#1, lsl #12").
  struct ShiftedImmOp {
    const MCExpr *Val;
    unsigned ShiftAmount;
  };

  // An inclusive immediate range [First, Last].
  struct ImmRangeOp {
    unsigned First;
    unsigned Last;
  };

  struct CondCodeOp {
    AArch64CC::CondCode Code;
  };

  struct FPImmOp {
    uint64_t Val; // APFloat value bitcasted to uint64_t.
    bool IsExact; // describes whether parsed value was exact.
  };
  // The named-value operands below all keep Data/Length pointing at the
  // spelling in the source buffer alongside the resolved encoding value.
  struct BarrierOp {
    const char *Data;
    unsigned Length;
    unsigned Val; // Not the enum since not all values have names.
    bool HasnXSModifier;
  };

  struct SysRegOp {
    const char *Data;
    unsigned Length;
    uint32_t MRSReg;     // Encoding when read via MRS.
    uint32_t MSRReg;     // Encoding when written via MSR.
    uint32_t PStateField;
  };

  struct SysCRImmOp {
    unsigned Val;
  };

  struct PrefetchOp {
    const char *Data;
    unsigned Length;
    unsigned Val;
  };

  struct PSBHintOp {
    const char *Data;
    unsigned Length;
    unsigned Val;
  };

  struct BTIHintOp {
    const char *Data;
    unsigned Length;
    unsigned Val;
  };

  struct SVCROp {
    const char *Data;
    unsigned Length;
    unsigned PStateField;
  };
  // Payload storage: exactly one member is live at a time, selected by Kind.
  union {
    struct TokOp Tok;
    struct RegOp Reg;
    struct MatrixRegOp MatrixReg;
    struct MatrixTileListOp MatrixTileList;
    struct VectorListOp VectorList;
    struct VectorIndexOp VectorIndex;
    struct ImmOp Imm;
    struct ShiftedImmOp ShiftedImm;
    struct ImmRangeOp ImmRange;
    struct CondCodeOp CondCode;
    struct FPImmOp FPImm;
    struct BarrierOp Barrier;
    struct SysRegOp SysReg;
    struct SysCRImmOp SysCRImm;
    struct PrefetchOp Prefetch;
    struct PSBHintOp PSBHint;
    struct BTIHintOp BTIHint;
    struct ShiftExtendOp ShiftExtend;
    struct SVCROp SVCR;
  };

  // Keep the MCContext around as the MCExprs may need manipulated during
  // the add<>Operands() calls.
  MCContext &Ctx;
  459. public:
  // Construct an operand of kind \p K; the caller fills in the payload.
  AArch64Operand(KindTy K, MCContext &Ctx) : Kind(K), Ctx(Ctx) {}

  // Copy constructor. Because the payload is a union, only the member
  // selected by the source's Kind may be copied.
  AArch64Operand(const AArch64Operand &o) : MCParsedAsmOperand(), Ctx(o.Ctx) {
    Kind = o.Kind;
    StartLoc = o.StartLoc;
    EndLoc = o.EndLoc;
    switch (Kind) {
    case k_Token:
      Tok = o.Tok;
      break;
    case k_Immediate:
      Imm = o.Imm;
      break;
    case k_ShiftedImm:
      ShiftedImm = o.ShiftedImm;
      break;
    case k_ImmRange:
      ImmRange = o.ImmRange;
      break;
    case k_CondCode:
      CondCode = o.CondCode;
      break;
    case k_FPImm:
      FPImm = o.FPImm;
      break;
    case k_Barrier:
      Barrier = o.Barrier;
      break;
    case k_Register:
      Reg = o.Reg;
      break;
    case k_MatrixRegister:
      MatrixReg = o.MatrixReg;
      break;
    case k_MatrixTileList:
      MatrixTileList = o.MatrixTileList;
      break;
    case k_VectorList:
      VectorList = o.VectorList;
      break;
    case k_VectorIndex:
      VectorIndex = o.VectorIndex;
      break;
    case k_SysReg:
      SysReg = o.SysReg;
      break;
    case k_SysCR:
      SysCRImm = o.SysCRImm;
      break;
    case k_Prefetch:
      Prefetch = o.Prefetch;
      break;
    case k_PSBHint:
      PSBHint = o.PSBHint;
      break;
    case k_BTIHint:
      BTIHint = o.BTIHint;
      break;
    case k_ShiftExtend:
      ShiftExtend = o.ShiftExtend;
      break;
    case k_SVCR:
      SVCR = o.SVCR;
      break;
    }
  }
  /// getStartLoc - Get the location of the first token of this operand.
  SMLoc getStartLoc() const override { return StartLoc; }
  /// getEndLoc - Get the location of the last token of this operand.
  SMLoc getEndLoc() const override { return EndLoc; }

  // Kind-checked payload accessors. Each asserts the operand holds the
  // union member it reads.
  StringRef getToken() const {
    assert(Kind == k_Token && "Invalid access!");
    return StringRef(Tok.Data, Tok.Length);
  }

  bool isTokenSuffix() const {
    assert(Kind == k_Token && "Invalid access!");
    return Tok.IsSuffix;
  }

  const MCExpr *getImm() const {
    assert(Kind == k_Immediate && "Invalid access!");
    return Imm.Val;
  }

  const MCExpr *getShiftedImmVal() const {
    assert(Kind == k_ShiftedImm && "Invalid access!");
    return ShiftedImm.Val;
  }

  unsigned getShiftedImmShift() const {
    assert(Kind == k_ShiftedImm && "Invalid access!");
    return ShiftedImm.ShiftAmount;
  }

  unsigned getFirstImmVal() const {
    assert(Kind == k_ImmRange && "Invalid access!");
    return ImmRange.First;
  }

  unsigned getLastImmVal() const {
    assert(Kind == k_ImmRange && "Invalid access!");
    return ImmRange.Last;
  }

  AArch64CC::CondCode getCondCode() const {
    assert(Kind == k_CondCode && "Invalid access!");
    return CondCode.Code;
  }

  // Reconstruct the APFloat from the stored 64-bit image.
  APFloat getFPImm() const {
    assert (Kind == k_FPImm && "Invalid access!");
    return APFloat(APFloat::IEEEdouble(), APInt(64, FPImm.Val, true));
  }

  bool getFPImmIsExact() const {
    assert (Kind == k_FPImm && "Invalid access!");
    return FPImm.IsExact;
  }

  unsigned getBarrier() const {
    assert(Kind == k_Barrier && "Invalid access!");
    return Barrier.Val;
  }

  StringRef getBarrierName() const {
    assert(Kind == k_Barrier && "Invalid access!");
    return StringRef(Barrier.Data, Barrier.Length);
  }

  bool getBarriernXSModifier() const {
    assert(Kind == k_Barrier && "Invalid access!");
    return Barrier.HasnXSModifier;
  }

  unsigned getReg() const override {
    assert(Kind == k_Register && "Invalid access!");
    return Reg.RegNum;
  }

  unsigned getMatrixReg() const {
    assert(Kind == k_MatrixRegister && "Invalid access!");
    return MatrixReg.RegNum;
  }

  unsigned getMatrixElementWidth() const {
    assert(Kind == k_MatrixRegister && "Invalid access!");
    return MatrixReg.ElementWidth;
  }

  MatrixKind getMatrixKind() const {
    assert(Kind == k_MatrixRegister && "Invalid access!");
    return MatrixReg.Kind;
  }

  unsigned getMatrixTileListRegMask() const {
    assert(isMatrixTileList() && "Invalid access!");
    return MatrixTileList.RegMask;
  }

  RegConstraintEqualityTy getRegEqualityTy() const {
    assert(Kind == k_Register && "Invalid access!");
    return Reg.EqualityTy;
  }

  unsigned getVectorListStart() const {
    assert(Kind == k_VectorList && "Invalid access!");
    return VectorList.RegNum;
  }

  unsigned getVectorListCount() const {
    assert(Kind == k_VectorList && "Invalid access!");
    return VectorList.Count;
  }

  unsigned getVectorListStride() const {
    assert(Kind == k_VectorList && "Invalid access!");
    return VectorList.Stride;
  }

  int getVectorIndex() const {
    assert(Kind == k_VectorIndex && "Invalid access!");
    return VectorIndex.Val;
  }

  StringRef getSysReg() const {
    assert(Kind == k_SysReg && "Invalid access!");
    return StringRef(SysReg.Data, SysReg.Length);
  }

  unsigned getSysCR() const {
    assert(Kind == k_SysCR && "Invalid access!");
    return SysCRImm.Val;
  }

  unsigned getPrefetch() const {
    assert(Kind == k_Prefetch && "Invalid access!");
    return Prefetch.Val;
  }

  unsigned getPSBHint() const {
    assert(Kind == k_PSBHint && "Invalid access!");
    return PSBHint.Val;
  }

  StringRef getPSBHintName() const {
    assert(Kind == k_PSBHint && "Invalid access!");
    return StringRef(PSBHint.Data, PSBHint.Length);
  }

  unsigned getBTIHint() const {
    assert(Kind == k_BTIHint && "Invalid access!");
    return BTIHint.Val;
  }

  StringRef getBTIHintName() const {
    assert(Kind == k_BTIHint && "Invalid access!");
    return StringRef(BTIHint.Data, BTIHint.Length);
  }

  StringRef getSVCR() const {
    assert(Kind == k_SVCR && "Invalid access!");
    return StringRef(SVCR.Data, SVCR.Length);
  }

  StringRef getPrefetchName() const {
    assert(Kind == k_Prefetch && "Invalid access!");
    return StringRef(Prefetch.Data, Prefetch.Length);
  }

  // The shift/extend may live either in a standalone k_ShiftExtend operand
  // or attached to a k_Register operand; these accessors accept both.
  AArch64_AM::ShiftExtendType getShiftExtendType() const {
    if (Kind == k_ShiftExtend)
      return ShiftExtend.Type;
    if (Kind == k_Register)
      return Reg.ShiftExtend.Type;
    llvm_unreachable("Invalid access!");
  }

  unsigned getShiftExtendAmount() const {
    if (Kind == k_ShiftExtend)
      return ShiftExtend.Amount;
    if (Kind == k_Register)
      return Reg.ShiftExtend.Amount;
    llvm_unreachable("Invalid access!");
  }

  bool hasShiftExtendAmount() const {
    if (Kind == k_ShiftExtend)
      return ShiftExtend.HasExplicitAmount;
    if (Kind == k_Register)
      return Reg.ShiftExtend.HasExplicitAmount;
    llvm_unreachable("Invalid access!");
  }

  bool isImm() const override { return Kind == k_Immediate; }
  bool isMem() const override { return false; }
  680. bool isUImm6() const {
  681. if (!isImm())
  682. return false;
  683. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  684. if (!MCE)
  685. return false;
  686. int64_t Val = MCE->getValue();
  687. return (Val >= 0 && Val < 64);
  688. }
  // A plain signed immediate is a scaled signed immediate with scale 1.
  template <int Width> bool isSImm() const { return isSImmScaled<Width, 1>(); }

  template <int Bits, int Scale> DiagnosticPredicate isSImmScaled() const {
    return isImmScaled<Bits, Scale>(true);
  }

  // Unsigned scaled immediate; when IsRange, additionally require the range
  // extent (Last - First) to equal Offset before checking the first value.
  template <int Bits, int Scale, int Offset = 0, bool IsRange = false>
  DiagnosticPredicate isUImmScaled() const {
    if (IsRange && isImmRange() &&
        (getLastImmVal() != getFirstImmVal() + Offset))
      return DiagnosticPredicateTy::NoMatch;

    return isImmScaled<Bits, Scale, IsRange>(false);
  }
  // Shared bounds check for (signed or unsigned) scaled immediates. For a
  // range operand only the first value is validated here; the extent is
  // checked by isUImmScaled.
  template <int Bits, int Scale, bool IsRange = false>
  DiagnosticPredicate isImmScaled(bool Signed) const {
    // The operand's kind (single immediate vs. range) must agree with
    // the IsRange template argument.
    if ((!isImm() && !isImmRange()) || (isImm() && IsRange) ||
        (isImmRange() && !IsRange))
      return DiagnosticPredicateTy::NoMatch;

    int64_t Val;
    if (isImmRange())
      Val = getFirstImmVal();
    else {
      const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
      if (!MCE)
        return DiagnosticPredicateTy::NoMatch;
      Val = MCE->getValue();
    }

    // A Bits-wide field scaled by Scale: signed spans
    // [-2^(Bits-1)*Scale, (2^(Bits-1)-1)*Scale], unsigned spans
    // [0, (2^Bits - 1)*Scale].
    int64_t MinVal, MaxVal;
    if (Signed) {
      int64_t Shift = Bits - 1;
      MinVal = (int64_t(1) << Shift) * -Scale;
      MaxVal = ((int64_t(1) << Shift) - 1) * Scale;
    } else {
      MinVal = 0;
      MaxVal = ((int64_t(1) << Bits) - 1) * Scale;
    }

    if (Val >= MinVal && Val <= MaxVal && (Val % Scale) == 0)
      return DiagnosticPredicateTy::Match;

    // In range-but-misaligned or out-of-range cases, report a near match so
    // the matcher can emit a targeted diagnostic.
    return DiagnosticPredicateTy::NearMatch;
  }
  727. DiagnosticPredicate isSVEPattern() const {
  728. if (!isImm())
  729. return DiagnosticPredicateTy::NoMatch;
  730. auto *MCE = dyn_cast<MCConstantExpr>(getImm());
  731. if (!MCE)
  732. return DiagnosticPredicateTy::NoMatch;
  733. int64_t Val = MCE->getValue();
  734. if (Val >= 0 && Val < 32)
  735. return DiagnosticPredicateTy::Match;
  736. return DiagnosticPredicateTy::NearMatch;
  737. }
  738. DiagnosticPredicate isSVEVecLenSpecifier() const {
  739. if (!isImm())
  740. return DiagnosticPredicateTy::NoMatch;
  741. auto *MCE = dyn_cast<MCConstantExpr>(getImm());
  742. if (!MCE)
  743. return DiagnosticPredicateTy::NoMatch;
  744. int64_t Val = MCE->getValue();
  745. if (Val >= 0 && Val <= 1)
  746. return DiagnosticPredicateTy::Match;
  747. return DiagnosticPredicateTy::NearMatch;
  748. }
  // Whether \p Expr is a symbolic reference that may legally appear as a
  // 12-bit page-offset immediate (lo12-style relocations).
  bool isSymbolicUImm12Offset(const MCExpr *Expr) const {
    AArch64MCExpr::VariantKind ELFRefKind;
    MCSymbolRefExpr::VariantKind DarwinRefKind;
    int64_t Addend;
    if (!AArch64AsmParser::classifySymbolRef(Expr, ELFRefKind, DarwinRefKind,
                                             Addend)) {
      // If we don't understand the expression, assume the best and
      // let the fixup and relocation code deal with it.
      return true;
    }

    if (DarwinRefKind == MCSymbolRefExpr::VK_PAGEOFF ||
        ELFRefKind == AArch64MCExpr::VK_LO12 ||
        ELFRefKind == AArch64MCExpr::VK_GOT_LO12 ||
        ELFRefKind == AArch64MCExpr::VK_DTPREL_LO12 ||
        ELFRefKind == AArch64MCExpr::VK_DTPREL_LO12_NC ||
        ELFRefKind == AArch64MCExpr::VK_TPREL_LO12 ||
        ELFRefKind == AArch64MCExpr::VK_TPREL_LO12_NC ||
        ELFRefKind == AArch64MCExpr::VK_GOTTPREL_LO12_NC ||
        ELFRefKind == AArch64MCExpr::VK_TLSDESC_LO12 ||
        ELFRefKind == AArch64MCExpr::VK_SECREL_LO12 ||
        ELFRefKind == AArch64MCExpr::VK_SECREL_HI12 ||
        ELFRefKind == AArch64MCExpr::VK_GOT_PAGE_LO15) {
      // Note that we don't range-check the addend. It's adjusted modulo page
      // size when converted, so there is no "out of range" condition when using
      // @pageoff.
      return true;
    } else if (DarwinRefKind == MCSymbolRefExpr::VK_GOTPAGEOFF ||
               DarwinRefKind == MCSymbolRefExpr::VK_TLVPPAGEOFF) {
      // @gotpageoff/@tlvppageoff can only be used directly, not with an addend.
      return Addend == 0;
    }

    return false;
  }
  // A 12-bit unsigned, Scale-aligned load/store offset; non-constant
  // expressions fall back to the symbolic lo12 check above.
  template <int Scale> bool isUImm12Offset() const {
    if (!isImm())
      return false;

    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
    if (!MCE)
      return isSymbolicUImm12Offset(getImm());

    int64_t Val = MCE->getValue();
    return (Val % Scale) == 0 && Val >= 0 && (Val / Scale) < 0x1000;
  }
  791. template <int N, int M>
  792. bool isImmInRange() const {
  793. if (!isImm())
  794. return false;
  795. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  796. if (!MCE)
  797. return false;
  798. int64_t Val = MCE->getValue();
  799. return (Val >= N && Val <= M);
  800. }
  // NOTE: Also used for isLogicalImmNot as anything that can be represented as
  // a logical immediate can always be represented when inverted.
  template <typename T>
  bool isLogicalImm() const {
    if (!isImm())
      return false;
    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
    if (!MCE)
      return false;

    int64_t Val = MCE->getValue();
    // Mask of the bits above T's width. Shifting twice by half the width
    // avoids UB from a single shift by 64 when T is 64 bits wide.
    // Avoid left shift by 64 directly.
    uint64_t Upper = UINT64_C(-1) << (sizeof(T) * 4) << (sizeof(T) * 4);
    // Allow all-0 or all-1 in top bits to permit bitwise NOT.
    if ((Val & Upper) && (Val & Upper) != Upper)
      return false;

    // Check the value truncated to T's width against the logical-immediate
    // encoding rules for that element size.
    return AArch64_AM::isLogicalImmediate(Val & ~Upper, sizeof(T) * 8);
  }
  // Kind predicates for the shifted-immediate and immediate-range payloads.
  bool isShiftedImm() const { return Kind == k_ShiftedImm; }
  bool isImmRange() const { return Kind == k_ImmRange; }
  /// Returns the immediate value as a pair of (imm, shift) if the immediate is
  /// a shifted immediate by value 'Shift' or '0', or if it is an unshifted
  /// immediate that can be shifted by 'Shift'.
  template <unsigned Width>
  std::optional<std::pair<int64_t, unsigned>> getShiftedVal() const {
    if (isShiftedImm() && Width == getShiftedImmShift())
      if (auto *CE = dyn_cast<MCConstantExpr>(getShiftedImmVal()))
        return std::make_pair(CE->getValue(), Width);

    if (isImm())
      if (auto *CE = dyn_cast<MCConstantExpr>(getImm())) {
        int64_t Val = CE->getValue();
        // If the low Width bits are clear (and Val != 0), the value can be
        // expressed as (Val >> Width) with an implicit shift of Width.
        if ((Val != 0) && (uint64_t(Val >> Width) << Width) == uint64_t(Val))
          return std::make_pair(Val >> Width, Width);
        else
          return std::make_pair(Val, 0u);
      }

    // Not a constant (shifted) immediate.
    return {};
  }
  // True when the operand is valid as the immediate of an ADD/SUB: either a
  // lo12-style symbolic reference or a constant representable as a 12-bit
  // value optionally shifted left by 12.
  bool isAddSubImm() const {
    if (!isShiftedImm() && !isImm())
      return false;

    const MCExpr *Expr;

    // An ADD/SUB shifter is either 'lsl #0' or 'lsl #12'.
    if (isShiftedImm()) {
      unsigned Shift = ShiftedImm.ShiftAmount;
      Expr = ShiftedImm.Val;
      if (Shift != 0 && Shift != 12)
        return false;
    } else {
      Expr = getImm();
    }

    AArch64MCExpr::VariantKind ELFRefKind;
    MCSymbolRefExpr::VariantKind DarwinRefKind;
    int64_t Addend;
    if (AArch64AsmParser::classifySymbolRef(Expr, ELFRefKind,
                                            DarwinRefKind, Addend)) {
      return DarwinRefKind == MCSymbolRefExpr::VK_PAGEOFF
          || DarwinRefKind == MCSymbolRefExpr::VK_TLVPPAGEOFF
          || (DarwinRefKind == MCSymbolRefExpr::VK_GOTPAGEOFF && Addend == 0)
          || ELFRefKind == AArch64MCExpr::VK_LO12
          || ELFRefKind == AArch64MCExpr::VK_DTPREL_HI12
          || ELFRefKind == AArch64MCExpr::VK_DTPREL_LO12
          || ELFRefKind == AArch64MCExpr::VK_DTPREL_LO12_NC
          || ELFRefKind == AArch64MCExpr::VK_TPREL_HI12
          || ELFRefKind == AArch64MCExpr::VK_TPREL_LO12
          || ELFRefKind == AArch64MCExpr::VK_TPREL_LO12_NC
          || ELFRefKind == AArch64MCExpr::VK_TLSDESC_LO12
          || ELFRefKind == AArch64MCExpr::VK_SECREL_HI12
          || ELFRefKind == AArch64MCExpr::VK_SECREL_LO12;
    }

    // If it's a constant, it should be a real immediate in range.
    if (auto ShiftedVal = getShiftedVal<12>())
      return ShiftedVal->first >= 0 && ShiftedVal->first <= 0xfff;

    // If it's an expression, we hope for the best and let the fixup/relocation
    // code deal with it.
    return true;
  }
  877. bool isAddSubImmNeg() const {
  878. if (!isShiftedImm() && !isImm())
  879. return false;
  880. // Otherwise it should be a real negative immediate in range.
  881. if (auto ShiftedVal = getShiftedVal<12>())
  882. return ShiftedVal->first < 0 && -ShiftedVal->first <= 0xfff;
  883. return false;
  884. }
  // Signed value in the range -128 to +127. For element widths of
  // 16 bits or higher it may also be a signed multiple of 256 in the
  // range -32768 to +32512.
  // For element-width of 8 bits a range of -128 to 255 is accepted,
  // since a copy of a byte can be either signed/unsigned.
  template <typename T>
  DiagnosticPredicate isSVECpyImm() const {
    if (!isShiftedImm() && (!isImm() || !isa<MCConstantExpr>(getImm())))
      return DiagnosticPredicateTy::NoMatch;
    // Byte elements have no 'lsl #8' form, so a shifted byte imm is rejected.
    bool IsByte = std::is_same<int8_t, std::make_signed_t<T>>::value ||
                  std::is_same<int8_t, T>::value;
    if (auto ShiftedImm = getShiftedVal<8>())
      if (!(IsByte && ShiftedImm->second) &&
          AArch64_AM::isSVECpyImm<T>(uint64_t(ShiftedImm->first)
                                     << ShiftedImm->second))
        return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }

  // Unsigned value in the range 0 to 255. For element widths of
  // 16 bits or higher it may also be a signed multiple of 256 in the
  // range 0 to 65280.
  template <typename T> DiagnosticPredicate isSVEAddSubImm() const {
    if (!isShiftedImm() && (!isImm() || !isa<MCConstantExpr>(getImm())))
      return DiagnosticPredicateTy::NoMatch;
    // As above: a shifted immediate is meaningless for byte elements.
    bool IsByte = std::is_same<int8_t, std::make_signed_t<T>>::value ||
                  std::is_same<int8_t, T>::value;
    if (auto ShiftedImm = getShiftedVal<8>())
      if (!(IsByte && ShiftedImm->second) &&
          AArch64_AM::isSVEAddSubImm<T>(ShiftedImm->first
                                        << ShiftedImm->second))
        return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  // Matches immediates that are valid logical immediates but not valid SVE
  // CPY immediates, so the logical-immediate encoding is preferred.
  template <typename T> DiagnosticPredicate isSVEPreferredLogicalImm() const {
    if (isLogicalImm<T>() && !isSVECpyImm<T>())
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NoMatch;
  }
  bool isCondCode() const { return Kind == k_CondCode; }
  924. bool isSIMDImmType10() const {
  925. if (!isImm())
  926. return false;
  927. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  928. if (!MCE)
  929. return false;
  930. return AArch64_AM::isAdvSIMDModImmType10(MCE->getValue());
  931. }
  // Branch target: either a non-constant expression (left to the fixup code)
  // or a 4-byte-aligned constant that fits the N-bit field after the
  // implicit 2-bit scaling.
  template<int N>
  bool isBranchTarget() const {
    if (!isImm())
      return false;
    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
    if (!MCE)
      return true;  // Symbolic target: defer to fixup/relocation handling.
    int64_t Val = MCE->getValue();
    if (Val & 0x3)
      return false; // Targets must be 4-byte aligned.
    assert(N > 0 && "Branch target immediate cannot be 0 bits!");
    // Signed range of an N-bit immediate scaled by 4.
    return (Val >= -((1<<(N-1)) << 2) && Val <= (((1<<(N-1))-1) << 2));
  }
  // True if the operand is a symbol reference whose ELF modifier is one of
  // AllowedModifiers (the :abs_gN:-style 16-bit-chunk selectors used by
  // MOVZ/MOVK). Darwin symbol kinds are never accepted here.
  bool
  isMovWSymbol(ArrayRef<AArch64MCExpr::VariantKind> AllowedModifiers) const {
    if (!isImm())
      return false;
    AArch64MCExpr::VariantKind ELFRefKind;
    MCSymbolRefExpr::VariantKind DarwinRefKind;
    int64_t Addend;
    if (!AArch64AsmParser::classifySymbolRef(getImm(), ELFRefKind,
                                             DarwinRefKind, Addend)) {
      return false;
    }
    if (DarwinRefKind != MCSymbolRefExpr::VK_None)
      return false;
    return llvm::is_contained(AllowedModifiers, ELFRefKind);
  }
  // G3 modifiers (highest 16-bit chunk).
  bool isMovWSymbolG3() const {
    return isMovWSymbol({AArch64MCExpr::VK_ABS_G3, AArch64MCExpr::VK_PREL_G3});
  }
  // G2 modifiers.
  bool isMovWSymbolG2() const {
    return isMovWSymbol(
        {AArch64MCExpr::VK_ABS_G2, AArch64MCExpr::VK_ABS_G2_S,
         AArch64MCExpr::VK_ABS_G2_NC, AArch64MCExpr::VK_PREL_G2,
         AArch64MCExpr::VK_PREL_G2_NC, AArch64MCExpr::VK_TPREL_G2,
         AArch64MCExpr::VK_DTPREL_G2});
  }
  // G1 modifiers.
  bool isMovWSymbolG1() const {
    return isMovWSymbol(
        {AArch64MCExpr::VK_ABS_G1, AArch64MCExpr::VK_ABS_G1_S,
         AArch64MCExpr::VK_ABS_G1_NC, AArch64MCExpr::VK_PREL_G1,
         AArch64MCExpr::VK_PREL_G1_NC, AArch64MCExpr::VK_GOTTPREL_G1,
         AArch64MCExpr::VK_TPREL_G1, AArch64MCExpr::VK_TPREL_G1_NC,
         AArch64MCExpr::VK_DTPREL_G1, AArch64MCExpr::VK_DTPREL_G1_NC});
  }
  // G0 modifiers (lowest 16-bit chunk).
  bool isMovWSymbolG0() const {
    return isMovWSymbol(
        {AArch64MCExpr::VK_ABS_G0, AArch64MCExpr::VK_ABS_G0_S,
         AArch64MCExpr::VK_ABS_G0_NC, AArch64MCExpr::VK_PREL_G0,
         AArch64MCExpr::VK_PREL_G0_NC, AArch64MCExpr::VK_GOTTPREL_G0_NC,
         AArch64MCExpr::VK_TPREL_G0, AArch64MCExpr::VK_TPREL_G0_NC,
         AArch64MCExpr::VK_DTPREL_G0, AArch64MCExpr::VK_DTPREL_G0_NC});
  }
  // MOV alias of MOVZ: a constant encodable as MOVZ with this Shift/RegWidth,
  // or (for Shift == 0 only) a non-constant expression.
  template<int RegWidth, int Shift>
  bool isMOVZMovAlias() const {
    if (!isImm()) return false;
    const MCExpr *E = getImm();
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(E)) {
      uint64_t Value = CE->getValue();
      return AArch64_AM::isMOVZMovAlias(Value, Shift, RegWidth);
    }
    // Only supports the case of Shift being 0 if an expression is used as an
    // operand
    return !Shift && E;
  }
  998. template<int RegWidth, int Shift>
  999. bool isMOVNMovAlias() const {
  1000. if (!isImm()) return false;
  1001. const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
  1002. if (!CE) return false;
  1003. uint64_t Value = CE->getValue();
  1004. return AArch64_AM::isMOVNMovAlias(Value, Shift, RegWidth);
  1005. }
  // FP immediate encodable in the 8-bit FMOV immediate field.
  bool isFPImm() const {
    return Kind == k_FPImm &&
           AArch64_AM::getFP64Imm(getFPImm().bitcastToAPInt()) != -1;
  }
  // Barrier operand without the 'nXS' qualifier.
  bool isBarrier() const {
    return Kind == k_Barrier && !getBarriernXSModifier();
  }
  // Barrier operand carrying the 'nXS' qualifier.
  bool isBarriernXS() const {
    return Kind == k_Barrier && getBarriernXSModifier();
  }
  bool isSysReg() const { return Kind == k_SysReg; }
  // System register readable via MRS (lookup succeeded during parsing).
  bool isMRSSystemRegister() const {
    if (!isSysReg()) return false;
    return SysReg.MRSReg != -1U;
  }
  // System register writable via MSR.
  bool isMSRSystemRegister() const {
    if (!isSysReg()) return false;
    return SysReg.MSRReg != -1U;
  }
  // PState field taking a 1-bit immediate (present in the Imm0_1 table).
  bool isSystemPStateFieldWithImm0_1() const {
    if (!isSysReg()) return false;
    return AArch64PState::lookupPStateImm0_1ByEncoding(SysReg.PStateField);
  }
  // PState field taking a 4-bit immediate (present in the Imm0_15 table).
  bool isSystemPStateFieldWithImm0_15() const {
    if (!isSysReg())
      return false;
    return AArch64PState::lookupPStateImm0_15ByEncoding(SysReg.PStateField);
  }
  // SVCR operand with a valid PState field.
  bool isSVCR() const {
    if (Kind != k_SVCR)
      return false;
    return SVCR.PStateField != -1U;
  }
  bool isReg() const override {
    return Kind == k_Register;
  }
  bool isVectorList() const { return Kind == k_VectorList; }
  bool isScalarReg() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar;
  }
  bool isNeonVectorReg() const {
    return Kind == k_Register && Reg.Kind == RegKind::NeonVector;
  }
  // NEON vector register restricted to the "_lo" register classes
  // (FPR128_lo / FPR64_lo), as required by some indexed-element forms.
  bool isNeonVectorRegLo() const {
    return Kind == k_Register && Reg.Kind == RegKind::NeonVector &&
           (AArch64MCRegisterClasses[AArch64::FPR128_loRegClassID].contains(
                Reg.RegNum) ||
            AArch64MCRegisterClasses[AArch64::FPR64_loRegClassID].contains(
                Reg.RegNum));
  }
  bool isMatrix() const { return Kind == k_MatrixRegister; }
  bool isMatrixTileList() const { return Kind == k_MatrixTileList; }
  // Predicate-as-counter register in the given (PPR-derived) register class.
  template <unsigned Class> bool isSVEPredicateAsCounterReg() const {
    RegKind RK;
    switch (Class) {
    case AArch64::PPRRegClassID:
    case AArch64::PPR_3bRegClassID:
    case AArch64::PPR_p8to15RegClassID:
      RK = RegKind::SVEPredicateAsCounter;
      break;
    default:
      llvm_unreachable("Unsupport register class");
    }
    return (Kind == k_Register && Reg.Kind == RK) &&
           AArch64MCRegisterClasses[Class].contains(getReg());
  }
  // SVE data (Z) or predicate (P) register; the expected RegKind is derived
  // from the class ID so data and predicate classes share one predicate.
  template <unsigned Class> bool isSVEVectorReg() const {
    RegKind RK;
    switch (Class) {
    case AArch64::ZPRRegClassID:
    case AArch64::ZPR_3bRegClassID:
    case AArch64::ZPR_4bRegClassID:
      RK = RegKind::SVEDataVector;
      break;
    case AArch64::PPRRegClassID:
    case AArch64::PPR_3bRegClassID:
      RK = RegKind::SVEPredicateVector;
      break;
    default:
      llvm_unreachable("Unsupport register class");
    }
    return (Kind == k_Register && Reg.Kind == RK) &&
           AArch64MCRegisterClasses[Class].contains(getReg());
  }
  // Scalar FP register written where a Z register is expected (remapped later
  // by addFPRasZPRRegOperands).
  template <unsigned Class> bool isFPRasZPR() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
           AArch64MCRegisterClasses[Class].contains(getReg());
  }
  // SVE predicate register with a specific element width.
  template <int ElementWidth, unsigned Class>
  DiagnosticPredicate isSVEPredicateVectorRegOfWidth() const {
    if (Kind != k_Register || Reg.Kind != RegKind::SVEPredicateVector)
      return DiagnosticPredicateTy::NoMatch;
    if (isSVEVectorReg<Class>() && (Reg.ElementWidth == ElementWidth))
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  // Predicate-as-counter register with a specific element width.
  template <int ElementWidth, unsigned Class>
  DiagnosticPredicate isSVEPredicateAsCounterRegOfWidth() const {
    if (Kind != k_Register || Reg.Kind != RegKind::SVEPredicateAsCounter)
      return DiagnosticPredicateTy::NoMatch;
    if (isSVEPredicateAsCounterReg<Class>() && (Reg.ElementWidth == ElementWidth))
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  // SVE data (Z) register with a specific element width.
  template <int ElementWidth, unsigned Class>
  DiagnosticPredicate isSVEDataVectorRegOfWidth() const {
    if (Kind != k_Register || Reg.Kind != RegKind::SVEDataVector)
      return DiagnosticPredicateTy::NoMatch;
    if (isSVEVectorReg<Class>() && Reg.ElementWidth == ElementWidth)
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  // SVE data register used with a shift/extend (scaled register offset).
  template <int ElementWidth, unsigned Class,
            AArch64_AM::ShiftExtendType ShiftExtendTy, int ShiftWidth,
            bool ShiftWidthAlwaysSame>
  DiagnosticPredicate isSVEDataVectorRegWithShiftExtend() const {
    auto VectorMatch = isSVEDataVectorRegOfWidth<ElementWidth, Class>();
    if (!VectorMatch.isMatch())
      return DiagnosticPredicateTy::NoMatch;
    // Give a more specific diagnostic when the user has explicitly typed in
    // a shift-amount that does not match what is expected, but for which
    // there is also an unscaled addressing mode (e.g. sxtw/uxtw).
    bool MatchShift = getShiftExtendAmount() == Log2_32(ShiftWidth / 8);
    if (!MatchShift && (ShiftExtendTy == AArch64_AM::UXTW ||
                        ShiftExtendTy == AArch64_AM::SXTW) &&
        !ShiftWidthAlwaysSame && hasShiftExtendAmount() && ShiftWidth == 8)
      return DiagnosticPredicateTy::NoMatch;
    if (MatchShift && ShiftExtendTy == getShiftExtendType())
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  // A 64-bit GPR name written where a 32-bit GPR is encoded (remapped later
  // by addGPR32as64Operands).
  bool isGPR32as64() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
      AArch64MCRegisterClasses[AArch64::GPR64RegClassID].contains(Reg.RegNum);
  }
  // A 32-bit GPR name written where a 64-bit GPR is encoded.
  bool isGPR64as32() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
      AArch64MCRegisterClasses[AArch64::GPR32RegClassID].contains(Reg.RegNum);
  }
  // Member of the GPR64x8 (eight-register tuple) class.
  bool isGPR64x8() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
           AArch64MCRegisterClasses[AArch64::GPR64x8ClassRegClassID].contains(
               Reg.RegNum);
  }
  // Member of the W sequential-pair class.
  bool isWSeqPair() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
           AArch64MCRegisterClasses[AArch64::WSeqPairsClassRegClassID].contains(
               Reg.RegNum);
  }
  // Member of the X sequential-pair class.
  bool isXSeqPair() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
           AArch64MCRegisterClasses[AArch64::XSeqPairsClassRegClassID].contains(
               Reg.RegNum);
  }
  // XZR written where a SYSP-style pair register is expected.
  bool isSyspXzrPair() const {
    return isGPR64<AArch64::GPR64RegClassID>() && Reg.RegNum == AArch64::XZR;
  }
  1163. template<int64_t Angle, int64_t Remainder>
  1164. DiagnosticPredicate isComplexRotation() const {
  1165. if (!isImm()) return DiagnosticPredicateTy::NoMatch;
  1166. const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
  1167. if (!CE) return DiagnosticPredicateTy::NoMatch;
  1168. uint64_t Value = CE->getValue();
  1169. if (Value % Angle == Remainder && Value <= 270)
  1170. return DiagnosticPredicateTy::Match;
  1171. return DiagnosticPredicateTy::NearMatch;
  1172. }
  // Scalar register in the given 64-bit GPR register class.
  template <unsigned RegClassID> bool isGPR64() const {
    return Kind == k_Register && Reg.Kind == RegKind::Scalar &&
           AArch64MCRegisterClasses[RegClassID].contains(getReg());
  }
  // 64-bit GPR carrying an explicit 'lsl #log2(ExtWidth/8)' shift.
  template <unsigned RegClassID, int ExtWidth>
  DiagnosticPredicate isGPR64WithShiftExtend() const {
    if (Kind != k_Register || Reg.Kind != RegKind::Scalar)
      return DiagnosticPredicateTy::NoMatch;
    if (isGPR64<RegClassID>() && getShiftExtendType() == AArch64_AM::LSL &&
        getShiftExtendAmount() == Log2_32(ExtWidth / 8))
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  /// Is this a vector list with the type implicit (presumably attached to the
  /// instruction itself)?
  template <RegKind VectorKind, unsigned NumRegs>
  bool isImplicitlyTypedVectorList() const {
    return Kind == k_VectorList && VectorList.Count == NumRegs &&
           VectorList.NumElements == 0 &&
           VectorList.RegisterKind == VectorKind;
  }
  // Fully-typed vector list: kind, register count, element count, element
  // width and register stride must all agree.
  template <RegKind VectorKind, unsigned NumRegs, unsigned NumElements,
            unsigned ElementWidth, unsigned Stride = 1>
  bool isTypedVectorList() const {
    if (Kind != k_VectorList)
      return false;
    if (VectorList.Count != NumRegs)
      return false;
    if (VectorList.RegisterKind != VectorKind)
      return false;
    if (VectorList.ElementWidth != ElementWidth)
      return false;
    if (VectorList.Stride != Stride)
      return false;
    return VectorList.NumElements == NumElements;
  }
  // Consecutive list whose first register index is a multiple of the list
  // length (relative to Z0); misaligned starts give a near-match diagnostic.
  template <RegKind VectorKind, unsigned NumRegs, unsigned NumElements,
            unsigned ElementWidth>
  DiagnosticPredicate isTypedVectorListMultiple() const {
    bool Res =
        isTypedVectorList<VectorKind, NumRegs, NumElements, ElementWidth>();
    if (!Res)
      return DiagnosticPredicateTy::NoMatch;
    if (((VectorList.RegNum - AArch64::Z0) % NumRegs) != 0)
      return DiagnosticPredicateTy::NearMatch;
    return DiagnosticPredicateTy::Match;
  }
  // Strided list: the start register must lie within the first Stride
  // registers of either Z0.. or Z16..
  template <RegKind VectorKind, unsigned NumRegs, unsigned Stride,
            unsigned ElementWidth>
  DiagnosticPredicate isTypedVectorListStrided() const {
    bool Res = isTypedVectorList<VectorKind, NumRegs, /*NumElements*/ 0,
                                 ElementWidth, Stride>();
    if (!Res)
      return DiagnosticPredicateTy::NoMatch;
    if ((VectorList.RegNum < (AArch64::Z0 + Stride)) ||
        ((VectorList.RegNum >= AArch64::Z16) &&
         (VectorList.RegNum < (AArch64::Z16 + Stride))))
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NoMatch;
  }
  // Vector/element index in the inclusive range [Min, Max].
  template <int Min, int Max>
  DiagnosticPredicate isVectorIndex() const {
    if (Kind != k_VectorIndex)
      return DiagnosticPredicateTy::NoMatch;
    if (VectorIndex.Val >= Min && VectorIndex.Val <= Max)
      return DiagnosticPredicateTy::Match;
    return DiagnosticPredicateTy::NearMatch;
  }
  bool isToken() const override { return Kind == k_Token; }
  bool isTokenEqual(StringRef Str) const {
    return Kind == k_Token && getToken() == Str;
  }
  bool isSysCR() const { return Kind == k_SysCR; }
  bool isPrefetch() const { return Kind == k_Prefetch; }
  bool isPSBHint() const { return Kind == k_PSBHint; }
  bool isBTIHint() const { return Kind == k_BTIHint; }
  bool isShiftExtend() const { return Kind == k_ShiftExtend; }
  // Shifters (as opposed to extends) are LSL/LSR/ASR/ROR/MSL.
  bool isShifter() const {
    if (!isShiftExtend())
      return false;
    AArch64_AM::ShiftExtendType ST = getShiftExtendType();
    return (ST == AArch64_AM::LSL || ST == AArch64_AM::LSR ||
            ST == AArch64_AM::ASR || ST == AArch64_AM::ROR ||
            ST == AArch64_AM::MSL);
  }
  // True if this FP immediate is bitwise-equal to the value described by the
  // AArch64ExactFPImm table entry for ImmEnum.
  template <unsigned ImmEnum> DiagnosticPredicate isExactFPImm() const {
    if (Kind != k_FPImm)
      return DiagnosticPredicateTy::NoMatch;
    if (getFPImmIsExact()) {
      // Lookup the immediate from table of supported immediates.
      auto *Desc = AArch64ExactFPImm::lookupExactFPImmByEnum(ImmEnum);
      assert(Desc && "Unknown enum value");
      // Calculate its FP value.
      APFloat RealVal(APFloat::IEEEdouble());
      auto StatusOrErr =
          RealVal.convertFromString(Desc->Repr, APFloat::rmTowardZero);
      if (errorToBool(StatusOrErr.takeError()) || *StatusOrErr != APFloat::opOK)
        llvm_unreachable("FP immediate is not exact");
      if (getFPImm().bitwiseIsEqual(RealVal))
        return DiagnosticPredicateTy::Match;
    }
    return DiagnosticPredicateTy::NearMatch;
  }
  // Matches if the immediate equals either of the two candidate table
  // entries; otherwise returns the diagnostic result of the second check.
  template <unsigned ImmA, unsigned ImmB>
  DiagnosticPredicate isExactFPImm() const {
    DiagnosticPredicate Res = DiagnosticPredicateTy::NoMatch;
    if ((Res = isExactFPImm<ImmA>()))
      return DiagnosticPredicateTy::Match;
    if ((Res = isExactFPImm<ImmB>()))
      return DiagnosticPredicateTy::Match;
    return Res;
  }
  // Any register extend (or LSL) with an amount of at most 4.
  bool isExtend() const {
    if (!isShiftExtend())
      return false;
    AArch64_AM::ShiftExtendType ET = getShiftExtendType();
    return (ET == AArch64_AM::UXTB || ET == AArch64_AM::SXTB ||
            ET == AArch64_AM::UXTH || ET == AArch64_AM::SXTH ||
            ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW ||
            ET == AArch64_AM::UXTX || ET == AArch64_AM::SXTX ||
            ET == AArch64_AM::LSL) &&
           getShiftExtendAmount() <= 4;
  }
  bool isExtend64() const {
    if (!isExtend())
      return false;
    // Make sure the extend expects a 32-bit source register.
    AArch64_AM::ShiftExtendType ET = getShiftExtendType();
    return ET == AArch64_AM::UXTB || ET == AArch64_AM::SXTB ||
           ET == AArch64_AM::UXTH || ET == AArch64_AM::SXTH ||
           ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW;
  }
  // Extends taking a 64-bit source register, or plain LSL.
  bool isExtendLSL64() const {
    if (!isExtend())
      return false;
    AArch64_AM::ShiftExtendType ET = getShiftExtendType();
    return (ET == AArch64_AM::UXTX || ET == AArch64_AM::SXTX ||
            ET == AArch64_AM::LSL) &&
           getShiftExtendAmount() <= 4;
  }
  // Memory-offset extend for an X-register offset: LSL or SXTX with the
  // amount either 0 or log2 of the access width in bytes.
  template<int Width> bool isMemXExtend() const {
    if (!isExtend())
      return false;
    AArch64_AM::ShiftExtendType ET = getShiftExtendType();
    return (ET == AArch64_AM::LSL || ET == AArch64_AM::SXTX) &&
           (getShiftExtendAmount() == Log2_32(Width / 8) ||
            getShiftExtendAmount() == 0);
  }
  // Memory-offset extend for a W-register offset: UXTW or SXTW.
  template<int Width> bool isMemWExtend() const {
    if (!isExtend())
      return false;
    AArch64_AM::ShiftExtendType ET = getShiftExtendType();
    return (ET == AArch64_AM::UXTW || ET == AArch64_AM::SXTW) &&
           (getShiftExtendAmount() == Log2_32(Width / 8) ||
            getShiftExtendAmount() == 0);
  }
  template <unsigned width>
  bool isArithmeticShifter() const {
    if (!isShifter())
      return false;
    // An arithmetic shifter is LSL, LSR, or ASR with an amount below the
    // register width.
    AArch64_AM::ShiftExtendType ST = getShiftExtendType();
    return (ST == AArch64_AM::LSL || ST == AArch64_AM::LSR ||
            ST == AArch64_AM::ASR) && getShiftExtendAmount() < width;
  }
  template <unsigned width>
  bool isLogicalShifter() const {
    if (!isShifter())
      return false;
    // A logical shifter is LSL, LSR, ASR or ROR with an amount below the
    // register width.
    AArch64_AM::ShiftExtendType ST = getShiftExtendType();
    return (ST == AArch64_AM::LSL || ST == AArch64_AM::LSR ||
            ST == AArch64_AM::ASR || ST == AArch64_AM::ROR) &&
           getShiftExtendAmount() < width;
  }
  bool isMovImm32Shifter() const {
    if (!isShifter())
      return false;
    // A 32-bit MOVi shifter is LSL of 0 or 16 (the previous comment listing
    // 32/48 described the 64-bit variant).
    AArch64_AM::ShiftExtendType ST = getShiftExtendType();
    if (ST != AArch64_AM::LSL)
      return false;
    uint64_t Val = getShiftExtendAmount();
    return (Val == 0 || Val == 16);
  }
  bool isMovImm64Shifter() const {
    if (!isShifter())
      return false;
    // A 64-bit MOVi shifter is LSL of 0, 16, 32, or 48 (the previous comment
    // listing only 0/16 described the 32-bit variant).
    AArch64_AM::ShiftExtendType ST = getShiftExtendType();
    if (ST != AArch64_AM::LSL)
      return false;
    uint64_t Val = getShiftExtendAmount();
    return (Val == 0 || Val == 16 || Val == 32 || Val == 48);
  }
  bool isLogicalVecShifter() const {
    if (!isShifter())
      return false;
    // A logical vector shifter is a left shift by 0, 8, 16, or 24.
    unsigned Shift = getShiftExtendAmount();
    return getShiftExtendType() == AArch64_AM::LSL &&
           (Shift == 0 || Shift == 8 || Shift == 16 || Shift == 24);
  }
  bool isLogicalVecHalfWordShifter() const {
    if (!isLogicalVecShifter())
      return false;
    // A logical vector half-word shifter is a left shift by 0 or 8.
    unsigned Shift = getShiftExtendAmount();
    return getShiftExtendType() == AArch64_AM::LSL &&
           (Shift == 0 || Shift == 8);
  }
  bool isMoveVecShifter() const {
    if (!isShiftExtend())
      return false;
    // A move vector shifter is an MSL shift by 8 or 16.
    unsigned Shift = getShiftExtendAmount();
    return getShiftExtendType() == AArch64_AM::MSL &&
           (Shift == 8 || Shift == 16);
  }
  // Fallback unscaled operands are for aliases of LDR/STR that fall back
  // to LDUR/STUR when the offset is not legal for the former but is for
  // the latter. As such, in addition to checking for being a legal unscaled
  // address, also check that it is not a legal scaled address. This avoids
  // ambiguity in the matcher.
  template<int Width>
  bool isSImm9OffsetFB() const {
    return isSImm<9>() && !isUImm12Offset<Width / 8>();
  }
  bool isAdrpLabel() const {
    // Validation was handled during parsing, so we just verify that
    // something didn't go haywire.
    if (!isImm())
      return false;
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(Imm.Val)) {
      int64_t Val = CE->getValue();
      // Constant offsets must be page-aligned and fit in a signed 21-bit
      // page count (4096-byte pages).
      int64_t Min = - (4096 * (1LL << (21 - 1)));
      int64_t Max = 4096 * ((1LL << (21 - 1)) - 1);
      return (Val % 4096) == 0 && Val >= Min && Val <= Max;
    }
    return true;
  }
  bool isAdrLabel() const {
    // Validation was handled during parsing, so we just verify that
    // something didn't go haywire.
    if (!isImm())
      return false;
    if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(Imm.Val)) {
      int64_t Val = CE->getValue();
      // Constant offsets must fit in a signed 21-bit byte offset.
      int64_t Min = - (1LL << (21 - 1));
      int64_t Max = ((1LL << (21 - 1)) - 1);
      return Val >= Min && Val <= Max;
    }
    return true;
  }
  // Matrix register with the expected kind, register class and element size.
  template <MatrixKind Kind, unsigned EltSize, unsigned RegClass>
  DiagnosticPredicate isMatrixRegOperand() const {
    if (!isMatrix())
      return DiagnosticPredicateTy::NoMatch;
    if (getMatrixKind() != Kind ||
        !AArch64MCRegisterClasses[RegClass].contains(getMatrixReg()) ||
        EltSize != getMatrixElementWidth())
      return DiagnosticPredicateTy::NearMatch;
    return DiagnosticPredicateTy::Match;
  }

  // Append Expr to Inst, folding constants to plain immediate operands.
  void addExpr(MCInst &Inst, const MCExpr *Expr) const {
    // Add as immediates when possible. Null MCExpr = 0.
    if (!Expr)
      Inst.addOperand(MCOperand::createImm(0));
    else if (const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(Expr))
      Inst.addOperand(MCOperand::createImm(CE->getValue()));
    else
      Inst.addOperand(MCOperand::createExpr(Expr));
  }
  void addRegOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getReg()));
  }
  void addMatrixOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getMatrixReg()));
  }
  // Emit the W register with the same encoding as the parsed X register.
  void addGPR32as64Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    assert(
        AArch64MCRegisterClasses[AArch64::GPR64RegClassID].contains(getReg()));
    const MCRegisterInfo *RI = Ctx.getRegisterInfo();
    uint32_t Reg = RI->getRegClass(AArch64::GPR32RegClassID).getRegister(
        RI->getEncodingValue(getReg()));
    Inst.addOperand(MCOperand::createReg(Reg));
  }
  // Emit the X register with the same encoding as the parsed W register.
  void addGPR64as32Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    assert(
        AArch64MCRegisterClasses[AArch64::GPR32RegClassID].contains(getReg()));
    const MCRegisterInfo *RI = Ctx.getRegisterInfo();
    uint32_t Reg = RI->getRegClass(AArch64::GPR64RegClassID).getRegister(
        RI->getEncodingValue(getReg()));
    Inst.addOperand(MCOperand::createReg(Reg));
  }
  // Emit the Z register with the same index as the parsed scalar FP register
  // (B/H/S/D/Q depending on Width).
  template <int Width>
  void addFPRasZPRRegOperands(MCInst &Inst, unsigned N) const {
    unsigned Base;
    switch (Width) {
    case 8: Base = AArch64::B0; break;
    case 16: Base = AArch64::H0; break;
    case 32: Base = AArch64::S0; break;
    case 64: Base = AArch64::D0; break;
    case 128: Base = AArch64::Q0; break;
    default:
      llvm_unreachable("Unsupported width");
    }
    Inst.addOperand(MCOperand::createReg(AArch64::Z0 + getReg() - Base));
  }
  // Emit the D register with the same index as the parsed Q register.
  void addVectorReg64Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    assert(
        AArch64MCRegisterClasses[AArch64::FPR128RegClassID].contains(getReg()));
    Inst.addOperand(MCOperand::createReg(AArch64::D0 + getReg() - AArch64::Q0));
  }
  void addVectorReg128Operands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    assert(
        AArch64MCRegisterClasses[AArch64::FPR128RegClassID].contains(getReg()));
    Inst.addOperand(MCOperand::createReg(getReg()));
  }
  void addVectorRegLoOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createReg(getReg()));
  }
  // Row selector for the FirstRegs table below: which register file a
  // vector list uses.
  enum VecListIndexType {
    VecListIdx_DReg = 0,
    VecListIdx_QReg = 1,
    VecListIdx_ZReg = 2,
    VecListIdx_PReg = 3,
  };

  // Emit the tuple register for a parsed vector list. FirstRegs[Ty][N] is the
  // first N-register tuple of that file; FirstRegs[Ty][0] is the base single
  // register used to compute the parsed start register's offset.
  template <VecListIndexType RegTy, unsigned NumRegs>
  void addVectorListOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    static const unsigned FirstRegs[][5] = {
      /* DReg */ { AArch64::Q0,
                   AArch64::D0,       AArch64::D0_D1,
                   AArch64::D0_D1_D2, AArch64::D0_D1_D2_D3 },
      /* QReg */ { AArch64::Q0,
                   AArch64::Q0,       AArch64::Q0_Q1,
                   AArch64::Q0_Q1_Q2, AArch64::Q0_Q1_Q2_Q3 },
      /* ZReg */ { AArch64::Z0,
                   AArch64::Z0,       AArch64::Z0_Z1,
                   AArch64::Z0_Z1_Z2, AArch64::Z0_Z1_Z2_Z3 },
      /* PReg */ { AArch64::P0,
                   AArch64::P0,       AArch64::P0_P1 }
    };

    assert((RegTy != VecListIdx_ZReg || NumRegs <= 4) &&
           " NumRegs must be <= 4 for ZRegs");
    assert((RegTy != VecListIdx_PReg || NumRegs <= 2) &&
           " NumRegs must be <= 2 for PRegs");

    unsigned FirstReg = FirstRegs[(unsigned)RegTy][NumRegs];
    Inst.addOperand(MCOperand::createReg(FirstReg + getVectorListStart() -
                                         FirstRegs[(unsigned)RegTy][0]));
  }
  // Emit the strided Z-register tuple selected by the list's start register:
  // Z0_Z8-style pairs (or Z0_Z4_Z8_Z12-style quads), with separate ranges for
  // starts below and at-or-above Z16.
  template <unsigned NumRegs>
  void addStridedVectorListOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    assert((NumRegs == 2 || NumRegs == 4) && " NumRegs must be 2 or 4");
    switch (NumRegs) {
    case 2:
      if (getVectorListStart() < AArch64::Z16) {
        assert((getVectorListStart() < AArch64::Z8) &&
               (getVectorListStart() >= AArch64::Z0) && "Invalid Register");
        Inst.addOperand(MCOperand::createReg(
            AArch64::Z0_Z8 + getVectorListStart() - AArch64::Z0));
      } else {
        assert((getVectorListStart() < AArch64::Z24) &&
               (getVectorListStart() >= AArch64::Z16) && "Invalid Register");
        Inst.addOperand(MCOperand::createReg(
            AArch64::Z16_Z24 + getVectorListStart() - AArch64::Z16));
      }
      break;
    case 4:
      if (getVectorListStart() < AArch64::Z16) {
        assert((getVectorListStart() < AArch64::Z4) &&
               (getVectorListStart() >= AArch64::Z0) && "Invalid Register");
        Inst.addOperand(MCOperand::createReg(
            AArch64::Z0_Z4_Z8_Z12 + getVectorListStart() - AArch64::Z0));
      } else {
        assert((getVectorListStart() < AArch64::Z20) &&
               (getVectorListStart() >= AArch64::Z16) && "Invalid Register");
        Inst.addOperand(MCOperand::createReg(
            AArch64::Z16_Z20_Z24_Z28 + getVectorListStart() - AArch64::Z16));
      }
      break;
    default:
      llvm_unreachable("Unsupported number of registers for strided vec list");
    }
  }
  // Emit the 8-bit tile mask of a matrix tile list.
  void addMatrixTileListOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    unsigned RegMask = getMatrixTileListRegMask();
    assert(RegMask <= 0xFF && "Invalid mask!");
    Inst.addOperand(MCOperand::createImm(RegMask));
  }

  void addVectorIndexOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    Inst.addOperand(MCOperand::createImm(getVectorIndex()));
  }

  // Emit 0 or 1 depending on which of the two candidate exact FP immediates
  // this operand matched.
  template <unsigned ImmIs0, unsigned ImmIs1>
  void addExactFPImmOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    assert(bool(isExactFPImm<ImmIs0, ImmIs1>()) && "Invalid operand");
    Inst.addOperand(MCOperand::createImm(bool(isExactFPImm<ImmIs1>())));
  }

  void addImmOperands(MCInst &Inst, unsigned N) const {
    assert(N == 1 && "Invalid number of operands!");
    // If this is a pageoff symrefexpr with an addend, adjust the addend
    // to be only the page-offset portion. Otherwise, just add the expr
    // as-is.
    addExpr(Inst, getImm());
  }
  1590. template <int Shift>
  1591. void addImmWithOptionalShiftOperands(MCInst &Inst, unsigned N) const {
  1592. assert(N == 2 && "Invalid number of operands!");
  1593. if (auto ShiftedVal = getShiftedVal<Shift>()) {
  1594. Inst.addOperand(MCOperand::createImm(ShiftedVal->first));
  1595. Inst.addOperand(MCOperand::createImm(ShiftedVal->second));
  1596. } else if (isShiftedImm()) {
  1597. addExpr(Inst, getShiftedImmVal());
  1598. Inst.addOperand(MCOperand::createImm(getShiftedImmShift()));
  1599. } else {
  1600. addExpr(Inst, getImm());
  1601. Inst.addOperand(MCOperand::createImm(0));
  1602. }
  1603. }
  1604. template <int Shift>
  1605. void addImmNegWithOptionalShiftOperands(MCInst &Inst, unsigned N) const {
  1606. assert(N == 2 && "Invalid number of operands!");
  1607. if (auto ShiftedVal = getShiftedVal<Shift>()) {
  1608. Inst.addOperand(MCOperand::createImm(-ShiftedVal->first));
  1609. Inst.addOperand(MCOperand::createImm(ShiftedVal->second));
  1610. } else
  1611. llvm_unreachable("Not a shifted negative immediate");
  1612. }
  1613. void addCondCodeOperands(MCInst &Inst, unsigned N) const {
  1614. assert(N == 1 && "Invalid number of operands!");
  1615. Inst.addOperand(MCOperand::createImm(getCondCode()));
  1616. }
  1617. void addAdrpLabelOperands(MCInst &Inst, unsigned N) const {
  1618. assert(N == 1 && "Invalid number of operands!");
  1619. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  1620. if (!MCE)
  1621. addExpr(Inst, getImm());
  1622. else
  1623. Inst.addOperand(MCOperand::createImm(MCE->getValue() >> 12));
  1624. }
// ADR labels are byte-granular, so the generic immediate path applies
// unchanged (no page scaling as in addAdrpLabelOperands).
void addAdrLabelOperands(MCInst &Inst, unsigned N) const {
  addImmOperands(Inst, N);
}
  1628. template<int Scale>
  1629. void addUImm12OffsetOperands(MCInst &Inst, unsigned N) const {
  1630. assert(N == 1 && "Invalid number of operands!");
  1631. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  1632. if (!MCE) {
  1633. Inst.addOperand(MCOperand::createExpr(getImm()));
  1634. return;
  1635. }
  1636. Inst.addOperand(MCOperand::createImm(MCE->getValue() / Scale));
  1637. }
  1638. void addUImm6Operands(MCInst &Inst, unsigned N) const {
  1639. assert(N == 1 && "Invalid number of operands!");
  1640. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1641. Inst.addOperand(MCOperand::createImm(MCE->getValue()));
  1642. }
  1643. template <int Scale>
  1644. void addImmScaledOperands(MCInst &Inst, unsigned N) const {
  1645. assert(N == 1 && "Invalid number of operands!");
  1646. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1647. Inst.addOperand(MCOperand::createImm(MCE->getValue() / Scale));
  1648. }
  1649. template <int Scale>
  1650. void addImmScaledRangeOperands(MCInst &Inst, unsigned N) const {
  1651. assert(N == 1 && "Invalid number of operands!");
  1652. Inst.addOperand(MCOperand::createImm(getFirstImmVal() / Scale));
  1653. }
  1654. template <typename T>
  1655. void addLogicalImmOperands(MCInst &Inst, unsigned N) const {
  1656. assert(N == 1 && "Invalid number of operands!");
  1657. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1658. std::make_unsigned_t<T> Val = MCE->getValue();
  1659. uint64_t encoding = AArch64_AM::encodeLogicalImmediate(Val, sizeof(T) * 8);
  1660. Inst.addOperand(MCOperand::createImm(encoding));
  1661. }
  1662. template <typename T>
  1663. void addLogicalImmNotOperands(MCInst &Inst, unsigned N) const {
  1664. assert(N == 1 && "Invalid number of operands!");
  1665. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1666. std::make_unsigned_t<T> Val = ~MCE->getValue();
  1667. uint64_t encoding = AArch64_AM::encodeLogicalImmediate(Val, sizeof(T) * 8);
  1668. Inst.addOperand(MCOperand::createImm(encoding));
  1669. }
  1670. void addSIMDImmType10Operands(MCInst &Inst, unsigned N) const {
  1671. assert(N == 1 && "Invalid number of operands!");
  1672. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1673. uint64_t encoding = AArch64_AM::encodeAdvSIMDModImmType10(MCE->getValue());
  1674. Inst.addOperand(MCOperand::createImm(encoding));
  1675. }
  1676. void addBranchTarget26Operands(MCInst &Inst, unsigned N) const {
  1677. // Branch operands don't encode the low bits, so shift them off
  1678. // here. If it's a label, however, just put it on directly as there's
  1679. // not enough information now to do anything.
  1680. assert(N == 1 && "Invalid number of operands!");
  1681. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  1682. if (!MCE) {
  1683. addExpr(Inst, getImm());
  1684. return;
  1685. }
  1686. assert(MCE && "Invalid constant immediate operand!");
  1687. Inst.addOperand(MCOperand::createImm(MCE->getValue() >> 2));
  1688. }
  1689. void addPCRelLabel19Operands(MCInst &Inst, unsigned N) const {
  1690. // Branch operands don't encode the low bits, so shift them off
  1691. // here. If it's a label, however, just put it on directly as there's
  1692. // not enough information now to do anything.
  1693. assert(N == 1 && "Invalid number of operands!");
  1694. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  1695. if (!MCE) {
  1696. addExpr(Inst, getImm());
  1697. return;
  1698. }
  1699. assert(MCE && "Invalid constant immediate operand!");
  1700. Inst.addOperand(MCOperand::createImm(MCE->getValue() >> 2));
  1701. }
  1702. void addBranchTarget14Operands(MCInst &Inst, unsigned N) const {
  1703. // Branch operands don't encode the low bits, so shift them off
  1704. // here. If it's a label, however, just put it on directly as there's
  1705. // not enough information now to do anything.
  1706. assert(N == 1 && "Invalid number of operands!");
  1707. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(getImm());
  1708. if (!MCE) {
  1709. addExpr(Inst, getImm());
  1710. return;
  1711. }
  1712. assert(MCE && "Invalid constant immediate operand!");
  1713. Inst.addOperand(MCOperand::createImm(MCE->getValue() >> 2));
  1714. }
  1715. void addFPImmOperands(MCInst &Inst, unsigned N) const {
  1716. assert(N == 1 && "Invalid number of operands!");
  1717. Inst.addOperand(MCOperand::createImm(
  1718. AArch64_AM::getFP64Imm(getFPImm().bitcastToAPInt())));
  1719. }
  1720. void addBarrierOperands(MCInst &Inst, unsigned N) const {
  1721. assert(N == 1 && "Invalid number of operands!");
  1722. Inst.addOperand(MCOperand::createImm(getBarrier()));
  1723. }
  1724. void addBarriernXSOperands(MCInst &Inst, unsigned N) const {
  1725. assert(N == 1 && "Invalid number of operands!");
  1726. Inst.addOperand(MCOperand::createImm(getBarrier()));
  1727. }
  1728. void addMRSSystemRegisterOperands(MCInst &Inst, unsigned N) const {
  1729. assert(N == 1 && "Invalid number of operands!");
  1730. Inst.addOperand(MCOperand::createImm(SysReg.MRSReg));
  1731. }
  1732. void addMSRSystemRegisterOperands(MCInst &Inst, unsigned N) const {
  1733. assert(N == 1 && "Invalid number of operands!");
  1734. Inst.addOperand(MCOperand::createImm(SysReg.MSRReg));
  1735. }
  1736. void addSystemPStateFieldWithImm0_1Operands(MCInst &Inst, unsigned N) const {
  1737. assert(N == 1 && "Invalid number of operands!");
  1738. Inst.addOperand(MCOperand::createImm(SysReg.PStateField));
  1739. }
  1740. void addSVCROperands(MCInst &Inst, unsigned N) const {
  1741. assert(N == 1 && "Invalid number of operands!");
  1742. Inst.addOperand(MCOperand::createImm(SVCR.PStateField));
  1743. }
  1744. void addSystemPStateFieldWithImm0_15Operands(MCInst &Inst, unsigned N) const {
  1745. assert(N == 1 && "Invalid number of operands!");
  1746. Inst.addOperand(MCOperand::createImm(SysReg.PStateField));
  1747. }
  1748. void addSysCROperands(MCInst &Inst, unsigned N) const {
  1749. assert(N == 1 && "Invalid number of operands!");
  1750. Inst.addOperand(MCOperand::createImm(getSysCR()));
  1751. }
  1752. void addPrefetchOperands(MCInst &Inst, unsigned N) const {
  1753. assert(N == 1 && "Invalid number of operands!");
  1754. Inst.addOperand(MCOperand::createImm(getPrefetch()));
  1755. }
  1756. void addPSBHintOperands(MCInst &Inst, unsigned N) const {
  1757. assert(N == 1 && "Invalid number of operands!");
  1758. Inst.addOperand(MCOperand::createImm(getPSBHint()));
  1759. }
  1760. void addBTIHintOperands(MCInst &Inst, unsigned N) const {
  1761. assert(N == 1 && "Invalid number of operands!");
  1762. Inst.addOperand(MCOperand::createImm(getBTIHint()));
  1763. }
  1764. void addShifterOperands(MCInst &Inst, unsigned N) const {
  1765. assert(N == 1 && "Invalid number of operands!");
  1766. unsigned Imm =
  1767. AArch64_AM::getShifterImm(getShiftExtendType(), getShiftExtendAmount());
  1768. Inst.addOperand(MCOperand::createImm(Imm));
  1769. }
// Adds the XZR register operand of a SYSP xzr-pair. Note the asymmetric
// error handling below: a non-scalar register is silently skipped (no
// operand is added), while a scalar register other than XZR hits
// llvm_unreachable -- presumably the matcher only reaches here with XZR
// or a non-scalar; TODO(review) confirm against the matcher tables.
void addSyspXzrPairOperand(MCInst &Inst, unsigned N) const {
  assert(N == 1 && "Invalid number of operands!");
  if (!isScalarReg())
    return;
  // Map the parsed register's encoding back through the GPR64 class to
  // normalise it before comparing against XZR.
  const MCRegisterInfo *RI = Ctx.getRegisterInfo();
  uint32_t Reg = RI->getRegClass(AArch64::GPR64RegClassID)
                     .getRegister(RI->getEncodingValue(getReg()));
  if (Reg != AArch64::XZR)
    llvm_unreachable("wrong register");
  Inst.addOperand(MCOperand::createReg(AArch64::XZR));
}
  1781. void addExtendOperands(MCInst &Inst, unsigned N) const {
  1782. assert(N == 1 && "Invalid number of operands!");
  1783. AArch64_AM::ShiftExtendType ET = getShiftExtendType();
  1784. if (ET == AArch64_AM::LSL) ET = AArch64_AM::UXTW;
  1785. unsigned Imm = AArch64_AM::getArithExtendImm(ET, getShiftExtendAmount());
  1786. Inst.addOperand(MCOperand::createImm(Imm));
  1787. }
  1788. void addExtend64Operands(MCInst &Inst, unsigned N) const {
  1789. assert(N == 1 && "Invalid number of operands!");
  1790. AArch64_AM::ShiftExtendType ET = getShiftExtendType();
  1791. if (ET == AArch64_AM::LSL) ET = AArch64_AM::UXTX;
  1792. unsigned Imm = AArch64_AM::getArithExtendImm(ET, getShiftExtendAmount());
  1793. Inst.addOperand(MCOperand::createImm(Imm));
  1794. }
  1795. void addMemExtendOperands(MCInst &Inst, unsigned N) const {
  1796. assert(N == 2 && "Invalid number of operands!");
  1797. AArch64_AM::ShiftExtendType ET = getShiftExtendType();
  1798. bool IsSigned = ET == AArch64_AM::SXTW || ET == AArch64_AM::SXTX;
  1799. Inst.addOperand(MCOperand::createImm(IsSigned));
  1800. Inst.addOperand(MCOperand::createImm(getShiftExtendAmount() != 0));
  1801. }
  1802. // For 8-bit load/store instructions with a register offset, both the
  1803. // "DoShift" and "NoShift" variants have a shift of 0. Because of this,
  1804. // they're disambiguated by whether the shift was explicit or implicit rather
  1805. // than its size.
  1806. void addMemExtend8Operands(MCInst &Inst, unsigned N) const {
  1807. assert(N == 2 && "Invalid number of operands!");
  1808. AArch64_AM::ShiftExtendType ET = getShiftExtendType();
  1809. bool IsSigned = ET == AArch64_AM::SXTW || ET == AArch64_AM::SXTX;
  1810. Inst.addOperand(MCOperand::createImm(IsSigned));
  1811. Inst.addOperand(MCOperand::createImm(hasShiftExtendAmount()));
  1812. }
  1813. template<int Shift>
  1814. void addMOVZMovAliasOperands(MCInst &Inst, unsigned N) const {
  1815. assert(N == 1 && "Invalid number of operands!");
  1816. const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(getImm());
  1817. if (CE) {
  1818. uint64_t Value = CE->getValue();
  1819. Inst.addOperand(MCOperand::createImm((Value >> Shift) & 0xffff));
  1820. } else {
  1821. addExpr(Inst, getImm());
  1822. }
  1823. }
  1824. template<int Shift>
  1825. void addMOVNMovAliasOperands(MCInst &Inst, unsigned N) const {
  1826. assert(N == 1 && "Invalid number of operands!");
  1827. const MCConstantExpr *CE = cast<MCConstantExpr>(getImm());
  1828. uint64_t Value = CE->getValue();
  1829. Inst.addOperand(MCOperand::createImm((~Value >> Shift) & 0xffff));
  1830. }
  1831. void addComplexRotationEvenOperands(MCInst &Inst, unsigned N) const {
  1832. assert(N == 1 && "Invalid number of operands!");
  1833. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1834. Inst.addOperand(MCOperand::createImm(MCE->getValue() / 90));
  1835. }
  1836. void addComplexRotationOddOperands(MCInst &Inst, unsigned N) const {
  1837. assert(N == 1 && "Invalid number of operands!");
  1838. const MCConstantExpr *MCE = cast<MCConstantExpr>(getImm());
  1839. Inst.addOperand(MCOperand::createImm((MCE->getValue() - 90) / 180));
  1840. }
// Debug dump of this operand; defined out-of-line below the class.
void print(raw_ostream &OS) const override;
  1842. static std::unique_ptr<AArch64Operand>
  1843. CreateToken(StringRef Str, SMLoc S, MCContext &Ctx, bool IsSuffix = false) {
  1844. auto Op = std::make_unique<AArch64Operand>(k_Token, Ctx);
  1845. Op->Tok.Data = Str.data();
  1846. Op->Tok.Length = Str.size();
  1847. Op->Tok.IsSuffix = IsSuffix;
  1848. Op->StartLoc = S;
  1849. Op->EndLoc = S;
  1850. return Op;
  1851. }
  1852. static std::unique_ptr<AArch64Operand>
  1853. CreateReg(unsigned RegNum, RegKind Kind, SMLoc S, SMLoc E, MCContext &Ctx,
  1854. RegConstraintEqualityTy EqTy = RegConstraintEqualityTy::EqualsReg,
  1855. AArch64_AM::ShiftExtendType ExtTy = AArch64_AM::LSL,
  1856. unsigned ShiftAmount = 0,
  1857. unsigned HasExplicitAmount = false) {
  1858. auto Op = std::make_unique<AArch64Operand>(k_Register, Ctx);
  1859. Op->Reg.RegNum = RegNum;
  1860. Op->Reg.Kind = Kind;
  1861. Op->Reg.ElementWidth = 0;
  1862. Op->Reg.EqualityTy = EqTy;
  1863. Op->Reg.ShiftExtend.Type = ExtTy;
  1864. Op->Reg.ShiftExtend.Amount = ShiftAmount;
  1865. Op->Reg.ShiftExtend.HasExplicitAmount = HasExplicitAmount;
  1866. Op->StartLoc = S;
  1867. Op->EndLoc = E;
  1868. return Op;
  1869. }
  1870. static std::unique_ptr<AArch64Operand>
  1871. CreateVectorReg(unsigned RegNum, RegKind Kind, unsigned ElementWidth,
  1872. SMLoc S, SMLoc E, MCContext &Ctx,
  1873. AArch64_AM::ShiftExtendType ExtTy = AArch64_AM::LSL,
  1874. unsigned ShiftAmount = 0,
  1875. unsigned HasExplicitAmount = false) {
  1876. assert((Kind == RegKind::NeonVector || Kind == RegKind::SVEDataVector ||
  1877. Kind == RegKind::SVEPredicateVector ||
  1878. Kind == RegKind::SVEPredicateAsCounter) &&
  1879. "Invalid vector kind");
  1880. auto Op = CreateReg(RegNum, Kind, S, E, Ctx, EqualsReg, ExtTy, ShiftAmount,
  1881. HasExplicitAmount);
  1882. Op->Reg.ElementWidth = ElementWidth;
  1883. return Op;
  1884. }
  1885. static std::unique_ptr<AArch64Operand>
  1886. CreateVectorList(unsigned RegNum, unsigned Count, unsigned Stride,
  1887. unsigned NumElements, unsigned ElementWidth,
  1888. RegKind RegisterKind, SMLoc S, SMLoc E, MCContext &Ctx) {
  1889. auto Op = std::make_unique<AArch64Operand>(k_VectorList, Ctx);
  1890. Op->VectorList.RegNum = RegNum;
  1891. Op->VectorList.Count = Count;
  1892. Op->VectorList.Stride = Stride;
  1893. Op->VectorList.NumElements = NumElements;
  1894. Op->VectorList.ElementWidth = ElementWidth;
  1895. Op->VectorList.RegisterKind = RegisterKind;
  1896. Op->StartLoc = S;
  1897. Op->EndLoc = E;
  1898. return Op;
  1899. }
  1900. static std::unique_ptr<AArch64Operand>
  1901. CreateVectorIndex(int Idx, SMLoc S, SMLoc E, MCContext &Ctx) {
  1902. auto Op = std::make_unique<AArch64Operand>(k_VectorIndex, Ctx);
  1903. Op->VectorIndex.Val = Idx;
  1904. Op->StartLoc = S;
  1905. Op->EndLoc = E;
  1906. return Op;
  1907. }
  1908. static std::unique_ptr<AArch64Operand>
  1909. CreateMatrixTileList(unsigned RegMask, SMLoc S, SMLoc E, MCContext &Ctx) {
  1910. auto Op = std::make_unique<AArch64Operand>(k_MatrixTileList, Ctx);
  1911. Op->MatrixTileList.RegMask = RegMask;
  1912. Op->StartLoc = S;
  1913. Op->EndLoc = E;
  1914. return Op;
  1915. }
  1916. static void ComputeRegsForAlias(unsigned Reg, SmallSet<unsigned, 8> &OutRegs,
  1917. const unsigned ElementWidth) {
  1918. static std::map<std::pair<unsigned, unsigned>, std::vector<unsigned>>
  1919. RegMap = {
  1920. {{0, AArch64::ZAB0},
  1921. {AArch64::ZAD0, AArch64::ZAD1, AArch64::ZAD2, AArch64::ZAD3,
  1922. AArch64::ZAD4, AArch64::ZAD5, AArch64::ZAD6, AArch64::ZAD7}},
  1923. {{8, AArch64::ZAB0},
  1924. {AArch64::ZAD0, AArch64::ZAD1, AArch64::ZAD2, AArch64::ZAD3,
  1925. AArch64::ZAD4, AArch64::ZAD5, AArch64::ZAD6, AArch64::ZAD7}},
  1926. {{16, AArch64::ZAH0},
  1927. {AArch64::ZAD0, AArch64::ZAD2, AArch64::ZAD4, AArch64::ZAD6}},
  1928. {{16, AArch64::ZAH1},
  1929. {AArch64::ZAD1, AArch64::ZAD3, AArch64::ZAD5, AArch64::ZAD7}},
  1930. {{32, AArch64::ZAS0}, {AArch64::ZAD0, AArch64::ZAD4}},
  1931. {{32, AArch64::ZAS1}, {AArch64::ZAD1, AArch64::ZAD5}},
  1932. {{32, AArch64::ZAS2}, {AArch64::ZAD2, AArch64::ZAD6}},
  1933. {{32, AArch64::ZAS3}, {AArch64::ZAD3, AArch64::ZAD7}},
  1934. };
  1935. if (ElementWidth == 64)
  1936. OutRegs.insert(Reg);
  1937. else {
  1938. std::vector<unsigned> Regs = RegMap[std::make_pair(ElementWidth, Reg)];
  1939. assert(!Regs.empty() && "Invalid tile or element width!");
  1940. for (auto OutReg : Regs)
  1941. OutRegs.insert(OutReg);
  1942. }
  1943. }
  1944. static std::unique_ptr<AArch64Operand> CreateImm(const MCExpr *Val, SMLoc S,
  1945. SMLoc E, MCContext &Ctx) {
  1946. auto Op = std::make_unique<AArch64Operand>(k_Immediate, Ctx);
  1947. Op->Imm.Val = Val;
  1948. Op->StartLoc = S;
  1949. Op->EndLoc = E;
  1950. return Op;
  1951. }
  1952. static std::unique_ptr<AArch64Operand> CreateShiftedImm(const MCExpr *Val,
  1953. unsigned ShiftAmount,
  1954. SMLoc S, SMLoc E,
  1955. MCContext &Ctx) {
  1956. auto Op = std::make_unique<AArch64Operand>(k_ShiftedImm, Ctx);
  1957. Op->ShiftedImm .Val = Val;
  1958. Op->ShiftedImm.ShiftAmount = ShiftAmount;
  1959. Op->StartLoc = S;
  1960. Op->EndLoc = E;
  1961. return Op;
  1962. }
  1963. static std::unique_ptr<AArch64Operand> CreateImmRange(unsigned First,
  1964. unsigned Last, SMLoc S,
  1965. SMLoc E,
  1966. MCContext &Ctx) {
  1967. auto Op = std::make_unique<AArch64Operand>(k_ImmRange, Ctx);
  1968. Op->ImmRange.First = First;
  1969. Op->ImmRange.Last = Last;
  1970. Op->EndLoc = E;
  1971. return Op;
  1972. }
  1973. static std::unique_ptr<AArch64Operand>
  1974. CreateCondCode(AArch64CC::CondCode Code, SMLoc S, SMLoc E, MCContext &Ctx) {
  1975. auto Op = std::make_unique<AArch64Operand>(k_CondCode, Ctx);
  1976. Op->CondCode.Code = Code;
  1977. Op->StartLoc = S;
  1978. Op->EndLoc = E;
  1979. return Op;
  1980. }
  1981. static std::unique_ptr<AArch64Operand>
  1982. CreateFPImm(APFloat Val, bool IsExact, SMLoc S, MCContext &Ctx) {
  1983. auto Op = std::make_unique<AArch64Operand>(k_FPImm, Ctx);
  1984. Op->FPImm.Val = Val.bitcastToAPInt().getSExtValue();
  1985. Op->FPImm.IsExact = IsExact;
  1986. Op->StartLoc = S;
  1987. Op->EndLoc = S;
  1988. return Op;
  1989. }
  1990. static std::unique_ptr<AArch64Operand> CreateBarrier(unsigned Val,
  1991. StringRef Str,
  1992. SMLoc S,
  1993. MCContext &Ctx,
  1994. bool HasnXSModifier) {
  1995. auto Op = std::make_unique<AArch64Operand>(k_Barrier, Ctx);
  1996. Op->Barrier.Val = Val;
  1997. Op->Barrier.Data = Str.data();
  1998. Op->Barrier.Length = Str.size();
  1999. Op->Barrier.HasnXSModifier = HasnXSModifier;
  2000. Op->StartLoc = S;
  2001. Op->EndLoc = S;
  2002. return Op;
  2003. }
  2004. static std::unique_ptr<AArch64Operand> CreateSysReg(StringRef Str, SMLoc S,
  2005. uint32_t MRSReg,
  2006. uint32_t MSRReg,
  2007. uint32_t PStateField,
  2008. MCContext &Ctx) {
  2009. auto Op = std::make_unique<AArch64Operand>(k_SysReg, Ctx);
  2010. Op->SysReg.Data = Str.data();
  2011. Op->SysReg.Length = Str.size();
  2012. Op->SysReg.MRSReg = MRSReg;
  2013. Op->SysReg.MSRReg = MSRReg;
  2014. Op->SysReg.PStateField = PStateField;
  2015. Op->StartLoc = S;
  2016. Op->EndLoc = S;
  2017. return Op;
  2018. }
  2019. static std::unique_ptr<AArch64Operand> CreateSysCR(unsigned Val, SMLoc S,
  2020. SMLoc E, MCContext &Ctx) {
  2021. auto Op = std::make_unique<AArch64Operand>(k_SysCR, Ctx);
  2022. Op->SysCRImm.Val = Val;
  2023. Op->StartLoc = S;
  2024. Op->EndLoc = E;
  2025. return Op;
  2026. }
  2027. static std::unique_ptr<AArch64Operand> CreatePrefetch(unsigned Val,
  2028. StringRef Str,
  2029. SMLoc S,
  2030. MCContext &Ctx) {
  2031. auto Op = std::make_unique<AArch64Operand>(k_Prefetch, Ctx);
  2032. Op->Prefetch.Val = Val;
  2033. Op->Barrier.Data = Str.data();
  2034. Op->Barrier.Length = Str.size();
  2035. Op->StartLoc = S;
  2036. Op->EndLoc = S;
  2037. return Op;
  2038. }
  2039. static std::unique_ptr<AArch64Operand> CreatePSBHint(unsigned Val,
  2040. StringRef Str,
  2041. SMLoc S,
  2042. MCContext &Ctx) {
  2043. auto Op = std::make_unique<AArch64Operand>(k_PSBHint, Ctx);
  2044. Op->PSBHint.Val = Val;
  2045. Op->PSBHint.Data = Str.data();
  2046. Op->PSBHint.Length = Str.size();
  2047. Op->StartLoc = S;
  2048. Op->EndLoc = S;
  2049. return Op;
  2050. }
  2051. static std::unique_ptr<AArch64Operand> CreateBTIHint(unsigned Val,
  2052. StringRef Str,
  2053. SMLoc S,
  2054. MCContext &Ctx) {
  2055. auto Op = std::make_unique<AArch64Operand>(k_BTIHint, Ctx);
  2056. Op->BTIHint.Val = Val | 32;
  2057. Op->BTIHint.Data = Str.data();
  2058. Op->BTIHint.Length = Str.size();
  2059. Op->StartLoc = S;
  2060. Op->EndLoc = S;
  2061. return Op;
  2062. }
  2063. static std::unique_ptr<AArch64Operand>
  2064. CreateMatrixRegister(unsigned RegNum, unsigned ElementWidth, MatrixKind Kind,
  2065. SMLoc S, SMLoc E, MCContext &Ctx) {
  2066. auto Op = std::make_unique<AArch64Operand>(k_MatrixRegister, Ctx);
  2067. Op->MatrixReg.RegNum = RegNum;
  2068. Op->MatrixReg.ElementWidth = ElementWidth;
  2069. Op->MatrixReg.Kind = Kind;
  2070. Op->StartLoc = S;
  2071. Op->EndLoc = E;
  2072. return Op;
  2073. }
  2074. static std::unique_ptr<AArch64Operand>
  2075. CreateSVCR(uint32_t PStateField, StringRef Str, SMLoc S, MCContext &Ctx) {
  2076. auto Op = std::make_unique<AArch64Operand>(k_SVCR, Ctx);
  2077. Op->SVCR.PStateField = PStateField;
  2078. Op->SVCR.Data = Str.data();
  2079. Op->SVCR.Length = Str.size();
  2080. Op->StartLoc = S;
  2081. Op->EndLoc = S;
  2082. return Op;
  2083. }
  2084. static std::unique_ptr<AArch64Operand>
  2085. CreateShiftExtend(AArch64_AM::ShiftExtendType ShOp, unsigned Val,
  2086. bool HasExplicitAmount, SMLoc S, SMLoc E, MCContext &Ctx) {
  2087. auto Op = std::make_unique<AArch64Operand>(k_ShiftExtend, Ctx);
  2088. Op->ShiftExtend.Type = ShOp;
  2089. Op->ShiftExtend.Amount = Val;
  2090. Op->ShiftExtend.HasExplicitAmount = HasExplicitAmount;
  2091. Op->StartLoc = S;
  2092. Op->EndLoc = E;
  2093. return Op;
  2094. }
  2095. };
  2096. } // end anonymous namespace.
// Debug-dump this operand to OS in an angle-bracketed, kind-specific
// form. Code kept as-is: the k_Register -> k_ShiftExtend fallthrough is
// deliberate and order-sensitive.
void AArch64Operand::print(raw_ostream &OS) const {
  switch (Kind) {
  case k_FPImm:
    OS << "<fpimm " << getFPImm().bitcastToAPInt().getZExtValue();
    if (!getFPImmIsExact())
      OS << " (inexact)";
    OS << ">";
    break;
  case k_Barrier: {
    StringRef Name = getBarrierName();
    if (!Name.empty())
      OS << "<barrier " << Name << ">";
    else
      OS << "<barrier invalid #" << getBarrier() << ">";
    break;
  }
  case k_Immediate:
    OS << *getImm();
    break;
  case k_ShiftedImm: {
    unsigned Shift = getShiftedImmShift();
    OS << "<shiftedimm ";
    OS << *getShiftedImmVal();
    OS << ", lsl #" << AArch64_AM::getShiftValue(Shift) << ">";
    break;
  }
  case k_ImmRange: {
    OS << "<immrange ";
    OS << getFirstImmVal();
    OS << ":" << getLastImmVal() << ">";
    break;
  }
  case k_CondCode:
    OS << "<condcode " << getCondCode() << ">";
    break;
  case k_VectorList: {
    OS << "<vectorlist ";
    unsigned Reg = getVectorListStart();
    // Prints raw register numbers, stepping by the list's stride.
    for (unsigned i = 0, e = getVectorListCount(); i != e; ++i)
      OS << Reg + i * getVectorListStride() << " ";
    OS << ">";
    break;
  }
  case k_VectorIndex:
    OS << "<vectorindex " << getVectorIndex() << ">";
    break;
  case k_SysReg:
    OS << "<sysreg: " << getSysReg() << '>';
    break;
  case k_Token:
    OS << "'" << getToken() << "'";
    break;
  case k_SysCR:
    OS << "c" << getSysCR();
    break;
  case k_Prefetch: {
    StringRef Name = getPrefetchName();
    if (!Name.empty())
      OS << "<prfop " << Name << ">";
    else
      OS << "<prfop invalid #" << getPrefetch() << ">";
    break;
  }
  case k_PSBHint:
    OS << getPSBHintName();
    break;
  case k_BTIHint:
    OS << getBTIHintName();
    break;
  case k_MatrixRegister:
    OS << "<matrix " << getMatrixReg() << ">";
    break;
  case k_MatrixTileList: {
    OS << "<matrixlist ";
    unsigned RegMask = getMatrixTileListRegMask();
    unsigned MaxBits = 8;
    // Print the 8-bit tile mask MSB-first as a string of 0/1 digits.
    for (unsigned I = MaxBits; I > 0; --I)
      OS << ((RegMask & (1 << (I - 1))) >> (I - 1));
    OS << '>';
    break;
  }
  case k_SVCR: {
    OS << getSVCR();
    break;
  }
  case k_Register:
    OS << "<register " << getReg() << ">";
    if (!getShiftExtendAmount() && !hasShiftExtendAmount())
      break;
    // A register with an attached shift/extend also prints that suffix.
    [[fallthrough]];
  case k_ShiftExtend:
    OS << "<" << AArch64_AM::getShiftExtendName(getShiftExtendType()) << " #"
       << getShiftExtendAmount();
    if (!hasShiftExtendAmount())
      OS << "<imp>";
    OS << '>';
    break;
  }
}
  2196. /// @name Auto-generated Match Functions
  2197. /// {
  2198. static unsigned MatchRegisterName(StringRef Name);
  2199. /// }
// Map a NEON vector register name ("v0".."v31", case-insensitive) to its
// Q-register enum value; returns 0 for anything else. Kept as an explicit
// table: a numeric-parse rewrite would have to assume the Q0..Q31 enum
// values are contiguous, which this file doesn't establish.
static unsigned MatchNeonVectorRegName(StringRef Name) {
  return StringSwitch<unsigned>(Name.lower())
      .Case("v0", AArch64::Q0)
      .Case("v1", AArch64::Q1)
      .Case("v2", AArch64::Q2)
      .Case("v3", AArch64::Q3)
      .Case("v4", AArch64::Q4)
      .Case("v5", AArch64::Q5)
      .Case("v6", AArch64::Q6)
      .Case("v7", AArch64::Q7)
      .Case("v8", AArch64::Q8)
      .Case("v9", AArch64::Q9)
      .Case("v10", AArch64::Q10)
      .Case("v11", AArch64::Q11)
      .Case("v12", AArch64::Q12)
      .Case("v13", AArch64::Q13)
      .Case("v14", AArch64::Q14)
      .Case("v15", AArch64::Q15)
      .Case("v16", AArch64::Q16)
      .Case("v17", AArch64::Q17)
      .Case("v18", AArch64::Q18)
      .Case("v19", AArch64::Q19)
      .Case("v20", AArch64::Q20)
      .Case("v21", AArch64::Q21)
      .Case("v22", AArch64::Q22)
      .Case("v23", AArch64::Q23)
      .Case("v24", AArch64::Q24)
      .Case("v25", AArch64::Q25)
      .Case("v26", AArch64::Q26)
      .Case("v27", AArch64::Q27)
      .Case("v28", AArch64::Q28)
      .Case("v29", AArch64::Q29)
      .Case("v30", AArch64::Q30)
      .Case("v31", AArch64::Q31)
      .Default(0);
}
/// Returns an optional pair of (#elements, element-width) if Suffix
/// is a valid vector kind. Where the number of elements in a vector
/// or the vector width is implicit or explicitly unknown (but still a
/// valid suffix kind), 0 is used.
static std::optional<std::pair<int, int>> parseVectorKind(StringRef Suffix,
                                                          RegKind VectorKind) {
  // {-1, -1} is the in-band "not a valid suffix" sentinel, converted to
  // std::nullopt at the end.
  std::pair<int, int> Res = {-1, -1};
  switch (VectorKind) {
  case RegKind::NeonVector:
    Res =
        StringSwitch<std::pair<int, int>>(Suffix.lower())
            .Case("", {0, 0})
            .Case(".1d", {1, 64})
            .Case(".1q", {1, 128})
            // '.2h' needed for fp16 scalar pairwise reductions
            .Case(".2h", {2, 16})
            .Case(".2s", {2, 32})
            .Case(".2d", {2, 64})
            // '.4b' is another special case for the ARMv8.2a dot product
            // operand
            .Case(".4b", {4, 8})
            .Case(".4h", {4, 16})
            .Case(".4s", {4, 32})
            .Case(".8b", {8, 8})
            .Case(".8h", {8, 16})
            .Case(".16b", {16, 8})
            // Accept the width neutral ones, too, for verbose syntax. If those
            // aren't used in the right places, the token operand won't match so
            // all will work out.
            .Case(".b", {0, 8})
            .Case(".h", {0, 16})
            .Case(".s", {0, 32})
            .Case(".d", {0, 64})
            .Default({-1, -1});
    break;
  case RegKind::SVEPredicateAsCounter:
  case RegKind::SVEPredicateVector:
  case RegKind::SVEDataVector:
  case RegKind::Matrix:
    // Scalable/matrix registers only take width-neutral suffixes.
    Res = StringSwitch<std::pair<int, int>>(Suffix.lower())
              .Case("", {0, 0})
              .Case(".b", {0, 8})
              .Case(".h", {0, 16})
              .Case(".s", {0, 32})
              .Case(".d", {0, 64})
              .Case(".q", {0, 128})
              .Default({-1, -1});
    break;
  default:
    llvm_unreachable("Unsupported RegKind");
  }
  if (Res == std::make_pair(-1, -1))
    return std::nullopt;
  return std::optional<std::pair<int, int>>(Res);
}
  2291. static bool isValidVectorKind(StringRef Suffix, RegKind VectorKind) {
  2292. return parseVectorKind(Suffix, VectorKind).has_value();
  2293. }
// Map an SVE data register name ("z0".."z31", case-insensitive) to its
// Z-register enum value; returns 0 for anything else. Kept as an explicit
// table (a parse-based rewrite would assume enum contiguity).
static unsigned matchSVEDataVectorRegName(StringRef Name) {
  return StringSwitch<unsigned>(Name.lower())
      .Case("z0", AArch64::Z0)
      .Case("z1", AArch64::Z1)
      .Case("z2", AArch64::Z2)
      .Case("z3", AArch64::Z3)
      .Case("z4", AArch64::Z4)
      .Case("z5", AArch64::Z5)
      .Case("z6", AArch64::Z6)
      .Case("z7", AArch64::Z7)
      .Case("z8", AArch64::Z8)
      .Case("z9", AArch64::Z9)
      .Case("z10", AArch64::Z10)
      .Case("z11", AArch64::Z11)
      .Case("z12", AArch64::Z12)
      .Case("z13", AArch64::Z13)
      .Case("z14", AArch64::Z14)
      .Case("z15", AArch64::Z15)
      .Case("z16", AArch64::Z16)
      .Case("z17", AArch64::Z17)
      .Case("z18", AArch64::Z18)
      .Case("z19", AArch64::Z19)
      .Case("z20", AArch64::Z20)
      .Case("z21", AArch64::Z21)
      .Case("z22", AArch64::Z22)
      .Case("z23", AArch64::Z23)
      .Case("z24", AArch64::Z24)
      .Case("z25", AArch64::Z25)
      .Case("z26", AArch64::Z26)
      .Case("z27", AArch64::Z27)
      .Case("z28", AArch64::Z28)
      .Case("z29", AArch64::Z29)
      .Case("z30", AArch64::Z30)
      .Case("z31", AArch64::Z31)
      .Default(0);
}
// Map an SVE predicate register name ("p0".."p15", case-insensitive) to
// its P-register enum value; returns 0 for anything else.
static unsigned matchSVEPredicateVectorRegName(StringRef Name) {
  return StringSwitch<unsigned>(Name.lower())
      .Case("p0", AArch64::P0)
      .Case("p1", AArch64::P1)
      .Case("p2", AArch64::P2)
      .Case("p3", AArch64::P3)
      .Case("p4", AArch64::P4)
      .Case("p5", AArch64::P5)
      .Case("p6", AArch64::P6)
      .Case("p7", AArch64::P7)
      .Case("p8", AArch64::P8)
      .Case("p9", AArch64::P9)
      .Case("p10", AArch64::P10)
      .Case("p11", AArch64::P11)
      .Case("p12", AArch64::P12)
      .Case("p13", AArch64::P13)
      .Case("p14", AArch64::P14)
      .Case("p15", AArch64::P15)
      .Default(0);
}
// Map a predicate-as-counter name ("pn0".."pn15", case-insensitive) to
// the underlying P-register enum value; returns 0 for anything else.
// Note the PN registers deliberately share the P-register enum values.
static unsigned matchSVEPredicateAsCounterRegName(StringRef Name) {
  return StringSwitch<unsigned>(Name.lower())
      .Case("pn0", AArch64::P0)
      .Case("pn1", AArch64::P1)
      .Case("pn2", AArch64::P2)
      .Case("pn3", AArch64::P3)
      .Case("pn4", AArch64::P4)
      .Case("pn5", AArch64::P5)
      .Case("pn6", AArch64::P6)
      .Case("pn7", AArch64::P7)
      .Case("pn8", AArch64::P8)
      .Case("pn9", AArch64::P9)
      .Case("pn10", AArch64::P10)
      .Case("pn11", AArch64::P11)
      .Case("pn12", AArch64::P12)
      .Case("pn13", AArch64::P13)
      .Case("pn14", AArch64::P14)
      .Case("pn15", AArch64::P15)
      .Default(0);
}
  2370. static unsigned matchMatrixTileListRegName(StringRef Name) {
  2371. return StringSwitch<unsigned>(Name.lower())
  2372. .Case("za0.d", AArch64::ZAD0)
  2373. .Case("za1.d", AArch64::ZAD1)
  2374. .Case("za2.d", AArch64::ZAD2)
  2375. .Case("za3.d", AArch64::ZAD3)
  2376. .Case("za4.d", AArch64::ZAD4)
  2377. .Case("za5.d", AArch64::ZAD5)
  2378. .Case("za6.d", AArch64::ZAD6)
  2379. .Case("za7.d", AArch64::ZAD7)
  2380. .Case("za0.s", AArch64::ZAS0)
  2381. .Case("za1.s", AArch64::ZAS1)
  2382. .Case("za2.s", AArch64::ZAS2)
  2383. .Case("za3.s", AArch64::ZAS3)
  2384. .Case("za0.h", AArch64::ZAH0)
  2385. .Case("za1.h", AArch64::ZAH1)
  2386. .Case("za0.b", AArch64::ZAB0)
  2387. .Default(0);
  2388. }
/// Map an SME matrix register name to its register number, or 0 if the name
/// does not match. Accepts the whole array ("za"), a tile with an element
/// width suffix ("zaN.<b|h|s|d|q>"), and the horizontal/vertical slice
/// spellings ("zaNh.*" / "zaNv.*"); the h/v forms map to the same tile
/// register as the plain form — the row/column distinction is recovered
/// separately by the caller (see tryParseMatrixRegister).
static unsigned matchMatrixRegName(StringRef Name) {
  return StringSwitch<unsigned>(Name.lower())
      .Case("za", AArch64::ZA)
      // Plain tile names: 16 x .q, 8 x .d, 4 x .s, 2 x .h, 1 x .b.
      .Case("za0.q", AArch64::ZAQ0)
      .Case("za1.q", AArch64::ZAQ1)
      .Case("za2.q", AArch64::ZAQ2)
      .Case("za3.q", AArch64::ZAQ3)
      .Case("za4.q", AArch64::ZAQ4)
      .Case("za5.q", AArch64::ZAQ5)
      .Case("za6.q", AArch64::ZAQ6)
      .Case("za7.q", AArch64::ZAQ7)
      .Case("za8.q", AArch64::ZAQ8)
      .Case("za9.q", AArch64::ZAQ9)
      .Case("za10.q", AArch64::ZAQ10)
      .Case("za11.q", AArch64::ZAQ11)
      .Case("za12.q", AArch64::ZAQ12)
      .Case("za13.q", AArch64::ZAQ13)
      .Case("za14.q", AArch64::ZAQ14)
      .Case("za15.q", AArch64::ZAQ15)
      .Case("za0.d", AArch64::ZAD0)
      .Case("za1.d", AArch64::ZAD1)
      .Case("za2.d", AArch64::ZAD2)
      .Case("za3.d", AArch64::ZAD3)
      .Case("za4.d", AArch64::ZAD4)
      .Case("za5.d", AArch64::ZAD5)
      .Case("za6.d", AArch64::ZAD6)
      .Case("za7.d", AArch64::ZAD7)
      .Case("za0.s", AArch64::ZAS0)
      .Case("za1.s", AArch64::ZAS1)
      .Case("za2.s", AArch64::ZAS2)
      .Case("za3.s", AArch64::ZAS3)
      .Case("za0.h", AArch64::ZAH0)
      .Case("za1.h", AArch64::ZAH1)
      .Case("za0.b", AArch64::ZAB0)
      // Horizontal slices: same tile registers as the plain spellings.
      .Case("za0h.q", AArch64::ZAQ0)
      .Case("za1h.q", AArch64::ZAQ1)
      .Case("za2h.q", AArch64::ZAQ2)
      .Case("za3h.q", AArch64::ZAQ3)
      .Case("za4h.q", AArch64::ZAQ4)
      .Case("za5h.q", AArch64::ZAQ5)
      .Case("za6h.q", AArch64::ZAQ6)
      .Case("za7h.q", AArch64::ZAQ7)
      .Case("za8h.q", AArch64::ZAQ8)
      .Case("za9h.q", AArch64::ZAQ9)
      .Case("za10h.q", AArch64::ZAQ10)
      .Case("za11h.q", AArch64::ZAQ11)
      .Case("za12h.q", AArch64::ZAQ12)
      .Case("za13h.q", AArch64::ZAQ13)
      .Case("za14h.q", AArch64::ZAQ14)
      .Case("za15h.q", AArch64::ZAQ15)
      .Case("za0h.d", AArch64::ZAD0)
      .Case("za1h.d", AArch64::ZAD1)
      .Case("za2h.d", AArch64::ZAD2)
      .Case("za3h.d", AArch64::ZAD3)
      .Case("za4h.d", AArch64::ZAD4)
      .Case("za5h.d", AArch64::ZAD5)
      .Case("za6h.d", AArch64::ZAD6)
      .Case("za7h.d", AArch64::ZAD7)
      .Case("za0h.s", AArch64::ZAS0)
      .Case("za1h.s", AArch64::ZAS1)
      .Case("za2h.s", AArch64::ZAS2)
      .Case("za3h.s", AArch64::ZAS3)
      .Case("za0h.h", AArch64::ZAH0)
      .Case("za1h.h", AArch64::ZAH1)
      .Case("za0h.b", AArch64::ZAB0)
      // Vertical slices: likewise map to the plain tile registers.
      .Case("za0v.q", AArch64::ZAQ0)
      .Case("za1v.q", AArch64::ZAQ1)
      .Case("za2v.q", AArch64::ZAQ2)
      .Case("za3v.q", AArch64::ZAQ3)
      .Case("za4v.q", AArch64::ZAQ4)
      .Case("za5v.q", AArch64::ZAQ5)
      .Case("za6v.q", AArch64::ZAQ6)
      .Case("za7v.q", AArch64::ZAQ7)
      .Case("za8v.q", AArch64::ZAQ8)
      .Case("za9v.q", AArch64::ZAQ9)
      .Case("za10v.q", AArch64::ZAQ10)
      .Case("za11v.q", AArch64::ZAQ11)
      .Case("za12v.q", AArch64::ZAQ12)
      .Case("za13v.q", AArch64::ZAQ13)
      .Case("za14v.q", AArch64::ZAQ14)
      .Case("za15v.q", AArch64::ZAQ15)
      .Case("za0v.d", AArch64::ZAD0)
      .Case("za1v.d", AArch64::ZAD1)
      .Case("za2v.d", AArch64::ZAD2)
      .Case("za3v.d", AArch64::ZAD3)
      .Case("za4v.d", AArch64::ZAD4)
      .Case("za5v.d", AArch64::ZAD5)
      .Case("za6v.d", AArch64::ZAD6)
      .Case("za7v.d", AArch64::ZAD7)
      .Case("za0v.s", AArch64::ZAS0)
      .Case("za1v.s", AArch64::ZAS1)
      .Case("za2v.s", AArch64::ZAS2)
      .Case("za3v.s", AArch64::ZAS3)
      .Case("za0v.h", AArch64::ZAH0)
      .Case("za1v.h", AArch64::ZAH1)
      .Case("za0v.b", AArch64::ZAB0)
      .Default(0);
}
  2487. bool AArch64AsmParser::parseRegister(MCRegister &RegNo, SMLoc &StartLoc,
  2488. SMLoc &EndLoc) {
  2489. return tryParseRegister(RegNo, StartLoc, EndLoc) != MatchOperand_Success;
  2490. }
  2491. OperandMatchResultTy AArch64AsmParser::tryParseRegister(MCRegister &RegNo,
  2492. SMLoc &StartLoc,
  2493. SMLoc &EndLoc) {
  2494. StartLoc = getLoc();
  2495. auto Res = tryParseScalarRegister(RegNo);
  2496. EndLoc = SMLoc::getFromPointer(getLoc().getPointer() - 1);
  2497. return Res;
  2498. }
  2499. // Matches a register name or register alias previously defined by '.req'
  2500. unsigned AArch64AsmParser::matchRegisterNameAlias(StringRef Name,
  2501. RegKind Kind) {
  2502. unsigned RegNum = 0;
  2503. if ((RegNum = matchSVEDataVectorRegName(Name)))
  2504. return Kind == RegKind::SVEDataVector ? RegNum : 0;
  2505. if ((RegNum = matchSVEPredicateVectorRegName(Name)))
  2506. return Kind == RegKind::SVEPredicateVector ? RegNum : 0;
  2507. if ((RegNum = matchSVEPredicateAsCounterRegName(Name)))
  2508. return Kind == RegKind::SVEPredicateAsCounter ? RegNum : 0;
  2509. if ((RegNum = MatchNeonVectorRegName(Name)))
  2510. return Kind == RegKind::NeonVector ? RegNum : 0;
  2511. if ((RegNum = matchMatrixRegName(Name)))
  2512. return Kind == RegKind::Matrix ? RegNum : 0;
  2513. if (Name.equals_insensitive("zt0"))
  2514. return Kind == RegKind::LookupTable ? AArch64::ZT0 : 0;
  2515. // The parsed register must be of RegKind Scalar
  2516. if ((RegNum = MatchRegisterName(Name)))
  2517. return (Kind == RegKind::Scalar) ? RegNum : 0;
  2518. if (!RegNum) {
  2519. // Handle a few common aliases of registers.
  2520. if (auto RegNum = StringSwitch<unsigned>(Name.lower())
  2521. .Case("fp", AArch64::FP)
  2522. .Case("lr", AArch64::LR)
  2523. .Case("x31", AArch64::XZR)
  2524. .Case("w31", AArch64::WZR)
  2525. .Default(0))
  2526. return Kind == RegKind::Scalar ? RegNum : 0;
  2527. // Check for aliases registered via .req. Canonicalize to lower case.
  2528. // That's more consistent since register names are case insensitive, and
  2529. // it's how the original entry was passed in from MC/MCParser/AsmParser.
  2530. auto Entry = RegisterReqs.find(Name.lower());
  2531. if (Entry == RegisterReqs.end())
  2532. return 0;
  2533. // set RegNum if the match is the right kind of register
  2534. if (Kind == Entry->getValue().first)
  2535. RegNum = Entry->getValue().second;
  2536. }
  2537. return RegNum;
  2538. }
  2539. unsigned AArch64AsmParser::getNumRegsForRegKind(RegKind K) {
  2540. switch (K) {
  2541. case RegKind::Scalar:
  2542. case RegKind::NeonVector:
  2543. case RegKind::SVEDataVector:
  2544. return 32;
  2545. case RegKind::Matrix:
  2546. case RegKind::SVEPredicateVector:
  2547. case RegKind::SVEPredicateAsCounter:
  2548. return 16;
  2549. case RegKind::LookupTable:
  2550. return 1;
  2551. }
  2552. llvm_unreachable("Unsupported RegKind");
  2553. }
  2554. /// tryParseScalarRegister - Try to parse a register name. The token must be an
  2555. /// Identifier when called, and if it is a register name the token is eaten and
  2556. /// the register is added to the operand list.
  2557. OperandMatchResultTy
  2558. AArch64AsmParser::tryParseScalarRegister(MCRegister &RegNum) {
  2559. const AsmToken &Tok = getTok();
  2560. if (Tok.isNot(AsmToken::Identifier))
  2561. return MatchOperand_NoMatch;
  2562. std::string lowerCase = Tok.getString().lower();
  2563. unsigned Reg = matchRegisterNameAlias(lowerCase, RegKind::Scalar);
  2564. if (Reg == 0)
  2565. return MatchOperand_NoMatch;
  2566. RegNum = Reg;
  2567. Lex(); // Eat identifier token.
  2568. return MatchOperand_Success;
  2569. }
  2570. /// tryParseSysCROperand - Try to parse a system instruction CR operand name.
  2571. OperandMatchResultTy
  2572. AArch64AsmParser::tryParseSysCROperand(OperandVector &Operands) {
  2573. SMLoc S = getLoc();
  2574. if (getTok().isNot(AsmToken::Identifier)) {
  2575. Error(S, "Expected cN operand where 0 <= N <= 15");
  2576. return MatchOperand_ParseFail;
  2577. }
  2578. StringRef Tok = getTok().getIdentifier();
  2579. if (Tok[0] != 'c' && Tok[0] != 'C') {
  2580. Error(S, "Expected cN operand where 0 <= N <= 15");
  2581. return MatchOperand_ParseFail;
  2582. }
  2583. uint32_t CRNum;
  2584. bool BadNum = Tok.drop_front().getAsInteger(10, CRNum);
  2585. if (BadNum || CRNum > 15) {
  2586. Error(S, "Expected cN operand where 0 <= N <= 15");
  2587. return MatchOperand_ParseFail;
  2588. }
  2589. Lex(); // Eat identifier token.
  2590. Operands.push_back(
  2591. AArch64Operand::CreateSysCR(CRNum, S, getLoc(), getContext()));
  2592. return MatchOperand_Success;
  2593. }
// Either an identifier for named values or a 6-bit immediate.
OperandMatchResultTy
AArch64AsmParser::tryParseRPRFMOperand(OperandVector &Operands) {
  SMLoc S = getLoc();
  const AsmToken &Tok = getTok();

  unsigned MaxVal = 63; // RPRFM encodings are 6-bit values.

  // Immediate case, with optional leading hash:
  if (parseOptionalToken(AsmToken::Hash) ||
      Tok.is(AsmToken::Integer)) {
    const MCExpr *ImmVal;
    if (getParser().parseExpression(ImmVal))
      return MatchOperand_ParseFail;

    // Only constant expressions can be encoded.
    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
    if (!MCE) {
      TokError("immediate value expected for prefetch operand");
      return MatchOperand_ParseFail;
    }
    unsigned prfop = MCE->getValue();
    if (prfop > MaxVal) {
      TokError("prefetch operand out of range, [0," + utostr(MaxVal) +
               "] expected");
      return MatchOperand_ParseFail;
    }

    // Attach the canonical name for this encoding when one exists, so the
    // operand prints symbolically.
    auto RPRFM = AArch64RPRFM::lookupRPRFMByEncoding(MCE->getValue());
    Operands.push_back(AArch64Operand::CreatePrefetch(
        prfop, RPRFM ? RPRFM->Name : "", S, getContext()));
    return MatchOperand_Success;
  }

  // Otherwise the operand must be a named prefetch hint.
  if (Tok.isNot(AsmToken::Identifier)) {
    TokError("prefetch hint expected");
    return MatchOperand_ParseFail;
  }

  auto RPRFM = AArch64RPRFM::lookupRPRFMByName(Tok.getString());
  if (!RPRFM) {
    TokError("prefetch hint expected");
    return MatchOperand_ParseFail;
  }

  Operands.push_back(AArch64Operand::CreatePrefetch(
      RPRFM->Encoding, Tok.getString(), S, getContext()));
  Lex(); // Eat identifier token.
  return MatchOperand_Success;
}
/// tryParsePrefetch - Try to parse a prefetch operand.
///
/// The template parameter selects between the SVE (PRFB/PRFH/...) and base
/// (PRFM) prefetch hint tables, which differ in both names and value range.
template <bool IsSVEPrefetch>
OperandMatchResultTy
AArch64AsmParser::tryParsePrefetch(OperandVector &Operands) {
  SMLoc S = getLoc();
  const AsmToken &Tok = getTok();

  // Name -> encoding lookup in the table selected by IsSVEPrefetch.
  auto LookupByName = [](StringRef N) {
    if (IsSVEPrefetch) {
      if (auto Res = AArch64SVEPRFM::lookupSVEPRFMByName(N))
        return std::optional<unsigned>(Res->Encoding);
    } else if (auto Res = AArch64PRFM::lookupPRFMByName(N))
      return std::optional<unsigned>(Res->Encoding);
    return std::optional<unsigned>();
  };

  // Encoding -> canonical name, used to label numeric operands.
  auto LookupByEncoding = [](unsigned E) {
    if (IsSVEPrefetch) {
      if (auto Res = AArch64SVEPRFM::lookupSVEPRFMByEncoding(E))
        return std::optional<StringRef>(Res->Name);
    } else if (auto Res = AArch64PRFM::lookupPRFMByEncoding(E))
      return std::optional<StringRef>(Res->Name);
    return std::optional<StringRef>();
  };
  unsigned MaxVal = IsSVEPrefetch ? 15 : 31;

  // Either an identifier for named values or a 5-bit immediate.
  // Eat optional hash.
  if (parseOptionalToken(AsmToken::Hash) ||
      Tok.is(AsmToken::Integer)) {
    const MCExpr *ImmVal;
    if (getParser().parseExpression(ImmVal))
      return MatchOperand_ParseFail;

    // Only constant expressions can be encoded.
    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
    if (!MCE) {
      TokError("immediate value expected for prefetch operand");
      return MatchOperand_ParseFail;
    }
    unsigned prfop = MCE->getValue();
    if (prfop > MaxVal) {
      TokError("prefetch operand out of range, [0," + utostr(MaxVal) +
               "] expected");
      return MatchOperand_ParseFail;
    }

    // Attach the canonical name for this encoding, if one exists.
    auto PRFM = LookupByEncoding(MCE->getValue());
    Operands.push_back(AArch64Operand::CreatePrefetch(prfop, PRFM.value_or(""),
                                                      S, getContext()));
    return MatchOperand_Success;
  }

  // Otherwise the operand must be a named prefetch hint.
  if (Tok.isNot(AsmToken::Identifier)) {
    TokError("prefetch hint expected");
    return MatchOperand_ParseFail;
  }

  auto PRFM = LookupByName(Tok.getString());
  if (!PRFM) {
    TokError("prefetch hint expected");
    return MatchOperand_ParseFail;
  }

  Operands.push_back(AArch64Operand::CreatePrefetch(
      *PRFM, Tok.getString(), S, getContext()));
  Lex(); // Eat identifier token.
  return MatchOperand_Success;
}
  2696. /// tryParsePSBHint - Try to parse a PSB operand, mapped to Hint command
  2697. OperandMatchResultTy
  2698. AArch64AsmParser::tryParsePSBHint(OperandVector &Operands) {
  2699. SMLoc S = getLoc();
  2700. const AsmToken &Tok = getTok();
  2701. if (Tok.isNot(AsmToken::Identifier)) {
  2702. TokError("invalid operand for instruction");
  2703. return MatchOperand_ParseFail;
  2704. }
  2705. auto PSB = AArch64PSBHint::lookupPSBByName(Tok.getString());
  2706. if (!PSB) {
  2707. TokError("invalid operand for instruction");
  2708. return MatchOperand_ParseFail;
  2709. }
  2710. Operands.push_back(AArch64Operand::CreatePSBHint(
  2711. PSB->Encoding, Tok.getString(), S, getContext()));
  2712. Lex(); // Eat identifier token.
  2713. return MatchOperand_Success;
  2714. }
  2715. OperandMatchResultTy
  2716. AArch64AsmParser::tryParseSyspXzrPair(OperandVector &Operands) {
  2717. SMLoc StartLoc = getLoc();
  2718. MCRegister RegNum;
  2719. // The case where xzr, xzr is not present is handled by an InstAlias.
  2720. auto RegTok = getTok(); // in case we need to backtrack
  2721. if (tryParseScalarRegister(RegNum) != MatchOperand_Success)
  2722. return MatchOperand_NoMatch;
  2723. if (RegNum != AArch64::XZR) {
  2724. getLexer().UnLex(RegTok);
  2725. return MatchOperand_NoMatch;
  2726. }
  2727. if (parseComma())
  2728. return MatchOperand_ParseFail;
  2729. if (tryParseScalarRegister(RegNum) != MatchOperand_Success) {
  2730. TokError("expected register operand");
  2731. return MatchOperand_ParseFail;
  2732. }
  2733. if (RegNum != AArch64::XZR) {
  2734. TokError("xzr must be followed by xzr");
  2735. return MatchOperand_ParseFail;
  2736. }
  2737. // We need to push something, since we claim this is an operand in .td.
  2738. // See also AArch64AsmParser::parseKeywordOperand.
  2739. Operands.push_back(AArch64Operand::CreateReg(
  2740. RegNum, RegKind::Scalar, StartLoc, getLoc(), getContext()));
  2741. return MatchOperand_Success;
  2742. }
  2743. /// tryParseBTIHint - Try to parse a BTI operand, mapped to Hint command
  2744. OperandMatchResultTy
  2745. AArch64AsmParser::tryParseBTIHint(OperandVector &Operands) {
  2746. SMLoc S = getLoc();
  2747. const AsmToken &Tok = getTok();
  2748. if (Tok.isNot(AsmToken::Identifier)) {
  2749. TokError("invalid operand for instruction");
  2750. return MatchOperand_ParseFail;
  2751. }
  2752. auto BTI = AArch64BTIHint::lookupBTIByName(Tok.getString());
  2753. if (!BTI) {
  2754. TokError("invalid operand for instruction");
  2755. return MatchOperand_ParseFail;
  2756. }
  2757. Operands.push_back(AArch64Operand::CreateBTIHint(
  2758. BTI->Encoding, Tok.getString(), S, getContext()));
  2759. Lex(); // Eat identifier token.
  2760. return MatchOperand_Success;
  2761. }
/// tryParseAdrpLabel - Parse and validate a source label for the ADRP
/// instruction.
OperandMatchResultTy
AArch64AsmParser::tryParseAdrpLabel(OperandVector &Operands) {
  SMLoc S = getLoc();
  const MCExpr *Expr = nullptr;

  if (getTok().is(AsmToken::Hash)) {
    Lex(); // Eat hash token.
  }

  if (parseSymbolicImmVal(Expr))
    return MatchOperand_ParseFail;

  AArch64MCExpr::VariantKind ELFRefKind;
  MCSymbolRefExpr::VariantKind DarwinRefKind;
  int64_t Addend;
  if (classifySymbolRef(Expr, ELFRefKind, DarwinRefKind, Addend)) {
    if (DarwinRefKind == MCSymbolRefExpr::VK_None &&
        ELFRefKind == AArch64MCExpr::VK_INVALID) {
      // No modifier was specified at all; this is the syntax for an ELF basic
      // ADRP relocation (unfortunately).
      Expr =
          AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS_PAGE, getContext());
    } else if ((DarwinRefKind == MCSymbolRefExpr::VK_GOTPAGE ||
                DarwinRefKind == MCSymbolRefExpr::VK_TLVPPAGE) &&
               Addend != 0) {
      // Darwin @gotpage/@tlvppage references cannot carry an addend.
      Error(S, "gotpage label reference not allowed an addend");
      return MatchOperand_ParseFail;
    } else if (DarwinRefKind != MCSymbolRefExpr::VK_PAGE &&
               DarwinRefKind != MCSymbolRefExpr::VK_GOTPAGE &&
               DarwinRefKind != MCSymbolRefExpr::VK_TLVPPAGE &&
               ELFRefKind != AArch64MCExpr::VK_ABS_PAGE_NC &&
               ELFRefKind != AArch64MCExpr::VK_GOT_PAGE &&
               ELFRefKind != AArch64MCExpr::VK_GOT_PAGE_LO15 &&
               ELFRefKind != AArch64MCExpr::VK_GOTTPREL_PAGE &&
               ELFRefKind != AArch64MCExpr::VK_TLSDESC_PAGE) {
      // The operand must be an @page or @gotpage qualified symbolref.
      Error(S, "page or gotpage label reference expected");
      return MatchOperand_ParseFail;
    }
  }

  // We have either a label reference possibly with addend or an immediate. The
  // addend is a raw value here. The linker will adjust it to only reference the
  // page.
  SMLoc E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
  Operands.push_back(AArch64Operand::CreateImm(Expr, S, E, getContext()));
  return MatchOperand_Success;
}
  2808. /// tryParseAdrLabel - Parse and validate a source label for the ADR
  2809. /// instruction.
  2810. OperandMatchResultTy
  2811. AArch64AsmParser::tryParseAdrLabel(OperandVector &Operands) {
  2812. SMLoc S = getLoc();
  2813. const MCExpr *Expr = nullptr;
  2814. // Leave anything with a bracket to the default for SVE
  2815. if (getTok().is(AsmToken::LBrac))
  2816. return MatchOperand_NoMatch;
  2817. if (getTok().is(AsmToken::Hash))
  2818. Lex(); // Eat hash token.
  2819. if (parseSymbolicImmVal(Expr))
  2820. return MatchOperand_ParseFail;
  2821. AArch64MCExpr::VariantKind ELFRefKind;
  2822. MCSymbolRefExpr::VariantKind DarwinRefKind;
  2823. int64_t Addend;
  2824. if (classifySymbolRef(Expr, ELFRefKind, DarwinRefKind, Addend)) {
  2825. if (DarwinRefKind == MCSymbolRefExpr::VK_None &&
  2826. ELFRefKind == AArch64MCExpr::VK_INVALID) {
  2827. // No modifier was specified at all; this is the syntax for an ELF basic
  2828. // ADR relocation (unfortunately).
  2829. Expr = AArch64MCExpr::create(Expr, AArch64MCExpr::VK_ABS, getContext());
  2830. } else {
  2831. Error(S, "unexpected adr label");
  2832. return MatchOperand_ParseFail;
  2833. }
  2834. }
  2835. SMLoc E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
  2836. Operands.push_back(AArch64Operand::CreateImm(Expr, S, E, getContext()));
  2837. return MatchOperand_Success;
  2838. }
/// tryParseFPImm - A floating point immediate expression operand.
///
/// Accepts either a hexadecimal literal (the raw 8-bit FPImm encoding) or a
/// decimal floating-point literal, with an optional leading '#' and optional
/// '-' sign.
template<bool AddFPZeroAsLiteral>
OperandMatchResultTy
AArch64AsmParser::tryParseFPImm(OperandVector &Operands) {
  SMLoc S = getLoc();

  bool Hash = parseOptionalToken(AsmToken::Hash);

  // Handle negation, as that still comes through as a separate token.
  bool isNegative = parseOptionalToken(AsmToken::Minus);

  const AsmToken &Tok = getTok();
  if (!Tok.is(AsmToken::Real) && !Tok.is(AsmToken::Integer)) {
    // Without a leading '#' this simply is not an FP immediate; with one, it
    // has to be.
    if (!Hash)
      return MatchOperand_NoMatch;
    TokError("invalid floating point immediate");
    return MatchOperand_ParseFail;
  }

  // Parse hexadecimal representation.
  if (Tok.is(AsmToken::Integer) && Tok.getString().startswith("0x")) {
    // The encoded form must fit in a byte and cannot be negated.
    if (Tok.getIntVal() > 255 || isNegative) {
      TokError("encoded floating point value out of range");
      return MatchOperand_ParseFail;
    }

    APFloat F((double)AArch64_AM::getFPImmFloat(Tok.getIntVal()));
    Operands.push_back(
        AArch64Operand::CreateFPImm(F, true, S, getContext()));
  } else {
    // Parse FP representation.
    APFloat RealVal(APFloat::IEEEdouble());
    auto StatusOrErr =
        RealVal.convertFromString(Tok.getString(), APFloat::rmTowardZero);
    if (errorToBool(StatusOrErr.takeError())) {
      TokError("invalid floating point representation");
      return MatchOperand_ParseFail;
    }

    if (isNegative)
      RealVal.changeSign();

    if (AddFPZeroAsLiteral && RealVal.isPosZero()) {
      // Some instructions spell +0.0 as the two literal tokens "#0" ".0".
      Operands.push_back(AArch64Operand::CreateToken("#0", S, getContext()));
      Operands.push_back(AArch64Operand::CreateToken(".0", S, getContext()));
    } else
      // The second argument records whether the conversion was exact.
      Operands.push_back(AArch64Operand::CreateFPImm(
          RealVal, *StatusOrErr == APFloat::opOK, S, getContext()));
  }

  Lex(); // Eat the token.

  return MatchOperand_Success;
}
/// tryParseImmWithOptionalShift - Parse immediate operand, optionally with
/// a shift suffix, for example '#1, lsl #12'.
OperandMatchResultTy
AArch64AsmParser::tryParseImmWithOptionalShift(OperandVector &Operands) {
  SMLoc S = getLoc();

  if (getTok().is(AsmToken::Hash))
    Lex(); // Eat '#'
  else if (getTok().isNot(AsmToken::Integer))
    // Operand should start from # or should be integer, emit error otherwise.
    return MatchOperand_NoMatch;

  // "<int>:<int>" spells an immediate range; handled by a separate parser.
  if (getTok().is(AsmToken::Integer) &&
      getLexer().peekTok().is(AsmToken::Colon))
    return tryParseImmRange(Operands);

  const MCExpr *Imm = nullptr;
  if (parseSymbolicImmVal(Imm))
    return MatchOperand_ParseFail;
  else if (getTok().isNot(AsmToken::Comma)) {
    // Plain immediate with no suffix.
    Operands.push_back(
        AArch64Operand::CreateImm(Imm, S, getLoc(), getContext()));
    return MatchOperand_Success;
  }

  // Eat ','
  Lex();
  StringRef VecGroup;
  // "#imm, vgx2/vgx4" form (SME multi-vector group): emit the immediate and
  // the vector-group token as two operands.
  if (!parseOptionalVGOperand(Operands, VecGroup)) {
    Operands.push_back(
        AArch64Operand::CreateImm(Imm, S, getLoc(), getContext()));
    Operands.push_back(
        AArch64Operand::CreateToken(VecGroup, getLoc(), getContext()));
    return MatchOperand_Success;
  }

  // The optional operand must be "lsl #N" where N is non-negative.
  if (!getTok().is(AsmToken::Identifier) ||
      !getTok().getIdentifier().equals_insensitive("lsl")) {
    Error(getLoc(), "only 'lsl #+N' valid after immediate");
    return MatchOperand_ParseFail;
  }

  // Eat 'lsl'
  Lex();

  parseOptionalToken(AsmToken::Hash);

  if (getTok().isNot(AsmToken::Integer)) {
    Error(getLoc(), "only 'lsl #+N' valid after immediate");
    return MatchOperand_ParseFail;
  }

  int64_t ShiftAmount = getTok().getIntVal();

  if (ShiftAmount < 0) {
    Error(getLoc(), "positive shift amount required");
    return MatchOperand_ParseFail;
  }
  Lex(); // Eat the number

  // Just in case the optional lsl #0 is used for immediates other than zero.
  if (ShiftAmount == 0 && Imm != nullptr) {
    Operands.push_back(
        AArch64Operand::CreateImm(Imm, S, getLoc(), getContext()));
    return MatchOperand_Success;
  }

  Operands.push_back(AArch64Operand::CreateShiftedImm(Imm, ShiftAmount, S,
                                                      getLoc(), getContext()));
  return MatchOperand_Success;
}
/// parseCondCodeString - Parse a Condition Code string, optionally returning a
/// suggestion to help common typos.
AArch64CC::CondCode
AArch64AsmParser::parseCondCodeString(StringRef Cond, std::string &Suggestion) {
  // Standard A64 condition code mnemonics (cs/hs and cc/lo are synonyms).
  AArch64CC::CondCode CC = StringSwitch<AArch64CC::CondCode>(Cond.lower())
                               .Case("eq", AArch64CC::EQ)
                               .Case("ne", AArch64CC::NE)
                               .Case("cs", AArch64CC::HS)
                               .Case("hs", AArch64CC::HS)
                               .Case("cc", AArch64CC::LO)
                               .Case("lo", AArch64CC::LO)
                               .Case("mi", AArch64CC::MI)
                               .Case("pl", AArch64CC::PL)
                               .Case("vs", AArch64CC::VS)
                               .Case("vc", AArch64CC::VC)
                               .Case("hi", AArch64CC::HI)
                               .Case("ls", AArch64CC::LS)
                               .Case("ge", AArch64CC::GE)
                               .Case("lt", AArch64CC::LT)
                               .Case("gt", AArch64CC::GT)
                               .Case("le", AArch64CC::LE)
                               .Case("al", AArch64CC::AL)
                               .Case("nv", AArch64CC::NV)
                               .Default(AArch64CC::Invalid);

  // SVE defines alternative spellings that alias onto the same NZCV codes;
  // only accepted when the SVE feature is enabled.
  if (CC == AArch64CC::Invalid &&
      getSTI().getFeatureBits()[AArch64::FeatureSVE]) {
    CC = StringSwitch<AArch64CC::CondCode>(Cond.lower())
             .Case("none", AArch64CC::EQ)
             .Case("any", AArch64CC::NE)
             .Case("nlast", AArch64CC::HS)
             .Case("last", AArch64CC::LO)
             .Case("first", AArch64CC::MI)
             .Case("nfrst", AArch64CC::PL)
             .Case("pmore", AArch64CC::HI)
             .Case("plast", AArch64CC::LS)
             .Case("tcont", AArch64CC::GE)
             .Case("tstop", AArch64CC::LT)
             .Default(AArch64CC::Invalid);

    // The official spelling is the (odd-looking) "nfrst"; suggest it for the
    // natural typo "nfirst".
    if (CC == AArch64CC::Invalid && Cond.lower() == "nfirst")
      Suggestion = "nfrst";
  }
  return CC;
}
  2987. /// parseCondCode - Parse a Condition Code operand.
  2988. bool AArch64AsmParser::parseCondCode(OperandVector &Operands,
  2989. bool invertCondCode) {
  2990. SMLoc S = getLoc();
  2991. const AsmToken &Tok = getTok();
  2992. assert(Tok.is(AsmToken::Identifier) && "Token is not an Identifier");
  2993. StringRef Cond = Tok.getString();
  2994. std::string Suggestion;
  2995. AArch64CC::CondCode CC = parseCondCodeString(Cond, Suggestion);
  2996. if (CC == AArch64CC::Invalid) {
  2997. std::string Msg = "invalid condition code";
  2998. if (!Suggestion.empty())
  2999. Msg += ", did you mean " + Suggestion + "?";
  3000. return TokError(Msg);
  3001. }
  3002. Lex(); // Eat identifier token.
  3003. if (invertCondCode) {
  3004. if (CC == AArch64CC::AL || CC == AArch64CC::NV)
  3005. return TokError("condition codes AL and NV are invalid for this instruction");
  3006. CC = AArch64CC::getInvertedCondCode(AArch64CC::CondCode(CC));
  3007. }
  3008. Operands.push_back(
  3009. AArch64Operand::CreateCondCode(CC, S, getLoc(), getContext()));
  3010. return false;
  3011. }
  3012. OperandMatchResultTy
  3013. AArch64AsmParser::tryParseSVCR(OperandVector &Operands) {
  3014. const AsmToken &Tok = getTok();
  3015. SMLoc S = getLoc();
  3016. if (Tok.isNot(AsmToken::Identifier)) {
  3017. TokError("invalid operand for instruction");
  3018. return MatchOperand_ParseFail;
  3019. }
  3020. unsigned PStateImm = -1;
  3021. const auto *SVCR = AArch64SVCR::lookupSVCRByName(Tok.getString());
  3022. if (!SVCR)
  3023. return MatchOperand_NoMatch;
  3024. if (SVCR->haveFeatures(getSTI().getFeatureBits()))
  3025. PStateImm = SVCR->Encoding;
  3026. Operands.push_back(
  3027. AArch64Operand::CreateSVCR(PStateImm, Tok.getString(), S, getContext()));
  3028. Lex(); // Eat identifier token.
  3029. return MatchOperand_Success;
  3030. }
// Parse an SME matrix operand: either the whole ZA array (with optional
// element-width suffix) or a single tile / tile slice such as "za3h.s".
OperandMatchResultTy
AArch64AsmParser::tryParseMatrixRegister(OperandVector &Operands) {
  const AsmToken &Tok = getTok();
  SMLoc S = getLoc();

  StringRef Name = Tok.getString();

  if (Name.equals_insensitive("za") || Name.startswith_insensitive("za.")) {
    Lex(); // eat "za[.(b|h|s|d)]"
    unsigned ElementWidth = 0;
    auto DotPosition = Name.find('.');
    if (DotPosition != StringRef::npos) {
      // An explicit suffix must be a valid matrix element kind.
      const auto &KindRes =
          parseVectorKind(Name.drop_front(DotPosition), RegKind::Matrix);
      if (!KindRes) {
        TokError(
            "Expected the register to be followed by element width suffix");
        return MatchOperand_ParseFail;
      }
      ElementWidth = KindRes->second;
    }
    Operands.push_back(AArch64Operand::CreateMatrixRegister(
        AArch64::ZA, ElementWidth, MatrixKind::Array, S, getLoc(),
        getContext()));
    if (getLexer().is(AsmToken::LBrac)) {
      // There's no comma after matrix operand, so we can parse the next operand
      // immediately.
      // NOTE(review): a parseOperand failure here reports NoMatch even though
      // tokens were already consumed — confirm this is intended.
      if (parseOperand(Operands, false, false))
        return MatchOperand_NoMatch;
    }
    return MatchOperand_Success;
  }

  // Try to parse matrix register.
  unsigned Reg = matchRegisterNameAlias(Name, RegKind::Matrix);
  if (!Reg)
    return MatchOperand_NoMatch;

  // Every matrix name other than plain "za" contains a '.' suffix (see
  // matchMatrixRegName), so a match guarantees the dot is present.
  size_t DotPosition = Name.find('.');
  assert(DotPosition != StringRef::npos && "Unexpected register");

  StringRef Head = Name.take_front(DotPosition);
  StringRef Tail = Name.drop_front(DotPosition);
  StringRef RowOrColumn = Head.take_back();
  // A trailing 'h'/'v' on the base name selects a row/column slice;
  // otherwise the operand names a whole tile.
  MatrixKind Kind = StringSwitch<MatrixKind>(RowOrColumn.lower())
                        .Case("h", MatrixKind::Row)
                        .Case("v", MatrixKind::Col)
                        .Default(MatrixKind::Tile);

  // Next up, parsing the suffix
  const auto &KindRes = parseVectorKind(Tail, RegKind::Matrix);
  if (!KindRes) {
    TokError("Expected the register to be followed by element width suffix");
    return MatchOperand_ParseFail;
  }
  unsigned ElementWidth = KindRes->second;

  Lex();

  Operands.push_back(AArch64Operand::CreateMatrixRegister(
      Reg, ElementWidth, Kind, S, getLoc(), getContext()));

  if (getLexer().is(AsmToken::LBrac)) {
    // There's no comma after matrix operand, so we can parse the next operand
    // immediately.
    if (parseOperand(Operands, false, false))
      return MatchOperand_NoMatch;
  }
  return MatchOperand_Success;
}
/// tryParseOptionalShift - Some operands take an optional shift argument. Parse
/// them if present.
OperandMatchResultTy
AArch64AsmParser::tryParseOptionalShiftExtend(OperandVector &Operands) {
  const AsmToken &Tok = getTok();
  std::string LowerID = Tok.getString().lower();
  AArch64_AM::ShiftExtendType ShOp =
      StringSwitch<AArch64_AM::ShiftExtendType>(LowerID)
          .Case("lsl", AArch64_AM::LSL)
          .Case("lsr", AArch64_AM::LSR)
          .Case("asr", AArch64_AM::ASR)
          .Case("ror", AArch64_AM::ROR)
          .Case("msl", AArch64_AM::MSL)
          .Case("uxtb", AArch64_AM::UXTB)
          .Case("uxth", AArch64_AM::UXTH)
          .Case("uxtw", AArch64_AM::UXTW)
          .Case("uxtx", AArch64_AM::UXTX)
          .Case("sxtb", AArch64_AM::SXTB)
          .Case("sxth", AArch64_AM::SXTH)
          .Case("sxtw", AArch64_AM::SXTW)
          .Case("sxtx", AArch64_AM::SXTX)
          .Default(AArch64_AM::InvalidShiftExtend);

  if (ShOp == AArch64_AM::InvalidShiftExtend)
    return MatchOperand_NoMatch;

  SMLoc S = Tok.getLoc();
  Lex();

  bool Hash = parseOptionalToken(AsmToken::Hash);

  if (!Hash && getLexer().isNot(AsmToken::Integer)) {
    // Shift operations (lsl/lsr/asr/ror/msl) require an amount.
    if (ShOp == AArch64_AM::LSL || ShOp == AArch64_AM::LSR ||
        ShOp == AArch64_AM::ASR || ShOp == AArch64_AM::ROR ||
        ShOp == AArch64_AM::MSL) {
      // We expect a number here.
      TokError("expected #imm after shift specifier");
      return MatchOperand_ParseFail;
    }

    // "extend" type operations don't need an immediate, #0 is implicit.
    SMLoc E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
    Operands.push_back(
        AArch64Operand::CreateShiftExtend(ShOp, 0, false, S, E, getContext()));
    return MatchOperand_Success;
  }

  // Make sure we do actually have a number, identifier or a parenthesized
  // expression.
  SMLoc E = getLoc();
  if (!getTok().is(AsmToken::Integer) && !getTok().is(AsmToken::LParen) &&
      !getTok().is(AsmToken::Identifier)) {
    Error(E, "expected integer shift amount");
    return MatchOperand_ParseFail;
  }

  const MCExpr *ImmVal;
  if (getParser().parseExpression(ImmVal))
    return MatchOperand_ParseFail;

  // The amount must fold to a constant at parse time.
  const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
  if (!MCE) {
    Error(E, "expected constant '#imm' after shift specifier");
    return MatchOperand_ParseFail;
  }

  E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
  Operands.push_back(AArch64Operand::CreateShiftExtend(
      ShOp, MCE->getValue(), true, S, E, getContext()));
  return MatchOperand_Success;
}
// Maps an architecture-extension name to the subtarget features it enables.
// An empty feature set marks a name that is recognized but not yet supported
// (see the FIXME below).
static const struct Extension {
  const char *Name;             // Extension name, lower-case.
  const FeatureBitset Features; // Features implied by this extension.
} ExtensionMap[] = {
    {"crc", {AArch64::FeatureCRC}},
    {"sm4", {AArch64::FeatureSM4}},
    {"sha3", {AArch64::FeatureSHA3}},
    {"sha2", {AArch64::FeatureSHA2}},
    {"aes", {AArch64::FeatureAES}},
    {"crypto", {AArch64::FeatureCrypto}},
    {"fp", {AArch64::FeatureFPARMv8}},
    {"simd", {AArch64::FeatureNEON}},
    {"ras", {AArch64::FeatureRAS}},
    {"rasv2", {AArch64::FeatureRASv2}},
    {"lse", {AArch64::FeatureLSE}},
    {"predres", {AArch64::FeaturePredRes}},
    {"predres2", {AArch64::FeatureSPECRES2}},
    {"ccdp", {AArch64::FeatureCacheDeepPersist}},
    {"mte", {AArch64::FeatureMTE}},
    // "memtag" is an alias for "mte".
    {"memtag", {AArch64::FeatureMTE}},
    {"tlb-rmi", {AArch64::FeatureTLB_RMI}},
    {"pan", {AArch64::FeaturePAN}},
    {"pan-rwv", {AArch64::FeaturePAN_RWV}},
    {"ccpp", {AArch64::FeatureCCPP}},
    {"rcpc", {AArch64::FeatureRCPC}},
    {"rng", {AArch64::FeatureRandGen}},
    {"sve", {AArch64::FeatureSVE}},
    {"sve2", {AArch64::FeatureSVE2}},
    {"sve2-aes", {AArch64::FeatureSVE2AES}},
    {"sve2-sm4", {AArch64::FeatureSVE2SM4}},
    {"sve2-sha3", {AArch64::FeatureSVE2SHA3}},
    {"sve2-bitperm", {AArch64::FeatureSVE2BitPerm}},
    {"sve2p1", {AArch64::FeatureSVE2p1}},
    {"b16b16", {AArch64::FeatureB16B16}},
    {"ls64", {AArch64::FeatureLS64}},
    {"xs", {AArch64::FeatureXS}},
    {"pauth", {AArch64::FeaturePAuth}},
    {"flagm", {AArch64::FeatureFlagM}},
    {"rme", {AArch64::FeatureRME}},
    {"sme", {AArch64::FeatureSME}},
    {"sme-f64f64", {AArch64::FeatureSMEF64F64}},
    {"sme-f16f16", {AArch64::FeatureSMEF16F16}},
    {"sme-i16i64", {AArch64::FeatureSMEI16I64}},
    {"sme2", {AArch64::FeatureSME2}},
    {"sme2p1", {AArch64::FeatureSME2p1}},
    {"hbc", {AArch64::FeatureHBC}},
    {"mops", {AArch64::FeatureMOPS}},
    {"mec", {AArch64::FeatureMEC}},
    {"the", {AArch64::FeatureTHE}},
    {"d128", {AArch64::FeatureD128}},
    {"lse128", {AArch64::FeatureLSE128}},
    {"ite", {AArch64::FeatureITE}},
    {"cssc", {AArch64::FeatureCSSC}},
    {"rcpc3", {AArch64::FeatureRCPC3}},
    // FIXME: Unsupported extensions
    {"lor", {}},
    {"rdma", {}},
    {"profile", {}},
};
  3213. static void setRequiredFeatureString(FeatureBitset FBS, std::string &Str) {
  3214. if (FBS[AArch64::HasV8_0aOps])
  3215. Str += "ARMv8a";
  3216. if (FBS[AArch64::HasV8_1aOps])
  3217. Str += "ARMv8.1a";
  3218. else if (FBS[AArch64::HasV8_2aOps])
  3219. Str += "ARMv8.2a";
  3220. else if (FBS[AArch64::HasV8_3aOps])
  3221. Str += "ARMv8.3a";
  3222. else if (FBS[AArch64::HasV8_4aOps])
  3223. Str += "ARMv8.4a";
  3224. else if (FBS[AArch64::HasV8_5aOps])
  3225. Str += "ARMv8.5a";
  3226. else if (FBS[AArch64::HasV8_6aOps])
  3227. Str += "ARMv8.6a";
  3228. else if (FBS[AArch64::HasV8_7aOps])
  3229. Str += "ARMv8.7a";
  3230. else if (FBS[AArch64::HasV8_8aOps])
  3231. Str += "ARMv8.8a";
  3232. else if (FBS[AArch64::HasV8_9aOps])
  3233. Str += "ARMv8.9a";
  3234. else if (FBS[AArch64::HasV9_0aOps])
  3235. Str += "ARMv9-a";
  3236. else if (FBS[AArch64::HasV9_1aOps])
  3237. Str += "ARMv9.1a";
  3238. else if (FBS[AArch64::HasV9_2aOps])
  3239. Str += "ARMv9.2a";
  3240. else if (FBS[AArch64::HasV9_3aOps])
  3241. Str += "ARMv9.3a";
  3242. else if (FBS[AArch64::HasV9_4aOps])
  3243. Str += "ARMv9.4a";
  3244. else if (FBS[AArch64::HasV8_0rOps])
  3245. Str += "ARMv8r";
  3246. else {
  3247. SmallVector<std::string, 2> ExtMatches;
  3248. for (const auto& Ext : ExtensionMap) {
  3249. // Use & in case multiple features are enabled
  3250. if ((FBS & Ext.Features) != FeatureBitset())
  3251. ExtMatches.push_back(Ext.Name);
  3252. }
  3253. Str += !ExtMatches.empty() ? llvm::join(ExtMatches, ", ") : "(unknown)";
  3254. }
  3255. }
  3256. void AArch64AsmParser::createSysAlias(uint16_t Encoding, OperandVector &Operands,
  3257. SMLoc S) {
  3258. const uint16_t Op2 = Encoding & 7;
  3259. const uint16_t Cm = (Encoding & 0x78) >> 3;
  3260. const uint16_t Cn = (Encoding & 0x780) >> 7;
  3261. const uint16_t Op1 = (Encoding & 0x3800) >> 11;
  3262. const MCExpr *Expr = MCConstantExpr::create(Op1, getContext());
  3263. Operands.push_back(
  3264. AArch64Operand::CreateImm(Expr, S, getLoc(), getContext()));
  3265. Operands.push_back(
  3266. AArch64Operand::CreateSysCR(Cn, S, getLoc(), getContext()));
  3267. Operands.push_back(
  3268. AArch64Operand::CreateSysCR(Cm, S, getLoc(), getContext()));
  3269. Expr = MCConstantExpr::create(Op2, getContext());
  3270. Operands.push_back(
  3271. AArch64Operand::CreateImm(Expr, S, getLoc(), getContext()));
  3272. }
/// parseSysAlias - The IC, DC, AT, and TLBI instructions are simple aliases for
/// the SYS instruction. Parse them specially so that we create a SYS MCInst.
///
/// Returns true (after emitting a diagnostic) on failure, false on success.
bool AArch64AsmParser::parseSysAlias(StringRef Name, SMLoc NameLoc,
                                     OperandVector &Operands) {
  if (Name.contains('.'))
    return TokError("invalid operand");

  Mnemonic = Name;
  // All of these aliases lower onto the generic SYS instruction.
  Operands.push_back(AArch64Operand::CreateToken("sys", NameLoc, getContext()));

  const AsmToken &Tok = getTok();
  StringRef Op = Tok.getString();
  SMLoc S = Tok.getLoc();

  if (Mnemonic == "ic") {
    const AArch64IC::IC *IC = AArch64IC::lookupICByName(Op);
    if (!IC)
      return TokError("invalid operand for IC instruction");
    else if (!IC->haveFeatures(getSTI().getFeatureBits())) {
      // Known operation, but the current subtarget lacks the feature for it;
      // report which features it needs.
      std::string Str("IC " + std::string(IC->Name) + " requires: ");
      setRequiredFeatureString(IC->getRequiredFeatures(), Str);
      return TokError(Str);
    }
    createSysAlias(IC->Encoding, Operands, S);
  } else if (Mnemonic == "dc") {
    const AArch64DC::DC *DC = AArch64DC::lookupDCByName(Op);
    if (!DC)
      return TokError("invalid operand for DC instruction");
    else if (!DC->haveFeatures(getSTI().getFeatureBits())) {
      std::string Str("DC " + std::string(DC->Name) + " requires: ");
      setRequiredFeatureString(DC->getRequiredFeatures(), Str);
      return TokError(Str);
    }
    createSysAlias(DC->Encoding, Operands, S);
  } else if (Mnemonic == "at") {
    const AArch64AT::AT *AT = AArch64AT::lookupATByName(Op);
    if (!AT)
      return TokError("invalid operand for AT instruction");
    else if (!AT->haveFeatures(getSTI().getFeatureBits())) {
      std::string Str("AT " + std::string(AT->Name) + " requires: ");
      setRequiredFeatureString(AT->getRequiredFeatures(), Str);
      return TokError(Str);
    }
    createSysAlias(AT->Encoding, Operands, S);
  } else if (Mnemonic == "tlbi") {
    const AArch64TLBI::TLBI *TLBI = AArch64TLBI::lookupTLBIByName(Op);
    if (!TLBI)
      return TokError("invalid operand for TLBI instruction");
    else if (!TLBI->haveFeatures(getSTI().getFeatureBits())) {
      std::string Str("TLBI " + std::string(TLBI->Name) + " requires: ");
      setRequiredFeatureString(TLBI->getRequiredFeatures(), Str);
      return TokError(Str);
    }
    createSysAlias(TLBI->Encoding, Operands, S);
  } else if (Mnemonic == "cfp" || Mnemonic == "dvp" || Mnemonic == "cpp" || Mnemonic == "cosp") {
    // Prediction-restriction instructions; their only named operand is RCTX.
    if (Op.lower() != "rctx")
      return TokError("invalid operand for prediction restriction instruction");

    bool hasAll = getSTI().hasFeature(AArch64::FeatureAll);
    bool hasPredres = hasAll || getSTI().hasFeature(AArch64::FeaturePredRes);
    bool hasSpecres2 = hasAll || getSTI().hasFeature(AArch64::FeatureSPECRES2);

    if (Mnemonic == "cosp" && !hasSpecres2)
      return TokError("COSP requires: predres2");
    if (!hasPredres)
      return TokError(Mnemonic.upper() + "RCTX requires: predres");

    // op2 distinguishes the four restriction operations.
    uint16_t PRCTX_Op2 = Mnemonic == "cfp"    ? 0b100
                         : Mnemonic == "dvp"  ? 0b101
                         : Mnemonic == "cosp" ? 0b110
                         : Mnemonic == "cpp"  ? 0b111
                                              : 0;
    assert(PRCTX_Op2 &&
           "Invalid mnemonic for prediction restriction instruction");
    const auto SYS_3_7_3 = 0b01101110011; // op=3, CRn=7, CRm=3
    const auto Encoding = SYS_3_7_3 << 3 | PRCTX_Op2;

    createSysAlias(Encoding, Operands, S);
  }

  Lex(); // Eat operand.

  // Operations whose name contains "all" act on everything and take no
  // register; all others require one.
  bool ExpectRegister = (Op.lower().find("all") == StringRef::npos);
  bool HasRegister = false;

  // Check for the optional register operand.
  if (parseOptionalToken(AsmToken::Comma)) {
    if (Tok.isNot(AsmToken::Identifier) || parseRegister(Operands))
      return TokError("expected register operand");
    HasRegister = true;
  }

  if (ExpectRegister && !HasRegister)
    return TokError("specified " + Mnemonic + " op requires a register");
  else if (!ExpectRegister && HasRegister)
    return TokError("specified " + Mnemonic + " op does not use a register");

  if (parseToken(AsmToken::EndOfStatement, "unexpected token in argument list"))
    return true;

  return false;
}
/// parseSyspAlias - The TLBIP instructions are simple aliases for
/// the SYSP instruction. Parse them specially so that we create a SYSP MCInst.
///
/// Returns true (after emitting a diagnostic) on failure, false on success.
bool AArch64AsmParser::parseSyspAlias(StringRef Name, SMLoc NameLoc,
                                      OperandVector &Operands) {
  if (Name.contains('.'))
    return TokError("invalid operand");

  Mnemonic = Name;
  Operands.push_back(
      AArch64Operand::CreateToken("sysp", NameLoc, getContext()));

  const AsmToken &Tok = getTok();
  StringRef Op = Tok.getString();
  SMLoc S = Tok.getLoc();

  if (Mnemonic == "tlbip") {
    // An "nXS" suffix selects the XS-qualified variant; strip it before the
    // table lookup and fold it back into the encoding below.
    bool HasnXSQualifier = Op.endswith_insensitive("nXS");
    if (HasnXSQualifier) {
      Op = Op.drop_back(3);
    }
    const AArch64TLBI::TLBI *TLBIorig = AArch64TLBI::lookupTLBIByName(Op);
    if (!TLBIorig)
      return TokError("invalid operand for TLBIP instruction");
    // Rebuild the table entry with the nXS bit (bit 7) set in the encoding
    // and FeatureXS added to the requirements when the qualifier is present.
    const AArch64TLBI::TLBI TLBI(
        TLBIorig->Name, TLBIorig->Encoding | (HasnXSQualifier ? (1 << 7) : 0),
        TLBIorig->NeedsReg,
        HasnXSQualifier
            ? TLBIorig->FeaturesRequired | FeatureBitset({AArch64::FeatureXS})
            : TLBIorig->FeaturesRequired);
    if (!TLBI.haveFeatures(getSTI().getFeatureBits())) {
      std::string Name =
          std::string(TLBI.Name) + (HasnXSQualifier ? "nXS" : "");
      std::string Str("TLBIP " + Name + " requires: ");
      setRequiredFeatureString(TLBI.getRequiredFeatures(), Str);
      return TokError(Str);
    }
    createSysAlias(TLBI.Encoding, Operands, S);
  }

  Lex(); // Eat operand.

  if (parseComma())
    return true;

  if (Tok.isNot(AsmToken::Identifier))
    return TokError("expected register identifier");

  // TLBIP takes a register pair: either XZR,XZR or a sequential GPR pair.
  auto Result = tryParseSyspXzrPair(Operands);
  if (Result == MatchOperand_NoMatch)
    Result = tryParseGPRSeqPair(Operands);
  if (Result != MatchOperand_Success)
    return TokError("specified " + Mnemonic +
                    " op requires a pair of registers");

  if (parseToken(AsmToken::EndOfStatement, "unexpected token in argument list"))
    return true;

  return false;
}
/// tryParseBarrierOperand - Parse the barrier option for DSB/DMB/ISB/TSB:
/// either a #imm in the range 0-15 or a named option (e.g. "sy", "csync").
OperandMatchResultTy
AArch64AsmParser::tryParseBarrierOperand(OperandVector &Operands) {
  MCAsmParser &Parser = getParser();
  const AsmToken &Tok = getTok();

  if (Mnemonic == "tsb" && Tok.isNot(AsmToken::Identifier)) {
    TokError("'csync' operand expected");
    return MatchOperand_ParseFail;
  } else if (parseOptionalToken(AsmToken::Hash) || Tok.is(AsmToken::Integer)) {
    // Immediate operand.
    const MCExpr *ImmVal;
    SMLoc ExprLoc = getLoc();
    // Remember the token so it can be pushed back for the nXS re-parse below.
    AsmToken IntTok = Tok;
    if (getParser().parseExpression(ImmVal))
      return MatchOperand_ParseFail;
    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
    if (!MCE) {
      Error(ExprLoc, "immediate value expected for barrier operand");
      return MatchOperand_ParseFail;
    }
    int64_t Value = MCE->getValue();
    if (Mnemonic == "dsb" && Value > 15) {
      // This case is a no match here, but it might be matched by the nXS
      // variant. Deliberately not unlex the optional '#' as it is not necessary
      // to characterize an integer immediate.
      Parser.getLexer().UnLex(IntTok);
      return MatchOperand_NoMatch;
    }
    if (Value < 0 || Value > 15) {
      Error(ExprLoc, "barrier operand out of range");
      return MatchOperand_ParseFail;
    }
    // Attach the canonical name for this encoding if there is one.
    auto DB = AArch64DB::lookupDBByEncoding(Value);
    Operands.push_back(AArch64Operand::CreateBarrier(Value, DB ? DB->Name : "",
                                                     ExprLoc, getContext(),
                                                     false /*hasnXSModifier*/));
    return MatchOperand_Success;
  }

  if (Tok.isNot(AsmToken::Identifier)) {
    TokError("invalid operand for instruction");
    return MatchOperand_ParseFail;
  }

  StringRef Operand = Tok.getString();
  auto TSB = AArch64TSB::lookupTSBByName(Operand);
  auto DB = AArch64DB::lookupDBByName(Operand);
  // The only valid named option for ISB is 'sy'
  if (Mnemonic == "isb" && (!DB || DB->Encoding != AArch64DB::sy)) {
    TokError("'sy' or #imm operand expected");
    return MatchOperand_ParseFail;
  // The only valid named option for TSB is 'csync'
  } else if (Mnemonic == "tsb" && (!TSB || TSB->Encoding != AArch64TSB::csync)) {
    TokError("'csync' operand expected");
    return MatchOperand_ParseFail;
  } else if (!DB && !TSB) {
    if (Mnemonic == "dsb") {
      // This case is a no match here, but it might be matched by the nXS
      // variant.
      return MatchOperand_NoMatch;
    }
    TokError("invalid barrier option name");
    return MatchOperand_ParseFail;
  }

  Operands.push_back(AArch64Operand::CreateBarrier(
      DB ? DB->Encoding : TSB->Encoding, Tok.getString(), getLoc(),
      getContext(), false /*hasnXSModifier*/));
  Lex(); // Consume the option

  return MatchOperand_Success;
}
/// tryParseBarriernXSOperand - Parse the operand of the DSB nXS variant:
/// either one of the immediates 16/20/24/28 or a named nXS barrier option.
OperandMatchResultTy
AArch64AsmParser::tryParseBarriernXSOperand(OperandVector &Operands) {
  const AsmToken &Tok = getTok();

  assert(Mnemonic == "dsb" && "Instruction does not accept nXS operands");
  if (Mnemonic != "dsb")
    return MatchOperand_ParseFail;

  if (parseOptionalToken(AsmToken::Hash) || Tok.is(AsmToken::Integer)) {
    // Immediate operand.
    const MCExpr *ImmVal;
    SMLoc ExprLoc = getLoc();
    if (getParser().parseExpression(ImmVal))
      return MatchOperand_ParseFail;
    const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
    if (!MCE) {
      Error(ExprLoc, "immediate value expected for barrier operand");
      return MatchOperand_ParseFail;
    }
    int64_t Value = MCE->getValue();
    // v8.7-A DSB in the nXS variant accepts only the following immediate
    // values: 16, 20, 24, 28.
    if (Value != 16 && Value != 20 && Value != 24 && Value != 28) {
      Error(ExprLoc, "barrier operand out of range");
      return MatchOperand_ParseFail;
    }
    // The range check above guarantees the lookup succeeds.
    auto DB = AArch64DBnXS::lookupDBnXSByImmValue(Value);
    Operands.push_back(AArch64Operand::CreateBarrier(DB->Encoding, DB->Name,
                                                     ExprLoc, getContext(),
                                                     true /*hasnXSModifier*/));
    return MatchOperand_Success;
  }

  if (Tok.isNot(AsmToken::Identifier)) {
    TokError("invalid operand for instruction");
    return MatchOperand_ParseFail;
  }

  StringRef Operand = Tok.getString();
  auto DB = AArch64DBnXS::lookupDBnXSByName(Operand);

  if (!DB) {
    TokError("invalid barrier option name");
    return MatchOperand_ParseFail;
  }

  Operands.push_back(
      AArch64Operand::CreateBarrier(DB->Encoding, Tok.getString(), getLoc(),
                                    getContext(), true /*hasnXSModifier*/));
  Lex(); // Consume the option

  return MatchOperand_Success;
}
/// tryParseSysReg - Parse a system-register operand for MRS/MSR. Resolves
/// the identifier to its MRS (read) and MSR (write) encodings, falling back
/// to the generic S<op0>_<op1>_<Cn>_<Cm>_<op2> form, and also looks up any
/// matching PSTATE field immediate.
OperandMatchResultTy
AArch64AsmParser::tryParseSysReg(OperandVector &Operands) {
  const AsmToken &Tok = getTok();

  if (Tok.isNot(AsmToken::Identifier))
    return MatchOperand_NoMatch;

  // SVCR names (e.g. for SMSTART/SMSTOP) are handled elsewhere.
  if (AArch64SVCR::lookupSVCRByName(Tok.getString()))
    return MatchOperand_NoMatch;

  int MRSReg, MSRReg;
  auto SysReg = AArch64SysReg::lookupSysRegByName(Tok.getString());
  if (SysReg && SysReg->haveFeatures(getSTI().getFeatureBits())) {
    // -1 marks the direction (read or write) as invalid for this register.
    MRSReg = SysReg->Readable ? SysReg->Encoding : -1;
    MSRReg = SysReg->Writeable ? SysReg->Encoding : -1;
  } else
    MRSReg = MSRReg = AArch64SysReg::parseGenericRegister(Tok.getString());

  unsigned PStateImm = -1;
  auto PState15 = AArch64PState::lookupPStateImm0_15ByName(Tok.getString());
  if (PState15 && PState15->haveFeatures(getSTI().getFeatureBits()))
    PStateImm = PState15->Encoding;
  if (!PState15) {
    auto PState1 = AArch64PState::lookupPStateImm0_1ByName(Tok.getString());
    if (PState1 && PState1->haveFeatures(getSTI().getFeatureBits()))
      PStateImm = PState1->Encoding;
  }

  Operands.push_back(
      AArch64Operand::CreateSysReg(Tok.getString(), getLoc(), MRSReg, MSRReg,
                                   PStateImm, getContext()));
  Lex(); // Eat identifier

  return MatchOperand_Success;
}
/// tryParseNeonVectorRegister - Parse a vector register operand.
///
/// Returns true if no Neon vector register could be parsed, false on success
/// (following the parseRegister convention). Also consumes any trailing
/// element-kind suffix and vector index.
bool AArch64AsmParser::tryParseNeonVectorRegister(OperandVector &Operands) {
  if (getTok().isNot(AsmToken::Identifier))
    return true;

  SMLoc S = getLoc();
  // Check for a vector register specifier first.
  StringRef Kind;
  MCRegister Reg;
  OperandMatchResultTy Res =
      tryParseVectorRegister(Reg, Kind, RegKind::NeonVector);
  if (Res != MatchOperand_Success)
    return true;

  const auto &KindRes = parseVectorKind(Kind, RegKind::NeonVector);
  if (!KindRes)
    return true;

  unsigned ElementWidth = KindRes->second;
  Operands.push_back(
      AArch64Operand::CreateVectorReg(Reg, RegKind::NeonVector, ElementWidth,
                                      S, getLoc(), getContext()));

  // If there was an explicit qualifier, that goes on as a literal text
  // operand.
  if (!Kind.empty())
    Operands.push_back(AArch64Operand::CreateToken(Kind, S, getContext()));

  // Success here means "parsed"; only a failed index parse is an error.
  return tryParseVectorIndex(Operands) == MatchOperand_ParseFail;
}
  3579. OperandMatchResultTy
  3580. AArch64AsmParser::tryParseVectorIndex(OperandVector &Operands) {
  3581. SMLoc SIdx = getLoc();
  3582. if (parseOptionalToken(AsmToken::LBrac)) {
  3583. const MCExpr *ImmVal;
  3584. if (getParser().parseExpression(ImmVal))
  3585. return MatchOperand_NoMatch;
  3586. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
  3587. if (!MCE) {
  3588. TokError("immediate value expected for vector index");
  3589. return MatchOperand_ParseFail;;
  3590. }
  3591. SMLoc E = getLoc();
  3592. if (parseToken(AsmToken::RBrac, "']' expected"))
  3593. return MatchOperand_ParseFail;;
  3594. Operands.push_back(AArch64Operand::CreateVectorIndex(MCE->getValue(), SIdx,
  3595. E, getContext()));
  3596. return MatchOperand_Success;
  3597. }
  3598. return MatchOperand_NoMatch;
  3599. }
// tryParseVectorRegister - Try to parse a vector register name with
// optional kind specifier. If it is a register specifier, eat the token
// and return it.
//
// On success, \p Reg receives the matched register and \p Kind the element
// suffix including the leading '.' (empty if none was written).
OperandMatchResultTy
AArch64AsmParser::tryParseVectorRegister(MCRegister &Reg, StringRef &Kind,
                                         RegKind MatchKind) {
  const AsmToken &Tok = getTok();

  if (Tok.isNot(AsmToken::Identifier))
    return MatchOperand_NoMatch;

  StringRef Name = Tok.getString();
  // If there is a kind specifier, it's separated from the register name by
  // a '.'.
  size_t Start = 0, Next = Name.find('.');
  StringRef Head = Name.slice(Start, Next);
  unsigned RegNum = matchRegisterNameAlias(Head, MatchKind);

  if (RegNum) {
    if (Next != StringRef::npos) {
      // Validate the suffix but keep the '.' as part of Kind.
      Kind = Name.slice(Next, StringRef::npos);
      if (!isValidVectorKind(Kind, MatchKind)) {
        TokError("invalid vector kind qualifier");
        return MatchOperand_ParseFail;
      }
    }
    Lex(); // Eat the register token.

    Reg = RegNum;
    return MatchOperand_Success;
  }

  return MatchOperand_NoMatch;
}
/// tryParseSVEPredicateVector - Parse a SVE predicate register operand.
///
/// \tparam RK Either RegKind::SVEPredicateVector or
///            RegKind::SVEPredicateAsCounter; controls the accepted index
///            and '/z' vs '/m' qualifier forms.
template <RegKind RK> OperandMatchResultTy
AArch64AsmParser::tryParseSVEPredicateVector(OperandVector &Operands) {
  // Check for a SVE predicate register specifier first.
  const SMLoc S = getLoc();
  StringRef Kind;
  MCRegister RegNum;
  auto Res = tryParseVectorRegister(RegNum, Kind, RK);
  if (Res != MatchOperand_Success)
    return Res;

  const auto &KindRes = parseVectorKind(Kind, RK);
  if (!KindRes)
    return MatchOperand_NoMatch;

  unsigned ElementWidth = KindRes->second;
  Operands.push_back(AArch64Operand::CreateVectorReg(
      RegNum, RK, ElementWidth, S,
      getLoc(), getContext()));

  if (getLexer().is(AsmToken::LBrac)) {
    if (RK == RegKind::SVEPredicateAsCounter) {
      // Predicate-as-counter registers may carry a vector index.
      OperandMatchResultTy ResIndex = tryParseVectorIndex(Operands);
      if (ResIndex == MatchOperand_Success)
        return MatchOperand_Success;
    } else {
      // Indexed predicate, there's no comma so try parse the next operand
      // immediately.
      if (parseOperand(Operands, false, false))
        return MatchOperand_NoMatch;
    }
  }

  // Not all predicates are followed by a '/m' or '/z'.
  if (getTok().isNot(AsmToken::Slash))
    return MatchOperand_Success;

  // But when they do they shouldn't have an element type suffix.
  if (!Kind.empty()) {
    Error(S, "not expecting size suffix");
    return MatchOperand_ParseFail;
  }

  // Add a literal slash as operand
  Operands.push_back(AArch64Operand::CreateToken("/", getLoc(), getContext()));

  Lex(); // Eat the slash.

  // Zeroing or merging?
  auto Pred = getTok().getString().lower();
  if (RK == RegKind::SVEPredicateAsCounter && Pred != "z") {
    Error(getLoc(), "expecting 'z' predication");
    return MatchOperand_ParseFail;
  }

  if (RK == RegKind::SVEPredicateVector && Pred != "z" && Pred != "m") {
    Error(getLoc(), "expecting 'm' or 'z' predication");
    return MatchOperand_ParseFail;
  }

  // Add zero/merge token.
  const char *ZM = Pred == "z" ? "z" : "m";
  Operands.push_back(AArch64Operand::CreateToken(ZM, getLoc(), getContext()));

  Lex(); // Eat zero/merge token.
  return MatchOperand_Success;
}
  3685. /// parseRegister - Parse a register operand.
  3686. bool AArch64AsmParser::parseRegister(OperandVector &Operands) {
  3687. // Try for a Neon vector register.
  3688. if (!tryParseNeonVectorRegister(Operands))
  3689. return false;
  3690. if (tryParseZTOperand(Operands) == MatchOperand_Success)
  3691. return false;
  3692. // Otherwise try for a scalar register.
  3693. if (tryParseGPROperand<false>(Operands) == MatchOperand_Success)
  3694. return false;
  3695. return true;
  3696. }
/// parseSymbolicImmVal - Parse an immediate expression with an optional
/// leading ":specifier:" ELF relocation modifier (e.g. ":lo12:sym"). On
/// success \p ImmVal holds the expression, wrapped in an AArch64MCExpr when
/// a modifier was present. Returns true (with a diagnostic) on error.
bool AArch64AsmParser::parseSymbolicImmVal(const MCExpr *&ImmVal) {
  bool HasELFModifier = false;
  AArch64MCExpr::VariantKind RefKind;

  if (parseOptionalToken(AsmToken::Colon)) {
    HasELFModifier = true;

    if (getTok().isNot(AsmToken::Identifier))
      return TokError("expect relocation specifier in operand after ':'");

    // Match the specifier case-insensitively.
    std::string LowerCase = getTok().getIdentifier().lower();
    RefKind = StringSwitch<AArch64MCExpr::VariantKind>(LowerCase)
                  .Case("lo12", AArch64MCExpr::VK_LO12)
                  .Case("abs_g3", AArch64MCExpr::VK_ABS_G3)
                  .Case("abs_g2", AArch64MCExpr::VK_ABS_G2)
                  .Case("abs_g2_s", AArch64MCExpr::VK_ABS_G2_S)
                  .Case("abs_g2_nc", AArch64MCExpr::VK_ABS_G2_NC)
                  .Case("abs_g1", AArch64MCExpr::VK_ABS_G1)
                  .Case("abs_g1_s", AArch64MCExpr::VK_ABS_G1_S)
                  .Case("abs_g1_nc", AArch64MCExpr::VK_ABS_G1_NC)
                  .Case("abs_g0", AArch64MCExpr::VK_ABS_G0)
                  .Case("abs_g0_s", AArch64MCExpr::VK_ABS_G0_S)
                  .Case("abs_g0_nc", AArch64MCExpr::VK_ABS_G0_NC)
                  .Case("prel_g3", AArch64MCExpr::VK_PREL_G3)
                  .Case("prel_g2", AArch64MCExpr::VK_PREL_G2)
                  .Case("prel_g2_nc", AArch64MCExpr::VK_PREL_G2_NC)
                  .Case("prel_g1", AArch64MCExpr::VK_PREL_G1)
                  .Case("prel_g1_nc", AArch64MCExpr::VK_PREL_G1_NC)
                  .Case("prel_g0", AArch64MCExpr::VK_PREL_G0)
                  .Case("prel_g0_nc", AArch64MCExpr::VK_PREL_G0_NC)
                  .Case("dtprel_g2", AArch64MCExpr::VK_DTPREL_G2)
                  .Case("dtprel_g1", AArch64MCExpr::VK_DTPREL_G1)
                  .Case("dtprel_g1_nc", AArch64MCExpr::VK_DTPREL_G1_NC)
                  .Case("dtprel_g0", AArch64MCExpr::VK_DTPREL_G0)
                  .Case("dtprel_g0_nc", AArch64MCExpr::VK_DTPREL_G0_NC)
                  .Case("dtprel_hi12", AArch64MCExpr::VK_DTPREL_HI12)
                  .Case("dtprel_lo12", AArch64MCExpr::VK_DTPREL_LO12)
                  .Case("dtprel_lo12_nc", AArch64MCExpr::VK_DTPREL_LO12_NC)
                  .Case("pg_hi21_nc", AArch64MCExpr::VK_ABS_PAGE_NC)
                  .Case("tprel_g2", AArch64MCExpr::VK_TPREL_G2)
                  .Case("tprel_g1", AArch64MCExpr::VK_TPREL_G1)
                  .Case("tprel_g1_nc", AArch64MCExpr::VK_TPREL_G1_NC)
                  .Case("tprel_g0", AArch64MCExpr::VK_TPREL_G0)
                  .Case("tprel_g0_nc", AArch64MCExpr::VK_TPREL_G0_NC)
                  .Case("tprel_hi12", AArch64MCExpr::VK_TPREL_HI12)
                  .Case("tprel_lo12", AArch64MCExpr::VK_TPREL_LO12)
                  .Case("tprel_lo12_nc", AArch64MCExpr::VK_TPREL_LO12_NC)
                  .Case("tlsdesc_lo12", AArch64MCExpr::VK_TLSDESC_LO12)
                  .Case("got", AArch64MCExpr::VK_GOT_PAGE)
                  .Case("gotpage_lo15", AArch64MCExpr::VK_GOT_PAGE_LO15)
                  .Case("got_lo12", AArch64MCExpr::VK_GOT_LO12)
                  .Case("gottprel", AArch64MCExpr::VK_GOTTPREL_PAGE)
                  .Case("gottprel_lo12", AArch64MCExpr::VK_GOTTPREL_LO12_NC)
                  .Case("gottprel_g1", AArch64MCExpr::VK_GOTTPREL_G1)
                  .Case("gottprel_g0_nc", AArch64MCExpr::VK_GOTTPREL_G0_NC)
                  .Case("tlsdesc", AArch64MCExpr::VK_TLSDESC_PAGE)
                  .Case("secrel_lo12", AArch64MCExpr::VK_SECREL_LO12)
                  .Case("secrel_hi12", AArch64MCExpr::VK_SECREL_HI12)
                  .Default(AArch64MCExpr::VK_INVALID);

    if (RefKind == AArch64MCExpr::VK_INVALID)
      return TokError("expect relocation specifier in operand after ':'");

    Lex(); // Eat identifier

    if (parseToken(AsmToken::Colon, "expect ':' after relocation specifier"))
      return true;
  }

  if (getParser().parseExpression(ImmVal))
    return true;

  if (HasELFModifier)
    ImmVal = AArch64MCExpr::create(ImmVal, RefKind, getContext());

  return false;
}
  3765. OperandMatchResultTy
  3766. AArch64AsmParser::tryParseMatrixTileList(OperandVector &Operands) {
  3767. if (getTok().isNot(AsmToken::LCurly))
  3768. return MatchOperand_NoMatch;
  3769. auto ParseMatrixTile = [this](unsigned &Reg, unsigned &ElementWidth) {
  3770. StringRef Name = getTok().getString();
  3771. size_t DotPosition = Name.find('.');
  3772. if (DotPosition == StringRef::npos)
  3773. return MatchOperand_NoMatch;
  3774. unsigned RegNum = matchMatrixTileListRegName(Name);
  3775. if (!RegNum)
  3776. return MatchOperand_NoMatch;
  3777. StringRef Tail = Name.drop_front(DotPosition);
  3778. const std::optional<std::pair<int, int>> &KindRes =
  3779. parseVectorKind(Tail, RegKind::Matrix);
  3780. if (!KindRes) {
  3781. TokError("Expected the register to be followed by element width suffix");
  3782. return MatchOperand_ParseFail;
  3783. }
  3784. ElementWidth = KindRes->second;
  3785. Reg = RegNum;
  3786. Lex(); // Eat the register.
  3787. return MatchOperand_Success;
  3788. };
  3789. SMLoc S = getLoc();
  3790. auto LCurly = getTok();
  3791. Lex(); // Eat left bracket token.
  3792. // Empty matrix list
  3793. if (parseOptionalToken(AsmToken::RCurly)) {
  3794. Operands.push_back(AArch64Operand::CreateMatrixTileList(
  3795. /*RegMask=*/0, S, getLoc(), getContext()));
  3796. return MatchOperand_Success;
  3797. }
  3798. // Try parse {za} alias early
  3799. if (getTok().getString().equals_insensitive("za")) {
  3800. Lex(); // Eat 'za'
  3801. if (parseToken(AsmToken::RCurly, "'}' expected"))
  3802. return MatchOperand_ParseFail;
  3803. Operands.push_back(AArch64Operand::CreateMatrixTileList(
  3804. /*RegMask=*/0xFF, S, getLoc(), getContext()));
  3805. return MatchOperand_Success;
  3806. }
  3807. SMLoc TileLoc = getLoc();
  3808. unsigned FirstReg, ElementWidth;
  3809. auto ParseRes = ParseMatrixTile(FirstReg, ElementWidth);
  3810. if (ParseRes != MatchOperand_Success) {
  3811. getLexer().UnLex(LCurly);
  3812. return ParseRes;
  3813. }
  3814. const MCRegisterInfo *RI = getContext().getRegisterInfo();
  3815. unsigned PrevReg = FirstReg;
  3816. SmallSet<unsigned, 8> DRegs;
  3817. AArch64Operand::ComputeRegsForAlias(FirstReg, DRegs, ElementWidth);
  3818. SmallSet<unsigned, 8> SeenRegs;
  3819. SeenRegs.insert(FirstReg);
  3820. while (parseOptionalToken(AsmToken::Comma)) {
  3821. TileLoc = getLoc();
  3822. unsigned Reg, NextElementWidth;
  3823. ParseRes = ParseMatrixTile(Reg, NextElementWidth);
  3824. if (ParseRes != MatchOperand_Success)
  3825. return ParseRes;
  3826. // Element size must match on all regs in the list.
  3827. if (ElementWidth != NextElementWidth) {
  3828. Error(TileLoc, "mismatched register size suffix");
  3829. return MatchOperand_ParseFail;
  3830. }
  3831. if (RI->getEncodingValue(Reg) <= (RI->getEncodingValue(PrevReg)))
  3832. Warning(TileLoc, "tile list not in ascending order");
  3833. if (SeenRegs.contains(Reg))
  3834. Warning(TileLoc, "duplicate tile in list");
  3835. else {
  3836. SeenRegs.insert(Reg);
  3837. AArch64Operand::ComputeRegsForAlias(Reg, DRegs, ElementWidth);
  3838. }
  3839. PrevReg = Reg;
  3840. }
  3841. if (parseToken(AsmToken::RCurly, "'}' expected"))
  3842. return MatchOperand_ParseFail;
  3843. unsigned RegMask = 0;
  3844. for (auto Reg : DRegs)
  3845. RegMask |= 0x1 << (RI->getEncodingValue(Reg) -
  3846. RI->getEncodingValue(AArch64::ZAD0));
  3847. Operands.push_back(
  3848. AArch64Operand::CreateMatrixTileList(RegMask, S, getLoc(), getContext()));
  3849. return MatchOperand_Success;
  3850. }
/// Parse a vector-register list operand such as '{ v0.8b, v1.8b }' or
/// '{ z0.d - z3.d }' for the register kind \p VectorKind (Neon, SVE, ...).
/// On success a single VectorList operand (first reg, count, stride, element
/// shape) is appended to \p Operands. If \p ExpectMatch is true a malformed
/// list is diagnosed as an error instead of being reported as "no match".
template <RegKind VectorKind>
OperandMatchResultTy
AArch64AsmParser::tryParseVectorList(OperandVector &Operands,
                                     bool ExpectMatch) {
  MCAsmParser &Parser = getParser();
  if (!getTok().is(AsmToken::LCurly))
    return MatchOperand_NoMatch;

  // Wrapper around parse function: parse one vector register and validate its
  // kind suffix. NoMatchIsError upgrades "no match" to a diagnostic, except
  // for identifiers starting with "za"/"zt0", which other parsers may accept.
  auto ParseVector = [this](MCRegister &Reg, StringRef &Kind, SMLoc Loc,
                            bool NoMatchIsError) {
    auto RegTok = getTok();
    auto ParseRes = tryParseVectorRegister(Reg, Kind, VectorKind);
    if (ParseRes == MatchOperand_Success) {
      if (parseVectorKind(Kind, VectorKind))
        return ParseRes;
      llvm_unreachable("Expected a valid vector kind");
    }

    // "zt0" is handled by the lookup-table parser, not here.
    if (RegTok.is(AsmToken::Identifier) && ParseRes == MatchOperand_NoMatch &&
        RegTok.getString().equals_insensitive("zt0"))
      return MatchOperand_NoMatch;

    if (RegTok.isNot(AsmToken::Identifier) ||
        ParseRes == MatchOperand_ParseFail ||
        (ParseRes == MatchOperand_NoMatch && NoMatchIsError &&
         !RegTok.getString().startswith_insensitive("za"))) {
      Error(Loc, "vector register expected");
      return MatchOperand_ParseFail;
    }

    return MatchOperand_NoMatch;
  };

  int NumRegs = getNumRegsForRegKind(VectorKind);
  SMLoc S = getLoc();
  auto LCurly = getTok();
  Lex(); // Eat left bracket token.

  StringRef Kind;
  MCRegister FirstReg;
  auto ParseRes = ParseVector(FirstReg, Kind, getLoc(), ExpectMatch);

  // Put back the original left bracket if there was no match, so that
  // different types of list-operands can be matched (e.g. SVE, Neon).
  if (ParseRes == MatchOperand_NoMatch)
    Parser.getLexer().UnLex(LCurly);

  if (ParseRes != MatchOperand_Success)
    return ParseRes;

  int64_t PrevReg = FirstReg;
  unsigned Count = 1;
  int Stride = 1;

  // Range form: '{ vN.T - vM.T }' covers 2..4 consecutive registers.
  if (parseOptionalToken(AsmToken::Minus)) {
    SMLoc Loc = getLoc();
    StringRef NextKind;
    MCRegister Reg;
    ParseRes = ParseVector(Reg, NextKind, getLoc(), true);
    if (ParseRes != MatchOperand_Success)
      return ParseRes;

    // Any Kind suffices must match on all regs in the list.
    if (Kind != NextKind) {
      Error(Loc, "mismatched register size suffix");
      return MatchOperand_ParseFail;
    }

    // NOTE(review): this branch compares MCRegister enum values directly,
    // while the comma branch below uses encoding values — presumably the
    // enum values for a given vector kind are contiguous; confirm.
    unsigned Space =
        (PrevReg < Reg) ? (Reg - PrevReg) : (Reg + NumRegs - PrevReg);

    if (Space == 0 || Space > 3) {
      Error(Loc, "invalid number of vectors");
      return MatchOperand_ParseFail;
    }

    Count += Space;
  } else {
    // List form: '{ a, b, c }'. All gaps must share one stride, with
    // wraparound at the last register of the class.
    bool HasCalculatedStride = false;
    while (parseOptionalToken(AsmToken::Comma)) {
      SMLoc Loc = getLoc();
      StringRef NextKind;
      MCRegister Reg;
      ParseRes = ParseVector(Reg, NextKind, getLoc(), true);
      if (ParseRes != MatchOperand_Success)
        return ParseRes;

      // Any Kind suffices must match on all regs in the list.
      if (Kind != NextKind) {
        Error(Loc, "mismatched register size suffix");
        return MatchOperand_ParseFail;
      }

      unsigned RegVal = getContext().getRegisterInfo()->getEncodingValue(Reg);
      unsigned PrevRegVal =
          getContext().getRegisterInfo()->getEncodingValue(PrevReg);
      // The first gap fixes the stride for the rest of the list.
      if (!HasCalculatedStride) {
        Stride = (PrevRegVal < RegVal) ? (RegVal - PrevRegVal)
                                       : (RegVal + NumRegs - PrevRegVal);
        HasCalculatedStride = true;
      }

      // Register must be incremental (with a wraparound at last register).
      if (Stride == 0 || RegVal != ((PrevRegVal + Stride) % NumRegs)) {
        Error(Loc, "registers must have the same sequential stride");
        return MatchOperand_ParseFail;
      }

      PrevReg = Reg;
      ++Count;
    }
  }

  if (parseToken(AsmToken::RCurly, "'}' expected"))
    return MatchOperand_ParseFail;

  if (Count > 4) {
    Error(S, "invalid number of vectors");
    return MatchOperand_ParseFail;
  }

  unsigned NumElements = 0;
  unsigned ElementWidth = 0;
  if (!Kind.empty()) {
    if (const auto &VK = parseVectorKind(Kind, VectorKind))
      std::tie(NumElements, ElementWidth) = *VK;
  }

  Operands.push_back(AArch64Operand::CreateVectorList(
      FirstReg, Count, Stride, NumElements, ElementWidth, VectorKind, S,
      getLoc(), getContext()));

  return MatchOperand_Success;
}
  3964. /// parseNeonVectorList - Parse a vector list operand for AdvSIMD instructions.
  3965. bool AArch64AsmParser::parseNeonVectorList(OperandVector &Operands) {
  3966. auto ParseRes = tryParseVectorList<RegKind::NeonVector>(Operands, true);
  3967. if (ParseRes != MatchOperand_Success)
  3968. return true;
  3969. return tryParseVectorIndex(Operands) == MatchOperand_ParseFail;
  3970. }
  3971. OperandMatchResultTy
  3972. AArch64AsmParser::tryParseGPR64sp0Operand(OperandVector &Operands) {
  3973. SMLoc StartLoc = getLoc();
  3974. MCRegister RegNum;
  3975. OperandMatchResultTy Res = tryParseScalarRegister(RegNum);
  3976. if (Res != MatchOperand_Success)
  3977. return Res;
  3978. if (!parseOptionalToken(AsmToken::Comma)) {
  3979. Operands.push_back(AArch64Operand::CreateReg(
  3980. RegNum, RegKind::Scalar, StartLoc, getLoc(), getContext()));
  3981. return MatchOperand_Success;
  3982. }
  3983. parseOptionalToken(AsmToken::Hash);
  3984. if (getTok().isNot(AsmToken::Integer)) {
  3985. Error(getLoc(), "index must be absent or #0");
  3986. return MatchOperand_ParseFail;
  3987. }
  3988. const MCExpr *ImmVal;
  3989. if (getParser().parseExpression(ImmVal) || !isa<MCConstantExpr>(ImmVal) ||
  3990. cast<MCConstantExpr>(ImmVal)->getValue() != 0) {
  3991. Error(getLoc(), "index must be absent or #0");
  3992. return MatchOperand_ParseFail;
  3993. }
  3994. Operands.push_back(AArch64Operand::CreateReg(
  3995. RegNum, RegKind::Scalar, StartLoc, getLoc(), getContext()));
  3996. return MatchOperand_Success;
  3997. }
  3998. OperandMatchResultTy
  3999. AArch64AsmParser::tryParseZTOperand(OperandVector &Operands) {
  4000. SMLoc StartLoc = getLoc();
  4001. const AsmToken &Tok = getTok();
  4002. std::string Name = Tok.getString().lower();
  4003. unsigned RegNum = matchRegisterNameAlias(Name, RegKind::LookupTable);
  4004. if (RegNum == 0)
  4005. return MatchOperand_NoMatch;
  4006. Operands.push_back(AArch64Operand::CreateReg(
  4007. RegNum, RegKind::LookupTable, StartLoc, getLoc(), getContext()));
  4008. Lex(); // Eat identifier token.
  4009. // Check if register is followed by an index
  4010. if (parseOptionalToken(AsmToken::LBrac)) {
  4011. const MCExpr *ImmVal;
  4012. if (getParser().parseExpression(ImmVal))
  4013. return MatchOperand_NoMatch;
  4014. const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal);
  4015. if (!MCE) {
  4016. TokError("immediate value expected for vector index");
  4017. return MatchOperand_ParseFail;
  4018. }
  4019. if (parseToken(AsmToken::RBrac, "']' expected"))
  4020. return MatchOperand_ParseFail;
  4021. Operands.push_back(AArch64Operand::CreateImm(
  4022. MCConstantExpr::create(MCE->getValue(), getContext()), StartLoc,
  4023. getLoc(), getContext()));
  4024. }
  4025. return MatchOperand_Success;
  4026. }
  4027. template <bool ParseShiftExtend, RegConstraintEqualityTy EqTy>
  4028. OperandMatchResultTy
  4029. AArch64AsmParser::tryParseGPROperand(OperandVector &Operands) {
  4030. SMLoc StartLoc = getLoc();
  4031. MCRegister RegNum;
  4032. OperandMatchResultTy Res = tryParseScalarRegister(RegNum);
  4033. if (Res != MatchOperand_Success)
  4034. return Res;
  4035. // No shift/extend is the default.
  4036. if (!ParseShiftExtend || getTok().isNot(AsmToken::Comma)) {
  4037. Operands.push_back(AArch64Operand::CreateReg(
  4038. RegNum, RegKind::Scalar, StartLoc, getLoc(), getContext(), EqTy));
  4039. return MatchOperand_Success;
  4040. }
  4041. // Eat the comma
  4042. Lex();
  4043. // Match the shift
  4044. SmallVector<std::unique_ptr<MCParsedAsmOperand>, 1> ExtOpnd;
  4045. Res = tryParseOptionalShiftExtend(ExtOpnd);
  4046. if (Res != MatchOperand_Success)
  4047. return Res;
  4048. auto Ext = static_cast<AArch64Operand*>(ExtOpnd.back().get());
  4049. Operands.push_back(AArch64Operand::CreateReg(
  4050. RegNum, RegKind::Scalar, StartLoc, Ext->getEndLoc(), getContext(), EqTy,
  4051. Ext->getShiftExtendType(), Ext->getShiftExtendAmount(),
  4052. Ext->hasShiftExtendAmount()));
  4053. return MatchOperand_Success;
  4054. }
  4055. bool AArch64AsmParser::parseOptionalMulOperand(OperandVector &Operands) {
  4056. MCAsmParser &Parser = getParser();
  4057. // Some SVE instructions have a decoration after the immediate, i.e.
  4058. // "mul vl". We parse them here and add tokens, which must be present in the
  4059. // asm string in the tablegen instruction.
  4060. bool NextIsVL =
  4061. Parser.getLexer().peekTok().getString().equals_insensitive("vl");
  4062. bool NextIsHash = Parser.getLexer().peekTok().is(AsmToken::Hash);
  4063. if (!getTok().getString().equals_insensitive("mul") ||
  4064. !(NextIsVL || NextIsHash))
  4065. return true;
  4066. Operands.push_back(
  4067. AArch64Operand::CreateToken("mul", getLoc(), getContext()));
  4068. Lex(); // Eat the "mul"
  4069. if (NextIsVL) {
  4070. Operands.push_back(
  4071. AArch64Operand::CreateToken("vl", getLoc(), getContext()));
  4072. Lex(); // Eat the "vl"
  4073. return false;
  4074. }
  4075. if (NextIsHash) {
  4076. Lex(); // Eat the #
  4077. SMLoc S = getLoc();
  4078. // Parse immediate operand.
  4079. const MCExpr *ImmVal;
  4080. if (!Parser.parseExpression(ImmVal))
  4081. if (const MCConstantExpr *MCE = dyn_cast<MCConstantExpr>(ImmVal)) {
  4082. Operands.push_back(AArch64Operand::CreateImm(
  4083. MCConstantExpr::create(MCE->getValue(), getContext()), S, getLoc(),
  4084. getContext()));
  4085. return MatchOperand_Success;
  4086. }
  4087. }
  4088. return Error(getLoc(), "expected 'vl' or '#<imm>'");
  4089. }
  4090. bool AArch64AsmParser::parseOptionalVGOperand(OperandVector &Operands,
  4091. StringRef &VecGroup) {
  4092. MCAsmParser &Parser = getParser();
  4093. auto Tok = Parser.getTok();
  4094. if (Tok.isNot(AsmToken::Identifier))
  4095. return true;
  4096. StringRef VG = StringSwitch<StringRef>(Tok.getString().lower())
  4097. .Case("vgx2", "vgx2")
  4098. .Case("vgx4", "vgx4")
  4099. .Default("");
  4100. if (VG.empty())
  4101. return true;
  4102. VecGroup = VG;
  4103. Parser.Lex(); // Eat vgx[2|4]
  4104. return false;
  4105. }
  4106. bool AArch64AsmParser::parseKeywordOperand(OperandVector &Operands) {
  4107. auto Tok = getTok();
  4108. if (Tok.isNot(AsmToken::Identifier))
  4109. return true;
  4110. auto Keyword = Tok.getString();
  4111. Keyword = StringSwitch<StringRef>(Keyword.lower())
  4112. .Case("sm", "sm")
  4113. .Case("za", "za")
  4114. .Default(Keyword);
  4115. Operands.push_back(
  4116. AArch64Operand::CreateToken(Keyword, Tok.getLoc(), getContext()));
  4117. Lex();
  4118. return false;
  4119. }
/// parseOperand - Parse a arm instruction operand.  For now this parses the
/// operand regardless of the mnemonic.
///
/// \param Operands       list the parsed operand(s) are appended to.
/// \param isCondCode     true when a condition-code operand is expected at
///                       this position.
/// \param invertCondCode true when the condition code should be inverted
///                       (cset/cinc-style aliases).
/// \returns true on failure (an error has been emitted), false on success.
bool AArch64AsmParser::parseOperand(OperandVector &Operands, bool isCondCode,
                                    bool invertCondCode) {
  MCAsmParser &Parser = getParser();

  // Check if the current operand has a custom associated parser, if so, try to
  // custom parse the operand, or fallback to the general approach.
  OperandMatchResultTy ResTy =
      MatchOperandParserImpl(Operands, Mnemonic, /*ParseForAllFeatures=*/ true);
  if (ResTy == MatchOperand_Success)
    return false;
  // If there wasn't a custom match, try the generic matcher below. Otherwise,
  // there was a match, but an error occurred, in which case, just return that
  // the operand parsing failed.
  if (ResTy == MatchOperand_ParseFail)
    return true;

  // Nothing custom, so do general case parsing.
  SMLoc S, E;
  switch (getLexer().getKind()) {
  default: {
    // Anything else is treated as a symbolic immediate expression.
    // (These S/E deliberately shadow the outer ones.)
    SMLoc S = getLoc();
    const MCExpr *Expr;
    if (parseSymbolicImmVal(Expr))
      return Error(S, "invalid operand");

    SMLoc E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
    Operands.push_back(AArch64Operand::CreateImm(Expr, S, E, getContext()));
    return false;
  }
  case AsmToken::LBrac: {
    Operands.push_back(
        AArch64Operand::CreateToken("[", getLoc(), getContext()));
    Lex(); // Eat '['

    // There's no comma after a '[', so we can parse the next operand
    // immediately.
    return parseOperand(Operands, false, false);
  }
  case AsmToken::LCurly: {
    // First try a register list (NEON vector list); if that doesn't match,
    // emit the '{' as a token and recurse (SME tile groups etc.).
    if (!parseNeonVectorList(Operands))
      return false;

    Operands.push_back(
        AArch64Operand::CreateToken("{", getLoc(), getContext()));
    Lex(); // Eat '{'

    // There's no comma after a '{', so we can parse the next operand
    // immediately.
    return parseOperand(Operands, false, false);
  }
  case AsmToken::Identifier: {
    // See if this is a "VG" decoration used by SME instructions.
    StringRef VecGroup;
    if (!parseOptionalVGOperand(Operands, VecGroup)) {
      Operands.push_back(
          AArch64Operand::CreateToken(VecGroup, getLoc(), getContext()));
      return false;
    }
    // If we're expecting a Condition Code operand, then just parse that.
    if (isCondCode)
      return parseCondCode(Operands, invertCondCode);

    // If it's a register name, parse it.
    if (!parseRegister(Operands))
      return false;

    // See if this is a "mul vl" decoration or "mul #<int>" operand used
    // by SVE instructions.
    if (!parseOptionalMulOperand(Operands))
      return false;

    // If this is an "smstart" or "smstop" instruction, parse its special
    // keyword operand as an identifier.
    if (Mnemonic == "smstart" || Mnemonic == "smstop")
      return parseKeywordOperand(Operands);

    // This could be an optional "shift" or "extend" operand.
    OperandMatchResultTy GotShift = tryParseOptionalShiftExtend(Operands);
    // We can only continue if no tokens were eaten.
    if (GotShift != MatchOperand_NoMatch)
      return GotShift;

    // If this is a two-word mnemonic, parse its special keyword
    // operand as an identifier.
    if (Mnemonic == "brb")
      return parseKeywordOperand(Operands);

    // This was not a register so parse other operands that start with an
    // identifier (like labels) as expressions and create them as immediates.
    const MCExpr *IdVal;
    S = getLoc();
    if (getParser().parseExpression(IdVal))
      return true;
    E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
    Operands.push_back(AArch64Operand::CreateImm(IdVal, S, E, getContext()));
    return false;
  }
  case AsmToken::Integer:
  case AsmToken::Real:
  case AsmToken::Hash: {
    // #42 -> immediate.
    S = getLoc();

    parseOptionalToken(AsmToken::Hash);

    // Parse a negative sign
    bool isNegative = false;
    if (getTok().is(AsmToken::Minus)) {
      isNegative = true;
      // We need to consume this token only when we have a Real, otherwise
      // we let parseSymbolicImmVal take care of it
      if (Parser.getLexer().peekTok().is(AsmToken::Real))
        Lex();
    }

    // The only Real that should come through here is a literal #0.0 for
    // the fcmp[e] r, #0.0 instructions. They expect raw token operands,
    // so convert the value.
    const AsmToken &Tok = getTok();
    if (Tok.is(AsmToken::Real)) {
      APFloat RealVal(APFloat::IEEEdouble(), Tok.getString());
      uint64_t IntVal = RealVal.bitcastToAPInt().getZExtValue();
      if (Mnemonic != "fcmp" && Mnemonic != "fcmpe" && Mnemonic != "fcmeq" &&
          Mnemonic != "fcmge" && Mnemonic != "fcmgt" && Mnemonic != "fcmle" &&
          Mnemonic != "fcmlt" && Mnemonic != "fcmne")
        return TokError("unexpected floating point literal");
      else if (IntVal != 0 || isNegative)
        return TokError("expected floating-point constant #0.0");
      Lex(); // Eat the token.

      // Emit "#0" and ".0" as two raw tokens to match the tablegen string.
      Operands.push_back(AArch64Operand::CreateToken("#0", S, getContext()));
      Operands.push_back(AArch64Operand::CreateToken(".0", S, getContext()));
      return false;
    }

    const MCExpr *ImmVal;
    if (parseSymbolicImmVal(ImmVal))
      return true;

    E = SMLoc::getFromPointer(getLoc().getPointer() - 1);
    Operands.push_back(AArch64Operand::CreateImm(ImmVal, S, E, getContext()));
    return false;
  }
  case AsmToken::Equal: {
    // "ldr <reg>, =<expr>" pseudo-instruction.
    SMLoc Loc = getLoc();
    if (Mnemonic != "ldr") // only parse for ldr pseudo (e.g. ldr r0, =val)
      return TokError("unexpected token in operand");
    Lex(); // Eat '='
    const MCExpr *SubExprVal;
    if (getParser().parseExpression(SubExprVal))
      return true;

    if (Operands.size() < 2 ||
        !static_cast<AArch64Operand &>(*Operands[1]).isScalarReg())
      return Error(Loc, "Only valid when first operand is register");

    bool IsXReg =
        AArch64MCRegisterClasses[AArch64::GPR64allRegClassID].contains(
            Operands[1]->getReg());

    MCContext& Ctx = getContext();
    E = SMLoc::getFromPointer(Loc.getPointer() - 1);
    // NOTE(review): S is never assigned in this case and is used below with
    // its default-constructed value — presumably intentional; confirm.
    // If the op is an imm and can be fit into a mov, then replace ldr with mov.
    if (isa<MCConstantExpr>(SubExprVal)) {
      uint64_t Imm = (cast<MCConstantExpr>(SubExprVal))->getValue();
      // Shift the constant right in 16-bit chunks so it fits a movz+LSL.
      uint32_t ShiftAmt = 0, MaxShiftAmt = IsXReg ? 48 : 16;
      while(Imm > 0xFFFF && countTrailingZeros(Imm) >= 16) {
        ShiftAmt += 16;
        Imm >>= 16;
      }
      if (ShiftAmt <= MaxShiftAmt && Imm <= 0xFFFF) {
        Operands[0] = AArch64Operand::CreateToken("movz", Loc, Ctx);
        Operands.push_back(AArch64Operand::CreateImm(
            MCConstantExpr::create(Imm, Ctx), S, E, Ctx));
        if (ShiftAmt)
          Operands.push_back(AArch64Operand::CreateShiftExtend(AArch64_AM::LSL,
              ShiftAmt, true, S, E, Ctx));
        return false;
      }
      APInt Simm = APInt(64, Imm << ShiftAmt);
      // check if the immediate is an unsigned or signed 32-bit int for W regs
      if (!IsXReg && !(Simm.isIntN(32) || Simm.isSignedIntN(32)))
        return Error(Loc, "Immediate too large for register");
    }
    // If it is a label or an imm that cannot fit in a movz, put it into CP.
    const MCExpr *CPLoc =
        getTargetStreamer().addConstantPoolEntry(SubExprVal, IsXReg ? 8 : 4, Loc);
    Operands.push_back(AArch64Operand::CreateImm(CPLoc, S, E, Ctx));
    return false;
  }
  }
}
  4293. bool AArch64AsmParser::parseImmExpr(int64_t &Out) {
  4294. const MCExpr *Expr = nullptr;
  4295. SMLoc L = getLoc();
  4296. if (check(getParser().parseExpression(Expr), L, "expected expression"))
  4297. return true;
  4298. const MCConstantExpr *Value = dyn_cast_or_null<MCConstantExpr>(Expr);
  4299. if (check(!Value, L, "expected constant expression"))
  4300. return true;
  4301. Out = Value->getValue();
  4302. return false;
  4303. }
  4304. bool AArch64AsmParser::parseComma() {
  4305. if (check(getTok().isNot(AsmToken::Comma), getLoc(), "expected comma"))
  4306. return true;
  4307. // Eat the comma
  4308. Lex();
  4309. return false;
  4310. }
  4311. bool AArch64AsmParser::parseRegisterInRange(unsigned &Out, unsigned Base,
  4312. unsigned First, unsigned Last) {
  4313. MCRegister Reg;
  4314. SMLoc Start, End;
  4315. if (check(parseRegister(Reg, Start, End), getLoc(), "expected register"))
  4316. return true;
  4317. // Special handling for FP and LR; they aren't linearly after x28 in
  4318. // the registers enum.
  4319. unsigned RangeEnd = Last;
  4320. if (Base == AArch64::X0) {
  4321. if (Last == AArch64::FP) {
  4322. RangeEnd = AArch64::X28;
  4323. if (Reg == AArch64::FP) {
  4324. Out = 29;
  4325. return false;
  4326. }
  4327. }
  4328. if (Last == AArch64::LR) {
  4329. RangeEnd = AArch64::X28;
  4330. if (Reg == AArch64::FP) {
  4331. Out = 29;
  4332. return false;
  4333. } else if (Reg == AArch64::LR) {
  4334. Out = 30;
  4335. return false;
  4336. }
  4337. }
  4338. }
  4339. if (check(Reg < First || Reg > RangeEnd, Start,
  4340. Twine("expected register in range ") +
  4341. AArch64InstPrinter::getRegisterName(First) + " to " +
  4342. AArch64InstPrinter::getRegisterName(Last)))
  4343. return true;
  4344. Out = Reg - Base;
  4345. return false;
  4346. }
  4347. bool AArch64AsmParser::areEqualRegs(const MCParsedAsmOperand &Op1,
  4348. const MCParsedAsmOperand &Op2) const {
  4349. auto &AOp1 = static_cast<const AArch64Operand&>(Op1);
  4350. auto &AOp2 = static_cast<const AArch64Operand&>(Op2);
  4351. if (AOp1.isVectorList() && AOp2.isVectorList())
  4352. return AOp1.getVectorListCount() == AOp2.getVectorListCount() &&
  4353. AOp1.getVectorListStart() == AOp2.getVectorListStart() &&
  4354. AOp1.getVectorListStride() == AOp2.getVectorListStride();
  4355. if (!AOp1.isReg() || !AOp2.isReg())
  4356. return false;
  4357. if (AOp1.getRegEqualityTy() == RegConstraintEqualityTy::EqualsReg &&
  4358. AOp2.getRegEqualityTy() == RegConstraintEqualityTy::EqualsReg)
  4359. return MCTargetAsmParser::areEqualRegs(Op1, Op2);
  4360. assert(AOp1.isScalarReg() && AOp2.isScalarReg() &&
  4361. "Testing equality of non-scalar registers not supported");
  4362. // Check if a registers match their sub/super register classes.
  4363. if (AOp1.getRegEqualityTy() == EqualsSuperReg)
  4364. return getXRegFromWReg(Op1.getReg()) == Op2.getReg();
  4365. if (AOp1.getRegEqualityTy() == EqualsSubReg)
  4366. return getWRegFromXReg(Op1.getReg()) == Op2.getReg();
  4367. if (AOp2.getRegEqualityTy() == EqualsSuperReg)
  4368. return getXRegFromWReg(Op2.getReg()) == Op1.getReg();
  4369. if (AOp2.getRegEqualityTy() == EqualsSubReg)
  4370. return getWRegFromXReg(Op2.getReg()) == Op1.getReg();
  4371. return false;
  4372. }
/// ParseInstruction - Parse an AArch64 instruction mnemonic followed by its
/// operands.
///
/// Splits the mnemonic on '.' into tokens, handles the b<cond>/b.<cond>
/// aliases, the .req directive, and the sys/sysp aliases, then reads the
/// comma-separated operand list. \returns true on failure.
bool AArch64AsmParser::ParseInstruction(ParseInstructionInfo &Info,
                                        StringRef Name, SMLoc NameLoc,
                                        OperandVector &Operands) {
  // Canonicalize the condensed "b<cond>" spellings to "b.<cond>" before
  // splitting the mnemonic on '.'.
  Name = StringSwitch<StringRef>(Name.lower())
             .Case("beq", "b.eq")
             .Case("bne", "b.ne")
             .Case("bhs", "b.hs")
             .Case("bcs", "b.cs")
             .Case("blo", "b.lo")
             .Case("bcc", "b.cc")
             .Case("bmi", "b.mi")
             .Case("bpl", "b.pl")
             .Case("bvs", "b.vs")
             .Case("bvc", "b.vc")
             .Case("bhi", "b.hi")
             .Case("bls", "b.ls")
             .Case("bge", "b.ge")
             .Case("blt", "b.lt")
             .Case("bgt", "b.gt")
             .Case("ble", "b.le")
             .Case("bal", "b.al")
             .Case("bnv", "b.nv")
             .Default(Name);

  // First check for the AArch64-specific .req directive.
  if (getTok().is(AsmToken::Identifier) &&
      getTok().getIdentifier().lower() == ".req") {
    parseDirectiveReq(Name, NameLoc);
    // We always return 'error' for this, as we're done with this
    // statement and don't need to match the 'instruction."
    return true;
  }

  // Create the leading tokens for the mnemonic, split by '.' characters.
  size_t Start = 0, Next = Name.find('.');
  StringRef Head = Name.slice(Start, Next);

  // IC, DC, AT, TLBI and Prediction invalidation instructions are aliases for
  // the SYS instruction.
  if (Head == "ic" || Head == "dc" || Head == "at" || Head == "tlbi" ||
      Head == "cfp" || Head == "dvp" || Head == "cpp" || Head == "cosp")
    return parseSysAlias(Head, NameLoc, Operands);

  // TLBIP instructions are aliases for the SYSP instruction.
  if (Head == "tlbip")
    return parseSyspAlias(Head, NameLoc, Operands);

  Operands.push_back(AArch64Operand::CreateToken(Head, NameLoc, getContext()));
  Mnemonic = Head;

  // Handle condition codes for a branch mnemonic
  if ((Head == "b" || Head == "bc") && Next != StringRef::npos) {
    Start = Next;
    Next = Name.find('.', Start + 1);
    Head = Name.slice(Start + 1, Next);

    SMLoc SuffixLoc = SMLoc::getFromPointer(NameLoc.getPointer() +
                                            (Head.data() - Name.data()));
    std::string Suggestion;
    AArch64CC::CondCode CC = parseCondCodeString(Head, Suggestion);
    if (CC == AArch64CC::Invalid) {
      std::string Msg = "invalid condition code";
      if (!Suggestion.empty())
        Msg += ", did you mean " + Suggestion + "?";
      return Error(SuffixLoc, Msg);
    }
    Operands.push_back(AArch64Operand::CreateToken(".", SuffixLoc, getContext(),
                                                   /*IsSuffix=*/true));
    Operands.push_back(
        AArch64Operand::CreateCondCode(CC, NameLoc, NameLoc, getContext()));
  }

  // Add the remaining tokens in the mnemonic.
  while (Next != StringRef::npos) {
    Start = Next;
    Next = Name.find('.', Start + 1);
    Head = Name.slice(Start, Next);
    SMLoc SuffixLoc = SMLoc::getFromPointer(NameLoc.getPointer() +
                                            (Head.data() - Name.data()) + 1);
    Operands.push_back(AArch64Operand::CreateToken(
        Head, SuffixLoc, getContext(), /*IsSuffix=*/true));
  }

  // Conditional compare instructions have a Condition Code operand, which needs
  // to be parsed and an immediate operand created.
  bool condCodeFourthOperand =
      (Head == "ccmp" || Head == "ccmn" || Head == "fccmp" ||
       Head == "fccmpe" || Head == "fcsel" || Head == "csel" ||
       Head == "csinc" || Head == "csinv" || Head == "csneg");

  // These instructions are aliases to some of the conditional select
  // instructions. However, the condition code is inverted in the aliased
  // instruction.
  //
  // FIXME: Is this the correct way to handle these? Or should the parser
  // generate the aliased instructions directly?
  bool condCodeSecondOperand = (Head == "cset" || Head == "csetm");
  bool condCodeThirdOperand =
      (Head == "cinc" || Head == "cinv" || Head == "cneg");

  // Read the remaining operands.
  if (getLexer().isNot(AsmToken::EndOfStatement)) {
    unsigned N = 1;
    do {
      // Parse and remember the operand. The N-th operand position decides
      // whether a condition code (possibly inverted) is expected there.
      if (parseOperand(Operands, (N == 4 && condCodeFourthOperand) ||
                                     (N == 3 && condCodeThirdOperand) ||
                                     (N == 2 && condCodeSecondOperand),
                       condCodeSecondOperand || condCodeThirdOperand)) {
        return true;
      }

      // After successfully parsing some operands there are three special cases
      // to consider (i.e. notional operands not separated by commas). Two are
      // due to memory specifiers:
      //  + An RBrac will end an address for load/store/prefetch
      //  + An '!' will indicate a pre-indexed operation.
      //
      // And a further case is '}', which ends a group of tokens specifying the
      // SME accumulator array 'ZA' or tile vector, i.e.
      //
      //  '{ ZA }' or '{ <ZAt><HV>.<BHSDQ>[<Wv>, #<imm>] }'
      //
      // It's someone else's responsibility to make sure these tokens are sane
      // in the given context!

      if (parseOptionalToken(AsmToken::RBrac))
        Operands.push_back(
            AArch64Operand::CreateToken("]", getLoc(), getContext()));
      if (parseOptionalToken(AsmToken::Exclaim))
        Operands.push_back(
            AArch64Operand::CreateToken("!", getLoc(), getContext()));
      if (parseOptionalToken(AsmToken::RCurly))
        Operands.push_back(
            AArch64Operand::CreateToken("}", getLoc(), getContext()));

      ++N;
    } while (parseOptionalToken(AsmToken::Comma));
  }

  if (parseToken(AsmToken::EndOfStatement, "unexpected token in argument list"))
    return true;

  return false;
}
  4504. static inline bool isMatchingOrAlias(unsigned ZReg, unsigned Reg) {
  4505. assert((ZReg >= AArch64::Z0) && (ZReg <= AArch64::Z31));
  4506. return (ZReg == ((Reg - AArch64::B0) + AArch64::Z0)) ||
  4507. (ZReg == ((Reg - AArch64::H0) + AArch64::Z0)) ||
  4508. (ZReg == ((Reg - AArch64::S0) + AArch64::Z0)) ||
  4509. (ZReg == ((Reg - AArch64::D0) + AArch64::Z0)) ||
  4510. (ZReg == ((Reg - AArch64::Q0) + AArch64::Z0)) ||
  4511. (ZReg == ((Reg - AArch64::Z0) + AArch64::Z0));
  4512. }
  4513. // FIXME: This entire function is a giant hack to provide us with decent
  4514. // operand range validation/diagnostics until TableGen/MC can be extended
  4515. // to support autogeneration of this kind of validation.
  4516. bool AArch64AsmParser::validateInstruction(MCInst &Inst, SMLoc &IDLoc,
  4517. SmallVectorImpl<SMLoc> &Loc) {
  4518. const MCRegisterInfo *RI = getContext().getRegisterInfo();
  4519. const MCInstrDesc &MCID = MII.get(Inst.getOpcode());
  4520. // A prefix only applies to the instruction following it. Here we extract
  4521. // prefix information for the next instruction before validating the current
  4522. // one so that in the case of failure we don't erronously continue using the
  4523. // current prefix.
  4524. PrefixInfo Prefix = NextPrefix;
  4525. NextPrefix = PrefixInfo::CreateFromInst(Inst, MCID.TSFlags);
  4526. // Before validating the instruction in isolation we run through the rules
  4527. // applicable when it follows a prefix instruction.
  4528. // NOTE: brk & hlt can be prefixed but require no additional validation.
  4529. if (Prefix.isActive() &&
  4530. (Inst.getOpcode() != AArch64::BRK) &&
  4531. (Inst.getOpcode() != AArch64::HLT)) {
  4532. // Prefixed intructions must have a destructive operand.
  4533. if ((MCID.TSFlags & AArch64::DestructiveInstTypeMask) ==
  4534. AArch64::NotDestructive)
  4535. return Error(IDLoc, "instruction is unpredictable when following a"
  4536. " movprfx, suggest replacing movprfx with mov");
  4537. // Destination operands must match.
  4538. if (Inst.getOperand(0).getReg() != Prefix.getDstReg())
  4539. return Error(Loc[0], "instruction is unpredictable when following a"
  4540. " movprfx writing to a different destination");
  4541. // Destination operand must not be used in any other location.
  4542. for (unsigned i = 1; i < Inst.getNumOperands(); ++i) {
  4543. if (Inst.getOperand(i).isReg() &&
  4544. (MCID.getOperandConstraint(i, MCOI::TIED_TO) == -1) &&
  4545. isMatchingOrAlias(Prefix.getDstReg(), Inst.getOperand(i).getReg()))
  4546. return Error(Loc[0], "instruction is unpredictable when following a"
  4547. " movprfx and destination also used as non-destructive"
  4548. " source");
  4549. }
  4550. auto PPRRegClass = AArch64MCRegisterClasses[AArch64::PPRRegClassID];
  4551. if (Prefix.isPredicated()) {
  4552. int PgIdx = -1;
  4553. // Find the instructions general predicate.
  4554. for (unsigned i = 1; i < Inst.getNumOperands(); ++i)
  4555. if (Inst.getOperand(i).isReg() &&
  4556. PPRRegClass.contains(Inst.getOperand(i).getReg())) {
  4557. PgIdx = i;
  4558. break;
  4559. }
  4560. // Instruction must be predicated if the movprfx is predicated.
  4561. if (PgIdx == -1 ||
  4562. (MCID.TSFlags & AArch64::ElementSizeMask) == AArch64::ElementSizeNone)
  4563. return Error(IDLoc, "instruction is unpredictable when following a"
  4564. " predicated movprfx, suggest using unpredicated movprfx");
  4565. // Instruction must use same general predicate as the movprfx.
  4566. if (Inst.getOperand(PgIdx).getReg() != Prefix.getPgReg())
  4567. return Error(IDLoc, "instruction is unpredictable when following a"
  4568. " predicated movprfx using a different general predicate");
  4569. // Instruction element type must match the movprfx.
  4570. if ((MCID.TSFlags & AArch64::ElementSizeMask) != Prefix.getElementSize())
  4571. return Error(IDLoc, "instruction is unpredictable when following a"
  4572. " predicated movprfx with a different element size");
  4573. }
  4574. }
  4575. // Check for indexed addressing modes w/ the base register being the
  4576. // same as a destination/source register or pair load where
  4577. // the Rt == Rt2. All of those are undefined behaviour.
  4578. switch (Inst.getOpcode()) {
  4579. case AArch64::LDPSWpre:
  4580. case AArch64::LDPWpost:
  4581. case AArch64::LDPWpre:
  4582. case AArch64::LDPXpost:
  4583. case AArch64::LDPXpre: {
  4584. unsigned Rt = Inst.getOperand(1).getReg();
  4585. unsigned Rt2 = Inst.getOperand(2).getReg();
  4586. unsigned Rn = Inst.getOperand(3).getReg();
  4587. if (RI->isSubRegisterEq(Rn, Rt))
  4588. return Error(Loc[0], "unpredictable LDP instruction, writeback base "
  4589. "is also a destination");
  4590. if (RI->isSubRegisterEq(Rn, Rt2))
  4591. return Error(Loc[1], "unpredictable LDP instruction, writeback base "
  4592. "is also a destination");
  4593. [[fallthrough]];
  4594. }
  4595. case AArch64::LDPDi:
  4596. case AArch64::LDPQi:
  4597. case AArch64::LDPSi:
  4598. case AArch64::LDPSWi:
  4599. case AArch64::LDPWi:
  4600. case AArch64::LDPXi: {
  4601. unsigned Rt = Inst.getOperand(0).getReg();
  4602. unsigned Rt2 = Inst.getOperand(1).getReg();
  4603. if (Rt == Rt2)
  4604. return Error(Loc[1], "unpredictable LDP instruction, Rt2==Rt");
  4605. break;
  4606. }
  4607. case AArch64::LDPDpost:
  4608. case AArch64::LDPDpre:
  4609. case AArch64::LDPQpost:
  4610. case AArch64::LDPQpre:
  4611. case AArch64::LDPSpost:
  4612. case AArch64::LDPSpre:
  4613. case AArch64::LDPSWpost: {
  4614. unsigned Rt = Inst.getOperand(1).getReg();
  4615. unsigned Rt2 = Inst.getOperand(2).getReg();
  4616. if (Rt == Rt2)
  4617. return Error(Loc[1], "unpredictable LDP instruction, Rt2==Rt");
  4618. break;
  4619. }
  4620. case AArch64::STPDpost:
  4621. case AArch64::STPDpre:
  4622. case AArch64::STPQpost:
  4623. case AArch64::STPQpre:
  4624. case AArch64::STPSpost:
  4625. case AArch64::STPSpre:
  4626. case AArch64::STPWpost:
  4627. case AArch64::STPWpre:
  4628. case AArch64::STPXpost:
  4629. case AArch64::STPXpre: {
  4630. unsigned Rt = Inst.getOperand(1).getReg();
  4631. unsigned Rt2 = Inst.getOperand(2).getReg();
  4632. unsigned Rn = Inst.getOperand(3).getReg();
  4633. if (RI->isSubRegisterEq(Rn, Rt))
  4634. return Error(Loc[0], "unpredictable STP instruction, writeback base "
  4635. "is also a source");
  4636. if (RI->isSubRegisterEq(Rn, Rt2))
  4637. return Error(Loc[1], "unpredictable STP instruction, writeback base "
  4638. "is also a source");
  4639. break;
  4640. }
  4641. case AArch64::LDRBBpre:
  4642. case AArch64::LDRBpre:
  4643. case AArch64::LDRHHpre:
  4644. case AArch64::LDRHpre:
  4645. case AArch64::LDRSBWpre:
  4646. case AArch64::LDRSBXpre:
  4647. case AArch64::LDRSHWpre:
  4648. case AArch64::LDRSHXpre:
  4649. case AArch64::LDRSWpre:
  4650. case AArch64::LDRWpre:
  4651. case AArch64::LDRXpre:
  4652. case AArch64::LDRBBpost:
  4653. case AArch64::LDRBpost:
  4654. case AArch64::LDRHHpost:
  4655. case AArch64::LDRHpost:
  4656. case AArch64::LDRSBWpost:
  4657. case AArch64::LDRSBXpost:
  4658. case AArch64::LDRSHWpost:
  4659. case AArch64::LDRSHXpost:
  4660. case AArch64::LDRSWpost:
  4661. case AArch64::LDRWpost:
  4662. case AArch64::LDRXpost: {
  4663. unsigned Rt = Inst.getOperand(1).getReg();
  4664. unsigned Rn = Inst.getOperand(2).getReg();
  4665. if (RI->isSubRegisterEq(Rn, Rt))
  4666. return Error(Loc[0], "unpredictable LDR instruction, writeback base "
  4667. "is also a source");
  4668. break;
  4669. }
  4670. case AArch64::STRBBpost:
  4671. case AArch64::STRBpost:
  4672. case AArch64::STRHHpost:
  4673. case AArch64::STRHpost:
  4674. case AArch64::STRWpost:
  4675. case AArch64::STRXpost:
  4676. case AArch64::STRBBpre:
  4677. case AArch64::STRBpre:
  4678. case AArch64::STRHHpre:
  4679. case AArch64::STRHpre:
  4680. case AArch64::STRWpre:
  4681. case AArch64::STRXpre: {
  4682. unsigned Rt = Inst.getOperand(1).getReg();
  4683. unsigned Rn = Inst.getOperand(2).getReg();
  4684. if (RI->isSubRegisterEq(Rn, Rt))
  4685. return Error(Loc[0], "unpredictable STR instruction, writeback base "
  4686. "is also a source");
  4687. break;
  4688. }
  4689. case AArch64::STXRB:
  4690. case AArch64::STXRH:
  4691. case AArch64::STXRW:
  4692. case AArch64::STXRX:
  4693. case AArch64::STLXRB:
  4694. case AArch64::STLXRH:
  4695. case AArch64::STLXRW:
  4696. case AArch64::STLXRX: {
  4697. unsigned Rs = Inst.getOperand(0).getReg();
  4698. unsigned Rt = Inst.getOperand(1).getReg();
  4699. unsigned Rn = Inst.getOperand(2).getReg();
  4700. if (RI->isSubRegisterEq(Rt, Rs) ||
  4701. (RI->isSubRegisterEq(Rn, Rs) && Rn != AArch64::SP))
  4702. return Error(Loc[0],
  4703. "unpredictable STXR instruction, status is also a source");
  4704. break;
  4705. }
  4706. case AArch64::STXPW:
  4707. case AArch64::STXPX:
  4708. case AArch64::STLXPW:
  4709. case AArch64::STLXPX: {
  4710. unsigned Rs = Inst.getOperand(0).getReg();
  4711. unsigned Rt1 = Inst.getOperand(1).getReg();
  4712. unsigned Rt2 = Inst.getOperand(2).getReg();
  4713. unsigned Rn = Inst.getOperand(3).getReg();
  4714. if (RI->isSubRegisterEq(Rt1, Rs) || RI->isSubRegisterEq(Rt2, Rs) ||
  4715. (RI->isSubRegisterEq(Rn, Rs) && Rn != AArch64::SP))
  4716. return Error(Loc[0],
  4717. "unpredictable STXP instruction, status is also a source");
  4718. break;
  4719. }
  4720. case AArch64::LDRABwriteback:
  4721. case AArch64::LDRAAwriteback: {
  4722. unsigned Xt = Inst.getOperand(0).getReg();
  4723. unsigned Xn = Inst.getOperand(1).getReg();
  4724. if (Xt == Xn)
  4725. return Error(Loc[0],
  4726. "unpredictable LDRA instruction, writeback base"
  4727. " is also a destination");
  4728. break;
  4729. }
  4730. }
  4731. // Check v8.8-A memops instructions.
  4732. switch (Inst.getOpcode()) {
  4733. case AArch64::CPYFP:
  4734. case AArch64::CPYFPWN:
  4735. case AArch64::CPYFPRN:
  4736. case AArch64::CPYFPN:
  4737. case AArch64::CPYFPWT:
  4738. case AArch64::CPYFPWTWN:
  4739. case AArch64::CPYFPWTRN:
  4740. case AArch64::CPYFPWTN:
  4741. case AArch64::CPYFPRT:
  4742. case AArch64::CPYFPRTWN:
  4743. case AArch64::CPYFPRTRN:
  4744. case AArch64::CPYFPRTN:
  4745. case AArch64::CPYFPT:
  4746. case AArch64::CPYFPTWN:
  4747. case AArch64::CPYFPTRN:
  4748. case AArch64::CPYFPTN:
  4749. case AArch64::CPYFM:
  4750. case AArch64::CPYFMWN:
  4751. case AArch64::CPYFMRN:
  4752. case AArch64::CPYFMN:
  4753. case AArch64::CPYFMWT:
  4754. case AArch64::CPYFMWTWN:
  4755. case AArch64::CPYFMWTRN:
  4756. case AArch64::CPYFMWTN:
  4757. case AArch64::CPYFMRT:
  4758. case AArch64::CPYFMRTWN:
  4759. case AArch64::CPYFMRTRN:
  4760. case AArch64::CPYFMRTN:
  4761. case AArch64::CPYFMT:
  4762. case AArch64::CPYFMTWN:
  4763. case AArch64::CPYFMTRN:
  4764. case AArch64::CPYFMTN:
  4765. case AArch64::CPYFE:
  4766. case AArch64::CPYFEWN:
  4767. case AArch64::CPYFERN:
  4768. case AArch64::CPYFEN:
  4769. case AArch64::CPYFEWT:
  4770. case AArch64::CPYFEWTWN:
  4771. case AArch64::CPYFEWTRN:
  4772. case AArch64::CPYFEWTN:
  4773. case AArch64::CPYFERT:
  4774. case AArch64::CPYFERTWN:
  4775. case AArch64::CPYFERTRN:
  4776. case AArch64::CPYFERTN:
  4777. case AArch64::CPYFET:
  4778. case AArch64::CPYFETWN:
  4779. case AArch64::CPYFETRN:
  4780. case AArch64::CPYFETN:
  4781. case AArch64::CPYP:
  4782. case AArch64::CPYPWN:
  4783. case AArch64::CPYPRN:
  4784. case AArch64::CPYPN:
  4785. case AArch64::CPYPWT:
  4786. case AArch64::CPYPWTWN:
  4787. case AArch64::CPYPWTRN:
  4788. case AArch64::CPYPWTN:
  4789. case AArch64::CPYPRT:
  4790. case AArch64::CPYPRTWN:
  4791. case AArch64::CPYPRTRN:
  4792. case AArch64::CPYPRTN:
  4793. case AArch64::CPYPT:
  4794. case AArch64::CPYPTWN:
  4795. case AArch64::CPYPTRN:
  4796. case AArch64::CPYPTN:
  4797. case AArch64::CPYM:
  4798. case AArch64::CPYMWN:
  4799. case AArch64::CPYMRN:
  4800. case AArch64::CPYMN:
  4801. case AArch64::CPYMWT:
  4802. case AArch64::CPYMWTWN:
  4803. case AArch64::CPYMWTRN:
  4804. case AArch64::CPYMWTN:
  4805. case AArch64::CPYMRT:
  4806. case AArch64::CPYMRTWN:
  4807. case AArch64::CPYMRTRN:
  4808. case AArch64::CPYMRTN:
  4809. case AArch64::CPYMT:
  4810. case AArch64::CPYMTWN:
  4811. case AArch64::CPYMTRN:
  4812. case AArch64::CPYMTN:
  4813. case AArch64::CPYE:
  4814. case AArch64::CPYEWN:
  4815. case AArch64::CPYERN:
  4816. case AArch64::CPYEN:
  4817. case AArch64::CPYEWT:
  4818. case AArch64::CPYEWTWN:
  4819. case AArch64::CPYEWTRN:
  4820. case AArch64::CPYEWTN:
  4821. case AArch64::CPYERT:
  4822. case AArch64::CPYERTWN:
  4823. case AArch64::CPYERTRN:
  4824. case AArch64::CPYERTN:
  4825. case AArch64::CPYET:
  4826. case AArch64::CPYETWN:
  4827. case AArch64::CPYETRN:
  4828. case AArch64::CPYETN: {
  4829. unsigned Xd_wb = Inst.getOperand(0).getReg();
  4830. unsigned Xs_wb = Inst.getOperand(1).getReg();
  4831. unsigned Xn_wb = Inst.getOperand(2).getReg();
  4832. unsigned Xd = Inst.getOperand(3).getReg();
  4833. unsigned Xs = Inst.getOperand(4).getReg();
  4834. unsigned Xn = Inst.getOperand(5).getReg();
  4835. if (Xd_wb != Xd)
  4836. return Error(Loc[0],
  4837. "invalid CPY instruction, Xd_wb and Xd do not match");
  4838. if (Xs_wb != Xs)
  4839. return Error(Loc[0],
  4840. "invalid CPY instruction, Xs_wb and Xs do not match");
  4841. if (Xn_wb != Xn)
  4842. return Error(Loc[0],
  4843. "invalid CPY instruction, Xn_wb and Xn do not match");
  4844. if (Xd == Xs)
  4845. return Error(Loc[0], "invalid CPY instruction, destination and source"
  4846. " registers are the same");
  4847. if (Xd == Xn)
  4848. return Error(Loc[0], "invalid CPY instruction, destination and size"
  4849. " registers are the same");
  4850. if (Xs == Xn)
  4851. return Error(Loc[0], "invalid CPY instruction, source and size"
  4852. " registers are the same");
  4853. break;
  4854. }
  4855. case AArch64::SETP:
  4856. case AArch64::SETPT:
  4857. case AArch64::SETPN:
  4858. case AArch64::SETPTN:
  4859. case AArch64::SETM:
  4860. case AArch64::SETMT:
  4861. case AArch64::SETMN:
  4862. case AArch64::SETMTN:
  4863. case AArch64::SETE:
  4864. case AArch64::SETET:
  4865. case AArch64::SETEN:
  4866. case AArch64::SETETN:
  4867. case AArch64::SETGP:
  4868. case AArch64::SETGPT:
  4869. case AArch64::SETGPN:
  4870. case AArch64::SETGPTN:
  4871. case AArch64::SETGM:
  4872. case AArch64::SETGMT:
  4873. case AArch64::SETGMN:
  4874. case AArch64::SETGMTN:
  4875. case AArch64::MOPSSETGE:
  4876. case AArch64::MOPSSETGET:
  4877. case AArch64::MOPSSETGEN:
  4878. case AArch64::MOPSSETGETN: {
  4879. unsigned Xd_wb = Inst.getOperand(0).getReg();
  4880. unsigned Xn_wb = Inst.getOperand(1).getReg();
  4881. unsigned Xd = Inst.getOperand(2).getReg();
  4882. unsigned Xn = Inst.getOperand(3).getReg();
  4883. unsigned Xm = Inst.getOperand(4).getReg();
  4884. if (Xd_wb != Xd)
  4885. return Error(Loc[0],
  4886. "invalid SET instruction, Xd_wb and Xd do not match");
  4887. if (Xn_wb != Xn)
  4888. return Error(Loc[0],
  4889. "invalid SET instruction, Xn_wb and Xn do not match");
  4890. if (Xd == Xn)
  4891. return Error(Loc[0], "invalid SET instruction, destination and size"
  4892. " registers are the same");
  4893. if (Xd == Xm)
  4894. return Error(Loc[0], "invalid SET instruction, destination and source"
  4895. " registers are the same");
  4896. if (Xn == Xm)
  4897. return Error(Loc[0], "invalid SET instruction, source and size"
  4898. " registers are the same");
  4899. break;
  4900. }
  4901. }
  4902. // Now check immediate ranges. Separate from the above as there is overlap
  4903. // in the instructions being checked and this keeps the nested conditionals
  4904. // to a minimum.
  4905. switch (Inst.getOpcode()) {
  4906. case AArch64::ADDSWri:
  4907. case AArch64::ADDSXri:
  4908. case AArch64::ADDWri:
  4909. case AArch64::ADDXri:
  4910. case AArch64::SUBSWri:
  4911. case AArch64::SUBSXri:
  4912. case AArch64::SUBWri:
  4913. case AArch64::SUBXri: {
  4914. // Annoyingly we can't do this in the isAddSubImm predicate, so there is
  4915. // some slight duplication here.
  4916. if (Inst.getOperand(2).isExpr()) {
  4917. const MCExpr *Expr = Inst.getOperand(2).getExpr();
  4918. AArch64MCExpr::VariantKind ELFRefKind;
  4919. MCSymbolRefExpr::VariantKind DarwinRefKind;
  4920. int64_t Addend;
  4921. if (classifySymbolRef(Expr, ELFRefKind, DarwinRefKind, Addend)) {
  4922. // Only allow these with ADDXri.
  4923. if ((DarwinRefKind == MCSymbolRefExpr::VK_PAGEOFF ||
  4924. DarwinRefKind == MCSymbolRefExpr::VK_TLVPPAGEOFF) &&
  4925. Inst.getOpcode() == AArch64::ADDXri)
  4926. return false;
  4927. // Only allow these with ADDXri/ADDWri
  4928. if ((ELFRefKind == AArch64MCExpr::VK_LO12 ||
  4929. ELFRefKind == AArch64MCExpr::VK_DTPREL_HI12 ||
  4930. ELFRefKind == AArch64MCExpr::VK_DTPREL_LO12 ||
  4931. ELFRefKind == AArch64MCExpr::VK_DTPREL_LO12_NC ||
  4932. ELFRefKind == AArch64MCExpr::VK_TPREL_HI12 ||
  4933. ELFRefKind == AArch64MCExpr::VK_TPREL_LO12 ||
  4934. ELFRefKind == AArch64MCExpr::VK_TPREL_LO12_NC ||
  4935. ELFRefKind == AArch64MCExpr::VK_TLSDESC_LO12 ||
  4936. ELFRefKind == AArch64MCExpr::VK_SECREL_LO12 ||
  4937. ELFRefKind == AArch64MCExpr::VK_SECREL_HI12) &&
  4938. (Inst.getOpcode() == AArch64::ADDXri ||
  4939. Inst.getOpcode() == AArch64::ADDWri))
  4940. return false;
  4941. // Don't allow symbol refs in the immediate field otherwise
  4942. // Note: Loc.back() may be Loc[1] or Loc[2] depending on the number of
  4943. // operands of the original instruction (i.e. 'add w0, w1, borked' vs
  4944. // 'cmp w0, 'borked')
  4945. return Error(Loc.back(), "invalid immediate expression");
  4946. }
  4947. // We don't validate more complex expressions here
  4948. }
  4949. return false;
  4950. }
  4951. default:
  4952. return false;
  4953. }
  4954. }
// Forward declaration; used by showMatchError (Match_MnemonicFail case) to
// append a "did you mean ...?" suggestion for an unrecognized mnemonic.
// NOTE(review): presumably defined later in this file by the tablegen-emitted
// spell-check support — confirm against the generated matcher include.
4955. static std::string AArch64MnemonicSpellCheck(StringRef S,
4956. const FeatureBitset &FBS,
4957. unsigned VariantID = 0);
  4958. bool AArch64AsmParser::showMatchError(SMLoc Loc, unsigned ErrCode,
  4959. uint64_t ErrorInfo,
  4960. OperandVector &Operands) {
  4961. switch (ErrCode) {
  4962. case Match_InvalidTiedOperand: {
  4963. auto &Op = static_cast<const AArch64Operand &>(*Operands[ErrorInfo]);
  4964. if (Op.isVectorList())
  4965. return Error(Loc, "operand must match destination register list");
  4966. assert(Op.isReg() && "Unexpected operand type");
  4967. switch (Op.getRegEqualityTy()) {
  4968. case RegConstraintEqualityTy::EqualsSubReg:
  4969. return Error(Loc, "operand must be 64-bit form of destination register");
  4970. case RegConstraintEqualityTy::EqualsSuperReg:
  4971. return Error(Loc, "operand must be 32-bit form of destination register");
  4972. case RegConstraintEqualityTy::EqualsReg:
  4973. return Error(Loc, "operand must match destination register");
  4974. }
  4975. llvm_unreachable("Unknown RegConstraintEqualityTy");
  4976. }
  4977. case Match_MissingFeature:
  4978. return Error(Loc,
  4979. "instruction requires a CPU feature not currently enabled");
  4980. case Match_InvalidOperand:
  4981. return Error(Loc, "invalid operand for instruction");
  4982. case Match_InvalidSuffix:
  4983. return Error(Loc, "invalid type suffix for instruction");
  4984. case Match_InvalidCondCode:
  4985. return Error(Loc, "expected AArch64 condition code");
  4986. case Match_AddSubRegExtendSmall:
  4987. return Error(Loc,
  4988. "expected '[su]xt[bhw]' with optional integer in range [0, 4]");
  4989. case Match_AddSubRegExtendLarge:
  4990. return Error(Loc,
  4991. "expected 'sxtx' 'uxtx' or 'lsl' with optional integer in range [0, 4]");
  4992. case Match_AddSubSecondSource:
  4993. return Error(Loc,
  4994. "expected compatible register, symbol or integer in range [0, 4095]");
  4995. case Match_LogicalSecondSource:
  4996. return Error(Loc, "expected compatible register or logical immediate");
  4997. case Match_InvalidMovImm32Shift:
  4998. return Error(Loc, "expected 'lsl' with optional integer 0 or 16");
  4999. case Match_InvalidMovImm64Shift:
  5000. return Error(Loc, "expected 'lsl' with optional integer 0, 16, 32 or 48");
  5001. case Match_AddSubRegShift32:
  5002. return Error(Loc,
  5003. "expected 'lsl', 'lsr' or 'asr' with optional integer in range [0, 31]");
  5004. case Match_AddSubRegShift64:
  5005. return Error(Loc,
  5006. "expected 'lsl', 'lsr' or 'asr' with optional integer in range [0, 63]");
  5007. case Match_InvalidFPImm:
  5008. return Error(Loc,
  5009. "expected compatible register or floating-point constant");
  5010. case Match_InvalidMemoryIndexedSImm6:
  5011. return Error(Loc, "index must be an integer in range [-32, 31].");
  5012. case Match_InvalidMemoryIndexedSImm5:
  5013. return Error(Loc, "index must be an integer in range [-16, 15].");
  5014. case Match_InvalidMemoryIndexed1SImm4:
  5015. return Error(Loc, "index must be an integer in range [-8, 7].");
  5016. case Match_InvalidMemoryIndexed2SImm4:
  5017. return Error(Loc, "index must be a multiple of 2 in range [-16, 14].");
  5018. case Match_InvalidMemoryIndexed3SImm4:
  5019. return Error(Loc, "index must be a multiple of 3 in range [-24, 21].");
  5020. case Match_InvalidMemoryIndexed4SImm4:
  5021. return Error(Loc, "index must be a multiple of 4 in range [-32, 28].");
  5022. case Match_InvalidMemoryIndexed16SImm4:
  5023. return Error(Loc, "index must be a multiple of 16 in range [-128, 112].");
  5024. case Match_InvalidMemoryIndexed32SImm4:
  5025. return Error(Loc, "index must be a multiple of 32 in range [-256, 224].");
  5026. case Match_InvalidMemoryIndexed1SImm6:
  5027. return Error(Loc, "index must be an integer in range [-32, 31].");
  5028. case Match_InvalidMemoryIndexedSImm8:
  5029. return Error(Loc, "index must be an integer in range [-128, 127].");
  5030. case Match_InvalidMemoryIndexedSImm9:
  5031. return Error(Loc, "index must be an integer in range [-256, 255].");
  5032. case Match_InvalidMemoryIndexed16SImm9:
  5033. return Error(Loc, "index must be a multiple of 16 in range [-4096, 4080].");
  5034. case Match_InvalidMemoryIndexed8SImm10:
  5035. return Error(Loc, "index must be a multiple of 8 in range [-4096, 4088].");
  5036. case Match_InvalidMemoryIndexed4SImm7:
  5037. return Error(Loc, "index must be a multiple of 4 in range [-256, 252].");
  5038. case Match_InvalidMemoryIndexed8SImm7:
  5039. return Error(Loc, "index must be a multiple of 8 in range [-512, 504].");
  5040. case Match_InvalidMemoryIndexed16SImm7:
  5041. return Error(Loc, "index must be a multiple of 16 in range [-1024, 1008].");
  5042. case Match_InvalidMemoryIndexed8UImm5:
  5043. return Error(Loc, "index must be a multiple of 8 in range [0, 248].");
  5044. case Match_InvalidMemoryIndexed8UImm3:
  5045. return Error(Loc, "index must be a multiple of 8 in range [0, 56].");
  5046. case Match_InvalidMemoryIndexed4UImm5:
  5047. return Error(Loc, "index must be a multiple of 4 in range [0, 124].");
  5048. case Match_InvalidMemoryIndexed2UImm5:
  5049. return Error(Loc, "index must be a multiple of 2 in range [0, 62].");
  5050. case Match_InvalidMemoryIndexed8UImm6:
  5051. return Error(Loc, "index must be a multiple of 8 in range [0, 504].");
  5052. case Match_InvalidMemoryIndexed16UImm6:
  5053. return Error(Loc, "index must be a multiple of 16 in range [0, 1008].");
  5054. case Match_InvalidMemoryIndexed4UImm6:
  5055. return Error(Loc, "index must be a multiple of 4 in range [0, 252].");
  5056. case Match_InvalidMemoryIndexed2UImm6:
  5057. return Error(Loc, "index must be a multiple of 2 in range [0, 126].");
  5058. case Match_InvalidMemoryIndexed1UImm6:
  5059. return Error(Loc, "index must be in range [0, 63].");
  5060. case Match_InvalidMemoryWExtend8:
  5061. return Error(Loc,
  5062. "expected 'uxtw' or 'sxtw' with optional shift of #0");
  5063. case Match_InvalidMemoryWExtend16:
  5064. return Error(Loc,
  5065. "expected 'uxtw' or 'sxtw' with optional shift of #0 or #1");
  5066. case Match_InvalidMemoryWExtend32:
  5067. return Error(Loc,
  5068. "expected 'uxtw' or 'sxtw' with optional shift of #0 or #2");
  5069. case Match_InvalidMemoryWExtend64:
  5070. return Error(Loc,
  5071. "expected 'uxtw' or 'sxtw' with optional shift of #0 or #3");
  5072. case Match_InvalidMemoryWExtend128:
  5073. return Error(Loc,
  5074. "expected 'uxtw' or 'sxtw' with optional shift of #0 or #4");
  5075. case Match_InvalidMemoryXExtend8:
  5076. return Error(Loc,
  5077. "expected 'lsl' or 'sxtx' with optional shift of #0");
  5078. case Match_InvalidMemoryXExtend16:
  5079. return Error(Loc,
  5080. "expected 'lsl' or 'sxtx' with optional shift of #0 or #1");
  5081. case Match_InvalidMemoryXExtend32:
  5082. return Error(Loc,
  5083. "expected 'lsl' or 'sxtx' with optional shift of #0 or #2");
  5084. case Match_InvalidMemoryXExtend64:
  5085. return Error(Loc,
  5086. "expected 'lsl' or 'sxtx' with optional shift of #0 or #3");
  5087. case Match_InvalidMemoryXExtend128:
  5088. return Error(Loc,
  5089. "expected 'lsl' or 'sxtx' with optional shift of #0 or #4");
  5090. case Match_InvalidMemoryIndexed1:
  5091. return Error(Loc, "index must be an integer in range [0, 4095].");
  5092. case Match_InvalidMemoryIndexed2:
  5093. return Error(Loc, "index must be a multiple of 2 in range [0, 8190].");
  5094. case Match_InvalidMemoryIndexed4:
  5095. return Error(Loc, "index must be a multiple of 4 in range [0, 16380].");
  5096. case Match_InvalidMemoryIndexed8:
  5097. return Error(Loc, "index must be a multiple of 8 in range [0, 32760].");
  5098. case Match_InvalidMemoryIndexed16:
  5099. return Error(Loc, "index must be a multiple of 16 in range [0, 65520].");
  5100. case Match_InvalidImm0_0:
  5101. return Error(Loc, "immediate must be 0.");
  5102. case Match_InvalidImm0_1:
  5103. return Error(Loc, "immediate must be an integer in range [0, 1].");
  5104. case Match_InvalidImm0_3:
  5105. return Error(Loc, "immediate must be an integer in range [0, 3].");
  5106. case Match_InvalidImm0_7:
  5107. return Error(Loc, "immediate must be an integer in range [0, 7].");
  5108. case Match_InvalidImm0_15:
  5109. return Error(Loc, "immediate must be an integer in range [0, 15].");
  5110. case Match_InvalidImm0_31:
  5111. return Error(Loc, "immediate must be an integer in range [0, 31].");
  5112. case Match_InvalidImm0_63:
  5113. return Error(Loc, "immediate must be an integer in range [0, 63].");
  5114. case Match_InvalidImm0_127:
  5115. return Error(Loc, "immediate must be an integer in range [0, 127].");
  5116. case Match_InvalidImm0_255:
  5117. return Error(Loc, "immediate must be an integer in range [0, 255].");
  5118. case Match_InvalidImm0_65535:
  5119. return Error(Loc, "immediate must be an integer in range [0, 65535].");
  5120. case Match_InvalidImm1_8:
  5121. return Error(Loc, "immediate must be an integer in range [1, 8].");
  5122. case Match_InvalidImm1_16:
  5123. return Error(Loc, "immediate must be an integer in range [1, 16].");
  5124. case Match_InvalidImm1_32:
  5125. return Error(Loc, "immediate must be an integer in range [1, 32].");
  5126. case Match_InvalidImm1_64:
  5127. return Error(Loc, "immediate must be an integer in range [1, 64].");
  5128. case Match_InvalidMemoryIndexedRange2UImm0:
  5129. return Error(Loc, "vector select offset must be the immediate range 0:1.");
  5130. case Match_InvalidMemoryIndexedRange2UImm1:
  5131. return Error(Loc, "vector select offset must be an immediate range of the "
  5132. "form <immf>:<imml>, where the first "
  5133. "immediate is a multiple of 2 in the range [0, 2], and "
  5134. "the second immediate is immf + 1.");
  5135. case Match_InvalidMemoryIndexedRange2UImm2:
  5136. case Match_InvalidMemoryIndexedRange2UImm3:
  5137. return Error(
  5138. Loc,
  5139. "vector select offset must be an immediate range of the form "
  5140. "<immf>:<imml>, "
  5141. "where the first immediate is a multiple of 2 in the range [0, 6] or "
  5142. "[0, 14] "
  5143. "depending on the instruction, and the second immediate is immf + 1.");
  5144. case Match_InvalidMemoryIndexedRange4UImm0:
  5145. return Error(Loc, "vector select offset must be the immediate range 0:3.");
  5146. case Match_InvalidMemoryIndexedRange4UImm1:
  5147. case Match_InvalidMemoryIndexedRange4UImm2:
  5148. return Error(
  5149. Loc,
  5150. "vector select offset must be an immediate range of the form "
  5151. "<immf>:<imml>, "
  5152. "where the first immediate is a multiple of 4 in the range [0, 4] or "
  5153. "[0, 12] "
  5154. "depending on the instruction, and the second immediate is immf + 3.");
  5155. case Match_InvalidSVEAddSubImm8:
  5156. return Error(Loc, "immediate must be an integer in range [0, 255]"
  5157. " with a shift amount of 0");
  5158. case Match_InvalidSVEAddSubImm16:
  5159. case Match_InvalidSVEAddSubImm32:
  5160. case Match_InvalidSVEAddSubImm64:
  5161. return Error(Loc, "immediate must be an integer in range [0, 255] or a "
  5162. "multiple of 256 in range [256, 65280]");
  5163. case Match_InvalidSVECpyImm8:
  5164. return Error(Loc, "immediate must be an integer in range [-128, 255]"
  5165. " with a shift amount of 0");
  5166. case Match_InvalidSVECpyImm16:
  5167. return Error(Loc, "immediate must be an integer in range [-128, 127] or a "
  5168. "multiple of 256 in range [-32768, 65280]");
  5169. case Match_InvalidSVECpyImm32:
  5170. case Match_InvalidSVECpyImm64:
  5171. return Error(Loc, "immediate must be an integer in range [-128, 127] or a "
  5172. "multiple of 256 in range [-32768, 32512]");
  5173. case Match_InvalidIndexRange0_0:
  5174. return Error(Loc, "expected lane specifier '[0]'");
  5175. case Match_InvalidIndexRange1_1:
  5176. return Error(Loc, "expected lane specifier '[1]'");
  5177. case Match_InvalidIndexRange0_15:
  5178. return Error(Loc, "vector lane must be an integer in range [0, 15].");
  5179. case Match_InvalidIndexRange0_7:
  5180. return Error(Loc, "vector lane must be an integer in range [0, 7].");
  5181. case Match_InvalidIndexRange0_3:
  5182. return Error(Loc, "vector lane must be an integer in range [0, 3].");
  5183. case Match_InvalidIndexRange0_1:
  5184. return Error(Loc, "vector lane must be an integer in range [0, 1].");
  5185. case Match_InvalidSVEIndexRange0_63:
  5186. return Error(Loc, "vector lane must be an integer in range [0, 63].");
  5187. case Match_InvalidSVEIndexRange0_31:
  5188. return Error(Loc, "vector lane must be an integer in range [0, 31].");
  5189. case Match_InvalidSVEIndexRange0_15:
  5190. return Error(Loc, "vector lane must be an integer in range [0, 15].");
  5191. case Match_InvalidSVEIndexRange0_7:
  5192. return Error(Loc, "vector lane must be an integer in range [0, 7].");
  5193. case Match_InvalidSVEIndexRange0_3:
  5194. return Error(Loc, "vector lane must be an integer in range [0, 3].");
  5195. case Match_InvalidLabel:
  5196. return Error(Loc, "expected label or encodable integer pc offset");
  5197. case Match_MRS:
  5198. return Error(Loc, "expected readable system register");
  5199. case Match_MSR:
  5200. case Match_InvalidSVCR:
  5201. return Error(Loc, "expected writable system register or pstate");
  5202. case Match_InvalidComplexRotationEven:
  5203. return Error(Loc, "complex rotation must be 0, 90, 180 or 270.");
  5204. case Match_InvalidComplexRotationOdd:
  5205. return Error(Loc, "complex rotation must be 90 or 270.");
  5206. case Match_MnemonicFail: {
  5207. std::string Suggestion = AArch64MnemonicSpellCheck(
  5208. ((AArch64Operand &)*Operands[0]).getToken(),
  5209. ComputeAvailableFeatures(STI->getFeatureBits()));
  5210. return Error(Loc, "unrecognized instruction mnemonic" + Suggestion);
  5211. }
  5212. case Match_InvalidGPR64shifted8:
  5213. return Error(Loc, "register must be x0..x30 or xzr, without shift");
  5214. case Match_InvalidGPR64shifted16:
  5215. return Error(Loc, "register must be x0..x30 or xzr, with required shift 'lsl #1'");
  5216. case Match_InvalidGPR64shifted32:
  5217. return Error(Loc, "register must be x0..x30 or xzr, with required shift 'lsl #2'");
  5218. case Match_InvalidGPR64shifted64:
  5219. return Error(Loc, "register must be x0..x30 or xzr, with required shift 'lsl #3'");
  5220. case Match_InvalidGPR64shifted128:
  5221. return Error(
  5222. Loc, "register must be x0..x30 or xzr, with required shift 'lsl #4'");
  5223. case Match_InvalidGPR64NoXZRshifted8:
  5224. return Error(Loc, "register must be x0..x30 without shift");
  5225. case Match_InvalidGPR64NoXZRshifted16:
  5226. return Error(Loc, "register must be x0..x30 with required shift 'lsl #1'");
  5227. case Match_InvalidGPR64NoXZRshifted32:
  5228. return Error(Loc, "register must be x0..x30 with required shift 'lsl #2'");
  5229. case Match_InvalidGPR64NoXZRshifted64:
  5230. return Error(Loc, "register must be x0..x30 with required shift 'lsl #3'");
  5231. case Match_InvalidGPR64NoXZRshifted128:
  5232. return Error(Loc, "register must be x0..x30 with required shift 'lsl #4'");
  5233. case Match_InvalidZPR32UXTW8:
  5234. case Match_InvalidZPR32SXTW8:
  5235. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, (uxtw|sxtw)'");
  5236. case Match_InvalidZPR32UXTW16:
  5237. case Match_InvalidZPR32SXTW16:
  5238. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, (uxtw|sxtw) #1'");
  5239. case Match_InvalidZPR32UXTW32:
  5240. case Match_InvalidZPR32SXTW32:
  5241. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, (uxtw|sxtw) #2'");
  5242. case Match_InvalidZPR32UXTW64:
  5243. case Match_InvalidZPR32SXTW64:
  5244. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, (uxtw|sxtw) #3'");
  5245. case Match_InvalidZPR64UXTW8:
  5246. case Match_InvalidZPR64SXTW8:
  5247. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, (uxtw|sxtw)'");
  5248. case Match_InvalidZPR64UXTW16:
  5249. case Match_InvalidZPR64SXTW16:
  5250. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, (lsl|uxtw|sxtw) #1'");
  5251. case Match_InvalidZPR64UXTW32:
  5252. case Match_InvalidZPR64SXTW32:
  5253. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, (lsl|uxtw|sxtw) #2'");
  5254. case Match_InvalidZPR64UXTW64:
  5255. case Match_InvalidZPR64SXTW64:
  5256. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, (lsl|uxtw|sxtw) #3'");
  5257. case Match_InvalidZPR32LSL8:
  5258. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s'");
  5259. case Match_InvalidZPR32LSL16:
  5260. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, lsl #1'");
  5261. case Match_InvalidZPR32LSL32:
  5262. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, lsl #2'");
  5263. case Match_InvalidZPR32LSL64:
  5264. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].s, lsl #3'");
  5265. case Match_InvalidZPR64LSL8:
  5266. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d'");
  5267. case Match_InvalidZPR64LSL16:
  5268. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, lsl #1'");
  5269. case Match_InvalidZPR64LSL32:
  5270. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, lsl #2'");
  5271. case Match_InvalidZPR64LSL64:
  5272. return Error(Loc, "invalid shift/extend specified, expected 'z[0..31].d, lsl #3'");
  5273. case Match_InvalidZPR0:
  5274. return Error(Loc, "expected register without element width suffix");
  5275. case Match_InvalidZPR8:
  5276. case Match_InvalidZPR16:
  5277. case Match_InvalidZPR32:
  5278. case Match_InvalidZPR64:
  5279. case Match_InvalidZPR128:
  5280. return Error(Loc, "invalid element width");
  5281. case Match_InvalidZPR_3b8:
  5282. return Error(Loc, "Invalid restricted vector register, expected z0.b..z7.b");
  5283. case Match_InvalidZPR_3b16:
  5284. return Error(Loc, "Invalid restricted vector register, expected z0.h..z7.h");
  5285. case Match_InvalidZPR_3b32:
  5286. return Error(Loc, "Invalid restricted vector register, expected z0.s..z7.s");
  5287. case Match_InvalidZPR_4b8:
  5288. return Error(Loc,
  5289. "Invalid restricted vector register, expected z0.b..z15.b");
  5290. case Match_InvalidZPR_4b16:
  5291. return Error(Loc, "Invalid restricted vector register, expected z0.h..z15.h");
  5292. case Match_InvalidZPR_4b32:
  5293. return Error(Loc, "Invalid restricted vector register, expected z0.s..z15.s");
  5294. case Match_InvalidZPR_4b64:
  5295. return Error(Loc, "Invalid restricted vector register, expected z0.d..z15.d");
  5296. case Match_InvalidSVEPattern:
  5297. return Error(Loc, "invalid predicate pattern");
  5298. case Match_InvalidSVEPredicateAnyReg:
  5299. case Match_InvalidSVEPredicateBReg:
  5300. case Match_InvalidSVEPredicateHReg:
  5301. case Match_InvalidSVEPredicateSReg:
  5302. case Match_InvalidSVEPredicateDReg:
  5303. return Error(Loc, "invalid predicate register.");
  5304. case Match_InvalidSVEPredicate3bAnyReg:
  5305. return Error(Loc, "invalid restricted predicate register, expected p0..p7 (without element suffix)");
  5306. case Match_InvalidSVEPNPredicateB_p8to15Reg:
  5307. case Match_InvalidSVEPNPredicateH_p8to15Reg:
  5308. case Match_InvalidSVEPNPredicateS_p8to15Reg:
  5309. case Match_InvalidSVEPNPredicateD_p8to15Reg:
  5310. return Error(Loc, "Invalid predicate register, expected PN in range "
  5311. "pn8..pn15 with element suffix.");
  5312. case Match_InvalidSVEPNPredicateAny_p8to15Reg:
  5313. return Error(Loc, "invalid restricted predicate-as-counter register "
  5314. "expected pn8..pn15");
  5315. case Match_InvalidSVEPNPredicateBReg:
  5316. case Match_InvalidSVEPNPredicateHReg:
  5317. case Match_InvalidSVEPNPredicateSReg:
  5318. case Match_InvalidSVEPNPredicateDReg:
  5319. return Error(Loc, "Invalid predicate register, expected PN in range "
  5320. "pn0..pn15 with element suffix.");
  5321. case Match_InvalidSVEVecLenSpecifier:
  5322. return Error(Loc, "Invalid vector length specifier, expected VLx2 or VLx4");
  5323. case Match_InvalidSVEPredicateListMul2x8:
  5324. case Match_InvalidSVEPredicateListMul2x16:
  5325. case Match_InvalidSVEPredicateListMul2x32:
  5326. case Match_InvalidSVEPredicateListMul2x64:
  5327. return Error(Loc, "Invalid vector list, expected list with 2 consecutive "
  5328. "predicate registers, where the first vector is a multiple of 2 "
  5329. "and with correct element type");
  5330. case Match_InvalidSVEExactFPImmOperandHalfOne:
  5331. return Error(Loc, "Invalid floating point constant, expected 0.5 or 1.0.");
  5332. case Match_InvalidSVEExactFPImmOperandHalfTwo:
  5333. return Error(Loc, "Invalid floating point constant, expected 0.5 or 2.0.");
  5334. case Match_InvalidSVEExactFPImmOperandZeroOne:
  5335. return Error(Loc, "Invalid floating point constant, expected 0.0 or 1.0.");
  5336. case Match_InvalidMatrixTileVectorH8:
  5337. case Match_InvalidMatrixTileVectorV8:
  5338. return Error(Loc, "invalid matrix operand, expected za0h.b or za0v.b");
  5339. case Match_InvalidMatrixTileVectorH16:
  5340. case Match_InvalidMatrixTileVectorV16:
  5341. return Error(Loc,
  5342. "invalid matrix operand, expected za[0-1]h.h or za[0-1]v.h");
  5343. case Match_InvalidMatrixTileVectorH32:
  5344. case Match_InvalidMatrixTileVectorV32:
  5345. return Error(Loc,
  5346. "invalid matrix operand, expected za[0-3]h.s or za[0-3]v.s");
  5347. case Match_InvalidMatrixTileVectorH64:
  5348. case Match_InvalidMatrixTileVectorV64:
  5349. return Error(Loc,
  5350. "invalid matrix operand, expected za[0-7]h.d or za[0-7]v.d");
  5351. case Match_InvalidMatrixTileVectorH128:
  5352. case Match_InvalidMatrixTileVectorV128:
  5353. return Error(Loc,
  5354. "invalid matrix operand, expected za[0-15]h.q or za[0-15]v.q");
  5355. case Match_InvalidMatrixTile32:
  5356. return Error(Loc, "invalid matrix operand, expected za[0-3].s");
  5357. case Match_InvalidMatrixTile64:
  5358. return Error(Loc, "invalid matrix operand, expected za[0-7].d");
  5359. case Match_InvalidMatrix:
  5360. return Error(Loc, "invalid matrix operand, expected za");
  5361. case Match_InvalidMatrix8:
  5362. return Error(Loc, "invalid matrix operand, expected suffix .b");
  5363. case Match_InvalidMatrix16:
  5364. return Error(Loc, "invalid matrix operand, expected suffix .h");
  5365. case Match_InvalidMatrix32:
  5366. return Error(Loc, "invalid matrix operand, expected suffix .s");
  5367. case Match_InvalidMatrix64:
  5368. return Error(Loc, "invalid matrix operand, expected suffix .d");
  5369. case Match_InvalidMatrixIndexGPR32_12_15:
  5370. return Error(Loc, "operand must be a register in range [w12, w15]");
  5371. case Match_InvalidMatrixIndexGPR32_8_11:
  5372. return Error(Loc, "operand must be a register in range [w8, w11]");
  5373. case Match_InvalidSVEVectorListMul2x8:
  5374. case Match_InvalidSVEVectorListMul2x16:
  5375. case Match_InvalidSVEVectorListMul2x32:
  5376. case Match_InvalidSVEVectorListMul2x64:
  5377. return Error(Loc, "Invalid vector list, expected list with 2 consecutive "
  5378. "SVE vectors, where the first vector is a multiple of 2 "
  5379. "and with matching element types");
  5380. case Match_InvalidSVEVectorListMul4x8:
  5381. case Match_InvalidSVEVectorListMul4x16:
  5382. case Match_InvalidSVEVectorListMul4x32:
  5383. case Match_InvalidSVEVectorListMul4x64:
  5384. return Error(Loc, "Invalid vector list, expected list with 4 consecutive "
  5385. "SVE vectors, where the first vector is a multiple of 4 "
  5386. "and with matching element types");
  5387. case Match_InvalidLookupTable:
  5388. return Error(Loc, "Invalid lookup table, expected zt0");
  5389. case Match_InvalidSVEVectorListStrided2x8:
  5390. case Match_InvalidSVEVectorListStrided2x16:
  5391. case Match_InvalidSVEVectorListStrided2x32:
  5392. case Match_InvalidSVEVectorListStrided2x64:
  5393. return Error(
  5394. Loc,
  5395. "Invalid vector list, expected list with each SVE vector in the list "
  5396. "8 registers apart, and the first register in the range [z0, z7] or "
  5397. "[z16, z23] and with correct element type");
  5398. case Match_InvalidSVEVectorListStrided4x8:
  5399. case Match_InvalidSVEVectorListStrided4x16:
  5400. case Match_InvalidSVEVectorListStrided4x32:
  5401. case Match_InvalidSVEVectorListStrided4x64:
  5402. return Error(
  5403. Loc,
  5404. "Invalid vector list, expected list with each SVE vector in the list "
  5405. "4 registers apart, and the first register in the range [z0, z3] or "
  5406. "[z16, z19] and with correct element type");
  5407. default:
  5408. llvm_unreachable("unexpected error code!");
  5409. }
  5410. }
// Maps a subtarget feature bit index to its printable name; used below when
// building the Match_MissingFeature diagnostic. (Presumably emitted by the
// AsmMatcher tablegen backend into AArch64GenAsmMatcher.inc — confirm.)
static const char *getSubtargetFeatureName(uint64_t Val);
/// Match one fully-parsed instruction against the generated matcher tables
/// and emit it to \p Out on success, or produce a diagnostic on failure.
///
/// Before matching, this hand-rewrites a number of aliases that the tablegen
/// InstAlias machinery cannot express (lsl->ubfm, bfc->bfm, bfi/sbfiz/ubfiz
/// and bfxil/sbfx/ubfx -> *BFM, plus W/X register twiddles for sxt*/uxt*),
/// and applies the Cyclone movi.2d #0 zero-cycle-zeroing workaround.
/// Matching is tried twice: first against the short-form NEON table
/// (variant 1), then the long-form table (variant 0).
///
/// \param IDLoc            Location of the mnemonic, used for diagnostics.
/// \param Opcode           Unused in this body (part of the target-independent
///                         MatchAndEmitInstruction interface).
/// \param Operands         Parsed operand list; index 0 is the mnemonic token.
///                         Mutated in place by the alias rewrites below.
/// \param Out              Streamer that receives the matched MCInst.
/// \param ErrorInfo        On failure, index of the offending operand (or
///                         ~0ULL when unknown).
/// \param MatchingInlineAsm Forwarded to MatchInstructionImpl.
/// \return true on error (diagnostic already emitted), false on success.
bool AArch64AsmParser::MatchAndEmitInstruction(SMLoc IDLoc, unsigned &Opcode,
                                               OperandVector &Operands,
                                               MCStreamer &Out,
                                               uint64_t &ErrorInfo,
                                               bool MatchingInlineAsm) {
  assert(!Operands.empty() && "Unexpect empty operand list!");
  AArch64Operand &Op = static_cast<AArch64Operand &>(*Operands[0]);
  assert(Op.isToken() && "Leading operand should always be a mnemonic!");

  StringRef Tok = Op.getToken();
  unsigned NumOperands = Operands.size();

  if (NumOperands == 4 && Tok == "lsl") {
    // Rewrite "lsl Rd, Rn, #imm" into the equivalent "ubfm Rd, Rn, immr,
    // imms" form; the rotate/width pair depends on the register width.
    AArch64Operand &Op2 = static_cast<AArch64Operand &>(*Operands[2]);
    AArch64Operand &Op3 = static_cast<AArch64Operand &>(*Operands[3]);
    if (Op2.isScalarReg() && Op3.isImm()) {
      const MCConstantExpr *Op3CE = dyn_cast<MCConstantExpr>(Op3.getImm());
      if (Op3CE) {
        uint64_t Op3Val = Op3CE->getValue();
        uint64_t NewOp3Val = 0;
        uint64_t NewOp4Val = 0;
        if (AArch64MCRegisterClasses[AArch64::GPR32allRegClassID].contains(
                Op2.getReg())) {
          // 32-bit: ubfm Wd, Wn, (32 - shift) mod 32, 31 - shift.
          NewOp3Val = (32 - Op3Val) & 0x1f;
          NewOp4Val = 31 - Op3Val;
        } else {
          // 64-bit: ubfm Xd, Xn, (64 - shift) mod 64, 63 - shift.
          NewOp3Val = (64 - Op3Val) & 0x3f;
          NewOp4Val = 63 - Op3Val;
        }

        const MCExpr *NewOp3 = MCConstantExpr::create(NewOp3Val, getContext());
        const MCExpr *NewOp4 = MCConstantExpr::create(NewOp4Val, getContext());

        Operands[0] =
            AArch64Operand::CreateToken("ubfm", Op.getStartLoc(), getContext());
        Operands.push_back(AArch64Operand::CreateImm(
            NewOp4, Op3.getStartLoc(), Op3.getEndLoc(), getContext()));
        Operands[3] = AArch64Operand::CreateImm(NewOp3, Op3.getStartLoc(),
                                                Op3.getEndLoc(), getContext());
      }
    }
  } else if (NumOperands == 4 && Tok == "bfc") {
    // FIXME: Horrible hack to handle BFC->BFM alias.
    // LSBOp/WidthOp are deliberately copied by value: Operands[2] and
    // Operands[3] are overwritten below while these are still read.
    AArch64Operand &Op1 = static_cast<AArch64Operand &>(*Operands[1]);
    AArch64Operand LSBOp = static_cast<AArch64Operand &>(*Operands[2]);
    AArch64Operand WidthOp = static_cast<AArch64Operand &>(*Operands[3]);

    if (Op1.isScalarReg() && LSBOp.isImm() && WidthOp.isImm()) {
      const MCConstantExpr *LSBCE = dyn_cast<MCConstantExpr>(LSBOp.getImm());
      const MCConstantExpr *WidthCE = dyn_cast<MCConstantExpr>(WidthOp.getImm());

      if (LSBCE && WidthCE) {
        uint64_t LSB = LSBCE->getValue();
        uint64_t Width = WidthCE->getValue();

        uint64_t RegWidth = 0;
        if (AArch64MCRegisterClasses[AArch64::GPR64allRegClassID].contains(
                Op1.getReg()))
          RegWidth = 64;
        else
          RegWidth = 32;

        // NOTE(review): the ranges in these diagnostics say [0, 31] and
        // [1, 32] even when RegWidth == 64 — consistent with the other alias
        // paths below, but looks misleading for X registers; confirm intent.
        if (LSB >= RegWidth)
          return Error(LSBOp.getStartLoc(),
                       "expected integer in range [0, 31]");
        if (Width < 1 || Width > RegWidth)
          return Error(WidthOp.getStartLoc(),
                       "expected integer in range [1, 32]");

        uint64_t ImmR = 0;
        if (RegWidth == 32)
          ImmR = (32 - LSB) & 0x1f;
        else
          ImmR = (64 - LSB) & 0x3f;

        uint64_t ImmS = Width - 1;

        if (ImmR != 0 && ImmS >= ImmR)
          return Error(WidthOp.getStartLoc(),
                       "requested insert overflows register");

        const MCExpr *ImmRExpr = MCConstantExpr::create(ImmR, getContext());
        const MCExpr *ImmSExpr = MCConstantExpr::create(ImmS, getContext());
        Operands[0] =
            AArch64Operand::CreateToken("bfm", Op.getStartLoc(), getContext());
        // BFC Rd, #lsb, #width  ==  BFM Rd, WZR/XZR, #immr, #imms.
        Operands[2] = AArch64Operand::CreateReg(
            RegWidth == 32 ? AArch64::WZR : AArch64::XZR, RegKind::Scalar,
            SMLoc(), SMLoc(), getContext());
        Operands[3] = AArch64Operand::CreateImm(
            ImmRExpr, LSBOp.getStartLoc(), LSBOp.getEndLoc(), getContext());
        Operands.emplace_back(
            AArch64Operand::CreateImm(ImmSExpr, WidthOp.getStartLoc(),
                                      WidthOp.getEndLoc(), getContext()));
      }
    }
  } else if (NumOperands == 5) {
    // FIXME: Horrible hack to handle the BFI -> BFM, SBFIZ->SBFM, and
    // UBFIZ -> UBFM aliases.
    if (Tok == "bfi" || Tok == "sbfiz" || Tok == "ubfiz") {
      AArch64Operand &Op1 = static_cast<AArch64Operand &>(*Operands[1]);
      AArch64Operand &Op3 = static_cast<AArch64Operand &>(*Operands[3]);
      AArch64Operand &Op4 = static_cast<AArch64Operand &>(*Operands[4]);

      if (Op1.isScalarReg() && Op3.isImm() && Op4.isImm()) {
        const MCConstantExpr *Op3CE = dyn_cast<MCConstantExpr>(Op3.getImm());
        const MCConstantExpr *Op4CE = dyn_cast<MCConstantExpr>(Op4.getImm());

        if (Op3CE && Op4CE) {
          uint64_t Op3Val = Op3CE->getValue();
          uint64_t Op4Val = Op4CE->getValue();

          uint64_t RegWidth = 0;
          if (AArch64MCRegisterClasses[AArch64::GPR64allRegClassID].contains(
                  Op1.getReg()))
            RegWidth = 64;
          else
            RegWidth = 32;

          if (Op3Val >= RegWidth)
            return Error(Op3.getStartLoc(),
                         "expected integer in range [0, 31]");
          if (Op4Val < 1 || Op4Val > RegWidth)
            return Error(Op4.getStartLoc(),
                         "expected integer in range [1, 32]");

          uint64_t NewOp3Val = 0;
          if (RegWidth == 32)
            NewOp3Val = (32 - Op3Val) & 0x1f;
          else
            NewOp3Val = (64 - Op3Val) & 0x3f;

          uint64_t NewOp4Val = Op4Val - 1;

          if (NewOp3Val != 0 && NewOp4Val >= NewOp3Val)
            return Error(Op4.getStartLoc(),
                         "requested insert overflows register");

          const MCExpr *NewOp3 =
              MCConstantExpr::create(NewOp3Val, getContext());
          const MCExpr *NewOp4 =
              MCConstantExpr::create(NewOp4Val, getContext());
          Operands[3] = AArch64Operand::CreateImm(
              NewOp3, Op3.getStartLoc(), Op3.getEndLoc(), getContext());
          Operands[4] = AArch64Operand::CreateImm(
              NewOp4, Op4.getStartLoc(), Op4.getEndLoc(), getContext());
          if (Tok == "bfi")
            Operands[0] = AArch64Operand::CreateToken("bfm", Op.getStartLoc(),
                                                      getContext());
          else if (Tok == "sbfiz")
            Operands[0] = AArch64Operand::CreateToken("sbfm", Op.getStartLoc(),
                                                      getContext());
          else if (Tok == "ubfiz")
            Operands[0] = AArch64Operand::CreateToken("ubfm", Op.getStartLoc(),
                                                      getContext());
          else
            llvm_unreachable("No valid mnemonic for alias?");
        }
      }

      // FIXME: Horrible hack to handle the BFXIL->BFM, SBFX->SBFM, and
      // UBFX -> UBFM aliases.
    } else if (NumOperands == 5 &&
               (Tok == "bfxil" || Tok == "sbfx" || Tok == "ubfx")) {
      // (The NumOperands == 5 recheck above is redundant inside this branch
      // but harmless.)
      AArch64Operand &Op1 = static_cast<AArch64Operand &>(*Operands[1]);
      AArch64Operand &Op3 = static_cast<AArch64Operand &>(*Operands[3]);
      AArch64Operand &Op4 = static_cast<AArch64Operand &>(*Operands[4]);

      if (Op1.isScalarReg() && Op3.isImm() && Op4.isImm()) {
        const MCConstantExpr *Op3CE = dyn_cast<MCConstantExpr>(Op3.getImm());
        const MCConstantExpr *Op4CE = dyn_cast<MCConstantExpr>(Op4.getImm());

        if (Op3CE && Op4CE) {
          uint64_t Op3Val = Op3CE->getValue();
          uint64_t Op4Val = Op4CE->getValue();

          uint64_t RegWidth = 0;
          if (AArch64MCRegisterClasses[AArch64::GPR64allRegClassID].contains(
                  Op1.getReg()))
            RegWidth = 64;
          else
            RegWidth = 32;

          if (Op3Val >= RegWidth)
            return Error(Op3.getStartLoc(),
                         "expected integer in range [0, 31]");
          if (Op4Val < 1 || Op4Val > RegWidth)
            return Error(Op4.getStartLoc(),
                         "expected integer in range [1, 32]");

          // imms for the *BFM form is lsb + width - 1.
          uint64_t NewOp4Val = Op3Val + Op4Val - 1;

          if (NewOp4Val >= RegWidth || NewOp4Val < Op3Val)
            return Error(Op4.getStartLoc(),
                         "requested extract overflows register");

          const MCExpr *NewOp4 =
              MCConstantExpr::create(NewOp4Val, getContext());
          Operands[4] = AArch64Operand::CreateImm(
              NewOp4, Op4.getStartLoc(), Op4.getEndLoc(), getContext());
          if (Tok == "bfxil")
            Operands[0] = AArch64Operand::CreateToken("bfm", Op.getStartLoc(),
                                                      getContext());
          else if (Tok == "sbfx")
            Operands[0] = AArch64Operand::CreateToken("sbfm", Op.getStartLoc(),
                                                      getContext());
          else if (Tok == "ubfx")
            Operands[0] = AArch64Operand::CreateToken("ubfm", Op.getStartLoc(),
                                                      getContext());
          else
            llvm_unreachable("No valid mnemonic for alias?");
        }
      }
    }
  }

  // The Cyclone CPU and early successors didn't execute the zero-cycle zeroing
  // instruction for FP registers correctly in some rare circumstances. Convert
  // it to a safe instruction and warn (because silently changing someone's
  // assembly is rude).
  if (getSTI().getFeatureBits()[AArch64::FeatureZCZeroingFPWorkaround] &&
      NumOperands == 4 && Tok == "movi") {
    AArch64Operand &Op1 = static_cast<AArch64Operand &>(*Operands[1]);
    AArch64Operand &Op2 = static_cast<AArch64Operand &>(*Operands[2]);
    AArch64Operand &Op3 = static_cast<AArch64Operand &>(*Operands[3]);
    // The vector-arrangement suffix may be either a separate token operand
    // ("movi.2d v0, #0") or follow the register ("movi v0.2d, #0").
    if ((Op1.isToken() && Op2.isNeonVectorReg() && Op3.isImm()) ||
        (Op1.isNeonVectorReg() && Op2.isToken() && Op3.isImm())) {
      StringRef Suffix = Op1.isToken() ? Op1.getToken() : Op2.getToken();
      if (Suffix.lower() == ".2d" &&
          cast<MCConstantExpr>(Op3.getImm())->getValue() == 0) {
        Warning(IDLoc, "instruction movi.2d with immediate #0 may not function"
                " correctly on this CPU, converting to equivalent movi.16b");
        // Switch the suffix to .16b.
        unsigned Idx = Op1.isToken() ? 1 : 2;
        Operands[Idx] =
            AArch64Operand::CreateToken(".16b", IDLoc, getContext());
      }
    }
  }

  // FIXME: Horrible hack for sxtw and uxtw with Wn src and Xd dst operands.
  //        InstAlias can't quite handle this since the reg classes aren't
  //        subclasses.
  if (NumOperands == 3 && (Tok == "sxtw" || Tok == "uxtw")) {
    // The source register can be Wn here, but the matcher expects a
    // GPR64. Twiddle it here if necessary.
    AArch64Operand &Op = static_cast<AArch64Operand &>(*Operands[2]);
    if (Op.isScalarReg()) {
      unsigned Reg = getXRegFromWReg(Op.getReg());
      Operands[2] = AArch64Operand::CreateReg(Reg, RegKind::Scalar,
                                              Op.getStartLoc(), Op.getEndLoc(),
                                              getContext());
    }
  }
  // FIXME: Likewise for sxt[bh] with a Xd dst operand
  else if (NumOperands == 3 && (Tok == "sxtb" || Tok == "sxth")) {
    AArch64Operand &Op = static_cast<AArch64Operand &>(*Operands[1]);
    if (Op.isScalarReg() &&
        AArch64MCRegisterClasses[AArch64::GPR64allRegClassID].contains(
            Op.getReg())) {
      // The source register can be Wn here, but the matcher expects a
      // GPR64. Twiddle it here if necessary.
      AArch64Operand &Op = static_cast<AArch64Operand &>(*Operands[2]);
      if (Op.isScalarReg()) {
        unsigned Reg = getXRegFromWReg(Op.getReg());
        Operands[2] = AArch64Operand::CreateReg(Reg, RegKind::Scalar,
                                                Op.getStartLoc(),
                                                Op.getEndLoc(), getContext());
      }
    }
  }
  // FIXME: Likewise for uxt[bh] with a Xd dst operand
  else if (NumOperands == 3 && (Tok == "uxtb" || Tok == "uxth")) {
    AArch64Operand &Op = static_cast<AArch64Operand &>(*Operands[1]);
    if (Op.isScalarReg() &&
        AArch64MCRegisterClasses[AArch64::GPR64allRegClassID].contains(
            Op.getReg())) {
      // The source register can be Wn here, but the matcher expects a
      // GPR32. Twiddle it here if necessary.
      // (Note: unlike the sxt[bh] branch above, this rewrites the
      // destination, Operands[1], down to its W form.)
      AArch64Operand &Op = static_cast<AArch64Operand &>(*Operands[1]);
      if (Op.isScalarReg()) {
        unsigned Reg = getWRegFromXReg(Op.getReg());
        Operands[1] = AArch64Operand::CreateReg(Reg, RegKind::Scalar,
                                                Op.getStartLoc(),
                                                Op.getEndLoc(), getContext());
      }
    }
  }

  MCInst Inst;
  FeatureBitset MissingFeatures;
  // First try to match against the secondary set of tables containing the
  // short-form NEON instructions (e.g. "fadd.2s v0, v1, v2").
  unsigned MatchResult =
      MatchInstructionImpl(Operands, Inst, ErrorInfo, MissingFeatures,
                           MatchingInlineAsm, 1);

  // If that fails, try against the alternate table containing long-form NEON:
  // "fadd v0.2s, v1.2s, v2.2s"
  if (MatchResult != Match_Success) {
    // But first, save the short-form match result: we can use it in case the
    // long-form match also fails.
    auto ShortFormNEONErrorInfo = ErrorInfo;
    auto ShortFormNEONMatchResult = MatchResult;
    auto ShortFormNEONMissingFeatures = MissingFeatures;

    MatchResult =
        MatchInstructionImpl(Operands, Inst, ErrorInfo, MissingFeatures,
                             MatchingInlineAsm, 0);

    // Now, both matches failed, and the long-form match failed on the mnemonic
    // suffix token operand.  The short-form match failure is probably more
    // relevant: use it instead.
    if (MatchResult == Match_InvalidOperand && ErrorInfo == 1 &&
        Operands.size() > 1 && ((AArch64Operand &)*Operands[1]).isToken() &&
        ((AArch64Operand &)*Operands[1]).isTokenSuffix()) {
      MatchResult = ShortFormNEONMatchResult;
      ErrorInfo = ShortFormNEONErrorInfo;
      MissingFeatures = ShortFormNEONMissingFeatures;
    }
  }

  switch (MatchResult) {
  case Match_Success: {
    // Perform range checking and other semantic validations
    SmallVector<SMLoc, 8> OperandLocs;
    NumOperands = Operands.size();
    for (unsigned i = 1; i < NumOperands; ++i)
      OperandLocs.push_back(Operands[i]->getStartLoc());
    if (validateInstruction(Inst, IDLoc, OperandLocs))
      return true;

    Inst.setLoc(IDLoc);
    Out.emitInstruction(Inst, getSTI());
    return false;
  }
  case Match_MissingFeature: {
    assert(MissingFeatures.any() && "Unknown missing feature!");
    // Special case the error message for the very common case where only
    // a single subtarget feature is missing (neon, e.g.).
    std::string Msg = "instruction requires:";
    for (unsigned i = 0, e = MissingFeatures.size(); i != e; ++i) {
      if (MissingFeatures[i]) {
        Msg += " ";
        Msg += getSubtargetFeatureName(i);
      }
    }
    return Error(IDLoc, Msg);
  }
  case Match_MnemonicFail:
    return showMatchError(IDLoc, MatchResult, ErrorInfo, Operands);
  case Match_InvalidOperand: {
    SMLoc ErrorLoc = IDLoc;
    if (ErrorInfo != ~0ULL) {
      if (ErrorInfo >= Operands.size())
        return Error(IDLoc, "too few operands for instruction",
                     SMRange(IDLoc, getTok().getLoc()));

      ErrorLoc = ((AArch64Operand &)*Operands[ErrorInfo]).getStartLoc();
      if (ErrorLoc == SMLoc())
        ErrorLoc = IDLoc;
    }
    // If the match failed on a suffix token operand, tweak the diagnostic
    // accordingly.
    // NOTE(review): if ErrorInfo == ~0ULL this indexes Operands out of
    // bounds; presumably the matcher always sets ErrorInfo for
    // Match_InvalidOperand — confirm.
    if (((AArch64Operand &)*Operands[ErrorInfo]).isToken() &&
        ((AArch64Operand &)*Operands[ErrorInfo]).isTokenSuffix())
      MatchResult = Match_InvalidSuffix;

    return showMatchError(ErrorLoc, MatchResult, ErrorInfo, Operands);
  }
  // All remaining diagnostic codes share one handler: locate the offending
  // operand and let showMatchError() pick the message text.
  case Match_InvalidTiedOperand:
  case Match_InvalidMemoryIndexed1:
  case Match_InvalidMemoryIndexed2:
  case Match_InvalidMemoryIndexed4:
  case Match_InvalidMemoryIndexed8:
  case Match_InvalidMemoryIndexed16:
  case Match_InvalidCondCode:
  case Match_AddSubRegExtendSmall:
  case Match_AddSubRegExtendLarge:
  case Match_AddSubSecondSource:
  case Match_LogicalSecondSource:
  case Match_AddSubRegShift32:
  case Match_AddSubRegShift64:
  case Match_InvalidMovImm32Shift:
  case Match_InvalidMovImm64Shift:
  case Match_InvalidFPImm:
  case Match_InvalidMemoryWExtend8:
  case Match_InvalidMemoryWExtend16:
  case Match_InvalidMemoryWExtend32:
  case Match_InvalidMemoryWExtend64:
  case Match_InvalidMemoryWExtend128:
  case Match_InvalidMemoryXExtend8:
  case Match_InvalidMemoryXExtend16:
  case Match_InvalidMemoryXExtend32:
  case Match_InvalidMemoryXExtend64:
  case Match_InvalidMemoryXExtend128:
  case Match_InvalidMemoryIndexed1SImm4:
  case Match_InvalidMemoryIndexed2SImm4:
  case Match_InvalidMemoryIndexed3SImm4:
  case Match_InvalidMemoryIndexed4SImm4:
  case Match_InvalidMemoryIndexed1SImm6:
  case Match_InvalidMemoryIndexed16SImm4:
  case Match_InvalidMemoryIndexed32SImm4:
  case Match_InvalidMemoryIndexed4SImm7:
  case Match_InvalidMemoryIndexed8SImm7:
  case Match_InvalidMemoryIndexed16SImm7:
  case Match_InvalidMemoryIndexed8UImm5:
  case Match_InvalidMemoryIndexed8UImm3:
  case Match_InvalidMemoryIndexed4UImm5:
  case Match_InvalidMemoryIndexed2UImm5:
  case Match_InvalidMemoryIndexed1UImm6:
  case Match_InvalidMemoryIndexed2UImm6:
  case Match_InvalidMemoryIndexed4UImm6:
  case Match_InvalidMemoryIndexed8UImm6:
  case Match_InvalidMemoryIndexed16UImm6:
  case Match_InvalidMemoryIndexedSImm6:
  case Match_InvalidMemoryIndexedSImm5:
  case Match_InvalidMemoryIndexedSImm8:
  case Match_InvalidMemoryIndexedSImm9:
  case Match_InvalidMemoryIndexed16SImm9:
  case Match_InvalidMemoryIndexed8SImm10:
  case Match_InvalidImm0_0:
  case Match_InvalidImm0_1:
  case Match_InvalidImm0_3:
  case Match_InvalidImm0_7:
  case Match_InvalidImm0_15:
  case Match_InvalidImm0_31:
  case Match_InvalidImm0_63:
  case Match_InvalidImm0_127:
  case Match_InvalidImm0_255:
  case Match_InvalidImm0_65535:
  case Match_InvalidImm1_8:
  case Match_InvalidImm1_16:
  case Match_InvalidImm1_32:
  case Match_InvalidImm1_64:
  case Match_InvalidMemoryIndexedRange2UImm0:
  case Match_InvalidMemoryIndexedRange2UImm1:
  case Match_InvalidMemoryIndexedRange2UImm2:
  case Match_InvalidMemoryIndexedRange2UImm3:
  case Match_InvalidMemoryIndexedRange4UImm0:
  case Match_InvalidMemoryIndexedRange4UImm1:
  case Match_InvalidMemoryIndexedRange4UImm2:
  case Match_InvalidSVEAddSubImm8:
  case Match_InvalidSVEAddSubImm16:
  case Match_InvalidSVEAddSubImm32:
  case Match_InvalidSVEAddSubImm64:
  case Match_InvalidSVECpyImm8:
  case Match_InvalidSVECpyImm16:
  case Match_InvalidSVECpyImm32:
  case Match_InvalidSVECpyImm64:
  case Match_InvalidIndexRange0_0:
  case Match_InvalidIndexRange1_1:
  case Match_InvalidIndexRange0_15:
  case Match_InvalidIndexRange0_7:
  case Match_InvalidIndexRange0_3:
  case Match_InvalidIndexRange0_1:
  case Match_InvalidSVEIndexRange0_63:
  case Match_InvalidSVEIndexRange0_31:
  case Match_InvalidSVEIndexRange0_15:
  case Match_InvalidSVEIndexRange0_7:
  case Match_InvalidSVEIndexRange0_3:
  case Match_InvalidLabel:
  case Match_InvalidComplexRotationEven:
  case Match_InvalidComplexRotationOdd:
  case Match_InvalidGPR64shifted8:
  case Match_InvalidGPR64shifted16:
  case Match_InvalidGPR64shifted32:
  case Match_InvalidGPR64shifted64:
  case Match_InvalidGPR64shifted128:
  case Match_InvalidGPR64NoXZRshifted8:
  case Match_InvalidGPR64NoXZRshifted16:
  case Match_InvalidGPR64NoXZRshifted32:
  case Match_InvalidGPR64NoXZRshifted64:
  case Match_InvalidGPR64NoXZRshifted128:
  case Match_InvalidZPR32UXTW8:
  case Match_InvalidZPR32UXTW16:
  case Match_InvalidZPR32UXTW32:
  case Match_InvalidZPR32UXTW64:
  case Match_InvalidZPR32SXTW8:
  case Match_InvalidZPR32SXTW16:
  case Match_InvalidZPR32SXTW32:
  case Match_InvalidZPR32SXTW64:
  case Match_InvalidZPR64UXTW8:
  case Match_InvalidZPR64SXTW8:
  case Match_InvalidZPR64UXTW16:
  case Match_InvalidZPR64SXTW16:
  case Match_InvalidZPR64UXTW32:
  case Match_InvalidZPR64SXTW32:
  case Match_InvalidZPR64UXTW64:
  case Match_InvalidZPR64SXTW64:
  case Match_InvalidZPR32LSL8:
  case Match_InvalidZPR32LSL16:
  case Match_InvalidZPR32LSL32:
  case Match_InvalidZPR32LSL64:
  case Match_InvalidZPR64LSL8:
  case Match_InvalidZPR64LSL16:
  case Match_InvalidZPR64LSL32:
  case Match_InvalidZPR64LSL64:
  case Match_InvalidZPR0:
  case Match_InvalidZPR8:
  case Match_InvalidZPR16:
  case Match_InvalidZPR32:
  case Match_InvalidZPR64:
  case Match_InvalidZPR128:
  case Match_InvalidZPR_3b8:
  case Match_InvalidZPR_3b16:
  case Match_InvalidZPR_3b32:
  case Match_InvalidZPR_4b8:
  case Match_InvalidZPR_4b16:
  case Match_InvalidZPR_4b32:
  case Match_InvalidZPR_4b64:
  case Match_InvalidSVEPredicateAnyReg:
  case Match_InvalidSVEPattern:
  case Match_InvalidSVEVecLenSpecifier:
  case Match_InvalidSVEPredicateBReg:
  case Match_InvalidSVEPredicateHReg:
  case Match_InvalidSVEPredicateSReg:
  case Match_InvalidSVEPredicateDReg:
  case Match_InvalidSVEPredicate3bAnyReg:
  case Match_InvalidSVEPNPredicateB_p8to15Reg:
  case Match_InvalidSVEPNPredicateH_p8to15Reg:
  case Match_InvalidSVEPNPredicateS_p8to15Reg:
  case Match_InvalidSVEPNPredicateD_p8to15Reg:
  case Match_InvalidSVEPNPredicateAny_p8to15Reg:
  case Match_InvalidSVEPNPredicateBReg:
  case Match_InvalidSVEPNPredicateHReg:
  case Match_InvalidSVEPNPredicateSReg:
  case Match_InvalidSVEPNPredicateDReg:
  case Match_InvalidSVEPredicateListMul2x8:
  case Match_InvalidSVEPredicateListMul2x16:
  case Match_InvalidSVEPredicateListMul2x32:
  case Match_InvalidSVEPredicateListMul2x64:
  case Match_InvalidSVEExactFPImmOperandHalfOne:
  case Match_InvalidSVEExactFPImmOperandHalfTwo:
  case Match_InvalidSVEExactFPImmOperandZeroOne:
  case Match_InvalidMatrixTile32:
  case Match_InvalidMatrixTile64:
  case Match_InvalidMatrix:
  case Match_InvalidMatrix8:
  case Match_InvalidMatrix16:
  case Match_InvalidMatrix32:
  case Match_InvalidMatrix64:
  case Match_InvalidMatrixTileVectorH8:
  case Match_InvalidMatrixTileVectorH16:
  case Match_InvalidMatrixTileVectorH32:
  case Match_InvalidMatrixTileVectorH64:
  case Match_InvalidMatrixTileVectorH128:
  case Match_InvalidMatrixTileVectorV8:
  case Match_InvalidMatrixTileVectorV16:
  case Match_InvalidMatrixTileVectorV32:
  case Match_InvalidMatrixTileVectorV64:
  case Match_InvalidMatrixTileVectorV128:
  case Match_InvalidSVCR:
  case Match_InvalidMatrixIndexGPR32_12_15:
  case Match_InvalidMatrixIndexGPR32_8_11:
  case Match_InvalidLookupTable:
  case Match_InvalidSVEVectorListMul2x8:
  case Match_InvalidSVEVectorListMul2x16:
  case Match_InvalidSVEVectorListMul2x32:
  case Match_InvalidSVEVectorListMul2x64:
  case Match_InvalidSVEVectorListMul4x8:
  case Match_InvalidSVEVectorListMul4x16:
  case Match_InvalidSVEVectorListMul4x32:
  case Match_InvalidSVEVectorListMul4x64:
  case Match_InvalidSVEVectorListStrided2x8:
  case Match_InvalidSVEVectorListStrided2x16:
  case Match_InvalidSVEVectorListStrided2x32:
  case Match_InvalidSVEVectorListStrided2x64:
  case Match_InvalidSVEVectorListStrided4x8:
  case Match_InvalidSVEVectorListStrided4x16:
  case Match_InvalidSVEVectorListStrided4x32:
  case Match_InvalidSVEVectorListStrided4x64:
  case Match_MSR:
  case Match_MRS: {
    if (ErrorInfo >= Operands.size())
      return Error(IDLoc, "too few operands for instruction", SMRange(IDLoc, (*Operands.back()).getEndLoc()));
    // Any time we get here, there's nothing fancy to do. Just get the
    // operand SMLoc and display the diagnostic.
    SMLoc ErrorLoc = ((AArch64Operand &)*Operands[ErrorInfo]).getStartLoc();
    if (ErrorLoc == SMLoc())
      ErrorLoc = IDLoc;
    return showMatchError(ErrorLoc, MatchResult, ErrorInfo, Operands);
  }
  }

  llvm_unreachable("Implement any new match types added!");
}
/// ParseDirective parses the arm specific directives
/// Dispatches on the (lower-cased) directive name. Returns false when the
/// directive was recognized and consumed here; true otherwise, so the
/// generic parser can try to handle it. The bool results of the individual
/// parseDirective* helpers are deliberately discarded — each helper emits
/// its own diagnostics via Error()/TokError().
bool AArch64AsmParser::ParseDirective(AsmToken DirectiveID) {
  const MCContext::Environment Format = getContext().getObjectFileType();
  bool IsMachO = Format == MCContext::IsMachO;
  bool IsCOFF = Format == MCContext::IsCOFF;

  // Directive matching is case-insensitive.
  auto IDVal = DirectiveID.getIdentifier().lower();
  SMLoc Loc = DirectiveID.getLoc();

  // Directives accepted for every object format.
  if (IDVal == ".arch")
    parseDirectiveArch(Loc);
  else if (IDVal == ".cpu")
    parseDirectiveCPU(Loc);
  else if (IDVal == ".tlsdesccall")
    parseDirectiveTLSDescCall(Loc);
  else if (IDVal == ".ltorg" || IDVal == ".pool")
    parseDirectiveLtorg(Loc);
  else if (IDVal == ".unreq")
    parseDirectiveUnreq(Loc);
  else if (IDVal == ".inst")
    parseDirectiveInst(Loc);
  else if (IDVal == ".cfi_negate_ra_state")
    parseDirectiveCFINegateRAState();
  else if (IDVal == ".cfi_b_key_frame")
    parseDirectiveCFIBKeyFrame();
  else if (IDVal == ".cfi_mte_tagged_frame")
    parseDirectiveCFIMTETaggedFrame();
  else if (IDVal == ".arch_extension")
    parseDirectiveArchExtension(Loc);
  else if (IDVal == ".variant_pcs")
    parseDirectiveVariantPCS(Loc);
  // Mach-O only: linker-optimization-hint directive.
  else if (IsMachO) {
    if (IDVal == MCLOHDirectiveName())
      parseDirectiveLOH(IDVal, Loc);
    else
      return true;
  // COFF only: Windows ARM64 SEH unwind directives.
  } else if (IsCOFF) {
    if (IDVal == ".seh_stackalloc")
      parseDirectiveSEHAllocStack(Loc);
    else if (IDVal == ".seh_endprologue")
      parseDirectiveSEHPrologEnd(Loc);
    else if (IDVal == ".seh_save_r19r20_x")
      parseDirectiveSEHSaveR19R20X(Loc);
    else if (IDVal == ".seh_save_fplr")
      parseDirectiveSEHSaveFPLR(Loc);
    else if (IDVal == ".seh_save_fplr_x")
      parseDirectiveSEHSaveFPLRX(Loc);
    else if (IDVal == ".seh_save_reg")
      parseDirectiveSEHSaveReg(Loc);
    else if (IDVal == ".seh_save_reg_x")
      parseDirectiveSEHSaveRegX(Loc);
    else if (IDVal == ".seh_save_regp")
      parseDirectiveSEHSaveRegP(Loc);
    else if (IDVal == ".seh_save_regp_x")
      parseDirectiveSEHSaveRegPX(Loc);
    else if (IDVal == ".seh_save_lrpair")
      parseDirectiveSEHSaveLRPair(Loc);
    else if (IDVal == ".seh_save_freg")
      parseDirectiveSEHSaveFReg(Loc);
    else if (IDVal == ".seh_save_freg_x")
      parseDirectiveSEHSaveFRegX(Loc);
    else if (IDVal == ".seh_save_fregp")
      parseDirectiveSEHSaveFRegP(Loc);
    else if (IDVal == ".seh_save_fregp_x")
      parseDirectiveSEHSaveFRegPX(Loc);
    else if (IDVal == ".seh_set_fp")
      parseDirectiveSEHSetFP(Loc);
    else if (IDVal == ".seh_add_fp")
      parseDirectiveSEHAddFP(Loc);
    else if (IDVal == ".seh_nop")
      parseDirectiveSEHNop(Loc);
    else if (IDVal == ".seh_save_next")
      parseDirectiveSEHSaveNext(Loc);
    else if (IDVal == ".seh_startepilogue")
      parseDirectiveSEHEpilogStart(Loc);
    else if (IDVal == ".seh_endepilogue")
      parseDirectiveSEHEpilogEnd(Loc);
    else if (IDVal == ".seh_trap_frame")
      parseDirectiveSEHTrapFrame(Loc);
    else if (IDVal == ".seh_pushframe")
      parseDirectiveSEHMachineFrame(Loc);
    else if (IDVal == ".seh_context")
      parseDirectiveSEHContext(Loc);
    else if (IDVal == ".seh_clear_unwound_to_call")
      parseDirectiveSEHClearUnwoundToCall(Loc);
    else if (IDVal == ".seh_pac_sign_lr")
      parseDirectiveSEHPACSignLR(Loc);
    // The save_any_reg variants share one parser; the flags select
    // paired/writeback forms.
    else if (IDVal == ".seh_save_any_reg")
      parseDirectiveSEHSaveAnyReg(Loc, false, false);
    else if (IDVal == ".seh_save_any_reg_p")
      parseDirectiveSEHSaveAnyReg(Loc, true, false);
    else if (IDVal == ".seh_save_any_reg_x")
      parseDirectiveSEHSaveAnyReg(Loc, false, true);
    else if (IDVal == ".seh_save_any_reg_px")
      parseDirectiveSEHSaveAnyReg(Loc, true, true);
    else
      return true;
  } else
    return true;
  return false;
}
  6058. static void ExpandCryptoAEK(const AArch64::ArchInfo &ArchInfo,
  6059. SmallVector<StringRef, 4> &RequestedExtensions) {
  6060. const bool NoCrypto = llvm::is_contained(RequestedExtensions, "nocrypto");
  6061. const bool Crypto = llvm::is_contained(RequestedExtensions, "crypto");
  6062. if (!NoCrypto && Crypto) {
  6063. // Map 'generic' (and others) to sha2 and aes, because
  6064. // that was the traditional meaning of crypto.
  6065. if (ArchInfo == AArch64::ARMV8_1A || ArchInfo == AArch64::ARMV8_2A ||
  6066. ArchInfo == AArch64::ARMV8_3A) {
  6067. RequestedExtensions.push_back("sha2");
  6068. RequestedExtensions.push_back("aes");
  6069. }
  6070. if (ArchInfo == AArch64::ARMV8_4A || ArchInfo == AArch64::ARMV8_5A ||
  6071. ArchInfo == AArch64::ARMV8_6A || ArchInfo == AArch64::ARMV8_7A ||
  6072. ArchInfo == AArch64::ARMV8_8A || ArchInfo == AArch64::ARMV8_9A ||
  6073. ArchInfo == AArch64::ARMV9A || ArchInfo == AArch64::ARMV9_1A ||
  6074. ArchInfo == AArch64::ARMV9_2A || ArchInfo == AArch64::ARMV9_3A ||
  6075. ArchInfo == AArch64::ARMV9_4A || ArchInfo == AArch64::ARMV8R) {
  6076. RequestedExtensions.push_back("sm4");
  6077. RequestedExtensions.push_back("sha3");
  6078. RequestedExtensions.push_back("sha2");
  6079. RequestedExtensions.push_back("aes");
  6080. }
  6081. } else if (NoCrypto) {
  6082. // Map 'generic' (and others) to sha2 and aes, because
  6083. // that was the traditional meaning of crypto.
  6084. if (ArchInfo == AArch64::ARMV8_1A || ArchInfo == AArch64::ARMV8_2A ||
  6085. ArchInfo == AArch64::ARMV8_3A) {
  6086. RequestedExtensions.push_back("nosha2");
  6087. RequestedExtensions.push_back("noaes");
  6088. }
  6089. if (ArchInfo == AArch64::ARMV8_4A || ArchInfo == AArch64::ARMV8_5A ||
  6090. ArchInfo == AArch64::ARMV8_6A || ArchInfo == AArch64::ARMV8_7A ||
  6091. ArchInfo == AArch64::ARMV8_8A || ArchInfo == AArch64::ARMV8_9A ||
  6092. ArchInfo == AArch64::ARMV9A || ArchInfo == AArch64::ARMV9_1A ||
  6093. ArchInfo == AArch64::ARMV9_2A || ArchInfo == AArch64::ARMV9_3A ||
  6094. ArchInfo == AArch64::ARMV9_4A) {
  6095. RequestedExtensions.push_back("nosm4");
  6096. RequestedExtensions.push_back("nosha3");
  6097. RequestedExtensions.push_back("nosha2");
  6098. RequestedExtensions.push_back("noaes");
  6099. }
  6100. }
  6101. }
  6102. /// parseDirectiveArch
  6103. /// ::= .arch token
  6104. bool AArch64AsmParser::parseDirectiveArch(SMLoc L) {
  6105. SMLoc ArchLoc = getLoc();
  6106. StringRef Arch, ExtensionString;
  6107. std::tie(Arch, ExtensionString) =
  6108. getParser().parseStringToEndOfStatement().trim().split('+');
  6109. const AArch64::ArchInfo &ArchInfo = AArch64::parseArch(Arch);
  6110. if (ArchInfo == AArch64::INVALID)
  6111. return Error(ArchLoc, "unknown arch name");
  6112. if (parseToken(AsmToken::EndOfStatement))
  6113. return true;
  6114. // Get the architecture and extension features.
  6115. std::vector<StringRef> AArch64Features;
  6116. AArch64Features.push_back(ArchInfo.ArchFeature);
  6117. AArch64::getExtensionFeatures(
  6118. AArch64::getDefaultExtensions("generic", ArchInfo), AArch64Features);
  6119. MCSubtargetInfo &STI = copySTI();
  6120. std::vector<std::string> ArchFeatures(AArch64Features.begin(), AArch64Features.end());
  6121. STI.setDefaultFeatures("generic", /*TuneCPU*/ "generic",
  6122. join(ArchFeatures.begin(), ArchFeatures.end(), ","));
  6123. SmallVector<StringRef, 4> RequestedExtensions;
  6124. if (!ExtensionString.empty())
  6125. ExtensionString.split(RequestedExtensions, '+');
  6126. ExpandCryptoAEK(ArchInfo, RequestedExtensions);
  6127. FeatureBitset Features = STI.getFeatureBits();
  6128. for (auto Name : RequestedExtensions) {
  6129. bool EnableFeature = true;
  6130. if (Name.startswith_insensitive("no")) {
  6131. EnableFeature = false;
  6132. Name = Name.substr(2);
  6133. }
  6134. for (const auto &Extension : ExtensionMap) {
  6135. if (Extension.Name != Name)
  6136. continue;
  6137. if (Extension.Features.none())
  6138. report_fatal_error("unsupported architectural extension: " + Name);
  6139. FeatureBitset ToggleFeatures =
  6140. EnableFeature
  6141. ? STI.SetFeatureBitsTransitively(~Features & Extension.Features)
  6142. : STI.ToggleFeature(Features & Extension.Features);
  6143. setAvailableFeatures(ComputeAvailableFeatures(ToggleFeatures));
  6144. break;
  6145. }
  6146. }
  6147. return false;
  6148. }
  6149. /// parseDirectiveArchExtension
  6150. /// ::= .arch_extension [no]feature
  6151. bool AArch64AsmParser::parseDirectiveArchExtension(SMLoc L) {
  6152. SMLoc ExtLoc = getLoc();
  6153. StringRef Name = getParser().parseStringToEndOfStatement().trim();
  6154. if (parseEOL())
  6155. return true;
  6156. bool EnableFeature = true;
  6157. if (Name.startswith_insensitive("no")) {
  6158. EnableFeature = false;
  6159. Name = Name.substr(2);
  6160. }
  6161. MCSubtargetInfo &STI = copySTI();
  6162. FeatureBitset Features = STI.getFeatureBits();
  6163. for (const auto &Extension : ExtensionMap) {
  6164. if (Extension.Name != Name)
  6165. continue;
  6166. if (Extension.Features.none())
  6167. return Error(ExtLoc, "unsupported architectural extension: " + Name);
  6168. FeatureBitset ToggleFeatures =
  6169. EnableFeature
  6170. ? STI.SetFeatureBitsTransitively(~Features & Extension.Features)
  6171. : STI.ToggleFeature(Features & Extension.Features);
  6172. setAvailableFeatures(ComputeAvailableFeatures(ToggleFeatures));
  6173. return false;
  6174. }
  6175. return Error(ExtLoc, "unknown architectural extension: " + Name);
  6176. }
  6177. static SMLoc incrementLoc(SMLoc L, int Offset) {
  6178. return SMLoc::getFromPointer(L.getPointer() + Offset);
  6179. }
/// parseDirectiveCPU
///   ::= .cpu id
/// The operand has the shape "cpu[+ext[+ext...]]"; the subtarget is reset to
/// the named CPU's defaults and then each extension is toggled in order.
bool AArch64AsmParser::parseDirectiveCPU(SMLoc L) {
  SMLoc CurLoc = getLoc();

  StringRef CPU, ExtensionString;
  std::tie(CPU, ExtensionString) =
      getParser().parseStringToEndOfStatement().trim().split('+');

  if (parseToken(AsmToken::EndOfStatement))
    return true;

  SmallVector<StringRef, 4> RequestedExtensions;
  if (!ExtensionString.empty())
    ExtensionString.split(RequestedExtensions, '+');

  // FIXME This is using tablegen data, but should be moved to ARMTargetParser
  // once that is tablegen'ed
  if (!getSTI().isCPUStringValid(CPU)) {
    Error(CurLoc, "unknown CPU name");
    // Note: returns false (directive handled) even though a diagnostic was
    // emitted, so the generic parser does not re-report the same line.
    return false;
  }

  MCSubtargetInfo &STI = copySTI();
  STI.setDefaultFeatures(CPU, /*TuneCPU*/ CPU, "");
  // CurLoc tracks the position of each extension term for diagnostics.
  CurLoc = incrementLoc(CurLoc, CPU.size());

  ExpandCryptoAEK(llvm::AArch64::getArchForCpu(CPU), RequestedExtensions);

  for (auto Name : RequestedExtensions) {
    // Advance source location past '+'.
    CurLoc = incrementLoc(CurLoc, 1);

    bool EnableFeature = true;

    // A leading "no" (case-insensitive) disables the extension.
    if (Name.startswith_insensitive("no")) {
      EnableFeature = false;
      Name = Name.substr(2);
    }

    bool FoundExtension = false;
    for (const auto &Extension : ExtensionMap) {
      if (Extension.Name != Name)
        continue;

      if (Extension.Features.none())
        report_fatal_error("unsupported architectural extension: " + Name);

      // Re-read the feature bits each iteration: extensions earlier in the
      // list may already have changed them.
      FeatureBitset Features = STI.getFeatureBits();
      FeatureBitset ToggleFeatures =
          EnableFeature
              ? STI.SetFeatureBitsTransitively(~Features & Extension.Features)
              : STI.ToggleFeature(Features & Extension.Features);
      setAvailableFeatures(ComputeAvailableFeatures(ToggleFeatures));
      FoundExtension = true;

      break;
    }

    if (!FoundExtension)
      Error(CurLoc, "unsupported architectural extension");

    CurLoc = incrementLoc(CurLoc, Name.size());
  }
  return false;
}
  6231. /// parseDirectiveInst
  6232. /// ::= .inst opcode [, ...]
  6233. bool AArch64AsmParser::parseDirectiveInst(SMLoc Loc) {
  6234. if (getLexer().is(AsmToken::EndOfStatement))
  6235. return Error(Loc, "expected expression following '.inst' directive");
  6236. auto parseOp = [&]() -> bool {
  6237. SMLoc L = getLoc();
  6238. const MCExpr *Expr = nullptr;
  6239. if (check(getParser().parseExpression(Expr), L, "expected expression"))
  6240. return true;
  6241. const MCConstantExpr *Value = dyn_cast_or_null<MCConstantExpr>(Expr);
  6242. if (check(!Value, L, "expected constant expression"))
  6243. return true;
  6244. getTargetStreamer().emitInst(Value->getValue());
  6245. return false;
  6246. };
  6247. return parseMany(parseOp);
  6248. }
  6249. // parseDirectiveTLSDescCall:
  6250. // ::= .tlsdesccall symbol
  6251. bool AArch64AsmParser::parseDirectiveTLSDescCall(SMLoc L) {
  6252. StringRef Name;
  6253. if (check(getParser().parseIdentifier(Name), L, "expected symbol") ||
  6254. parseToken(AsmToken::EndOfStatement))
  6255. return true;
  6256. MCSymbol *Sym = getContext().getOrCreateSymbol(Name);
  6257. const MCExpr *Expr = MCSymbolRefExpr::create(Sym, getContext());
  6258. Expr = AArch64MCExpr::create(Expr, AArch64MCExpr::VK_TLSDESC, getContext());
  6259. MCInst Inst;
  6260. Inst.setOpcode(AArch64::TLSDESCCALL);
  6261. Inst.addOperand(MCOperand::createExpr(Expr));
  6262. getParser().getStreamer().emitInstruction(Inst, getSTI());
  6263. return false;
  6264. }
/// ::= .loh <lohName | lohId> label1, ..., labelN
/// The number of arguments depends on the loh identifier.
/// Accepts either a symbolic LOH kind name or its numeric id, then the
/// exact number of comma-separated label arguments that kind requires.
bool AArch64AsmParser::parseDirectiveLOH(StringRef IDVal, SMLoc Loc) {
  MCLOHType Kind;
  if (getTok().isNot(AsmToken::Identifier)) {
    if (getTok().isNot(AsmToken::Integer))
      return TokError("expected an identifier or a number in directive");
    // We successfully get a numeric value for the identifier.
    // Check if it is valid.
    int64_t Id = getTok().getIntVal();
    // NOTE(review): -1U promotes to int64_t 4294967295 here, so ids above
    // UINT32_MAX bypass the validity check — confirm this is intended.
    if (Id <= -1U && !isValidMCLOHType(Id))
      return TokError("invalid numeric identifier in directive");
    Kind = (MCLOHType)Id;
  } else {
    StringRef Name = getTok().getIdentifier();
    // We successfully parse an identifier.
    // Check if it is a recognized one.
    int Id = MCLOHNameToId(Name);

    if (Id == -1)
      return TokError("invalid identifier in directive");
    Kind = (MCLOHType)Id;
  }
  // Consume the identifier.
  Lex();
  // Get the number of arguments of this LOH.
  int NbArgs = MCLOHIdToNbArgs(Kind);

  assert(NbArgs != -1 && "Invalid number of arguments");

  // Collect exactly NbArgs comma-separated symbol arguments.
  SmallVector<MCSymbol *, 3> Args;
  for (int Idx = 0; Idx < NbArgs; ++Idx) {
    StringRef Name;
    if (getParser().parseIdentifier(Name))
      return TokError("expected identifier in directive");
    Args.push_back(getContext().getOrCreateSymbol(Name));

    if (Idx + 1 == NbArgs)
      break;

    if (parseComma())
      return true;
  }
  if (parseEOL())
    return true;

  getStreamer().emitLOHDirective((MCLOHType)Kind, Args);
  return false;
}
  6308. /// parseDirectiveLtorg
  6309. /// ::= .ltorg | .pool
  6310. bool AArch64AsmParser::parseDirectiveLtorg(SMLoc L) {
  6311. if (parseEOL())
  6312. return true;
  6313. getTargetStreamer().emitCurrentConstantPool();
  6314. return false;
  6315. }
/// parseDirectiveReq
///  ::= name .req registername
/// Defines a register alias. The register is tried as, in order: a scalar
/// register, a NEON vector register, an SVE data vector, then an SVE
/// predicate — each attempt only runs if the previous ones did not succeed,
/// and each rejects a register written with an explicit type suffix.
bool AArch64AsmParser::parseDirectiveReq(StringRef Name, SMLoc L) {
  Lex(); // Eat the '.req' token.
  SMLoc SRegLoc = getLoc();
  RegKind RegisterKind = RegKind::Scalar;
  MCRegister RegNum;
  // First attempt: plain scalar (x/w/...) register.
  OperandMatchResultTy ParseRes = tryParseScalarRegister(RegNum);

  if (ParseRes != MatchOperand_Success) {
    // Second attempt: NEON vector register, without a ".<type>" suffix.
    StringRef Kind;
    RegisterKind = RegKind::NeonVector;
    ParseRes = tryParseVectorRegister(RegNum, Kind, RegKind::NeonVector);

    if (ParseRes == MatchOperand_ParseFail)
      return true;

    if (ParseRes == MatchOperand_Success && !Kind.empty())
      return Error(SRegLoc, "vector register without type specifier expected");
  }

  if (ParseRes != MatchOperand_Success) {
    // Third attempt: SVE data vector (z) register.
    StringRef Kind;
    RegisterKind = RegKind::SVEDataVector;
    ParseRes =
        tryParseVectorRegister(RegNum, Kind, RegKind::SVEDataVector);

    if (ParseRes == MatchOperand_ParseFail)
      return true;

    if (ParseRes == MatchOperand_Success && !Kind.empty())
      return Error(SRegLoc,
                   "sve vector register without type specifier expected");
  }

  if (ParseRes != MatchOperand_Success) {
    // Fourth attempt: SVE predicate (p) register.
    StringRef Kind;
    RegisterKind = RegKind::SVEPredicateVector;
    ParseRes = tryParseVectorRegister(RegNum, Kind, RegKind::SVEPredicateVector);

    if (ParseRes == MatchOperand_ParseFail)
      return true;

    if (ParseRes == MatchOperand_Success && !Kind.empty())
      return Error(SRegLoc,
                   "sve predicate register without type specifier expected");
  }

  if (ParseRes != MatchOperand_Success)
    return Error(SRegLoc, "register name or alias expected");

  // Shouldn't be anything else.
  if (parseEOL())
    return true;

  // Record the alias; a re-definition to a *different* register is ignored
  // with a warning (the first definition wins).
  auto pair = std::make_pair(RegisterKind, (unsigned) RegNum);
  if (RegisterReqs.insert(std::make_pair(Name, pair)).first->second != pair)
    Warning(L, "ignoring redefinition of register alias '" + Name + "'");

  return false;
}
/// parseDirectiveUnreq
///  ::= .unreq registername
/// Removes a register alias previously created with '.req'.
bool AArch64AsmParser::parseDirectiveUnreq(SMLoc L) {
  if (getTok().isNot(AsmToken::Identifier))
    return TokError("unexpected input in .unreq directive.");
  // Lower-case the name for the lookup — presumably aliases are keyed
  // case-insensitively; erasing an unknown alias is simply a no-op.
  RegisterReqs.erase(getTok().getIdentifier().lower());
  Lex(); // Eat the identifier.
  return parseToken(AsmToken::EndOfStatement);
}
  6373. bool AArch64AsmParser::parseDirectiveCFINegateRAState() {
  6374. if (parseEOL())
  6375. return true;
  6376. getStreamer().emitCFINegateRAState();
  6377. return false;
  6378. }
  6379. /// parseDirectiveCFIBKeyFrame
  6380. /// ::= .cfi_b_key
  6381. bool AArch64AsmParser::parseDirectiveCFIBKeyFrame() {
  6382. if (parseEOL())
  6383. return true;
  6384. getStreamer().emitCFIBKeyFrame();
  6385. return false;
  6386. }
  6387. /// parseDirectiveCFIMTETaggedFrame
  6388. /// ::= .cfi_mte_tagged_frame
  6389. bool AArch64AsmParser::parseDirectiveCFIMTETaggedFrame() {
  6390. if (parseEOL())
  6391. return true;
  6392. getStreamer().emitCFIMTETaggedFrame();
  6393. return false;
  6394. }
  6395. /// parseDirectiveVariantPCS
  6396. /// ::= .variant_pcs symbolname
  6397. bool AArch64AsmParser::parseDirectiveVariantPCS(SMLoc L) {
  6398. StringRef Name;
  6399. if (getParser().parseIdentifier(Name))
  6400. return TokError("expected symbol name");
  6401. if (parseEOL())
  6402. return true;
  6403. getTargetStreamer().emitDirectiveVariantPCS(
  6404. getContext().getOrCreateSymbol(Name));
  6405. return false;
  6406. }
  6407. /// parseDirectiveSEHAllocStack
  6408. /// ::= .seh_stackalloc
  6409. bool AArch64AsmParser::parseDirectiveSEHAllocStack(SMLoc L) {
  6410. int64_t Size;
  6411. if (parseImmExpr(Size))
  6412. return true;
  6413. getTargetStreamer().emitARM64WinCFIAllocStack(Size);
  6414. return false;
  6415. }
/// parseDirectiveSEHPrologEnd
/// ::= .seh_endprologue
/// No operands; forwards the end-of-prologue marker to the target streamer.
bool AArch64AsmParser::parseDirectiveSEHPrologEnd(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIPrologEnd();
  return false;
}
  6422. /// parseDirectiveSEHSaveR19R20X
  6423. /// ::= .seh_save_r19r20_x
  6424. bool AArch64AsmParser::parseDirectiveSEHSaveR19R20X(SMLoc L) {
  6425. int64_t Offset;
  6426. if (parseImmExpr(Offset))
  6427. return true;
  6428. getTargetStreamer().emitARM64WinCFISaveR19R20X(Offset);
  6429. return false;
  6430. }
  6431. /// parseDirectiveSEHSaveFPLR
  6432. /// ::= .seh_save_fplr
  6433. bool AArch64AsmParser::parseDirectiveSEHSaveFPLR(SMLoc L) {
  6434. int64_t Offset;
  6435. if (parseImmExpr(Offset))
  6436. return true;
  6437. getTargetStreamer().emitARM64WinCFISaveFPLR(Offset);
  6438. return false;
  6439. }
  6440. /// parseDirectiveSEHSaveFPLRX
  6441. /// ::= .seh_save_fplr_x
  6442. bool AArch64AsmParser::parseDirectiveSEHSaveFPLRX(SMLoc L) {
  6443. int64_t Offset;
  6444. if (parseImmExpr(Offset))
  6445. return true;
  6446. getTargetStreamer().emitARM64WinCFISaveFPLRX(Offset);
  6447. return false;
  6448. }
  6449. /// parseDirectiveSEHSaveReg
  6450. /// ::= .seh_save_reg
  6451. bool AArch64AsmParser::parseDirectiveSEHSaveReg(SMLoc L) {
  6452. unsigned Reg;
  6453. int64_t Offset;
  6454. if (parseRegisterInRange(Reg, AArch64::X0, AArch64::X19, AArch64::LR) ||
  6455. parseComma() || parseImmExpr(Offset))
  6456. return true;
  6457. getTargetStreamer().emitARM64WinCFISaveReg(Reg, Offset);
  6458. return false;
  6459. }
  6460. /// parseDirectiveSEHSaveRegX
  6461. /// ::= .seh_save_reg_x
  6462. bool AArch64AsmParser::parseDirectiveSEHSaveRegX(SMLoc L) {
  6463. unsigned Reg;
  6464. int64_t Offset;
  6465. if (parseRegisterInRange(Reg, AArch64::X0, AArch64::X19, AArch64::LR) ||
  6466. parseComma() || parseImmExpr(Offset))
  6467. return true;
  6468. getTargetStreamer().emitARM64WinCFISaveRegX(Reg, Offset);
  6469. return false;
  6470. }
  6471. /// parseDirectiveSEHSaveRegP
  6472. /// ::= .seh_save_regp
  6473. bool AArch64AsmParser::parseDirectiveSEHSaveRegP(SMLoc L) {
  6474. unsigned Reg;
  6475. int64_t Offset;
  6476. if (parseRegisterInRange(Reg, AArch64::X0, AArch64::X19, AArch64::FP) ||
  6477. parseComma() || parseImmExpr(Offset))
  6478. return true;
  6479. getTargetStreamer().emitARM64WinCFISaveRegP(Reg, Offset);
  6480. return false;
  6481. }
  6482. /// parseDirectiveSEHSaveRegPX
  6483. /// ::= .seh_save_regp_x
  6484. bool AArch64AsmParser::parseDirectiveSEHSaveRegPX(SMLoc L) {
  6485. unsigned Reg;
  6486. int64_t Offset;
  6487. if (parseRegisterInRange(Reg, AArch64::X0, AArch64::X19, AArch64::FP) ||
  6488. parseComma() || parseImmExpr(Offset))
  6489. return true;
  6490. getTargetStreamer().emitARM64WinCFISaveRegPX(Reg, Offset);
  6491. return false;
  6492. }
/// parseDirectiveSEHSaveLRPair
/// ::= .seh_save_lrpair
/// Operands: "reg, offset". The register must be an even distance from x19
/// since lr is paired with every other register starting at x19.
bool AArch64AsmParser::parseDirectiveSEHSaveLRPair(SMLoc L) {
  unsigned Reg;
  int64_t Offset;
  // Point diagnostics at the operand, not the directive name.
  L = getLoc();
  if (parseRegisterInRange(Reg, AArch64::X0, AArch64::X19, AArch64::LR) ||
      parseComma() || parseImmExpr(Offset))
    return true;
  // Reg here is the numeric offset from x19 produced by
  // parseRegisterInRange, so only even values are valid pairs.
  if (check(((Reg - 19) % 2 != 0), L,
            "expected register with even offset from x19"))
    return true;
  getTargetStreamer().emitARM64WinCFISaveLRPair(Reg, Offset);
  return false;
}
  6508. /// parseDirectiveSEHSaveFReg
  6509. /// ::= .seh_save_freg
  6510. bool AArch64AsmParser::parseDirectiveSEHSaveFReg(SMLoc L) {
  6511. unsigned Reg;
  6512. int64_t Offset;
  6513. if (parseRegisterInRange(Reg, AArch64::D0, AArch64::D8, AArch64::D15) ||
  6514. parseComma() || parseImmExpr(Offset))
  6515. return true;
  6516. getTargetStreamer().emitARM64WinCFISaveFReg(Reg, Offset);
  6517. return false;
  6518. }
  6519. /// parseDirectiveSEHSaveFRegX
  6520. /// ::= .seh_save_freg_x
  6521. bool AArch64AsmParser::parseDirectiveSEHSaveFRegX(SMLoc L) {
  6522. unsigned Reg;
  6523. int64_t Offset;
  6524. if (parseRegisterInRange(Reg, AArch64::D0, AArch64::D8, AArch64::D15) ||
  6525. parseComma() || parseImmExpr(Offset))
  6526. return true;
  6527. getTargetStreamer().emitARM64WinCFISaveFRegX(Reg, Offset);
  6528. return false;
  6529. }
  6530. /// parseDirectiveSEHSaveFRegP
  6531. /// ::= .seh_save_fregp
  6532. bool AArch64AsmParser::parseDirectiveSEHSaveFRegP(SMLoc L) {
  6533. unsigned Reg;
  6534. int64_t Offset;
  6535. if (parseRegisterInRange(Reg, AArch64::D0, AArch64::D8, AArch64::D14) ||
  6536. parseComma() || parseImmExpr(Offset))
  6537. return true;
  6538. getTargetStreamer().emitARM64WinCFISaveFRegP(Reg, Offset);
  6539. return false;
  6540. }
  6541. /// parseDirectiveSEHSaveFRegPX
  6542. /// ::= .seh_save_fregp_x
  6543. bool AArch64AsmParser::parseDirectiveSEHSaveFRegPX(SMLoc L) {
  6544. unsigned Reg;
  6545. int64_t Offset;
  6546. if (parseRegisterInRange(Reg, AArch64::D0, AArch64::D8, AArch64::D14) ||
  6547. parseComma() || parseImmExpr(Offset))
  6548. return true;
  6549. getTargetStreamer().emitARM64WinCFISaveFRegPX(Reg, Offset);
  6550. return false;
  6551. }
/// parseDirectiveSEHSetFP
/// ::= .seh_set_fp
/// No operands; forwards the set-frame-pointer marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHSetFP(SMLoc L) {
  getTargetStreamer().emitARM64WinCFISetFP();
  return false;
}
  6558. /// parseDirectiveSEHAddFP
  6559. /// ::= .seh_add_fp
  6560. bool AArch64AsmParser::parseDirectiveSEHAddFP(SMLoc L) {
  6561. int64_t Size;
  6562. if (parseImmExpr(Size))
  6563. return true;
  6564. getTargetStreamer().emitARM64WinCFIAddFP(Size);
  6565. return false;
  6566. }
/// parseDirectiveSEHNop
/// ::= .seh_nop
/// No operands; forwards the unwind nop marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHNop(SMLoc L) {
  getTargetStreamer().emitARM64WinCFINop();
  return false;
}
/// parseDirectiveSEHSaveNext
/// ::= .seh_save_next
/// No operands; forwards the save-next marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHSaveNext(SMLoc L) {
  getTargetStreamer().emitARM64WinCFISaveNext();
  return false;
}
/// parseDirectiveSEHEpilogStart
/// ::= .seh_startepilogue
/// No operands; forwards the epilogue-start marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHEpilogStart(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIEpilogStart();
  return false;
}
/// parseDirectiveSEHEpilogEnd
/// ::= .seh_endepilogue
/// No operands; forwards the epilogue-end marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHEpilogEnd(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIEpilogEnd();
  return false;
}
/// parseDirectiveSEHTrapFrame
/// ::= .seh_trap_frame
/// No operands; forwards the trap-frame marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHTrapFrame(SMLoc L) {
  getTargetStreamer().emitARM64WinCFITrapFrame();
  return false;
}
/// parseDirectiveSEHMachineFrame
/// ::= .seh_pushframe
/// No operands; forwards the machine-frame marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHMachineFrame(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIMachineFrame();
  return false;
}
/// parseDirectiveSEHContext
/// ::= .seh_context
/// No operands; forwards the context marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHContext(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIContext();
  return false;
}
/// parseDirectiveSEHClearUnwoundToCall
/// ::= .seh_clear_unwound_to_call
/// No operands; forwards the clear-unwound-to-call marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHClearUnwoundToCall(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIClearUnwoundToCall();
  return false;
}
/// parseDirectiveSEHPACSignLR
/// ::= .seh_pac_sign_lr
/// No operands; forwards the PAC-sign-LR marker to the streamer.
bool AArch64AsmParser::parseDirectiveSEHPACSignLR(SMLoc L) {
  getTargetStreamer().emitARM64WinCFIPACSignLR();
  return false;
}
/// parseDirectiveSEHSaveAnyReg
/// ::= .seh_save_any_reg
/// ::= .seh_save_any_reg_p
/// ::= .seh_save_any_reg_x
/// ::= .seh_save_any_reg_px
/// Operands: "reg, offset". \p Paired selects the pair forms (_p/_px) and
/// \p Writeback the pre-indexed forms (_x/_px); the register class (x, d
/// or q) selects which streamer callback family is emitted.
bool AArch64AsmParser::parseDirectiveSEHSaveAnyReg(SMLoc L, bool Paired,
                                                   bool Writeback) {
  MCRegister Reg;
  SMLoc Start, End;
  int64_t Offset;
  if (check(parseRegister(Reg, Start, End), getLoc(), "expected register") ||
      parseComma() || parseImmExpr(Offset))
    return true;

  if (Reg == AArch64::FP || Reg == AArch64::LR ||
      (Reg >= AArch64::X0 && Reg <= AArch64::X28)) {
    // Integer registers: offset must be non-negative and 16-byte aligned
    // for paired/writeback forms, 8-byte aligned otherwise.
    if (Offset < 0 || Offset % (Paired || Writeback ? 16 : 8))
      return Error(L, "invalid save_any_reg offset");
    // Encode fp/lr as their architectural numbers (x29/x30).
    unsigned EncodedReg;
    if (Reg == AArch64::FP)
      EncodedReg = 29;
    else if (Reg == AArch64::LR)
      EncodedReg = 30;
    else
      EncodedReg = Reg - AArch64::X0;
    if (Paired) {
      // lr (x30) has no following register to pair with.
      if (Reg == AArch64::LR)
        return Error(Start, "lr cannot be paired with another register");
      if (Writeback)
        getTargetStreamer().emitARM64WinCFISaveAnyRegIPX(EncodedReg, Offset);
      else
        getTargetStreamer().emitARM64WinCFISaveAnyRegIP(EncodedReg, Offset);
    } else {
      if (Writeback)
        getTargetStreamer().emitARM64WinCFISaveAnyRegIX(EncodedReg, Offset);
      else
        getTargetStreamer().emitARM64WinCFISaveAnyRegI(EncodedReg, Offset);
    }
  } else if (Reg >= AArch64::D0 && Reg <= AArch64::D31) {
    // 64-bit FP registers: same alignment rules as the integer case.
    unsigned EncodedReg = Reg - AArch64::D0;
    if (Offset < 0 || Offset % (Paired || Writeback ? 16 : 8))
      return Error(L, "invalid save_any_reg offset");
    if (Paired) {
      // d31 has no following register to pair with.
      if (Reg == AArch64::D31)
        return Error(Start, "d31 cannot be paired with another register");
      if (Writeback)
        getTargetStreamer().emitARM64WinCFISaveAnyRegDPX(EncodedReg, Offset);
      else
        getTargetStreamer().emitARM64WinCFISaveAnyRegDP(EncodedReg, Offset);
    } else {
      if (Writeback)
        getTargetStreamer().emitARM64WinCFISaveAnyRegDX(EncodedReg, Offset);
      else
        getTargetStreamer().emitARM64WinCFISaveAnyRegD(EncodedReg, Offset);
    }
  } else if (Reg >= AArch64::Q0 && Reg <= AArch64::Q31) {
    // 128-bit vector registers: offset is always 16-byte aligned.
    unsigned EncodedReg = Reg - AArch64::Q0;
    if (Offset < 0 || Offset % 16)
      return Error(L, "invalid save_any_reg offset");
    if (Paired) {
      // q31 has no following register to pair with.
      if (Reg == AArch64::Q31)
        return Error(Start, "q31 cannot be paired with another register");
      if (Writeback)
        getTargetStreamer().emitARM64WinCFISaveAnyRegQPX(EncodedReg, Offset);
      else
        getTargetStreamer().emitARM64WinCFISaveAnyRegQP(EncodedReg, Offset);
    } else {
      if (Writeback)
        getTargetStreamer().emitARM64WinCFISaveAnyRegQX(EncodedReg, Offset);
      else
        getTargetStreamer().emitARM64WinCFISaveAnyRegQ(EncodedReg, Offset);
    }
  } else {
    return Error(Start, "save_any_reg register must be x, q or d register");
  }
  return false;
}
/// Decompose \p Expr into an optional AArch64 (ELF-style) modifier, an
/// optional Darwin symbol-ref variant, and a constant addend.
/// \returns true if the expression has the shape "symbol [+ addend]"
/// (or an ELF-modified constant), false if it cannot be classified.
/// On a true return, at most one of ELFRefKind/DarwinRefKind is set to a
/// non-default value.
bool
AArch64AsmParser::classifySymbolRef(const MCExpr *Expr,
                                    AArch64MCExpr::VariantKind &ELFRefKind,
                                    MCSymbolRefExpr::VariantKind &DarwinRefKind,
                                    int64_t &Addend) {
  ELFRefKind = AArch64MCExpr::VK_INVALID;
  DarwinRefKind = MCSymbolRefExpr::VK_None;
  Addend = 0;

  // Peel off an AArch64 modifier wrapper (e.g. :lo12:) if present.
  if (const AArch64MCExpr *AE = dyn_cast<AArch64MCExpr>(Expr)) {
    ELFRefKind = AE->getKind();
    Expr = AE->getSubExpr();
  }

  const MCSymbolRefExpr *SE = dyn_cast<MCSymbolRefExpr>(Expr);
  if (SE) {
    // It's a simple symbol reference with no addend.
    DarwinRefKind = SE->getKind();
    return true;
  }

  // Check that it looks like a symbol + an addend
  MCValue Res;
  bool Relocatable = Expr->evaluateAsRelocatable(Res, nullptr, nullptr);
  // Reject non-relocatable expressions and A-B symbol differences.
  if (!Relocatable || Res.getSymB())
    return false;

  // Treat expressions with an ELFRefKind (like ":abs_g1:3", or
  // ":abs_g1:x" where x is constant) as symbolic even if there is no symbol.
  if (!Res.getSymA() && ELFRefKind == AArch64MCExpr::VK_INVALID)
    return false;

  if (Res.getSymA())
    DarwinRefKind = Res.getSymA()->getKind();
  Addend = Res.getConstant();

  // It's some symbol reference + a constant addend, but really
  // shouldn't use both Darwin and ELF syntax.
  return ELFRefKind == AArch64MCExpr::VK_INVALID ||
         DarwinRefKind == MCSymbolRefExpr::VK_None;
}
  6732. /// Force static initialization.
  6733. extern "C" LLVM_EXTERNAL_VISIBILITY void LLVMInitializeAArch64AsmParser() {
  6734. RegisterMCAsmParser<AArch64AsmParser> X(getTheAArch64leTarget());
  6735. RegisterMCAsmParser<AArch64AsmParser> Y(getTheAArch64beTarget());
  6736. RegisterMCAsmParser<AArch64AsmParser> Z(getTheARM64Target());
  6737. RegisterMCAsmParser<AArch64AsmParser> W(getTheARM64_32Target());
  6738. RegisterMCAsmParser<AArch64AsmParser> V(getTheAArch64_32Target());
  6739. }
  6740. #define GET_REGISTER_MATCHER
  6741. #define GET_SUBTARGET_FEATURE_NAME
  6742. #define GET_MATCHER_IMPLEMENTATION
  6743. #define GET_MNEMONIC_SPELL_CHECKER
  6744. #include "AArch64GenAsmMatcher.inc"
  6745. // Define this matcher function after the auto-generated include so we
  6746. // have the match class enum definitions.
  6747. unsigned AArch64AsmParser::validateTargetOperandClass(MCParsedAsmOperand &AsmOp,
  6748. unsigned Kind) {
  6749. AArch64Operand &Op = static_cast<AArch64Operand &>(AsmOp);
  6750. // If the kind is a token for a literal immediate, check if our asm
  6751. // operand matches. This is for InstAliases which have a fixed-value
  6752. // immediate in the syntax.
  6753. int64_t ExpectedVal;
  6754. switch (Kind) {
  6755. default:
  6756. return Match_InvalidOperand;
  6757. case MCK__HASH_0:
  6758. ExpectedVal = 0;
  6759. break;
  6760. case MCK__HASH_1:
  6761. ExpectedVal = 1;
  6762. break;
  6763. case MCK__HASH_12:
  6764. ExpectedVal = 12;
  6765. break;
  6766. case MCK__HASH_16:
  6767. ExpectedVal = 16;
  6768. break;
  6769. case MCK__HASH_2:
  6770. ExpectedVal = 2;
  6771. break;
  6772. case MCK__HASH_24:
  6773. ExpectedVal = 24;
  6774. break;
  6775. case MCK__HASH_3:
  6776. ExpectedVal = 3;
  6777. break;
  6778. case MCK__HASH_32:
  6779. ExpectedVal = 32;
  6780. break;
  6781. case MCK__HASH_4:
  6782. ExpectedVal = 4;
  6783. break;
  6784. case MCK__HASH_48:
  6785. ExpectedVal = 48;
  6786. break;
  6787. case MCK__HASH_6:
  6788. ExpectedVal = 6;
  6789. break;
  6790. case MCK__HASH_64:
  6791. ExpectedVal = 64;
  6792. break;
  6793. case MCK__HASH_8:
  6794. ExpectedVal = 8;
  6795. break;
  6796. case MCK__HASH__MINUS_4:
  6797. ExpectedVal = -4;
  6798. break;
  6799. case MCK__HASH__MINUS_8:
  6800. ExpectedVal = -8;
  6801. break;
  6802. case MCK__HASH__MINUS_16:
  6803. ExpectedVal = -16;
  6804. break;
  6805. case MCK_MPR:
  6806. // If the Kind is a token for the MPR register class which has the "za"
  6807. // register (SME accumulator array), check if the asm is a literal "za"
  6808. // token. This is for the "smstart za" alias that defines the register
  6809. // as a literal token.
  6810. if (Op.isTokenEqual("za"))
  6811. return Match_Success;
  6812. return Match_InvalidOperand;
  6813. }
  6814. if (!Op.isImm())
  6815. return Match_InvalidOperand;
  6816. const MCConstantExpr *CE = dyn_cast<MCConstantExpr>(Op.getImm());
  6817. if (!CE)
  6818. return Match_InvalidOperand;
  6819. if (CE->getValue() == ExpectedVal)
  6820. return Match_Success;
  6821. return Match_InvalidOperand;
  6822. }
  6823. OperandMatchResultTy
  6824. AArch64AsmParser::tryParseGPRSeqPair(OperandVector &Operands) {
  6825. SMLoc S = getLoc();
  6826. if (getTok().isNot(AsmToken::Identifier)) {
  6827. Error(S, "expected register");
  6828. return MatchOperand_ParseFail;
  6829. }
  6830. MCRegister FirstReg;
  6831. OperandMatchResultTy Res = tryParseScalarRegister(FirstReg);
  6832. if (Res != MatchOperand_Success) {
  6833. Error(S, "expected first even register of a "
  6834. "consecutive same-size even/odd register pair");
  6835. return MatchOperand_ParseFail;
  6836. }
  6837. const MCRegisterClass &WRegClass =
  6838. AArch64MCRegisterClasses[AArch64::GPR32RegClassID];
  6839. const MCRegisterClass &XRegClass =
  6840. AArch64MCRegisterClasses[AArch64::GPR64RegClassID];
  6841. bool isXReg = XRegClass.contains(FirstReg),
  6842. isWReg = WRegClass.contains(FirstReg);
  6843. if (!isXReg && !isWReg) {
  6844. Error(S, "expected first even register of a "
  6845. "consecutive same-size even/odd register pair");
  6846. return MatchOperand_ParseFail;
  6847. }
  6848. const MCRegisterInfo *RI = getContext().getRegisterInfo();
  6849. unsigned FirstEncoding = RI->getEncodingValue(FirstReg);
  6850. if (FirstEncoding & 0x1) {
  6851. Error(S, "expected first even register of a "
  6852. "consecutive same-size even/odd register pair");
  6853. return MatchOperand_ParseFail;
  6854. }
  6855. if (getTok().isNot(AsmToken::Comma)) {
  6856. Error(getLoc(), "expected comma");
  6857. return MatchOperand_ParseFail;
  6858. }
  6859. // Eat the comma
  6860. Lex();
  6861. SMLoc E = getLoc();
  6862. MCRegister SecondReg;
  6863. Res = tryParseScalarRegister(SecondReg);
  6864. if (Res != MatchOperand_Success) {
  6865. Error(E, "expected second odd register of a "
  6866. "consecutive same-size even/odd register pair");
  6867. return MatchOperand_ParseFail;
  6868. }
  6869. if (RI->getEncodingValue(SecondReg) != FirstEncoding + 1 ||
  6870. (isXReg && !XRegClass.contains(SecondReg)) ||
  6871. (isWReg && !WRegClass.contains(SecondReg))) {
  6872. Error(E, "expected second odd register of a "
  6873. "consecutive same-size even/odd register pair");
  6874. return MatchOperand_ParseFail;
  6875. }
  6876. unsigned Pair = 0;
  6877. if (isXReg) {
  6878. Pair = RI->getMatchingSuperReg(FirstReg, AArch64::sube64,
  6879. &AArch64MCRegisterClasses[AArch64::XSeqPairsClassRegClassID]);
  6880. } else {
  6881. Pair = RI->getMatchingSuperReg(FirstReg, AArch64::sube32,
  6882. &AArch64MCRegisterClasses[AArch64::WSeqPairsClassRegClassID]);
  6883. }
  6884. Operands.push_back(AArch64Operand::CreateReg(Pair, RegKind::Scalar, S,
  6885. getLoc(), getContext()));
  6886. return MatchOperand_Success;
  6887. }
  6888. template <bool ParseShiftExtend, bool ParseSuffix>
  6889. OperandMatchResultTy
  6890. AArch64AsmParser::tryParseSVEDataVector(OperandVector &Operands) {
  6891. const SMLoc S = getLoc();
  6892. // Check for a SVE vector register specifier first.
  6893. MCRegister RegNum;
  6894. StringRef Kind;
  6895. OperandMatchResultTy Res =
  6896. tryParseVectorRegister(RegNum, Kind, RegKind::SVEDataVector);
  6897. if (Res != MatchOperand_Success)
  6898. return Res;
  6899. if (ParseSuffix && Kind.empty())
  6900. return MatchOperand_NoMatch;
  6901. const auto &KindRes = parseVectorKind(Kind, RegKind::SVEDataVector);
  6902. if (!KindRes)
  6903. return MatchOperand_NoMatch;
  6904. unsigned ElementWidth = KindRes->second;
  6905. // No shift/extend is the default.
  6906. if (!ParseShiftExtend || getTok().isNot(AsmToken::Comma)) {
  6907. Operands.push_back(AArch64Operand::CreateVectorReg(
  6908. RegNum, RegKind::SVEDataVector, ElementWidth, S, S, getContext()));
  6909. OperandMatchResultTy Res = tryParseVectorIndex(Operands);
  6910. if (Res == MatchOperand_ParseFail)
  6911. return MatchOperand_ParseFail;
  6912. return MatchOperand_Success;
  6913. }
  6914. // Eat the comma
  6915. Lex();
  6916. // Match the shift
  6917. SmallVector<std::unique_ptr<MCParsedAsmOperand>, 1> ExtOpnd;
  6918. Res = tryParseOptionalShiftExtend(ExtOpnd);
  6919. if (Res != MatchOperand_Success)
  6920. return Res;
  6921. auto Ext = static_cast<AArch64Operand *>(ExtOpnd.back().get());
  6922. Operands.push_back(AArch64Operand::CreateVectorReg(
  6923. RegNum, RegKind::SVEDataVector, ElementWidth, S, Ext->getEndLoc(),
  6924. getContext(), Ext->getShiftExtendType(), Ext->getShiftExtendAmount(),
  6925. Ext->hasShiftExtendAmount()));
  6926. return MatchOperand_Success;
  6927. }
  6928. OperandMatchResultTy
  6929. AArch64AsmParser::tryParseSVEPattern(OperandVector &Operands) {
  6930. MCAsmParser &Parser = getParser();
  6931. SMLoc SS = getLoc();
  6932. const AsmToken &TokE = getTok();
  6933. bool IsHash = TokE.is(AsmToken::Hash);
  6934. if (!IsHash && TokE.isNot(AsmToken::Identifier))
  6935. return MatchOperand_NoMatch;
  6936. int64_t Pattern;
  6937. if (IsHash) {
  6938. Lex(); // Eat hash
  6939. // Parse the immediate operand.
  6940. const MCExpr *ImmVal;
  6941. SS = getLoc();
  6942. if (Parser.parseExpression(ImmVal))
  6943. return MatchOperand_ParseFail;
  6944. auto *MCE = dyn_cast<MCConstantExpr>(ImmVal);
  6945. if (!MCE)
  6946. return MatchOperand_ParseFail;
  6947. Pattern = MCE->getValue();
  6948. } else {
  6949. // Parse the pattern
  6950. auto Pat = AArch64SVEPredPattern::lookupSVEPREDPATByName(TokE.getString());
  6951. if (!Pat)
  6952. return MatchOperand_NoMatch;
  6953. Lex();
  6954. Pattern = Pat->Encoding;
  6955. assert(Pattern >= 0 && Pattern < 32);
  6956. }
  6957. Operands.push_back(
  6958. AArch64Operand::CreateImm(MCConstantExpr::create(Pattern, getContext()),
  6959. SS, getLoc(), getContext()));
  6960. return MatchOperand_Success;
  6961. }
  6962. OperandMatchResultTy
  6963. AArch64AsmParser::tryParseSVEVecLenSpecifier(OperandVector &Operands) {
  6964. int64_t Pattern;
  6965. SMLoc SS = getLoc();
  6966. const AsmToken &TokE = getTok();
  6967. // Parse the pattern
  6968. auto Pat = AArch64SVEVecLenSpecifier::lookupSVEVECLENSPECIFIERByName(
  6969. TokE.getString());
  6970. if (!Pat)
  6971. return MatchOperand_NoMatch;
  6972. Lex();
  6973. Pattern = Pat->Encoding;
  6974. assert(Pattern >= 0 && Pattern <= 1 && "Pattern does not exist");
  6975. Operands.push_back(
  6976. AArch64Operand::CreateImm(MCConstantExpr::create(Pattern, getContext()),
  6977. SS, getLoc(), getContext()));
  6978. return MatchOperand_Success;
  6979. }
  6980. OperandMatchResultTy
  6981. AArch64AsmParser::tryParseGPR64x8(OperandVector &Operands) {
  6982. SMLoc SS = getLoc();
  6983. MCRegister XReg;
  6984. if (tryParseScalarRegister(XReg) != MatchOperand_Success)
  6985. return MatchOperand_NoMatch;
  6986. MCContext &ctx = getContext();
  6987. const MCRegisterInfo *RI = ctx.getRegisterInfo();
  6988. int X8Reg = RI->getMatchingSuperReg(
  6989. XReg, AArch64::x8sub_0,
  6990. &AArch64MCRegisterClasses[AArch64::GPR64x8ClassRegClassID]);
  6991. if (!X8Reg) {
  6992. Error(SS, "expected an even-numbered x-register in the range [x0,x22]");
  6993. return MatchOperand_ParseFail;
  6994. }
  6995. Operands.push_back(
  6996. AArch64Operand::CreateReg(X8Reg, RegKind::Scalar, SS, getLoc(), ctx));
  6997. return MatchOperand_Success;
  6998. }
  6999. OperandMatchResultTy
  7000. AArch64AsmParser::tryParseImmRange(OperandVector &Operands) {
  7001. SMLoc S = getLoc();
  7002. if (getTok().isNot(AsmToken::Integer))
  7003. return MatchOperand_NoMatch;
  7004. if (getLexer().peekTok().isNot(AsmToken::Colon))
  7005. return MatchOperand_NoMatch;
  7006. const MCExpr *ImmF;
  7007. if (getParser().parseExpression(ImmF))
  7008. return MatchOperand_NoMatch;
  7009. if (getTok().isNot(AsmToken::Colon))
  7010. return MatchOperand_NoMatch;
  7011. Lex(); // Eat ':'
  7012. if (getTok().isNot(AsmToken::Integer))
  7013. return MatchOperand_NoMatch;
  7014. SMLoc E = getTok().getLoc();
  7015. const MCExpr *ImmL;
  7016. if (getParser().parseExpression(ImmL))
  7017. return MatchOperand_NoMatch;
  7018. unsigned ImmFVal = dyn_cast<MCConstantExpr>(ImmF)->getValue();
  7019. unsigned ImmLVal = dyn_cast<MCConstantExpr>(ImmL)->getValue();
  7020. Operands.push_back(
  7021. AArch64Operand::CreateImmRange(ImmFVal, ImmLVal, S, E, getContext()));
  7022. return MatchOperand_Success;
  7023. }