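- # AES-NI x86-64 assembly, generated from OpenSSL's aesni-x86_64.pl perlasm
- # (Mach-O flavor: "_"-prefixed globals, "L$" local labels). The
- # .byte 102,15,56,220/221/222/223,... sequences encode
- # aesenc/aesenclast/aesdec/aesdeclast (66 0F 38 DC/DD/DE/DF), emitted as
- # raw opcodes for assemblers that predate AES-NI; ".byte 0xf3,0xc3" is
- # "rep ret". XMM registers holding key material are zeroed with pxor
- # before each return.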
- .text
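- # void aesni_encrypt(const unsigned char *in, unsigned char *out,
- #                    const AES_KEY *key)
- # Single-block encrypt: %rdi = in, %rsi = out, %rdx = key schedule
- # (round count at 240(%rdx)). XORs in round key 0, runs aesenc over the
- # middle round keys, and finishes with aesenclast.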
- .globl _aesni_encrypt
- .p2align 4
- _aesni_encrypt:
- movups (%rdi),%xmm2
- movl 240(%rdx),%eax
- movups (%rdx),%xmm0
- movups 16(%rdx),%xmm1
- leaq 32(%rdx),%rdx
- xorps %xmm0,%xmm2
- L$oop_enc1_1:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rdx),%xmm1
- leaq 16(%rdx),%rdx
- jnz L$oop_enc1_1
- .byte 102,15,56,221,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- .byte 0xf3,0xc3
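- # void aesni_decrypt(const unsigned char *in, unsigned char *out,
- #                    const AES_KEY *key)
- # Single-block decrypt; same shape as _aesni_encrypt but with
- # aesdec/aesdeclast against the decryption key schedule.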
- .globl _aesni_decrypt
- .p2align 4
- _aesni_decrypt:
- movups (%rdi),%xmm2
- movl 240(%rdx),%eax
- movups (%rdx),%xmm0
- movups 16(%rdx),%xmm1
- leaq 32(%rdx),%rdx
- xorps %xmm0,%xmm2
- L$oop_dec1_2:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rdx),%xmm1
- leaq 16(%rdx),%rdx
- jnz L$oop_dec1_2
- .byte 102,15,56,223,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- .byte 0xf3,0xc3
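- # Internal helper: encrypt 2 blocks in parallel, blocks in %xmm2-%xmm3,
- # %rcx = key schedule, %eax = round count from 240(key). shll $4 turns
- # the count into a byte offset; %rcx is advanced past the schedule and
- # the negated index in %rax walks the round keys upward until it hits
- # zero. Rounds are interleaved across blocks to hide aesenc latency.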
- .p2align 4
- _aesni_encrypt2:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$enc_loop2:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop2
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 0xf3,0xc3
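- # Two-block decryption counterpart of _aesni_encrypt2 (aesdec/aesdeclast).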
- .p2align 4
- _aesni_decrypt2:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$dec_loop2:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop2
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 0xf3,0xc3
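- # Three-block variant of the same pattern, blocks in %xmm2-%xmm4.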
- .p2align 4
- _aesni_encrypt3:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$enc_loop3:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop3
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 0xf3,0xc3
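- # Three-block decryption counterpart.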
- .p2align 4
- _aesni_decrypt3:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- L$dec_loop3:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop3
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 0xf3,0xc3
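- # Four-block variant, blocks in %xmm2-%xmm5. The stray
- # ".byte 0x0f,0x1f,0x00" is a 3-byte nop used only to align the loop.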
- .p2align 4
- _aesni_encrypt4:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- xorps %xmm0,%xmm5
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 0x0f,0x1f,0x00
- addq $16,%rax
- L$enc_loop4:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop4
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 0xf3,0xc3
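- # Four-block decryption counterpart.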
- .p2align 4
- _aesni_decrypt4:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- xorps %xmm0,%xmm5
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 0x0f,0x1f,0x00
- addq $16,%rax
- L$dec_loop4:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop4
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 0xf3,0xc3
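- # Six-block variant, blocks in %xmm2-%xmm7. The round-0 whitening and
- # first aesenc are interleaved in the preamble; L$enc_loop6 is also
- # entered via call from _aesni_ctr32_encrypt_blocks' 6x path.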
- .p2align 4
- _aesni_encrypt6:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- pxor %xmm0,%xmm3
- pxor %xmm0,%xmm4
- .byte 102,15,56,220,209
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,217
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- .byte 102,15,56,220,225
- pxor %xmm0,%xmm7
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$enc_loop6_enter
- .p2align 4
- L$enc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- L$enc_loop6_enter:
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 102,15,56,221,240
- .byte 102,15,56,221,248
- .byte 0xf3,0xc3
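- # Six-block decryption counterpart.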
- .p2align 4
- _aesni_decrypt6:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- pxor %xmm0,%xmm3
- pxor %xmm0,%xmm4
- .byte 102,15,56,222,209
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- .byte 102,15,56,222,225
- pxor %xmm0,%xmm7
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$dec_loop6_enter
- .p2align 4
- L$dec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- L$dec_loop6_enter:
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop6
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 102,15,56,223,240
- .byte 102,15,56,223,248
- .byte 0xf3,0xc3
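- # Eight-block variant (the widest), blocks in %xmm2-%xmm9;
- # L$enc_loop8_enter is also reached via call from the CTR32 tail code.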
- .p2align 4
- _aesni_encrypt8:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- pxor %xmm0,%xmm4
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,209
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,220,217
- pxor %xmm0,%xmm9
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$enc_loop8_inner
- .p2align 4
- L$enc_loop8:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- L$enc_loop8_inner:
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- L$enc_loop8_enter:
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$enc_loop8
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 102,15,56,221,240
- .byte 102,15,56,221,248
- .byte 102,68,15,56,221,192
- .byte 102,68,15,56,221,200
- .byte 0xf3,0xc3
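- # Eight-block decryption counterpart.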
- .p2align 4
- _aesni_decrypt8:
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- pxor %xmm0,%xmm4
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,222,209
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm9
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp L$dec_loop8_inner
- .p2align 4
- L$dec_loop8:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- L$dec_loop8_inner:
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- L$dec_loop8_enter:
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$dec_loop8
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 102,15,56,223,240
- .byte 102,15,56,223,248
- .byte 102,68,15,56,223,192
- .byte 102,68,15,56,223,200
- .byte 0xf3,0xc3
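- # void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
- #                        size_t length, const AES_KEY *key, int enc)
- # %rdi = in, %rsi = out, %rdx = length (truncated to a multiple of 16),
- # %rcx = key, %r8d = enc (nonzero = encrypt). Processes 8 blocks per
- # iteration, then dispatches on a 1-7 block tail.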
- .globl _aesni_ecb_encrypt
- .p2align 4
- _aesni_ecb_encrypt:
- andq $-16,%rdx
- jz L$ecb_ret
- movl 240(%rcx),%eax
- movups (%rcx),%xmm0
- movq %rcx,%r11
- movl %eax,%r10d
- testl %r8d,%r8d
- jz L$ecb_decrypt
- cmpq $0x80,%rdx
- jb L$ecb_enc_tail
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- movdqu 96(%rdi),%xmm8
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- subq $0x80,%rdx
- jmp L$ecb_enc_loop8_enter
- .p2align 4
- L$ecb_enc_loop8:
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movdqu (%rdi),%xmm2
- movl %r10d,%eax
- movups %xmm3,16(%rsi)
- movdqu 16(%rdi),%xmm3
- movups %xmm4,32(%rsi)
- movdqu 32(%rdi),%xmm4
- movups %xmm5,48(%rsi)
- movdqu 48(%rdi),%xmm5
- movups %xmm6,64(%rsi)
- movdqu 64(%rdi),%xmm6
- movups %xmm7,80(%rsi)
- movdqu 80(%rdi),%xmm7
- movups %xmm8,96(%rsi)
- movdqu 96(%rdi),%xmm8
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- L$ecb_enc_loop8_enter:
- call _aesni_encrypt8
- subq $0x80,%rdx
- jnc L$ecb_enc_loop8
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movups %xmm3,16(%rsi)
- movl %r10d,%eax
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- movups %xmm8,96(%rsi)
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- addq $0x80,%rdx
- jz L$ecb_ret
- L$ecb_enc_tail:
- movups (%rdi),%xmm2
- cmpq $0x20,%rdx
- jb L$ecb_enc_one
- movups 16(%rdi),%xmm3
- je L$ecb_enc_two
- movups 32(%rdi),%xmm4
- cmpq $0x40,%rdx
- jb L$ecb_enc_three
- movups 48(%rdi),%xmm5
- je L$ecb_enc_four
- movups 64(%rdi),%xmm6
- cmpq $0x60,%rdx
- jb L$ecb_enc_five
- movups 80(%rdi),%xmm7
- je L$ecb_enc_six
- movdqu 96(%rdi),%xmm8
- xorps %xmm9,%xmm9
- call _aesni_encrypt8
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- movups %xmm8,96(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_one:
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_3:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_3
- .byte 102,15,56,221,209
- movups %xmm2,(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_two:
- call _aesni_encrypt2
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_three:
- call _aesni_encrypt3
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_four:
- call _aesni_encrypt4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_five:
- xorps %xmm7,%xmm7
- call _aesni_encrypt6
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_enc_six:
- call _aesni_encrypt6
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- jmp L$ecb_ret
- .p2align 4
- L$ecb_decrypt:
- cmpq $0x80,%rdx
- jb L$ecb_dec_tail
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- movdqu 96(%rdi),%xmm8
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- subq $0x80,%rdx
- jmp L$ecb_dec_loop8_enter
- .p2align 4
- L$ecb_dec_loop8:
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movdqu (%rdi),%xmm2
- movl %r10d,%eax
- movups %xmm3,16(%rsi)
- movdqu 16(%rdi),%xmm3
- movups %xmm4,32(%rsi)
- movdqu 32(%rdi),%xmm4
- movups %xmm5,48(%rsi)
- movdqu 48(%rdi),%xmm5
- movups %xmm6,64(%rsi)
- movdqu 64(%rdi),%xmm6
- movups %xmm7,80(%rsi)
- movdqu 80(%rdi),%xmm7
- movups %xmm8,96(%rsi)
- movdqu 96(%rdi),%xmm8
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- L$ecb_dec_loop8_enter:
- call _aesni_decrypt8
- movups (%r11),%xmm0
- subq $0x80,%rdx
- jnc L$ecb_dec_loop8
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movq %r11,%rcx
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movl %r10d,%eax
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- movups %xmm8,96(%rsi)
- pxor %xmm8,%xmm8
- movups %xmm9,112(%rsi)
- pxor %xmm9,%xmm9
- leaq 128(%rsi),%rsi
- addq $0x80,%rdx
- jz L$ecb_ret
- L$ecb_dec_tail:
- movups (%rdi),%xmm2
- cmpq $0x20,%rdx
- jb L$ecb_dec_one
- movups 16(%rdi),%xmm3
- je L$ecb_dec_two
- movups 32(%rdi),%xmm4
- cmpq $0x40,%rdx
- jb L$ecb_dec_three
- movups 48(%rdi),%xmm5
- je L$ecb_dec_four
- movups 64(%rdi),%xmm6
- cmpq $0x60,%rdx
- jb L$ecb_dec_five
- movups 80(%rdi),%xmm7
- je L$ecb_dec_six
- movups 96(%rdi),%xmm8
- movups (%rcx),%xmm0
- xorps %xmm9,%xmm9
- call _aesni_decrypt8
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- movups %xmm8,96(%rsi)
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_one:
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_4:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_4
- .byte 102,15,56,223,209
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_two:
- call _aesni_decrypt2
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_three:
- call _aesni_decrypt3
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_four:
- call _aesni_decrypt4
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_five:
- xorps %xmm7,%xmm7
- call _aesni_decrypt6
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- jmp L$ecb_ret
- .p2align 4
- L$ecb_dec_six:
- call _aesni_decrypt6
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- L$ecb_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- .byte 0xf3,0xc3
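- # void aesni_ccm64_encrypt_blocks(const unsigned char *in, unsigned char *out,
- #                                 size_t blocks, const AES_KEY *key,
- #                                 const unsigned char ivec[16], unsigned char cmac[16])
- # CCM with a 64-bit counter: each iteration encrypts the counter block
- # (%xmm6, bumped via L$increment64 and byte-swapped with L$bswap_mask)
- # and the CBC-MAC accumulator (%xmm3, with the plaintext folded in) in
- # one two-way interleaved AES pass; the MAC is written back to (%r9).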
- .globl _aesni_ccm64_encrypt_blocks
- .p2align 4
- _aesni_ccm64_encrypt_blocks:
- movl 240(%rcx),%eax
- movdqu (%r8),%xmm6
- movdqa L$increment64(%rip),%xmm9
- movdqa L$bswap_mask(%rip),%xmm7
- shll $4,%eax
- movl $16,%r10d
- leaq 0(%rcx),%r11
- movdqu (%r9),%xmm3
- movdqa %xmm6,%xmm2
- leaq 32(%rcx,%rax,1),%rcx
- .byte 102,15,56,0,247
- subq %rax,%r10
- jmp L$ccm64_enc_outer
- .p2align 4
- L$ccm64_enc_outer:
- movups (%r11),%xmm0
- movq %r10,%rax
- movups (%rdi),%xmm8
- xorps %xmm0,%xmm2
- movups 16(%r11),%xmm1
- xorps %xmm8,%xmm0
- xorps %xmm0,%xmm3
- movups 32(%r11),%xmm0
- L$ccm64_enc2_loop:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ccm64_enc2_loop
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- paddq %xmm9,%xmm6
- decq %rdx
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- leaq 16(%rdi),%rdi
- xorps %xmm2,%xmm8
- movdqa %xmm6,%xmm2
- movups %xmm8,(%rsi)
- .byte 102,15,56,0,215
- leaq 16(%rsi),%rsi
- jnz L$ccm64_enc_outer
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- movups %xmm3,(%r9)
- pxor %xmm3,%xmm3
- pxor %xmm8,%xmm8
- pxor %xmm6,%xmm6
- .byte 0xf3,0xc3
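- # void aesni_ccm64_decrypt_blocks(...) - same signature as the encrypt
- # variant. Decrypts by encrypting counter blocks and XORing to recover
- # plaintext, then runs the recovered plaintext through the CBC-MAC; a
- # final single-block encryption (L$ccm64_dec_break) closes out the MAC.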
- .globl _aesni_ccm64_decrypt_blocks
- .p2align 4
- _aesni_ccm64_decrypt_blocks:
- movl 240(%rcx),%eax
- movups (%r8),%xmm6
- movdqu (%r9),%xmm3
- movdqa L$increment64(%rip),%xmm9
- movdqa L$bswap_mask(%rip),%xmm7
- movaps %xmm6,%xmm2
- movl %eax,%r10d
- movq %rcx,%r11
- .byte 102,15,56,0,247
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_5:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_5
- .byte 102,15,56,221,209
- shll $4,%r10d
- movl $16,%eax
- movups (%rdi),%xmm8
- paddq %xmm9,%xmm6
- leaq 16(%rdi),%rdi
- subq %r10,%rax
- leaq 32(%r11,%r10,1),%rcx
- movq %rax,%r10
- jmp L$ccm64_dec_outer
- .p2align 4
- L$ccm64_dec_outer:
- xorps %xmm2,%xmm8
- movdqa %xmm6,%xmm2
- movups %xmm8,(%rsi)
- leaq 16(%rsi),%rsi
- .byte 102,15,56,0,215
- subq $1,%rdx
- jz L$ccm64_dec_break
- movups (%r11),%xmm0
- movq %r10,%rax
- movups 16(%r11),%xmm1
- xorps %xmm0,%xmm8
- xorps %xmm0,%xmm2
- xorps %xmm8,%xmm3
- movups 32(%r11),%xmm0
- jmp L$ccm64_dec2_loop
- .p2align 4
- L$ccm64_dec2_loop:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ccm64_dec2_loop
- movups (%rdi),%xmm8
- paddq %xmm9,%xmm6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- leaq 16(%rdi),%rdi
- jmp L$ccm64_dec_outer
- .p2align 4
- L$ccm64_dec_break:
- movl 240(%r11),%eax
- movups (%r11),%xmm0
- movups 16(%r11),%xmm1
- xorps %xmm0,%xmm8
- leaq 32(%r11),%r11
- xorps %xmm8,%xmm3
- L$oop_enc1_6:
- .byte 102,15,56,220,217
- decl %eax
- movups (%r11),%xmm1
- leaq 16(%r11),%r11
- jnz L$oop_enc1_6
- .byte 102,15,56,221,217
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- movups %xmm3,(%r9)
- pxor %xmm3,%xmm3
- pxor %xmm8,%xmm8
- pxor %xmm6,%xmm6
- .byte 0xf3,0xc3
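- # void aesni_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
- #                                 size_t blocks, const AES_KEY *key,
- #                                 const unsigned char ivec[16])
- # CTR mode with a 32-bit big-endian counter in the IV's last dword.
- # A single block is special-cased; the bulk path below keeps up to
- # eight counter blocks in flight.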
- .globl _aesni_ctr32_encrypt_blocks
- .p2align 4
- _aesni_ctr32_encrypt_blocks:
- cmpq $1,%rdx
- jne L$ctr32_bulk
- movups (%r8),%xmm2
- movups (%rdi),%xmm3
- movl 240(%rcx),%edx
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_7:
- .byte 102,15,56,220,209
- decl %edx
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_7
- .byte 102,15,56,221,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- xorps %xmm3,%xmm2
- pxor %xmm3,%xmm3
- movups %xmm2,(%rsi)
- xorps %xmm2,%xmm2
- jmp L$ctr32_epilogue
- .p2align 4
- L$ctr32_bulk:
- leaq (%rsp),%r11
- pushq %rbp
- subq $128,%rsp
- andq $-16,%rsp
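- # The eight counter blocks are pre-XORed with round key 0 and parked at
- # 0-112(%rsp); only their last dword differs, so successors are built by
- # byte-swapping the incremented counter, XORing the last word of round
- # key 0 (kept in %ebp), and patching dword 3 (pinsrd / movl ...+12(%rsp)).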
- movdqu (%r8),%xmm2
- movdqu (%rcx),%xmm0
- movl 12(%r8),%r8d
- pxor %xmm0,%xmm2
- movl 12(%rcx),%ebp
- movdqa %xmm2,0(%rsp)
- bswapl %r8d
- movdqa %xmm2,%xmm3
- movdqa %xmm2,%xmm4
- movdqa %xmm2,%xmm5
- movdqa %xmm2,64(%rsp)
- movdqa %xmm2,80(%rsp)
- movdqa %xmm2,96(%rsp)
- movq %rdx,%r10
- movdqa %xmm2,112(%rsp)
- leaq 1(%r8),%rax
- leaq 2(%r8),%rdx
- bswapl %eax
- bswapl %edx
- xorl %ebp,%eax
- xorl %ebp,%edx
- .byte 102,15,58,34,216,3
- leaq 3(%r8),%rax
- movdqa %xmm3,16(%rsp)
- .byte 102,15,58,34,226,3
- bswapl %eax
- movq %r10,%rdx
- leaq 4(%r8),%r10
- movdqa %xmm4,32(%rsp)
- xorl %ebp,%eax
- bswapl %r10d
- .byte 102,15,58,34,232,3
- xorl %ebp,%r10d
- movdqa %xmm5,48(%rsp)
- leaq 5(%r8),%r9
- movl %r10d,64+12(%rsp)
- bswapl %r9d
- leaq 6(%r8),%r10
- movl 240(%rcx),%eax
- xorl %ebp,%r9d
- bswapl %r10d
- movl %r9d,80+12(%rsp)
- xorl %ebp,%r10d
- leaq 7(%r8),%r9
- movl %r10d,96+12(%rsp)
- bswapl %r9d
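- # 71303168 = CPUID ECX bits for MOVBE|XSAVE; if MOVBE is present without
- # XSAVE (masked value 4194304), take the 6x path, which stores the
- # byte-swapped counters straight to the stack with movbe (the
- # .byte 0x0f,0x38,0xf1,... sequences).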
- movl _OPENSSL_ia32cap_P+4(%rip),%r10d
- xorl %ebp,%r9d
- andl $71303168,%r10d
- movl %r9d,112+12(%rsp)
- movups 16(%rcx),%xmm1
- movdqa 64(%rsp),%xmm6
- movdqa 80(%rsp),%xmm7
- cmpq $8,%rdx
- jb L$ctr32_tail
- subq $6,%rdx
- cmpl $4194304,%r10d
- je L$ctr32_6x
- leaq 128(%rcx),%rcx
- subq $2,%rdx
- jmp L$ctr32_loop8
- .p2align 4
- L$ctr32_6x:
- shll $4,%eax
- movl $48,%r10d
- bswapl %ebp
- leaq 32(%rcx,%rax,1),%rcx
- subq %rax,%r10
- jmp L$ctr32_loop6
- .p2align 4
- L$ctr32_loop6:
- addl $6,%r8d
- movups -48(%rcx,%r10,1),%xmm0
- .byte 102,15,56,220,209
- movl %r8d,%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,217
- .byte 0x0f,0x38,0xf1,0x44,0x24,12
- leal 1(%r8),%eax
- .byte 102,15,56,220,225
- xorl %ebp,%eax
- .byte 0x0f,0x38,0xf1,0x44,0x24,28
- .byte 102,15,56,220,233
- leal 2(%r8),%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,241
- .byte 0x0f,0x38,0xf1,0x44,0x24,44
- leal 3(%r8),%eax
- .byte 102,15,56,220,249
- movups -32(%rcx,%r10,1),%xmm1
- xorl %ebp,%eax
- .byte 102,15,56,220,208
- .byte 0x0f,0x38,0xf1,0x44,0x24,60
- leal 4(%r8),%eax
- .byte 102,15,56,220,216
- xorl %ebp,%eax
- .byte 0x0f,0x38,0xf1,0x44,0x24,76
- .byte 102,15,56,220,224
- leal 5(%r8),%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,232
- .byte 0x0f,0x38,0xf1,0x44,0x24,92
- movq %r10,%rax
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%r10,1),%xmm0
- call L$enc_loop6
- movdqu (%rdi),%xmm8
- movdqu 16(%rdi),%xmm9
- movdqu 32(%rdi),%xmm10
- movdqu 48(%rdi),%xmm11
- movdqu 64(%rdi),%xmm12
- movdqu 80(%rdi),%xmm13
- leaq 96(%rdi),%rdi
- movups -64(%rcx,%r10,1),%xmm1
- pxor %xmm2,%xmm8
- movaps 0(%rsp),%xmm2
- pxor %xmm3,%xmm9
- movaps 16(%rsp),%xmm3
- pxor %xmm4,%xmm10
- movaps 32(%rsp),%xmm4
- pxor %xmm5,%xmm11
- movaps 48(%rsp),%xmm5
- pxor %xmm6,%xmm12
- movaps 64(%rsp),%xmm6
- pxor %xmm7,%xmm13
- movaps 80(%rsp),%xmm7
- movdqu %xmm8,(%rsi)
- movdqu %xmm9,16(%rsi)
- movdqu %xmm10,32(%rsi)
- movdqu %xmm11,48(%rsi)
- movdqu %xmm12,64(%rsi)
- movdqu %xmm13,80(%rsi)
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc L$ctr32_loop6
- addq $6,%rdx
- jz L$ctr32_done
- leal -48(%r10),%eax
- leaq -80(%rcx,%r10,1),%rcx
- negl %eax
- shrl $4,%eax
- jmp L$ctr32_tail
- .p2align 5
- L$ctr32_loop8:
- addl $8,%r8d
- movdqa 96(%rsp),%xmm8
- .byte 102,15,56,220,209
- movl %r8d,%r9d
- movdqa 112(%rsp),%xmm9
- .byte 102,15,56,220,217
- bswapl %r9d
- movups 32-128(%rcx),%xmm0
- .byte 102,15,56,220,225
- xorl %ebp,%r9d
- nop
- .byte 102,15,56,220,233
- movl %r9d,0+12(%rsp)
- leaq 1(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 48-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,16+12(%rsp)
- leaq 2(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 64-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,32+12(%rsp)
- leaq 3(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 80-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,48+12(%rsp)
- leaq 4(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 96-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,64+12(%rsp)
- leaq 5(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 112-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,80+12(%rsp)
- leaq 6(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 128-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,96+12(%rsp)
- leaq 7(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 144-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- xorl %ebp,%r9d
- movdqu 0(%rdi),%xmm10
- .byte 102,15,56,220,232
- movl %r9d,112+12(%rsp)
- cmpl $11,%eax
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 160-128(%rcx),%xmm0
- jb L$ctr32_enc_done
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 176-128(%rcx),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 192-128(%rcx),%xmm0
- je L$ctr32_enc_done
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 208-128(%rcx),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 224-128(%rcx),%xmm0
- jmp L$ctr32_enc_done
- .p2align 4
- L$ctr32_enc_done:
- movdqu 16(%rdi),%xmm11
- pxor %xmm0,%xmm10
- movdqu 32(%rdi),%xmm12
- pxor %xmm0,%xmm11
- movdqu 48(%rdi),%xmm13
- pxor %xmm0,%xmm12
- movdqu 64(%rdi),%xmm14
- pxor %xmm0,%xmm13
- movdqu 80(%rdi),%xmm15
- pxor %xmm0,%xmm14
- pxor %xmm0,%xmm15
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movdqu 96(%rdi),%xmm1
- leaq 128(%rdi),%rdi
- .byte 102,65,15,56,221,210
- pxor %xmm0,%xmm1
- movdqu 112-128(%rdi),%xmm10
- .byte 102,65,15,56,221,219
- pxor %xmm0,%xmm10
- movdqa 0(%rsp),%xmm11
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- movdqa 16(%rsp),%xmm12
- movdqa 32(%rsp),%xmm13
- .byte 102,65,15,56,221,246
- .byte 102,65,15,56,221,255
- movdqa 48(%rsp),%xmm14
- movdqa 64(%rsp),%xmm15
- .byte 102,68,15,56,221,193
- movdqa 80(%rsp),%xmm0
- movups 16-128(%rcx),%xmm1
- .byte 102,69,15,56,221,202
- movups %xmm2,(%rsi)
- movdqa %xmm11,%xmm2
- movups %xmm3,16(%rsi)
- movdqa %xmm12,%xmm3
- movups %xmm4,32(%rsi)
- movdqa %xmm13,%xmm4
- movups %xmm5,48(%rsi)
- movdqa %xmm14,%xmm5
- movups %xmm6,64(%rsi)
- movdqa %xmm15,%xmm6
- movups %xmm7,80(%rsi)
- movdqa %xmm0,%xmm7
- movups %xmm8,96(%rsi)
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- subq $8,%rdx
- jnc L$ctr32_loop8
- addq $8,%rdx
- jz L$ctr32_done
- leaq -128(%rcx),%rcx
- L$ctr32_tail:
- leaq 16(%rcx),%rcx
- cmpq $4,%rdx
- jb L$ctr32_loop3
- je L$ctr32_loop4
- shll $4,%eax
- movdqa 96(%rsp),%xmm8
- pxor %xmm9,%xmm9
- movups 16(%rcx),%xmm0
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- leaq 32-16(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,225
- addq $16,%rax
- movups (%rdi),%xmm10
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- movups 16(%rdi),%xmm11
- movups 32(%rdi),%xmm12
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- call L$enc_loop8_enter
- movdqu 48(%rdi),%xmm13
- pxor %xmm10,%xmm2
- movdqu 64(%rdi),%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm10,%xmm6
- movdqu %xmm5,48(%rsi)
- movdqu %xmm6,64(%rsi)
- cmpq $6,%rdx
- jb L$ctr32_done
- movups 80(%rdi),%xmm11
- xorps %xmm11,%xmm7
- movups %xmm7,80(%rsi)
- je L$ctr32_done
- movups 96(%rdi),%xmm12
- xorps %xmm12,%xmm8
- movups %xmm8,96(%rsi)
- jmp L$ctr32_done
- .p2align 5
- L$ctr32_loop4:
- .byte 102,15,56,220,209
- leaq 16(%rcx),%rcx
- decl %eax
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx),%xmm1
- jnz L$ctr32_loop4
- .byte 102,15,56,221,209
- .byte 102,15,56,221,217
- movups (%rdi),%xmm10
- movups 16(%rdi),%xmm11
- .byte 102,15,56,221,225
- .byte 102,15,56,221,233
- movups 32(%rdi),%xmm12
- movups 48(%rdi),%xmm13
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- xorps %xmm11,%xmm3
- movups %xmm3,16(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm4,32(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm5,48(%rsi)
- jmp L$ctr32_done
- .p2align 5
- L$ctr32_loop3:
- .byte 102,15,56,220,209
- leaq 16(%rcx),%rcx
- decl %eax
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- movups (%rcx),%xmm1
- jnz L$ctr32_loop3
- .byte 102,15,56,221,209
- .byte 102,15,56,221,217
- .byte 102,15,56,221,225
- movups (%rdi),%xmm10
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- cmpq $2,%rdx
- jb L$ctr32_done
- movups 16(%rdi),%xmm11
- xorps %xmm11,%xmm3
- movups %xmm3,16(%rsi)
- je L$ctr32_done
- movups 32(%rdi),%xmm12
- xorps %xmm12,%xmm4
- movups %xmm4,32(%rsi)
- L$ctr32_done:
- xorps %xmm0,%xmm0
- xorl %ebp,%ebp
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- movaps %xmm0,112(%rsp)
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- leaq (%r11),%rsp
- L$ctr32_epilogue:
- .byte 0xf3,0xc3
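- # void aesni_xts_encrypt(const unsigned char *in, unsigned char *out,
- #                        size_t length, const AES_KEY *key1,
- #                        const AES_KEY *key2, const unsigned char iv[16])
- # XTS: the IV (%r9) is first encrypted with key2 (%r8) to form the
- # initial tweak; data is then processed with key1 (%rcx), six blocks per
- # grand-loop iteration, with ciphertext stealing for a partial tail.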
- .globl _aesni_xts_encrypt
- .p2align 4
- _aesni_xts_encrypt:
- leaq (%rsp),%r11
- pushq %rbp
- subq $112,%rsp
- andq $-16,%rsp
- movups (%r9),%xmm2
- movl 240(%r8),%eax
- movl 240(%rcx),%r10d
- movups (%r8),%xmm0
- movups 16(%r8),%xmm1
- leaq 32(%r8),%r8
- xorps %xmm0,%xmm2
- L$oop_enc1_8:
- .byte 102,15,56,220,209
- decl %eax
- movups (%r8),%xmm1
- leaq 16(%r8),%r8
- jnz L$oop_enc1_8
- .byte 102,15,56,221,209
- movups (%rcx),%xmm0
- movq %rcx,%rbp
- movl %r10d,%eax
- shll $4,%r10d
- movq %rdx,%r9
- andq $-16,%rdx
- movups 16(%rcx,%r10,1),%xmm1
- movdqa L$xts_magic(%rip),%xmm8
- movdqa %xmm2,%xmm15
- pshufd $0x5f,%xmm2,%xmm9
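- # Derive tweaks for blocks 1-5 (%xmm10-%xmm14): each step multiplies the
- # running tweak (%xmm15) by x in GF(2^128); paddq doubles it, and the
- # psrad/pand pair with L$xts_magic folds the carry back in (the 0x87
- # reduction). Every tweak is also pre-XORed with round key 0 (%xmm0) so
- # one pxor per block applies both the tweak and the whitening key.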
- pxor %xmm0,%xmm1
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm10
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm10
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm11
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm11
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm12
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm12
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm13
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm13
- pxor %xmm14,%xmm15
- movdqa %xmm15,%xmm14
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm9
- pxor %xmm0,%xmm14
- pxor %xmm9,%xmm15
- movaps %xmm1,96(%rsp)
- subq $96,%rdx
- jc L$xts_enc_short
- movl $16+96,%eax
- leaq 32(%rbp,%r10,1),%rcx
- subq %r10,%rax
- movups 16(%rbp),%xmm1
- movq %rax,%r10
- leaq L$xts_magic(%rip),%r8
- jmp L$xts_enc_grandloop
- .p2align 5
- L$xts_enc_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqa %xmm0,%xmm8
- movdqu 16(%rdi),%xmm3
- pxor %xmm10,%xmm2
- movdqu 32(%rdi),%xmm4
- pxor %xmm11,%xmm3
- .byte 102,15,56,220,209
- movdqu 48(%rdi),%xmm5
- pxor %xmm12,%xmm4
- .byte 102,15,56,220,217
- movdqu 64(%rdi),%xmm6
- pxor %xmm13,%xmm5
- .byte 102,15,56,220,225
- movdqu 80(%rdi),%xmm7
- pxor %xmm15,%xmm8
- movdqa 96(%rsp),%xmm9
- pxor %xmm14,%xmm6
- .byte 102,15,56,220,233
- movups 32(%rbp),%xmm0
- leaq 96(%rdi),%rdi
- pxor %xmm8,%xmm7
- pxor %xmm9,%xmm10
- .byte 102,15,56,220,241
- pxor %xmm9,%xmm11
- movdqa %xmm10,0(%rsp)
- .byte 102,15,56,220,249
- movups 48(%rbp),%xmm1
- pxor %xmm9,%xmm12
- .byte 102,15,56,220,208
- pxor %xmm9,%xmm13
- movdqa %xmm11,16(%rsp)
- .byte 102,15,56,220,216
- pxor %xmm9,%xmm14
- movdqa %xmm12,32(%rsp)
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- pxor %xmm9,%xmm8
- movdqa %xmm14,64(%rsp)
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups 64(%rbp),%xmm0
- movdqa %xmm8,80(%rsp)
- pshufd $0x5f,%xmm15,%xmm9
- jmp L$xts_enc_loop6
- .p2align 5
- L$xts_enc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups -64(%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -80(%rcx,%rax,1),%xmm0
- jnz L$xts_enc_loop6
- movdqa (%r8),%xmm8
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,209
- paddq %xmm15,%xmm15
- psrad $31,%xmm14
- .byte 102,15,56,220,217
- pand %xmm8,%xmm14
- movups (%rbp),%xmm10
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- pxor %xmm14,%xmm15
- movaps %xmm10,%xmm11
- .byte 102,15,56,220,249
- movups -64(%rcx),%xmm1
- movdqa %xmm9,%xmm14
- .byte 102,15,56,220,208
- paddd %xmm9,%xmm9
- pxor %xmm15,%xmm10
- .byte 102,15,56,220,216
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- pand %xmm8,%xmm14
- movaps %xmm11,%xmm12
- .byte 102,15,56,220,240
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- .byte 102,15,56,220,248
- movups -48(%rcx),%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,209
- pxor %xmm15,%xmm11
- psrad $31,%xmm14
- .byte 102,15,56,220,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movdqa %xmm13,48(%rsp)
- pxor %xmm14,%xmm15
- .byte 102,15,56,220,241
- movaps %xmm12,%xmm13
- movdqa %xmm9,%xmm14
- .byte 102,15,56,220,249
- movups -32(%rcx),%xmm1
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,208
- pxor %xmm15,%xmm12
- psrad $31,%xmm14
- .byte 102,15,56,220,216
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- pxor %xmm14,%xmm15
- movaps %xmm13,%xmm14
- .byte 102,15,56,220,248
- movdqa %xmm9,%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,209
- pxor %xmm15,%xmm13
- psrad $31,%xmm0
- .byte 102,15,56,220,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm0
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- pxor %xmm0,%xmm15
- movups (%rbp),%xmm0
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups 16(%rbp),%xmm1
- pxor %xmm15,%xmm14
- .byte 102,15,56,221,84,36,0
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- .byte 102,15,56,221,92,36,16
- .byte 102,15,56,221,100,36,32
- pand %xmm8,%xmm9
- movq %r10,%rax
- .byte 102,15,56,221,108,36,48
- .byte 102,15,56,221,116,36,64
- .byte 102,15,56,221,124,36,80
- pxor %xmm9,%xmm15
- leaq 96(%rsi),%rsi
- movups %xmm2,-96(%rsi)
- movups %xmm3,-80(%rsi)
- movups %xmm4,-64(%rsi)
- movups %xmm5,-48(%rsi)
- movups %xmm6,-32(%rsi)
- movups %xmm7,-16(%rsi)
- subq $96,%rdx
- jnc L$xts_enc_grandloop
- movl $16+96,%eax
- subl %r10d,%eax
- movq %rbp,%rcx
- shrl $4,%eax
- L$xts_enc_short:
- movl %eax,%r10d
- pxor %xmm0,%xmm10
- addq $96,%rdx
- jz L$xts_enc_done
- pxor %xmm0,%xmm11
- cmpq $0x20,%rdx
- jb L$xts_enc_one
- pxor %xmm0,%xmm12
- je L$xts_enc_two
- pxor %xmm0,%xmm13
- cmpq $0x40,%rdx
- jb L$xts_enc_three
- pxor %xmm0,%xmm14
- je L$xts_enc_four
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- pxor %xmm10,%xmm2
- movdqu 48(%rdi),%xmm5
- pxor %xmm11,%xmm3
- movdqu 64(%rdi),%xmm6
- leaq 80(%rdi),%rdi
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm6
- pxor %xmm7,%xmm7
- call _aesni_encrypt6
- xorps %xmm10,%xmm2
- movdqa %xmm15,%xmm10
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- xorps %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- xorps %xmm14,%xmm6
- movdqu %xmm4,32(%rsi)
- movdqu %xmm5,48(%rsi)
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- jmp L$xts_enc_done
- .p2align 4
- L$xts_enc_one:
- movups (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_9:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_9
- .byte 102,15,56,221,209
- xorps %xmm10,%xmm2
- movdqa %xmm11,%xmm10
- movups %xmm2,(%rsi)
- leaq 16(%rsi),%rsi
- jmp L$xts_enc_done
- .p2align 4
- L$xts_enc_two:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- leaq 32(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- call _aesni_encrypt2
- xorps %xmm10,%xmm2
- movdqa %xmm12,%xmm10
- xorps %xmm11,%xmm3
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- leaq 32(%rsi),%rsi
- jmp L$xts_enc_done
- .p2align 4
- L$xts_enc_three:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- leaq 48(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- call _aesni_encrypt3
- xorps %xmm10,%xmm2
- movdqa %xmm13,%xmm10
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- leaq 48(%rsi),%rsi
- jmp L$xts_enc_done
- .p2align 4
- L$xts_enc_four:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- xorps %xmm10,%xmm2
- movups 48(%rdi),%xmm5
- leaq 64(%rdi),%rdi
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- xorps %xmm13,%xmm5
- call _aesni_encrypt4
- pxor %xmm10,%xmm2
- movdqa %xmm14,%xmm10
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- movdqu %xmm4,32(%rsi)
- movdqu %xmm5,48(%rsi)
- leaq 64(%rsi),%rsi
- jmp L$xts_enc_done
- .p2align 4
- L$xts_enc_done:
- andq $15,%r9
- jz L$xts_enc_ret
- movq %r9,%rdx
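- # Ciphertext stealing for a partial final block: swap the tail bytes of
- # the last full ciphertext block with the remaining plaintext bytes,
- # then re-encrypt that block under the next tweak (%xmm10).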
- L$xts_enc_steal:
- movzbl (%rdi),%eax
- movzbl -16(%rsi),%ecx
- leaq 1(%rdi),%rdi
- movb %al,-16(%rsi)
- movb %cl,0(%rsi)
- leaq 1(%rsi),%rsi
- subq $1,%rdx
- jnz L$xts_enc_steal
- subq %r9,%rsi
- movq %rbp,%rcx
- movl %r10d,%eax
- movups -16(%rsi),%xmm2
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_enc1_10:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_10
- .byte 102,15,56,221,209
- xorps %xmm10,%xmm2
- movups %xmm2,-16(%rsi)
- L$xts_enc_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- leaq (%r11),%rsp
- L$xts_enc_epilogue:
- .byte 0xf3,0xc3  # rep ret (the two-byte return used throughout this file)
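- # _aesni_xts_decrypt -- XTS-AES decryption. Judging by the System V
- # argument registers used below (rdi=in, rsi=out, rdx=len, rcx=key1,
- # r8=key2/tweak key, r9=iv[16]), the C prototype is presumably:
- #   void aesni_xts_decrypt(const unsigned char *inp, unsigned char *out,
- #                          size_t len, const AES_KEY *key1,
- #                          const AES_KEY *key2, const unsigned char iv[16]);
- # Note the tweak is *encrypted* with key2 even on the decrypt path
- # (the aesenc loop at L$oop_enc1_11), as XTS requires.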
- .globl _aesni_xts_decrypt
- .p2align 4
- _aesni_xts_decrypt:
- leaq (%rsp),%r11
- pushq %rbp
- subq $112,%rsp
- andq $-16,%rsp
- movups (%r9),%xmm2
- movl 240(%r8),%eax
- movl 240(%rcx),%r10d
- movups (%r8),%xmm0
- movups 16(%r8),%xmm1
- leaq 32(%r8),%r8
- xorps %xmm0,%xmm2
- L$oop_enc1_11:
- .byte 102,15,56,220,209
- decl %eax
- movups (%r8),%xmm1
- leaq 16(%r8),%r8
- jnz L$oop_enc1_11
- .byte 102,15,56,221,209
- xorl %eax,%eax
- testq $15,%rdx
- setnz %al
- shlq $4,%rax
- subq %rax,%rdx
- movups (%rcx),%xmm0
- movq %rcx,%rbp
- movl %r10d,%eax
- shll $4,%r10d
- movq %rdx,%r9
- andq $-16,%rdx
- movups 16(%rcx,%r10,1),%xmm1
- movdqa L$xts_magic(%rip),%xmm8
- movdqa %xmm2,%xmm15
- pshufd $0x5f,%xmm2,%xmm9
- pxor %xmm0,%xmm1
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm10
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm10
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm11
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm11
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm12
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm12
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm13
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm13
- pxor %xmm14,%xmm15
- movdqa %xmm15,%xmm14
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm9
- pxor %xmm0,%xmm14
- pxor %xmm9,%xmm15
- movaps %xmm1,96(%rsp)
- subq $96,%rdx
- jc L$xts_dec_short
- movl $16+96,%eax
- leaq 32(%rbp,%r10,1),%rcx
- subq %r10,%rax
- movups 16(%rbp),%xmm1
- movq %rax,%r10
- leaq L$xts_magic(%rip),%r8
- jmp L$xts_dec_grandloop
- .p2align 5
- L$xts_dec_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqa %xmm0,%xmm8
- movdqu 16(%rdi),%xmm3
- pxor %xmm10,%xmm2
- movdqu 32(%rdi),%xmm4
- pxor %xmm11,%xmm3
- .byte 102,15,56,222,209
- movdqu 48(%rdi),%xmm5
- pxor %xmm12,%xmm4
- .byte 102,15,56,222,217
- movdqu 64(%rdi),%xmm6
- pxor %xmm13,%xmm5
- .byte 102,15,56,222,225
- movdqu 80(%rdi),%xmm7
- pxor %xmm15,%xmm8
- movdqa 96(%rsp),%xmm9
- pxor %xmm14,%xmm6
- .byte 102,15,56,222,233
- movups 32(%rbp),%xmm0
- leaq 96(%rdi),%rdi
- pxor %xmm8,%xmm7
- pxor %xmm9,%xmm10
- .byte 102,15,56,222,241
- pxor %xmm9,%xmm11
- movdqa %xmm10,0(%rsp)
- .byte 102,15,56,222,249
- movups 48(%rbp),%xmm1
- pxor %xmm9,%xmm12
- .byte 102,15,56,222,208
- pxor %xmm9,%xmm13
- movdqa %xmm11,16(%rsp)
- .byte 102,15,56,222,216
- pxor %xmm9,%xmm14
- movdqa %xmm12,32(%rsp)
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- pxor %xmm9,%xmm8
- movdqa %xmm14,64(%rsp)
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups 64(%rbp),%xmm0
- movdqa %xmm8,80(%rsp)
- pshufd $0x5f,%xmm15,%xmm9
- jmp L$xts_dec_loop6
- .p2align 5
- L$xts_dec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups -64(%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -80(%rcx,%rax,1),%xmm0
- jnz L$xts_dec_loop6
- movdqa (%r8),%xmm8
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,209
- paddq %xmm15,%xmm15
- psrad $31,%xmm14
- .byte 102,15,56,222,217
- pand %xmm8,%xmm14
- movups (%rbp),%xmm10
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- pxor %xmm14,%xmm15
- movaps %xmm10,%xmm11
- .byte 102,15,56,222,249
- movups -64(%rcx),%xmm1
- movdqa %xmm9,%xmm14
- .byte 102,15,56,222,208
- paddd %xmm9,%xmm9
- pxor %xmm15,%xmm10
- .byte 102,15,56,222,216
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- pand %xmm8,%xmm14
- movaps %xmm11,%xmm12
- .byte 102,15,56,222,240
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- .byte 102,15,56,222,248
- movups -48(%rcx),%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,209
- pxor %xmm15,%xmm11
- psrad $31,%xmm14
- .byte 102,15,56,222,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movdqa %xmm13,48(%rsp)
- pxor %xmm14,%xmm15
- .byte 102,15,56,222,241
- movaps %xmm12,%xmm13
- movdqa %xmm9,%xmm14
- .byte 102,15,56,222,249
- movups -32(%rcx),%xmm1
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,208
- pxor %xmm15,%xmm12
- psrad $31,%xmm14
- .byte 102,15,56,222,216
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- pxor %xmm14,%xmm15
- movaps %xmm13,%xmm14
- .byte 102,15,56,222,248
- movdqa %xmm9,%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,209
- pxor %xmm15,%xmm13
- psrad $31,%xmm0
- .byte 102,15,56,222,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm0
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm0,%xmm15
- movups (%rbp),%xmm0
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups 16(%rbp),%xmm1
- pxor %xmm15,%xmm14
- .byte 102,15,56,223,84,36,0
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- .byte 102,15,56,223,92,36,16
- .byte 102,15,56,223,100,36,32
- pand %xmm8,%xmm9
- movq %r10,%rax
- .byte 102,15,56,223,108,36,48
- .byte 102,15,56,223,116,36,64
- .byte 102,15,56,223,124,36,80
- pxor %xmm9,%xmm15
- leaq 96(%rsi),%rsi
- movups %xmm2,-96(%rsi)
- movups %xmm3,-80(%rsi)
- movups %xmm4,-64(%rsi)
- movups %xmm5,-48(%rsi)
- movups %xmm6,-32(%rsi)
- movups %xmm7,-16(%rsi)
- subq $96,%rdx
- jnc L$xts_dec_grandloop
- movl $16+96,%eax
- subl %r10d,%eax
- movq %rbp,%rcx
- shrl $4,%eax
- L$xts_dec_short:
- movl %eax,%r10d
- pxor %xmm0,%xmm10
- pxor %xmm0,%xmm11
- addq $96,%rdx
- jz L$xts_dec_done
- pxor %xmm0,%xmm12
- cmpq $0x20,%rdx
- jb L$xts_dec_one
- pxor %xmm0,%xmm13
- je L$xts_dec_two
- pxor %xmm0,%xmm14
- cmpq $0x40,%rdx
- jb L$xts_dec_three
- je L$xts_dec_four
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- pxor %xmm10,%xmm2
- movdqu 48(%rdi),%xmm5
- pxor %xmm11,%xmm3
- movdqu 64(%rdi),%xmm6
- leaq 80(%rdi),%rdi
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm6
- call _aesni_decrypt6
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- xorps %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- xorps %xmm14,%xmm6
- movdqu %xmm4,32(%rsi)
- pxor %xmm14,%xmm14
- movdqu %xmm5,48(%rsi)
- pcmpgtd %xmm15,%xmm14
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- pshufd $0x13,%xmm14,%xmm11
- andq $15,%r9
- jz L$xts_dec_ret
- movdqa %xmm15,%xmm10
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm11
- pxor %xmm15,%xmm11
- jmp L$xts_dec_done2
- .p2align 4
- L$xts_dec_one:
- movups (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_12:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_12
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movdqa %xmm11,%xmm10
- movups %xmm2,(%rsi)
- movdqa %xmm12,%xmm11
- leaq 16(%rsi),%rsi
- jmp L$xts_dec_done
- .p2align 4
- L$xts_dec_two:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- leaq 32(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- call _aesni_decrypt2
- xorps %xmm10,%xmm2
- movdqa %xmm12,%xmm10
- xorps %xmm11,%xmm3
- movdqa %xmm13,%xmm11
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- leaq 32(%rsi),%rsi
- jmp L$xts_dec_done
- .p2align 4
- L$xts_dec_three:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- leaq 48(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- call _aesni_decrypt3
- xorps %xmm10,%xmm2
- movdqa %xmm13,%xmm10
- xorps %xmm11,%xmm3
- movdqa %xmm14,%xmm11
- xorps %xmm12,%xmm4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- leaq 48(%rsi),%rsi
- jmp L$xts_dec_done
- .p2align 4
- L$xts_dec_four:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- xorps %xmm10,%xmm2
- movups 48(%rdi),%xmm5
- leaq 64(%rdi),%rdi
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- xorps %xmm13,%xmm5
- call _aesni_decrypt4
- pxor %xmm10,%xmm2
- movdqa %xmm14,%xmm10
- pxor %xmm11,%xmm3
- movdqa %xmm15,%xmm11
- pxor %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- movdqu %xmm4,32(%rsi)
- movdqu %xmm5,48(%rsi)
- leaq 64(%rsi),%rsi
- jmp L$xts_dec_done
- .p2align 4
- L$xts_dec_done:
- andq $15,%r9
- jz L$xts_dec_ret
- L$xts_dec_done2:
- movq %r9,%rdx
- movq %rbp,%rcx
- movl %r10d,%eax
- movups (%rdi),%xmm2
- xorps %xmm11,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_13:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_13
- .byte 102,15,56,223,209
- xorps %xmm11,%xmm2
- movups %xmm2,(%rsi)
- L$xts_dec_steal:
- movzbl 16(%rdi),%eax
- movzbl (%rsi),%ecx
- leaq 1(%rdi),%rdi
- movb %al,(%rsi)
- movb %cl,16(%rsi)
- leaq 1(%rsi),%rsi
- subq $1,%rdx
- jnz L$xts_dec_steal
- subq %r9,%rsi
- movq %rbp,%rcx
- movl %r10d,%eax
- movups (%rsi),%xmm2
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_14:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_14
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- L$xts_dec_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- leaq (%r11),%rsp
- L$xts_dec_epilogue:
- .byte 0xf3,0xc3
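- # _aesni_ocb_encrypt -- OCB mode. Register/stack usage below (rdi=in,
- # rsi=out, rdx=block count, rcx=key, r8=starting block number,
- # r9=offset_i[16], plus the L_ table and checksum pointers pulled off the
- # stack into rbx/rbp) suggests a prototype along the lines of:
- #   void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
- #                          size_t blocks, const AES_KEY *key,
- #                          size_t start_block_num,
- #                          unsigned char offset_i[16],
- #                          const unsigned char L_[][16],
- #                          unsigned char checksum[16]);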
- .globl _aesni_ocb_encrypt
- .p2align 5
- _aesni_ocb_encrypt:
- leaq (%rsp),%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- movq 8(%rax),%rbx
- movq 8+8(%rax),%rbp
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- shll $4,%r10d
- movups (%rcx),%xmm9
- movups 16(%rcx,%r10,1),%xmm1
- movdqu (%r9),%xmm15
- pxor %xmm1,%xmm9
- pxor %xmm1,%xmm15
- movl $16+32,%eax
- leaq 32(%r11,%r10,1),%rcx
- movups 16(%r11),%xmm1
- subq %r10,%rax
- movq %rax,%r10
- movdqu (%rbx),%xmm10
- movdqu (%rbp),%xmm8
- testq $1,%r8
- jnz L$ocb_enc_odd
- bsfq %r8,%r12
- addq $1,%r8
- shlq $4,%r12
- movdqu (%rbx,%r12,1),%xmm7
- movdqu (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- call __ocb_encrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,(%rsi)
- leaq 16(%rsi),%rsi
- subq $1,%rdx
- jz L$ocb_enc_done
- L$ocb_enc_odd:
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- leaq 6(%r8),%r8
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- shlq $4,%r12
- shlq $4,%r13
- shlq $4,%r14
- subq $6,%rdx
- jc L$ocb_enc_short
- jmp L$ocb_enc_grandloop
- .p2align 5
- L$ocb_enc_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- leaq 96(%rdi),%rdi
- call __ocb_encrypt6
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc L$ocb_enc_grandloop
- L$ocb_enc_short:
- addq $6,%rdx
- jz L$ocb_enc_done
- movdqu 0(%rdi),%xmm2
- cmpq $2,%rdx
- jb L$ocb_enc_one
- movdqu 16(%rdi),%xmm3
- je L$ocb_enc_two
- movdqu 32(%rdi),%xmm4
- cmpq $4,%rdx
- jb L$ocb_enc_three
- movdqu 48(%rdi),%xmm5
- je L$ocb_enc_four
- movdqu 64(%rdi),%xmm6
- pxor %xmm7,%xmm7
- call __ocb_encrypt6
- movdqa %xmm14,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- jmp L$ocb_enc_done
- .p2align 4
- L$ocb_enc_one:
- movdqa %xmm10,%xmm7
- call __ocb_encrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,0(%rsi)
- jmp L$ocb_enc_done
- .p2align 4
- L$ocb_enc_two:
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- call __ocb_encrypt4
- movdqa %xmm11,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- jmp L$ocb_enc_done
- .p2align 4
- L$ocb_enc_three:
- pxor %xmm5,%xmm5
- call __ocb_encrypt4
- movdqa %xmm12,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- jmp L$ocb_enc_done
- .p2align 4
- L$ocb_enc_four:
- call __ocb_encrypt4
- movdqa %xmm13,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- L$ocb_enc_done:
- pxor %xmm0,%xmm15
- movdqu %xmm8,(%rbp)
- movdqu %xmm15,(%r9)
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- pxor %xmm10,%xmm10
- pxor %xmm11,%xmm11
- pxor %xmm12,%xmm12
- pxor %xmm13,%xmm13
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- leaq 40(%rsp),%rax
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$ocb_enc_epilogue:
- .byte 0xf3,0xc3
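- # __ocb_encrypt6: six blocks per call. The bsfq on r12/r13/r14 computes
- # ntz(i), the number of trailing zero bits of a block index, and shlq $4
- # scales it into a byte offset into the L_ table -- OCB's
- # Offset_i = Offset_{i-1} xor L_[ntz(i)] chaining. The running pxor of
- # plaintext blocks into %xmm8 maintains the OCB checksum.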
- .p2align 5
- __ocb_encrypt6:
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- movdqa %xmm10,%xmm14
- pxor %xmm15,%xmm10
- movdqu (%rbx,%r14,1),%xmm15
- pxor %xmm10,%xmm11
- pxor %xmm2,%xmm8
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm3,%xmm8
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm4,%xmm8
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm14
- pxor %xmm5,%xmm8
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm15
- pxor %xmm6,%xmm8
- pxor %xmm14,%xmm6
- pxor %xmm7,%xmm8
- pxor %xmm15,%xmm7
- movups 32(%r11),%xmm0
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- addq $6,%r8
- pxor %xmm9,%xmm10
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- .byte 102,15,56,220,241
- pxor %xmm9,%xmm13
- pxor %xmm9,%xmm14
- .byte 102,15,56,220,249
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm15
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups 64(%r11),%xmm0
- shlq $4,%r12
- shlq $4,%r13
- jmp L$ocb_enc_loop6
- .p2align 5
- L$ocb_enc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ocb_enc_loop6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups 16(%r11),%xmm1
- shlq $4,%r14
- .byte 102,65,15,56,221,210
- movdqu (%rbx),%xmm10
- movq %r10,%rax
- .byte 102,65,15,56,221,219
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- .byte 102,65,15,56,221,246
- .byte 102,65,15,56,221,255
- .byte 0xf3,0xc3
- .p2align 5
- __ocb_encrypt4:
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- pxor %xmm15,%xmm10
- pxor %xmm10,%xmm11
- pxor %xmm2,%xmm8
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm3,%xmm8
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm4,%xmm8
- pxor %xmm12,%xmm4
- pxor %xmm5,%xmm8
- pxor %xmm13,%xmm5
- movups 32(%r11),%xmm0
- pxor %xmm9,%xmm10
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- pxor %xmm9,%xmm13
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 48(%r11),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups 64(%r11),%xmm0
- jmp L$ocb_enc_loop4
- .p2align 5
- L$ocb_enc_loop4:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ocb_enc_loop4
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,65,15,56,221,210
- .byte 102,65,15,56,221,219
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- .byte 0xf3,0xc3
- .p2align 5
- __ocb_encrypt1:
- pxor %xmm15,%xmm7
- pxor %xmm9,%xmm7
- pxor %xmm2,%xmm8
- pxor %xmm7,%xmm2
- movups 32(%r11),%xmm0
- .byte 102,15,56,220,209
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm7
- .byte 102,15,56,220,208
- movups 64(%r11),%xmm0
- jmp L$ocb_enc_loop1
- .p2align 5
- L$ocb_enc_loop1:
- .byte 102,15,56,220,209
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ocb_enc_loop1
- .byte 102,15,56,220,209
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,15,56,221,215
- .byte 0xf3,0xc3
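- # _aesni_ocb_decrypt -- same argument layout as _aesni_ocb_encrypt above;
- # the structural difference is that the checksum in %xmm8 is accumulated
- # from the recovered plaintext after decryption (the pxor following each
- # store) rather than before encryption.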
- .globl _aesni_ocb_decrypt
- .p2align 5
- _aesni_ocb_decrypt:
- leaq (%rsp),%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- movq 8(%rax),%rbx
- movq 8+8(%rax),%rbp
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- shll $4,%r10d
- movups (%rcx),%xmm9
- movups 16(%rcx,%r10,1),%xmm1
- movdqu (%r9),%xmm15
- pxor %xmm1,%xmm9
- pxor %xmm1,%xmm15
- movl $16+32,%eax
- leaq 32(%r11,%r10,1),%rcx
- movups 16(%r11),%xmm1
- subq %r10,%rax
- movq %rax,%r10
- movdqu (%rbx),%xmm10
- movdqu (%rbp),%xmm8
- testq $1,%r8
- jnz L$ocb_dec_odd
- bsfq %r8,%r12
- addq $1,%r8
- shlq $4,%r12
- movdqu (%rbx,%r12,1),%xmm7
- movdqu (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- call __ocb_decrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,(%rsi)
- xorps %xmm2,%xmm8
- leaq 16(%rsi),%rsi
- subq $1,%rdx
- jz L$ocb_dec_done
- L$ocb_dec_odd:
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- leaq 6(%r8),%r8
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- shlq $4,%r12
- shlq $4,%r13
- shlq $4,%r14
- subq $6,%rdx
- jc L$ocb_dec_short
- jmp L$ocb_dec_grandloop
- .p2align 5
- L$ocb_dec_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- leaq 96(%rdi),%rdi
- call __ocb_decrypt6
- movups %xmm2,0(%rsi)
- pxor %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm8
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm8
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm8
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm8
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc L$ocb_dec_grandloop
- L$ocb_dec_short:
- addq $6,%rdx
- jz L$ocb_dec_done
- movdqu 0(%rdi),%xmm2
- cmpq $2,%rdx
- jb L$ocb_dec_one
- movdqu 16(%rdi),%xmm3
- je L$ocb_dec_two
- movdqu 32(%rdi),%xmm4
- cmpq $4,%rdx
- jb L$ocb_dec_three
- movdqu 48(%rdi),%xmm5
- je L$ocb_dec_four
- movdqu 64(%rdi),%xmm6
- pxor %xmm7,%xmm7
- call __ocb_decrypt6
- movdqa %xmm14,%xmm15
- movups %xmm2,0(%rsi)
- pxor %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm8
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm8
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm8
- jmp L$ocb_dec_done
- .p2align 4
- L$ocb_dec_one:
- movdqa %xmm10,%xmm7
- call __ocb_decrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,0(%rsi)
- xorps %xmm2,%xmm8
- jmp L$ocb_dec_done
- .p2align 4
- L$ocb_dec_two:
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- call __ocb_decrypt4
- movdqa %xmm11,%xmm15
- movups %xmm2,0(%rsi)
- xorps %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- xorps %xmm3,%xmm8
- jmp L$ocb_dec_done
- .p2align 4
- L$ocb_dec_three:
- pxor %xmm5,%xmm5
- call __ocb_decrypt4
- movdqa %xmm12,%xmm15
- movups %xmm2,0(%rsi)
- xorps %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- xorps %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- xorps %xmm4,%xmm8
- jmp L$ocb_dec_done
- .p2align 4
- L$ocb_dec_four:
- call __ocb_decrypt4
- movdqa %xmm13,%xmm15
- movups %xmm2,0(%rsi)
- pxor %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm8
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm8
- L$ocb_dec_done:
- pxor %xmm0,%xmm15
- movdqu %xmm8,(%rbp)
- movdqu %xmm15,(%r9)
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- pxor %xmm10,%xmm10
- pxor %xmm11,%xmm11
- pxor %xmm12,%xmm12
- pxor %xmm13,%xmm13
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- leaq 40(%rsp),%rax
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$ocb_dec_epilogue:
- .byte 0xf3,0xc3
- .p2align 5
- __ocb_decrypt6:
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- movdqa %xmm10,%xmm14
- pxor %xmm15,%xmm10
- movdqu (%rbx,%r14,1),%xmm15
- pxor %xmm10,%xmm11
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm14
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm15
- pxor %xmm14,%xmm6
- pxor %xmm15,%xmm7
- movups 32(%r11),%xmm0
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- addq $6,%r8
- pxor %xmm9,%xmm10
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- .byte 102,15,56,222,241
- pxor %xmm9,%xmm13
- pxor %xmm9,%xmm14
- .byte 102,15,56,222,249
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm15
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups 64(%r11),%xmm0
- shlq $4,%r12
- shlq $4,%r13
- jmp L$ocb_dec_loop6
- .p2align 5
- L$ocb_dec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ocb_dec_loop6
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups 16(%r11),%xmm1
- shlq $4,%r14
- .byte 102,65,15,56,223,210
- movdqu (%rbx),%xmm10
- movq %r10,%rax
- .byte 102,65,15,56,223,219
- .byte 102,65,15,56,223,228
- .byte 102,65,15,56,223,237
- .byte 102,65,15,56,223,246
- .byte 102,65,15,56,223,255
- .byte 0xf3,0xc3
- .p2align 5
- __ocb_decrypt4:
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- pxor %xmm15,%xmm10
- pxor %xmm10,%xmm11
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm5
- movups 32(%r11),%xmm0
- pxor %xmm9,%xmm10
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- pxor %xmm9,%xmm13
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 48(%r11),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups 64(%r11),%xmm0
- jmp L$ocb_dec_loop4
- .p2align 5
- L$ocb_dec_loop4:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ocb_dec_loop4
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,65,15,56,223,210
- .byte 102,65,15,56,223,219
- .byte 102,65,15,56,223,228
- .byte 102,65,15,56,223,237
- .byte 0xf3,0xc3
- .p2align 5
- __ocb_decrypt1:
- pxor %xmm15,%xmm7
- pxor %xmm9,%xmm7
- pxor %xmm7,%xmm2
- movups 32(%r11),%xmm0
- .byte 102,15,56,222,209
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm7
- .byte 102,15,56,222,208
- movups 64(%r11),%xmm0
- jmp L$ocb_dec_loop1
- .p2align 5
- L$ocb_dec_loop1:
- .byte 102,15,56,222,209
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- movups -16(%rcx,%rax,1),%xmm0
- jnz L$ocb_dec_loop1
- .byte 102,15,56,222,209
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,15,56,223,215
- .byte 0xf3,0xc3
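- # _aesni_cbc_encrypt -- CBC en/decryption; r9d selects the direction and
- # r8 carries the IV (updated in place), consistent with the usual
- # prototype:
- #   void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
- #                          size_t length, const AES_KEY *key,
- #                          unsigned char *ivec, int enc);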
- .globl _aesni_cbc_encrypt
- .p2align 4
- _aesni_cbc_encrypt:
- testq %rdx,%rdx
- jz L$cbc_ret
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- testl %r9d,%r9d
- jz L$cbc_decrypt
- movups (%r8),%xmm2
- movl %r10d,%eax
- cmpq $16,%rdx
- jb L$cbc_enc_tail
- subq $16,%rdx
- jmp L$cbc_enc_loop
- .p2align 4
- L$cbc_enc_loop:
- movups (%rdi),%xmm3
- leaq 16(%rdi),%rdi
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm3
- leaq 32(%rcx),%rcx
- xorps %xmm3,%xmm2
- L$oop_enc1_15:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_enc1_15
- .byte 102,15,56,221,209
- movl %r10d,%eax
- movq %r11,%rcx
- movups %xmm2,0(%rsi)
- leaq 16(%rsi),%rsi
- subq $16,%rdx
- jnc L$cbc_enc_loop
- addq $16,%rdx
- jnz L$cbc_enc_tail
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%r8)
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- jmp L$cbc_ret
- L$cbc_enc_tail:
- movq %rdx,%rcx
- xchgq %rdi,%rsi
- .long 0x9066A4F3  # rep movsb (+ two-byte nop): copy the tail bytes to the output buffer
- movl $16,%ecx
- subq %rdx,%rcx
- xorl %eax,%eax
- .long 0x9066AAF3  # rep stosb (+ two-byte nop): zero-pad the block in place
- leaq -16(%rdi),%rdi
- movl %r10d,%eax
- movq %rdi,%rsi
- movq %r11,%rcx
- xorq %rdx,%rdx
- jmp L$cbc_enc_loop
- .p2align 4
- L$cbc_decrypt:
- cmpq $16,%rdx
- jne L$cbc_decrypt_bulk
- movdqu (%rdi),%xmm2
- movdqu (%r8),%xmm3
- movdqa %xmm2,%xmm4
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_16:
- .byte 102,15,56,222,209
- decl %r10d
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_16
- .byte 102,15,56,223,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movdqu %xmm4,(%r8)
- xorps %xmm3,%xmm2
- pxor %xmm3,%xmm3
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp L$cbc_ret
- .p2align 4
- L$cbc_decrypt_bulk:
- leaq (%rsp),%r11
- pushq %rbp
- subq $16,%rsp
- andq $-16,%rsp
- movq %rcx,%rbp
- movups (%r8),%xmm10
- movl %r10d,%eax
- cmpq $0x50,%rdx
- jbe L$cbc_dec_tail
- movups (%rcx),%xmm0
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqa %xmm2,%xmm11
- movdqu 32(%rdi),%xmm4
- movdqa %xmm3,%xmm12
- movdqu 48(%rdi),%xmm5
- movdqa %xmm4,%xmm13
- movdqu 64(%rdi),%xmm6
- movdqa %xmm5,%xmm14
- movdqu 80(%rdi),%xmm7
- movdqa %xmm6,%xmm15
- movl _OPENSSL_ia32cap_P+4(%rip),%r9d
- cmpq $0x70,%rdx
- jbe L$cbc_dec_six_or_seven
- andl $71303168,%r9d
- subq $0x50,%rdx
- cmpl $4194304,%r9d
- je L$cbc_dec_loop6_enter
- subq $0x20,%rdx
- leaq 112(%rcx),%rcx
- jmp L$cbc_dec_loop8_enter
- .p2align 4
- L$cbc_dec_loop8:
- movups %xmm9,(%rsi)
- leaq 16(%rsi),%rsi
- L$cbc_dec_loop8_enter:
- movdqu 96(%rdi),%xmm8
- pxor %xmm0,%xmm2
- movdqu 112(%rdi),%xmm9
- pxor %xmm0,%xmm3
- movups 16-112(%rcx),%xmm1
- pxor %xmm0,%xmm4
- movq $-1,%rbp
- cmpq $0x70,%rdx
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,222,209
- pxor %xmm0,%xmm9
- movups 32-112(%rcx),%xmm0
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- adcq $0,%rbp
- andq $128,%rbp
- .byte 102,68,15,56,222,201
- addq %rdi,%rbp
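- # (movq $-1 / adcq $0 / andq $128 above is a branchless select: depending
- #  on the carry from cmpq $0x70,%rdx, %rbp ends up as %rdi or %rdi+128,
- #  the base from which the chaining ciphertext blocks at 0..80(%rbp) are
- #  reloaded further down.)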
- movups 48-112(%rcx),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 64-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 80-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 96-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 112-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 128-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 144-112(%rcx),%xmm1
- cmpl $11,%eax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 160-112(%rcx),%xmm0
- jb L$cbc_dec_done
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 176-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 192-112(%rcx),%xmm0
- je L$cbc_dec_done
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 208-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 224-112(%rcx),%xmm0
- jmp L$cbc_dec_done
- .p2align 4
- L$cbc_dec_done:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm10
- pxor %xmm0,%xmm11
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm0,%xmm12
- pxor %xmm0,%xmm13
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- pxor %xmm0,%xmm14
- pxor %xmm0,%xmm15
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movdqu 80(%rdi),%xmm1
- .byte 102,65,15,56,223,210
- movdqu 96(%rdi),%xmm10
- pxor %xmm0,%xmm1
- .byte 102,65,15,56,223,219
- pxor %xmm0,%xmm10
- movdqu 112(%rdi),%xmm0
- .byte 102,65,15,56,223,228
- leaq 128(%rdi),%rdi
- movdqu 0(%rbp),%xmm11
- .byte 102,65,15,56,223,237
- .byte 102,65,15,56,223,246
- movdqu 16(%rbp),%xmm12
- movdqu 32(%rbp),%xmm13
- .byte 102,65,15,56,223,255
- .byte 102,68,15,56,223,193
- movdqu 48(%rbp),%xmm14
- movdqu 64(%rbp),%xmm15
- .byte 102,69,15,56,223,202
- movdqa %xmm0,%xmm10
- movdqu 80(%rbp),%xmm1
- movups -112(%rcx),%xmm0
- movups %xmm2,(%rsi)
- movdqa %xmm11,%xmm2
- movups %xmm3,16(%rsi)
- movdqa %xmm12,%xmm3
- movups %xmm4,32(%rsi)
- movdqa %xmm13,%xmm4
- movups %xmm5,48(%rsi)
- movdqa %xmm14,%xmm5
- movups %xmm6,64(%rsi)
- movdqa %xmm15,%xmm6
- movups %xmm7,80(%rsi)
- movdqa %xmm1,%xmm7
- movups %xmm8,96(%rsi)
- leaq 112(%rsi),%rsi
- subq $0x80,%rdx
- ja L$cbc_dec_loop8
- movaps %xmm9,%xmm2
- leaq -112(%rcx),%rcx
- addq $0x70,%rdx
- jle L$cbc_dec_clear_tail_collected
- movups %xmm9,(%rsi)
- leaq 16(%rsi),%rsi
- cmpq $0x50,%rdx
- jbe L$cbc_dec_tail
- movaps %xmm11,%xmm2
- L$cbc_dec_six_or_seven:
- cmpq $0x60,%rdx
- ja L$cbc_dec_seven
- movaps %xmm7,%xmm8
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movaps %xmm8,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- pxor %xmm15,%xmm7
- movdqu %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- leaq 80(%rsi),%rsi
- movdqa %xmm7,%xmm2
- pxor %xmm7,%xmm7
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_seven:
- movups 96(%rdi),%xmm8
- xorps %xmm9,%xmm9
- call _aesni_decrypt8
- movups 80(%rdi),%xmm9
- pxor %xmm10,%xmm2
- movups 96(%rdi),%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- pxor %xmm15,%xmm7
- movdqu %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- pxor %xmm9,%xmm8
- movdqu %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- leaq 96(%rsi),%rsi
- movdqa %xmm8,%xmm2
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_loop6:
- movups %xmm7,(%rsi)
- leaq 16(%rsi),%rsi
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqa %xmm2,%xmm11
- movdqu 32(%rdi),%xmm4
- movdqa %xmm3,%xmm12
- movdqu 48(%rdi),%xmm5
- movdqa %xmm4,%xmm13
- movdqu 64(%rdi),%xmm6
- movdqa %xmm5,%xmm14
- movdqu 80(%rdi),%xmm7
- movdqa %xmm6,%xmm15
- L$cbc_dec_loop6_enter:
- leaq 96(%rdi),%rdi
- movdqa %xmm7,%xmm8
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movdqa %xmm8,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm14,%xmm6
- movq %rbp,%rcx
- movdqu %xmm5,48(%rsi)
- pxor %xmm15,%xmm7
- movl %r10d,%eax
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- subq $0x60,%rdx
- ja L$cbc_dec_loop6
- movdqa %xmm7,%xmm2
- addq $0x50,%rdx
- jle L$cbc_dec_clear_tail_collected
- movups %xmm7,(%rsi)
- leaq 16(%rsi),%rsi
- L$cbc_dec_tail:
- movups (%rdi),%xmm2
- subq $0x10,%rdx
- jbe L$cbc_dec_one
- movups 16(%rdi),%xmm3
- movaps %xmm2,%xmm11
- subq $0x10,%rdx
- jbe L$cbc_dec_two
- movups 32(%rdi),%xmm4
- movaps %xmm3,%xmm12
- subq $0x10,%rdx
- jbe L$cbc_dec_three
- movups 48(%rdi),%xmm5
- movaps %xmm4,%xmm13
- subq $0x10,%rdx
- jbe L$cbc_dec_four
- movups 64(%rdi),%xmm6
- movaps %xmm5,%xmm14
- movaps %xmm6,%xmm15
- xorps %xmm7,%xmm7
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movaps %xmm15,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- leaq 64(%rsi),%rsi
- movdqa %xmm6,%xmm2
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- subq $0x10,%rdx
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_one:
- movaps %xmm2,%xmm11
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- L$oop_dec1_17:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz L$oop_dec1_17
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movaps %xmm11,%xmm10
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_two:
- movaps %xmm3,%xmm12
- call _aesni_decrypt2
- pxor %xmm10,%xmm2
- movaps %xmm12,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- movdqa %xmm3,%xmm2
- pxor %xmm3,%xmm3
- leaq 16(%rsi),%rsi
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_three:
- movaps %xmm4,%xmm13
- call _aesni_decrypt3
- pxor %xmm10,%xmm2
- movaps %xmm13,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movdqa %xmm4,%xmm2
- pxor %xmm4,%xmm4
- leaq 32(%rsi),%rsi
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_four:
- movaps %xmm5,%xmm14
- call _aesni_decrypt4
- pxor %xmm10,%xmm2
- movaps %xmm14,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movdqa %xmm5,%xmm2
- pxor %xmm5,%xmm5
- leaq 48(%rsi),%rsi
- jmp L$cbc_dec_tail_collected
- .p2align 4
- L$cbc_dec_clear_tail_collected:
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- L$cbc_dec_tail_collected:
- movups %xmm10,(%r8)
- andq $15,%rdx
- jnz L$cbc_dec_tail_partial
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp L$cbc_dec_ret
- .p2align 4
- L$cbc_dec_tail_partial:
- movaps %xmm2,(%rsp)
- pxor %xmm2,%xmm2
- movq $16,%rcx
- movq %rsi,%rdi
- subq %rdx,%rcx
- leaq (%rsp),%rsi
- .long 0x9066A4F3  # rep movsb
- movdqa %xmm2,(%rsp)
- L$cbc_dec_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movq -8(%r11),%rbp
- leaq (%r11),%rsp
- L$cbc_ret:
- .byte 0xf3,0xc3
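- # _aesni_set_decrypt_key -- builds the encryption schedule via
- # __aesni_set_encrypt_key, then reverses the round-key order and applies
- # InvMixColumns to the inner round keys with aesimc
- # (.byte 102,15,56,219,xx) so they can be fed to aesdec. Returns the
- # __aesni_set_encrypt_key status in %rax; presumably:
- #   int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
- #                             AES_KEY *key);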
- .globl _aesni_set_decrypt_key
- .p2align 4
- _aesni_set_decrypt_key:
- .byte 0x48,0x83,0xEC,0x08  # subq $8,%rsp
- call __aesni_set_encrypt_key
- shll $4,%esi
- testl %eax,%eax
- jnz L$dec_key_ret
- leaq 16(%rdx,%rsi,1),%rdi
- movups (%rdx),%xmm0
- movups (%rdi),%xmm1
- movups %xmm0,(%rdi)
- movups %xmm1,(%rdx)
- leaq 16(%rdx),%rdx
- leaq -16(%rdi),%rdi
- L$dec_key_inverse:
- movups (%rdx),%xmm0
- movups (%rdi),%xmm1
- .byte 102,15,56,219,192
- .byte 102,15,56,219,201
- leaq 16(%rdx),%rdx
- leaq -16(%rdi),%rdi
- movups %xmm0,16(%rdi)
- movups %xmm1,-16(%rdx)
- cmpq %rdx,%rdi
- ja L$dec_key_inverse
- movups (%rdx),%xmm0
- .byte 102,15,56,219,192
- pxor %xmm1,%xmm1
- movups %xmm0,(%rdi)
- pxor %xmm0,%xmm0
- L$dec_key_ret:
- addq $8,%rsp
- .byte 0xf3,0xc3
- L$SEH_end_set_decrypt_key:
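- # _aesni_set_encrypt_key -- AES key schedule. Returns 0 on success, -1 if
- # either pointer is NULL, -2 for a key size other than 128/192/256 bits
- # (L$bad_keybits); the 10/12/14 round count is stored at the end of the
- # schedule. The *_alt paths below derive the round keys with
- # pshufb+aesenclast instead of aeskeygenassist, selected from
- # OPENSSL_ia32cap_P capability bits.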
- .globl _aesni_set_encrypt_key
- .p2align 4
- _aesni_set_encrypt_key:
- __aesni_set_encrypt_key:
- .byte 0x48,0x83,0xEC,0x08  # subq $8,%rsp
- movq $-1,%rax
- testq %rdi,%rdi
- jz L$enc_key_ret
- testq %rdx,%rdx
- jz L$enc_key_ret
- movl $268437504,%r10d
- movups (%rdi),%xmm0
- xorps %xmm4,%xmm4
- andl _OPENSSL_ia32cap_P+4(%rip),%r10d
- leaq 16(%rdx),%rax
- cmpl $256,%esi
- je L$14rounds
- cmpl $192,%esi
- je L$12rounds
- cmpl $128,%esi
- jne L$bad_keybits
- L$10rounds:
- movl $9,%esi
- cmpl $268435456,%r10d
- je L$10rounds_alt
- movups %xmm0,(%rdx)
- .byte 102,15,58,223,200,1
- call L$key_expansion_128_cold
- .byte 102,15,58,223,200,2
- call L$key_expansion_128
- .byte 102,15,58,223,200,4
- call L$key_expansion_128
- .byte 102,15,58,223,200,8
- call L$key_expansion_128
- .byte 102,15,58,223,200,16
- call L$key_expansion_128
- .byte 102,15,58,223,200,32
- call L$key_expansion_128
- .byte 102,15,58,223,200,64
- call L$key_expansion_128
- .byte 102,15,58,223,200,128
- call L$key_expansion_128
- .byte 102,15,58,223,200,27
- call L$key_expansion_128
- .byte 102,15,58,223,200,54
- call L$key_expansion_128
- movups %xmm0,(%rax)
- movl %esi,80(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$10rounds_alt:
- movdqa L$key_rotate(%rip),%xmm5
- movl $8,%r10d
- movdqa L$key_rcon1(%rip),%xmm4
- movdqa %xmm0,%xmm2
- movdqu %xmm0,(%rdx)
- jmp L$oop_key128
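- # Each L$oop_key128 iteration: pshufb with L$key_rotate broadcasts
- # RotWord of the previous round key's last word, aesenclast performs
- # SubBytes and xors in the round constant held in %xmm4 (doubled each
- # round by pslld $1), and the pslldq/pxor cascade forms the running xor
- # of the four key words -- together producing the next AES-128 round key
- # without aeskeygenassist.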
- .p2align 4
- L$oop_key128:
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- pslld $1,%xmm4
- leaq 16(%rax),%rax
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,-16(%rax)
- movdqa %xmm0,%xmm2
- decl %r10d
- jnz L$oop_key128
- movdqa L$key_rcon1b(%rip),%xmm4
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- pslld $1,%xmm4
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,(%rax)
- movdqa %xmm0,%xmm2
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,16(%rax)
- movl %esi,96(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$12rounds:
- movq 16(%rdi),%xmm2
- movl $11,%esi
- cmpl $268435456,%r10d
- je L$12rounds_alt
- movups %xmm0,(%rdx)
- .byte 102,15,58,223,202,1
- call L$key_expansion_192a_cold
- .byte 102,15,58,223,202,2
- call L$key_expansion_192b
- .byte 102,15,58,223,202,4
- call L$key_expansion_192a
- .byte 102,15,58,223,202,8
- call L$key_expansion_192b
- .byte 102,15,58,223,202,16
- call L$key_expansion_192a
- .byte 102,15,58,223,202,32
- call L$key_expansion_192b
- .byte 102,15,58,223,202,64
- call L$key_expansion_192a
- .byte 102,15,58,223,202,128
- call L$key_expansion_192b
- movups %xmm0,(%rax)
- movl %esi,48(%rax)
- xorq %rax,%rax
- jmp L$enc_key_ret
- .p2align 4
- L$12rounds_alt:
- movdqa L$key_rotate192(%rip),%xmm5
- movdqa L$key_rcon1(%rip),%xmm4
- movl $8,%r10d
- movdqu %xmm0,(%rdx)
- jmp L$oop_key192
- .p2align 4
- L$oop_key192:
- movq %xmm2,0(%rax)
- movdqa %xmm2,%xmm1
- .byte 102,15,56,0,213
- .byte 102,15,56,221,212
- pslld $1,%xmm4
- leaq 24(%rax),%rax
- movdqa %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm3,%xmm0
- pshufd $0xff,%xmm0,%xmm3
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pxor %xmm2,%xmm0
- pxor %xmm3,%xmm2
- movdqu %xmm0,-16(%rax)
- decl %r10d
- jnz L$oop_key192
- movl %esi,32(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$14rounds:
- movups 16(%rdi),%xmm2
- movl $13,%esi
- leaq 16(%rax),%rax
- cmpl $268435456,%r10d
- je L$14rounds_alt
- movups %xmm0,(%rdx)
- movups %xmm2,16(%rdx)
- .byte 102,15,58,223,202,1
- call L$key_expansion_256a_cold
- .byte 102,15,58,223,200,1
- call L$key_expansion_256b
- .byte 102,15,58,223,202,2
- call L$key_expansion_256a
- .byte 102,15,58,223,200,2
- call L$key_expansion_256b
- .byte 102,15,58,223,202,4
- call L$key_expansion_256a
- .byte 102,15,58,223,200,4
- call L$key_expansion_256b
- .byte 102,15,58,223,202,8
- call L$key_expansion_256a
- .byte 102,15,58,223,200,8
- call L$key_expansion_256b
- .byte 102,15,58,223,202,16
- call L$key_expansion_256a
- .byte 102,15,58,223,200,16
- call L$key_expansion_256b
- .byte 102,15,58,223,202,32
- call L$key_expansion_256a
- .byte 102,15,58,223,200,32
- call L$key_expansion_256b
- .byte 102,15,58,223,202,64
- call L$key_expansion_256a
- movups %xmm0,(%rax)
- movl %esi,16(%rax)
- xorq %rax,%rax
- jmp L$enc_key_ret
- .p2align 4
- L$14rounds_alt:
- movdqa L$key_rotate(%rip),%xmm5
- movdqa L$key_rcon1(%rip),%xmm4
- movl $7,%r10d
- movdqu %xmm0,0(%rdx)
- movdqa %xmm2,%xmm1
- movdqu %xmm2,16(%rdx)
- jmp L$oop_key256
- .p2align 4
- L$oop_key256:
- .byte 102,15,56,0,213
- .byte 102,15,56,221,212
- movdqa %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm3,%xmm0
- pslld $1,%xmm4
- pxor %xmm2,%xmm0
- movdqu %xmm0,(%rax)
- decl %r10d
- jz L$done_key256
- pshufd $0xff,%xmm0,%xmm2
- pxor %xmm3,%xmm3
- .byte 102,15,56,221,211
- movdqa %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm3,%xmm1
- pxor %xmm1,%xmm2
- movdqu %xmm2,16(%rax)
- leaq 32(%rax),%rax
- movdqa %xmm2,%xmm1
- jmp L$oop_key256
- L$done_key256:
- movl %esi,16(%rax)
- xorl %eax,%eax
- jmp L$enc_key_ret
- .p2align 4
- L$bad_keybits:
- movq $-2,%rax
- L$enc_key_ret:
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- addq $8,%rsp
- .byte 0xf3,0xc3
- L$SEH_end_set_encrypt_key:
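- # The L$key_expansion_* helpers below consume the aeskeygenassist result
- # (.byte 102,15,58,223,... = aeskeygenassist $rcon,%xmm,%xmm): the shufps
- # pairs shift previous round-key words into place and xor them with
- # SubWord(RotWord(w)) ^ rcon to produce each new round key.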
- .p2align 4
- L$key_expansion_128:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- L$key_expansion_128_cold:
- shufps $16,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $255,%xmm1,%xmm1
- xorps %xmm1,%xmm0
- .byte 0xf3,0xc3
- .p2align 4
- L$key_expansion_192a:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- L$key_expansion_192a_cold:
- movaps %xmm2,%xmm5
- L$key_expansion_192b_warm:
- shufps $16,%xmm0,%xmm4
- movdqa %xmm2,%xmm3
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- pslldq $4,%xmm3
- xorps %xmm4,%xmm0
- pshufd $85,%xmm1,%xmm1
- pxor %xmm3,%xmm2
- pxor %xmm1,%xmm0
- pshufd $255,%xmm0,%xmm3
- pxor %xmm3,%xmm2
- .byte 0xf3,0xc3
- .p2align 4
- L$key_expansion_192b:
- movaps %xmm0,%xmm3
- shufps $68,%xmm0,%xmm5
- movups %xmm5,(%rax)
- shufps $78,%xmm2,%xmm3
- movups %xmm3,16(%rax)
- leaq 32(%rax),%rax
- jmp L$key_expansion_192b_warm
- .p2align 4
- L$key_expansion_256a:
- movups %xmm2,(%rax)
- leaq 16(%rax),%rax
- L$key_expansion_256a_cold:
- shufps $16,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $255,%xmm1,%xmm1
- xorps %xmm1,%xmm0
- .byte 0xf3,0xc3
- .p2align 4
- L$key_expansion_256b:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- shufps $16,%xmm2,%xmm4
- xorps %xmm4,%xmm2
- shufps $140,%xmm2,%xmm4
- xorps %xmm4,%xmm2
- shufps $170,%xmm1,%xmm1
- xorps %xmm1,%xmm2
- .byte 0xf3,0xc3
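- # Constant pool: L$bswap_mask and the L$increment* vectors belong to the
- # counter-mode code earlier in the file; L$xts_magic is the XTS tweak
- # reduction constant (x^128 + x^7 + x^2 + x + 1 -> 0x87); L$key_rotate,
- # L$key_rotate192, L$key_rcon1 and L$key_rcon1b feed the
- # pshufb/aesenclast key-schedule paths above.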
- .p2align 6
- L$bswap_mask:
- .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
- L$increment32:
- .long 6,6,6,0
- L$increment64:
- .long 1,0,0,0
- L$xts_magic:
- .long 0x87,0,1,0
- L$increment1:
- .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
- L$key_rotate:
- .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d
- L$key_rotate192:
- .long 0x04070605,0x04070605,0x04070605,0x04070605
- L$key_rcon1:
- .long 1,1,1,1
- L$key_rcon1b:
- .long 0x1b,0x1b,0x1b,0x1b
- .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0  # "AES for Intel AES-NI, CRYPTOGAMS by <appro@openssl.org>"
- .p2align 6
|