- .text
- .globl aesni_encrypt
- .type aesni_encrypt,@function
- .align 16
- aesni_encrypt:
- .cfi_startproc
- movups (%rdi),%xmm2
- movl 240(%rdx),%eax
- movups (%rdx),%xmm0
- movups 16(%rdx),%xmm1
- leaq 32(%rdx),%rdx
- xorps %xmm0,%xmm2
- .Loop_enc1_1:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rdx),%xmm1
- leaq 16(%rdx),%rdx
- jnz .Loop_enc1_1
- .byte 102,15,56,221,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_encrypt,.-aesni_encrypt
- .globl aesni_decrypt
- .type aesni_decrypt,@function
- .align 16
- aesni_decrypt:
- .cfi_startproc
- movups (%rdi),%xmm2
- movl 240(%rdx),%eax
- movups (%rdx),%xmm0
- movups 16(%rdx),%xmm1
- leaq 32(%rdx),%rdx
- xorps %xmm0,%xmm2
- .Loop_dec1_2:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rdx),%xmm1
- leaq 16(%rdx),%rdx
- jnz .Loop_dec1_2
- .byte 102,15,56,223,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_decrypt, .-aesni_decrypt
- .type _aesni_encrypt2,@function
- .align 16
- _aesni_encrypt2:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- .Lenc_loop2:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lenc_loop2
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_encrypt2,.-_aesni_encrypt2
- .type _aesni_decrypt2,@function
- .align 16
- _aesni_decrypt2:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- .Ldec_loop2:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Ldec_loop2
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_decrypt2,.-_aesni_decrypt2
- .type _aesni_encrypt3,@function
- .align 16
- _aesni_encrypt3:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- .Lenc_loop3:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lenc_loop3
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_encrypt3,.-_aesni_encrypt3
- .type _aesni_decrypt3,@function
- .align 16
- _aesni_decrypt3:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- addq $16,%rax
- .Ldec_loop3:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Ldec_loop3
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_decrypt3,.-_aesni_decrypt3
- .type _aesni_encrypt4,@function
- .align 16
- _aesni_encrypt4:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- xorps %xmm0,%xmm5
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 0x0f,0x1f,0x00
- addq $16,%rax
- .Lenc_loop4:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lenc_loop4
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_encrypt4,.-_aesni_encrypt4
- .type _aesni_decrypt4,@function
- .align 16
- _aesni_decrypt4:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- xorps %xmm0,%xmm4
- xorps %xmm0,%xmm5
- movups 32(%rcx),%xmm0
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 0x0f,0x1f,0x00
- addq $16,%rax
- .Ldec_loop4:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Ldec_loop4
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_decrypt4,.-_aesni_decrypt4
- .type _aesni_encrypt6,@function
- .align 16
- _aesni_encrypt6:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- pxor %xmm0,%xmm3
- pxor %xmm0,%xmm4
- .byte 102,15,56,220,209
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,217
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- .byte 102,15,56,220,225
- pxor %xmm0,%xmm7
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp .Lenc_loop6_enter
- .align 16
- .Lenc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .Lenc_loop6_enter:
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lenc_loop6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 102,15,56,221,240
- .byte 102,15,56,221,248
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_encrypt6,.-_aesni_encrypt6
- .type _aesni_decrypt6,@function
- .align 16
- _aesni_decrypt6:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- pxor %xmm0,%xmm3
- pxor %xmm0,%xmm4
- .byte 102,15,56,222,209
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- .byte 102,15,56,222,225
- pxor %xmm0,%xmm7
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp .Ldec_loop6_enter
- .align 16
- .Ldec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .Ldec_loop6_enter:
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Ldec_loop6
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 102,15,56,223,240
- .byte 102,15,56,223,248
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_decrypt6,.-_aesni_decrypt6
- .type _aesni_encrypt8,@function
- .align 16
- _aesni_encrypt8:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- pxor %xmm0,%xmm4
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,209
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,220,217
- pxor %xmm0,%xmm9
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp .Lenc_loop8_inner
- .align 16
- .Lenc_loop8:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .Lenc_loop8_inner:
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- .Lenc_loop8_enter:
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lenc_loop8
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- .byte 102,15,56,221,224
- .byte 102,15,56,221,232
- .byte 102,15,56,221,240
- .byte 102,15,56,221,248
- .byte 102,68,15,56,221,192
- .byte 102,68,15,56,221,200
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_encrypt8,.-_aesni_encrypt8
- .type _aesni_decrypt8,@function
- .align 16
- _aesni_decrypt8:
- .cfi_startproc
- movups (%rcx),%xmm0
- shll $4,%eax
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm2
- xorps %xmm0,%xmm3
- pxor %xmm0,%xmm4
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- leaq 32(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,222,209
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm9
- movups (%rcx,%rax,1),%xmm0
- addq $16,%rax
- jmp .Ldec_loop8_inner
- .align 16
- .Ldec_loop8:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .Ldec_loop8_inner:
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- .Ldec_loop8_enter:
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Ldec_loop8
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- .byte 102,15,56,223,208
- .byte 102,15,56,223,216
- .byte 102,15,56,223,224
- .byte 102,15,56,223,232
- .byte 102,15,56,223,240
- .byte 102,15,56,223,248
- .byte 102,68,15,56,223,192
- .byte 102,68,15,56,223,200
- .byte 0xf3,0xc3
- .cfi_endproc
- .size _aesni_decrypt8,.-_aesni_decrypt8
- .globl aesni_ecb_encrypt
- .type aesni_ecb_encrypt,@function
- .align 16
- aesni_ecb_encrypt:
- .cfi_startproc
- andq $-16,%rdx
- jz .Lecb_ret
- movl 240(%rcx),%eax
- movups (%rcx),%xmm0
- movq %rcx,%r11
- movl %eax,%r10d
- testl %r8d,%r8d
- jz .Lecb_decrypt
- cmpq $0x80,%rdx
- jb .Lecb_enc_tail
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- movdqu 96(%rdi),%xmm8
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- subq $0x80,%rdx
- jmp .Lecb_enc_loop8_enter
- .align 16
- .Lecb_enc_loop8:
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movdqu (%rdi),%xmm2
- movl %r10d,%eax
- movups %xmm3,16(%rsi)
- movdqu 16(%rdi),%xmm3
- movups %xmm4,32(%rsi)
- movdqu 32(%rdi),%xmm4
- movups %xmm5,48(%rsi)
- movdqu 48(%rdi),%xmm5
- movups %xmm6,64(%rsi)
- movdqu 64(%rdi),%xmm6
- movups %xmm7,80(%rsi)
- movdqu 80(%rdi),%xmm7
- movups %xmm8,96(%rsi)
- movdqu 96(%rdi),%xmm8
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- .Lecb_enc_loop8_enter:
- call _aesni_encrypt8
- subq $0x80,%rdx
- jnc .Lecb_enc_loop8
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movups %xmm3,16(%rsi)
- movl %r10d,%eax
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- movups %xmm8,96(%rsi)
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- addq $0x80,%rdx
- jz .Lecb_ret
- .Lecb_enc_tail:
- movups (%rdi),%xmm2
- cmpq $0x20,%rdx
- jb .Lecb_enc_one
- movups 16(%rdi),%xmm3
- je .Lecb_enc_two
- movups 32(%rdi),%xmm4
- cmpq $0x40,%rdx
- jb .Lecb_enc_three
- movups 48(%rdi),%xmm5
- je .Lecb_enc_four
- movups 64(%rdi),%xmm6
- cmpq $0x60,%rdx
- jb .Lecb_enc_five
- movups 80(%rdi),%xmm7
- je .Lecb_enc_six
- movdqu 96(%rdi),%xmm8
- xorps %xmm9,%xmm9
- call _aesni_encrypt8
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- movups %xmm8,96(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_enc_one:
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_enc1_3:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_enc1_3
- .byte 102,15,56,221,209
- movups %xmm2,(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_enc_two:
- call _aesni_encrypt2
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_enc_three:
- call _aesni_encrypt3
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_enc_four:
- call _aesni_encrypt4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_enc_five:
- xorps %xmm7,%xmm7
- call _aesni_encrypt6
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_enc_six:
- call _aesni_encrypt6
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- jmp .Lecb_ret
- .align 16
- .Lecb_decrypt:
- cmpq $0x80,%rdx
- jb .Lecb_dec_tail
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- movdqu 96(%rdi),%xmm8
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- subq $0x80,%rdx
- jmp .Lecb_dec_loop8_enter
- .align 16
- .Lecb_dec_loop8:
- movups %xmm2,(%rsi)
- movq %r11,%rcx
- movdqu (%rdi),%xmm2
- movl %r10d,%eax
- movups %xmm3,16(%rsi)
- movdqu 16(%rdi),%xmm3
- movups %xmm4,32(%rsi)
- movdqu 32(%rdi),%xmm4
- movups %xmm5,48(%rsi)
- movdqu 48(%rdi),%xmm5
- movups %xmm6,64(%rsi)
- movdqu 64(%rdi),%xmm6
- movups %xmm7,80(%rsi)
- movdqu 80(%rdi),%xmm7
- movups %xmm8,96(%rsi)
- movdqu 96(%rdi),%xmm8
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- movdqu 112(%rdi),%xmm9
- leaq 128(%rdi),%rdi
- .Lecb_dec_loop8_enter:
- call _aesni_decrypt8
- movups (%r11),%xmm0
- subq $0x80,%rdx
- jnc .Lecb_dec_loop8
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movq %r11,%rcx
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movl %r10d,%eax
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- movups %xmm8,96(%rsi)
- pxor %xmm8,%xmm8
- movups %xmm9,112(%rsi)
- pxor %xmm9,%xmm9
- leaq 128(%rsi),%rsi
- addq $0x80,%rdx
- jz .Lecb_ret
- .Lecb_dec_tail:
- movups (%rdi),%xmm2
- cmpq $0x20,%rdx
- jb .Lecb_dec_one
- movups 16(%rdi),%xmm3
- je .Lecb_dec_two
- movups 32(%rdi),%xmm4
- cmpq $0x40,%rdx
- jb .Lecb_dec_three
- movups 48(%rdi),%xmm5
- je .Lecb_dec_four
- movups 64(%rdi),%xmm6
- cmpq $0x60,%rdx
- jb .Lecb_dec_five
- movups 80(%rdi),%xmm7
- je .Lecb_dec_six
- movups 96(%rdi),%xmm8
- movups (%rcx),%xmm0
- xorps %xmm9,%xmm9
- call _aesni_decrypt8
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- movups %xmm8,96(%rsi)
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- jmp .Lecb_ret
- .align 16
- .Lecb_dec_one:
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_dec1_4:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_dec1_4
- .byte 102,15,56,223,209
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp .Lecb_ret
- .align 16
- .Lecb_dec_two:
- call _aesni_decrypt2
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- jmp .Lecb_ret
- .align 16
- .Lecb_dec_three:
- call _aesni_decrypt3
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- jmp .Lecb_ret
- .align 16
- .Lecb_dec_four:
- call _aesni_decrypt4
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- jmp .Lecb_ret
- .align 16
- .Lecb_dec_five:
- xorps %xmm7,%xmm7
- call _aesni_decrypt6
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- jmp .Lecb_ret
- .align 16
- .Lecb_dec_six:
- call _aesni_decrypt6
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- .Lecb_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_ecb_encrypt,.-aesni_ecb_encrypt
- .globl aesni_ccm64_encrypt_blocks
- .type aesni_ccm64_encrypt_blocks,@function
- .align 16
- aesni_ccm64_encrypt_blocks:
- .cfi_startproc
- movl 240(%rcx),%eax
- movdqu (%r8),%xmm6
- movdqa .Lincrement64(%rip),%xmm9
- movdqa .Lbswap_mask(%rip),%xmm7
- shll $4,%eax
- movl $16,%r10d
- leaq 0(%rcx),%r11
- movdqu (%r9),%xmm3
- movdqa %xmm6,%xmm2
- leaq 32(%rcx,%rax,1),%rcx
- .byte 102,15,56,0,247
- subq %rax,%r10
- jmp .Lccm64_enc_outer
- .align 16
- .Lccm64_enc_outer:
- movups (%r11),%xmm0
- movq %r10,%rax
- movups (%rdi),%xmm8
- xorps %xmm0,%xmm2
- movups 16(%r11),%xmm1
- xorps %xmm8,%xmm0
- xorps %xmm0,%xmm3
- movups 32(%r11),%xmm0
- .Lccm64_enc2_loop:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lccm64_enc2_loop
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- paddq %xmm9,%xmm6
- decq %rdx
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- leaq 16(%rdi),%rdi
- xorps %xmm2,%xmm8
- movdqa %xmm6,%xmm2
- movups %xmm8,(%rsi)
- .byte 102,15,56,0,215
- leaq 16(%rsi),%rsi
- jnz .Lccm64_enc_outer
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- movups %xmm3,(%r9)
- pxor %xmm3,%xmm3
- pxor %xmm8,%xmm8
- pxor %xmm6,%xmm6
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks
- .globl aesni_ccm64_decrypt_blocks
- .type aesni_ccm64_decrypt_blocks,@function
- .align 16
- aesni_ccm64_decrypt_blocks:
- .cfi_startproc
- movl 240(%rcx),%eax
- movups (%r8),%xmm6
- movdqu (%r9),%xmm3
- movdqa .Lincrement64(%rip),%xmm9
- movdqa .Lbswap_mask(%rip),%xmm7
- movaps %xmm6,%xmm2
- movl %eax,%r10d
- movq %rcx,%r11
- .byte 102,15,56,0,247
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_enc1_5:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_enc1_5
- .byte 102,15,56,221,209
- shll $4,%r10d
- movl $16,%eax
- movups (%rdi),%xmm8
- paddq %xmm9,%xmm6
- leaq 16(%rdi),%rdi
- subq %r10,%rax
- leaq 32(%r11,%r10,1),%rcx
- movq %rax,%r10
- jmp .Lccm64_dec_outer
- .align 16
- .Lccm64_dec_outer:
- xorps %xmm2,%xmm8
- movdqa %xmm6,%xmm2
- movups %xmm8,(%rsi)
- leaq 16(%rsi),%rsi
- .byte 102,15,56,0,215
- subq $1,%rdx
- jz .Lccm64_dec_break
- movups (%r11),%xmm0
- movq %r10,%rax
- movups 16(%r11),%xmm1
- xorps %xmm0,%xmm8
- xorps %xmm0,%xmm2
- xorps %xmm8,%xmm3
- movups 32(%r11),%xmm0
- jmp .Lccm64_dec2_loop
- .align 16
- .Lccm64_dec2_loop:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Lccm64_dec2_loop
- movups (%rdi),%xmm8
- paddq %xmm9,%xmm6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,221,208
- .byte 102,15,56,221,216
- leaq 16(%rdi),%rdi
- jmp .Lccm64_dec_outer
- .align 16
- .Lccm64_dec_break:
- movl 240(%r11),%eax
- movups (%r11),%xmm0
- movups 16(%r11),%xmm1
- xorps %xmm0,%xmm8
- leaq 32(%r11),%r11
- xorps %xmm8,%xmm3
- .Loop_enc1_6:
- .byte 102,15,56,220,217
- decl %eax
- movups (%r11),%xmm1
- leaq 16(%r11),%r11
- jnz .Loop_enc1_6
- .byte 102,15,56,221,217
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- movups %xmm3,(%r9)
- pxor %xmm3,%xmm3
- pxor %xmm8,%xmm8
- pxor %xmm6,%xmm6
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks
- .globl aesni_ctr32_encrypt_blocks
- .type aesni_ctr32_encrypt_blocks,@function
- .align 16
- aesni_ctr32_encrypt_blocks:
- .cfi_startproc
- cmpq $1,%rdx
- jne .Lctr32_bulk
- movups (%r8),%xmm2
- movups (%rdi),%xmm3
- movl 240(%rcx),%edx
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_enc1_7:
- .byte 102,15,56,220,209
- decl %edx
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_enc1_7
- .byte 102,15,56,221,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- xorps %xmm3,%xmm2
- pxor %xmm3,%xmm3
- movups %xmm2,(%rsi)
- xorps %xmm2,%xmm2
- jmp .Lctr32_epilogue
- .align 16
- .Lctr32_bulk:
- leaq (%rsp),%r11
- .cfi_def_cfa_register %r11
- pushq %rbp
- .cfi_offset %rbp,-16
- subq $128,%rsp
- andq $-16,%rsp
- movdqu (%r8),%xmm2
- movdqu (%rcx),%xmm0
- movl 12(%r8),%r8d
- pxor %xmm0,%xmm2
- movl 12(%rcx),%ebp
- movdqa %xmm2,0(%rsp)
- bswapl %r8d
- movdqa %xmm2,%xmm3
- movdqa %xmm2,%xmm4
- movdqa %xmm2,%xmm5
- movdqa %xmm2,64(%rsp)
- movdqa %xmm2,80(%rsp)
- movdqa %xmm2,96(%rsp)
- movq %rdx,%r10
- movdqa %xmm2,112(%rsp)
- leaq 1(%r8),%rax
- leaq 2(%r8),%rdx
- bswapl %eax
- bswapl %edx
- xorl %ebp,%eax
- xorl %ebp,%edx
- .byte 102,15,58,34,216,3
- leaq 3(%r8),%rax
- movdqa %xmm3,16(%rsp)
- .byte 102,15,58,34,226,3
- bswapl %eax
- movq %r10,%rdx
- leaq 4(%r8),%r10
- movdqa %xmm4,32(%rsp)
- xorl %ebp,%eax
- bswapl %r10d
- .byte 102,15,58,34,232,3
- xorl %ebp,%r10d
- movdqa %xmm5,48(%rsp)
- leaq 5(%r8),%r9
- movl %r10d,64+12(%rsp)
- bswapl %r9d
- leaq 6(%r8),%r10
- movl 240(%rcx),%eax
- xorl %ebp,%r9d
- bswapl %r10d
- movl %r9d,80+12(%rsp)
- xorl %ebp,%r10d
- leaq 7(%r8),%r9
- movl %r10d,96+12(%rsp)
- bswapl %r9d
- movl OPENSSL_ia32cap_P+4(%rip),%r10d
- xorl %ebp,%r9d
- andl $71303168,%r10d
- movl %r9d,112+12(%rsp)
- movups 16(%rcx),%xmm1
- movdqa 64(%rsp),%xmm6
- movdqa 80(%rsp),%xmm7
- cmpq $8,%rdx
- jb .Lctr32_tail
- subq $6,%rdx
- cmpl $4194304,%r10d
- je .Lctr32_6x
- leaq 128(%rcx),%rcx
- subq $2,%rdx
- jmp .Lctr32_loop8
- .align 16
- .Lctr32_6x:
- shll $4,%eax
- movl $48,%r10d
- bswapl %ebp
- leaq 32(%rcx,%rax,1),%rcx
- subq %rax,%r10
- jmp .Lctr32_loop6
- .align 16
- .Lctr32_loop6:
- addl $6,%r8d
- movups -48(%rcx,%r10,1),%xmm0
- .byte 102,15,56,220,209
- movl %r8d,%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,217
- .byte 0x0f,0x38,0xf1,0x44,0x24,12
- leal 1(%r8),%eax
- .byte 102,15,56,220,225
- xorl %ebp,%eax
- .byte 0x0f,0x38,0xf1,0x44,0x24,28
- .byte 102,15,56,220,233
- leal 2(%r8),%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,241
- .byte 0x0f,0x38,0xf1,0x44,0x24,44
- leal 3(%r8),%eax
- .byte 102,15,56,220,249
- movups -32(%rcx,%r10,1),%xmm1
- xorl %ebp,%eax
- .byte 102,15,56,220,208
- .byte 0x0f,0x38,0xf1,0x44,0x24,60
- leal 4(%r8),%eax
- .byte 102,15,56,220,216
- xorl %ebp,%eax
- .byte 0x0f,0x38,0xf1,0x44,0x24,76
- .byte 102,15,56,220,224
- leal 5(%r8),%eax
- xorl %ebp,%eax
- .byte 102,15,56,220,232
- .byte 0x0f,0x38,0xf1,0x44,0x24,92
- movq %r10,%rax
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%r10,1),%xmm0
- call .Lenc_loop6
- movdqu (%rdi),%xmm8
- movdqu 16(%rdi),%xmm9
- movdqu 32(%rdi),%xmm10
- movdqu 48(%rdi),%xmm11
- movdqu 64(%rdi),%xmm12
- movdqu 80(%rdi),%xmm13
- leaq 96(%rdi),%rdi
- movups -64(%rcx,%r10,1),%xmm1
- pxor %xmm2,%xmm8
- movaps 0(%rsp),%xmm2
- pxor %xmm3,%xmm9
- movaps 16(%rsp),%xmm3
- pxor %xmm4,%xmm10
- movaps 32(%rsp),%xmm4
- pxor %xmm5,%xmm11
- movaps 48(%rsp),%xmm5
- pxor %xmm6,%xmm12
- movaps 64(%rsp),%xmm6
- pxor %xmm7,%xmm13
- movaps 80(%rsp),%xmm7
- movdqu %xmm8,(%rsi)
- movdqu %xmm9,16(%rsi)
- movdqu %xmm10,32(%rsi)
- movdqu %xmm11,48(%rsi)
- movdqu %xmm12,64(%rsi)
- movdqu %xmm13,80(%rsi)
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc .Lctr32_loop6
- addq $6,%rdx
- jz .Lctr32_done
- leal -48(%r10),%eax
- leaq -80(%rcx,%r10,1),%rcx
- negl %eax
- shrl $4,%eax
- jmp .Lctr32_tail
- .align 32
- .Lctr32_loop8:
- addl $8,%r8d
- movdqa 96(%rsp),%xmm8
- .byte 102,15,56,220,209
- movl %r8d,%r9d
- movdqa 112(%rsp),%xmm9
- .byte 102,15,56,220,217
- bswapl %r9d
- movups 32-128(%rcx),%xmm0
- .byte 102,15,56,220,225
- xorl %ebp,%r9d
- nop
- .byte 102,15,56,220,233
- movl %r9d,0+12(%rsp)
- leaq 1(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 48-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,16+12(%rsp)
- leaq 2(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 64-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,32+12(%rsp)
- leaq 3(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 80-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,48+12(%rsp)
- leaq 4(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 96-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,64+12(%rsp)
- leaq 5(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 112-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movl %r9d,80+12(%rsp)
- leaq 6(%r8),%r9
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 128-128(%rcx),%xmm0
- bswapl %r9d
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- xorl %ebp,%r9d
- .byte 0x66,0x90
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movl %r9d,96+12(%rsp)
- leaq 7(%r8),%r9
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 144-128(%rcx),%xmm1
- bswapl %r9d
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- xorl %ebp,%r9d
- movdqu 0(%rdi),%xmm10
- .byte 102,15,56,220,232
- movl %r9d,112+12(%rsp)
- cmpl $11,%eax
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 160-128(%rcx),%xmm0
- jb .Lctr32_enc_done
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 176-128(%rcx),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 192-128(%rcx),%xmm0
- je .Lctr32_enc_done
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movups 208-128(%rcx),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- .byte 102,68,15,56,220,192
- .byte 102,68,15,56,220,200
- movups 224-128(%rcx),%xmm0
- jmp .Lctr32_enc_done
- .align 16
- .Lctr32_enc_done:
- movdqu 16(%rdi),%xmm11
- pxor %xmm0,%xmm10
- movdqu 32(%rdi),%xmm12
- pxor %xmm0,%xmm11
- movdqu 48(%rdi),%xmm13
- pxor %xmm0,%xmm12
- movdqu 64(%rdi),%xmm14
- pxor %xmm0,%xmm13
- movdqu 80(%rdi),%xmm15
- pxor %xmm0,%xmm14
- pxor %xmm0,%xmm15
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- .byte 102,68,15,56,220,201
- movdqu 96(%rdi),%xmm1
- leaq 128(%rdi),%rdi
- .byte 102,65,15,56,221,210
- pxor %xmm0,%xmm1
- movdqu 112-128(%rdi),%xmm10
- .byte 102,65,15,56,221,219
- pxor %xmm0,%xmm10
- movdqa 0(%rsp),%xmm11
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- movdqa 16(%rsp),%xmm12
- movdqa 32(%rsp),%xmm13
- .byte 102,65,15,56,221,246
- .byte 102,65,15,56,221,255
- movdqa 48(%rsp),%xmm14
- movdqa 64(%rsp),%xmm15
- .byte 102,68,15,56,221,193
- movdqa 80(%rsp),%xmm0
- movups 16-128(%rcx),%xmm1
- .byte 102,69,15,56,221,202
- movups %xmm2,(%rsi)
- movdqa %xmm11,%xmm2
- movups %xmm3,16(%rsi)
- movdqa %xmm12,%xmm3
- movups %xmm4,32(%rsi)
- movdqa %xmm13,%xmm4
- movups %xmm5,48(%rsi)
- movdqa %xmm14,%xmm5
- movups %xmm6,64(%rsi)
- movdqa %xmm15,%xmm6
- movups %xmm7,80(%rsi)
- movdqa %xmm0,%xmm7
- movups %xmm8,96(%rsi)
- movups %xmm9,112(%rsi)
- leaq 128(%rsi),%rsi
- subq $8,%rdx
- jnc .Lctr32_loop8
- addq $8,%rdx
- jz .Lctr32_done
- leaq -128(%rcx),%rcx
- .Lctr32_tail:
- leaq 16(%rcx),%rcx
- cmpq $4,%rdx
- jb .Lctr32_loop3
- je .Lctr32_loop4
- shll $4,%eax
- movdqa 96(%rsp),%xmm8
- pxor %xmm9,%xmm9
- movups 16(%rcx),%xmm0
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- leaq 32-16(%rcx,%rax,1),%rcx
- negq %rax
- .byte 102,15,56,220,225
- addq $16,%rax
- movups (%rdi),%xmm10
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- movups 16(%rdi),%xmm11
- movups 32(%rdi),%xmm12
- .byte 102,15,56,220,249
- .byte 102,68,15,56,220,193
- call .Lenc_loop8_enter
- movdqu 48(%rdi),%xmm13
- pxor %xmm10,%xmm2
- movdqu 64(%rdi),%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm10,%xmm6
- movdqu %xmm5,48(%rsi)
- movdqu %xmm6,64(%rsi)
- cmpq $6,%rdx
- jb .Lctr32_done
- movups 80(%rdi),%xmm11
- xorps %xmm11,%xmm7
- movups %xmm7,80(%rsi)
- je .Lctr32_done
- movups 96(%rdi),%xmm12
- xorps %xmm12,%xmm8
- movups %xmm8,96(%rsi)
- jmp .Lctr32_done
- .align 32
- .Lctr32_loop4:
- .byte 102,15,56,220,209
- leaq 16(%rcx),%rcx
- decl %eax
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx),%xmm1
- jnz .Lctr32_loop4
- .byte 102,15,56,221,209
- .byte 102,15,56,221,217
- movups (%rdi),%xmm10
- movups 16(%rdi),%xmm11
- .byte 102,15,56,221,225
- .byte 102,15,56,221,233
- movups 32(%rdi),%xmm12
- movups 48(%rdi),%xmm13
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- xorps %xmm11,%xmm3
- movups %xmm3,16(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm4,32(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm5,48(%rsi)
- jmp .Lctr32_done
- .align 32
- .Lctr32_loop3:
- .byte 102,15,56,220,209
- leaq 16(%rcx),%rcx
- decl %eax
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- movups (%rcx),%xmm1
- jnz .Lctr32_loop3
- .byte 102,15,56,221,209
- .byte 102,15,56,221,217
- .byte 102,15,56,221,225
- movups (%rdi),%xmm10
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- cmpq $2,%rdx
- jb .Lctr32_done
- movups 16(%rdi),%xmm11
- xorps %xmm11,%xmm3
- movups %xmm3,16(%rsi)
- je .Lctr32_done
- movups 32(%rdi),%xmm12
- xorps %xmm12,%xmm4
- movups %xmm4,32(%rsi)
- .Lctr32_done:
- xorps %xmm0,%xmm0
- xorl %ebp,%ebp
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- movaps %xmm0,112(%rsp)
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- .cfi_restore %rbp
- leaq (%r11),%rsp
- .cfi_def_cfa_register %rsp
- .Lctr32_epilogue:
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_ctr32_encrypt_blocks,.-aesni_ctr32_encrypt_blocks
- .globl aesni_xts_encrypt
- .type aesni_xts_encrypt,@function
- .align 16
- aesni_xts_encrypt:
- .cfi_startproc
- leaq (%rsp),%r11
- .cfi_def_cfa_register %r11
- pushq %rbp
- .cfi_offset %rbp,-16
- subq $112,%rsp
- andq $-16,%rsp
- movups (%r9),%xmm2
- movl 240(%r8),%eax
- movl 240(%rcx),%r10d
- movups (%r8),%xmm0
- movups 16(%r8),%xmm1
- leaq 32(%r8),%r8
- xorps %xmm0,%xmm2
- .Loop_enc1_8:
- .byte 102,15,56,220,209
- decl %eax
- movups (%r8),%xmm1
- leaq 16(%r8),%r8
- jnz .Loop_enc1_8
- .byte 102,15,56,221,209
- movups (%rcx),%xmm0
- movq %rcx,%rbp
- movl %r10d,%eax
- shll $4,%r10d
- movq %rdx,%r9
- andq $-16,%rdx
- movups 16(%rcx,%r10,1),%xmm1
- movdqa .Lxts_magic(%rip),%xmm8
- movdqa %xmm2,%xmm15
- pshufd $0x5f,%xmm2,%xmm9
- pxor %xmm0,%xmm1
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm10
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm10
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm11
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm11
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm12
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm12
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm13
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm13
- pxor %xmm14,%xmm15
- movdqa %xmm15,%xmm14
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm9
- pxor %xmm0,%xmm14
- pxor %xmm9,%xmm15
- movaps %xmm1,96(%rsp)
- subq $96,%rdx
- jc .Lxts_enc_short
- movl $16+96,%eax
- leaq 32(%rbp,%r10,1),%rcx
- subq %r10,%rax
- movups 16(%rbp),%xmm1
- movq %rax,%r10
- leaq .Lxts_magic(%rip),%r8
- jmp .Lxts_enc_grandloop
- .align 32
- .Lxts_enc_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqa %xmm0,%xmm8
- movdqu 16(%rdi),%xmm3
- pxor %xmm10,%xmm2
- movdqu 32(%rdi),%xmm4
- pxor %xmm11,%xmm3
- .byte 102,15,56,220,209
- movdqu 48(%rdi),%xmm5
- pxor %xmm12,%xmm4
- .byte 102,15,56,220,217
- movdqu 64(%rdi),%xmm6
- pxor %xmm13,%xmm5
- .byte 102,15,56,220,225
- movdqu 80(%rdi),%xmm7
- pxor %xmm15,%xmm8
- movdqa 96(%rsp),%xmm9
- pxor %xmm14,%xmm6
- .byte 102,15,56,220,233
- movups 32(%rbp),%xmm0
- leaq 96(%rdi),%rdi
- pxor %xmm8,%xmm7
- pxor %xmm9,%xmm10
- .byte 102,15,56,220,241
- pxor %xmm9,%xmm11
- movdqa %xmm10,0(%rsp)
- .byte 102,15,56,220,249
- movups 48(%rbp),%xmm1
- pxor %xmm9,%xmm12
- .byte 102,15,56,220,208
- pxor %xmm9,%xmm13
- movdqa %xmm11,16(%rsp)
- .byte 102,15,56,220,216
- pxor %xmm9,%xmm14
- movdqa %xmm12,32(%rsp)
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- pxor %xmm9,%xmm8
- movdqa %xmm14,64(%rsp)
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups 64(%rbp),%xmm0
- movdqa %xmm8,80(%rsp)
- pshufd $0x5f,%xmm15,%xmm9
- jmp .Lxts_enc_loop6
- .align 32
- .Lxts_enc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups -64(%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -80(%rcx,%rax,1),%xmm0
- jnz .Lxts_enc_loop6
- movdqa (%r8),%xmm8
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,209
- paddq %xmm15,%xmm15
- psrad $31,%xmm14
- .byte 102,15,56,220,217
- pand %xmm8,%xmm14
- movups (%rbp),%xmm10
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- pxor %xmm14,%xmm15
- movaps %xmm10,%xmm11
- .byte 102,15,56,220,249
- movups -64(%rcx),%xmm1
- movdqa %xmm9,%xmm14
- .byte 102,15,56,220,208
- paddd %xmm9,%xmm9
- pxor %xmm15,%xmm10
- .byte 102,15,56,220,216
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- pand %xmm8,%xmm14
- movaps %xmm11,%xmm12
- .byte 102,15,56,220,240
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- .byte 102,15,56,220,248
- movups -48(%rcx),%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,209
- pxor %xmm15,%xmm11
- psrad $31,%xmm14
- .byte 102,15,56,220,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movdqa %xmm13,48(%rsp)
- pxor %xmm14,%xmm15
- .byte 102,15,56,220,241
- movaps %xmm12,%xmm13
- movdqa %xmm9,%xmm14
- .byte 102,15,56,220,249
- movups -32(%rcx),%xmm1
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,208
- pxor %xmm15,%xmm12
- psrad $31,%xmm14
- .byte 102,15,56,220,216
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- pxor %xmm14,%xmm15
- movaps %xmm13,%xmm14
- .byte 102,15,56,220,248
- movdqa %xmm9,%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,220,209
- pxor %xmm15,%xmm13
- psrad $31,%xmm0
- .byte 102,15,56,220,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm0
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- pxor %xmm0,%xmm15
- movups (%rbp),%xmm0
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups 16(%rbp),%xmm1
- pxor %xmm15,%xmm14
- .byte 102,15,56,221,84,36,0
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- .byte 102,15,56,221,92,36,16
- .byte 102,15,56,221,100,36,32
- pand %xmm8,%xmm9
- movq %r10,%rax
- .byte 102,15,56,221,108,36,48
- .byte 102,15,56,221,116,36,64
- .byte 102,15,56,221,124,36,80
- pxor %xmm9,%xmm15
- leaq 96(%rsi),%rsi
- movups %xmm2,-96(%rsi)
- movups %xmm3,-80(%rsi)
- movups %xmm4,-64(%rsi)
- movups %xmm5,-48(%rsi)
- movups %xmm6,-32(%rsi)
- movups %xmm7,-16(%rsi)
- subq $96,%rdx
- jnc .Lxts_enc_grandloop
- movl $16+96,%eax
- subl %r10d,%eax
- movq %rbp,%rcx
- shrl $4,%eax
- .Lxts_enc_short:
- movl %eax,%r10d
- pxor %xmm0,%xmm10
- addq $96,%rdx
- jz .Lxts_enc_done
- pxor %xmm0,%xmm11
- cmpq $0x20,%rdx
- jb .Lxts_enc_one
- pxor %xmm0,%xmm12
- je .Lxts_enc_two
- pxor %xmm0,%xmm13
- cmpq $0x40,%rdx
- jb .Lxts_enc_three
- pxor %xmm0,%xmm14
- je .Lxts_enc_four
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- pxor %xmm10,%xmm2
- movdqu 48(%rdi),%xmm5
- pxor %xmm11,%xmm3
- movdqu 64(%rdi),%xmm6
- leaq 80(%rdi),%rdi
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm6
- pxor %xmm7,%xmm7
- call _aesni_encrypt6
- xorps %xmm10,%xmm2
- movdqa %xmm15,%xmm10
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- xorps %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- xorps %xmm14,%xmm6
- movdqu %xmm4,32(%rsi)
- movdqu %xmm5,48(%rsi)
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- jmp .Lxts_enc_done
- .align 16
- .Lxts_enc_one:
- movups (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_enc1_9:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_enc1_9
- .byte 102,15,56,221,209
- xorps %xmm10,%xmm2
- movdqa %xmm11,%xmm10
- movups %xmm2,(%rsi)
- leaq 16(%rsi),%rsi
- jmp .Lxts_enc_done
- .align 16
- .Lxts_enc_two:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- leaq 32(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- call _aesni_encrypt2
- xorps %xmm10,%xmm2
- movdqa %xmm12,%xmm10
- xorps %xmm11,%xmm3
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- leaq 32(%rsi),%rsi
- jmp .Lxts_enc_done
- .align 16
- .Lxts_enc_three:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- leaq 48(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- call _aesni_encrypt3
- xorps %xmm10,%xmm2
- movdqa %xmm13,%xmm10
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- leaq 48(%rsi),%rsi
- jmp .Lxts_enc_done
- .align 16
- .Lxts_enc_four:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- xorps %xmm10,%xmm2
- movups 48(%rdi),%xmm5
- leaq 64(%rdi),%rdi
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- xorps %xmm13,%xmm5
- call _aesni_encrypt4
- pxor %xmm10,%xmm2
- movdqa %xmm14,%xmm10
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- movdqu %xmm4,32(%rsi)
- movdqu %xmm5,48(%rsi)
- leaq 64(%rsi),%rsi
- jmp .Lxts_enc_done
- .align 16
- .Lxts_enc_done:
- andq $15,%r9
- jz .Lxts_enc_ret
- movq %r9,%rdx
- .Lxts_enc_steal:
- movzbl (%rdi),%eax
- movzbl -16(%rsi),%ecx
- leaq 1(%rdi),%rdi
- movb %al,-16(%rsi)
- movb %cl,0(%rsi)
- leaq 1(%rsi),%rsi
- subq $1,%rdx
- jnz .Lxts_enc_steal
- subq %r9,%rsi
- movq %rbp,%rcx
- movl %r10d,%eax
- movups -16(%rsi),%xmm2
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_enc1_10:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_enc1_10
- .byte 102,15,56,221,209
- xorps %xmm10,%xmm2
- movups %xmm2,-16(%rsi)
- .Lxts_enc_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- .cfi_restore %rbp
- leaq (%r11),%rsp
- .cfi_def_cfa_register %rsp
- .Lxts_enc_epilogue:
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_xts_encrypt,.-aesni_xts_encrypt
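
The routine above is machine-generated (perlasm), which is why the AES-NI instructions appear as raw opcode bytes: 102,15,56,220 is 0x66 0x0F 0x38 0xDC, i.e. aesenc, 221 is aesenclast, and 222/223 are the aesdec/aesdeclast pair used by the decrypt routines below, each followed by a ModRM byte selecting the block register and the round key in %xmm0/%xmm1. The pshufd/psrad/pand/paddq clusters compute the next XTS tweak, i.e. multiplication by x in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1, folding in the 0x87 constant stored at .Lxts_magic on carry-out. A minimal scalar sketch of that tweak update (illustrative C, not an OpenSSL API):

#include <stdint.h>

/* Scalar sketch of the XTS tweak update the SSE sequence above vectorizes:
 * shift the 128-bit tweak left one bit (little-endian byte order, per
 * IEEE P1619) and reduce with the 0x87 constant from .Lxts_magic. */
static void xts_double_tweak(uint8_t t[16])
{
    unsigned carry = 0;
    for (int i = 0; i < 16; i++) {
        unsigned msb = t[i] >> 7;           /* bit shifted out of this byte */
        t[i] = (uint8_t)((t[i] << 1) | carry);
        carry = msb;
    }
    if (carry)
        t[0] ^= 0x87;                       /* reduce by x^128 = x^7+x^2+x+1 */
}
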
- .globl aesni_xts_decrypt
- .type aesni_xts_decrypt,@function
- .align 16
- aesni_xts_decrypt:
- .cfi_startproc
- leaq (%rsp),%r11
- .cfi_def_cfa_register %r11
- pushq %rbp
- .cfi_offset %rbp,-16
- subq $112,%rsp
- andq $-16,%rsp
- movups (%r9),%xmm2
- movl 240(%r8),%eax
- movl 240(%rcx),%r10d
- movups (%r8),%xmm0
- movups 16(%r8),%xmm1
- leaq 32(%r8),%r8
- xorps %xmm0,%xmm2
- .Loop_enc1_11:
- .byte 102,15,56,220,209
- decl %eax
- movups (%r8),%xmm1
- leaq 16(%r8),%r8
- jnz .Loop_enc1_11
- .byte 102,15,56,221,209
- xorl %eax,%eax
- testq $15,%rdx
- setnz %al
- shlq $4,%rax
- subq %rax,%rdx
- movups (%rcx),%xmm0
- movq %rcx,%rbp
- movl %r10d,%eax
- shll $4,%r10d
- movq %rdx,%r9
- andq $-16,%rdx
- movups 16(%rcx,%r10,1),%xmm1
- movdqa .Lxts_magic(%rip),%xmm8
- movdqa %xmm2,%xmm15
- pshufd $0x5f,%xmm2,%xmm9
- pxor %xmm0,%xmm1
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm10
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm10
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm11
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm11
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm12
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm12
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- movdqa %xmm15,%xmm13
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- pxor %xmm0,%xmm13
- pxor %xmm14,%xmm15
- movdqa %xmm15,%xmm14
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm9
- pxor %xmm0,%xmm14
- pxor %xmm9,%xmm15
- movaps %xmm1,96(%rsp)
- subq $96,%rdx
- jc .Lxts_dec_short
- movl $16+96,%eax
- leaq 32(%rbp,%r10,1),%rcx
- subq %r10,%rax
- movups 16(%rbp),%xmm1
- movq %rax,%r10
- leaq .Lxts_magic(%rip),%r8
- jmp .Lxts_dec_grandloop
- .align 32
- .Lxts_dec_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqa %xmm0,%xmm8
- movdqu 16(%rdi),%xmm3
- pxor %xmm10,%xmm2
- movdqu 32(%rdi),%xmm4
- pxor %xmm11,%xmm3
- .byte 102,15,56,222,209
- movdqu 48(%rdi),%xmm5
- pxor %xmm12,%xmm4
- .byte 102,15,56,222,217
- movdqu 64(%rdi),%xmm6
- pxor %xmm13,%xmm5
- .byte 102,15,56,222,225
- movdqu 80(%rdi),%xmm7
- pxor %xmm15,%xmm8
- movdqa 96(%rsp),%xmm9
- pxor %xmm14,%xmm6
- .byte 102,15,56,222,233
- movups 32(%rbp),%xmm0
- leaq 96(%rdi),%rdi
- pxor %xmm8,%xmm7
- pxor %xmm9,%xmm10
- .byte 102,15,56,222,241
- pxor %xmm9,%xmm11
- movdqa %xmm10,0(%rsp)
- .byte 102,15,56,222,249
- movups 48(%rbp),%xmm1
- pxor %xmm9,%xmm12
- .byte 102,15,56,222,208
- pxor %xmm9,%xmm13
- movdqa %xmm11,16(%rsp)
- .byte 102,15,56,222,216
- pxor %xmm9,%xmm14
- movdqa %xmm12,32(%rsp)
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- pxor %xmm9,%xmm8
- movdqa %xmm14,64(%rsp)
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups 64(%rbp),%xmm0
- movdqa %xmm8,80(%rsp)
- pshufd $0x5f,%xmm15,%xmm9
- jmp .Lxts_dec_loop6
- .align 32
- .Lxts_dec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups -64(%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -80(%rcx,%rax,1),%xmm0
- jnz .Lxts_dec_loop6
- movdqa (%r8),%xmm8
- movdqa %xmm9,%xmm14
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,209
- paddq %xmm15,%xmm15
- psrad $31,%xmm14
- .byte 102,15,56,222,217
- pand %xmm8,%xmm14
- movups (%rbp),%xmm10
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- pxor %xmm14,%xmm15
- movaps %xmm10,%xmm11
- .byte 102,15,56,222,249
- movups -64(%rcx),%xmm1
- movdqa %xmm9,%xmm14
- .byte 102,15,56,222,208
- paddd %xmm9,%xmm9
- pxor %xmm15,%xmm10
- .byte 102,15,56,222,216
- psrad $31,%xmm14
- paddq %xmm15,%xmm15
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- pand %xmm8,%xmm14
- movaps %xmm11,%xmm12
- .byte 102,15,56,222,240
- pxor %xmm14,%xmm15
- movdqa %xmm9,%xmm14
- .byte 102,15,56,222,248
- movups -48(%rcx),%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,209
- pxor %xmm15,%xmm11
- psrad $31,%xmm14
- .byte 102,15,56,222,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movdqa %xmm13,48(%rsp)
- pxor %xmm14,%xmm15
- .byte 102,15,56,222,241
- movaps %xmm12,%xmm13
- movdqa %xmm9,%xmm14
- .byte 102,15,56,222,249
- movups -32(%rcx),%xmm1
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,208
- pxor %xmm15,%xmm12
- psrad $31,%xmm14
- .byte 102,15,56,222,216
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm14
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- pxor %xmm14,%xmm15
- movaps %xmm13,%xmm14
- .byte 102,15,56,222,248
- movdqa %xmm9,%xmm0
- paddd %xmm9,%xmm9
- .byte 102,15,56,222,209
- pxor %xmm15,%xmm13
- psrad $31,%xmm0
- .byte 102,15,56,222,217
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm0
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm0,%xmm15
- movups (%rbp),%xmm0
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups 16(%rbp),%xmm1
- pxor %xmm15,%xmm14
- .byte 102,15,56,223,84,36,0
- psrad $31,%xmm9
- paddq %xmm15,%xmm15
- .byte 102,15,56,223,92,36,16
- .byte 102,15,56,223,100,36,32
- pand %xmm8,%xmm9
- movq %r10,%rax
- .byte 102,15,56,223,108,36,48
- .byte 102,15,56,223,116,36,64
- .byte 102,15,56,223,124,36,80
- pxor %xmm9,%xmm15
- leaq 96(%rsi),%rsi
- movups %xmm2,-96(%rsi)
- movups %xmm3,-80(%rsi)
- movups %xmm4,-64(%rsi)
- movups %xmm5,-48(%rsi)
- movups %xmm6,-32(%rsi)
- movups %xmm7,-16(%rsi)
- subq $96,%rdx
- jnc .Lxts_dec_grandloop
- movl $16+96,%eax
- subl %r10d,%eax
- movq %rbp,%rcx
- shrl $4,%eax
- .Lxts_dec_short:
- movl %eax,%r10d
- pxor %xmm0,%xmm10
- pxor %xmm0,%xmm11
- addq $96,%rdx
- jz .Lxts_dec_done
- pxor %xmm0,%xmm12
- cmpq $0x20,%rdx
- jb .Lxts_dec_one
- pxor %xmm0,%xmm13
- je .Lxts_dec_two
- pxor %xmm0,%xmm14
- cmpq $0x40,%rdx
- jb .Lxts_dec_three
- je .Lxts_dec_four
- movdqu (%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- pxor %xmm10,%xmm2
- movdqu 48(%rdi),%xmm5
- pxor %xmm11,%xmm3
- movdqu 64(%rdi),%xmm6
- leaq 80(%rdi),%rdi
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm6
- call _aesni_decrypt6
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- xorps %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- xorps %xmm14,%xmm6
- movdqu %xmm4,32(%rsi)
- pxor %xmm14,%xmm14
- movdqu %xmm5,48(%rsi)
- pcmpgtd %xmm15,%xmm14
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- pshufd $0x13,%xmm14,%xmm11
- andq $15,%r9
- jz .Lxts_dec_ret
- movdqa %xmm15,%xmm10
- paddq %xmm15,%xmm15
- pand %xmm8,%xmm11
- pxor %xmm15,%xmm11
- jmp .Lxts_dec_done2
- .align 16
- .Lxts_dec_one:
- movups (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_dec1_12:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_dec1_12
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movdqa %xmm11,%xmm10
- movups %xmm2,(%rsi)
- movdqa %xmm12,%xmm11
- leaq 16(%rsi),%rsi
- jmp .Lxts_dec_done
- .align 16
- .Lxts_dec_two:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- leaq 32(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- call _aesni_decrypt2
- xorps %xmm10,%xmm2
- movdqa %xmm12,%xmm10
- xorps %xmm11,%xmm3
- movdqa %xmm13,%xmm11
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- leaq 32(%rsi),%rsi
- jmp .Lxts_dec_done
- .align 16
- .Lxts_dec_three:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- leaq 48(%rdi),%rdi
- xorps %xmm10,%xmm2
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- call _aesni_decrypt3
- xorps %xmm10,%xmm2
- movdqa %xmm13,%xmm10
- xorps %xmm11,%xmm3
- movdqa %xmm14,%xmm11
- xorps %xmm12,%xmm4
- movups %xmm2,(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- leaq 48(%rsi),%rsi
- jmp .Lxts_dec_done
- .align 16
- .Lxts_dec_four:
- movups (%rdi),%xmm2
- movups 16(%rdi),%xmm3
- movups 32(%rdi),%xmm4
- xorps %xmm10,%xmm2
- movups 48(%rdi),%xmm5
- leaq 64(%rdi),%rdi
- xorps %xmm11,%xmm3
- xorps %xmm12,%xmm4
- xorps %xmm13,%xmm5
- call _aesni_decrypt4
- pxor %xmm10,%xmm2
- movdqa %xmm14,%xmm10
- pxor %xmm11,%xmm3
- movdqa %xmm15,%xmm11
- pxor %xmm12,%xmm4
- movdqu %xmm2,(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm3,16(%rsi)
- movdqu %xmm4,32(%rsi)
- movdqu %xmm5,48(%rsi)
- leaq 64(%rsi),%rsi
- jmp .Lxts_dec_done
- .align 16
- .Lxts_dec_done:
- andq $15,%r9
- jz .Lxts_dec_ret
- .Lxts_dec_done2:
- movq %r9,%rdx
- movq %rbp,%rcx
- movl %r10d,%eax
- movups (%rdi),%xmm2
- xorps %xmm11,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_dec1_13:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_dec1_13
- .byte 102,15,56,223,209
- xorps %xmm11,%xmm2
- movups %xmm2,(%rsi)
- .Lxts_dec_steal:
- movzbl 16(%rdi),%eax
- movzbl (%rsi),%ecx
- leaq 1(%rdi),%rdi
- movb %al,(%rsi)
- movb %cl,16(%rsi)
- leaq 1(%rsi),%rsi
- subq $1,%rdx
- jnz .Lxts_dec_steal
- subq %r9,%rsi
- movq %rbp,%rcx
- movl %r10d,%eax
- movups (%rsi),%xmm2
- xorps %xmm10,%xmm2
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_dec1_14:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_dec1_14
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movups %xmm2,(%rsi)
- .Lxts_dec_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- movaps %xmm0,0(%rsp)
- pxor %xmm8,%xmm8
- movaps %xmm0,16(%rsp)
- pxor %xmm9,%xmm9
- movaps %xmm0,32(%rsp)
- pxor %xmm10,%xmm10
- movaps %xmm0,48(%rsp)
- pxor %xmm11,%xmm11
- movaps %xmm0,64(%rsp)
- pxor %xmm12,%xmm12
- movaps %xmm0,80(%rsp)
- pxor %xmm13,%xmm13
- movaps %xmm0,96(%rsp)
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- movq -8(%r11),%rbp
- .cfi_restore %rbp
- leaq (%r11),%rsp
- .cfi_def_cfa_register %rsp
- .Lxts_dec_epilogue:
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_xts_decrypt,.-aesni_xts_decrypt
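
Both XTS routines end with a byte-copy loop (.Lxts_enc_steal / .Lxts_dec_steal) implementing ciphertext stealing for a trailing partial block of %r9 & 15 bytes: the tail of the input is swapped with the head of the last full output block and the rebuilt block goes through the cipher once more (the decrypt side reaches it via .Lxts_dec_done2 with the tweak order adjusted). A sketch of the encrypt-side steal, with the one-block cipher abstracted as a function pointer; enc1 and the parameter names are illustrative, not OpenSSL symbols:

#include <stdint.h>
#include <string.h>

/* XTS ciphertext stealing for a final partial block of m bytes (1..15),
 * mirroring the .Lxts_enc_steal loop. last_ct holds C_{n-1}, already
 * produced with the previous tweak; tweak is the next one in sequence. */
static void xts_steal_encrypt(uint8_t last_ct[16], const uint8_t *tail_pt,
                              uint8_t *tail_ct, size_t m,
                              const uint8_t tweak[16],
                              void (*enc1)(uint8_t block[16]))
{
    uint8_t block[16];
    memcpy(tail_ct, last_ct, m);            /* C_n = first m bytes of C_{n-1} */
    memcpy(block, tail_pt, m);              /* steal m plaintext bytes ...    */
    memcpy(block + m, last_ct + m, 16 - m); /* ... pad with C_{n-1}'s tail    */
    for (int i = 0; i < 16; i++) block[i] ^= tweak[i];
    enc1(block);
    for (int i = 0; i < 16; i++) block[i] ^= tweak[i];
    memcpy(last_ct, block, 16);             /* re-encrypted C_{n-1}           */
}
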
- .globl aesni_ocb_encrypt
- .type aesni_ocb_encrypt,@function
- .align 32
- aesni_ocb_encrypt:
- .cfi_startproc
- leaq (%rsp),%rax
- pushq %rbx
- .cfi_adjust_cfa_offset 8
- .cfi_offset %rbx,-16
- pushq %rbp
- .cfi_adjust_cfa_offset 8
- .cfi_offset %rbp,-24
- pushq %r12
- .cfi_adjust_cfa_offset 8
- .cfi_offset %r12,-32
- pushq %r13
- .cfi_adjust_cfa_offset 8
- .cfi_offset %r13,-40
- pushq %r14
- .cfi_adjust_cfa_offset 8
- .cfi_offset %r14,-48
- movq 8(%rax),%rbx
- movq 8+8(%rax),%rbp
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- shll $4,%r10d
- movups (%rcx),%xmm9
- movups 16(%rcx,%r10,1),%xmm1
- movdqu (%r9),%xmm15
- pxor %xmm1,%xmm9
- pxor %xmm1,%xmm15
- movl $16+32,%eax
- leaq 32(%r11,%r10,1),%rcx
- movups 16(%r11),%xmm1
- subq %r10,%rax
- movq %rax,%r10
- movdqu (%rbx),%xmm10
- movdqu (%rbp),%xmm8
- testq $1,%r8
- jnz .Locb_enc_odd
- bsfq %r8,%r12
- addq $1,%r8
- shlq $4,%r12
- movdqu (%rbx,%r12,1),%xmm7
- movdqu (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- call __ocb_encrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,(%rsi)
- leaq 16(%rsi),%rsi
- subq $1,%rdx
- jz .Locb_enc_done
- .Locb_enc_odd:
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- leaq 6(%r8),%r8
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- shlq $4,%r12
- shlq $4,%r13
- shlq $4,%r14
- subq $6,%rdx
- jc .Locb_enc_short
- jmp .Locb_enc_grandloop
- .align 32
- .Locb_enc_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- leaq 96(%rdi),%rdi
- call __ocb_encrypt6
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- movups %xmm7,80(%rsi)
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc .Locb_enc_grandloop
- .Locb_enc_short:
- addq $6,%rdx
- jz .Locb_enc_done
- movdqu 0(%rdi),%xmm2
- cmpq $2,%rdx
- jb .Locb_enc_one
- movdqu 16(%rdi),%xmm3
- je .Locb_enc_two
- movdqu 32(%rdi),%xmm4
- cmpq $4,%rdx
- jb .Locb_enc_three
- movdqu 48(%rdi),%xmm5
- je .Locb_enc_four
- movdqu 64(%rdi),%xmm6
- pxor %xmm7,%xmm7
- call __ocb_encrypt6
- movdqa %xmm14,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- movups %xmm6,64(%rsi)
- jmp .Locb_enc_done
- .align 16
- .Locb_enc_one:
- movdqa %xmm10,%xmm7
- call __ocb_encrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,0(%rsi)
- jmp .Locb_enc_done
- .align 16
- .Locb_enc_two:
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- call __ocb_encrypt4
- movdqa %xmm11,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- jmp .Locb_enc_done
- .align 16
- .Locb_enc_three:
- pxor %xmm5,%xmm5
- call __ocb_encrypt4
- movdqa %xmm12,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- jmp .Locb_enc_done
- .align 16
- .Locb_enc_four:
- call __ocb_encrypt4
- movdqa %xmm13,%xmm15
- movups %xmm2,0(%rsi)
- movups %xmm3,16(%rsi)
- movups %xmm4,32(%rsi)
- movups %xmm5,48(%rsi)
- .Locb_enc_done:
- pxor %xmm0,%xmm15
- movdqu %xmm8,(%rbp)
- movdqu %xmm15,(%r9)
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- pxor %xmm10,%xmm10
- pxor %xmm11,%xmm11
- pxor %xmm12,%xmm12
- pxor %xmm13,%xmm13
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- leaq 40(%rsp),%rax
- .cfi_def_cfa %rax,8
- movq -40(%rax),%r14
- .cfi_restore %r14
- movq -32(%rax),%r13
- .cfi_restore %r13
- movq -24(%rax),%r12
- .cfi_restore %r12
- movq -16(%rax),%rbp
- .cfi_restore %rbp
- movq -8(%rax),%rbx
- .cfi_restore %rbx
- leaq (%rax),%rsp
- .cfi_def_cfa_register %rsp
- .Locb_enc_epilogue:
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_ocb_encrypt,.-aesni_ocb_encrypt
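
The bsfq/shlq pairs in aesni_ocb_encrypt and its __ocb_encrypt* helpers compute the OCB offset schedule: for block number i, Offset_i = Offset_{i-1} XOR L[ntz(i)], where ntz is the number of trailing zero bits (what bsfq returns for a nonzero input) and L[] is the caller-precomputed table passed on the stack and held in %rbx, indexed in 16-byte strides (hence shlq $4). One offset step in sketch form, assuming a compiler that provides __builtin_ctzll:

#include <stdint.h>

/* One OCB offset update: Offset_i = Offset_{i-1} ^ L[ntz(i)], i >= 1.
 * __builtin_ctzll plays the role of bsfq; L is the precomputed table the
 * caller hands to aesni_ocb_encrypt (loaded through %rbx above). */
static void ocb_next_offset(uint8_t offset[16],
                            const uint8_t L[][16], uint64_t i)
{
    unsigned ntz = (unsigned)__builtin_ctzll(i);   /* i must be nonzero */
    for (int b = 0; b < 16; b++)
        offset[b] ^= L[ntz][b];
}
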
- .type __ocb_encrypt6,@function
- .align 32
- __ocb_encrypt6:
- .cfi_startproc
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- movdqa %xmm10,%xmm14
- pxor %xmm15,%xmm10
- movdqu (%rbx,%r14,1),%xmm15
- pxor %xmm10,%xmm11
- pxor %xmm2,%xmm8
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm3,%xmm8
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm4,%xmm8
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm14
- pxor %xmm5,%xmm8
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm15
- pxor %xmm6,%xmm8
- pxor %xmm14,%xmm6
- pxor %xmm7,%xmm8
- pxor %xmm15,%xmm7
- movups 32(%r11),%xmm0
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- addq $6,%r8
- pxor %xmm9,%xmm10
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- .byte 102,15,56,220,241
- pxor %xmm9,%xmm13
- pxor %xmm9,%xmm14
- .byte 102,15,56,220,249
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm15
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups 64(%r11),%xmm0
- shlq $4,%r12
- shlq $4,%r13
- jmp .Locb_enc_loop6
- .align 32
- .Locb_enc_loop6:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- .byte 102,15,56,220,240
- .byte 102,15,56,220,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Locb_enc_loop6
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- .byte 102,15,56,220,241
- .byte 102,15,56,220,249
- movups 16(%r11),%xmm1
- shlq $4,%r14
- .byte 102,65,15,56,221,210
- movdqu (%rbx),%xmm10
- movq %r10,%rax
- .byte 102,65,15,56,221,219
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- .byte 102,65,15,56,221,246
- .byte 102,65,15,56,221,255
- .byte 0xf3,0xc3
- .cfi_endproc
- .size __ocb_encrypt6,.-__ocb_encrypt6
- .type __ocb_encrypt4,@function
- .align 32
- __ocb_encrypt4:
- .cfi_startproc
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- pxor %xmm15,%xmm10
- pxor %xmm10,%xmm11
- pxor %xmm2,%xmm8
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm3,%xmm8
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm4,%xmm8
- pxor %xmm12,%xmm4
- pxor %xmm5,%xmm8
- pxor %xmm13,%xmm5
- movups 32(%r11),%xmm0
- pxor %xmm9,%xmm10
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- pxor %xmm9,%xmm13
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 48(%r11),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups 64(%r11),%xmm0
- jmp .Locb_enc_loop4
- .align 32
- .Locb_enc_loop4:
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Locb_enc_loop4
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,65,15,56,221,210
- .byte 102,65,15,56,221,219
- .byte 102,65,15,56,221,228
- .byte 102,65,15,56,221,237
- .byte 0xf3,0xc3
- .cfi_endproc
- .size __ocb_encrypt4,.-__ocb_encrypt4
- .type __ocb_encrypt1,@function
- .align 32
- __ocb_encrypt1:
- .cfi_startproc
- pxor %xmm15,%xmm7
- pxor %xmm9,%xmm7
- pxor %xmm2,%xmm8
- pxor %xmm7,%xmm2
- movups 32(%r11),%xmm0
- .byte 102,15,56,220,209
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm7
- .byte 102,15,56,220,208
- movups 64(%r11),%xmm0
- jmp .Locb_enc_loop1
- .align 32
- .Locb_enc_loop1:
- .byte 102,15,56,220,209
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,220,208
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Locb_enc_loop1
- .byte 102,15,56,220,209
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,15,56,221,215
- .byte 0xf3,0xc3
- .cfi_endproc
- .size __ocb_encrypt1,.-__ocb_encrypt1
- .globl aesni_ocb_decrypt
- .type aesni_ocb_decrypt,@function
- .align 32
- aesni_ocb_decrypt:
- .cfi_startproc
- leaq (%rsp),%rax
- pushq %rbx
- .cfi_adjust_cfa_offset 8
- .cfi_offset %rbx,-16
- pushq %rbp
- .cfi_adjust_cfa_offset 8
- .cfi_offset %rbp,-24
- pushq %r12
- .cfi_adjust_cfa_offset 8
- .cfi_offset %r12,-32
- pushq %r13
- .cfi_adjust_cfa_offset 8
- .cfi_offset %r13,-40
- pushq %r14
- .cfi_adjust_cfa_offset 8
- .cfi_offset %r14,-48
- movq 8(%rax),%rbx
- movq 8+8(%rax),%rbp
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- shll $4,%r10d
- movups (%rcx),%xmm9
- movups 16(%rcx,%r10,1),%xmm1
- movdqu (%r9),%xmm15
- pxor %xmm1,%xmm9
- pxor %xmm1,%xmm15
- movl $16+32,%eax
- leaq 32(%r11,%r10,1),%rcx
- movups 16(%r11),%xmm1
- subq %r10,%rax
- movq %rax,%r10
- movdqu (%rbx),%xmm10
- movdqu (%rbp),%xmm8
- testq $1,%r8
- jnz .Locb_dec_odd
- bsfq %r8,%r12
- addq $1,%r8
- shlq $4,%r12
- movdqu (%rbx,%r12,1),%xmm7
- movdqu (%rdi),%xmm2
- leaq 16(%rdi),%rdi
- call __ocb_decrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,(%rsi)
- xorps %xmm2,%xmm8
- leaq 16(%rsi),%rsi
- subq $1,%rdx
- jz .Locb_dec_done
- .Locb_dec_odd:
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- leaq 6(%r8),%r8
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- shlq $4,%r12
- shlq $4,%r13
- shlq $4,%r14
- subq $6,%rdx
- jc .Locb_dec_short
- jmp .Locb_dec_grandloop
- .align 32
- .Locb_dec_grandloop:
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqu 32(%rdi),%xmm4
- movdqu 48(%rdi),%xmm5
- movdqu 64(%rdi),%xmm6
- movdqu 80(%rdi),%xmm7
- leaq 96(%rdi),%rdi
- call __ocb_decrypt6
- movups %xmm2,0(%rsi)
- pxor %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm8
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm8
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm8
- movups %xmm7,80(%rsi)
- pxor %xmm7,%xmm8
- leaq 96(%rsi),%rsi
- subq $6,%rdx
- jnc .Locb_dec_grandloop
- .Locb_dec_short:
- addq $6,%rdx
- jz .Locb_dec_done
- movdqu 0(%rdi),%xmm2
- cmpq $2,%rdx
- jb .Locb_dec_one
- movdqu 16(%rdi),%xmm3
- je .Locb_dec_two
- movdqu 32(%rdi),%xmm4
- cmpq $4,%rdx
- jb .Locb_dec_three
- movdqu 48(%rdi),%xmm5
- je .Locb_dec_four
- movdqu 64(%rdi),%xmm6
- pxor %xmm7,%xmm7
- call __ocb_decrypt6
- movdqa %xmm14,%xmm15
- movups %xmm2,0(%rsi)
- pxor %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm8
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm8
- movups %xmm6,64(%rsi)
- pxor %xmm6,%xmm8
- jmp .Locb_dec_done
- .align 16
- .Locb_dec_one:
- movdqa %xmm10,%xmm7
- call __ocb_decrypt1
- movdqa %xmm7,%xmm15
- movups %xmm2,0(%rsi)
- xorps %xmm2,%xmm8
- jmp .Locb_dec_done
- .align 16
- .Locb_dec_two:
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- call __ocb_decrypt4
- movdqa %xmm11,%xmm15
- movups %xmm2,0(%rsi)
- xorps %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- xorps %xmm3,%xmm8
- jmp .Locb_dec_done
- .align 16
- .Locb_dec_three:
- pxor %xmm5,%xmm5
- call __ocb_decrypt4
- movdqa %xmm12,%xmm15
- movups %xmm2,0(%rsi)
- xorps %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- xorps %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- xorps %xmm4,%xmm8
- jmp .Locb_dec_done
- .align 16
- .Locb_dec_four:
- call __ocb_decrypt4
- movdqa %xmm13,%xmm15
- movups %xmm2,0(%rsi)
- pxor %xmm2,%xmm8
- movups %xmm3,16(%rsi)
- pxor %xmm3,%xmm8
- movups %xmm4,32(%rsi)
- pxor %xmm4,%xmm8
- movups %xmm5,48(%rsi)
- pxor %xmm5,%xmm8
- .Locb_dec_done:
- pxor %xmm0,%xmm15
- movdqu %xmm8,(%rbp)
- movdqu %xmm15,(%r9)
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- pxor %xmm10,%xmm10
- pxor %xmm11,%xmm11
- pxor %xmm12,%xmm12
- pxor %xmm13,%xmm13
- pxor %xmm14,%xmm14
- pxor %xmm15,%xmm15
- leaq 40(%rsp),%rax
- .cfi_def_cfa %rax,8
- movq -40(%rax),%r14
- .cfi_restore %r14
- movq -32(%rax),%r13
- .cfi_restore %r13
- movq -24(%rax),%r12
- .cfi_restore %r12
- movq -16(%rax),%rbp
- .cfi_restore %rbp
- movq -8(%rax),%rbx
- .cfi_restore %rbx
- leaq (%rax),%rsp
- .cfi_def_cfa_register %rsp
- .Locb_dec_epilogue:
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_ocb_decrypt,.-aesni_ocb_decrypt
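
The main structural difference from the encrypt side is where the checksum register %xmm8 is updated: __ocb_encrypt6 folds the blocks into %xmm8 before whitening, while aesni_ocb_decrypt XORs each block into %xmm8 only after its helper returns, so both directions checksum plaintext. The accumulator is written back through the stack-passed pointer in %rbp at .Locb_dec_done. The accumulation itself is a running 128-bit XOR:

#include <stdint.h>

/* Running OCB checksum: XOR each plaintext block into a 16-byte
 * accumulator, as the pxor %xmmN,%xmm8 instructions above do in bulk. */
static void ocb_checksum_add(uint8_t sum[16], const uint8_t block[16])
{
    for (int i = 0; i < 16; i++)
        sum[i] ^= block[i];
}
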
- .type __ocb_decrypt6,@function
- .align 32
- __ocb_decrypt6:
- .cfi_startproc
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- movdqa %xmm10,%xmm14
- pxor %xmm15,%xmm10
- movdqu (%rbx,%r14,1),%xmm15
- pxor %xmm10,%xmm11
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm14
- pxor %xmm13,%xmm5
- pxor %xmm14,%xmm15
- pxor %xmm14,%xmm6
- pxor %xmm15,%xmm7
- movups 32(%r11),%xmm0
- leaq 1(%r8),%r12
- leaq 3(%r8),%r13
- leaq 5(%r8),%r14
- addq $6,%r8
- pxor %xmm9,%xmm10
- bsfq %r12,%r12
- bsfq %r13,%r13
- bsfq %r14,%r14
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- .byte 102,15,56,222,241
- pxor %xmm9,%xmm13
- pxor %xmm9,%xmm14
- .byte 102,15,56,222,249
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm15
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups 64(%r11),%xmm0
- shlq $4,%r12
- shlq $4,%r13
- jmp .Locb_dec_loop6
- .align 32
- .Locb_dec_loop6:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Locb_dec_loop6
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- movups 16(%r11),%xmm1
- shlq $4,%r14
- .byte 102,65,15,56,223,210
- movdqu (%rbx),%xmm10
- movq %r10,%rax
- .byte 102,65,15,56,223,219
- .byte 102,65,15,56,223,228
- .byte 102,65,15,56,223,237
- .byte 102,65,15,56,223,246
- .byte 102,65,15,56,223,255
- .byte 0xf3,0xc3
- .cfi_endproc
- .size __ocb_decrypt6,.-__ocb_decrypt6
- .type __ocb_decrypt4,@function
- .align 32
- __ocb_decrypt4:
- .cfi_startproc
- pxor %xmm9,%xmm15
- movdqu (%rbx,%r12,1),%xmm11
- movdqa %xmm10,%xmm12
- movdqu (%rbx,%r13,1),%xmm13
- pxor %xmm15,%xmm10
- pxor %xmm10,%xmm11
- pxor %xmm10,%xmm2
- pxor %xmm11,%xmm12
- pxor %xmm11,%xmm3
- pxor %xmm12,%xmm13
- pxor %xmm12,%xmm4
- pxor %xmm13,%xmm5
- movups 32(%r11),%xmm0
- pxor %xmm9,%xmm10
- pxor %xmm9,%xmm11
- pxor %xmm9,%xmm12
- pxor %xmm9,%xmm13
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 48(%r11),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups 64(%r11),%xmm0
- jmp .Locb_dec_loop4
- .align 32
- .Locb_dec_loop4:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Locb_dec_loop4
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,65,15,56,223,210
- .byte 102,65,15,56,223,219
- .byte 102,65,15,56,223,228
- .byte 102,65,15,56,223,237
- .byte 0xf3,0xc3
- .cfi_endproc
- .size __ocb_decrypt4,.-__ocb_decrypt4
- .type __ocb_decrypt1,@function
- .align 32
- __ocb_decrypt1:
- .cfi_startproc
- pxor %xmm15,%xmm7
- pxor %xmm9,%xmm7
- pxor %xmm7,%xmm2
- movups 32(%r11),%xmm0
- .byte 102,15,56,222,209
- movups 48(%r11),%xmm1
- pxor %xmm9,%xmm7
- .byte 102,15,56,222,208
- movups 64(%r11),%xmm0
- jmp .Locb_dec_loop1
- .align 32
- .Locb_dec_loop1:
- .byte 102,15,56,222,209
- movups (%rcx,%rax,1),%xmm1
- addq $32,%rax
- .byte 102,15,56,222,208
- movups -16(%rcx,%rax,1),%xmm0
- jnz .Locb_dec_loop1
- .byte 102,15,56,222,209
- movups 16(%r11),%xmm1
- movq %r10,%rax
- .byte 102,15,56,223,215
- .byte 0xf3,0xc3
- .cfi_endproc
- .size __ocb_decrypt1,.-__ocb_decrypt1
- .globl aesni_cbc_encrypt
- .type aesni_cbc_encrypt,@function
- .align 16
- aesni_cbc_encrypt:
- .cfi_startproc
- testq %rdx,%rdx
- jz .Lcbc_ret
- movl 240(%rcx),%r10d
- movq %rcx,%r11
- testl %r9d,%r9d
- jz .Lcbc_decrypt
- movups (%r8),%xmm2
- movl %r10d,%eax
- cmpq $16,%rdx
- jb .Lcbc_enc_tail
- subq $16,%rdx
- jmp .Lcbc_enc_loop
- .align 16
- .Lcbc_enc_loop:
- movups (%rdi),%xmm3
- leaq 16(%rdi),%rdi
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- xorps %xmm0,%xmm3
- leaq 32(%rcx),%rcx
- xorps %xmm3,%xmm2
- .Loop_enc1_15:
- .byte 102,15,56,220,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_enc1_15
- .byte 102,15,56,221,209
- movl %r10d,%eax
- movq %r11,%rcx
- movups %xmm2,0(%rsi)
- leaq 16(%rsi),%rsi
- subq $16,%rdx
- jnc .Lcbc_enc_loop
- addq $16,%rdx
- jnz .Lcbc_enc_tail
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movups %xmm2,(%r8)
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- jmp .Lcbc_ret
- .Lcbc_enc_tail:
- movq %rdx,%rcx
- xchgq %rdi,%rsi
- .long 0x9066A4F3
- movl $16,%ecx
- subq %rdx,%rcx
- xorl %eax,%eax
- .long 0x9066AAF3
- leaq -16(%rdi),%rdi
- movl %r10d,%eax
- movq %rdi,%rsi
- movq %r11,%rcx
- xorq %rdx,%rdx
- jmp .Lcbc_enc_loop
- .align 16
- .Lcbc_decrypt:
- cmpq $16,%rdx
- jne .Lcbc_decrypt_bulk
- movdqu (%rdi),%xmm2
- movdqu (%r8),%xmm3
- movdqa %xmm2,%xmm4
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_dec1_16:
- .byte 102,15,56,222,209
- decl %r10d
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_dec1_16
- .byte 102,15,56,223,209
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movdqu %xmm4,(%r8)
- xorps %xmm3,%xmm2
- pxor %xmm3,%xmm3
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp .Lcbc_ret
- .align 16
- .Lcbc_decrypt_bulk:
- leaq (%rsp),%r11
- .cfi_def_cfa_register %r11
- pushq %rbp
- .cfi_offset %rbp,-16
- subq $16,%rsp
- andq $-16,%rsp
- movq %rcx,%rbp
- movups (%r8),%xmm10
- movl %r10d,%eax
- cmpq $0x50,%rdx
- jbe .Lcbc_dec_tail
- movups (%rcx),%xmm0
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqa %xmm2,%xmm11
- movdqu 32(%rdi),%xmm4
- movdqa %xmm3,%xmm12
- movdqu 48(%rdi),%xmm5
- movdqa %xmm4,%xmm13
- movdqu 64(%rdi),%xmm6
- movdqa %xmm5,%xmm14
- movdqu 80(%rdi),%xmm7
- movdqa %xmm6,%xmm15
- movl OPENSSL_ia32cap_P+4(%rip),%r9d
- cmpq $0x70,%rdx
- jbe .Lcbc_dec_six_or_seven
- andl $71303168,%r9d
- subq $0x50,%rdx
- cmpl $4194304,%r9d
- je .Lcbc_dec_loop6_enter
- subq $0x20,%rdx
- leaq 112(%rcx),%rcx
- jmp .Lcbc_dec_loop8_enter
- .align 16
- .Lcbc_dec_loop8:
- movups %xmm9,(%rsi)
- leaq 16(%rsi),%rsi
- .Lcbc_dec_loop8_enter:
- movdqu 96(%rdi),%xmm8
- pxor %xmm0,%xmm2
- movdqu 112(%rdi),%xmm9
- pxor %xmm0,%xmm3
- movups 16-112(%rcx),%xmm1
- pxor %xmm0,%xmm4
- movq $-1,%rbp
- cmpq $0x70,%rdx
- pxor %xmm0,%xmm5
- pxor %xmm0,%xmm6
- pxor %xmm0,%xmm7
- pxor %xmm0,%xmm8
- .byte 102,15,56,222,209
- pxor %xmm0,%xmm9
- movups 32-112(%rcx),%xmm0
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- adcq $0,%rbp
- andq $128,%rbp
- .byte 102,68,15,56,222,201
- addq %rdi,%rbp
- movups 48-112(%rcx),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 64-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 80-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 96-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 112-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 128-112(%rcx),%xmm0
- nop
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 144-112(%rcx),%xmm1
- cmpl $11,%eax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 160-112(%rcx),%xmm0
- jb .Lcbc_dec_done
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 176-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 192-112(%rcx),%xmm0
- je .Lcbc_dec_done
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movups 208-112(%rcx),%xmm1
- nop
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- .byte 102,15,56,222,240
- .byte 102,15,56,222,248
- .byte 102,68,15,56,222,192
- .byte 102,68,15,56,222,200
- movups 224-112(%rcx),%xmm0
- jmp .Lcbc_dec_done
- .align 16
- .Lcbc_dec_done:
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- pxor %xmm0,%xmm10
- pxor %xmm0,%xmm11
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- pxor %xmm0,%xmm12
- pxor %xmm0,%xmm13
- .byte 102,15,56,222,241
- .byte 102,15,56,222,249
- pxor %xmm0,%xmm14
- pxor %xmm0,%xmm15
- .byte 102,68,15,56,222,193
- .byte 102,68,15,56,222,201
- movdqu 80(%rdi),%xmm1
- .byte 102,65,15,56,223,210
- movdqu 96(%rdi),%xmm10
- pxor %xmm0,%xmm1
- .byte 102,65,15,56,223,219
- pxor %xmm0,%xmm10
- movdqu 112(%rdi),%xmm0
- .byte 102,65,15,56,223,228
- leaq 128(%rdi),%rdi
- movdqu 0(%rbp),%xmm11
- .byte 102,65,15,56,223,237
- .byte 102,65,15,56,223,246
- movdqu 16(%rbp),%xmm12
- movdqu 32(%rbp),%xmm13
- .byte 102,65,15,56,223,255
- .byte 102,68,15,56,223,193
- movdqu 48(%rbp),%xmm14
- movdqu 64(%rbp),%xmm15
- .byte 102,69,15,56,223,202
- movdqa %xmm0,%xmm10
- movdqu 80(%rbp),%xmm1
- movups -112(%rcx),%xmm0
- movups %xmm2,(%rsi)
- movdqa %xmm11,%xmm2
- movups %xmm3,16(%rsi)
- movdqa %xmm12,%xmm3
- movups %xmm4,32(%rsi)
- movdqa %xmm13,%xmm4
- movups %xmm5,48(%rsi)
- movdqa %xmm14,%xmm5
- movups %xmm6,64(%rsi)
- movdqa %xmm15,%xmm6
- movups %xmm7,80(%rsi)
- movdqa %xmm1,%xmm7
- movups %xmm8,96(%rsi)
- leaq 112(%rsi),%rsi
- subq $0x80,%rdx
- ja .Lcbc_dec_loop8
- movaps %xmm9,%xmm2
- leaq -112(%rcx),%rcx
- addq $0x70,%rdx
- jle .Lcbc_dec_clear_tail_collected
- movups %xmm9,(%rsi)
- leaq 16(%rsi),%rsi
- cmpq $0x50,%rdx
- jbe .Lcbc_dec_tail
- movaps %xmm11,%xmm2
- .Lcbc_dec_six_or_seven:
- cmpq $0x60,%rdx
- ja .Lcbc_dec_seven
- movaps %xmm7,%xmm8
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movaps %xmm8,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- pxor %xmm15,%xmm7
- movdqu %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- leaq 80(%rsi),%rsi
- movdqa %xmm7,%xmm2
- pxor %xmm7,%xmm7
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_seven:
- movups 96(%rdi),%xmm8
- xorps %xmm9,%xmm9
- call _aesni_decrypt8
- movups 80(%rdi),%xmm9
- pxor %xmm10,%xmm2
- movups 96(%rdi),%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- pxor %xmm15,%xmm7
- movdqu %xmm6,64(%rsi)
- pxor %xmm6,%xmm6
- pxor %xmm9,%xmm8
- movdqu %xmm7,80(%rsi)
- pxor %xmm7,%xmm7
- leaq 96(%rsi),%rsi
- movdqa %xmm8,%xmm2
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_loop6:
- movups %xmm7,(%rsi)
- leaq 16(%rsi),%rsi
- movdqu 0(%rdi),%xmm2
- movdqu 16(%rdi),%xmm3
- movdqa %xmm2,%xmm11
- movdqu 32(%rdi),%xmm4
- movdqa %xmm3,%xmm12
- movdqu 48(%rdi),%xmm5
- movdqa %xmm4,%xmm13
- movdqu 64(%rdi),%xmm6
- movdqa %xmm5,%xmm14
- movdqu 80(%rdi),%xmm7
- movdqa %xmm6,%xmm15
- .Lcbc_dec_loop6_enter:
- leaq 96(%rdi),%rdi
- movdqa %xmm7,%xmm8
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movdqa %xmm8,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm14,%xmm6
- movq %rbp,%rcx
- movdqu %xmm5,48(%rsi)
- pxor %xmm15,%xmm7
- movl %r10d,%eax
- movdqu %xmm6,64(%rsi)
- leaq 80(%rsi),%rsi
- subq $0x60,%rdx
- ja .Lcbc_dec_loop6
- movdqa %xmm7,%xmm2
- addq $0x50,%rdx
- jle .Lcbc_dec_clear_tail_collected
- movups %xmm7,(%rsi)
- leaq 16(%rsi),%rsi
- .Lcbc_dec_tail:
- movups (%rdi),%xmm2
- subq $0x10,%rdx
- jbe .Lcbc_dec_one
- movups 16(%rdi),%xmm3
- movaps %xmm2,%xmm11
- subq $0x10,%rdx
- jbe .Lcbc_dec_two
- movups 32(%rdi),%xmm4
- movaps %xmm3,%xmm12
- subq $0x10,%rdx
- jbe .Lcbc_dec_three
- movups 48(%rdi),%xmm5
- movaps %xmm4,%xmm13
- subq $0x10,%rdx
- jbe .Lcbc_dec_four
- movups 64(%rdi),%xmm6
- movaps %xmm5,%xmm14
- movaps %xmm6,%xmm15
- xorps %xmm7,%xmm7
- call _aesni_decrypt6
- pxor %xmm10,%xmm2
- movaps %xmm15,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- pxor %xmm14,%xmm6
- movdqu %xmm5,48(%rsi)
- pxor %xmm5,%xmm5
- leaq 64(%rsi),%rsi
- movdqa %xmm6,%xmm2
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- subq $0x10,%rdx
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_one:
- movaps %xmm2,%xmm11
- movups (%rcx),%xmm0
- movups 16(%rcx),%xmm1
- leaq 32(%rcx),%rcx
- xorps %xmm0,%xmm2
- .Loop_dec1_17:
- .byte 102,15,56,222,209
- decl %eax
- movups (%rcx),%xmm1
- leaq 16(%rcx),%rcx
- jnz .Loop_dec1_17
- .byte 102,15,56,223,209
- xorps %xmm10,%xmm2
- movaps %xmm11,%xmm10
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_two:
- movaps %xmm3,%xmm12
- call _aesni_decrypt2
- pxor %xmm10,%xmm2
- movaps %xmm12,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- movdqa %xmm3,%xmm2
- pxor %xmm3,%xmm3
- leaq 16(%rsi),%rsi
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_three:
- movaps %xmm4,%xmm13
- call _aesni_decrypt3
- pxor %xmm10,%xmm2
- movaps %xmm13,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- movdqa %xmm4,%xmm2
- pxor %xmm4,%xmm4
- leaq 32(%rsi),%rsi
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_four:
- movaps %xmm5,%xmm14
- call _aesni_decrypt4
- pxor %xmm10,%xmm2
- movaps %xmm14,%xmm10
- pxor %xmm11,%xmm3
- movdqu %xmm2,(%rsi)
- pxor %xmm12,%xmm4
- movdqu %xmm3,16(%rsi)
- pxor %xmm3,%xmm3
- pxor %xmm13,%xmm5
- movdqu %xmm4,32(%rsi)
- pxor %xmm4,%xmm4
- movdqa %xmm5,%xmm2
- pxor %xmm5,%xmm5
- leaq 48(%rsi),%rsi
- jmp .Lcbc_dec_tail_collected
- .align 16
- .Lcbc_dec_clear_tail_collected:
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- pxor %xmm6,%xmm6
- pxor %xmm7,%xmm7
- pxor %xmm8,%xmm8
- pxor %xmm9,%xmm9
- .Lcbc_dec_tail_collected:
- movups %xmm10,(%r8)
- andq $15,%rdx
- jnz .Lcbc_dec_tail_partial
- movups %xmm2,(%rsi)
- pxor %xmm2,%xmm2
- jmp .Lcbc_dec_ret
- .align 16
- .Lcbc_dec_tail_partial:
- movaps %xmm2,(%rsp)
- pxor %xmm2,%xmm2
- movq $16,%rcx
- movq %rsi,%rdi
- subq %rdx,%rcx
- leaq (%rsp),%rsi
- .long 0x9066A4F3
- movdqa %xmm2,(%rsp)
- .Lcbc_dec_ret:
- xorps %xmm0,%xmm0
- pxor %xmm1,%xmm1
- movq -8(%r11),%rbp
- .cfi_restore %rbp
- leaq (%r11),%rsp
- .cfi_def_cfa_register %rsp
- .Lcbc_ret:
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_cbc_encrypt,.-aesni_cbc_encrypt
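
aesni_cbc_encrypt keeps the encrypt path strictly serial, because each block's input depends on the previous ciphertext, while the decrypt path has no such dependency and is run six or eight blocks at a time (the OPENSSL_ia32cap_P test at .Lcbc_decrypt_bulk selects the interleave width for the detected CPU). The serial chain that .Lcbc_enc_loop expresses, in sketch form with the per-block AES rounds abstracted as a hypothetical enc1:

#include <stdint.h>
#include <string.h>

/* CBC encryption's inherent dependency: block n cannot start before block
 * n-1 finishes, which is why only the decrypt side above is interleaved. */
static void cbc_encrypt_blocks(uint8_t *out, const uint8_t *in,
                               size_t nblocks, uint8_t iv[16],
                               void (*enc1)(uint8_t block[16]))
{
    uint8_t x[16];
    for (size_t n = 0; n < nblocks; n++) {
        for (int i = 0; i < 16; i++)
            x[i] = in[16 * n + i] ^ iv[i]; /* XOR in previous ciphertext */
        enc1(x);
        memcpy(out + 16 * n, x, 16);
        memcpy(iv, x, 16);                 /* becomes the next chain value */
    }
}
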
- .globl aesni_set_decrypt_key
- .type aesni_set_decrypt_key,@function
- .align 16
- aesni_set_decrypt_key:
- .cfi_startproc
- .byte 0x48,0x83,0xEC,0x08
- .cfi_adjust_cfa_offset 8
- call __aesni_set_encrypt_key
- shll $4,%esi
- testl %eax,%eax
- jnz .Ldec_key_ret
- leaq 16(%rdx,%rsi,1),%rdi
- movups (%rdx),%xmm0
- movups (%rdi),%xmm1
- movups %xmm0,(%rdi)
- movups %xmm1,(%rdx)
- leaq 16(%rdx),%rdx
- leaq -16(%rdi),%rdi
- .Ldec_key_inverse:
- movups (%rdx),%xmm0
- movups (%rdi),%xmm1
- .byte 102,15,56,219,192
- .byte 102,15,56,219,201
- leaq 16(%rdx),%rdx
- leaq -16(%rdi),%rdi
- movups %xmm0,16(%rdi)
- movups %xmm1,-16(%rdx)
- cmpq %rdx,%rdi
- ja .Ldec_key_inverse
- movups (%rdx),%xmm0
- .byte 102,15,56,219,192
- pxor %xmm1,%xmm1
- movups %xmm0,(%rdi)
- pxor %xmm0,%xmm0
- .Ldec_key_ret:
- addq $8,%rsp
- .cfi_adjust_cfa_offset -8
- .byte 0xf3,0xc3
- .cfi_endproc
- .LSEH_end_set_decrypt_key:
- .size aesni_set_decrypt_key,.-aesni_set_decrypt_key
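
aesni_set_decrypt_key builds the encrypt schedule first, then derives the decrypt schedule in place: the round keys are reversed end to end, and the inner ones are passed through aesimc (the 102,15,56,219 byte sequences, 0x66 0x0F 0x38 0xDB) to form the equivalent inverse cipher. Functionally, with intrinsics (compile with -maes; an illustrative sketch, not the exported entry point):

#include <wmmintrin.h>  /* AES-NI intrinsics; pulls in __m128i */

/* Turn an encrypt key schedule into a decrypt schedule: reverse the round
 * keys and apply InvMixColumns (aesimc) to every key except the first and
 * last, matching what .Ldec_key_inverse does pairwise. rounds is 10/12/14. */
static void invert_key_schedule(__m128i rk[], int rounds)
{
    for (int i = 0, j = rounds; i < j; i++, j--) {
        __m128i t = rk[i];
        rk[i] = rk[j];
        rk[j] = t;
    }
    for (int i = 1; i < rounds; i++)
        rk[i] = _mm_aesimc_si128(rk[i]);
}
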
- .globl aesni_set_encrypt_key
- .type aesni_set_encrypt_key,@function
- .align 16
- aesni_set_encrypt_key:
- __aesni_set_encrypt_key:
- .cfi_startproc
- .byte 0x48,0x83,0xEC,0x08
- .cfi_adjust_cfa_offset 8
- movq $-1,%rax
- testq %rdi,%rdi
- jz .Lenc_key_ret
- testq %rdx,%rdx
- jz .Lenc_key_ret
- movl $268437504,%r10d
- movups (%rdi),%xmm0
- xorps %xmm4,%xmm4
- andl OPENSSL_ia32cap_P+4(%rip),%r10d
- leaq 16(%rdx),%rax
- cmpl $256,%esi
- je .L14rounds
- cmpl $192,%esi
- je .L12rounds
- cmpl $128,%esi
- jne .Lbad_keybits
- .L10rounds:
- movl $9,%esi
- cmpl $268435456,%r10d
- je .L10rounds_alt
- movups %xmm0,(%rdx)
- .byte 102,15,58,223,200,1
- call .Lkey_expansion_128_cold
- .byte 102,15,58,223,200,2
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,4
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,8
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,16
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,32
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,64
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,128
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,27
- call .Lkey_expansion_128
- .byte 102,15,58,223,200,54
- call .Lkey_expansion_128
- movups %xmm0,(%rax)
- movl %esi,80(%rax)
- xorl %eax,%eax
- jmp .Lenc_key_ret
- .align 16
- .L10rounds_alt:
- movdqa .Lkey_rotate(%rip),%xmm5
- movl $8,%r10d
- movdqa .Lkey_rcon1(%rip),%xmm4
- movdqa %xmm0,%xmm2
- movdqu %xmm0,(%rdx)
- jmp .Loop_key128
- .align 16
- .Loop_key128:
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- pslld $1,%xmm4
- leaq 16(%rax),%rax
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,-16(%rax)
- movdqa %xmm0,%xmm2
- decl %r10d
- jnz .Loop_key128
- movdqa .Lkey_rcon1b(%rip),%xmm4
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- pslld $1,%xmm4
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,(%rax)
- movdqa %xmm0,%xmm2
- .byte 102,15,56,0,197
- .byte 102,15,56,221,196
- movdqa %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm2,%xmm3
- pslldq $4,%xmm2
- pxor %xmm3,%xmm2
- pxor %xmm2,%xmm0
- movdqu %xmm0,16(%rax)
- movl %esi,96(%rax)
- xorl %eax,%eax
- jmp .Lenc_key_ret
- .align 16
- .L12rounds:
- movq 16(%rdi),%xmm2
- movl $11,%esi
- cmpl $268435456,%r10d
- je .L12rounds_alt
- movups %xmm0,(%rdx)
- .byte 102,15,58,223,202,1
- call .Lkey_expansion_192a_cold
- .byte 102,15,58,223,202,2
- call .Lkey_expansion_192b
- .byte 102,15,58,223,202,4
- call .Lkey_expansion_192a
- .byte 102,15,58,223,202,8
- call .Lkey_expansion_192b
- .byte 102,15,58,223,202,16
- call .Lkey_expansion_192a
- .byte 102,15,58,223,202,32
- call .Lkey_expansion_192b
- .byte 102,15,58,223,202,64
- call .Lkey_expansion_192a
- .byte 102,15,58,223,202,128
- call .Lkey_expansion_192b
- movups %xmm0,(%rax)
- movl %esi,48(%rax)
- xorq %rax,%rax
- jmp .Lenc_key_ret
- .align 16
- .L12rounds_alt:
- movdqa .Lkey_rotate192(%rip),%xmm5
- movdqa .Lkey_rcon1(%rip),%xmm4
- movl $8,%r10d
- movdqu %xmm0,(%rdx)
- jmp .Loop_key192
- .align 16
- .Loop_key192:
- movq %xmm2,0(%rax)
- movdqa %xmm2,%xmm1
- .byte 102,15,56,0,213
- .byte 102,15,56,221,212
- pslld $1,%xmm4
- leaq 24(%rax),%rax
- movdqa %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm3,%xmm0
- pshufd $0xff,%xmm0,%xmm3
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pxor %xmm2,%xmm0
- pxor %xmm3,%xmm2
- movdqu %xmm0,-16(%rax)
- decl %r10d
- jnz .Loop_key192
- movl %esi,32(%rax)
- xorl %eax,%eax
- jmp .Lenc_key_ret
- .align 16
- .L14rounds:
- movups 16(%rdi),%xmm2
- movl $13,%esi
- leaq 16(%rax),%rax
- cmpl $268435456,%r10d
- je .L14rounds_alt
- movups %xmm0,(%rdx)
- movups %xmm2,16(%rdx)
- .byte 102,15,58,223,202,1
- call .Lkey_expansion_256a_cold
- .byte 102,15,58,223,200,1
- call .Lkey_expansion_256b
- .byte 102,15,58,223,202,2
- call .Lkey_expansion_256a
- .byte 102,15,58,223,200,2
- call .Lkey_expansion_256b
- .byte 102,15,58,223,202,4
- call .Lkey_expansion_256a
- .byte 102,15,58,223,200,4
- call .Lkey_expansion_256b
- .byte 102,15,58,223,202,8
- call .Lkey_expansion_256a
- .byte 102,15,58,223,200,8
- call .Lkey_expansion_256b
- .byte 102,15,58,223,202,16
- call .Lkey_expansion_256a
- .byte 102,15,58,223,200,16
- call .Lkey_expansion_256b
- .byte 102,15,58,223,202,32
- call .Lkey_expansion_256a
- .byte 102,15,58,223,200,32
- call .Lkey_expansion_256b
- .byte 102,15,58,223,202,64
- call .Lkey_expansion_256a
- movups %xmm0,(%rax)
- movl %esi,16(%rax)
- xorq %rax,%rax
- jmp .Lenc_key_ret
- .align 16
- .L14rounds_alt:
- movdqa .Lkey_rotate(%rip),%xmm5
- movdqa .Lkey_rcon1(%rip),%xmm4
- movl $7,%r10d
- movdqu %xmm0,0(%rdx)
- movdqa %xmm2,%xmm1
- movdqu %xmm2,16(%rdx)
- jmp .Loop_key256
- .align 16
- .Loop_key256:
- .byte 102,15,56,0,213
- .byte 102,15,56,221,212
- movdqa %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm0,%xmm3
- pslldq $4,%xmm0
- pxor %xmm3,%xmm0
- pslld $1,%xmm4
- pxor %xmm2,%xmm0
- movdqu %xmm0,(%rax)
- decl %r10d
- jz .Ldone_key256
- pshufd $0xff,%xmm0,%xmm2
- pxor %xmm3,%xmm3
- .byte 102,15,56,221,211
- movdqa %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm1,%xmm3
- pslldq $4,%xmm1
- pxor %xmm3,%xmm1
- pxor %xmm1,%xmm2
- movdqu %xmm2,16(%rax)
- leaq 32(%rax),%rax
- movdqa %xmm2,%xmm1
- jmp .Loop_key256
- .Ldone_key256:
- movl %esi,16(%rax)
- xorl %eax,%eax
- jmp .Lenc_key_ret
- .align 16
- .Lbad_keybits:
- movq $-2,%rax
- .Lenc_key_ret:
- pxor %xmm0,%xmm0
- pxor %xmm1,%xmm1
- pxor %xmm2,%xmm2
- pxor %xmm3,%xmm3
- pxor %xmm4,%xmm4
- pxor %xmm5,%xmm5
- addq $8,%rsp
- .cfi_adjust_cfa_offset -8
- .byte 0xf3,0xc3
- .LSEH_end_set_encrypt_key:
- .align 16
- .Lkey_expansion_128:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- .Lkey_expansion_128_cold:
- shufps $16,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $255,%xmm1,%xmm1
- xorps %xmm1,%xmm0
- .byte 0xf3,0xc3
- .align 16
- .Lkey_expansion_192a:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- .Lkey_expansion_192a_cold:
- movaps %xmm2,%xmm5
- .Lkey_expansion_192b_warm:
- shufps $16,%xmm0,%xmm4
- movdqa %xmm2,%xmm3
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- pslldq $4,%xmm3
- xorps %xmm4,%xmm0
- pshufd $85,%xmm1,%xmm1
- pxor %xmm3,%xmm2
- pxor %xmm1,%xmm0
- pshufd $255,%xmm0,%xmm3
- pxor %xmm3,%xmm2
- .byte 0xf3,0xc3
- .align 16
- .Lkey_expansion_192b:
- movaps %xmm0,%xmm3
- shufps $68,%xmm0,%xmm5
- movups %xmm5,(%rax)
- shufps $78,%xmm2,%xmm3
- movups %xmm3,16(%rax)
- leaq 32(%rax),%rax
- jmp .Lkey_expansion_192b_warm
- .align 16
- .Lkey_expansion_256a:
- movups %xmm2,(%rax)
- leaq 16(%rax),%rax
- .Lkey_expansion_256a_cold:
- shufps $16,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $140,%xmm0,%xmm4
- xorps %xmm4,%xmm0
- shufps $255,%xmm1,%xmm1
- xorps %xmm1,%xmm0
- .byte 0xf3,0xc3
- .align 16
- .Lkey_expansion_256b:
- movups %xmm0,(%rax)
- leaq 16(%rax),%rax
- shufps $16,%xmm2,%xmm4
- xorps %xmm4,%xmm2
- shufps $140,%xmm2,%xmm4
- xorps %xmm4,%xmm2
- shufps $170,%xmm1,%xmm1
- xorps %xmm1,%xmm2
- .byte 0xf3,0xc3
- .cfi_endproc
- .size aesni_set_encrypt_key,.-aesni_set_encrypt_key
- .size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key
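
The .Lkey_expansion_128 helper is the classic aeskeygenassist pattern (102,15,58,223 = 0x66 0x0F 0x3A 0xDF): broadcast the assist result's top dword, fold the previous round key into a prefix XOR of its four dwords (the shufps $16/$140 pair against a zeroed %xmm4, or the literal pslldq/pxor chain in the _alt paths), and XOR the two together. One round of AES-128 expansion with intrinsics, as an illustrative sketch:

#include <wmmintrin.h>

/* One AES-128 key-expansion round. kga is _mm_aeskeygenassist_si128(key, rcon);
 * the three shift-XOR folds compute the prefix XOR of the key's dwords, the
 * same result the shufps trick above produces. */
static __m128i aes128_expand_step(__m128i key, __m128i kga)
{
    kga = _mm_shuffle_epi32(kga, _MM_SHUFFLE(3, 3, 3, 3)); /* broadcast dword 3 */
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4));
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4));
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4));
    return _mm_xor_si128(key, kga);
}
/* e.g. rk[1] = aes128_expand_step(rk[0], _mm_aeskeygenassist_si128(rk[0], 1)); */
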
- .align 64
- .Lbswap_mask:
- .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
- .Lincrement32:
- .long 6,6,6,0
- .Lincrement64:
- .long 1,0,0,0
- .Lxts_magic:
- .long 0x87,0,1,0
- .Lincrement1:
- .byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
- .Lkey_rotate:
- .long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d
- .Lkey_rotate192:
- .long 0x04070605,0x04070605,0x04070605,0x04070605
- .Lkey_rcon1:
- .long 1,1,1,1
- .Lkey_rcon1b:
- .long 0x1b,0x1b,0x1b,0x1b
- .byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
- .align 64
|