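- # AES-NI multi-buffer CBC module, x86_64 / Mach-O flavour (the L$ label prefix
- # and the _OPENSSL_ia32cap_P reference suggest output of OpenSSL's
- # aesni-mb-x86_64.pl perlasm script for the macosx target).
- #
- # Calling convention, as inferred from the offsets used below:
- #   %rdi  array of 40-byte stream descriptors:
- #         input ptr +0, output ptr +8, 32-bit block count +16, 16-byte IV +24
- #   %rsi  expanded AES key schedule (round keys at +0, round count at +240)
- #   %edx  number of 4-stream groups: 1 => 4 buffers, 2 => 8 buffers (AVX path)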
- .text
- .globl _aesni_multi_cbc_encrypt
- .p2align 5
- _aesni_multi_cbc_encrypt:
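- # SSE/AES-NI path: encrypt up to four independent CBC streams with the AES
- # rounds interleaved.  When %edx >= 2 and bit 28 (AVX) of the second word of
- # _OPENSSL_ia32cap_P is set, control jumps to the 8-way AVX variant below.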
- cmpl $2,%edx
- jb L$enc_non_avx
- movl _OPENSSL_ia32cap_P+4(%rip),%ecx
- testl $268435456,%ecx
- jnz _avx_cbc_enc_shortcut
- jmp L$enc_non_avx
- .p2align 4
- L$enc_non_avx:
- movq %rsp,%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $48,%rsp
- andq $-64,%rsp
- movq %rax,16(%rsp)
- L$enc4x_body:
- movdqu (%rsi),%xmm12
- leaq 120(%rsi),%rsi
- leaq 80(%rdi),%rdi
- L$enc4x_loop_grande:
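- # Load up to four descriptors: block counts go to 32..44(%rsp), IVs to
- # xmm2..xmm5, and %edx becomes the largest count.  Streams with no blocks
- # have their pointers redirected (cmovle) to scratch space at %rsp.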
- movl %edx,24(%rsp)
- xorl %edx,%edx
- movl -64(%rdi),%ecx
- movq -80(%rdi),%r8
- cmpl %edx,%ecx
- movq -72(%rdi),%r12
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu -56(%rdi),%xmm2
- movl %ecx,32(%rsp)
- cmovleq %rsp,%r8
- movl -24(%rdi),%ecx
- movq -40(%rdi),%r9
- cmpl %edx,%ecx
- movq -32(%rdi),%r13
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu -16(%rdi),%xmm3
- movl %ecx,36(%rsp)
- cmovleq %rsp,%r9
- movl 16(%rdi),%ecx
- movq 0(%rdi),%r10
- cmpl %edx,%ecx
- movq 8(%rdi),%r14
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu 24(%rdi),%xmm4
- movl %ecx,40(%rsp)
- cmovleq %rsp,%r10
- movl 56(%rdi),%ecx
- movq 40(%rdi),%r11
- cmpl %edx,%ecx
- movq 48(%rdi),%r15
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu 64(%rdi),%xmm5
- movl %ecx,44(%rsp)
- cmovleq %rsp,%r11
- testl %edx,%edx
- jz L$enc4x_done
- movups 16-120(%rsi),%xmm1
- pxor %xmm12,%xmm2
- movups 32-120(%rsi),%xmm0
- pxor %xmm12,%xmm3
- movl 240-120(%rsi),%eax
- pxor %xmm12,%xmm4
- movdqu (%r8),%xmm6
- pxor %xmm12,%xmm5
- movdqu (%r9),%xmm7
- pxor %xmm6,%xmm2
- movdqu (%r10),%xmm8
- pxor %xmm7,%xmm3
- movdqu (%r11),%xmm9
- pxor %xmm8,%xmm4
- pxor %xmm9,%xmm5
- movdqa 32(%rsp),%xmm10
- xorq %rbx,%rbx
- jmp L$oop_enc4x
- .p2align 5
- L$oop_enc4x:
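- # Each pass encrypts one block per stream (CBC state in xmm2..xmm5).  The
- # .byte sequences are hand-encoded AES-NI ops (102,15,56,220 = aesenc,
- # 221 = aesenclast) for the benefit of older assemblers.  Per-stream counters
- # packed at 32(%rsp) are decremented with pcmpgtd/paddd, and lanes that have
- # run out are steered to the stack sink via cmovge/cmovg.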
- addq $16,%rbx
- leaq 16(%rsp),%rbp
- movl $1,%ecx
- subq %rbx,%rbp
- .byte 102,15,56,220,209
- prefetcht0 31(%r8,%rbx,1)
- prefetcht0 31(%r9,%rbx,1)
- .byte 102,15,56,220,217
- prefetcht0 31(%r10,%rbx,1)
- prefetcht0 31(%r11,%rbx,1)
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 48-120(%rsi),%xmm1
- cmpl 32(%rsp),%ecx
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- cmovgeq %rbp,%r8
- cmovgq %rbp,%r12
- .byte 102,15,56,220,232
- movups -56(%rsi),%xmm0
- cmpl 36(%rsp),%ecx
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- cmovgeq %rbp,%r9
- cmovgq %rbp,%r13
- .byte 102,15,56,220,233
- movups -40(%rsi),%xmm1
- cmpl 40(%rsp),%ecx
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- cmovgeq %rbp,%r10
- cmovgq %rbp,%r14
- .byte 102,15,56,220,232
- movups -24(%rsi),%xmm0
- cmpl 44(%rsp),%ecx
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- cmovgeq %rbp,%r11
- cmovgq %rbp,%r15
- .byte 102,15,56,220,233
- movups -8(%rsi),%xmm1
- movdqa %xmm10,%xmm11
- .byte 102,15,56,220,208
- prefetcht0 15(%r12,%rbx,1)
- prefetcht0 15(%r13,%rbx,1)
- .byte 102,15,56,220,216
- prefetcht0 15(%r14,%rbx,1)
- prefetcht0 15(%r15,%rbx,1)
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups 128-120(%rsi),%xmm0
- pxor %xmm12,%xmm12
- .byte 102,15,56,220,209
- pcmpgtd %xmm12,%xmm11
- movdqu -120(%rsi),%xmm12
- .byte 102,15,56,220,217
- paddd %xmm11,%xmm10
- movdqa %xmm10,32(%rsp)
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 144-120(%rsi),%xmm1
- cmpl $11,%eax
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups 160-120(%rsi),%xmm0
- jb L$enc4x_tail
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 176-120(%rsi),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups 192-120(%rsi),%xmm0
- je L$enc4x_tail
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movups 208-120(%rsi),%xmm1
- .byte 102,15,56,220,208
- .byte 102,15,56,220,216
- .byte 102,15,56,220,224
- .byte 102,15,56,220,232
- movups 224-120(%rsi),%xmm0
- jmp L$enc4x_tail
- .p2align 5
- L$enc4x_tail:
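- # Last round: aesenclast yields the ciphertext, which is stored at out-16 and
- # then XORed with the next plaintext block (already combined with round key 0
- # in xmm12) to form the next CBC state.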
- .byte 102,15,56,220,209
- .byte 102,15,56,220,217
- .byte 102,15,56,220,225
- .byte 102,15,56,220,233
- movdqu (%r8,%rbx,1),%xmm6
- movdqu 16-120(%rsi),%xmm1
- .byte 102,15,56,221,208
- movdqu (%r9,%rbx,1),%xmm7
- pxor %xmm12,%xmm6
- .byte 102,15,56,221,216
- movdqu (%r10,%rbx,1),%xmm8
- pxor %xmm12,%xmm7
- .byte 102,15,56,221,224
- movdqu (%r11,%rbx,1),%xmm9
- pxor %xmm12,%xmm8
- .byte 102,15,56,221,232
- movdqu 32-120(%rsi),%xmm0
- pxor %xmm12,%xmm9
- movups %xmm2,-16(%r12,%rbx,1)
- pxor %xmm6,%xmm2
- movups %xmm3,-16(%r13,%rbx,1)
- pxor %xmm7,%xmm3
- movups %xmm4,-16(%r14,%rbx,1)
- pxor %xmm8,%xmm4
- movups %xmm5,-16(%r15,%rbx,1)
- pxor %xmm9,%xmm5
- decl %edx
- jnz L$oop_enc4x
- movq 16(%rsp),%rax
- movl 24(%rsp),%edx
- leaq 160(%rdi),%rdi
- decl %edx
- jnz L$enc4x_loop_grande
- L$enc4x_done:
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$enc4x_epilogue:
- .byte 0xf3,0xc3
- .globl _aesni_multi_cbc_decrypt
- .p2align 5
- _aesni_multi_cbc_decrypt:
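- # SSE/AES-NI path: decrypt up to four CBC streams; descriptor layout and AVX
- # dispatch mirror the encrypt routine above.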
- cmpl $2,%edx
- jb L$dec_non_avx
- movl _OPENSSL_ia32cap_P+4(%rip),%ecx
- testl $268435456,%ecx
- jnz _avx_cbc_dec_shortcut
- jmp L$dec_non_avx
- .p2align 4
- L$dec_non_avx:
- movq %rsp,%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $48,%rsp
- andq $-64,%rsp
- movq %rax,16(%rsp)
- L$dec4x_body:
- movdqu (%rsi),%xmm12
- leaq 120(%rsi),%rsi
- leaq 80(%rdi),%rdi
- L$dec4x_loop_grande:
- movl %edx,24(%rsp)
- xorl %edx,%edx
- movl -64(%rdi),%ecx
- movq -80(%rdi),%r8
- cmpl %edx,%ecx
- movq -72(%rdi),%r12
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu -56(%rdi),%xmm6
- movl %ecx,32(%rsp)
- cmovleq %rsp,%r8
- movl -24(%rdi),%ecx
- movq -40(%rdi),%r9
- cmpl %edx,%ecx
- movq -32(%rdi),%r13
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu -16(%rdi),%xmm7
- movl %ecx,36(%rsp)
- cmovleq %rsp,%r9
- movl 16(%rdi),%ecx
- movq 0(%rdi),%r10
- cmpl %edx,%ecx
- movq 8(%rdi),%r14
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu 24(%rdi),%xmm8
- movl %ecx,40(%rsp)
- cmovleq %rsp,%r10
- movl 56(%rdi),%ecx
- movq 40(%rdi),%r11
- cmpl %edx,%ecx
- movq 48(%rdi),%r15
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movdqu 64(%rdi),%xmm9
- movl %ecx,44(%rsp)
- cmovleq %rsp,%r11
- testl %edx,%edx
- jz L$dec4x_done
- movups 16-120(%rsi),%xmm1
- movups 32-120(%rsi),%xmm0
- movl 240-120(%rsi),%eax
- movdqu (%r8),%xmm2
- movdqu (%r9),%xmm3
- pxor %xmm12,%xmm2
- movdqu (%r10),%xmm4
- pxor %xmm12,%xmm3
- movdqu (%r11),%xmm5
- pxor %xmm12,%xmm4
- pxor %xmm12,%xmm5
- movdqa 32(%rsp),%xmm10
- xorq %rbx,%rbx
- jmp L$oop_dec4x
- .p2align 5
- L$oop_dec4x:
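- # xmm2..xmm5 hold the blocks being decrypted, xmm6..xmm9 the previous
- # ciphertext (initially the IVs) needed for the CBC XOR.  Here the .byte
- # sequences encode aesdec (102,15,56,222) and aesdeclast (223).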
- addq $16,%rbx
- leaq 16(%rsp),%rbp
- movl $1,%ecx
- subq %rbx,%rbp
- .byte 102,15,56,222,209
- prefetcht0 31(%r8,%rbx,1)
- prefetcht0 31(%r9,%rbx,1)
- .byte 102,15,56,222,217
- prefetcht0 31(%r10,%rbx,1)
- prefetcht0 31(%r11,%rbx,1)
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 48-120(%rsi),%xmm1
- cmpl 32(%rsp),%ecx
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- cmovgeq %rbp,%r8
- cmovgq %rbp,%r12
- .byte 102,15,56,222,232
- movups -56(%rsi),%xmm0
- cmpl 36(%rsp),%ecx
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- cmovgeq %rbp,%r9
- cmovgq %rbp,%r13
- .byte 102,15,56,222,233
- movups -40(%rsi),%xmm1
- cmpl 40(%rsp),%ecx
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- cmovgeq %rbp,%r10
- cmovgq %rbp,%r14
- .byte 102,15,56,222,232
- movups -24(%rsi),%xmm0
- cmpl 44(%rsp),%ecx
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- cmovgeq %rbp,%r11
- cmovgq %rbp,%r15
- .byte 102,15,56,222,233
- movups -8(%rsi),%xmm1
- movdqa %xmm10,%xmm11
- .byte 102,15,56,222,208
- prefetcht0 15(%r12,%rbx,1)
- prefetcht0 15(%r13,%rbx,1)
- .byte 102,15,56,222,216
- prefetcht0 15(%r14,%rbx,1)
- prefetcht0 15(%r15,%rbx,1)
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups 128-120(%rsi),%xmm0
- pxor %xmm12,%xmm12
- .byte 102,15,56,222,209
- pcmpgtd %xmm12,%xmm11
- movdqu -120(%rsi),%xmm12
- .byte 102,15,56,222,217
- paddd %xmm11,%xmm10
- movdqa %xmm10,32(%rsp)
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 144-120(%rsi),%xmm1
- cmpl $11,%eax
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups 160-120(%rsi),%xmm0
- jb L$dec4x_tail
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 176-120(%rsi),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups 192-120(%rsi),%xmm0
- je L$dec4x_tail
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- .byte 102,15,56,222,233
- movups 208-120(%rsi),%xmm1
- .byte 102,15,56,222,208
- .byte 102,15,56,222,216
- .byte 102,15,56,222,224
- .byte 102,15,56,222,232
- movups 224-120(%rsi),%xmm0
- jmp L$dec4x_tail
- .p2align 5
- L$dec4x_tail:
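- # The last round key (xmm0) is pre-XORed into the saved previous-ciphertext
- # blocks, so aesdeclast performs the final AddRoundKey and the CBC chaining
- # XOR in one step; fresh ciphertext is then reloaded into xmm6..xmm9 for the
- # next pass.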
- .byte 102,15,56,222,209
- .byte 102,15,56,222,217
- .byte 102,15,56,222,225
- pxor %xmm0,%xmm6
- pxor %xmm0,%xmm7
- .byte 102,15,56,222,233
- movdqu 16-120(%rsi),%xmm1
- pxor %xmm0,%xmm8
- pxor %xmm0,%xmm9
- movdqu 32-120(%rsi),%xmm0
- .byte 102,15,56,223,214
- .byte 102,15,56,223,223
- movdqu -16(%r8,%rbx,1),%xmm6
- movdqu -16(%r9,%rbx,1),%xmm7
- .byte 102,65,15,56,223,224
- .byte 102,65,15,56,223,233
- movdqu -16(%r10,%rbx,1),%xmm8
- movdqu -16(%r11,%rbx,1),%xmm9
- movups %xmm2,-16(%r12,%rbx,1)
- movdqu (%r8,%rbx,1),%xmm2
- movups %xmm3,-16(%r13,%rbx,1)
- movdqu (%r9,%rbx,1),%xmm3
- pxor %xmm12,%xmm2
- movups %xmm4,-16(%r14,%rbx,1)
- movdqu (%r10,%rbx,1),%xmm4
- pxor %xmm12,%xmm3
- movups %xmm5,-16(%r15,%rbx,1)
- movdqu (%r11,%rbx,1),%xmm5
- pxor %xmm12,%xmm4
- pxor %xmm12,%xmm5
- decl %edx
- jnz L$oop_dec4x
- movq 16(%rsp),%rax
- movl 24(%rsp),%edx
- leaq 160(%rdi),%rdi
- decl %edx
- jnz L$dec4x_loop_grande
- L$dec4x_done:
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$dec4x_epilogue:
- .byte 0xf3,0xc3
- .p2align 5
- aesni_multi_cbc_encrypt_avx:
- _avx_cbc_enc_shortcut:
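- # AVX path: up to eight CBC streams interleaved.  xmm15 caches round key 0;
- # per-stream block counts live at 32..60(%rsp) and output-minus-input pointer
- # deltas at 64..120(%rsp).  The scratch area at 128(%rsp) (%rbp) stages the
- # next input blocks of lanes 0-3 (lanes 4-7 stay in xmm10..xmm13), each
- # pre-XORed with round key 0.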
- movq %rsp,%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $192,%rsp
- andq $-128,%rsp
- movq %rax,16(%rsp)
- L$enc8x_body:
- vzeroupper
- vmovdqu (%rsi),%xmm15
- leaq 120(%rsi),%rsi
- leaq 160(%rdi),%rdi
- shrl $1,%edx
- L$enc8x_loop_grande:
- xorl %edx,%edx
- movl -144(%rdi),%ecx
- movq -160(%rdi),%r8
- cmpl %edx,%ecx
- movq -152(%rdi),%rbx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -136(%rdi),%xmm2
- movl %ecx,32(%rsp)
- cmovleq %rsp,%r8
- subq %r8,%rbx
- movq %rbx,64(%rsp)
- movl -104(%rdi),%ecx
- movq -120(%rdi),%r9
- cmpl %edx,%ecx
- movq -112(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -96(%rdi),%xmm3
- movl %ecx,36(%rsp)
- cmovleq %rsp,%r9
- subq %r9,%rbp
- movq %rbp,72(%rsp)
- movl -64(%rdi),%ecx
- movq -80(%rdi),%r10
- cmpl %edx,%ecx
- movq -72(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -56(%rdi),%xmm4
- movl %ecx,40(%rsp)
- cmovleq %rsp,%r10
- subq %r10,%rbp
- movq %rbp,80(%rsp)
- movl -24(%rdi),%ecx
- movq -40(%rdi),%r11
- cmpl %edx,%ecx
- movq -32(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -16(%rdi),%xmm5
- movl %ecx,44(%rsp)
- cmovleq %rsp,%r11
- subq %r11,%rbp
- movq %rbp,88(%rsp)
- movl 16(%rdi),%ecx
- movq 0(%rdi),%r12
- cmpl %edx,%ecx
- movq 8(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 24(%rdi),%xmm6
- movl %ecx,48(%rsp)
- cmovleq %rsp,%r12
- subq %r12,%rbp
- movq %rbp,96(%rsp)
- movl 56(%rdi),%ecx
- movq 40(%rdi),%r13
- cmpl %edx,%ecx
- movq 48(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 64(%rdi),%xmm7
- movl %ecx,52(%rsp)
- cmovleq %rsp,%r13
- subq %r13,%rbp
- movq %rbp,104(%rsp)
- movl 96(%rdi),%ecx
- movq 80(%rdi),%r14
- cmpl %edx,%ecx
- movq 88(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 104(%rdi),%xmm8
- movl %ecx,56(%rsp)
- cmovleq %rsp,%r14
- subq %r14,%rbp
- movq %rbp,112(%rsp)
- movl 136(%rdi),%ecx
- movq 120(%rdi),%r15
- cmpl %edx,%ecx
- movq 128(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 144(%rdi),%xmm9
- movl %ecx,60(%rsp)
- cmovleq %rsp,%r15
- subq %r15,%rbp
- movq %rbp,120(%rsp)
- testl %edx,%edx
- jz L$enc8x_done
- vmovups 16-120(%rsi),%xmm1
- vmovups 32-120(%rsi),%xmm0
- movl 240-120(%rsi),%eax
- vpxor (%r8),%xmm15,%xmm10
- leaq 128(%rsp),%rbp
- vpxor (%r9),%xmm15,%xmm11
- vpxor (%r10),%xmm15,%xmm12
- vpxor (%r11),%xmm15,%xmm13
- vpxor %xmm10,%xmm2,%xmm2
- vpxor (%r12),%xmm15,%xmm10
- vpxor %xmm11,%xmm3,%xmm3
- vpxor (%r13),%xmm15,%xmm11
- vpxor %xmm12,%xmm4,%xmm4
- vpxor (%r14),%xmm15,%xmm12
- vpxor %xmm13,%xmm5,%xmm5
- vpxor (%r15),%xmm15,%xmm13
- vpxor %xmm10,%xmm6,%xmm6
- movl $1,%ecx
- vpxor %xmm11,%xmm7,%xmm7
- vpxor %xmm12,%xmm8,%xmm8
- vpxor %xmm13,%xmm9,%xmm9
- jmp L$oop_enc8x
- .p2align 5
- L$oop_enc8x:
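- # One block per stream per pass (CBC state in xmm2..xmm9).  Each round-key
- # application below is fused with one lane's bookkeeping: compare its block
- # count against %ecx, park exhausted lanes on the stack sink, prefetch, stage
- # the next input block XORed with round key 0 (xmm15), and advance the lane
- # pointer to output+16 so the tail can reach both output and next input.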
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+0(%rsp),%ecx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r8)
- vaesenc %xmm1,%xmm4,%xmm4
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r8,%rbx,1),%rbx
- cmovgeq %rsp,%r8
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r8,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r8),%xmm15,%xmm10
- movq %rbx,64+0(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups -72(%rsi),%xmm1
- leaq 16(%r8,%rbx,1),%r8
- vmovdqu %xmm10,0(%rbp)
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+4(%rsp),%ecx
- movq 64+8(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r9)
- vaesenc %xmm0,%xmm4,%xmm4
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%r9,%rbx,1),%rbx
- cmovgeq %rsp,%r9
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r9,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r9),%xmm15,%xmm11
- movq %rbx,64+8(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups -56(%rsi),%xmm0
- leaq 16(%r9,%rbx,1),%r9
- vmovdqu %xmm11,16(%rbp)
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+8(%rsp),%ecx
- movq 64+16(%rsp),%rbx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r10)
- vaesenc %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r8)
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r10,%rbx,1),%rbx
- cmovgeq %rsp,%r10
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r10,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r10),%xmm15,%xmm12
- movq %rbx,64+16(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups -40(%rsi),%xmm1
- leaq 16(%r10,%rbx,1),%r10
- vmovdqu %xmm12,32(%rbp)
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+12(%rsp),%ecx
- movq 64+24(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r11)
- vaesenc %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r9)
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%r11,%rbx,1),%rbx
- cmovgeq %rsp,%r11
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r11,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r11),%xmm15,%xmm13
- movq %rbx,64+24(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups -24(%rsi),%xmm0
- leaq 16(%r11,%rbx,1),%r11
- vmovdqu %xmm13,48(%rbp)
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+16(%rsp),%ecx
- movq 64+32(%rsp),%rbx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r12)
- vaesenc %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r10)
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r12,%rbx,1),%rbx
- cmovgeq %rsp,%r12
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r12,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r12),%xmm15,%xmm10
- movq %rbx,64+32(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups -8(%rsi),%xmm1
- leaq 16(%r12,%rbx,1),%r12
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+20(%rsp),%ecx
- movq 64+40(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r13)
- vaesenc %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r11)
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%rbx,%r13,1),%rbx
- cmovgeq %rsp,%r13
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r13,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r13),%xmm15,%xmm11
- movq %rbx,64+40(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 8(%rsi),%xmm0
- leaq 16(%r13,%rbx,1),%r13
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+24(%rsp),%ecx
- movq 64+48(%rsp),%rbx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r14)
- vaesenc %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r12)
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r14,%rbx,1),%rbx
- cmovgeq %rsp,%r14
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r14,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r14),%xmm15,%xmm12
- movq %rbx,64+48(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 24(%rsi),%xmm1
- leaq 16(%r14,%rbx,1),%r14
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+28(%rsp),%ecx
- movq 64+56(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r15)
- vaesenc %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r13)
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%r15,%rbx,1),%rbx
- cmovgeq %rsp,%r15
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r15,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r15),%xmm15,%xmm13
- movq %rbx,64+56(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 40(%rsi),%xmm0
- leaq 16(%r15,%rbx,1),%r15
- vmovdqu 32(%rsp),%xmm14
- prefetcht0 15(%r14)
- prefetcht0 15(%r15)
- cmpl $11,%eax
- jb L$enc8x_tail
- vaesenc %xmm1,%xmm2,%xmm2
- vaesenc %xmm1,%xmm3,%xmm3
- vaesenc %xmm1,%xmm4,%xmm4
- vaesenc %xmm1,%xmm5,%xmm5
- vaesenc %xmm1,%xmm6,%xmm6
- vaesenc %xmm1,%xmm7,%xmm7
- vaesenc %xmm1,%xmm8,%xmm8
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 176-120(%rsi),%xmm1
- vaesenc %xmm0,%xmm2,%xmm2
- vaesenc %xmm0,%xmm3,%xmm3
- vaesenc %xmm0,%xmm4,%xmm4
- vaesenc %xmm0,%xmm5,%xmm5
- vaesenc %xmm0,%xmm6,%xmm6
- vaesenc %xmm0,%xmm7,%xmm7
- vaesenc %xmm0,%xmm8,%xmm8
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 192-120(%rsi),%xmm0
- je L$enc8x_tail
- vaesenc %xmm1,%xmm2,%xmm2
- vaesenc %xmm1,%xmm3,%xmm3
- vaesenc %xmm1,%xmm4,%xmm4
- vaesenc %xmm1,%xmm5,%xmm5
- vaesenc %xmm1,%xmm6,%xmm6
- vaesenc %xmm1,%xmm7,%xmm7
- vaesenc %xmm1,%xmm8,%xmm8
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 208-120(%rsi),%xmm1
- vaesenc %xmm0,%xmm2,%xmm2
- vaesenc %xmm0,%xmm3,%xmm3
- vaesenc %xmm0,%xmm4,%xmm4
- vaesenc %xmm0,%xmm5,%xmm5
- vaesenc %xmm0,%xmm6,%xmm6
- vaesenc %xmm0,%xmm7,%xmm7
- vaesenc %xmm0,%xmm8,%xmm8
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 224-120(%rsi),%xmm0
- L$enc8x_tail:
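- # Final rounds and vaesenclast; each ciphertext block is stored through the
- # advanced lane pointer, the saved out-in delta restores the input pointer,
- # and the staged next input is XORed in as the next CBC state.  The packed
- # block counters at 32(%rsp)/48(%rsp) are counted down via vpcmpgtd/vpaddd.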
- vaesenc %xmm1,%xmm2,%xmm2
- vpxor %xmm15,%xmm15,%xmm15
- vaesenc %xmm1,%xmm3,%xmm3
- vaesenc %xmm1,%xmm4,%xmm4
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesenc %xmm1,%xmm5,%xmm5
- vaesenc %xmm1,%xmm6,%xmm6
- vpaddd %xmm14,%xmm15,%xmm15
- vmovdqu 48(%rsp),%xmm14
- vaesenc %xmm1,%xmm7,%xmm7
- movq 64(%rsp),%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 16-120(%rsi),%xmm1
- vaesenclast %xmm0,%xmm2,%xmm2
- vmovdqa %xmm15,32(%rsp)
- vpxor %xmm15,%xmm15,%xmm15
- vaesenclast %xmm0,%xmm3,%xmm3
- vaesenclast %xmm0,%xmm4,%xmm4
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesenclast %xmm0,%xmm5,%xmm5
- vaesenclast %xmm0,%xmm6,%xmm6
- vpaddd %xmm15,%xmm14,%xmm14
- vmovdqu -120(%rsi),%xmm15
- vaesenclast %xmm0,%xmm7,%xmm7
- vaesenclast %xmm0,%xmm8,%xmm8
- vmovdqa %xmm14,48(%rsp)
- vaesenclast %xmm0,%xmm9,%xmm9
- vmovups 32-120(%rsi),%xmm0
- vmovups %xmm2,-16(%r8)
- subq %rbx,%r8
- vpxor 0(%rbp),%xmm2,%xmm2
- vmovups %xmm3,-16(%r9)
- subq 72(%rsp),%r9
- vpxor 16(%rbp),%xmm3,%xmm3
- vmovups %xmm4,-16(%r10)
- subq 80(%rsp),%r10
- vpxor 32(%rbp),%xmm4,%xmm4
- vmovups %xmm5,-16(%r11)
- subq 88(%rsp),%r11
- vpxor 48(%rbp),%xmm5,%xmm5
- vmovups %xmm6,-16(%r12)
- subq 96(%rsp),%r12
- vpxor %xmm10,%xmm6,%xmm6
- vmovups %xmm7,-16(%r13)
- subq 104(%rsp),%r13
- vpxor %xmm11,%xmm7,%xmm7
- vmovups %xmm8,-16(%r14)
- subq 112(%rsp),%r14
- vpxor %xmm12,%xmm8,%xmm8
- vmovups %xmm9,-16(%r15)
- subq 120(%rsp),%r15
- vpxor %xmm13,%xmm9,%xmm9
- decl %edx
- jnz L$oop_enc8x
- movq 16(%rsp),%rax
- L$enc8x_done:
- vzeroupper
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$enc8x_epilogue:
- .byte 0xf3,0xc3
- .p2align 5
- aesni_multi_cbc_decrypt_avx:
- _avx_cbc_dec_shortcut:
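- # AVX 8-way CBC decryption.  Saved ciphertext lives in two 128-byte halves
- # starting at 192(%rsp): one half holds each lane's "previous" block
- # (initially the IV) for the CBC XOR, the other collects the blocks being
- # consumed, and xorq $0x80,%rbp flips between them (the sub/and/sub sequence
- # on %rsp above makes the two halves differ only in that address bit).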
- movq %rsp,%rax
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $256,%rsp
- andq $-256,%rsp
- subq $192,%rsp
- movq %rax,16(%rsp)
- L$dec8x_body:
- vzeroupper
- vmovdqu (%rsi),%xmm15
- leaq 120(%rsi),%rsi
- leaq 160(%rdi),%rdi
- shrl $1,%edx
- L$dec8x_loop_grande:
- xorl %edx,%edx
- movl -144(%rdi),%ecx
- movq -160(%rdi),%r8
- cmpl %edx,%ecx
- movq -152(%rdi),%rbx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -136(%rdi),%xmm2
- movl %ecx,32(%rsp)
- cmovleq %rsp,%r8
- subq %r8,%rbx
- movq %rbx,64(%rsp)
- vmovdqu %xmm2,192(%rsp)
- movl -104(%rdi),%ecx
- movq -120(%rdi),%r9
- cmpl %edx,%ecx
- movq -112(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -96(%rdi),%xmm3
- movl %ecx,36(%rsp)
- cmovleq %rsp,%r9
- subq %r9,%rbp
- movq %rbp,72(%rsp)
- vmovdqu %xmm3,208(%rsp)
- movl -64(%rdi),%ecx
- movq -80(%rdi),%r10
- cmpl %edx,%ecx
- movq -72(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -56(%rdi),%xmm4
- movl %ecx,40(%rsp)
- cmovleq %rsp,%r10
- subq %r10,%rbp
- movq %rbp,80(%rsp)
- vmovdqu %xmm4,224(%rsp)
- movl -24(%rdi),%ecx
- movq -40(%rdi),%r11
- cmpl %edx,%ecx
- movq -32(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -16(%rdi),%xmm5
- movl %ecx,44(%rsp)
- cmovleq %rsp,%r11
- subq %r11,%rbp
- movq %rbp,88(%rsp)
- vmovdqu %xmm5,240(%rsp)
- movl 16(%rdi),%ecx
- movq 0(%rdi),%r12
- cmpl %edx,%ecx
- movq 8(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 24(%rdi),%xmm6
- movl %ecx,48(%rsp)
- cmovleq %rsp,%r12
- subq %r12,%rbp
- movq %rbp,96(%rsp)
- vmovdqu %xmm6,256(%rsp)
- movl 56(%rdi),%ecx
- movq 40(%rdi),%r13
- cmpl %edx,%ecx
- movq 48(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 64(%rdi),%xmm7
- movl %ecx,52(%rsp)
- cmovleq %rsp,%r13
- subq %r13,%rbp
- movq %rbp,104(%rsp)
- vmovdqu %xmm7,272(%rsp)
- movl 96(%rdi),%ecx
- movq 80(%rdi),%r14
- cmpl %edx,%ecx
- movq 88(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 104(%rdi),%xmm8
- movl %ecx,56(%rsp)
- cmovleq %rsp,%r14
- subq %r14,%rbp
- movq %rbp,112(%rsp)
- vmovdqu %xmm8,288(%rsp)
- movl 136(%rdi),%ecx
- movq 120(%rdi),%r15
- cmpl %edx,%ecx
- movq 128(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 144(%rdi),%xmm9
- movl %ecx,60(%rsp)
- cmovleq %rsp,%r15
- subq %r15,%rbp
- movq %rbp,120(%rsp)
- vmovdqu %xmm9,304(%rsp)
- testl %edx,%edx
- jz L$dec8x_done
- vmovups 16-120(%rsi),%xmm1
- vmovups 32-120(%rsi),%xmm0
- movl 240-120(%rsi),%eax
- leaq 192+128(%rsp),%rbp
- vmovdqu (%r8),%xmm2
- vmovdqu (%r9),%xmm3
- vmovdqu (%r10),%xmm4
- vmovdqu (%r11),%xmm5
- vmovdqu (%r12),%xmm6
- vmovdqu (%r13),%xmm7
- vmovdqu (%r14),%xmm8
- vmovdqu (%r15),%xmm9
- vmovdqu %xmm2,0(%rbp)
- vpxor %xmm15,%xmm2,%xmm2
- vmovdqu %xmm3,16(%rbp)
- vpxor %xmm15,%xmm3,%xmm3
- vmovdqu %xmm4,32(%rbp)
- vpxor %xmm15,%xmm4,%xmm4
- vmovdqu %xmm5,48(%rbp)
- vpxor %xmm15,%xmm5,%xmm5
- vmovdqu %xmm6,64(%rbp)
- vpxor %xmm15,%xmm6,%xmm6
- vmovdqu %xmm7,80(%rbp)
- vpxor %xmm15,%xmm7,%xmm7
- vmovdqu %xmm8,96(%rbp)
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu %xmm9,112(%rbp)
- vpxor %xmm15,%xmm9,%xmm9
- xorq $0x80,%rbp
- movl $1,%ecx
- jmp L$oop_dec8x
- .p2align 5
- L$oop_dec8x:
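- # As in the encrypt loop, each vaesdec round is fused with one lane's
- # bookkeeping; the incoming ciphertext of lanes 0-3 is staged at
- # 128..176(%rsp) and that of lanes 4-7 kept in xmm10..xmm13, to become the
- # next pass's "previous ciphertext".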
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+0(%rsp),%ecx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r8)
- vaesdec %xmm1,%xmm4,%xmm4
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r8,%rbx,1),%rbx
- cmovgeq %rsp,%r8
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r8,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r8),%xmm10
- movq %rbx,64+0(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups -72(%rsi),%xmm1
- leaq 16(%r8,%rbx,1),%r8
- vmovdqu %xmm10,128(%rsp)
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+4(%rsp),%ecx
- movq 64+8(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r9)
- vaesdec %xmm0,%xmm4,%xmm4
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%r9,%rbx,1),%rbx
- cmovgeq %rsp,%r9
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r9,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r9),%xmm11
- movq %rbx,64+8(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups -56(%rsi),%xmm0
- leaq 16(%r9,%rbx,1),%r9
- vmovdqu %xmm11,144(%rsp)
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+8(%rsp),%ecx
- movq 64+16(%rsp),%rbx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r10)
- vaesdec %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r8)
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r10,%rbx,1),%rbx
- cmovgeq %rsp,%r10
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r10,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r10),%xmm12
- movq %rbx,64+16(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups -40(%rsi),%xmm1
- leaq 16(%r10,%rbx,1),%r10
- vmovdqu %xmm12,160(%rsp)
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+12(%rsp),%ecx
- movq 64+24(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r11)
- vaesdec %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r9)
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%r11,%rbx,1),%rbx
- cmovgeq %rsp,%r11
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r11,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r11),%xmm13
- movq %rbx,64+24(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups -24(%rsi),%xmm0
- leaq 16(%r11,%rbx,1),%r11
- vmovdqu %xmm13,176(%rsp)
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+16(%rsp),%ecx
- movq 64+32(%rsp),%rbx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r12)
- vaesdec %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r10)
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r12,%rbx,1),%rbx
- cmovgeq %rsp,%r12
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r12,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r12),%xmm10
- movq %rbx,64+32(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups -8(%rsi),%xmm1
- leaq 16(%r12,%rbx,1),%r12
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+20(%rsp),%ecx
- movq 64+40(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r13)
- vaesdec %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r11)
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%rbx,%r13,1),%rbx
- cmovgeq %rsp,%r13
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r13,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r13),%xmm11
- movq %rbx,64+40(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 8(%rsi),%xmm0
- leaq 16(%r13,%rbx,1),%r13
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+24(%rsp),%ecx
- movq 64+48(%rsp),%rbx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r14)
- vaesdec %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r12)
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r14,%rbx,1),%rbx
- cmovgeq %rsp,%r14
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r14,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r14),%xmm12
- movq %rbx,64+48(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 24(%rsi),%xmm1
- leaq 16(%r14,%rbx,1),%r14
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+28(%rsp),%ecx
- movq 64+56(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r15)
- vaesdec %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r13)
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%r15,%rbx,1),%rbx
- cmovgeq %rsp,%r15
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r15,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r15),%xmm13
- movq %rbx,64+56(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 40(%rsi),%xmm0
- leaq 16(%r15,%rbx,1),%r15
- vmovdqu 32(%rsp),%xmm14
- prefetcht0 15(%r14)
- prefetcht0 15(%r15)
- cmpl $11,%eax
- jb L$dec8x_tail
- vaesdec %xmm1,%xmm2,%xmm2
- vaesdec %xmm1,%xmm3,%xmm3
- vaesdec %xmm1,%xmm4,%xmm4
- vaesdec %xmm1,%xmm5,%xmm5
- vaesdec %xmm1,%xmm6,%xmm6
- vaesdec %xmm1,%xmm7,%xmm7
- vaesdec %xmm1,%xmm8,%xmm8
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 176-120(%rsi),%xmm1
- vaesdec %xmm0,%xmm2,%xmm2
- vaesdec %xmm0,%xmm3,%xmm3
- vaesdec %xmm0,%xmm4,%xmm4
- vaesdec %xmm0,%xmm5,%xmm5
- vaesdec %xmm0,%xmm6,%xmm6
- vaesdec %xmm0,%xmm7,%xmm7
- vaesdec %xmm0,%xmm8,%xmm8
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 192-120(%rsi),%xmm0
- je L$dec8x_tail
- vaesdec %xmm1,%xmm2,%xmm2
- vaesdec %xmm1,%xmm3,%xmm3
- vaesdec %xmm1,%xmm4,%xmm4
- vaesdec %xmm1,%xmm5,%xmm5
- vaesdec %xmm1,%xmm6,%xmm6
- vaesdec %xmm1,%xmm7,%xmm7
- vaesdec %xmm1,%xmm8,%xmm8
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 208-120(%rsi),%xmm1
- vaesdec %xmm0,%xmm2,%xmm2
- vaesdec %xmm0,%xmm3,%xmm3
- vaesdec %xmm0,%xmm4,%xmm4
- vaesdec %xmm0,%xmm5,%xmm5
- vaesdec %xmm0,%xmm6,%xmm6
- vaesdec %xmm0,%xmm7,%xmm7
- vaesdec %xmm0,%xmm8,%xmm8
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 224-120(%rsi),%xmm0
- L$dec8x_tail:
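- # vaesdeclast, then XOR with the previous ciphertext held at 0..112(%rbp) and
- # store the plaintext.  The staged ciphertext overwrites the half of the save
- # area that was just consumed and round key 0 (xmm15) is re-applied to the
- # next blocks; xorq $0x80,%rbp then flips to the other half for the next pass.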
- vaesdec %xmm1,%xmm2,%xmm2
- vpxor %xmm15,%xmm15,%xmm15
- vaesdec %xmm1,%xmm3,%xmm3
- vaesdec %xmm1,%xmm4,%xmm4
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesdec %xmm1,%xmm5,%xmm5
- vaesdec %xmm1,%xmm6,%xmm6
- vpaddd %xmm14,%xmm15,%xmm15
- vmovdqu 48(%rsp),%xmm14
- vaesdec %xmm1,%xmm7,%xmm7
- movq 64(%rsp),%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 16-120(%rsi),%xmm1
- vaesdeclast %xmm0,%xmm2,%xmm2
- vmovdqa %xmm15,32(%rsp)
- vpxor %xmm15,%xmm15,%xmm15
- vaesdeclast %xmm0,%xmm3,%xmm3
- vpxor 0(%rbp),%xmm2,%xmm2
- vaesdeclast %xmm0,%xmm4,%xmm4
- vpxor 16(%rbp),%xmm3,%xmm3
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesdeclast %xmm0,%xmm5,%xmm5
- vpxor 32(%rbp),%xmm4,%xmm4
- vaesdeclast %xmm0,%xmm6,%xmm6
- vpxor 48(%rbp),%xmm5,%xmm5
- vpaddd %xmm15,%xmm14,%xmm14
- vmovdqu -120(%rsi),%xmm15
- vaesdeclast %xmm0,%xmm7,%xmm7
- vpxor 64(%rbp),%xmm6,%xmm6
- vaesdeclast %xmm0,%xmm8,%xmm8
- vpxor 80(%rbp),%xmm7,%xmm7
- vmovdqa %xmm14,48(%rsp)
- vaesdeclast %xmm0,%xmm9,%xmm9
- vpxor 96(%rbp),%xmm8,%xmm8
- vmovups 32-120(%rsi),%xmm0
- vmovups %xmm2,-16(%r8)
- subq %rbx,%r8
- vmovdqu 128+0(%rsp),%xmm2
- vpxor 112(%rbp),%xmm9,%xmm9
- vmovups %xmm3,-16(%r9)
- subq 72(%rsp),%r9
- vmovdqu %xmm2,0(%rbp)
- vpxor %xmm15,%xmm2,%xmm2
- vmovdqu 128+16(%rsp),%xmm3
- vmovups %xmm4,-16(%r10)
- subq 80(%rsp),%r10
- vmovdqu %xmm3,16(%rbp)
- vpxor %xmm15,%xmm3,%xmm3
- vmovdqu 128+32(%rsp),%xmm4
- vmovups %xmm5,-16(%r11)
- subq 88(%rsp),%r11
- vmovdqu %xmm4,32(%rbp)
- vpxor %xmm15,%xmm4,%xmm4
- vmovdqu 128+48(%rsp),%xmm5
- vmovups %xmm6,-16(%r12)
- subq 96(%rsp),%r12
- vmovdqu %xmm5,48(%rbp)
- vpxor %xmm15,%xmm5,%xmm5
- vmovdqu %xmm10,64(%rbp)
- vpxor %xmm10,%xmm15,%xmm6
- vmovups %xmm7,-16(%r13)
- subq 104(%rsp),%r13
- vmovdqu %xmm11,80(%rbp)
- vpxor %xmm11,%xmm15,%xmm7
- vmovups %xmm8,-16(%r14)
- subq 112(%rsp),%r14
- vmovdqu %xmm12,96(%rbp)
- vpxor %xmm12,%xmm15,%xmm8
- vmovups %xmm9,-16(%r15)
- subq 120(%rsp),%r15
- vmovdqu %xmm13,112(%rbp)
- vpxor %xmm13,%xmm15,%xmm9
- xorq $128,%rbp
- decl %edx
- jnz L$oop_dec8x
- movq 16(%rsp),%rax
- L$dec8x_done:
- vzeroupper
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$dec8x_epilogue:
- .byte 0xf3,0xc3
|