- .text
- .globl _bn_mul_mont
- .p2align 4
- _bn_mul_mont: # rp=%rdi, ap=%rsi, bp=%rdx, np=%rcx, n0=%r8, num=%r9
- movl %r9d,%r9d # zero-extend num
- movq %rsp,%rax # keep original %rsp for the epilogue
- testl $3,%r9d
- jnz L$mul_enter # num not a multiple of 4: generic 1x loop
- cmpl $8,%r9d
- jb L$mul_enter # num < 8: generic 1x loop
- movl _OPENSSL_ia32cap_P+8(%rip),%r11d # capability word (CPUID.7.0:EBX)
- cmpq %rsi,%rdx
- jne L$mul4x_enter # ap != bp: 4x-unrolled multiplication
- testl $7,%r9d
- jz L$sqr8x_enter # squaring with num % 8 == 0: 8x squaring path
- jmp L$mul4x_enter
- .p2align 4
- L$mul_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- negq %r9
- movq %rsp,%r11
- leaq -16(%rsp,%r9,8),%r10 # scratch vector of num+2 words
- negq %r9
- andq $-1024,%r10 # align scratch down to a 1KB boundary
- subq %r10,%r11
- andq $-4096,%r11
- leaq (%r10,%r11,1),%rsp # new stack top
- movq (%rsp),%r11
- cmpq %r10,%rsp
- ja L$mul_page_walk
- jmp L$mul_page_walk_done
- .p2align 4
- L$mul_page_walk: # touch the stack one page at a time so the guard page is honoured
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r11
- cmpq %r10,%rsp
- ja L$mul_page_walk
- L$mul_page_walk_done:
- movq %rax,8(%rsp,%r9,8) # save original %rsp
- L$mul_body:
- movq %rdx,%r12 # %r12 = bp
- movq (%r8),%r8 # %r8 = *n0
- movq (%r12),%rbx # bp[0]
- movq (%rsi),%rax # ap[0]
- xorq %r14,%r14 # i = 0
- xorq %r15,%r15 # j = 0
- movq %r8,%rbp
- mulq %rbx # ap[0]*bp[0]
- movq %rax,%r10
- movq (%rcx),%rax # np[0]
- imulq %r10,%rbp # m = tp[0]*n0 mod 2^64
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%r13
- leaq 1(%r15),%r15
- jmp L$1st_enter
- .p2align 4
- L$1st: # first outer iteration: tp = ap[]*bp[0] + m*np[], shifted down one word
- addq %rax,%r13
- movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%r13
- movq %r10,%r11
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
- L$1st_enter:
- mulq %rbx
- addq %rax,%r11
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- leaq 1(%r15),%r15
- movq %rdx,%r10
- mulq %rbp
- cmpq %r9,%r15
- jne L$1st
- addq %rax,%r13
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%r13
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
- movq %r10,%r11
- xorq %rdx,%rdx
- addq %r11,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r9,8)
- movq %rdx,(%rsp,%r9,8)
- leaq 1(%r14),%r14
- jmp L$outer
- .p2align 4
- L$outer: # one iteration per remaining word bp[i]
- movq (%r12,%r14,8),%rbx
- xorq %r15,%r15
- movq %r8,%rbp
- movq (%rsp),%r10
- mulq %rbx
- addq %rax,%r10
- movq (%rcx),%rax
- adcq $0,%rdx
- imulq %r10,%rbp
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq 8(%rsp),%r10
- movq %rdx,%r13
- leaq 1(%r15),%r15
- jmp L$inner_enter
- .p2align 4
- L$inner: # tp = (tp + ap[]*bp[i] + m*np[]) >> 64
- addq %rax,%r13
- movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- movq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
- L$inner_enter:
- mulq %rbx
- addq %rax,%r11
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%r10
- movq %rdx,%r11
- adcq $0,%r11
- leaq 1(%r15),%r15
- mulq %rbp
- cmpq %r9,%r15
- jne L$inner
- addq %rax,%r13
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r10,%r13
- movq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
- xorq %rdx,%rdx
- addq %r11,%r13
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r9,8)
- movq %rdx,(%rsp,%r9,8)
- leaq 1(%r14),%r14
- cmpq %r9,%r14
- jb L$outer
- xorq %r14,%r14
- movq (%rsp),%rax
- movq %r9,%r15
- .p2align 4
- L$sub: sbbq (%rcx,%r14,8),%rax # rp[] = tp[] - np[], keep the final borrow
- movq %rax,(%rdi,%r14,8)
- movq 8(%rsp,%r14,8),%rax
- leaq 1(%r14),%r14
- decq %r15
- jnz L$sub
- sbbq $0,%rax # %rax = -1 if tp < np (keep tp), else 0 (keep tp - np)
- movq $-1,%rbx
- xorq %rax,%rbx # %rbx = ~%rax, mask for the subtracted copy
- xorq %r14,%r14
- movq %r9,%r15
- L$copy: # branch-free select between tp and rp, zapping tp as it goes
- movq (%rdi,%r14,8),%rcx
- movq (%rsp,%r14,8),%rdx
- andq %rbx,%rcx
- andq %rax,%rdx
- movq %r9,(%rsp,%r14,8) # zap tp[i]
- orq %rcx,%rdx
- movq %rdx,(%rdi,%r14,8)
- leaq 1(%r14),%r14
- subq $1,%r15
- jnz L$copy
- movq 8(%rsp,%r9,8),%rsi # restore saved %rsp
- movq $1,%rax # return 1
- movq -48(%rsi),%r15
- movq -40(%rsi),%r14
- movq -32(%rsi),%r13
- movq -24(%rsi),%r12
- movq -16(%rsi),%rbp
- movq -8(%rsi),%rbx
- leaq (%rsi),%rsp
- L$mul_epilogue:
- .byte 0xf3,0xc3 # rep ret
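
The routine above is the classic word-serial Montgomery multiplication: L$1st forms tp = ap[]*bp[0] reduced by one word, L$outer/L$inner fold in each remaining bp[i], and L$sub/L$copy perform the final constant-time reduction. As a reading aid, here is a hypothetical C reference model of the same computation; `mont_mul_ref` and its names are illustrative, not OpenSSL API, and the `unsigned __int128` arithmetic stands in for the mulq/adcq carry chains:

```c
#include <stdint.h>
#include <string.h>

typedef unsigned __int128 u128;

/* Hypothetical reference model of the word-serial loop above:
 *   rp = ap * bp * 2^(-64*num) mod np,  n0 = -np[0]^(-1) mod 2^64.
 * tp mirrors the (num+2)-word scratch vector the assembly keeps on
 * the stack; the masked copy at the end mirrors L$copy. */
static void mont_mul_ref(uint64_t *rp, const uint64_t *ap,
                         const uint64_t *bp, const uint64_t *np,
                         uint64_t n0, int num)
{
    uint64_t tp[num + 2];
    memset(tp, 0, sizeof(tp));

    for (int i = 0; i < num; i++) {
        uint64_t c = 0;                       /* tp += ap[] * bp[i] */
        for (int j = 0; j < num; j++) {
            u128 t = (u128)ap[j] * bp[i] + tp[j] + c;
            tp[j] = (uint64_t)t;
            c     = (uint64_t)(t >> 64);
        }
        u128 t = (u128)tp[num] + c;
        tp[num]     = (uint64_t)t;
        tp[num + 1] = (uint64_t)(t >> 64);

        uint64_t m = tp[0] * n0;              /* reduction multiplier */
        t = (u128)m * np[0] + tp[0];          /* low word becomes 0  */
        c = (uint64_t)(t >> 64);
        for (int j = 1; j < num; j++) {       /* tp = (tp + m*np) >> 64 */
            t = (u128)m * np[j] + tp[j] + c;
            tp[j - 1] = (uint64_t)t;
            c         = (uint64_t)(t >> 64);
        }
        t = (u128)tp[num] + c;
        tp[num - 1] = (uint64_t)t;
        tp[num]     = tp[num + 1] + (uint64_t)(t >> 64);
    }

    uint64_t borrow = 0;                      /* rp = tp - np (L$sub) */
    for (int j = 0; j < num; j++) {
        u128 d = (u128)tp[j] - np[j] - borrow;
        rp[j]  = (uint64_t)d;
        borrow = (uint64_t)(d >> 64) & 1;
    }
    /* keep tp if the subtraction borrowed past the top word (L$copy) */
    uint64_t keep = 0 - (uint64_t)(tp[num] < borrow);
    for (int j = 0; j < num; j++)
        rp[j] = (tp[j] & keep) | (rp[j] & ~keep);
}
```

The tail mirrors L$sub and L$copy: the subtraction is always performed and a mask picks the result, so nothing branches on secret data.
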
- .p2align 4
- bn_mul4x_mont: # same calling convention as _bn_mul_mont
- movl %r9d,%r9d
- movq %rsp,%rax
- L$mul4x_enter:
- andl $0x80100,%r11d # BMI2 (bit 8) | ADX (bit 19)
- cmpl $0x80100,%r11d
- je L$mulx4x_enter # both present: take the MULX/ADCX/ADOX path
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- negq %r9
- movq %rsp,%r11
- leaq -32(%rsp,%r9,8),%r10
- negq %r9
- andq $-1024,%r10
- subq %r10,%r11
- andq $-4096,%r11
- leaq (%r10,%r11,1),%rsp
- movq (%rsp),%r11
- cmpq %r10,%rsp
- ja L$mul4x_page_walk
- jmp L$mul4x_page_walk_done
- L$mul4x_page_walk:
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r11
- cmpq %r10,%rsp
- ja L$mul4x_page_walk
- L$mul4x_page_walk_done:
- movq %rax,8(%rsp,%r9,8)
- L$mul4x_body:
- movq %rdi,16(%rsp,%r9,8)
- movq %rdx,%r12
- movq (%r8),%r8
- movq (%r12),%rbx
- movq (%rsi),%rax
- xorq %r14,%r14
- xorq %r15,%r15
- movq %r8,%rbp
- mulq %rbx
- movq %rax,%r10
- movq (%rcx),%rax
- imulq %r10,%rbp
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx),%rax
- adcq $0,%rdx
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- leaq 4(%r15),%r15
- adcq $0,%rdx
- movq %rdi,(%rsp)
- movq %rdx,%r13
- jmp L$1st4x
- .p2align 4
- L$1st4x: # first outer iteration, unrolled four words per pass
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
- mulq %rbx
- addq %rax,%r10
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r13
- movq 8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r15,8)
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- leaq 4(%r15),%r15
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq -16(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-32(%rsp,%r15,8)
- movq %rdx,%r13
- cmpq %r9,%r15
- jb L$1st4x
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
- xorq %rdi,%rdi
- addq %r10,%r13
- adcq $0,%rdi
- movq %r13,-8(%rsp,%r15,8)
- movq %rdi,(%rsp,%r15,8)
- leaq 1(%r14),%r14
- .p2align 2
- L$outer4x: # one iteration per remaining word bp[i]
- movq (%r12,%r14,8),%rbx
- xorq %r15,%r15
- movq (%rsp),%r10
- movq %r8,%rbp
- mulq %rbx
- addq %rax,%r10
- movq (%rcx),%rax
- adcq $0,%rdx
- imulq %r10,%rbp
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx),%rax
- adcq $0,%rdx
- addq 8(%rsp),%r11
- adcq $0,%rdx
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- leaq 4(%r15),%r15
- adcq $0,%rdx
- movq %rdi,(%rsp)
- movq %rdx,%r13
- jmp L$inner4x
- .p2align 4
- L$inner4x: # tp += ap[]*bp[i] + m*np[], four words per pass
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -16(%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
- mulq %rbx
- addq %rax,%r10
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r13
- movq 8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r15,8)
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq 8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- leaq 4(%r15),%r15
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq -16(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-32(%rsp,%r15,8)
- movq %rdx,%r13
- cmpq %r9,%r15
- jb L$inner4x
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -16(%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- leaq 1(%r14),%r14
- movq %rdx,%r10
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
- xorq %rdi,%rdi
- addq %r10,%r13
- adcq $0,%rdi
- addq (%rsp,%r9,8),%r13
- adcq $0,%rdi
- movq %r13,-8(%rsp,%r15,8)
- movq %rdi,(%rsp,%r15,8)
- cmpq %r9,%r14
- jb L$outer4x
- movq 16(%rsp,%r9,8),%rdi
- leaq -4(%r9),%r15
- movq 0(%rsp),%rax
- movq 8(%rsp),%rdx
- shrq $2,%r15
- leaq (%rsp),%rsi
- xorq %r14,%r14
- subq 0(%rcx),%rax
- movq 16(%rsi),%rbx
- movq 24(%rsi),%rbp
- sbbq 8(%rcx),%rdx
- L$sub4x: # rp[] = tp[] - np[], four words per pass
- movq %rax,0(%rdi,%r14,8)
- movq %rdx,8(%rdi,%r14,8)
- sbbq 16(%rcx,%r14,8),%rbx
- movq 32(%rsi,%r14,8),%rax
- movq 40(%rsi,%r14,8),%rdx
- sbbq 24(%rcx,%r14,8),%rbp
- movq %rbx,16(%rdi,%r14,8)
- movq %rbp,24(%rdi,%r14,8)
- sbbq 32(%rcx,%r14,8),%rax
- movq 48(%rsi,%r14,8),%rbx
- movq 56(%rsi,%r14,8),%rbp
- sbbq 40(%rcx,%r14,8),%rdx
- leaq 4(%r14),%r14
- decq %r15
- jnz L$sub4x
- movq %rax,0(%rdi,%r14,8)
- movq 32(%rsi,%r14,8),%rax
- sbbq 16(%rcx,%r14,8),%rbx
- movq %rdx,8(%rdi,%r14,8)
- sbbq 24(%rcx,%r14,8),%rbp
- movq %rbx,16(%rdi,%r14,8)
- sbbq $0,%rax
- movq %rbp,24(%rdi,%r14,8)
- pxor %xmm0,%xmm0
- .byte 102,72,15,110,224 # movq %rax,%xmm4 (borrow-derived mask)
- pcmpeqd %xmm5,%xmm5 # %xmm5 = all ones
- pshufd $0,%xmm4,%xmm4 # broadcast mask
- movq %r9,%r15
- pxor %xmm4,%xmm5 # %xmm5 = ~mask
- shrq $2,%r15
- xorl %eax,%eax
- jmp L$copy4x
- .p2align 4
- L$copy4x: # branch-free select between tp and rp, zapping tp
- movdqa (%rsp,%rax,1),%xmm1
- movdqu (%rdi,%rax,1),%xmm2
- pand %xmm4,%xmm1
- pand %xmm5,%xmm2
- movdqa 16(%rsp,%rax,1),%xmm3
- movdqa %xmm0,(%rsp,%rax,1)
- por %xmm2,%xmm1
- movdqu 16(%rdi,%rax,1),%xmm2
- movdqu %xmm1,(%rdi,%rax,1)
- pand %xmm4,%xmm3
- pand %xmm5,%xmm2
- movdqa %xmm0,16(%rsp,%rax,1)
- por %xmm2,%xmm3
- movdqu %xmm3,16(%rdi,%rax,1)
- leaq 32(%rax),%rax
- decq %r15
- jnz L$copy4x
- movq 8(%rsp,%r9,8),%rsi
- movq $1,%rax
- movq -48(%rsi),%r15
- movq -40(%rsi),%r14
- movq -32(%rsi),%r13
- movq -24(%rsi),%r12
- movq -16(%rsi),%rbp
- movq -8(%rsi),%rbx
- leaq (%rsi),%rsp
- L$mul4x_epilogue:
- .byte 0xf3,0xc3 # rep ret
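
Both the _bn_mul_mont entry and L$mul4x_enter gate the MULX route on the same capability word: _OPENSSL_ia32cap_P+8 holds CPUID.(EAX=7,ECX=0):EBX as OpenSSL fills it in, so `andl $0x80100` / `cmpl $0x80100` asks for BMI2 (bit 8) and ADX (bit 19) together. A hypothetical stand-alone equivalent of that test, using the GCC/Clang cpuid.h helper:

```c
#include <cpuid.h>

/* Sketch of the dispatch test above: take the MULX/ADCX/ADOX path
 * only when CPUID.(EAX=7,ECX=0):EBX reports both BMI2 (bit 8, for
 * mulx) and ADX (bit 19, for adcx/adox). */
static int have_bmi2_adx(void)
{
    unsigned int eax, ebx, ecx, edx;
    if (!__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx))
        return 0;
    return (ebx & 0x80100u) == 0x80100u;  /* (1<<8) | (1<<19) */
}
```

Both bits are required because the fast path mixes mulxq (BMI2) with adcxq/adoxq (ADX); either feature alone is not enough.
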
- .p2align 5
- bn_sqr8x_mont: # squaring path, num a multiple of 8
- movq %rsp,%rax
- L$sqr8x_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- L$sqr8x_prologue:
- movl %r9d,%r10d
- shll $3,%r9d
- shlq $3+2,%r10
- negq %r9
- leaq -64(%rsp,%r9,2),%r11
- movq %rsp,%rbp
- movq (%r8),%r8
- subq %rsi,%r11
- andq $4095,%r11
- cmpq %r11,%r10
- jb L$sqr8x_sp_alt
- subq %r11,%rbp
- leaq -64(%rbp,%r9,2),%rbp
- jmp L$sqr8x_sp_done
- .p2align 5
- L$sqr8x_sp_alt:
- leaq 4096-64(,%r9,2),%r10
- leaq -64(%rbp,%r9,2),%rbp
- subq %r10,%r11
- movq $0,%r10
- cmovcq %r10,%r11
- subq %r11,%rbp
- L$sqr8x_sp_done:
- andq $-64,%rbp
- movq %rsp,%r11
- subq %rbp,%r11
- andq $-4096,%r11
- leaq (%r11,%rbp,1),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja L$sqr8x_page_walk
- jmp L$sqr8x_page_walk_done
- .p2align 4
- L$sqr8x_page_walk:
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja L$sqr8x_page_walk
- L$sqr8x_page_walk_done:
- movq %r9,%r10
- negq %r9
- movq %r8,32(%rsp)
- movq %rax,40(%rsp)
- L$sqr8x_body:
- .byte 102,72,15,110,209 # movq %rcx,%xmm2 (save np)
- pxor %xmm0,%xmm0
- .byte 102,72,15,110,207 # movq %rdi,%xmm1 (save rp)
- .byte 102,73,15,110,218 # movq %r10,%xmm3 (save num*8)
- movl _OPENSSL_ia32cap_P+8(%rip),%eax
- andl $0x80100,%eax # BMI2 | ADX
- cmpl $0x80100,%eax
- jne L$sqr8x_nox # either missing: plain sqr8x path
- call _bn_sqrx8x_internal
- leaq (%r8,%rcx,1),%rbx
- movq %rcx,%r9
- movq %rcx,%rdx
- .byte 102,72,15,126,207 # movq %xmm1,%rdi (restore rp)
- sarq $3+2,%rcx
- jmp L$sqr8x_sub
- .p2align 5
- L$sqr8x_nox:
- call _bn_sqr8x_internal
- leaq (%rdi,%r9,1),%rbx
- movq %r9,%rcx
- movq %r9,%rdx
- .byte 102,72,15,126,207 # movq %xmm1,%rdi (restore rp)
- sarq $3+2,%rcx
- jmp L$sqr8x_sub
- .p2align 5
- L$sqr8x_sub: # rp[] = tp[] - np[], four words per pass
- movq 0(%rbx),%r12
- movq 8(%rbx),%r13
- movq 16(%rbx),%r14
- movq 24(%rbx),%r15
- leaq 32(%rbx),%rbx
- sbbq 0(%rbp),%r12
- sbbq 8(%rbp),%r13
- sbbq 16(%rbp),%r14
- sbbq 24(%rbp),%r15
- leaq 32(%rbp),%rbp
- movq %r12,0(%rdi)
- movq %r13,8(%rdi)
- movq %r14,16(%rdi)
- movq %r15,24(%rdi)
- leaq 32(%rdi),%rdi
- incq %rcx
- jnz L$sqr8x_sub
- sbbq $0,%rax
- leaq (%rbx,%r9,1),%rbx
- leaq (%rdi,%r9,1),%rdi
- .byte 102,72,15,110,200 # movq %rax,%xmm1 (borrow mask)
- pxor %xmm0,%xmm0
- pshufd $0,%xmm1,%xmm1
- movq 40(%rsp),%rsi
- jmp L$sqr8x_cond_copy
- .p2align 5
- L$sqr8x_cond_copy: # branch-free select between tp and rp, zapping tp
- movdqa 0(%rbx),%xmm2
- movdqa 16(%rbx),%xmm3
- leaq 32(%rbx),%rbx
- movdqu 0(%rdi),%xmm4
- movdqu 16(%rdi),%xmm5
- leaq 32(%rdi),%rdi
- movdqa %xmm0,-32(%rbx)
- movdqa %xmm0,-16(%rbx)
- movdqa %xmm0,-32(%rbx,%rdx,1)
- movdqa %xmm0,-16(%rbx,%rdx,1)
- pcmpeqd %xmm1,%xmm0
- pand %xmm1,%xmm2
- pand %xmm1,%xmm3
- pand %xmm0,%xmm4
- pand %xmm0,%xmm5
- pxor %xmm0,%xmm0
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- movdqu %xmm4,-32(%rdi)
- movdqu %xmm5,-16(%rdi)
- addq $32,%r9
- jnz L$sqr8x_cond_copy
- movq $1,%rax
- movq -48(%rsi),%r15
- movq -40(%rsi),%r14
- movq -32(%rsi),%r13
- movq -24(%rsi),%r12
- movq -16(%rsi),%rbp
- movq -8(%rsi),%rbx
- leaq (%rsi),%rsp
- L$sqr8x_epilogue:
- .byte 0xf3,0xc3 # rep ret
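
L$sqr8x_cond_copy, like L$copy4x earlier, selects the result without branching: a broadcast borrow mask keeps the unreduced value, its pcmpeqd-derived complement keeps the subtracted one, and the scratch vector is zeroed in the same pass. A hypothetical SSE2 intrinsics model of the idea (`cond_copy_ref` is illustrative, not OpenSSL API; mask is all-ones to keep t[], all-zeros to keep r[]):

```c
#include <emmintrin.h>
#include <stdint.h>

/* Branch-free select between the unreduced value t[] and the
 * subtracted value r[], wiping t[] as it is read, as the %xmm0
 * stores in the assembly do. num is assumed even. */
static void cond_copy_ref(uint64_t *r, uint64_t *t, uint64_t mask, int num)
{
    __m128i m  = _mm_set1_epi64x((long long)mask);
    __m128i nm = _mm_set1_epi64x((long long)~mask);
    for (int i = 0; i < num; i += 2) {
        __m128i a = _mm_loadu_si128((const __m128i *)&t[i]);
        __m128i b = _mm_loadu_si128((const __m128i *)&r[i]);
        a = _mm_and_si128(a, m);    /* keep t[] where mask is set   */
        b = _mm_and_si128(b, nm);   /* keep r[] where mask is clear */
        _mm_storeu_si128((__m128i *)&r[i], _mm_or_si128(a, b));
        _mm_storeu_si128((__m128i *)&t[i], _mm_setzero_si128()); /* zap */
    }
}
```
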
- .p2align 5
- bn_mulx4x_mont: # MULX/ADCX/ADOX flavour, same calling convention
- movq %rsp,%rax
- L$mulx4x_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- L$mulx4x_prologue:
- shll $3,%r9d
- xorq %r10,%r10
- subq %r9,%r10
- movq (%r8),%r8
- leaq -72(%rsp,%r10,1),%rbp
- andq $-128,%rbp
- movq %rsp,%r11
- subq %rbp,%r11
- andq $-4096,%r11
- leaq (%r11,%rbp,1),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja L$mulx4x_page_walk
- jmp L$mulx4x_page_walk_done
- .p2align 4
- L$mulx4x_page_walk:
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja L$mulx4x_page_walk
- L$mulx4x_page_walk_done:
- leaq (%rdx,%r9,1),%r10
- movq %r9,0(%rsp)
- shrq $5,%r9
- movq %r10,16(%rsp)
- subq $1,%r9
- movq %r8,24(%rsp)
- movq %rdi,32(%rsp)
- movq %rax,40(%rsp)
- movq %r9,48(%rsp)
- jmp L$mulx4x_body
- .p2align 5
- L$mulx4x_body:
- leaq 8(%rdx),%rdi # %rdi = &bp[1]
- movq (%rdx),%rdx # %rdx = bp[0], the implicit MULX multiplicand
- leaq 64+32(%rsp),%rbx # tp
- movq %rdx,%r9
- mulxq 0(%rsi),%r8,%rax # ap[0]*bp[0]: low -> %r8, high -> %rax
- mulxq 8(%rsi),%r11,%r14
- addq %rax,%r11
- movq %rdi,8(%rsp)
- mulxq 16(%rsi),%r12,%r13
- adcq %r14,%r12
- adcq $0,%r13
- movq %r8,%rdi
- imulq 24(%rsp),%r8
- xorq %rbp,%rbp
- mulxq 24(%rsi),%rax,%r14
- movq %r8,%rdx
- leaq 32(%rsi),%rsi
- adcxq %rax,%r13
- adcxq %rbp,%r14
- mulxq 0(%rcx),%rax,%r10
- adcxq %rax,%rdi
- adoxq %r11,%r10
- mulxq 8(%rcx),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- .byte 0xc4,0x62,0xfb,0xf6,0xa1,0x10,0x00,0x00,0x00 # mulxq 16(%rcx),%rax,%r12
- movq 48(%rsp),%rdi
- movq %r10,-32(%rbx)
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-24(%rbx)
- adcxq %rax,%r12
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r12,-16(%rbx)
- jmp L$mulx4x_1st
- .p2align 5
- L$mulx4x_1st:
- adcxq %rbp,%r15
- mulxq 0(%rsi),%r10,%rax
- adcxq %r14,%r10
- mulxq 8(%rsi),%r11,%r14
- adcxq %rax,%r11
- mulxq 16(%rsi),%r12,%rax
- adcxq %r14,%r12
- mulxq 24(%rsi),%r13,%r14
- .byte 0x67,0x67 # address-size prefixes used as padding
- movq %r8,%rdx
- adcxq %rax,%r13
- adcxq %rbp,%r14
- leaq 32(%rsi),%rsi
- leaq 32(%rbx),%rbx
- adoxq %r15,%r10
- mulxq 0(%rcx),%rax,%r15
- adcxq %rax,%r10
- adoxq %r15,%r11
- mulxq 8(%rcx),%rax,%r15
- adcxq %rax,%r11
- adoxq %r15,%r12
- mulxq 16(%rcx),%rax,%r15
- movq %r10,-40(%rbx)
- adcxq %rax,%r12
- movq %r11,-32(%rbx)
- adoxq %r15,%r13
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r12,-24(%rbx)
- adcxq %rax,%r13
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r13,-16(%rbx)
- decq %rdi
- jnz L$mulx4x_1st
- movq 0(%rsp),%rax
- movq 8(%rsp),%rdi
- adcq %rbp,%r15
- addq %r15,%r14
- sbbq %r15,%r15
- movq %r14,-8(%rbx)
- jmp L$mulx4x_outer
- .p2align 5
- L$mulx4x_outer:
- movq (%rdi),%rdx
- leaq 8(%rdi),%rdi
- subq %rax,%rsi
- movq %r15,(%rbx)
- leaq 64+32(%rsp),%rbx
- subq %rax,%rcx
- mulxq 0(%rsi),%r8,%r11
- xorl %ebp,%ebp
- movq %rdx,%r9
- mulxq 8(%rsi),%r14,%r12
- adoxq -32(%rbx),%r8
- adcxq %r14,%r11
- mulxq 16(%rsi),%r15,%r13
- adoxq -24(%rbx),%r11
- adcxq %r15,%r12
- adoxq -16(%rbx),%r12
- adcxq %rbp,%r13
- adoxq %rbp,%r13
- movq %rdi,8(%rsp)
- movq %r8,%r15
- imulq 24(%rsp),%r8
- xorl %ebp,%ebp
- mulxq 24(%rsi),%rax,%r14
- movq %r8,%rdx
- adcxq %rax,%r13
- adoxq -8(%rbx),%r13
- adcxq %rbp,%r14
- leaq 32(%rsi),%rsi
- adoxq %rbp,%r14
- mulxq 0(%rcx),%rax,%r10
- adcxq %rax,%r15
- adoxq %r11,%r10
- mulxq 8(%rcx),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- mulxq 16(%rcx),%rax,%r12
- movq %r10,-32(%rbx)
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-24(%rbx)
- leaq 32(%rcx),%rcx
- adcxq %rax,%r12
- adoxq %rbp,%r15
- movq 48(%rsp),%rdi
- movq %r12,-16(%rbx)
- jmp L$mulx4x_inner
- .p2align 5
- L$mulx4x_inner:
- mulxq 0(%rsi),%r10,%rax
- adcxq %rbp,%r15
- adoxq %r14,%r10
- mulxq 8(%rsi),%r11,%r14
- adcxq 0(%rbx),%r10
- adoxq %rax,%r11
- mulxq 16(%rsi),%r12,%rax
- adcxq 8(%rbx),%r11
- adoxq %r14,%r12
- mulxq 24(%rsi),%r13,%r14
- movq %r8,%rdx
- adcxq 16(%rbx),%r12
- adoxq %rax,%r13
- adcxq 24(%rbx),%r13
- adoxq %rbp,%r14
- leaq 32(%rsi),%rsi
- leaq 32(%rbx),%rbx
- adcxq %rbp,%r14
- adoxq %r15,%r10
- mulxq 0(%rcx),%rax,%r15
- adcxq %rax,%r10
- adoxq %r15,%r11
- mulxq 8(%rcx),%rax,%r15
- adcxq %rax,%r11
- adoxq %r15,%r12
- mulxq 16(%rcx),%rax,%r15
- movq %r10,-40(%rbx)
- adcxq %rax,%r12
- adoxq %r15,%r13
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-32(%rbx)
- movq %r12,-24(%rbx)
- adcxq %rax,%r13
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r13,-16(%rbx)
- decq %rdi
- jnz L$mulx4x_inner
- movq 0(%rsp),%rax
- movq 8(%rsp),%rdi
- adcq %rbp,%r15
- subq 0(%rbx),%rbp
- adcq %r15,%r14
- sbbq %r15,%r15
- movq %r14,-8(%rbx)
- cmpq 16(%rsp),%rdi
- jne L$mulx4x_outer
- leaq 64(%rsp),%rbx
- subq %rax,%rcx
- negq %r15
- movq %rax,%rdx
- shrq $3+2,%rax
- movq 32(%rsp),%rdi
- jmp L$mulx4x_sub
- .p2align 5
- L$mulx4x_sub: # rp[] = tp[] - np[], four words per pass
- movq 0(%rbx),%r11
- movq 8(%rbx),%r12
- movq 16(%rbx),%r13
- movq 24(%rbx),%r14
- leaq 32(%rbx),%rbx
- sbbq 0(%rcx),%r11
- sbbq 8(%rcx),%r12
- sbbq 16(%rcx),%r13
- sbbq 24(%rcx),%r14
- leaq 32(%rcx),%rcx
- movq %r11,0(%rdi)
- movq %r12,8(%rdi)
- movq %r13,16(%rdi)
- movq %r14,24(%rdi)
- leaq 32(%rdi),%rdi
- decq %rax
- jnz L$mulx4x_sub
- sbbq $0,%r15
- leaq 64(%rsp),%rbx
- subq %rdx,%rdi
- .byte 102,73,15,110,207 # movq %r15,%xmm1 (borrow mask)
- pxor %xmm0,%xmm0
- pshufd $0,%xmm1,%xmm1
- movq 40(%rsp),%rsi
- jmp L$mulx4x_cond_copy
- .p2align 5
- L$mulx4x_cond_copy: # branch-free select between tp and rp, zapping tp
- movdqa 0(%rbx),%xmm2
- movdqa 16(%rbx),%xmm3
- leaq 32(%rbx),%rbx
- movdqu 0(%rdi),%xmm4
- movdqu 16(%rdi),%xmm5
- leaq 32(%rdi),%rdi
- movdqa %xmm0,-32(%rbx)
- movdqa %xmm0,-16(%rbx)
- pcmpeqd %xmm1,%xmm0
- pand %xmm1,%xmm2
- pand %xmm1,%xmm3
- pand %xmm0,%xmm4
- pand %xmm0,%xmm5
- pxor %xmm0,%xmm0
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- movdqu %xmm4,-32(%rdi)
- movdqu %xmm5,-16(%rdi)
- subq $32,%rdx
- jnz L$mulx4x_cond_copy
- movq %rdx,(%rbx)
- movq $1,%rax
- movq -48(%rsi),%r15
- movq -40(%rsi),%r14
- movq -32(%rsi),%r13
- movq -24(%rsi),%r12
- movq -16(%rsi),%rbp
- movq -8(%rsi),%rbx
- leaq (%rsi),%rsp
- L$mulx4x_epilogue:
- .byte 0xf3,0xc3 # rep ret
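
The mulx4x code keeps two carry chains alive at once: adcxq reads and writes only CF, adoxq only OF, and mulxq produces a 128-bit product without clobbering either flag, which is why the low-half and high-half accumulations can interleave freely above. A hypothetical sketch of the pattern with the matching compiler intrinsics (compile with -mbmi2 -madx; `mulx_accum_step` is illustrative, not OpenSSL API):

```c
#include <immintrin.h>

/* acc += a[] * b, acc has num+2 words. The CF chain accumulates the
 * low product halves, the OF chain the high halves one position up,
 * mirroring the adcxq/adoxq interleave in L$mulx4x_1st/_inner. */
static void mulx_accum_step(const unsigned long long *a, unsigned long long b,
                            unsigned long long *acc, int num)
{
    unsigned char cf = 0, of = 0;
    for (int i = 0; i < num; i++) {
        unsigned long long hi, lo = _mulx_u64(a[i], b, &hi);
        cf = _addcarryx_u64(cf, acc[i],     lo, &acc[i]);     /* CF chain */
        of = _addcarryx_u64(of, acc[i + 1], hi, &acc[i + 1]); /* OF chain */
    }
    /* fold the two pending carries into the top words */
    cf = _addcarry_u64(cf, acc[num], 0, &acc[num]);
    (void)_addcarry_u64(cf, acc[num + 1], of, &acc[num + 1]);
}
```
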
- .byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 # "Montgomery Multiplication for x86_64, CRYPTOGAMS by <appro@openssl.org>"
- .p2align 4