- .text
- .globl _rsaz_512_sqr
- .p2align 5
- _rsaz_512_sqr:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $128+24,%rsp
- L$sqr_body:
- .byte 102,72,15,110,202
- movq (%rsi),%rdx
- movq 8(%rsi),%rax
- movq %rcx,128(%rsp)
- movl $0x80100,%r11d
- andl _OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je L$oop_sqrx
- jmp L$oop_sqr
- .p2align 5
- L$oop_sqr:
- movl %r8d,128+8(%rsp)
- movq %rdx,%rbx
- movq %rax,%rbp
- mulq %rdx
- movq %rax,%r8
- movq 16(%rsi),%rax
- movq %rdx,%r9
- mulq %rbx
- addq %rax,%r9
- movq 24(%rsi),%rax
- movq %rdx,%r10
- adcq $0,%r10
- mulq %rbx
- addq %rax,%r10
- movq 32(%rsi),%rax
- movq %rdx,%r11
- adcq $0,%r11
- mulq %rbx
- addq %rax,%r11
- movq 40(%rsi),%rax
- movq %rdx,%r12
- adcq $0,%r12
- mulq %rbx
- addq %rax,%r12
- movq 48(%rsi),%rax
- movq %rdx,%r13
- adcq $0,%r13
- mulq %rbx
- addq %rax,%r13
- movq 56(%rsi),%rax
- movq %rdx,%r14
- adcq $0,%r14
- mulq %rbx
- addq %rax,%r14
- movq %rbx,%rax
- adcq $0,%rdx
- xorq %rcx,%rcx
- addq %r8,%r8
- movq %rdx,%r15
- adcq $0,%rcx
- mulq %rax
- addq %r8,%rdx
- adcq $0,%rcx
- movq %rax,(%rsp)
- movq %rdx,8(%rsp)
- movq 16(%rsi),%rax
- mulq %rbp
- addq %rax,%r10
- movq 24(%rsi),%rax
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %rbp
- addq %rax,%r11
- movq 32(%rsi),%rax
- adcq $0,%rdx
- addq %rbx,%r11
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %rbp
- addq %rax,%r12
- movq 40(%rsi),%rax
- adcq $0,%rdx
- addq %rbx,%r12
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %rbp
- addq %rax,%r13
- movq 48(%rsi),%rax
- adcq $0,%rdx
- addq %rbx,%r13
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %rbp
- addq %rax,%r14
- movq 56(%rsi),%rax
- adcq $0,%rdx
- addq %rbx,%r14
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %rbp
- addq %rax,%r15
- movq %rbp,%rax
- adcq $0,%rdx
- addq %rbx,%r15
- adcq $0,%rdx
- xorq %rbx,%rbx
- addq %r9,%r9
- movq %rdx,%r8
- adcq %r10,%r10
- adcq $0,%rbx
- mulq %rax
- addq %rcx,%rax
- movq 16(%rsi),%rbp
- addq %rax,%r9
- movq 24(%rsi),%rax
- adcq %rdx,%r10
- adcq $0,%rbx
- movq %r9,16(%rsp)
- movq %r10,24(%rsp)
- mulq %rbp
- addq %rax,%r12
- movq 32(%rsi),%rax
- movq %rdx,%rcx
- adcq $0,%rcx
- mulq %rbp
- addq %rax,%r13
- movq 40(%rsi),%rax
- adcq $0,%rdx
- addq %rcx,%r13
- movq %rdx,%rcx
- adcq $0,%rcx
- mulq %rbp
- addq %rax,%r14
- movq 48(%rsi),%rax
- adcq $0,%rdx
- addq %rcx,%r14
- movq %rdx,%rcx
- adcq $0,%rcx
- mulq %rbp
- addq %rax,%r15
- movq 56(%rsi),%rax
- adcq $0,%rdx
- addq %rcx,%r15
- movq %rdx,%rcx
- adcq $0,%rcx
- mulq %rbp
- addq %rax,%r8
- movq %rbp,%rax
- adcq $0,%rdx
- addq %rcx,%r8
- adcq $0,%rdx
- xorq %rcx,%rcx
- addq %r11,%r11
- movq %rdx,%r9
- adcq %r12,%r12
- adcq $0,%rcx
- mulq %rax
- addq %rbx,%rax
- movq 24(%rsi),%r10
- addq %rax,%r11
- movq 32(%rsi),%rax
- adcq %rdx,%r12
- adcq $0,%rcx
- movq %r11,32(%rsp)
- movq %r12,40(%rsp)
- movq %rax,%r11
- mulq %r10
- addq %rax,%r14
- movq 40(%rsi),%rax
- movq %rdx,%rbx
- adcq $0,%rbx
- movq %rax,%r12
- mulq %r10
- addq %rax,%r15
- movq 48(%rsi),%rax
- adcq $0,%rdx
- addq %rbx,%r15
- movq %rdx,%rbx
- adcq $0,%rbx
- movq %rax,%rbp
- mulq %r10
- addq %rax,%r8
- movq 56(%rsi),%rax
- adcq $0,%rdx
- addq %rbx,%r8
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %r10
- addq %rax,%r9
- movq %r10,%rax
- adcq $0,%rdx
- addq %rbx,%r9
- adcq $0,%rdx
- xorq %rbx,%rbx
- addq %r13,%r13
- movq %rdx,%r10
- adcq %r14,%r14
- adcq $0,%rbx
- mulq %rax
- addq %rcx,%rax
- addq %rax,%r13
- movq %r12,%rax
- adcq %rdx,%r14
- adcq $0,%rbx
- movq %r13,48(%rsp)
- movq %r14,56(%rsp)
- mulq %r11
- addq %rax,%r8
- movq %rbp,%rax
- movq %rdx,%rcx
- adcq $0,%rcx
- mulq %r11
- addq %rax,%r9
- movq 56(%rsi),%rax
- adcq $0,%rdx
- addq %rcx,%r9
- movq %rdx,%rcx
- adcq $0,%rcx
- movq %rax,%r14
- mulq %r11
- addq %rax,%r10
- movq %r11,%rax
- adcq $0,%rdx
- addq %rcx,%r10
- adcq $0,%rdx
- xorq %rcx,%rcx
- addq %r15,%r15
- movq %rdx,%r11
- adcq %r8,%r8
- adcq $0,%rcx
- mulq %rax
- addq %rbx,%rax
- addq %rax,%r15
- movq %rbp,%rax
- adcq %rdx,%r8
- adcq $0,%rcx
- movq %r15,64(%rsp)
- movq %r8,72(%rsp)
- mulq %r12
- addq %rax,%r10
- movq %r14,%rax
- movq %rdx,%rbx
- adcq $0,%rbx
- mulq %r12
- addq %rax,%r11
- movq %r12,%rax
- adcq $0,%rdx
- addq %rbx,%r11
- adcq $0,%rdx
- xorq %rbx,%rbx
- addq %r9,%r9
- movq %rdx,%r12
- adcq %r10,%r10
- adcq $0,%rbx
- mulq %rax
- addq %rcx,%rax
- addq %rax,%r9
- movq %r14,%rax
- adcq %rdx,%r10
- adcq $0,%rbx
- movq %r9,80(%rsp)
- movq %r10,88(%rsp)
- mulq %rbp
- addq %rax,%r12
- movq %rbp,%rax
- adcq $0,%rdx
- xorq %rcx,%rcx
- addq %r11,%r11
- movq %rdx,%r13
- adcq %r12,%r12
- adcq $0,%rcx
- mulq %rax
- addq %rbx,%rax
- addq %rax,%r11
- movq %r14,%rax
- adcq %rdx,%r12
- adcq $0,%rcx
- movq %r11,96(%rsp)
- movq %r12,104(%rsp)
- xorq %rbx,%rbx
- addq %r13,%r13
- adcq $0,%rbx
- mulq %rax
- addq %rcx,%rax
- addq %r13,%rax
- adcq %rbx,%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- .byte 102,72,15,126,205
- movq %rax,112(%rsp)
- movq %rdx,120(%rsp)
- call __rsaz_512_reduce
- addq 64(%rsp),%r8
- adcq 72(%rsp),%r9
- adcq 80(%rsp),%r10
- adcq 88(%rsp),%r11
- adcq 96(%rsp),%r12
- adcq 104(%rsp),%r13
- adcq 112(%rsp),%r14
- adcq 120(%rsp),%r15
- sbbq %rcx,%rcx
- call __rsaz_512_subtract
- movq %r8,%rdx
- movq %r9,%rax
- movl 128+8(%rsp),%r8d
- movq %rdi,%rsi
- decl %r8d
- jnz L$oop_sqr
- jmp L$sqr_tail
- .p2align 5
- L$oop_sqrx:
- movl %r8d,128+8(%rsp)
- .byte 102,72,15,110,199
- mulxq %rax,%r8,%r9
- movq %rax,%rbx
- mulxq 16(%rsi),%rcx,%r10
- xorq %rbp,%rbp
- mulxq 24(%rsi),%rax,%r11
- adcxq %rcx,%r9
- .byte 0xc4,0x62,0xf3,0xf6,0xa6,0x20,0x00,0x00,0x00
- adcxq %rax,%r10
- .byte 0xc4,0x62,0xfb,0xf6,0xae,0x28,0x00,0x00,0x00
- adcxq %rcx,%r11
- mulxq 48(%rsi),%rcx,%r14
- adcxq %rax,%r12
- adcxq %rcx,%r13
- mulxq 56(%rsi),%rax,%r15
- adcxq %rax,%r14
- adcxq %rbp,%r15
- mulxq %rdx,%rax,%rdi
- movq %rbx,%rdx
- xorq %rcx,%rcx
- adoxq %r8,%r8
- adcxq %rdi,%r8
- adoxq %rbp,%rcx
- adcxq %rbp,%rcx
- movq %rax,(%rsp)
- movq %r8,8(%rsp)
- .byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x10,0x00,0x00,0x00
- adoxq %rax,%r10
- adcxq %rbx,%r11
- mulxq 24(%rsi),%rdi,%r8
- adoxq %rdi,%r11
- .byte 0x66
- adcxq %r8,%r12
- mulxq 32(%rsi),%rax,%rbx
- adoxq %rax,%r12
- adcxq %rbx,%r13
- mulxq 40(%rsi),%rdi,%r8
- adoxq %rdi,%r13
- adcxq %r8,%r14
- .byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x30,0x00,0x00,0x00
- adoxq %rax,%r14
- adcxq %rbx,%r15
- .byte 0xc4,0x62,0xc3,0xf6,0x86,0x38,0x00,0x00,0x00
- adoxq %rdi,%r15
- adcxq %rbp,%r8
- mulxq %rdx,%rax,%rdi
- adoxq %rbp,%r8
- .byte 0x48,0x8b,0x96,0x10,0x00,0x00,0x00
- xorq %rbx,%rbx
- adoxq %r9,%r9
- adcxq %rcx,%rax
- adoxq %r10,%r10
- adcxq %rax,%r9
- adoxq %rbp,%rbx
- adcxq %rdi,%r10
- adcxq %rbp,%rbx
- movq %r9,16(%rsp)
- .byte 0x4c,0x89,0x94,0x24,0x18,0x00,0x00,0x00
- mulxq 24(%rsi),%rdi,%r9
- adoxq %rdi,%r12
- adcxq %r9,%r13
- mulxq 32(%rsi),%rax,%rcx
- adoxq %rax,%r13
- adcxq %rcx,%r14
- .byte 0xc4,0x62,0xc3,0xf6,0x8e,0x28,0x00,0x00,0x00
- adoxq %rdi,%r14
- adcxq %r9,%r15
- .byte 0xc4,0xe2,0xfb,0xf6,0x8e,0x30,0x00,0x00,0x00
- adoxq %rax,%r15
- adcxq %rcx,%r8
- mulxq 56(%rsi),%rdi,%r9
- adoxq %rdi,%r8
- adcxq %rbp,%r9
- mulxq %rdx,%rax,%rdi
- adoxq %rbp,%r9
- movq 24(%rsi),%rdx
- xorq %rcx,%rcx
- adoxq %r11,%r11
- adcxq %rbx,%rax
- adoxq %r12,%r12
- adcxq %rax,%r11
- adoxq %rbp,%rcx
- adcxq %rdi,%r12
- adcxq %rbp,%rcx
- movq %r11,32(%rsp)
- movq %r12,40(%rsp)
- mulxq 32(%rsi),%rax,%rbx
- adoxq %rax,%r14
- adcxq %rbx,%r15
- mulxq 40(%rsi),%rdi,%r10
- adoxq %rdi,%r15
- adcxq %r10,%r8
- mulxq 48(%rsi),%rax,%rbx
- adoxq %rax,%r8
- adcxq %rbx,%r9
- mulxq 56(%rsi),%rdi,%r10
- adoxq %rdi,%r9
- adcxq %rbp,%r10
- mulxq %rdx,%rax,%rdi
- adoxq %rbp,%r10
- movq 32(%rsi),%rdx
- xorq %rbx,%rbx
- adoxq %r13,%r13
- adcxq %rcx,%rax
- adoxq %r14,%r14
- adcxq %rax,%r13
- adoxq %rbp,%rbx
- adcxq %rdi,%r14
- adcxq %rbp,%rbx
- movq %r13,48(%rsp)
- movq %r14,56(%rsp)
- mulxq 40(%rsi),%rdi,%r11
- adoxq %rdi,%r8
- adcxq %r11,%r9
- mulxq 48(%rsi),%rax,%rcx
- adoxq %rax,%r9
- adcxq %rcx,%r10
- mulxq 56(%rsi),%rdi,%r11
- adoxq %rdi,%r10
- adcxq %rbp,%r11
- mulxq %rdx,%rax,%rdi
- movq 40(%rsi),%rdx
- adoxq %rbp,%r11
- xorq %rcx,%rcx
- adoxq %r15,%r15
- adcxq %rbx,%rax
- adoxq %r8,%r8
- adcxq %rax,%r15
- adoxq %rbp,%rcx
- adcxq %rdi,%r8
- adcxq %rbp,%rcx
- movq %r15,64(%rsp)
- movq %r8,72(%rsp)
- .byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x30,0x00,0x00,0x00
- adoxq %rax,%r10
- adcxq %rbx,%r11
- .byte 0xc4,0x62,0xc3,0xf6,0xa6,0x38,0x00,0x00,0x00
- adoxq %rdi,%r11
- adcxq %rbp,%r12
- mulxq %rdx,%rax,%rdi
- adoxq %rbp,%r12
- movq 48(%rsi),%rdx
- xorq %rbx,%rbx
- adoxq %r9,%r9
- adcxq %rcx,%rax
- adoxq %r10,%r10
- adcxq %rax,%r9
- adcxq %rdi,%r10
- adoxq %rbp,%rbx
- adcxq %rbp,%rbx
- movq %r9,80(%rsp)
- movq %r10,88(%rsp)
- .byte 0xc4,0x62,0xfb,0xf6,0xae,0x38,0x00,0x00,0x00
- adoxq %rax,%r12
- adoxq %rbp,%r13
- mulxq %rdx,%rax,%rdi
- xorq %rcx,%rcx
- movq 56(%rsi),%rdx
- adoxq %r11,%r11
- adcxq %rbx,%rax
- adoxq %r12,%r12
- adcxq %rax,%r11
- adoxq %rbp,%rcx
- adcxq %rdi,%r12
- adcxq %rbp,%rcx
- .byte 0x4c,0x89,0x9c,0x24,0x60,0x00,0x00,0x00
- .byte 0x4c,0x89,0xa4,0x24,0x68,0x00,0x00,0x00
- mulxq %rdx,%rax,%rdx
- xorq %rbx,%rbx
- adoxq %r13,%r13
- adcxq %rcx,%rax
- adoxq %rbp,%rbx
- adcxq %r13,%rax
- adcxq %rdx,%rbx
- .byte 102,72,15,126,199
- .byte 102,72,15,126,205
- movq 128(%rsp),%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- movq %rax,112(%rsp)
- movq %rbx,120(%rsp)
- call __rsaz_512_reducex
- addq 64(%rsp),%r8
- adcq 72(%rsp),%r9
- adcq 80(%rsp),%r10
- adcq 88(%rsp),%r11
- adcq 96(%rsp),%r12
- adcq 104(%rsp),%r13
- adcq 112(%rsp),%r14
- adcq 120(%rsp),%r15
- sbbq %rcx,%rcx
- call __rsaz_512_subtract
- movq %r8,%rdx
- movq %r9,%rax
- movl 128+8(%rsp),%r8d
- movq %rdi,%rsi
- decl %r8d
- jnz L$oop_sqrx
- L$sqr_tail:
- leaq 128+24+48(%rsp),%rax
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$sqr_epilogue:
- .byte 0xf3,0xc3
- .globl _rsaz_512_mul
- .p2align 5
- _rsaz_512_mul:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $128+24,%rsp
- L$mul_body:
- .byte 102,72,15,110,199
- .byte 102,72,15,110,201
- movq %r8,128(%rsp)
- movl $0x80100,%r11d
- andl _OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je L$mulx
- movq (%rdx),%rbx
- movq %rdx,%rbp
- call __rsaz_512_mul
- .byte 102,72,15,126,199
- .byte 102,72,15,126,205
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- call __rsaz_512_reduce
- jmp L$mul_tail
- .p2align 5
- L$mulx:
- movq %rdx,%rbp
- movq (%rdx),%rdx
- call __rsaz_512_mulx
- .byte 102,72,15,126,199
- .byte 102,72,15,126,205
- movq 128(%rsp),%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- call __rsaz_512_reducex
- L$mul_tail:
- addq 64(%rsp),%r8
- adcq 72(%rsp),%r9
- adcq 80(%rsp),%r10
- adcq 88(%rsp),%r11
- adcq 96(%rsp),%r12
- adcq 104(%rsp),%r13
- adcq 112(%rsp),%r14
- adcq 120(%rsp),%r15
- sbbq %rcx,%rcx
- call __rsaz_512_subtract
- leaq 128+24+48(%rsp),%rax
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$mul_epilogue:
- .byte 0xf3,0xc3
- .globl _rsaz_512_mul_gather4
- .p2align 5
- _rsaz_512_mul_gather4:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $152,%rsp
- L$mul_gather4_body:
- movd %r9d,%xmm8
- movdqa L$inc+16(%rip),%xmm1
- movdqa L$inc(%rip),%xmm0
- pshufd $0,%xmm8,%xmm8
- movdqa %xmm1,%xmm7
- movdqa %xmm1,%xmm2
- paddd %xmm0,%xmm1
- pcmpeqd %xmm8,%xmm0
- movdqa %xmm7,%xmm3
- paddd %xmm1,%xmm2
- pcmpeqd %xmm8,%xmm1
- movdqa %xmm7,%xmm4
- paddd %xmm2,%xmm3
- pcmpeqd %xmm8,%xmm2
- movdqa %xmm7,%xmm5
- paddd %xmm3,%xmm4
- pcmpeqd %xmm8,%xmm3
- movdqa %xmm7,%xmm6
- paddd %xmm4,%xmm5
- pcmpeqd %xmm8,%xmm4
- paddd %xmm5,%xmm6
- pcmpeqd %xmm8,%xmm5
- paddd %xmm6,%xmm7
- pcmpeqd %xmm8,%xmm6
- pcmpeqd %xmm8,%xmm7
- movdqa 0(%rdx),%xmm8
- movdqa 16(%rdx),%xmm9
- movdqa 32(%rdx),%xmm10
- movdqa 48(%rdx),%xmm11
- pand %xmm0,%xmm8
- movdqa 64(%rdx),%xmm12
- pand %xmm1,%xmm9
- movdqa 80(%rdx),%xmm13
- pand %xmm2,%xmm10
- movdqa 96(%rdx),%xmm14
- pand %xmm3,%xmm11
- movdqa 112(%rdx),%xmm15
- leaq 128(%rdx),%rbp
- pand %xmm4,%xmm12
- pand %xmm5,%xmm13
- pand %xmm6,%xmm14
- pand %xmm7,%xmm15
- por %xmm10,%xmm8
- por %xmm11,%xmm9
- por %xmm12,%xmm8
- por %xmm13,%xmm9
- por %xmm14,%xmm8
- por %xmm15,%xmm9
- por %xmm9,%xmm8
- pshufd $0x4e,%xmm8,%xmm9
- por %xmm9,%xmm8
- movl $0x80100,%r11d
- andl _OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je L$mulx_gather
- .byte 102,76,15,126,195
- movq %r8,128(%rsp)
- movq %rdi,128+8(%rsp)
- movq %rcx,128+16(%rsp)
- movq (%rsi),%rax
- movq 8(%rsi),%rcx
- mulq %rbx
- movq %rax,(%rsp)
- movq %rcx,%rax
- movq %rdx,%r8
- mulq %rbx
- addq %rax,%r8
- movq 16(%rsi),%rax
- movq %rdx,%r9
- adcq $0,%r9
- mulq %rbx
- addq %rax,%r9
- movq 24(%rsi),%rax
- movq %rdx,%r10
- adcq $0,%r10
- mulq %rbx
- addq %rax,%r10
- movq 32(%rsi),%rax
- movq %rdx,%r11
- adcq $0,%r11
- mulq %rbx
- addq %rax,%r11
- movq 40(%rsi),%rax
- movq %rdx,%r12
- adcq $0,%r12
- mulq %rbx
- addq %rax,%r12
- movq 48(%rsi),%rax
- movq %rdx,%r13
- adcq $0,%r13
- mulq %rbx
- addq %rax,%r13
- movq 56(%rsi),%rax
- movq %rdx,%r14
- adcq $0,%r14
- mulq %rbx
- addq %rax,%r14
- movq (%rsi),%rax
- movq %rdx,%r15
- adcq $0,%r15
- leaq 8(%rsp),%rdi
- movl $7,%ecx
- jmp L$oop_mul_gather
- .p2align 5
- L$oop_mul_gather:
- movdqa 0(%rbp),%xmm8
- movdqa 16(%rbp),%xmm9
- movdqa 32(%rbp),%xmm10
- movdqa 48(%rbp),%xmm11
- pand %xmm0,%xmm8
- movdqa 64(%rbp),%xmm12
- pand %xmm1,%xmm9
- movdqa 80(%rbp),%xmm13
- pand %xmm2,%xmm10
- movdqa 96(%rbp),%xmm14
- pand %xmm3,%xmm11
- movdqa 112(%rbp),%xmm15
- leaq 128(%rbp),%rbp
- pand %xmm4,%xmm12
- pand %xmm5,%xmm13
- pand %xmm6,%xmm14
- pand %xmm7,%xmm15
- por %xmm10,%xmm8
- por %xmm11,%xmm9
- por %xmm12,%xmm8
- por %xmm13,%xmm9
- por %xmm14,%xmm8
- por %xmm15,%xmm9
- por %xmm9,%xmm8
- pshufd $0x4e,%xmm8,%xmm9
- por %xmm9,%xmm8
- .byte 102,76,15,126,195
- mulq %rbx
- addq %rax,%r8
- movq 8(%rsi),%rax
- movq %r8,(%rdi)
- movq %rdx,%r8
- adcq $0,%r8
- mulq %rbx
- addq %rax,%r9
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r9,%r8
- movq %rdx,%r9
- adcq $0,%r9
- mulq %rbx
- addq %rax,%r10
- movq 24(%rsi),%rax
- adcq $0,%rdx
- addq %r10,%r9
- movq %rdx,%r10
- adcq $0,%r10
- mulq %rbx
- addq %rax,%r11
- movq 32(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%r10
- movq %rdx,%r11
- adcq $0,%r11
- mulq %rbx
- addq %rax,%r12
- movq 40(%rsi),%rax
- adcq $0,%rdx
- addq %r12,%r11
- movq %rdx,%r12
- adcq $0,%r12
- mulq %rbx
- addq %rax,%r13
- movq 48(%rsi),%rax
- adcq $0,%rdx
- addq %r13,%r12
- movq %rdx,%r13
- adcq $0,%r13
- mulq %rbx
- addq %rax,%r14
- movq 56(%rsi),%rax
- adcq $0,%rdx
- addq %r14,%r13
- movq %rdx,%r14
- adcq $0,%r14
- mulq %rbx
- addq %rax,%r15
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r15,%r14
- movq %rdx,%r15
- adcq $0,%r15
- leaq 8(%rdi),%rdi
- decl %ecx
- jnz L$oop_mul_gather
- movq %r8,(%rdi)
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r12,32(%rdi)
- movq %r13,40(%rdi)
- movq %r14,48(%rdi)
- movq %r15,56(%rdi)
- movq 128+8(%rsp),%rdi
- movq 128+16(%rsp),%rbp
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- call __rsaz_512_reduce
- jmp L$mul_gather_tail
- .p2align 5
- L$mulx_gather:
- .byte 102,76,15,126,194
- movq %r8,128(%rsp)
- movq %rdi,128+8(%rsp)
- movq %rcx,128+16(%rsp)
- mulxq (%rsi),%rbx,%r8
- movq %rbx,(%rsp)
- xorl %edi,%edi
- mulxq 8(%rsi),%rax,%r9
- mulxq 16(%rsi),%rbx,%r10
- adcxq %rax,%r8
- mulxq 24(%rsi),%rax,%r11
- adcxq %rbx,%r9
- mulxq 32(%rsi),%rbx,%r12
- adcxq %rax,%r10
- mulxq 40(%rsi),%rax,%r13
- adcxq %rbx,%r11
- mulxq 48(%rsi),%rbx,%r14
- adcxq %rax,%r12
- mulxq 56(%rsi),%rax,%r15
- adcxq %rbx,%r13
- adcxq %rax,%r14
- .byte 0x67
- movq %r8,%rbx
- adcxq %rdi,%r15
- movq $-7,%rcx
- jmp L$oop_mulx_gather
- .p2align 5
- L$oop_mulx_gather:
- movdqa 0(%rbp),%xmm8
- movdqa 16(%rbp),%xmm9
- movdqa 32(%rbp),%xmm10
- movdqa 48(%rbp),%xmm11
- pand %xmm0,%xmm8
- movdqa 64(%rbp),%xmm12
- pand %xmm1,%xmm9
- movdqa 80(%rbp),%xmm13
- pand %xmm2,%xmm10
- movdqa 96(%rbp),%xmm14
- pand %xmm3,%xmm11
- movdqa 112(%rbp),%xmm15
- leaq 128(%rbp),%rbp
- pand %xmm4,%xmm12
- pand %xmm5,%xmm13
- pand %xmm6,%xmm14
- pand %xmm7,%xmm15
- por %xmm10,%xmm8
- por %xmm11,%xmm9
- por %xmm12,%xmm8
- por %xmm13,%xmm9
- por %xmm14,%xmm8
- por %xmm15,%xmm9
- por %xmm9,%xmm8
- pshufd $0x4e,%xmm8,%xmm9
- por %xmm9,%xmm8
- .byte 102,76,15,126,194
- .byte 0xc4,0x62,0xfb,0xf6,0x86,0x00,0x00,0x00,0x00
- adcxq %rax,%rbx
- adoxq %r9,%r8
- mulxq 8(%rsi),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
- mulxq 16(%rsi),%rax,%r10
- adcxq %rax,%r9
- adoxq %r11,%r10
- .byte 0xc4,0x62,0xfb,0xf6,0x9e,0x18,0x00,0x00,0x00
- adcxq %rax,%r10
- adoxq %r12,%r11
- mulxq 32(%rsi),%rax,%r12
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 40(%rsi),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
- .byte 0xc4,0x62,0xfb,0xf6,0xb6,0x30,0x00,0x00,0x00
- adcxq %rax,%r13
- .byte 0x67
- adoxq %r15,%r14
- mulxq 56(%rsi),%rax,%r15
- movq %rbx,64(%rsp,%rcx,8)
- adcxq %rax,%r14
- adoxq %rdi,%r15
- movq %r8,%rbx
- adcxq %rdi,%r15
- incq %rcx
- jnz L$oop_mulx_gather
- movq %r8,64(%rsp)
- movq %r9,64+8(%rsp)
- movq %r10,64+16(%rsp)
- movq %r11,64+24(%rsp)
- movq %r12,64+32(%rsp)
- movq %r13,64+40(%rsp)
- movq %r14,64+48(%rsp)
- movq %r15,64+56(%rsp)
- movq 128(%rsp),%rdx
- movq 128+8(%rsp),%rdi
- movq 128+16(%rsp),%rbp
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- call __rsaz_512_reducex
- L$mul_gather_tail:
- addq 64(%rsp),%r8
- adcq 72(%rsp),%r9
- adcq 80(%rsp),%r10
- adcq 88(%rsp),%r11
- adcq 96(%rsp),%r12
- adcq 104(%rsp),%r13
- adcq 112(%rsp),%r14
- adcq 120(%rsp),%r15
- sbbq %rcx,%rcx
- call __rsaz_512_subtract
- leaq 128+24+48(%rsp),%rax
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$mul_gather4_epilogue:
- .byte 0xf3,0xc3
- .globl _rsaz_512_mul_scatter4
- .p2align 5
- _rsaz_512_mul_scatter4:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- movl %r9d,%r9d
- subq $128+24,%rsp
- L$mul_scatter4_body:
- leaq (%r8,%r9,8),%r8
- .byte 102,72,15,110,199
- .byte 102,72,15,110,202
- .byte 102,73,15,110,208
- movq %rcx,128(%rsp)
- movq %rdi,%rbp
- movl $0x80100,%r11d
- andl _OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je L$mulx_scatter
- movq (%rdi),%rbx
- call __rsaz_512_mul
- .byte 102,72,15,126,199
- .byte 102,72,15,126,205
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- call __rsaz_512_reduce
- jmp L$mul_scatter_tail
- .p2align 5
- L$mulx_scatter:
- movq (%rdi),%rdx
- call __rsaz_512_mulx
- .byte 102,72,15,126,199
- .byte 102,72,15,126,205
- movq 128(%rsp),%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
- call __rsaz_512_reducex
- L$mul_scatter_tail:
- addq 64(%rsp),%r8
- adcq 72(%rsp),%r9
- adcq 80(%rsp),%r10
- adcq 88(%rsp),%r11
- adcq 96(%rsp),%r12
- adcq 104(%rsp),%r13
- adcq 112(%rsp),%r14
- adcq 120(%rsp),%r15
- .byte 102,72,15,126,214
- sbbq %rcx,%rcx
- call __rsaz_512_subtract
- movq %r8,0(%rsi)
- movq %r9,128(%rsi)
- movq %r10,256(%rsi)
- movq %r11,384(%rsi)
- movq %r12,512(%rsi)
- movq %r13,640(%rsi)
- movq %r14,768(%rsi)
- movq %r15,896(%rsi)
- leaq 128+24+48(%rsp),%rax
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$mul_scatter4_epilogue:
- .byte 0xf3,0xc3
- .globl _rsaz_512_mul_by_one
- .p2align 5
- _rsaz_512_mul_by_one:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $128+24,%rsp
- L$mul_by_one_body:
- movl _OPENSSL_ia32cap_P+8(%rip),%eax
- movq %rdx,%rbp
- movq %rcx,128(%rsp)
- movq (%rsi),%r8
- pxor %xmm0,%xmm0
- movq 8(%rsi),%r9
- movq 16(%rsi),%r10
- movq 24(%rsi),%r11
- movq 32(%rsi),%r12
- movq 40(%rsi),%r13
- movq 48(%rsi),%r14
- movq 56(%rsi),%r15
- movdqa %xmm0,(%rsp)
- movdqa %xmm0,16(%rsp)
- movdqa %xmm0,32(%rsp)
- movdqa %xmm0,48(%rsp)
- movdqa %xmm0,64(%rsp)
- movdqa %xmm0,80(%rsp)
- movdqa %xmm0,96(%rsp)
- andl $0x80100,%eax
- cmpl $0x80100,%eax
- je L$by_one_callx
- call __rsaz_512_reduce
- jmp L$by_one_tail
- .p2align 5
- L$by_one_callx:
- movq 128(%rsp),%rdx
- call __rsaz_512_reducex
- L$by_one_tail:
- movq %r8,(%rdi)
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r12,32(%rdi)
- movq %r13,40(%rdi)
- movq %r14,48(%rdi)
- movq %r15,56(%rdi)
- leaq 128+24+48(%rsp),%rax
- movq -48(%rax),%r15
- movq -40(%rax),%r14
- movq -32(%rax),%r13
- movq -24(%rax),%r12
- movq -16(%rax),%rbp
- movq -8(%rax),%rbx
- leaq (%rax),%rsp
- L$mul_by_one_epilogue:
- .byte 0xf3,0xc3
- .p2align 5
- __rsaz_512_reduce:
- movq %r8,%rbx
- imulq 128+8(%rsp),%rbx
- movq 0(%rbp),%rax
- movl $8,%ecx
- jmp L$reduction_loop
- .p2align 5
- L$reduction_loop:
- mulq %rbx
- movq 8(%rbp),%rax
- negq %r8
- movq %rdx,%r8
- adcq $0,%r8
- mulq %rbx
- addq %rax,%r9
- movq 16(%rbp),%rax
- adcq $0,%rdx
- addq %r9,%r8
- movq %rdx,%r9
- adcq $0,%r9
- mulq %rbx
- addq %rax,%r10
- movq 24(%rbp),%rax
- adcq $0,%rdx
- addq %r10,%r9
- movq %rdx,%r10
- adcq $0,%r10
- mulq %rbx
- addq %rax,%r11
- movq 32(%rbp),%rax
- adcq $0,%rdx
- addq %r11,%r10
- movq 128+8(%rsp),%rsi
- adcq $0,%rdx
- movq %rdx,%r11
- mulq %rbx
- addq %rax,%r12
- movq 40(%rbp),%rax
- adcq $0,%rdx
- imulq %r8,%rsi
- addq %r12,%r11
- movq %rdx,%r12
- adcq $0,%r12
- mulq %rbx
- addq %rax,%r13
- movq 48(%rbp),%rax
- adcq $0,%rdx
- addq %r13,%r12
- movq %rdx,%r13
- adcq $0,%r13
- mulq %rbx
- addq %rax,%r14
- movq 56(%rbp),%rax
- adcq $0,%rdx
- addq %r14,%r13
- movq %rdx,%r14
- adcq $0,%r14
- mulq %rbx
- movq %rsi,%rbx
- addq %rax,%r15
- movq 0(%rbp),%rax
- adcq $0,%rdx
- addq %r15,%r14
- movq %rdx,%r15
- adcq $0,%r15
- decl %ecx
- jne L$reduction_loop
- .byte 0xf3,0xc3
- .p2align 5
- __rsaz_512_reducex:
- imulq %r8,%rdx
- xorq %rsi,%rsi
- movl $8,%ecx
- jmp L$reduction_loopx
- .p2align 5
- L$reduction_loopx:
- movq %r8,%rbx
- mulxq 0(%rbp),%rax,%r8
- adcxq %rbx,%rax
- adoxq %r9,%r8
- mulxq 8(%rbp),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
- mulxq 16(%rbp),%rbx,%r10
- adcxq %rbx,%r9
- adoxq %r11,%r10
- mulxq 24(%rbp),%rbx,%r11
- adcxq %rbx,%r10
- adoxq %r12,%r11
- .byte 0xc4,0x62,0xe3,0xf6,0xa5,0x20,0x00,0x00,0x00
- movq %rdx,%rax
- movq %r8,%rdx
- adcxq %rbx,%r11
- adoxq %r13,%r12
- mulxq 128+8(%rsp),%rbx,%rdx
- movq %rax,%rdx
- mulxq 40(%rbp),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
- .byte 0xc4,0x62,0xfb,0xf6,0xb5,0x30,0x00,0x00,0x00
- adcxq %rax,%r13
- adoxq %r15,%r14
- mulxq 56(%rbp),%rax,%r15
- movq %rbx,%rdx
- adcxq %rax,%r14
- adoxq %rsi,%r15
- adcxq %rsi,%r15
- decl %ecx
- jne L$reduction_loopx
- .byte 0xf3,0xc3
- .p2align 5
- __rsaz_512_subtract:
- movq %r8,(%rdi)
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r12,32(%rdi)
- movq %r13,40(%rdi)
- movq %r14,48(%rdi)
- movq %r15,56(%rdi)
- movq 0(%rbp),%r8
- movq 8(%rbp),%r9
- negq %r8
- notq %r9
- andq %rcx,%r8
- movq 16(%rbp),%r10
- andq %rcx,%r9
- notq %r10
- movq 24(%rbp),%r11
- andq %rcx,%r10
- notq %r11
- movq 32(%rbp),%r12
- andq %rcx,%r11
- notq %r12
- movq 40(%rbp),%r13
- andq %rcx,%r12
- notq %r13
- movq 48(%rbp),%r14
- andq %rcx,%r13
- notq %r14
- movq 56(%rbp),%r15
- andq %rcx,%r14
- notq %r15
- andq %rcx,%r15
- addq (%rdi),%r8
- adcq 8(%rdi),%r9
- adcq 16(%rdi),%r10
- adcq 24(%rdi),%r11
- adcq 32(%rdi),%r12
- adcq 40(%rdi),%r13
- adcq 48(%rdi),%r14
- adcq 56(%rdi),%r15
- movq %r8,(%rdi)
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r12,32(%rdi)
- movq %r13,40(%rdi)
- movq %r14,48(%rdi)
- movq %r15,56(%rdi)
- .byte 0xf3,0xc3
- .p2align 5
- __rsaz_512_mul:
- leaq 8(%rsp),%rdi
- movq (%rsi),%rax
- mulq %rbx
- movq %rax,(%rdi)
- movq 8(%rsi),%rax
- movq %rdx,%r8
- mulq %rbx
- addq %rax,%r8
- movq 16(%rsi),%rax
- movq %rdx,%r9
- adcq $0,%r9
- mulq %rbx
- addq %rax,%r9
- movq 24(%rsi),%rax
- movq %rdx,%r10
- adcq $0,%r10
- mulq %rbx
- addq %rax,%r10
- movq 32(%rsi),%rax
- movq %rdx,%r11
- adcq $0,%r11
- mulq %rbx
- addq %rax,%r11
- movq 40(%rsi),%rax
- movq %rdx,%r12
- adcq $0,%r12
- mulq %rbx
- addq %rax,%r12
- movq 48(%rsi),%rax
- movq %rdx,%r13
- adcq $0,%r13
- mulq %rbx
- addq %rax,%r13
- movq 56(%rsi),%rax
- movq %rdx,%r14
- adcq $0,%r14
- mulq %rbx
- addq %rax,%r14
- movq (%rsi),%rax
- movq %rdx,%r15
- adcq $0,%r15
- leaq 8(%rbp),%rbp
- leaq 8(%rdi),%rdi
- movl $7,%ecx
- jmp L$oop_mul
- .p2align 5
- L$oop_mul:
- movq (%rbp),%rbx
- mulq %rbx
- addq %rax,%r8
- movq 8(%rsi),%rax
- movq %r8,(%rdi)
- movq %rdx,%r8
- adcq $0,%r8
- mulq %rbx
- addq %rax,%r9
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r9,%r8
- movq %rdx,%r9
- adcq $0,%r9
- mulq %rbx
- addq %rax,%r10
- movq 24(%rsi),%rax
- adcq $0,%rdx
- addq %r10,%r9
- movq %rdx,%r10
- adcq $0,%r10
- mulq %rbx
- addq %rax,%r11
- movq 32(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%r10
- movq %rdx,%r11
- adcq $0,%r11
- mulq %rbx
- addq %rax,%r12
- movq 40(%rsi),%rax
- adcq $0,%rdx
- addq %r12,%r11
- movq %rdx,%r12
- adcq $0,%r12
- mulq %rbx
- addq %rax,%r13
- movq 48(%rsi),%rax
- adcq $0,%rdx
- addq %r13,%r12
- movq %rdx,%r13
- adcq $0,%r13
- mulq %rbx
- addq %rax,%r14
- movq 56(%rsi),%rax
- adcq $0,%rdx
- addq %r14,%r13
- movq %rdx,%r14
- leaq 8(%rbp),%rbp
- adcq $0,%r14
- mulq %rbx
- addq %rax,%r15
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r15,%r14
- movq %rdx,%r15
- adcq $0,%r15
- leaq 8(%rdi),%rdi
- decl %ecx
- jnz L$oop_mul
- movq %r8,(%rdi)
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r12,32(%rdi)
- movq %r13,40(%rdi)
- movq %r14,48(%rdi)
- movq %r15,56(%rdi)
- .byte 0xf3,0xc3
- .p2align 5
- __rsaz_512_mulx:
- mulxq (%rsi),%rbx,%r8
- movq $-6,%rcx
- mulxq 8(%rsi),%rax,%r9
- movq %rbx,8(%rsp)
- mulxq 16(%rsi),%rbx,%r10
- adcq %rax,%r8
- mulxq 24(%rsi),%rax,%r11
- adcq %rbx,%r9
- mulxq 32(%rsi),%rbx,%r12
- adcq %rax,%r10
- mulxq 40(%rsi),%rax,%r13
- adcq %rbx,%r11
- mulxq 48(%rsi),%rbx,%r14
- adcq %rax,%r12
- mulxq 56(%rsi),%rax,%r15
- movq 8(%rbp),%rdx
- adcq %rbx,%r13
- adcq %rax,%r14
- adcq $0,%r15
- xorq %rdi,%rdi
- jmp L$oop_mulx
- .p2align 5
- L$oop_mulx:
- movq %r8,%rbx
- mulxq (%rsi),%rax,%r8
- adcxq %rax,%rbx
- adoxq %r9,%r8
- mulxq 8(%rsi),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
- mulxq 16(%rsi),%rax,%r10
- adcxq %rax,%r9
- adoxq %r11,%r10
- mulxq 24(%rsi),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- .byte 0x3e,0xc4,0x62,0xfb,0xf6,0xa6,0x20,0x00,0x00,0x00
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 40(%rsi),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
- mulxq 48(%rsi),%rax,%r14
- adcxq %rax,%r13
- adoxq %r15,%r14
- mulxq 56(%rsi),%rax,%r15
- movq 64(%rbp,%rcx,8),%rdx
- movq %rbx,8+64-8(%rsp,%rcx,8)
- adcxq %rax,%r14
- adoxq %rdi,%r15
- adcxq %rdi,%r15
- incq %rcx
- jnz L$oop_mulx
- movq %r8,%rbx
- mulxq (%rsi),%rax,%r8
- adcxq %rax,%rbx
- adoxq %r9,%r8
- .byte 0xc4,0x62,0xfb,0xf6,0x8e,0x08,0x00,0x00,0x00
- adcxq %rax,%r8
- adoxq %r10,%r9
- .byte 0xc4,0x62,0xfb,0xf6,0x96,0x10,0x00,0x00,0x00
- adcxq %rax,%r9
- adoxq %r11,%r10
- mulxq 24(%rsi),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- mulxq 32(%rsi),%rax,%r12
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 40(%rsi),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
- .byte 0xc4,0x62,0xfb,0xf6,0xb6,0x30,0x00,0x00,0x00
- adcxq %rax,%r13
- adoxq %r15,%r14
- .byte 0xc4,0x62,0xfb,0xf6,0xbe,0x38,0x00,0x00,0x00
- adcxq %rax,%r14
- adoxq %rdi,%r15
- adcxq %rdi,%r15
- movq %rbx,8+64-8(%rsp)
- movq %r8,8+64(%rsp)
- movq %r9,8+64+8(%rsp)
- movq %r10,8+64+16(%rsp)
- movq %r11,8+64+24(%rsp)
- movq %r12,8+64+32(%rsp)
- movq %r13,8+64+40(%rsp)
- movq %r14,8+64+48(%rsp)
- movq %r15,8+64+56(%rsp)
- .byte 0xf3,0xc3
- .globl _rsaz_512_scatter4
- .p2align 4
- _rsaz_512_scatter4:
- leaq (%rdi,%rdx,8),%rdi
- movl $8,%r9d
- jmp L$oop_scatter
- .p2align 4
- L$oop_scatter:
- movq (%rsi),%rax
- leaq 8(%rsi),%rsi
- movq %rax,(%rdi)
- leaq 128(%rdi),%rdi
- decl %r9d
- jnz L$oop_scatter
- .byte 0xf3,0xc3
- .globl _rsaz_512_gather4
- .p2align 4
- _rsaz_512_gather4:
- movd %edx,%xmm8
- movdqa L$inc+16(%rip),%xmm1
- movdqa L$inc(%rip),%xmm0
- pshufd $0,%xmm8,%xmm8
- movdqa %xmm1,%xmm7
- movdqa %xmm1,%xmm2
- paddd %xmm0,%xmm1
- pcmpeqd %xmm8,%xmm0
- movdqa %xmm7,%xmm3
- paddd %xmm1,%xmm2
- pcmpeqd %xmm8,%xmm1
- movdqa %xmm7,%xmm4
- paddd %xmm2,%xmm3
- pcmpeqd %xmm8,%xmm2
- movdqa %xmm7,%xmm5
- paddd %xmm3,%xmm4
- pcmpeqd %xmm8,%xmm3
- movdqa %xmm7,%xmm6
- paddd %xmm4,%xmm5
- pcmpeqd %xmm8,%xmm4
- paddd %xmm5,%xmm6
- pcmpeqd %xmm8,%xmm5
- paddd %xmm6,%xmm7
- pcmpeqd %xmm8,%xmm6
- pcmpeqd %xmm8,%xmm7
- movl $8,%r9d
- jmp L$oop_gather
- .p2align 4
- L$oop_gather:
- movdqa 0(%rsi),%xmm8
- movdqa 16(%rsi),%xmm9
- movdqa 32(%rsi),%xmm10
- movdqa 48(%rsi),%xmm11
- pand %xmm0,%xmm8
- movdqa 64(%rsi),%xmm12
- pand %xmm1,%xmm9
- movdqa 80(%rsi),%xmm13
- pand %xmm2,%xmm10
- movdqa 96(%rsi),%xmm14
- pand %xmm3,%xmm11
- movdqa 112(%rsi),%xmm15
- leaq 128(%rsi),%rsi
- pand %xmm4,%xmm12
- pand %xmm5,%xmm13
- pand %xmm6,%xmm14
- pand %xmm7,%xmm15
- por %xmm10,%xmm8
- por %xmm11,%xmm9
- por %xmm12,%xmm8
- por %xmm13,%xmm9
- por %xmm14,%xmm8
- por %xmm15,%xmm9
- por %xmm9,%xmm8
- pshufd $0x4e,%xmm8,%xmm9
- por %xmm9,%xmm8
- movq %xmm8,(%rdi)
- leaq 8(%rdi),%rdi
- decl %r9d
- jnz L$oop_gather
- .byte 0xf3,0xc3
- L$SEH_end_rsaz_512_gather4:
- .p2align 6
- L$inc:
- .long 0,0, 1,1
- .long 2,2, 2,2