- .text
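- # GHASH for x86_64, CRYPTOGAMS (see the .byte credit string at the end of the file).
- # gcm_gmult_4bit: one GHASH multiplication of the 16-byte hash value at (%rdi) by H,
- # driven by the 4-bit (nibble) lookup table at (%rsi) and the L$rem_4bit reduction
- # constants; the result is byte-swapped and written back to (%rdi).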
- .globl _gcm_gmult_4bit
- .p2align 4
- _gcm_gmult_4bit:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $280,%rsp
- L$gmult_prologue:
- movzbq 15(%rdi),%r8
- leaq L$rem_4bit(%rip),%r11
- xorq %rax,%rax
- xorq %rbx,%rbx
- movb %r8b,%al
- movb %r8b,%bl
- shlb $4,%al
- movq $14,%rcx
- movq 8(%rsi,%rax,1),%r8
- movq (%rsi,%rax,1),%r9
- andb $0xf0,%bl
- movq %r8,%rdx
- jmp L$oop1
- .p2align 4
- L$oop1:
- shrq $4,%r8
- andq $0xf,%rdx
- movq %r9,%r10
- movb (%rdi,%rcx,1),%al
- shrq $4,%r9
- xorq 8(%rsi,%rbx,1),%r8
- shlq $60,%r10
- xorq (%rsi,%rbx,1),%r9
- movb %al,%bl
- xorq (%r11,%rdx,8),%r9
- movq %r8,%rdx
- shlb $4,%al
- xorq %r10,%r8
- decq %rcx
- js L$break1
- shrq $4,%r8
- andq $0xf,%rdx
- movq %r9,%r10
- shrq $4,%r9
- xorq 8(%rsi,%rax,1),%r8
- shlq $60,%r10
- xorq (%rsi,%rax,1),%r9
- andb $0xf0,%bl
- xorq (%r11,%rdx,8),%r9
- movq %r8,%rdx
- xorq %r10,%r8
- jmp L$oop1
- .p2align 4
- L$break1:
- shrq $4,%r8
- andq $0xf,%rdx
- movq %r9,%r10
- shrq $4,%r9
- xorq 8(%rsi,%rax,1),%r8
- shlq $60,%r10
- xorq (%rsi,%rax,1),%r9
- andb $0xf0,%bl
- xorq (%r11,%rdx,8),%r9
- movq %r8,%rdx
- xorq %r10,%r8
- shrq $4,%r8
- andq $0xf,%rdx
- movq %r9,%r10
- shrq $4,%r9
- xorq 8(%rsi,%rbx,1),%r8
- shlq $60,%r10
- xorq (%rsi,%rbx,1),%r9
- xorq %r10,%r8
- xorq (%r11,%rdx,8),%r9
- bswapq %r8
- bswapq %r9
- movq %r8,8(%rdi)
- movq %r9,(%rdi)
- leaq 280+48(%rsp),%rsi
- movq -8(%rsi),%rbx
- leaq (%rsi),%rsp
- L$gmult_epilogue:
- .byte 0xf3,0xc3
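- # gcm_ghash_4bit: GHASH over a whole buffer: hash value at (%rdi), 4-bit table at
- # (%rsi), input at (%rdx), byte length in %rcx.  The table is first rescheduled onto
- # the stack and %rbp, then L$outer_loop folds in one 16-byte block per iteration
- # using the 8-bit L$rem_8bit reduction table.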
- .globl _gcm_ghash_4bit
- .p2align 4
- _gcm_ghash_4bit:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
- subq $280,%rsp
- L$ghash_prologue:
- movq %rdx,%r14
- movq %rcx,%r15
- subq $-128,%rsi
- leaq 16+128(%rsp),%rbp
- xorl %edx,%edx
- movq 0+0-128(%rsi),%r8
- movq 0+8-128(%rsi),%rax
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq 16+0-128(%rsi),%r9
- shlb $4,%dl
- movq 16+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,0(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,0(%rbp)
- movq 32+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,0-128(%rbp)
- movq 32+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,1(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,8(%rbp)
- movq 48+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,8-128(%rbp)
- movq 48+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,2(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,16(%rbp)
- movq 64+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,16-128(%rbp)
- movq 64+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,3(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,24(%rbp)
- movq 80+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,24-128(%rbp)
- movq 80+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,4(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,32(%rbp)
- movq 96+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,32-128(%rbp)
- movq 96+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,5(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,40(%rbp)
- movq 112+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,40-128(%rbp)
- movq 112+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,6(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,48(%rbp)
- movq 128+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,48-128(%rbp)
- movq 128+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,7(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,56(%rbp)
- movq 144+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,56-128(%rbp)
- movq 144+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,8(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,64(%rbp)
- movq 160+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,64-128(%rbp)
- movq 160+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,9(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,72(%rbp)
- movq 176+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,72-128(%rbp)
- movq 176+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,10(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,80(%rbp)
- movq 192+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,80-128(%rbp)
- movq 192+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,11(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,88(%rbp)
- movq 208+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,88-128(%rbp)
- movq 208+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,12(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,96(%rbp)
- movq 224+0-128(%rsi),%r8
- shlb $4,%dl
- movq %rax,96-128(%rbp)
- movq 224+8-128(%rsi),%rax
- shlq $60,%r10
- movb %dl,13(%rsp)
- orq %r10,%rbx
- movb %al,%dl
- shrq $4,%rax
- movq %r8,%r10
- shrq $4,%r8
- movq %r9,104(%rbp)
- movq 240+0-128(%rsi),%r9
- shlb $4,%dl
- movq %rbx,104-128(%rbp)
- movq 240+8-128(%rsi),%rbx
- shlq $60,%r10
- movb %dl,14(%rsp)
- orq %r10,%rax
- movb %bl,%dl
- shrq $4,%rbx
- movq %r9,%r10
- shrq $4,%r9
- movq %r8,112(%rbp)
- shlb $4,%dl
- movq %rax,112-128(%rbp)
- shlq $60,%r10
- movb %dl,15(%rsp)
- orq %r10,%rbx
- movq %r9,120(%rbp)
- movq %rbx,120-128(%rbp)
- addq $-128,%rsi
- movq 8(%rdi),%r8
- movq 0(%rdi),%r9
- addq %r14,%r15
- leaq L$rem_8bit(%rip),%r11
- jmp L$outer_loop
- .p2align 4
- L$outer_loop:
- xorq (%r14),%r9
- movq 8(%r14),%rdx
- leaq 16(%r14),%r14
- xorq %r8,%rdx
- movq %r9,(%rdi)
- movq %rdx,8(%rdi)
- shrq $32,%rdx
- xorq %rax,%rax
- roll $8,%edx
- movb %dl,%al
- movzbl %dl,%ebx
- shlb $4,%al
- shrl $4,%ebx
- roll $8,%edx
- movq 8(%rsi,%rax,1),%r8
- movq (%rsi,%rax,1),%r9
- movb %dl,%al
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- xorq %r8,%r12
- movq %r9,%r10
- shrq $8,%r8
- movzbq %r12b,%r12
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- movl 8(%rdi),%edx
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- movl 4(%rdi),%edx
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- movl 0(%rdi),%edx
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- shrl $4,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r12,2),%r12
- movzbl %dl,%ebx
- shlb $4,%al
- movzbq (%rsp,%rcx,1),%r13
- shrl $4,%ebx
- shlq $48,%r12
- xorq %r8,%r13
- movq %r9,%r10
- xorq %r12,%r9
- shrq $8,%r8
- movzbq %r13b,%r13
- shrq $8,%r9
- xorq -128(%rbp,%rcx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rcx,8),%r9
- roll $8,%edx
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- movb %dl,%al
- xorq %r10,%r8
- movzwq (%r11,%r13,2),%r13
- movzbl %dl,%ecx
- shlb $4,%al
- movzbq (%rsp,%rbx,1),%r12
- andl $240,%ecx
- shlq $48,%r13
- xorq %r8,%r12
- movq %r9,%r10
- xorq %r13,%r9
- shrq $8,%r8
- movzbq %r12b,%r12
- movl -4(%rdi),%edx
- shrq $8,%r9
- xorq -128(%rbp,%rbx,8),%r8
- shlq $56,%r10
- xorq (%rbp,%rbx,8),%r9
- movzwq (%r11,%r12,2),%r12
- xorq 8(%rsi,%rax,1),%r8
- xorq (%rsi,%rax,1),%r9
- shlq $48,%r12
- xorq %r10,%r8
- xorq %r12,%r9
- movzbq %r8b,%r13
- shrq $4,%r8
- movq %r9,%r10
- shlb $4,%r13b
- shrq $4,%r9
- xorq 8(%rsi,%rcx,1),%r8
- movzwq (%r11,%r13,2),%r13
- shlq $60,%r10
- xorq (%rsi,%rcx,1),%r9
- xorq %r10,%r8
- shlq $48,%r13
- bswapq %r8
- xorq %r13,%r9
- bswapq %r9
- cmpq %r15,%r14
- jb L$outer_loop
- movq %r8,8(%rdi)
- movq %r9,(%rdi)
- leaq 280+48(%rsp),%rsi
- movq -48(%rsi),%r15
- movq -40(%rsi),%r14
- movq -32(%rsi),%r13
- movq -24(%rsi),%r12
- movq -16(%rsi),%rbp
- movq -8(%rsi),%rbx
- leaq 0(%rsi),%rsp
- L$ghash_epilogue:
- .byte 0xf3,0xc3
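- # gcm_init_clmul: PCLMULQDQ-based table setup.  The hash key at (%rsi) is
- # preprocessed (qword swap, shift left by one, conditional reduction with
- # L$0x1c2_polynomial), then H, H^2, H^3 and H^4 plus their pre-xored Karatsuba
- # halves are stored to the 96-byte table at (%rdi).  The .byte 102,15,58,68,...
- # sequences below are hand-encoded pclmulqdq instructions.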
- .globl _gcm_init_clmul
- .p2align 4
- _gcm_init_clmul:
- L$_init_clmul:
- movdqu (%rsi),%xmm2
- pshufd $78,%xmm2,%xmm2
- pshufd $255,%xmm2,%xmm4
- movdqa %xmm2,%xmm3
- psllq $1,%xmm2
- pxor %xmm5,%xmm5
- psrlq $63,%xmm3
- pcmpgtd %xmm4,%xmm5
- pslldq $8,%xmm3
- por %xmm3,%xmm2
- pand L$0x1c2_polynomial(%rip),%xmm5
- pxor %xmm5,%xmm2
- pshufd $78,%xmm2,%xmm6
- movdqa %xmm2,%xmm0
- pxor %xmm2,%xmm6
- movdqa %xmm0,%xmm1
- pshufd $78,%xmm0,%xmm3
- pxor %xmm0,%xmm3
- .byte 102,15,58,68,194,0
- .byte 102,15,58,68,202,17
- .byte 102,15,58,68,222,0
- pxor %xmm0,%xmm3
- pxor %xmm1,%xmm3
- movdqa %xmm3,%xmm4
- psrldq $8,%xmm3
- pslldq $8,%xmm4
- pxor %xmm3,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- pshufd $78,%xmm2,%xmm3
- pshufd $78,%xmm0,%xmm4
- pxor %xmm2,%xmm3
- movdqu %xmm2,0(%rdi)
- pxor %xmm0,%xmm4
- movdqu %xmm0,16(%rdi)
- .byte 102,15,58,15,227,8
- movdqu %xmm4,32(%rdi)
- movdqa %xmm0,%xmm1
- pshufd $78,%xmm0,%xmm3
- pxor %xmm0,%xmm3
- .byte 102,15,58,68,194,0
- .byte 102,15,58,68,202,17
- .byte 102,15,58,68,222,0
- pxor %xmm0,%xmm3
- pxor %xmm1,%xmm3
- movdqa %xmm3,%xmm4
- psrldq $8,%xmm3
- pslldq $8,%xmm4
- pxor %xmm3,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- movdqa %xmm0,%xmm5
- movdqa %xmm0,%xmm1
- pshufd $78,%xmm0,%xmm3
- pxor %xmm0,%xmm3
- .byte 102,15,58,68,194,0
- .byte 102,15,58,68,202,17
- .byte 102,15,58,68,222,0
- pxor %xmm0,%xmm3
- pxor %xmm1,%xmm3
- movdqa %xmm3,%xmm4
- psrldq $8,%xmm3
- pslldq $8,%xmm4
- pxor %xmm3,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- pshufd $78,%xmm5,%xmm3
- pshufd $78,%xmm0,%xmm4
- pxor %xmm5,%xmm3
- movdqu %xmm5,48(%rdi)
- pxor %xmm0,%xmm4
- movdqu %xmm0,64(%rdi)
- .byte 102,15,58,15,227,8
- movdqu %xmm4,80(%rdi)
- .byte 0xf3,0xc3
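- # gcm_gmult_clmul: single GHASH multiplication via PCLMULQDQ.  The hash value at
- # (%rdi) is byte-swapped with L$bswap_mask, Karatsuba-multiplied by the key at
- # (%rsi), reduced modulo the GHASH polynomial and written back to (%rdi).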
- .globl _gcm_gmult_clmul
- .p2align 4
- _gcm_gmult_clmul:
- L$_gmult_clmul:
- movdqu (%rdi),%xmm0
- movdqa L$bswap_mask(%rip),%xmm5
- movdqu (%rsi),%xmm2
- movdqu 32(%rsi),%xmm4
- .byte 102,15,56,0,197
- movdqa %xmm0,%xmm1
- pshufd $78,%xmm0,%xmm3
- pxor %xmm0,%xmm3
- .byte 102,15,58,68,194,0
- .byte 102,15,58,68,202,17
- .byte 102,15,58,68,220,0
- pxor %xmm0,%xmm3
- pxor %xmm1,%xmm3
- movdqa %xmm3,%xmm4
- psrldq $8,%xmm3
- pslldq $8,%xmm4
- pxor %xmm3,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- .byte 102,15,56,0,197
- movdqu %xmm0,(%rdi)
- .byte 0xf3,0xc3
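- # gcm_ghash_clmul: GHASH over a buffer with PCLMULQDQ: hash value at (%rdi), key
- # table at (%rsi), input at (%rdx), byte length in %rcx.  Subject to a CPU
- # capability check, L$mod4_loop aggregates four 16-byte blocks per iteration;
- # shorter inputs use the two-block L$mod_loop / L$even_tail and the single-block
- # L$odd_tail paths.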
- .globl _gcm_ghash_clmul
- .p2align 5
- _gcm_ghash_clmul:
- L$_ghash_clmul:
- movdqa L$bswap_mask(%rip),%xmm10
- movdqu (%rdi),%xmm0
- movdqu (%rsi),%xmm2
- movdqu 32(%rsi),%xmm7
- .byte 102,65,15,56,0,194
- subq $0x10,%rcx
- jz L$odd_tail
- movdqu 16(%rsi),%xmm6
- movl _OPENSSL_ia32cap_P+4(%rip),%eax
- cmpq $0x30,%rcx
- jb L$skip4x
- andl $71303168,%eax
- cmpl $4194304,%eax
- je L$skip4x
- subq $0x30,%rcx
- movq $0xA040608020C0E000,%rax
- movdqu 48(%rsi),%xmm14
- movdqu 64(%rsi),%xmm15
- movdqu 48(%rdx),%xmm3
- movdqu 32(%rdx),%xmm11
- .byte 102,65,15,56,0,218
- .byte 102,69,15,56,0,218
- movdqa %xmm3,%xmm5
- pshufd $78,%xmm3,%xmm4
- pxor %xmm3,%xmm4
- .byte 102,15,58,68,218,0
- .byte 102,15,58,68,234,17
- .byte 102,15,58,68,231,0
- movdqa %xmm11,%xmm13
- pshufd $78,%xmm11,%xmm12
- pxor %xmm11,%xmm12
- .byte 102,68,15,58,68,222,0
- .byte 102,68,15,58,68,238,17
- .byte 102,68,15,58,68,231,16
- xorps %xmm11,%xmm3
- xorps %xmm13,%xmm5
- movups 80(%rsi),%xmm7
- xorps %xmm12,%xmm4
- movdqu 16(%rdx),%xmm11
- movdqu 0(%rdx),%xmm8
- .byte 102,69,15,56,0,218
- .byte 102,69,15,56,0,194
- movdqa %xmm11,%xmm13
- pshufd $78,%xmm11,%xmm12
- pxor %xmm8,%xmm0
- pxor %xmm11,%xmm12
- .byte 102,69,15,58,68,222,0
- movdqa %xmm0,%xmm1
- pshufd $78,%xmm0,%xmm8
- pxor %xmm0,%xmm8
- .byte 102,69,15,58,68,238,17
- .byte 102,68,15,58,68,231,0
- xorps %xmm11,%xmm3
- xorps %xmm13,%xmm5
- leaq 64(%rdx),%rdx
- subq $0x40,%rcx
- jc L$tail4x
- jmp L$mod4_loop
- .p2align 5
- L$mod4_loop:
- .byte 102,65,15,58,68,199,0
- xorps %xmm12,%xmm4
- movdqu 48(%rdx),%xmm11
- .byte 102,69,15,56,0,218
- .byte 102,65,15,58,68,207,17
- xorps %xmm3,%xmm0
- movdqu 32(%rdx),%xmm3
- movdqa %xmm11,%xmm13
- .byte 102,68,15,58,68,199,16
- pshufd $78,%xmm11,%xmm12
- xorps %xmm5,%xmm1
- pxor %xmm11,%xmm12
- .byte 102,65,15,56,0,218
- movups 32(%rsi),%xmm7
- xorps %xmm4,%xmm8
- .byte 102,68,15,58,68,218,0
- pshufd $78,%xmm3,%xmm4
- pxor %xmm0,%xmm8
- movdqa %xmm3,%xmm5
- pxor %xmm1,%xmm8
- pxor %xmm3,%xmm4
- movdqa %xmm8,%xmm9
- .byte 102,68,15,58,68,234,17
- pslldq $8,%xmm8
- psrldq $8,%xmm9
- pxor %xmm8,%xmm0
- movdqa L$7_mask(%rip),%xmm8
- pxor %xmm9,%xmm1
- .byte 102,76,15,110,200
- pand %xmm0,%xmm8
- .byte 102,69,15,56,0,200
- pxor %xmm0,%xmm9
- .byte 102,68,15,58,68,231,0
- psllq $57,%xmm9
- movdqa %xmm9,%xmm8
- pslldq $8,%xmm9
- .byte 102,15,58,68,222,0
- psrldq $8,%xmm8
- pxor %xmm9,%xmm0
- pxor %xmm8,%xmm1
- movdqu 0(%rdx),%xmm8
- movdqa %xmm0,%xmm9
- psrlq $1,%xmm0
- .byte 102,15,58,68,238,17
- xorps %xmm11,%xmm3
- movdqu 16(%rdx),%xmm11
- .byte 102,69,15,56,0,218
- .byte 102,15,58,68,231,16
- xorps %xmm13,%xmm5
- movups 80(%rsi),%xmm7
- .byte 102,69,15,56,0,194
- pxor %xmm9,%xmm1
- pxor %xmm0,%xmm9
- psrlq $5,%xmm0
- movdqa %xmm11,%xmm13
- pxor %xmm12,%xmm4
- pshufd $78,%xmm11,%xmm12
- pxor %xmm9,%xmm0
- pxor %xmm8,%xmm1
- pxor %xmm11,%xmm12
- .byte 102,69,15,58,68,222,0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- movdqa %xmm0,%xmm1
- .byte 102,69,15,58,68,238,17
- xorps %xmm11,%xmm3
- pshufd $78,%xmm0,%xmm8
- pxor %xmm0,%xmm8
- .byte 102,68,15,58,68,231,0
- xorps %xmm13,%xmm5
- leaq 64(%rdx),%rdx
- subq $0x40,%rcx
- jnc L$mod4_loop
- L$tail4x:
- .byte 102,65,15,58,68,199,0
- .byte 102,65,15,58,68,207,17
- .byte 102,68,15,58,68,199,16
- xorps %xmm12,%xmm4
- xorps %xmm3,%xmm0
- xorps %xmm5,%xmm1
- pxor %xmm0,%xmm1
- pxor %xmm4,%xmm8
- pxor %xmm1,%xmm8
- pxor %xmm0,%xmm1
- movdqa %xmm8,%xmm9
- psrldq $8,%xmm8
- pslldq $8,%xmm9
- pxor %xmm8,%xmm1
- pxor %xmm9,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- addq $0x40,%rcx
- jz L$done
- movdqu 32(%rsi),%xmm7
- subq $0x10,%rcx
- jz L$odd_tail
- L$skip4x:
- movdqu (%rdx),%xmm8
- movdqu 16(%rdx),%xmm3
- .byte 102,69,15,56,0,194
- .byte 102,65,15,56,0,218
- pxor %xmm8,%xmm0
- movdqa %xmm3,%xmm5
- pshufd $78,%xmm3,%xmm4
- pxor %xmm3,%xmm4
- .byte 102,15,58,68,218,0
- .byte 102,15,58,68,234,17
- .byte 102,15,58,68,231,0
- leaq 32(%rdx),%rdx
- nop
- subq $0x20,%rcx
- jbe L$even_tail
- nop
- jmp L$mod_loop
- .p2align 5
- L$mod_loop:
- movdqa %xmm0,%xmm1
- movdqa %xmm4,%xmm8
- pshufd $78,%xmm0,%xmm4
- pxor %xmm0,%xmm4
- .byte 102,15,58,68,198,0
- .byte 102,15,58,68,206,17
- .byte 102,15,58,68,231,16
- pxor %xmm3,%xmm0
- pxor %xmm5,%xmm1
- movdqu (%rdx),%xmm9
- pxor %xmm0,%xmm8
- .byte 102,69,15,56,0,202
- movdqu 16(%rdx),%xmm3
- pxor %xmm1,%xmm8
- pxor %xmm9,%xmm1
- pxor %xmm8,%xmm4
- .byte 102,65,15,56,0,218
- movdqa %xmm4,%xmm8
- psrldq $8,%xmm8
- pslldq $8,%xmm4
- pxor %xmm8,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm3,%xmm5
- movdqa %xmm0,%xmm9
- movdqa %xmm0,%xmm8
- psllq $5,%xmm0
- pxor %xmm0,%xmm8
- .byte 102,15,58,68,218,0
- psllq $1,%xmm0
- pxor %xmm8,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm8
- pslldq $8,%xmm0
- psrldq $8,%xmm8
- pxor %xmm9,%xmm0
- pshufd $78,%xmm5,%xmm4
- pxor %xmm8,%xmm1
- pxor %xmm5,%xmm4
- movdqa %xmm0,%xmm9
- psrlq $1,%xmm0
- .byte 102,15,58,68,234,17
- pxor %xmm9,%xmm1
- pxor %xmm0,%xmm9
- psrlq $5,%xmm0
- pxor %xmm9,%xmm0
- leaq 32(%rdx),%rdx
- psrlq $1,%xmm0
- .byte 102,15,58,68,231,0
- pxor %xmm1,%xmm0
- subq $0x20,%rcx
- ja L$mod_loop
- L$even_tail:
- movdqa %xmm0,%xmm1
- movdqa %xmm4,%xmm8
- pshufd $78,%xmm0,%xmm4
- pxor %xmm0,%xmm4
- .byte 102,15,58,68,198,0
- .byte 102,15,58,68,206,17
- .byte 102,15,58,68,231,16
- pxor %xmm3,%xmm0
- pxor %xmm5,%xmm1
- pxor %xmm0,%xmm8
- pxor %xmm1,%xmm8
- pxor %xmm8,%xmm4
- movdqa %xmm4,%xmm8
- psrldq $8,%xmm8
- pslldq $8,%xmm4
- pxor %xmm8,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- testq %rcx,%rcx
- jnz L$done
- L$odd_tail:
- movdqu (%rdx),%xmm8
- .byte 102,69,15,56,0,194
- pxor %xmm8,%xmm0
- movdqa %xmm0,%xmm1
- pshufd $78,%xmm0,%xmm3
- pxor %xmm0,%xmm3
- .byte 102,15,58,68,194,0
- .byte 102,15,58,68,202,17
- .byte 102,15,58,68,223,0
- pxor %xmm0,%xmm3
- pxor %xmm1,%xmm3
- movdqa %xmm3,%xmm4
- psrldq $8,%xmm3
- pslldq $8,%xmm4
- pxor %xmm3,%xmm1
- pxor %xmm4,%xmm0
- movdqa %xmm0,%xmm4
- movdqa %xmm0,%xmm3
- psllq $5,%xmm0
- pxor %xmm0,%xmm3
- psllq $1,%xmm0
- pxor %xmm3,%xmm0
- psllq $57,%xmm0
- movdqa %xmm0,%xmm3
- pslldq $8,%xmm0
- psrldq $8,%xmm3
- pxor %xmm4,%xmm0
- pxor %xmm3,%xmm1
- movdqa %xmm0,%xmm4
- psrlq $1,%xmm0
- pxor %xmm4,%xmm1
- pxor %xmm0,%xmm4
- psrlq $5,%xmm0
- pxor %xmm4,%xmm0
- psrlq $1,%xmm0
- pxor %xmm1,%xmm0
- L$done:
- .byte 102,65,15,56,0,194
- movdqu %xmm0,(%rdi)
- .byte 0xf3,0xc3
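- # gcm_init_avx: AVX variant of the table setup.  The key is preprocessed as in
- # gcm_init_clmul, then L$init_loop_avx runs four times, storing eight powers of H
- # with interleaved Karatsuba constants into the table at (%rdi).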
- .globl _gcm_init_avx
- .p2align 5
- _gcm_init_avx:
- vzeroupper
- vmovdqu (%rsi),%xmm2
- vpshufd $78,%xmm2,%xmm2
- vpshufd $255,%xmm2,%xmm4
- vpsrlq $63,%xmm2,%xmm3
- vpsllq $1,%xmm2,%xmm2
- vpxor %xmm5,%xmm5,%xmm5
- vpcmpgtd %xmm4,%xmm5,%xmm5
- vpslldq $8,%xmm3,%xmm3
- vpor %xmm3,%xmm2,%xmm2
- vpand L$0x1c2_polynomial(%rip),%xmm5,%xmm5
- vpxor %xmm5,%xmm2,%xmm2
- vpunpckhqdq %xmm2,%xmm2,%xmm6
- vmovdqa %xmm2,%xmm0
- vpxor %xmm2,%xmm6,%xmm6
- movq $4,%r10
- jmp L$init_start_avx
- .p2align 5
- L$init_loop_avx:
- vpalignr $8,%xmm3,%xmm4,%xmm5
- vmovdqu %xmm5,-16(%rdi)
- vpunpckhqdq %xmm0,%xmm0,%xmm3
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1
- vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3
- vpxor %xmm0,%xmm1,%xmm4
- vpxor %xmm4,%xmm3,%xmm3
- vpslldq $8,%xmm3,%xmm4
- vpsrldq $8,%xmm3,%xmm3
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm3,%xmm1,%xmm1
- vpsllq $57,%xmm0,%xmm3
- vpsllq $62,%xmm0,%xmm4
- vpxor %xmm3,%xmm4,%xmm4
- vpsllq $63,%xmm0,%xmm3
- vpxor %xmm3,%xmm4,%xmm4
- vpslldq $8,%xmm4,%xmm3
- vpsrldq $8,%xmm4,%xmm4
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm4,%xmm1,%xmm1
- vpsrlq $1,%xmm0,%xmm4
- vpxor %xmm0,%xmm1,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $5,%xmm4,%xmm4
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $1,%xmm0,%xmm0
- vpxor %xmm1,%xmm0,%xmm0
- L$init_start_avx:
- vmovdqa %xmm0,%xmm5
- vpunpckhqdq %xmm0,%xmm0,%xmm3
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1
- vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3
- vpxor %xmm0,%xmm1,%xmm4
- vpxor %xmm4,%xmm3,%xmm3
- vpslldq $8,%xmm3,%xmm4
- vpsrldq $8,%xmm3,%xmm3
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm3,%xmm1,%xmm1
- vpsllq $57,%xmm0,%xmm3
- vpsllq $62,%xmm0,%xmm4
- vpxor %xmm3,%xmm4,%xmm4
- vpsllq $63,%xmm0,%xmm3
- vpxor %xmm3,%xmm4,%xmm4
- vpslldq $8,%xmm4,%xmm3
- vpsrldq $8,%xmm4,%xmm4
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm4,%xmm1,%xmm1
- vpsrlq $1,%xmm0,%xmm4
- vpxor %xmm0,%xmm1,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $5,%xmm4,%xmm4
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $1,%xmm0,%xmm0
- vpxor %xmm1,%xmm0,%xmm0
- vpshufd $78,%xmm5,%xmm3
- vpshufd $78,%xmm0,%xmm4
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqu %xmm5,0(%rdi)
- vpxor %xmm0,%xmm4,%xmm4
- vmovdqu %xmm0,16(%rdi)
- leaq 48(%rdi),%rdi
- subq $1,%r10
- jnz L$init_loop_avx
- vpalignr $8,%xmm4,%xmm3,%xmm5
- vmovdqu %xmm5,-16(%rdi)
- vzeroupper
- .byte 0xf3,0xc3
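- # gcm_gmult_avx: no separate AVX single-block path; tail-calls the CLMUL version.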
- .globl _gcm_gmult_avx
- .p2align 5
- _gcm_gmult_avx:
- jmp L$_gmult_clmul
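- # gcm_ghash_avx: GHASH over a buffer using AVX encodings of PCLMULQDQ.
- # L$oop8x_avx aggregates eight 16-byte blocks per iteration against the eight
- # stored powers of H; L$short_avx / L$tail_avx handle any remaining blocks.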
- .globl _gcm_ghash_avx
- .p2align 5
- _gcm_ghash_avx:
- vzeroupper
- vmovdqu (%rdi),%xmm10
- leaq L$0x1c2_polynomial(%rip),%r10
- leaq 64(%rsi),%rsi
- vmovdqu L$bswap_mask(%rip),%xmm13
- vpshufb %xmm13,%xmm10,%xmm10
- cmpq $0x80,%rcx
- jb L$short_avx
- subq $0x80,%rcx
- vmovdqu 112(%rdx),%xmm14
- vmovdqu 0-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm14
- vmovdqu 32-64(%rsi),%xmm7
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vmovdqu 96(%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm14,%xmm9,%xmm9
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 16-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vmovdqu 80(%rdx),%xmm14
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
- vpshufb %xmm13,%xmm14,%xmm14
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 48-64(%rsi),%xmm6
- vpxor %xmm14,%xmm9,%xmm9
- vmovdqu 64(%rdx),%xmm15
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 80-64(%rsi),%xmm7
- vpshufb %xmm13,%xmm15,%xmm15
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm1,%xmm4,%xmm4
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 64-64(%rsi),%xmm6
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu 48(%rdx),%xmm14
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpxor %xmm4,%xmm1,%xmm1
- vpshufb %xmm13,%xmm14,%xmm14
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 96-64(%rsi),%xmm6
- vpxor %xmm5,%xmm2,%xmm2
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 128-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vmovdqu 32(%rdx),%xmm15
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm1,%xmm4,%xmm4
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 112-64(%rsi),%xmm6
- vpxor %xmm2,%xmm5,%xmm5
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu 16(%rdx),%xmm14
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpxor %xmm4,%xmm1,%xmm1
- vpshufb %xmm13,%xmm14,%xmm14
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 144-64(%rsi),%xmm6
- vpxor %xmm5,%xmm2,%xmm2
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 176-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vmovdqu (%rdx),%xmm15
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm1,%xmm4,%xmm4
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 160-64(%rsi),%xmm6
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2
- leaq 128(%rdx),%rdx
- cmpq $0x80,%rcx
- jb L$tail_avx
- vpxor %xmm10,%xmm15,%xmm15
- subq $0x80,%rcx
- jmp L$oop8x_avx
- .p2align 5
- L$oop8x_avx:
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vmovdqu 112(%rdx),%xmm14
- vpxor %xmm0,%xmm3,%xmm3
- vpxor %xmm15,%xmm8,%xmm8
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm10
- vpshufb %xmm13,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm11
- vmovdqu 0-64(%rsi),%xmm6
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm12
- vmovdqu 32-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vmovdqu 96(%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm3,%xmm10,%xmm10
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vxorps %xmm4,%xmm11,%xmm11
- vmovdqu 16-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm5,%xmm12,%xmm12
- vxorps %xmm15,%xmm8,%xmm8
- vmovdqu 80(%rdx),%xmm14
- vpxor %xmm10,%xmm12,%xmm12
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpxor %xmm11,%xmm12,%xmm12
- vpslldq $8,%xmm12,%xmm9
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vpsrldq $8,%xmm12,%xmm12
- vpxor %xmm9,%xmm10,%xmm10
- vmovdqu 48-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm14
- vxorps %xmm12,%xmm11,%xmm11
- vpxor %xmm1,%xmm4,%xmm4
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 80-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
- vmovdqu 64(%rdx),%xmm15
- vpalignr $8,%xmm10,%xmm10,%xmm12
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpshufb %xmm13,%xmm15,%xmm15
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 64-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm4,%xmm1,%xmm1
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vxorps %xmm15,%xmm8,%xmm8
- vpxor %xmm5,%xmm2,%xmm2
- vmovdqu 48(%rdx),%xmm14
- vpclmulqdq $0x10,(%r10),%xmm10,%xmm10
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpshufb %xmm13,%xmm14,%xmm14
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 96-64(%rsi),%xmm6
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 128-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
- vmovdqu 32(%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpshufb %xmm13,%xmm15,%xmm15
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 112-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm4,%xmm1,%xmm1
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
- vpxor %xmm5,%xmm2,%xmm2
- vxorps %xmm12,%xmm10,%xmm10
- vmovdqu 16(%rdx),%xmm14
- vpalignr $8,%xmm10,%xmm10,%xmm12
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpshufb %xmm13,%xmm14,%xmm14
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 144-64(%rsi),%xmm6
- vpclmulqdq $0x10,(%r10),%xmm10,%xmm10
- vxorps %xmm11,%xmm12,%xmm12
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 176-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
- vmovdqu (%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 160-64(%rsi),%xmm6
- vpxor %xmm12,%xmm15,%xmm15
- vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2
- vpxor %xmm10,%xmm15,%xmm15
- leaq 128(%rdx),%rdx
- subq $0x80,%rcx
- jnc L$oop8x_avx
- addq $0x80,%rcx
- jmp L$tail_no_xor_avx
- .p2align 5
- L$short_avx:
- vmovdqu -16(%rdx,%rcx,1),%xmm14
- leaq (%rdx,%rcx,1),%rdx
- vmovdqu 0-64(%rsi),%xmm6
- vmovdqu 32-64(%rsi),%xmm7
- vpshufb %xmm13,%xmm14,%xmm15
- vmovdqa %xmm0,%xmm3
- vmovdqa %xmm1,%xmm4
- vmovdqa %xmm2,%xmm5
- subq $0x10,%rcx
- jz L$tail_avx
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -32(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 16-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vpsrldq $8,%xmm7,%xmm7
- subq $0x10,%rcx
- jz L$tail_avx
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -48(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 48-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovdqu 80-64(%rsi),%xmm7
- subq $0x10,%rcx
- jz L$tail_avx
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -64(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 64-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vpsrldq $8,%xmm7,%xmm7
- subq $0x10,%rcx
- jz L$tail_avx
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -80(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 96-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovdqu 128-64(%rsi),%xmm7
- subq $0x10,%rcx
- jz L$tail_avx
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -96(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 112-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vpsrldq $8,%xmm7,%xmm7
- subq $0x10,%rcx
- jz L$tail_avx
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -112(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 144-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovq 184-64(%rsi),%xmm7
- subq $0x10,%rcx
- jmp L$tail_avx
- .p2align 5
- L$tail_avx:
- vpxor %xmm10,%xmm15,%xmm15
- L$tail_no_xor_avx:
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovdqu (%r10),%xmm12
- vpxor %xmm0,%xmm3,%xmm10
- vpxor %xmm1,%xmm4,%xmm11
- vpxor %xmm2,%xmm5,%xmm5
- vpxor %xmm10,%xmm5,%xmm5
- vpxor %xmm11,%xmm5,%xmm5
- vpslldq $8,%xmm5,%xmm9
- vpsrldq $8,%xmm5,%xmm5
- vpxor %xmm9,%xmm10,%xmm10
- vpxor %xmm5,%xmm11,%xmm11
- vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9
- vpalignr $8,%xmm10,%xmm10,%xmm10
- vpxor %xmm9,%xmm10,%xmm10
- vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9
- vpalignr $8,%xmm10,%xmm10,%xmm10
- vpxor %xmm11,%xmm10,%xmm10
- vpxor %xmm9,%xmm10,%xmm10
- cmpq $0,%rcx
- jne L$short_avx
- vpshufb %xmm13,%xmm10,%xmm10
- vmovdqu %xmm10,(%rdi)
- vzeroupper
- .byte 0xf3,0xc3
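- # Constant data: the L$bswap_mask byte-swap shuffle mask, the L$0x1c2_polynomial
- # reduction constant, the L$7_mask helpers, and the L$rem_4bit / L$rem_8bit
- # reduction tables used by the table-driven routines above.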
- .p2align 6
- L$bswap_mask:
- .byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
- L$0x1c2_polynomial:
- .byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
- L$7_mask:
- .long 7,0,7,0
- L$7_mask_poly:
- .long 7,0,450,0
- .p2align 6
- L$rem_4bit:
- .long 0,0,0,471859200,0,943718400,0,610271232
- .long 0,1887436800,0,1822425088,0,1220542464,0,1423966208
- .long 0,3774873600,0,4246732800,0,3644850176,0,3311403008
- .long 0,2441084928,0,2376073216,0,2847932416,0,3051356160
- L$rem_8bit:
- .value 0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E
- .value 0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E
- .value 0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E
- .value 0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E
- .value 0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E
- .value 0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E
- .value 0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E
- .value 0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E
- .value 0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE
- .value 0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE
- .value 0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE
- .value 0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE
- .value 0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E
- .value 0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E
- .value 0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE
- .value 0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE
- .value 0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E
- .value 0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E
- .value 0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E
- .value 0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E
- .value 0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E
- .value 0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E
- .value 0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E
- .value 0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E
- .value 0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE
- .value 0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE
- .value 0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE
- .value 0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE
- .value 0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E
- .value 0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E
- .value 0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE
- .value 0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE
- .byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
- .p2align 6