# rc4-x86_64.s

# void RC4(RC4_KEY *key, size_t len, const unsigned char *inp, unsigned char *out)
.text
.globl	_RC4
.p2align	4
_RC4:
	orq	%rsi,%rsi		# len==0? nothing to do
	jne	L$entry
	.byte	0xf3,0xc3		# repz ret
L$entry:
	pushq	%rbx
	pushq	%r12
	pushq	%r13
L$prologue:
	movq	%rsi,%r11		# %r11 = len
	movq	%rdx,%r12		# %r12 = inp
	movq	%rcx,%r13		# %r13 = out
	xorq	%r10,%r10		# %r10b = x (key->x)
	xorq	%rcx,%rcx		# %cl   = y (key->y)

	leaq	8(%rdi),%rdi		# %rdi -> key->data[]
	movb	-8(%rdi),%r10b
	movb	-4(%rdi),%cl
	cmpl	$-1,256(%rdi)		# byte-array schedule? (marker set by RC4_set_key)
	je	L$RC4_CHAR
	movl	_OPENSSL_ia32cap_P(%rip),%r8d
	xorq	%rbx,%rbx
	incb	%r10b
	subq	%r10,%rbx
	subq	%r12,%r13		# %r13 = out-inp, stores go via (%r12,%r13,1)
	movl	(%rdi,%r10,4),%eax
	testq	$-16,%r11
	jz	L$loop1
	btl	$30,%r8d		# ia32cap bit 30 (Intel): take the 16x SSE path
	jc	L$intel
	andq	$7,%rbx
	leaq	1(%r10),%rsi
	jz	L$oop8
	subq	%rbx,%r11
L$oop8_warmup:				# single-byte rounds until x is 8-aligned
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	%edx,(%rdi,%r10,4)
	addb	%dl,%al
	incb	%r10b
	movl	(%rdi,%rax,4),%edx
	movl	(%rdi,%r10,4),%eax
	xorb	(%r12),%dl
	movb	%dl,(%r12,%r13,1)
	leaq	1(%r12),%r12
	decq	%rbx
	jnz	L$oop8_warmup

	leaq	1(%r10),%rsi
	jmp	L$oop8
.p2align	4
L$oop8:					# 8 rounds unrolled, keystream collected in %r8
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	0(%rdi,%rsi,4),%ebx
	rorq	$8,%r8
	movl	%edx,0(%rdi,%r10,4)
	addb	%al,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	%bl,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	movl	4(%rdi,%rsi,4),%eax
	rorq	$8,%r8
	movl	%edx,4(%rdi,%r10,4)
	addb	%bl,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	8(%rdi,%rsi,4),%ebx
	rorq	$8,%r8
	movl	%edx,8(%rdi,%r10,4)
	addb	%al,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	%bl,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	movl	12(%rdi,%rsi,4),%eax
	rorq	$8,%r8
	movl	%edx,12(%rdi,%r10,4)
	addb	%bl,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	16(%rdi,%rsi,4),%ebx
	rorq	$8,%r8
	movl	%edx,16(%rdi,%r10,4)
	addb	%al,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	%bl,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	movl	20(%rdi,%rsi,4),%eax
	rorq	$8,%r8
	movl	%edx,20(%rdi,%r10,4)
	addb	%bl,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	24(%rdi,%rsi,4),%ebx
	rorq	$8,%r8
	movl	%edx,24(%rdi,%r10,4)
	addb	%al,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	$8,%sil
	addb	%bl,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	movl	-4(%rdi,%rsi,4),%eax
	rorq	$8,%r8
	movl	%edx,28(%rdi,%r10,4)
	addb	%bl,%dl
	movb	(%rdi,%rdx,4),%r8b
	addb	$8,%r10b
	rorq	$8,%r8
	subq	$8,%r11

	xorq	(%r12),%r8		# XOR 8 keystream bytes with input
	movq	%r8,(%r12,%r13,1)
	leaq	8(%r12),%r12

	testq	$-8,%r11
	jnz	L$oop8
	cmpq	$0,%r11
	jne	L$loop1
	jmp	L$exit

.p2align	4
L$intel:				# 16x SSE path
	testq	$-32,%r11
	jz	L$loop1
	andq	$15,%rbx
	jz	L$oop16_is_hot
	subq	%rbx,%r11
L$oop16_warmup:				# single-byte rounds until x is 16-aligned
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	%edx,(%rdi,%r10,4)
	addb	%dl,%al
	incb	%r10b
	movl	(%rdi,%rax,4),%edx
	movl	(%rdi,%r10,4),%eax
	xorb	(%r12),%dl
	movb	%dl,(%r12,%r13,1)
	leaq	1(%r12),%r12
	decq	%rbx
	jnz	L$oop16_warmup

	movq	%rcx,%rbx
	xorq	%rcx,%rcx
	movb	%bl,%cl

L$oop16_is_hot:
	leaq	(%rdi,%r10,4),%rsi
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	pxor	%xmm0,%xmm0
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	4(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,0(%rsi)
	addb	%bl,%cl
	pinsrw	$0,(%rdi,%rax,4),%xmm0
	jmp	L$oop16_enter
.p2align	4
L$oop16:				# 16 rounds unrolled, keystream gathered into %xmm0/%xmm1
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	pxor	%xmm0,%xmm2
	psllq	$8,%xmm1
	pxor	%xmm0,%xmm0
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	4(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,0(%rsi)
	pxor	%xmm1,%xmm2
	addb	%bl,%cl
	pinsrw	$0,(%rdi,%rax,4),%xmm0
	movdqu	%xmm2,(%r12,%r13,1)	# store previous 16 output bytes
	leaq	16(%r12),%r12
L$oop16_enter:
	movl	(%rdi,%rcx,4),%edx
	pxor	%xmm1,%xmm1
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	8(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,4(%rsi)
	addb	%al,%cl
	pinsrw	$0,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	12(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,8(%rsi)
	addb	%bl,%cl
	pinsrw	$1,(%rdi,%rax,4),%xmm0
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	16(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,12(%rsi)
	addb	%al,%cl
	pinsrw	$1,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	20(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,16(%rsi)
	addb	%bl,%cl
	pinsrw	$2,(%rdi,%rax,4),%xmm0
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	24(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,20(%rsi)
	addb	%al,%cl
	pinsrw	$2,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	28(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,24(%rsi)
	addb	%bl,%cl
	pinsrw	$3,(%rdi,%rax,4),%xmm0
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	32(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,28(%rsi)
	addb	%al,%cl
	pinsrw	$3,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	36(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,32(%rsi)
	addb	%bl,%cl
	pinsrw	$4,(%rdi,%rax,4),%xmm0
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	40(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,36(%rsi)
	addb	%al,%cl
	pinsrw	$4,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	44(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,40(%rsi)
	addb	%bl,%cl
	pinsrw	$5,(%rdi,%rax,4),%xmm0
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	48(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,44(%rsi)
	addb	%al,%cl
	pinsrw	$5,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	52(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,48(%rsi)
	addb	%bl,%cl
	pinsrw	$6,(%rdi,%rax,4),%xmm0
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movl	56(%rsi),%eax
	movzbl	%bl,%ebx
	movl	%edx,52(%rsi)
	addb	%al,%cl
	pinsrw	$6,(%rdi,%rbx,4),%xmm1
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	addb	%dl,%al
	movl	60(%rsi),%ebx
	movzbl	%al,%eax
	movl	%edx,56(%rsi)
	addb	%bl,%cl
	pinsrw	$7,(%rdi,%rax,4),%xmm0
	addb	$16,%r10b
	movdqu	(%r12),%xmm2		# load next 16 input bytes
	movl	(%rdi,%rcx,4),%edx
	movl	%ebx,(%rdi,%rcx,4)
	addb	%dl,%bl
	movzbl	%bl,%ebx
	movl	%edx,60(%rsi)
	leaq	(%rdi,%r10,4),%rsi
	pinsrw	$7,(%rdi,%rbx,4),%xmm1
	movl	(%rsi),%eax
	movq	%rcx,%rbx
	xorq	%rcx,%rcx
	subq	$16,%r11
	movb	%bl,%cl
	testq	$-16,%r11
	jnz	L$oop16

	psllq	$8,%xmm1
	pxor	%xmm0,%xmm2		# XOR keystream with input
	pxor	%xmm1,%xmm2
	movdqu	%xmm2,(%r12,%r13,1)
	leaq	16(%r12),%r12

	cmpq	$0,%r11
	jne	L$loop1
	jmp	L$exit

.p2align	4
L$loop1:				# one byte per iteration (short input / tail)
	addb	%al,%cl
	movl	(%rdi,%rcx,4),%edx
	movl	%eax,(%rdi,%rcx,4)
	movl	%edx,(%rdi,%r10,4)
	addb	%dl,%al
	incb	%r10b
	movl	(%rdi,%rax,4),%edx
	movl	(%rdi,%r10,4),%eax
	xorb	(%r12),%dl
	movb	%dl,(%r12,%r13,1)
	leaq	1(%r12),%r12
	decq	%r11
	jnz	L$loop1
	jmp	L$exit

.p2align	4
L$RC4_CHAR:				# byte-array key schedule (see RC4_set_key)
	addb	$1,%r10b
	movzbl	(%rdi,%r10,1),%eax
	testq	$-8,%r11
	jz	L$cloop1
	jmp	L$cloop8
.p2align	4
L$cloop8:				# 8 rounds unrolled, keystream collected in %r8d:%r9d;
					# the jne/mov pairs emulate cmov: they fix the prefetched
					# S[x+1] value when it aliases the y slot just overwritten
	movl	(%r12),%r8d
	movl	4(%r12),%r9d
	addb	%al,%cl
	leaq	1(%r10),%rsi
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%sil,%esi
	movzbl	(%rdi,%rsi,1),%ebx
	movb	%al,(%rdi,%rcx,1)
	cmpq	%rsi,%rcx
	movb	%dl,(%rdi,%r10,1)
	jne	L$cmov0
	movq	%rax,%rbx
L$cmov0:
	addb	%al,%dl
	xorb	(%rdi,%rdx,1),%r8b
	rorl	$8,%r8d
	addb	%bl,%cl
	leaq	1(%rsi),%r10
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%r10b,%r10d
	movzbl	(%rdi,%r10,1),%eax
	movb	%bl,(%rdi,%rcx,1)
	cmpq	%r10,%rcx
	movb	%dl,(%rdi,%rsi,1)
	jne	L$cmov1
	movq	%rbx,%rax
L$cmov1:
	addb	%bl,%dl
	xorb	(%rdi,%rdx,1),%r8b
	rorl	$8,%r8d
	addb	%al,%cl
	leaq	1(%r10),%rsi
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%sil,%esi
	movzbl	(%rdi,%rsi,1),%ebx
	movb	%al,(%rdi,%rcx,1)
	cmpq	%rsi,%rcx
	movb	%dl,(%rdi,%r10,1)
	jne	L$cmov2
	movq	%rax,%rbx
L$cmov2:
	addb	%al,%dl
	xorb	(%rdi,%rdx,1),%r8b
	rorl	$8,%r8d
	addb	%bl,%cl
	leaq	1(%rsi),%r10
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%r10b,%r10d
	movzbl	(%rdi,%r10,1),%eax
	movb	%bl,(%rdi,%rcx,1)
	cmpq	%r10,%rcx
	movb	%dl,(%rdi,%rsi,1)
	jne	L$cmov3
	movq	%rbx,%rax
L$cmov3:
	addb	%bl,%dl
	xorb	(%rdi,%rdx,1),%r8b
	rorl	$8,%r8d
	addb	%al,%cl
	leaq	1(%r10),%rsi
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%sil,%esi
	movzbl	(%rdi,%rsi,1),%ebx
	movb	%al,(%rdi,%rcx,1)
	cmpq	%rsi,%rcx
	movb	%dl,(%rdi,%r10,1)
	jne	L$cmov4
	movq	%rax,%rbx
L$cmov4:
	addb	%al,%dl
	xorb	(%rdi,%rdx,1),%r9b
	rorl	$8,%r9d
	addb	%bl,%cl
	leaq	1(%rsi),%r10
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%r10b,%r10d
	movzbl	(%rdi,%r10,1),%eax
	movb	%bl,(%rdi,%rcx,1)
	cmpq	%r10,%rcx
	movb	%dl,(%rdi,%rsi,1)
	jne	L$cmov5
	movq	%rbx,%rax
L$cmov5:
	addb	%bl,%dl
	xorb	(%rdi,%rdx,1),%r9b
	rorl	$8,%r9d
	addb	%al,%cl
	leaq	1(%r10),%rsi
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%sil,%esi
	movzbl	(%rdi,%rsi,1),%ebx
	movb	%al,(%rdi,%rcx,1)
	cmpq	%rsi,%rcx
	movb	%dl,(%rdi,%r10,1)
	jne	L$cmov6
	movq	%rax,%rbx
L$cmov6:
	addb	%al,%dl
	xorb	(%rdi,%rdx,1),%r9b
	rorl	$8,%r9d
	addb	%bl,%cl
	leaq	1(%rsi),%r10
	movzbl	(%rdi,%rcx,1),%edx
	movzbl	%r10b,%r10d
	movzbl	(%rdi,%r10,1),%eax
	movb	%bl,(%rdi,%rcx,1)
	cmpq	%r10,%rcx
	movb	%dl,(%rdi,%rsi,1)
	jne	L$cmov7
	movq	%rbx,%rax
L$cmov7:
	addb	%bl,%dl
	xorb	(%rdi,%rdx,1),%r9b
	rorl	$8,%r9d
	leaq	-8(%r11),%r11
	movl	%r8d,(%r13)		# char path keeps %r13 as the raw out pointer
	leaq	8(%r12),%r12
	movl	%r9d,4(%r13)
	leaq	8(%r13),%r13

	testq	$-8,%r11
	jnz	L$cloop8
	cmpq	$0,%r11
	jne	L$cloop1
	jmp	L$exit
.p2align	4
L$cloop1:				# one byte per iteration, byte-array schedule
	addb	%al,%cl
	movzbl	%cl,%ecx
	movzbl	(%rdi,%rcx,1),%edx
	movb	%al,(%rdi,%rcx,1)
	movb	%dl,(%rdi,%r10,1)
	addb	%al,%dl
	addb	$1,%r10b
	movzbl	%dl,%edx
	movzbl	%r10b,%r10d
	movzbl	(%rdi,%rdx,1),%edx
	movzbl	(%rdi,%r10,1),%eax
	xorb	(%r12),%dl
	leaq	1(%r12),%r12
	movb	%dl,(%r13)
	leaq	1(%r13),%r13
	subq	$1,%r11
	jnz	L$cloop1
	jmp	L$exit

.p2align	4
L$exit:
	subb	$1,%r10b
	movl	%r10d,-8(%rdi)		# store updated key->x
	movl	%ecx,-4(%rdi)		# store updated key->y

	movq	(%rsp),%r13
	movq	8(%rsp),%r12
	movq	16(%rsp),%rbx
	addq	$24,%rsp
L$epilogue:
	.byte	0xf3,0xc3		# repz ret
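
# All three paths above (8x int, 16x SSE, byte-array) compute the same RC4
# PRGA. A minimal C reference sketch follows, not part of this file's build;
# the struct layout mirrors the -8/-4/+0 offsets used above and is stated as
# an assumption, and rc4_ref is a hypothetical name:
#
#	typedef struct { unsigned int x, y, data[256]; } RC4_KEY;
#
#	void rc4_ref(RC4_KEY *key, size_t len,
#	             const unsigned char *inp, unsigned char *out)
#	{
#		unsigned int *S = key->data, x = key->x, y = key->y, t;
#		while (len--) {
#			x = (x + 1) & 0xff;			/* advance i          */
#			y = (y + S[x]) & 0xff;			/* advance j          */
#			t = S[x]; S[x] = S[y]; S[y] = t;	/* swap S[i], S[j]    */
#			*out++ = *inp++ ^ (unsigned char)S[(S[x] + S[y]) & 0xff];
#		}
#		key->x = x; key->y = y;
#	}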
# void RC4_set_key(RC4_KEY *key, int len, const unsigned char *data)
.globl	_RC4_set_key
.p2align	4
_RC4_set_key:
	leaq	8(%rdi),%rdi		# %rdi -> key->data[]
	leaq	(%rdx,%rsi,1),%rdx	# %rdx -> end of key material
	negq	%rsi
	movq	%rsi,%rcx		# %rsi = %rcx = -len, so (%rdx,%rsi,1) walks the key
	xorl	%eax,%eax
	xorq	%r9,%r9
	xorq	%r10,%r10
	xorq	%r11,%r11
	movl	_OPENSSL_ia32cap_P(%rip),%r8d
	btl	$20,%r8d		# ia32cap bit 20: build the byte-array ("char") schedule
	jc	L$c1stloop
	jmp	L$w1stloop

.p2align	4
L$w1stloop:				# identity init: S[i] = i
	movl	%eax,(%rdi,%rax,4)
	addb	$1,%al
	jnc	L$w1stloop

	xorq	%r9,%r9
	xorq	%r8,%r8
.p2align	4
L$w2ndloop:				# KSA scramble: j += S[i] + key[i % len]; swap S[i], S[j]
	movl	(%rdi,%r9,4),%r10d
	addb	(%rdx,%rsi,1),%r8b
	addb	%r10b,%r8b
	addq	$1,%rsi
	movl	(%rdi,%r8,4),%r11d
	cmovzq	%rcx,%rsi		# wrap key index back to -len
	movl	%r10d,(%rdi,%r8,4)
	movl	%r11d,(%rdi,%r9,4)
	addb	$1,%r9b
	jnc	L$w2ndloop
	jmp	L$exit_key

.p2align	4
L$c1stloop:				# byte-array schedule: identity init
	movb	%al,(%rdi,%rax,1)
	addb	$1,%al
	jnc	L$c1stloop

	xorq	%r9,%r9
	xorq	%r8,%r8
.p2align	4
L$c2ndloop:				# byte-array schedule: KSA scramble
	movb	(%rdi,%r9,1),%r10b
	addb	(%rdx,%rsi,1),%r8b
	addb	%r10b,%r8b
	addq	$1,%rsi
	movb	(%rdi,%r8,1),%r11b
	jnz	L$cnowrap
	movq	%rcx,%rsi		# wrap key index back to -len
L$cnowrap:
	movb	%r10b,(%rdi,%r8,1)
	movb	%r11b,(%rdi,%r9,1)
	addb	$1,%r9b
	jnc	L$c2ndloop
	movl	$-1,256(%rdi)		# mark schedule as byte-array for RC4()

.p2align	4
L$exit_key:
	xorl	%eax,%eax
	movl	%eax,-8(%rdi)		# key->x = 0
	movl	%eax,-4(%rdi)		# key->y = 0
	.byte	0xf3,0xc3		# repz ret
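
# The two loop pairs above implement the standard RC4 key schedule. A minimal
# C sketch under the same RC4_KEY layout assumption as the note after _RC4
# (rc4_set_key_ref is a hypothetical name):
#
#	void rc4_set_key_ref(RC4_KEY *key, int len, const unsigned char *data)
#	{
#		unsigned int *S = key->data, t;
#		int i, j = 0;
#		for (i = 0; i < 256; i++)		/* L$w1stloop / L$c1stloop */
#			S[i] = i;
#		for (i = 0; i < 256; i++) {		/* L$w2ndloop / L$c2ndloop */
#			j = (j + S[i] + data[i % len]) & 0xff;
#			t = S[i]; S[i] = S[j]; S[j] = t;
#		}
#		key->x = key->y = 0;
#	}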
# const char *RC4_options(void)
.globl	_RC4_options
.p2align	4
_RC4_options:
	leaq	L$opts(%rip),%rax	# default: "rc4(8x,int)"
	movl	_OPENSSL_ia32cap_P(%rip),%edx
	btl	$20,%edx
	jc	L$8xchar
	btl	$30,%edx
	jnc	L$done
	addq	$25,%rax		# "rc4(16x,int)"
	.byte	0xf3,0xc3		# repz ret
L$8xchar:
	addq	$12,%rax		# "rc4(8x,char)"
L$done:
	.byte	0xf3,0xc3		# repz ret

.p2align	6
L$opts:
.byte	114,99,52,40,56,120,44,105,110,116,41,0		# "rc4(8x,int)"
.byte	114,99,52,40,56,120,44,99,104,97,114,41,0	# "rc4(8x,char)"
.byte	114,99,52,40,49,54,120,44,105,110,116,41,0	# "rc4(16x,int)"
.byte	82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0	# "RC4 for x86_64, CRYPTOGAMS by <appro@openssl.org>"
.p2align	6