@ aesv8-armx.S — AES via ARMv8 Cryptography Extension instructions,
@ assembled for a 32-bit (AArch32) build. The AESE/AESD/AESMC/AESIMC
@ instructions are emitted as raw .byte sequences so older binutils
@ that lack crypto-extension support can still assemble the file.
#include "arm_arch.h"
#if __ARM_MAX_ARCH__>=7
.text
.arch armv7-a @ don't confuse not-so-latest binutils with argv8 :-)
.fpu neon
.code 32 @ ARM (not Thumb) encoding
#undef __thumb2__
.align 5
@ Constants for key expansion:
@   word 0..3: initial round constant 0x01 splatted across lanes
@   word 4..7: byte-shuffle mask for vtbl implementing RotWord on the
@              top 32-bit word of a q register ("rotate-n-splat")
@   word 8..11: round constant 0x1b, reloaded once the doubled rcon
@              sequence (1,2,4,...,0x80) is exhausted
.Lrcon:
.long 0x01,0x01,0x01,0x01
.long 0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d,0x0c0f0e0d @ rotate-n-splat
.long 0x1b,0x1b,0x1b,0x1b
.globl aes_v8_set_encrypt_key
.type aes_v8_set_encrypt_key,%function
.align 5
@ int aes_v8_set_encrypt_key(const unsigned char *userKey /* r0 */,
@                            int bits /* r1 */, AES_KEY *key /* r2 */)
@ Expands the user key into the round-key schedule at [r2] and stores
@ the round count at [r2,#240].  Returns (in r0) 0 on success, -1 if a
@ pointer argument is NULL, -2 if bits is not 128/192/256.
@ SubWord is computed with AESE against an all-zero round key (AESE =
@ AddRoundKey+SubBytes+ShiftRows, and with a zero key ShiftRows does not
@ disturb the splatted word); RotWord is a vtbl byte shuffle through the
@ mask in q2; the round constant lives in q1 and is doubled each round
@ with vshl.u8 (valid for rcon 0x01..0x80; 0x1b is reloaded separately).
aes_v8_set_encrypt_key:
.Lenc_key:
mov r3,#-1 @ preload "NULL argument" error code
cmp r0,#0
beq .Lenc_key_abort
cmp r2,#0
beq .Lenc_key_abort
mov r3,#-2 @ preload "bad key length" error code
cmp r1,#128
blt .Lenc_key_abort
cmp r1,#256
bgt .Lenc_key_abort
tst r1,#0x3f @ multiple of 64 => exactly 128/192/256 given range check
bne .Lenc_key_abort
adr r3,.Lrcon
cmp r1,#192
veor q0,q0,q0 @ q0 = 0: the zero "round key" fed to aese below
vld1.8 {q3},[r0]! @ q3 = first 128 bits of the user key
mov r1,#8 @ reuse r1 as expansion loop counter
vld1.32 {q1,q2},[r3]! @ q1 = rcon (0x01...), q2 = rotate-n-splat mask
blt .Loop128
beq .L192
b .L256
.align 4
@ AES-128: 8 looped rounds (rcon 1..0x80), then two unrolled rounds
@ with rcon reloaded as 0x1b (doubled to 0x36 for the last one).
.Loop128:
vtbl.8 d20,{q3},d4 @ q10 = RotWord(last word of q3) splatted to all lanes
vtbl.8 d21,{q3},d5
vext.8 q9,q0,q3,#12 @ q9 = q3 shifted left one word (zeros shifted in)
vst1.32 {q3},[r2]! @ emit current round key
.byte 0x00,0x43,0xf0,0xf3 @ aese q10,q0
subs r1,r1,#1
veor q3,q3,q9 @ fold in the three shifted copies of the key
vext.8 q9,q0,q9,#12
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q10,q10,q1 @ SubWord(RotWord(w)) ^ rcon
veor q3,q3,q9
vshl.u8 q1,q1,#1 @ next round constant = rcon << 1
veor q3,q3,q10
bne .Loop128
vld1.32 {q1},[r3] @ rcon sequence wraps: reload 0x1b for rounds 9-10
@ round 9 (unrolled copy of the loop body, counter no longer needed)
vtbl.8 d20,{q3},d4
vtbl.8 d21,{q3},d5
vext.8 q9,q0,q3,#12
vst1.32 {q3},[r2]!
.byte 0x00,0x43,0xf0,0xf3 @ aese q10,q0
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q10,q10,q1
veor q3,q3,q9
vshl.u8 q1,q1,#1 @ 0x1b -> 0x36 for the final round
veor q3,q3,q10
@ round 10 (last round: no further rcon doubling required)
vtbl.8 d20,{q3},d4
vtbl.8 d21,{q3},d5
vext.8 q9,q0,q3,#12
vst1.32 {q3},[r2]!
.byte 0x00,0x43,0xf0,0xf3 @ aese q10,q0
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q10,q10,q1
veor q3,q3,q9
veor q3,q3,q10
vst1.32 {q3},[r2] @ store last round key (no post-increment)
add r2,r2,#0x50 @ r2 -> key+240, the rounds field
mov r12,#10 @ AES-128 => 10 rounds
b .Ldone
.align 4
@ AES-192: key material spans 1.5 q registers (q3 + d16); the vtbl mask
@ is shifted down by 8 so RotWord picks the last word of d16.
.L192:
vld1.8 {d16},[r0]! @ d16 = remaining 64 bits of the user key
vmov.i8 q10,#8 @ borrow q10
vst1.32 {q3},[r2]!
vsub.i8 q2,q2,q10 @ adjust the mask
.Loop192:
vtbl.8 d20,{q8},d4
vtbl.8 d21,{q8},d5
vext.8 q9,q0,q3,#12
#ifdef __ARMEB__
@ big-endian: store the whole q8 then rewind so only d16's 8 bytes
@ land in the schedule with correct byte order
vst1.32 {q8},[r2]!
sub r2,r2,#8
#else
vst1.32 {d16},[r2]!
#endif
.byte 0x00,0x43,0xf0,0xf3 @ aese q10,q0
subs r1,r1,#1
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q3,q3,q9
vdup.32 q9,d7[1] @ splat last word of the new q3
veor q9,q9,q8
veor q10,q10,q1 @ add round constant
vext.8 q8,q0,q8,#12
vshl.u8 q1,q1,#1
veor q8,q8,q9
veor q3,q3,q10
veor q8,q8,q10
vst1.32 {q3},[r2]!
bne .Loop192
mov r12,#12 @ AES-192 => 12 rounds
add r2,r2,#0x20 @ advance to key+240 (rounds field)
b .Ldone
.align 4
@ AES-256: two full q registers of key material; odd steps derive the
@ second half with SubWord only (no RotWord/rcon), via vdup of the
@ last word instead of the vtbl rotate.
.L256:
vld1.8 {q8},[r0] @ q8 = second 128 bits of the user key
mov r1,#7
mov r12,#14 @ AES-256 => 14 rounds
vst1.32 {q3},[r2]!
.Loop256:
vtbl.8 d20,{q8},d4
vtbl.8 d21,{q8},d5
vext.8 q9,q0,q3,#12
vst1.32 {q8},[r2]!
.byte 0x00,0x43,0xf0,0xf3 @ aese q10,q0
subs r1,r1,#1
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q3,q3,q9
vext.8 q9,q0,q9,#12
veor q10,q10,q1
veor q3,q3,q9
vshl.u8 q1,q1,#1
veor q3,q3,q10
vst1.32 {q3},[r2]!
beq .Ldone
@ odd expansion step: SubWord without rotate or round constant
vdup.32 q10,d7[1]
vext.8 q9,q0,q8,#12
.byte 0x00,0x43,0xf0,0xf3 @ aese q10,q0
veor q8,q8,q9
vext.8 q9,q0,q9,#12
veor q8,q8,q9
vext.8 q9,q0,q9,#12
veor q8,q8,q9
veor q8,q8,q10
b .Loop256
.Ldone:
str r12,[r2] @ key->rounds
mov r3,#0 @ success
.Lenc_key_abort:
mov r0,r3 @ return value
bx lr
.size aes_v8_set_encrypt_key,.-aes_v8_set_encrypt_key
.globl aes_v8_set_decrypt_key
.type aes_v8_set_decrypt_key,%function
.align 5
@ int aes_v8_set_decrypt_key(const unsigned char *userKey /* r0 */,
@                            int bits /* r1 */, AES_KEY *key /* r2 */)
@ Builds the encryption schedule, then converts it in place for the
@ "equivalent inverse cipher": round keys are reversed end-for-end and
@ the inner ones run through AESIMC (InvMixColumns).  The outermost
@ pair is only swapped, not transformed.  Returns 0 on success or the
@ error code propagated from .Lenc_key.
aes_v8_set_decrypt_key:
stmdb sp!,{r4,lr}
bl .Lenc_key
cmp r0,#0
bne .Ldec_key_abort @ propagate -1/-2 from key expansion
sub r2,r2,#240 @ restore original r2 (enc_key left it at key+240)
mov r4,#-16 @ backward stride for the tail pointer
add r0,r2,r12,lsl#4 @ end of key schedule (r12 = rounds)
@ swap first and last round keys (no InvMixColumns on this pair)
vld1.32 {q0},[r2]
vld1.32 {q1},[r0]
vst1.32 {q0},[r0],r4
vst1.32 {q1},[r2]!
.Loop_imc:
@ walk r2 forward and r0 backward, swapping and InvMixColumns-ing
@ the inner round keys until the pointers meet
vld1.32 {q0},[r2]
vld1.32 {q1},[r0]
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
vst1.32 {q0},[r0],r4
vst1.32 {q1},[r2]!
cmp r0,r2
bhi .Loop_imc
@ middle round key: transform in place
vld1.32 {q0},[r2]
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
vst1.32 {q0},[r0]
eor r0,r0,r0 @ return value 0
.Ldec_key_abort:
ldmia sp!,{r4,pc}
.size aes_v8_set_decrypt_key,.-aes_v8_set_decrypt_key
.globl aes_v8_encrypt
.type aes_v8_encrypt,%function
.align 5
@ void aes_v8_encrypt(const unsigned char *in /* r0 */,
@                     unsigned char *out /* r1 */,
@                     const AES_KEY *key /* r2 */)
@ Encrypts one 16-byte block.  The loop consumes two round keys per
@ iteration (aese+aesmc pairs, with the next keys loaded in between to
@ hide latency).  The last round has no MixColumns, so it ends with a
@ bare aese followed by veor with the final round key.
aes_v8_encrypt:
ldr r3,[r2,#240] @ r3 = number of rounds
vld1.32 {q0},[r2]! @ round key 0
vld1.8 {q2},[r0] @ load plaintext block
sub r3,r3,#2 @ two rounds handled outside the loop
vld1.32 {q1},[r2]! @ round key 1
.Loop_enc:
.byte 0x00,0x43,0xb0,0xf3 @ aese q2,q0
.byte 0x84,0x43,0xb0,0xf3 @ aesmc q2,q2
vld1.32 {q0},[r2]! @ prefetch next even round key
subs r3,r3,#2
.byte 0x02,0x43,0xb0,0xf3 @ aese q2,q1
.byte 0x84,0x43,0xb0,0xf3 @ aesmc q2,q2
vld1.32 {q1},[r2]! @ prefetch next odd round key
bgt .Loop_enc
.byte 0x00,0x43,0xb0,0xf3 @ aese q2,q0
.byte 0x84,0x43,0xb0,0xf3 @ aesmc q2,q2
vld1.32 {q0},[r2] @ final round key
.byte 0x02,0x43,0xb0,0xf3 @ aese q2,q1
veor q2,q2,q0 @ last AddRoundKey (no MixColumns in final round)
vst1.8 {q2},[r1]
bx lr
.size aes_v8_encrypt,.-aes_v8_encrypt
.globl aes_v8_decrypt
.type aes_v8_decrypt,%function
.align 5
@ void aes_v8_decrypt(const unsigned char *in /* r0 */,
@                     unsigned char *out /* r1 */,
@                     const AES_KEY *key /* r2 */)
@ Decrypts one 16-byte block; mirror image of aes_v8_encrypt using
@ aesd/aesimc and a schedule prepared by aes_v8_set_decrypt_key
@ (reversed keys, inner ones already InvMixColumns-transformed).
aes_v8_decrypt:
ldr r3,[r2,#240] @ r3 = number of rounds
vld1.32 {q0},[r2]! @ round key 0 (of the inverse schedule)
vld1.8 {q2},[r0] @ load ciphertext block
sub r3,r3,#2 @ two rounds handled outside the loop
vld1.32 {q1},[r2]! @ round key 1
.Loop_dec:
.byte 0x40,0x43,0xb0,0xf3 @ aesd q2,q0
.byte 0xc4,0x43,0xb0,0xf3 @ aesimc q2,q2
vld1.32 {q0},[r2]! @ prefetch next even round key
subs r3,r3,#2
.byte 0x42,0x43,0xb0,0xf3 @ aesd q2,q1
.byte 0xc4,0x43,0xb0,0xf3 @ aesimc q2,q2
vld1.32 {q1},[r2]! @ prefetch next odd round key
bgt .Loop_dec
.byte 0x40,0x43,0xb0,0xf3 @ aesd q2,q0
.byte 0xc4,0x43,0xb0,0xf3 @ aesimc q2,q2
vld1.32 {q0},[r2] @ final round key
.byte 0x42,0x43,0xb0,0xf3 @ aesd q2,q1
veor q2,q2,q0 @ last AddRoundKey (no InvMixColumns in final round)
vst1.8 {q2},[r1]
bx lr
.size aes_v8_decrypt,.-aes_v8_decrypt
.globl aes_v8_cbc_encrypt
.type aes_v8_cbc_encrypt,%function
.align 5
@ void aes_v8_cbc_encrypt(const unsigned char *in /* r0 */,
@                         unsigned char *out /* r1 */,
@                         size_t length /* r2 */,
@                         const AES_KEY *key /* r3 */,
@                         unsigned char *ivec /* [sp] */,
@                         int enc /* [sp,#4] */)
@ CBC-mode en/decryption.  length is truncated to a multiple of 16.
@ Encryption is strictly serial (each block depends on the previous
@ ciphertext) but fully unrolled per key size; decryption is
@ parallelizable and processed three blocks at a time.  The final IV
@ (last ciphertext block) is written back to [ivec].
aes_v8_cbc_encrypt:
mov ip,sp
stmdb sp!,{r4,r5,r6,r7,r8,lr}
vstmdb sp!,{d8,d9,d10,d11,d12,d13,d14,d15} @ ABI specification says so
ldmia ip,{r4,r5} @ load remaining args: r4 = ivec, r5 = enc flag
subs r2,r2,#16
mov r8,#16 @ r8 = input stride; zeroed on the last block
blo .Lcbc_abort @ less than one block: nothing to do
moveq r8,#0 @ exactly one block: don't read past the input
cmp r5,#0 @ en- or decrypting? (flags consumed at beq below)
ldr r5,[r3,#240] @ r5 = rounds
and r2,r2,#-16 @ truncate length to a block multiple
vld1.8 {q6},[r4] @ q6 = IV
vld1.8 {q0},[r0],r8 @ first input block
vld1.32 {q8,q9},[r3] @ load key schedule...
sub r5,r5,#6
add r7,r3,r5,lsl#4 @ pointer to last 7 round keys
sub r5,r5,#2
@ keep the last 7 round keys resident in q10-q15,q7 for all key sizes
vld1.32 {q10,q11},[r7]!
vld1.32 {q12,q13},[r7]!
vld1.32 {q14,q15},[r7]!
vld1.32 {q7},[r7] @ q7 = final round key
add r7,r3,#32
mov r6,r5
beq .Lcbc_dec @ enc==0 => decrypt path
@ ---- encryption path ----
cmp r5,#2 @ 10 rounds (AES-128)?
veor q0,q0,q6 @ first block ^= IV
@ q5 = rndkey[0]^rndkey[last]: pre-xoring it into the next plaintext
@ lets the loop-top "aese q0,q8" perform the CBC chaining xor and the
@ round-0 AddRoundKey together (q0 still carries ^rndkey[last] then)
veor q5,q8,q7
beq .Lcbc_enc128
@ AES-192/256: stash pointers to rounds keys 1..7 in r7/r6/r12/r14/r3
vld1.32 {q2,q3},[r7]
add r7,r3,#16
add r6,r3,#16*4
add r12,r3,#16*5
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
add r14,r3,#16*6
add r3,r3,#16*7
b .Lenter_cbc_enc
.align 4
.Loop_cbc_enc:
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vst1.8 {q6},[r1]! @ store previous ciphertext block
.Lenter_cbc_enc:
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x04,0x03,0xb0,0xf3 @ aese q0,q2
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.32 {q8},[r6] @ rndkey[4]
cmp r5,#4 @ 12 rounds (AES-192)?
.byte 0x06,0x03,0xb0,0xf3 @ aese q0,q3
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.32 {q9},[r12] @ rndkey[5]
beq .Lcbc_enc192
@ AES-256 only: two extra rounds
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.32 {q8},[r14] @ rndkey[6]
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.32 {q9},[r3] @ rndkey[7]
nop
.Lcbc_enc192:
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
subs r2,r2,#16
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
moveq r8,#0 @ last block: stop advancing the input pointer
.byte 0x24,0x03,0xb0,0xf3 @ aese q0,q10
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x26,0x03,0xb0,0xf3 @ aese q0,q11
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.8 {q8},[r0],r8 @ next plaintext block
.byte 0x28,0x03,0xb0,0xf3 @ aese q0,q12
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
veor q8,q8,q5 @ pre-xor next block with rndkey[0]^rndkey[last]
.byte 0x2a,0x03,0xb0,0xf3 @ aese q0,q13
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.32 {q9},[r7] @ re-pre-load rndkey[1]
.byte 0x2c,0x03,0xb0,0xf3 @ aese q0,q14
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x2e,0x03,0xb0,0xf3 @ aese q0,q15
veor q6,q0,q7 @ q6 = ciphertext block
bhs .Loop_cbc_enc
vst1.8 {q6},[r1]! @ flush final ciphertext block
b .Lcbc_done
.align 5
@ AES-128 encryption: whole schedule fits in registers, no reloads
.Lcbc_enc128:
vld1.32 {q2,q3},[r7] @ rndkey[2],rndkey[3]
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
b .Lenter_cbc_enc128
.Loop_cbc_enc128:
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vst1.8 {q6},[r1]! @ store previous ciphertext block
.Lenter_cbc_enc128:
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
subs r2,r2,#16
.byte 0x04,0x03,0xb0,0xf3 @ aese q0,q2
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
moveq r8,#0 @ last block: stop advancing the input pointer
.byte 0x06,0x03,0xb0,0xf3 @ aese q0,q3
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x24,0x03,0xb0,0xf3 @ aese q0,q10
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x26,0x03,0xb0,0xf3 @ aese q0,q11
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
vld1.8 {q8},[r0],r8 @ next plaintext block
.byte 0x28,0x03,0xb0,0xf3 @ aese q0,q12
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x2a,0x03,0xb0,0xf3 @ aese q0,q13
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x2c,0x03,0xb0,0xf3 @ aese q0,q14
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
veor q8,q8,q5 @ pre-xor next block with rndkey[0]^rndkey[last]
.byte 0x2e,0x03,0xb0,0xf3 @ aese q0,q15
veor q6,q0,q7 @ q6 = ciphertext block
bhs .Loop_cbc_enc128
vst1.8 {q6},[r1]! @ flush final ciphertext block
b .Lcbc_done
.align 5
@ ---- decryption path: 3 blocks in flight (q0,q1,q10) ----
.Lcbc_dec:
vld1.8 {q10},[r0]!
subs r2,r2,#32 @ bias
add r6,r5,#2
vorr q3,q0,q0 @ keep ciphertext copies for the CBC xor afterwards
vorr q1,q0,q0
vorr q11,q10,q10
blo .Lcbc_dec_tail @ fewer than 3 blocks total
vorr q1,q10,q10
vld1.8 {q10},[r0]!
vorr q2,q0,q0
vorr q3,q1,q1
vorr q11,q10,q10
.Loop3x_cbc_dec:
@ inner rounds, two keys per pass, three blocks interleaved
.byte 0x60,0x03,0xb0,0xf3 @ aesd q0,q8
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x60,0x23,0xb0,0xf3 @ aesd q1,q8
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x60,0x43,0xf0,0xf3 @ aesd q10,q8
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.32 {q8},[r7]!
subs r6,r6,#2
.byte 0x62,0x03,0xb0,0xf3 @ aesd q0,q9
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x62,0x23,0xb0,0xf3 @ aesd q1,q9
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x62,0x43,0xf0,0xf3 @ aesd q10,q9
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.32 {q9},[r7]!
bgt .Loop3x_cbc_dec
.byte 0x60,0x03,0xb0,0xf3 @ aesd q0,q8
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x60,0x23,0xb0,0xf3 @ aesd q1,q8
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x60,0x43,0xf0,0xf3 @ aesd q10,q8
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
@ fold the final AddRoundKey into the CBC xor: q4/q5/q9 hold
@ (previous ciphertext ^ last round key) for the three lanes
veor q4,q6,q7
subs r2,r2,#0x30
veor q5,q2,q7
movlo r6,r2 @ partial last run: r6 := r2 (<0); r6 is zero at this point
.byte 0x62,0x03,0xb0,0xf3 @ aesd q0,q9
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x62,0x23,0xb0,0xf3 @ aesd q1,q9
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x62,0x43,0xf0,0xf3 @ aesd q10,q9
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
veor q9,q3,q7
add r0,r0,r6 @ r0 is adjusted in such way that
@ at exit from the loop q1-q10
@ are loaded with last "words"
vorr q6,q11,q11 @ carry last ciphertext forward as next IV
mov r7,r3
.byte 0x68,0x03,0xb0,0xf3 @ aesd q0,q12
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x68,0x23,0xb0,0xf3 @ aesd q1,q12
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x68,0x43,0xf0,0xf3 @ aesd q10,q12
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.8 {q2},[r0]! @ prefetch next 3 ciphertext blocks
.byte 0x6a,0x03,0xb0,0xf3 @ aesd q0,q13
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x6a,0x23,0xb0,0xf3 @ aesd q1,q13
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x6a,0x43,0xf0,0xf3 @ aesd q10,q13
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.8 {q3},[r0]!
.byte 0x6c,0x03,0xb0,0xf3 @ aesd q0,q14
.byte 0xc0,0x03,0xb0,0xf3 @ aesimc q0,q0
.byte 0x6c,0x23,0xb0,0xf3 @ aesd q1,q14
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x6c,0x43,0xf0,0xf3 @ aesd q10,q14
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.8 {q11},[r0]!
.byte 0x6e,0x03,0xb0,0xf3 @ aesd q0,q15
.byte 0x6e,0x23,0xb0,0xf3 @ aesd q1,q15
.byte 0x6e,0x43,0xf0,0xf3 @ aesd q10,q15
vld1.32 {q8},[r7]! @ re-pre-load rndkey[0]
add r6,r5,#2
veor q4,q4,q0 @ complete CBC: plaintext = state ^ prev-ct ^ last key
veor q5,q5,q1
veor q10,q10,q9
vld1.32 {q9},[r7]! @ re-pre-load rndkey[1]
vst1.8 {q4},[r1]!
vorr q0,q2,q2 @ rotate prefetched blocks into the working regs
vst1.8 {q5},[r1]!
vorr q1,q3,q3
vst1.8 {q10},[r1]!
vorr q10,q11,q11
bhs .Loop3x_cbc_dec
@ 0, 1 or 2 blocks remain in q1/q10
cmn r2,#0x30
beq .Lcbc_done
nop
.Lcbc_dec_tail:
.byte 0x60,0x23,0xb0,0xf3 @ aesd q1,q8
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x60,0x43,0xf0,0xf3 @ aesd q10,q8
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.32 {q8},[r7]!
subs r6,r6,#2
.byte 0x62,0x23,0xb0,0xf3 @ aesd q1,q9
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x62,0x43,0xf0,0xf3 @ aesd q10,q9
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
vld1.32 {q9},[r7]!
bgt .Lcbc_dec_tail
.byte 0x60,0x23,0xb0,0xf3 @ aesd q1,q8
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x60,0x43,0xf0,0xf3 @ aesd q10,q8
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
.byte 0x62,0x23,0xb0,0xf3 @ aesd q1,q9
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x62,0x43,0xf0,0xf3 @ aesd q10,q9
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
.byte 0x68,0x23,0xb0,0xf3 @ aesd q1,q12
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x68,0x43,0xf0,0xf3 @ aesd q10,q12
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
cmn r2,#0x20 @ one block left (r2==-0x20) or two?
.byte 0x6a,0x23,0xb0,0xf3 @ aesd q1,q13
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x6a,0x43,0xf0,0xf3 @ aesd q10,q13
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
veor q5,q6,q7 @ IV ^ last round key
.byte 0x6c,0x23,0xb0,0xf3 @ aesd q1,q14
.byte 0xc2,0x23,0xb0,0xf3 @ aesimc q1,q1
.byte 0x6c,0x43,0xf0,0xf3 @ aesd q10,q14
.byte 0xe4,0x43,0xf0,0xf3 @ aesimc q10,q10
veor q9,q3,q7 @ prev ciphertext ^ last round key
.byte 0x6e,0x23,0xb0,0xf3 @ aesd q1,q15
.byte 0x6e,0x43,0xf0,0xf3 @ aesd q10,q15
beq .Lcbc_dec_one
veor q5,q5,q1
veor q9,q9,q10
vorr q6,q11,q11 @ last ciphertext becomes the new IV
vst1.8 {q5},[r1]!
vst1.8 {q9},[r1]!
b .Lcbc_done
.Lcbc_dec_one:
veor q5,q5,q10
vorr q6,q11,q11 @ last ciphertext becomes the new IV
vst1.8 {q5},[r1]!
.Lcbc_done:
vst1.8 {q6},[r4] @ write back final IV
.Lcbc_abort:
vldmia sp!,{d8,d9,d10,d11,d12,d13,d14,d15}
ldmia sp!,{r4,r5,r6,r7,r8,pc}
.size aes_v8_cbc_encrypt,.-aes_v8_cbc_encrypt
.globl aes_v8_ctr32_encrypt_blocks
.type aes_v8_ctr32_encrypt_blocks,%function
.align 5
@ void aes_v8_ctr32_encrypt_blocks(const unsigned char *in /* r0 */,
@                                  unsigned char *out /* r1 */,
@                                  size_t blocks /* r2 */,
@                                  const AES_KEY *key /* r3 */,
@                                  const unsigned char ivec[16] /* [sp] */)
@ CTR mode with a 32-bit big-endian counter in the last word of ivec.
@ The counter is mirrored in r8 in host byte order (rev on LE) and
@ written back into lane d13[1] of the counter block q6 with rev'd
@ values; three counter blocks are encrypted per main-loop iteration.
aes_v8_ctr32_encrypt_blocks:
mov ip,sp
stmdb sp!,{r4,r5,r6,r7,r8,r9,r10,lr}
vstmdb sp!,{d8,d9,d10,d11,d12,d13,d14,d15} @ ABI specification says so
ldr r4, [ip] @ load remaining arg: r4 = ivec
ldr r5,[r3,#240] @ r5 = rounds
ldr r8, [r4, #12] @ r8 = counter word (big-endian in memory)
#ifdef __ARMEB__
vld1.8 {q0},[r4]
#else
vld1.32 {q0},[r4]
#endif
vld1.32 {q8,q9},[r3] @ load key schedule...
sub r5,r5,#4
mov r12,#16 @ r12 = stride for the 2nd tail block load
cmp r2,#2
add r7,r3,r5,lsl#4 @ pointer to last 5 round keys
sub r5,r5,#2
@ keep the last 5 round keys resident in q12-q15,q7
vld1.32 {q12,q13},[r7]!
vld1.32 {q14,q15},[r7]!
vld1.32 {q7},[r7] @ q7 = final round key
add r7,r3,#32
mov r6,r5
movlo r12,#0 @ single block: don't read a 2nd input block
#ifndef __ARMEB__
rev r8, r8 @ keep counter in host order in r8
#endif
add r10, r8, #1
vorr q6,q0,q0 @ q6 = reference counter block
rev r10, r10
vmov.32 d13[1],r10 @ q6 now holds counter+1
add r8, r8, #2
vorr q1,q6,q6 @ q1 = second counter block
bls .Lctr32_tail @ <=2 blocks: skip the 3x loop
rev r12, r8
vmov.32 d13[1],r12 @ q6 now holds counter+2
sub r2,r2,#3 @ bias
vorr q10,q6,q6 @ q10 = third counter block
b .Loop3x_ctr32
.align 4
.Loop3x_ctr32:
@ inner rounds: three keystream blocks interleaved, two keys per pass
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x20,0x23,0xb0,0xf3 @ aese q1,q8
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
.byte 0x20,0x43,0xf0,0xf3 @ aese q10,q8
.byte 0xa4,0x43,0xf0,0xf3 @ aesmc q10,q10
vld1.32 {q8},[r7]!
subs r6,r6,#2
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x22,0x23,0xb0,0xf3 @ aese q1,q9
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
.byte 0x22,0x43,0xf0,0xf3 @ aese q10,q9
.byte 0xa4,0x43,0xf0,0xf3 @ aesmc q10,q10
vld1.32 {q9},[r7]!
bgt .Loop3x_ctr32
@ final rounds: move state into q4/q5/q9 so q0/q1/q10 can be
@ re-seeded with the next three counter values while finishing
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x83,0xb0,0xf3 @ aesmc q4,q0
.byte 0x20,0x23,0xb0,0xf3 @ aese q1,q8
.byte 0x82,0xa3,0xb0,0xf3 @ aesmc q5,q1
vld1.8 {q2},[r0]! @ load 3 input blocks
add r9,r8,#1
.byte 0x20,0x43,0xf0,0xf3 @ aese q10,q8
.byte 0xa4,0x43,0xf0,0xf3 @ aesmc q10,q10
vld1.8 {q3},[r0]!
rev r9,r9
.byte 0x22,0x83,0xb0,0xf3 @ aese q4,q9
.byte 0x88,0x83,0xb0,0xf3 @ aesmc q4,q4
.byte 0x22,0xa3,0xb0,0xf3 @ aese q5,q9
.byte 0x8a,0xa3,0xb0,0xf3 @ aesmc q5,q5
vld1.8 {q11},[r0]!
mov r7,r3 @ rewind key pointer for the next iteration
.byte 0x22,0x43,0xf0,0xf3 @ aese q10,q9
.byte 0xa4,0x23,0xf0,0xf3 @ aesmc q9,q10
.byte 0x28,0x83,0xb0,0xf3 @ aese q4,q12
.byte 0x88,0x83,0xb0,0xf3 @ aesmc q4,q4
.byte 0x28,0xa3,0xb0,0xf3 @ aese q5,q12
.byte 0x8a,0xa3,0xb0,0xf3 @ aesmc q5,q5
veor q2,q2,q7 @ pre-xor inputs with the last round key
add r10,r8,#2
.byte 0x28,0x23,0xf0,0xf3 @ aese q9,q12
.byte 0xa2,0x23,0xf0,0xf3 @ aesmc q9,q9
veor q3,q3,q7
add r8,r8,#3 @ advance the counter by the 3 blocks produced
.byte 0x2a,0x83,0xb0,0xf3 @ aese q4,q13
.byte 0x88,0x83,0xb0,0xf3 @ aesmc q4,q4
.byte 0x2a,0xa3,0xb0,0xf3 @ aese q5,q13
.byte 0x8a,0xa3,0xb0,0xf3 @ aesmc q5,q5
veor q11,q11,q7
vmov.32 d13[1], r9 @ q6 = counter block for next q0
.byte 0x2a,0x23,0xf0,0xf3 @ aese q9,q13
.byte 0xa2,0x23,0xf0,0xf3 @ aesmc q9,q9
vorr q0,q6,q6
rev r10,r10
.byte 0x2c,0x83,0xb0,0xf3 @ aese q4,q14
.byte 0x88,0x83,0xb0,0xf3 @ aesmc q4,q4
vmov.32 d13[1], r10 @ q6 = counter block for next q1
rev r12,r8
.byte 0x2c,0xa3,0xb0,0xf3 @ aese q5,q14
.byte 0x8a,0xa3,0xb0,0xf3 @ aesmc q5,q5
vorr q1,q6,q6
vmov.32 d13[1], r12 @ q6 = counter block for next q10
.byte 0x2c,0x23,0xf0,0xf3 @ aese q9,q14
.byte 0xa2,0x23,0xf0,0xf3 @ aesmc q9,q9
vorr q10,q6,q6
subs r2,r2,#3
.byte 0x2e,0x83,0xb0,0xf3 @ aese q4,q15
.byte 0x2e,0xa3,0xb0,0xf3 @ aese q5,q15
.byte 0x2e,0x23,0xf0,0xf3 @ aese q9,q15
veor q2,q2,q4 @ ciphertext = input ^ keystream
vld1.32 {q8},[r7]! @ re-pre-load rndkey[0]
vst1.8 {q2},[r1]!
veor q3,q3,q5
mov r6,r5
vst1.8 {q3},[r1]!
veor q11,q11,q9
vld1.32 {q9},[r7]! @ re-pre-load rndkey[1]
vst1.8 {q11},[r1]!
bhs .Loop3x_ctr32
@ undo the bias; 0, 1 or 2 blocks remain in q0/q1
adds r2,r2,#3
beq .Lctr32_done
cmp r2,#1
mov r12,#16
moveq r12,#0 @ one block: don't read a 2nd input block
.Lctr32_tail:
@ tail: encrypt counter blocks q0 (and q1) two keys per pass
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x20,0x23,0xb0,0xf3 @ aese q1,q8
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
vld1.32 {q8},[r7]!
subs r6,r6,#2
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x22,0x23,0xb0,0xf3 @ aese q1,q9
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
vld1.32 {q9},[r7]!
bgt .Lctr32_tail
.byte 0x20,0x03,0xb0,0xf3 @ aese q0,q8
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x20,0x23,0xb0,0xf3 @ aese q1,q8
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
.byte 0x22,0x03,0xb0,0xf3 @ aese q0,q9
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x22,0x23,0xb0,0xf3 @ aese q1,q9
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
vld1.8 {q2},[r0],r12 @ 1st input block (r12=0 when only one block)
.byte 0x28,0x03,0xb0,0xf3 @ aese q0,q12
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x28,0x23,0xb0,0xf3 @ aese q1,q12
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
vld1.8 {q3},[r0] @ 2nd (or same) input block
.byte 0x2a,0x03,0xb0,0xf3 @ aese q0,q13
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x2a,0x23,0xb0,0xf3 @ aese q1,q13
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
veor q2,q2,q7 @ pre-xor with last round key
.byte 0x2c,0x03,0xb0,0xf3 @ aese q0,q14
.byte 0x80,0x03,0xb0,0xf3 @ aesmc q0,q0
.byte 0x2c,0x23,0xb0,0xf3 @ aese q1,q14
.byte 0x82,0x23,0xb0,0xf3 @ aesmc q1,q1
veor q3,q3,q7
.byte 0x2e,0x03,0xb0,0xf3 @ aese q0,q15
.byte 0x2e,0x23,0xb0,0xf3 @ aese q1,q15
cmp r2,#1
veor q2,q2,q0 @ ciphertext = input ^ keystream
veor q3,q3,q1
vst1.8 {q2},[r1]!
beq .Lctr32_done @ only one block was requested
vst1.8 {q3},[r1]
.Lctr32_done:
vldmia sp!,{d8,d9,d10,d11,d12,d13,d14,d15}
ldmia sp!,{r4,r5,r6,r7,r8,r9,r10,pc}
.size aes_v8_ctr32_encrypt_blocks,.-aes_v8_ctr32_encrypt_blocks
  695. #endif