aes_core.c

  1. /*
  2. * Copyright 2002-2020 The OpenSSL Project Authors. All Rights Reserved.
  3. *
  4. * Licensed under the OpenSSL license (the "License"). You may not use
  5. * this file except in compliance with the License. You can obtain a copy
  6. * in the file LICENSE in the source distribution or at
  7. * https://www.openssl.org/source/license.html
  8. */
  9. /**
  10. * rijndael-alg-fst.c
  11. *
  12. * @version 3.0 (December 2000)
  13. *
  14. * Optimised ANSI C code for the Rijndael cipher (now AES)
  15. *
  16. * @author Vincent Rijmen
  17. * @author Antoon Bosselaers
  18. * @author Paulo Barreto
  19. *
  20. * This code is hereby placed in the public domain.
  21. *
  22. * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ''AS IS'' AND ANY EXPRESS
  23. * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  24. * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
  25. * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
  26. * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
  27. * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
  28. * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
  29. * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  30. * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
  31. * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
  32. * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  33. */
  34. /* Note: rewritten a little bit to provide error control and an OpenSSL-
  35. compatible API */
  36. #include <assert.h>
  37. #include <stdlib.h>
  38. #include <openssl/crypto.h>
  39. #include <openssl/aes.h>
  40. #include "aes_local.h"
  41. #if defined(OPENSSL_AES_CONST_TIME) && !defined(AES_ASM)
  42. typedef union {
  43. unsigned char b[8];
  44. u32 w[2];
  45. u64 d;
  46. } uni;
  47. /*
  48. * Compute w := (w * x) mod (x^8 + x^4 + x^3 + x^1 + 1)
  49. * Therefore the name "xtime".
  50. */
  51. static void XtimeWord(u32 *w)
  52. {
  53. u32 a, b;
  54. a = *w;
  55. b = a & 0x80808080u;
  56. a ^= b;
  57. b -= b >> 7;
  58. b &= 0x1B1B1B1Bu;
  59. b ^= a << 1;
  60. *w = b;
  61. }
  62. static void XtimeLong(u64 *w)
  63. {
  64. u64 a, b;
  65. a = *w;
  66. b = a & 0x8080808080808080uLL;
  67. a ^= b;
  68. b -= b >> 7;
  69. b &= 0x1B1B1B1B1B1B1B1BuLL;
  70. b ^= a << 1;
  71. *w = b;
  72. }
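/*
 * Illustrative sketch (not part of the upstream file): the classic
 * single-byte xtime that XtimeWord/XtimeLong apply to four or eight
 * byte lanes at once.  The branch-free mask sequence above
 * (b -= b >> 7; b &= 0x1B...) is the lane-parallel form of the
 * conditional XOR with 0x1B below.
 */
#if 0
static unsigned char xtime_byte(unsigned char a)
{
    /* multiply by x in GF(2^8) modulo x^8 + x^4 + x^3 + x + 1 */
    return (unsigned char)((a << 1) ^ ((a & 0x80) ? 0x1B : 0x00));
}
#endif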
  73. /*
  74. * This computes w := S * w^-1 + c, where c = {01100011}.
  75. * Instead of using GF(2^8) mod (x^8+x^4+x^3+x+1) we do the inversion
  76. * in GF(GF(GF(2^2)^2)^2) mod (X^2+X+8)
  77. * and GF(GF(2^2)^2) mod (X^2+X+2)
  78. * and GF(2^2) mod (X^2+X+1)
  79. * The first part of the algorithm below transfers the coordinates
  80. * {0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80} =>
  81. * {1,Y,Y^2,Y^3,Y^4,Y^5,Y^6,Y^7} with Y=0x41:
  82. * {0x01,0x41,0x66,0x6c,0x56,0x9a,0x58,0xc4}
  83. * The last part undoes the coordinate transfer and the final affine
  84. * transformation S:
  85. * b[i] = b[i] + b[(i+4)%8] + b[(i+5)%8] + b[(i+6)%8] + b[(i+7)%8] + c[i]
  86. * in one step.
  87. * The multiplication in GF(2^2^2^2) is done in ordinary coords:
  88. * A = (a0*1 + a1*x^4)
  89. * B = (b0*1 + b1*x^4)
  90. * AB = ((a0*b0 + 8*a1*b1)*1 + (a1*b0 + (a0+a1)*b1)*x^4)
  91. * When A = (a0,a1) is given we want to solve AB = 1:
  92. * (a) 1 = a0*b0 + 8*a1*b1
  93. * (b) 0 = a1*b0 + (a0+a1)*b1
  94. * => multiply (a) by a1 and (b) by a0
  95. * (c) a1 = a1*a0*b0 + (8*a1*a1)*b1
  96. * (d) 0 = a1*a0*b0 + (a0*a0+a1*a0)*b1
  97. * => add (c) + (d)
  98. * (e) a1 = (a0*a0 + a1*a0 + 8*a1*a1)*b1
  99. * => therefore
  100. * b1 = (a0*a0 + a1*a0 + 8*a1*a1)^-1 * a1
  101. * => and adding (a1*b0) to (b) we get
  102. * (f) a1*b0 = (a0+a1)*b1
  103. * => therefore
  104. * b0 = (a0*a0 + a1*a0 + 8*a1*a1)^-1 * (a0+a1)
  105. * Note this formula also works for the case
  106. * (a0+a1)*a0 + 8*a1*a1 = 0
  107. * if the inverse element for 0^-1 is mapped to 0.
  108. * Repeat the same for GF(2^2^2) and GF(2^2).
  109. * We get the following algorithm:
  110. * inv8(a0,a1):
  111. * x0 = a0^a1
  112. * [y0,y1] = mul4([x0,a1],[a0,a1]); (*)
  113. * y1 = mul4(8,y1);
  114. * t = inv4(y0^y1);
  115. * [b0,b1] = mul4([x0,a1],[t,t]); (*)
  116. * return [b0,b1];
  117. * The non-linear multiplies (*) can be done in parallel at no extra cost.
  118. */
  119. static void SubWord(u32 *w)
  120. {
  121. u32 x, y, a1, a2, a3, a4, a5, a6;
  122. x = *w;
  123. y = ((x & 0xFEFEFEFEu) >> 1) | ((x & 0x01010101u) << 7);
  124. x &= 0xDDDDDDDDu;
  125. x ^= y & 0x57575757u;
  126. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  127. x ^= y & 0x1C1C1C1Cu;
  128. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  129. x ^= y & 0x4A4A4A4Au;
  130. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  131. x ^= y & 0x42424242u;
  132. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  133. x ^= y & 0x64646464u;
  134. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  135. x ^= y & 0xE0E0E0E0u;
  136. a1 = x;
  137. a1 ^= (x & 0xF0F0F0F0u) >> 4;
  138. a2 = ((x & 0xCCCCCCCCu) >> 2) | ((x & 0x33333333u) << 2);
  139. a3 = x & a1;
  140. a3 ^= (a3 & 0xAAAAAAAAu) >> 1;
  141. a3 ^= (((x << 1) & a1) ^ ((a1 << 1) & x)) & 0xAAAAAAAAu;
  142. a4 = a2 & a1;
  143. a4 ^= (a4 & 0xAAAAAAAAu) >> 1;
  144. a4 ^= (((a2 << 1) & a1) ^ ((a1 << 1) & a2)) & 0xAAAAAAAAu;
  145. a5 = (a3 & 0xCCCCCCCCu) >> 2;
  146. a3 ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCu;
  147. a4 = a5 & 0x22222222u;
  148. a4 |= a4 >> 1;
  149. a4 ^= (a5 << 1) & 0x22222222u;
  150. a3 ^= a4;
  151. a5 = a3 & 0xA0A0A0A0u;
  152. a5 |= a5 >> 1;
  153. a5 ^= (a3 << 1) & 0xA0A0A0A0u;
  154. a4 = a5 & 0xC0C0C0C0u;
  155. a6 = a4 >> 2;
  156. a4 ^= (a5 << 2) & 0xC0C0C0C0u;
  157. a5 = a6 & 0x20202020u;
  158. a5 |= a5 >> 1;
  159. a5 ^= (a6 << 1) & 0x20202020u;
  160. a4 |= a5;
  161. a3 ^= a4 >> 4;
  162. a3 &= 0x0F0F0F0Fu;
  163. a2 = a3;
  164. a2 ^= (a3 & 0x0C0C0C0Cu) >> 2;
  165. a4 = a3 & a2;
  166. a4 ^= (a4 & 0x0A0A0A0Au) >> 1;
  167. a4 ^= (((a3 << 1) & a2) ^ ((a2 << 1) & a3)) & 0x0A0A0A0Au;
  168. a5 = a4 & 0x08080808u;
  169. a5 |= a5 >> 1;
  170. a5 ^= (a4 << 1) & 0x08080808u;
  171. a4 ^= a5 >> 2;
  172. a4 &= 0x03030303u;
  173. a4 ^= (a4 & 0x02020202u) >> 1;
  174. a4 |= a4 << 2;
  175. a3 = a2 & a4;
  176. a3 ^= (a3 & 0x0A0A0A0Au) >> 1;
  177. a3 ^= (((a2 << 1) & a4) ^ ((a4 << 1) & a2)) & 0x0A0A0A0Au;
  178. a3 |= a3 << 4;
  179. a2 = ((a1 & 0xCCCCCCCCu) >> 2) | ((a1 & 0x33333333u) << 2);
  180. x = a1 & a3;
  181. x ^= (x & 0xAAAAAAAAu) >> 1;
  182. x ^= (((a1 << 1) & a3) ^ ((a3 << 1) & a1)) & 0xAAAAAAAAu;
  183. a4 = a2 & a3;
  184. a4 ^= (a4 & 0xAAAAAAAAu) >> 1;
  185. a4 ^= (((a2 << 1) & a3) ^ ((a3 << 1) & a2)) & 0xAAAAAAAAu;
  186. a5 = (x & 0xCCCCCCCCu) >> 2;
  187. x ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCu;
  188. a4 = a5 & 0x22222222u;
  189. a4 |= a4 >> 1;
  190. a4 ^= (a5 << 1) & 0x22222222u;
  191. x ^= a4;
  192. y = ((x & 0xFEFEFEFEu) >> 1) | ((x & 0x01010101u) << 7);
  193. x &= 0x39393939u;
  194. x ^= y & 0x3F3F3F3Fu;
  195. y = ((y & 0xFCFCFCFCu) >> 2) | ((y & 0x03030303u) << 6);
  196. x ^= y & 0x97979797u;
  197. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  198. x ^= y & 0x9B9B9B9Bu;
  199. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  200. x ^= y & 0x3C3C3C3Cu;
  201. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  202. x ^= y & 0xDDDDDDDDu;
  203. y = ((y & 0xFEFEFEFEu) >> 1) | ((y & 0x01010101u) << 7);
  204. x ^= y & 0x72727272u;
  205. x ^= 0x63636363u;
  206. *w = x;
  207. }
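/*
 * Sanity sketch (illustrative, not in the upstream source): the
 * bit-sliced SubWord above reproduces the AES S-box independently on
 * every byte lane, e.g. S(0x00) = 0x63 and S(0x53) = 0xed, the worked
 * example from FIPS-197.
 */
#if 0
static void SubWord_example(void)
{
    u32 w = 0x00535300u;

    SubWord(&w);
    assert(w == 0x63eded63u);
}
#endif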
  208. static void SubLong(u64 *w)
  209. {
  210. u64 x, y, a1, a2, a3, a4, a5, a6;
  211. x = *w;
  212. y = ((x & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((x & 0x0101010101010101uLL) << 7);
  213. x &= 0xDDDDDDDDDDDDDDDDuLL;
  214. x ^= y & 0x5757575757575757uLL;
  215. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  216. x ^= y & 0x1C1C1C1C1C1C1C1CuLL;
  217. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  218. x ^= y & 0x4A4A4A4A4A4A4A4AuLL;
  219. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  220. x ^= y & 0x4242424242424242uLL;
  221. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  222. x ^= y & 0x6464646464646464uLL;
  223. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  224. x ^= y & 0xE0E0E0E0E0E0E0E0uLL;
  225. a1 = x;
  226. a1 ^= (x & 0xF0F0F0F0F0F0F0F0uLL) >> 4;
  227. a2 = ((x & 0xCCCCCCCCCCCCCCCCuLL) >> 2) | ((x & 0x3333333333333333uLL) << 2);
  228. a3 = x & a1;
  229. a3 ^= (a3 & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  230. a3 ^= (((x << 1) & a1) ^ ((a1 << 1) & x)) & 0xAAAAAAAAAAAAAAAAuLL;
  231. a4 = a2 & a1;
  232. a4 ^= (a4 & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  233. a4 ^= (((a2 << 1) & a1) ^ ((a1 << 1) & a2)) & 0xAAAAAAAAAAAAAAAAuLL;
  234. a5 = (a3 & 0xCCCCCCCCCCCCCCCCuLL) >> 2;
  235. a3 ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCCCCCCCCCuLL;
  236. a4 = a5 & 0x2222222222222222uLL;
  237. a4 |= a4 >> 1;
  238. a4 ^= (a5 << 1) & 0x2222222222222222uLL;
  239. a3 ^= a4;
  240. a5 = a3 & 0xA0A0A0A0A0A0A0A0uLL;
  241. a5 |= a5 >> 1;
  242. a5 ^= (a3 << 1) & 0xA0A0A0A0A0A0A0A0uLL;
  243. a4 = a5 & 0xC0C0C0C0C0C0C0C0uLL;
  244. a6 = a4 >> 2;
  245. a4 ^= (a5 << 2) & 0xC0C0C0C0C0C0C0C0uLL;
  246. a5 = a6 & 0x2020202020202020uLL;
  247. a5 |= a5 >> 1;
  248. a5 ^= (a6 << 1) & 0x2020202020202020uLL;
  249. a4 |= a5;
  250. a3 ^= a4 >> 4;
  251. a3 &= 0x0F0F0F0F0F0F0F0FuLL;
  252. a2 = a3;
  253. a2 ^= (a3 & 0x0C0C0C0C0C0C0C0CuLL) >> 2;
  254. a4 = a3 & a2;
  255. a4 ^= (a4 & 0x0A0A0A0A0A0A0A0AuLL) >> 1;
  256. a4 ^= (((a3 << 1) & a2) ^ ((a2 << 1) & a3)) & 0x0A0A0A0A0A0A0A0AuLL;
  257. a5 = a4 & 0x0808080808080808uLL;
  258. a5 |= a5 >> 1;
  259. a5 ^= (a4 << 1) & 0x0808080808080808uLL;
  260. a4 ^= a5 >> 2;
  261. a4 &= 0x0303030303030303uLL;
  262. a4 ^= (a4 & 0x0202020202020202uLL) >> 1;
  263. a4 |= a4 << 2;
  264. a3 = a2 & a4;
  265. a3 ^= (a3 & 0x0A0A0A0A0A0A0A0AuLL) >> 1;
  266. a3 ^= (((a2 << 1) & a4) ^ ((a4 << 1) & a2)) & 0x0A0A0A0A0A0A0A0AuLL;
  267. a3 |= a3 << 4;
  268. a2 = ((a1 & 0xCCCCCCCCCCCCCCCCuLL) >> 2) | ((a1 & 0x3333333333333333uLL) << 2);
  269. x = a1 & a3;
  270. x ^= (x & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  271. x ^= (((a1 << 1) & a3) ^ ((a3 << 1) & a1)) & 0xAAAAAAAAAAAAAAAAuLL;
  272. a4 = a2 & a3;
  273. a4 ^= (a4 & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  274. a4 ^= (((a2 << 1) & a3) ^ ((a3 << 1) & a2)) & 0xAAAAAAAAAAAAAAAAuLL;
  275. a5 = (x & 0xCCCCCCCCCCCCCCCCuLL) >> 2;
  276. x ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCCCCCCCCCuLL;
  277. a4 = a5 & 0x2222222222222222uLL;
  278. a4 |= a4 >> 1;
  279. a4 ^= (a5 << 1) & 0x2222222222222222uLL;
  280. x ^= a4;
  281. y = ((x & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((x & 0x0101010101010101uLL) << 7);
  282. x &= 0x3939393939393939uLL;
  283. x ^= y & 0x3F3F3F3F3F3F3F3FuLL;
  284. y = ((y & 0xFCFCFCFCFCFCFCFCuLL) >> 2) | ((y & 0x0303030303030303uLL) << 6);
  285. x ^= y & 0x9797979797979797uLL;
  286. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  287. x ^= y & 0x9B9B9B9B9B9B9B9BuLL;
  288. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  289. x ^= y & 0x3C3C3C3C3C3C3C3CuLL;
  290. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  291. x ^= y & 0xDDDDDDDDDDDDDDDDuLL;
  292. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  293. x ^= y & 0x7272727272727272uLL;
  294. x ^= 0x6363636363636363uLL;
  295. *w = x;
  296. }
  297. /*
  298. * This computes w := (S^-1 * (w + c))^-1
  299. */
  300. static void InvSubLong(u64 *w)
  301. {
  302. u64 x, y, a1, a2, a3, a4, a5, a6;
  303. x = *w;
  304. x ^= 0x6363636363636363uLL;
  305. y = ((x & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((x & 0x0101010101010101uLL) << 7);
  306. x &= 0xFDFDFDFDFDFDFDFDuLL;
  307. x ^= y & 0x5E5E5E5E5E5E5E5EuLL;
  308. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  309. x ^= y & 0xF3F3F3F3F3F3F3F3uLL;
  310. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  311. x ^= y & 0xF5F5F5F5F5F5F5F5uLL;
  312. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  313. x ^= y & 0x7878787878787878uLL;
  314. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  315. x ^= y & 0x7777777777777777uLL;
  316. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  317. x ^= y & 0x1515151515151515uLL;
  318. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  319. x ^= y & 0xA5A5A5A5A5A5A5A5uLL;
  320. a1 = x;
  321. a1 ^= (x & 0xF0F0F0F0F0F0F0F0uLL) >> 4;
  322. a2 = ((x & 0xCCCCCCCCCCCCCCCCuLL) >> 2) | ((x & 0x3333333333333333uLL) << 2);
  323. a3 = x & a1;
  324. a3 ^= (a3 & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  325. a3 ^= (((x << 1) & a1) ^ ((a1 << 1) & x)) & 0xAAAAAAAAAAAAAAAAuLL;
  326. a4 = a2 & a1;
  327. a4 ^= (a4 & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  328. a4 ^= (((a2 << 1) & a1) ^ ((a1 << 1) & a2)) & 0xAAAAAAAAAAAAAAAAuLL;
  329. a5 = (a3 & 0xCCCCCCCCCCCCCCCCuLL) >> 2;
  330. a3 ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCCCCCCCCCuLL;
  331. a4 = a5 & 0x2222222222222222uLL;
  332. a4 |= a4 >> 1;
  333. a4 ^= (a5 << 1) & 0x2222222222222222uLL;
  334. a3 ^= a4;
  335. a5 = a3 & 0xA0A0A0A0A0A0A0A0uLL;
  336. a5 |= a5 >> 1;
  337. a5 ^= (a3 << 1) & 0xA0A0A0A0A0A0A0A0uLL;
  338. a4 = a5 & 0xC0C0C0C0C0C0C0C0uLL;
  339. a6 = a4 >> 2;
  340. a4 ^= (a5 << 2) & 0xC0C0C0C0C0C0C0C0uLL;
  341. a5 = a6 & 0x2020202020202020uLL;
  342. a5 |= a5 >> 1;
  343. a5 ^= (a6 << 1) & 0x2020202020202020uLL;
  344. a4 |= a5;
  345. a3 ^= a4 >> 4;
  346. a3 &= 0x0F0F0F0F0F0F0F0FuLL;
  347. a2 = a3;
  348. a2 ^= (a3 & 0x0C0C0C0C0C0C0C0CuLL) >> 2;
  349. a4 = a3 & a2;
  350. a4 ^= (a4 & 0x0A0A0A0A0A0A0A0AuLL) >> 1;
  351. a4 ^= (((a3 << 1) & a2) ^ ((a2 << 1) & a3)) & 0x0A0A0A0A0A0A0A0AuLL;
  352. a5 = a4 & 0x0808080808080808uLL;
  353. a5 |= a5 >> 1;
  354. a5 ^= (a4 << 1) & 0x0808080808080808uLL;
  355. a4 ^= a5 >> 2;
  356. a4 &= 0x0303030303030303uLL;
  357. a4 ^= (a4 & 0x0202020202020202uLL) >> 1;
  358. a4 |= a4 << 2;
  359. a3 = a2 & a4;
  360. a3 ^= (a3 & 0x0A0A0A0A0A0A0A0AuLL) >> 1;
  361. a3 ^= (((a2 << 1) & a4) ^ ((a4 << 1) & a2)) & 0x0A0A0A0A0A0A0A0AuLL;
  362. a3 |= a3 << 4;
  363. a2 = ((a1 & 0xCCCCCCCCCCCCCCCCuLL) >> 2) | ((a1 & 0x3333333333333333uLL) << 2);
  364. x = a1 & a3;
  365. x ^= (x & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  366. x ^= (((a1 << 1) & a3) ^ ((a3 << 1) & a1)) & 0xAAAAAAAAAAAAAAAAuLL;
  367. a4 = a2 & a3;
  368. a4 ^= (a4 & 0xAAAAAAAAAAAAAAAAuLL) >> 1;
  369. a4 ^= (((a2 << 1) & a3) ^ ((a3 << 1) & a2)) & 0xAAAAAAAAAAAAAAAAuLL;
  370. a5 = (x & 0xCCCCCCCCCCCCCCCCuLL) >> 2;
  371. x ^= ((a4 << 2) ^ a4) & 0xCCCCCCCCCCCCCCCCuLL;
  372. a4 = a5 & 0x2222222222222222uLL;
  373. a4 |= a4 >> 1;
  374. a4 ^= (a5 << 1) & 0x2222222222222222uLL;
  375. x ^= a4;
  376. y = ((x & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((x & 0x0101010101010101uLL) << 7);
  377. x &= 0xB5B5B5B5B5B5B5B5uLL;
  378. x ^= y & 0x4040404040404040uLL;
  379. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  380. x ^= y & 0x8080808080808080uLL;
  381. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  382. x ^= y & 0x1616161616161616uLL;
  383. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  384. x ^= y & 0xEBEBEBEBEBEBEBEBuLL;
  385. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  386. x ^= y & 0x9797979797979797uLL;
  387. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  388. x ^= y & 0xFBFBFBFBFBFBFBFBuLL;
  389. y = ((y & 0xFEFEFEFEFEFEFEFEuLL) >> 1) | ((y & 0x0101010101010101uLL) << 7);
  390. x ^= y & 0x7D7D7D7D7D7D7D7DuLL;
  391. *w = x;
  392. }
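/*
 * Round-trip sketch (illustrative, not in the upstream file):
 * InvSubLong undoes SubLong on every byte lane, so applying both
 * leaves the word unchanged.
 */
#if 0
static void SubLong_roundtrip(void)
{
    u64 w = 0x0001020304050607uLL, orig = w;

    SubLong(&w);
    InvSubLong(&w);
    assert(w == orig);
}
#endif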
  393. static void ShiftRows(u64 *state)
  394. {
  395. unsigned char s[4];
  396. unsigned char *s0;
  397. int r;
  398. s0 = (unsigned char *)state;
  399. for (r = 0; r < 4; r++) {
  400. s[0] = s0[0*4 + r];
  401. s[1] = s0[1*4 + r];
  402. s[2] = s0[2*4 + r];
  403. s[3] = s0[3*4 + r];
  404. s0[0*4 + r] = s[(r+0) % 4];
  405. s0[1*4 + r] = s[(r+1) % 4];
  406. s0[2*4 + r] = s[(r+2) % 4];
  407. s0[3*4 + r] = s[(r+3) % 4];
  408. }
  409. }
  410. static void InvShiftRows(u64 *state)
  411. {
  412. unsigned char s[4];
  413. unsigned char *s0;
  414. int r;
  415. s0 = (unsigned char *)state;
  416. for (r = 0; r < 4; r++) {
  417. s[0] = s0[0*4 + r];
  418. s[1] = s0[1*4 + r];
  419. s[2] = s0[2*4 + r];
  420. s[3] = s0[3*4 + r];
  421. s0[0*4 + r] = s[(4-r) % 4];
  422. s0[1*4 + r] = s[(5-r) % 4];
  423. s0[2*4 + r] = s[(6-r) % 4];
  424. s0[3*4 + r] = s[(7-r) % 4];
  425. }
  426. }
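/*
 * Layout note and sketch (illustrative, not in the upstream file): the
 * 16 state bytes are stored column-major, so byte (row r, column c)
 * lives at offset c*4 + r.  ShiftRows therefore rotates row r left by
 * r positions and InvShiftRows rotates it back, making the two
 * functions exact inverses.
 */
#if 0
static void ShiftRows_roundtrip(u64 *state)
{
    ShiftRows(state);
    InvShiftRows(state);    /* state is back to its original value */
}
#endif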
  427. static void MixColumns(u64 *state)
  428. {
  429. uni s1;
  430. uni s;
  431. int c;
  432. for (c = 0; c < 2; c++) {
  433. s1.d = state[c];
  434. s.d = s1.d;
  435. s.d ^= ((s.d & 0xFFFF0000FFFF0000uLL) >> 16)
  436. | ((s.d & 0x0000FFFF0000FFFFuLL) << 16);
  437. s.d ^= ((s.d & 0xFF00FF00FF00FF00uLL) >> 8)
  438. | ((s.d & 0x00FF00FF00FF00FFuLL) << 8);
  439. s.d ^= s1.d;
  440. XtimeLong(&s1.d);
  441. s.d ^= s1.d;
  442. s.b[0] ^= s1.b[1];
  443. s.b[1] ^= s1.b[2];
  444. s.b[2] ^= s1.b[3];
  445. s.b[3] ^= s1.b[0];
  446. s.b[4] ^= s1.b[5];
  447. s.b[5] ^= s1.b[6];
  448. s.b[6] ^= s1.b[7];
  449. s.b[7] ^= s1.b[4];
  450. state[c] = s.d;
  451. }
  452. }
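/*
 * (Explanatory note, not in the upstream file.)  MixColumns above uses
 * the standard xtime decomposition: with t = a0 ^ a1 ^ a2 ^ a3, each
 * output byte of a column is
 *     b[i] = a[i] ^ t ^ {02}*(a[i] ^ a[(i+1)%4])
 * which expands to the usual [02 03 01 01] circulant row over GF(2^8).
 */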
  453. static void InvMixColumns(u64 *state)
  454. {
  455. uni s1;
  456. uni s;
  457. int c;
  458. for (c = 0; c < 2; c++) {
  459. s1.d = state[c];
  460. s.d = s1.d;
  461. s.d ^= ((s.d & 0xFFFF0000FFFF0000uLL) >> 16)
  462. | ((s.d & 0x0000FFFF0000FFFFuLL) << 16);
  463. s.d ^= ((s.d & 0xFF00FF00FF00FF00uLL) >> 8)
  464. | ((s.d & 0x00FF00FF00FF00FFuLL) << 8);
  465. s.d ^= s1.d;
  466. XtimeLong(&s1.d);
  467. s.d ^= s1.d;
  468. s.b[0] ^= s1.b[1];
  469. s.b[1] ^= s1.b[2];
  470. s.b[2] ^= s1.b[3];
  471. s.b[3] ^= s1.b[0];
  472. s.b[4] ^= s1.b[5];
  473. s.b[5] ^= s1.b[6];
  474. s.b[6] ^= s1.b[7];
  475. s.b[7] ^= s1.b[4];
  476. XtimeLong(&s1.d);
  477. s1.d ^= ((s1.d & 0xFFFF0000FFFF0000uLL) >> 16)
  478. | ((s1.d & 0x0000FFFF0000FFFFuLL) << 16);
  479. s.d ^= s1.d;
  480. XtimeLong(&s1.d);
  481. s1.d ^= ((s1.d & 0xFF00FF00FF00FF00uLL) >> 8)
  482. | ((s1.d & 0x00FF00FF00FF00FFuLL) << 8);
  483. s.d ^= s1.d;
  484. state[c] = s.d;
  485. }
  486. }
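/*
 * (Explanatory note, not in the upstream file.)  InvMixColumns reuses
 * the MixColumns kernel and then adds the two missing terms, using the
 * GF(2^8) identity
 *     [0e 0b 0d 09] = [02 03 01 01] ^ [04 00 04 00] ^ [08 08 08 08]
 * i.e. InvMixColumn(a)[i] = MixColumn(a)[i]
 *                           ^ {04}*(a[i] ^ a[(i+2)%4])
 *                           ^ {08}*(a[0] ^ a[1] ^ a[2] ^ a[3]).
 */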
  487. static void AddRoundKey(u64 *state, const u64 *w)
  488. {
  489. state[0] ^= w[0];
  490. state[1] ^= w[1];
  491. }
  492. static void Cipher(const unsigned char *in, unsigned char *out,
  493. const u64 *w, int nr)
  494. {
  495. u64 state[2];
  496. int i;
  497. memcpy(state, in, 16);
  498. AddRoundKey(state, w);
  499. for (i = 1; i < nr; i++) {
  500. SubLong(&state[0]);
  501. SubLong(&state[1]);
  502. ShiftRows(state);
  503. MixColumns(state);
  504. AddRoundKey(state, w + i*2);
  505. }
  506. SubLong(&state[0]);
  507. SubLong(&state[1]);
  508. ShiftRows(state);
  509. AddRoundKey(state, w + nr*2);
  510. memcpy(out, state, 16);
  511. }
  512. static void InvCipher(const unsigned char *in, unsigned char *out,
  513. const u64 *w, int nr)
  514. {
  515. u64 state[2];
  516. int i;
  517. memcpy(state, in, 16);
  518. AddRoundKey(state, w + nr*2);
  519. for (i = nr - 1; i > 0; i--) {
  520. InvShiftRows(state);
  521. InvSubLong(&state[0]);
  522. InvSubLong(&state[1]);
  523. AddRoundKey(state, w + i*2);
  524. InvMixColumns(state);
  525. }
  526. InvShiftRows(state);
  527. InvSubLong(&state[0]);
  528. InvSubLong(&state[1]);
  529. AddRoundKey(state, w);
  530. memcpy(out, state, 16);
  531. }
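/*
 * Round-trip sketch (illustrative, not in the upstream file): this
 * bit-sliced implementation uses one key schedule for both directions,
 * so InvCipher directly inverts Cipher with the same expanded key.
 */
#if 0
static void Cipher_roundtrip(const unsigned char in[16], const u64 *w, int nr)
{
    unsigned char ct[16], back[16];

    Cipher(in, ct, w, nr);
    InvCipher(ct, back, w, nr);
    /* back now equals in */
}
#endif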
  532. static void RotWord(u32 *x)
  533. {
  534. unsigned char *w0;
  535. unsigned char tmp;
  536. w0 = (unsigned char *)x;
  537. tmp = w0[0];
  538. w0[0] = w0[1];
  539. w0[1] = w0[2];
  540. w0[2] = w0[3];
  541. w0[3] = tmp;
  542. }
  543. static void KeyExpansion(const unsigned char *key, u64 *w,
  544. int nr, int nk)
  545. {
  546. u32 rcon;
  547. uni prev;
  548. u32 temp;
  549. int i, n;
  550. memcpy(w, key, nk*4);
  551. memcpy(&rcon, "\1\0\0\0", 4);
  552. n = nk/2;
  553. prev.d = w[n-1];
  554. for (i = n; i < (nr+1)*2; i++) {
  555. temp = prev.w[1];
  556. if (i % n == 0) {
  557. RotWord(&temp);
  558. SubWord(&temp);
  559. temp ^= rcon;
  560. XtimeWord(&rcon);
  561. } else if (nk > 6 && i % n == 2) {
  562. SubWord(&temp);
  563. }
  564. prev.d = w[i-n];
  565. prev.w[0] ^= temp;
  566. prev.w[1] ^= prev.w[0];
  567. w[i] = prev.d;
  568. }
  569. }
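/*
 * (Explanatory note, not in the upstream file.)  KeyExpansion packs the
 * schedule as (nr+1) round keys of two u64 words each, so every loop
 * iteration produces two 32-bit key-schedule words at once; that is why
 * the counters are halved (n = nk/2, limit (nr+1)*2).  rcon carries the
 * round constant in its first byte and is doubled in GF(2^8) by
 * XtimeWord each time it is consumed, giving 0x01, 0x02, 0x04, ...
 */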
  570. /**
  571. * Expand the cipher key into the encryption key schedule.
  572. */
  573. int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
  574. AES_KEY *key)
  575. {
  576. u64 *rk;
  577. if (!userKey || !key)
  578. return -1;
  579. if (bits != 128 && bits != 192 && bits != 256)
  580. return -2;
  581. rk = (u64*)key->rd_key;
  582. if (bits == 128)
  583. key->rounds = 10;
  584. else if (bits == 192)
  585. key->rounds = 12;
  586. else
  587. key->rounds = 14;
  588. KeyExpansion(userKey, rk, key->rounds, bits/32);
  589. return 0;
  590. }
  591. /**
  592. * Expand the cipher key into the decryption key schedule.
  593. */
  594. int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
  595. AES_KEY *key)
  596. {
  597. return AES_set_encrypt_key(userKey, bits, key);
  598. }
  599. /*
  600. * Encrypt a single block
  601. * in and out can overlap
  602. */
  603. void AES_encrypt(const unsigned char *in, unsigned char *out,
  604. const AES_KEY *key)
  605. {
  606. const u64 *rk;
  607. assert(in && out && key);
  608. rk = (u64*)key->rd_key;
  609. Cipher(in, out, rk, key->rounds);
  610. }
  611. /*
  612. * Decrypt a single block
  613. * in and out can overlap
  614. */
  615. void AES_decrypt(const unsigned char *in, unsigned char *out,
  616. const AES_KEY *key)
  617. {
  618. const u64 *rk;
  619. assert(in && out && key);
  620. rk = (u64*)key->rd_key;
  621. InvCipher(in, out, rk, key->rounds);
  622. }
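/*
 * Usage sketch (illustrative, not part of the upstream file) for the
 * public single-block API above; the expected ciphertext is the AES-128
 * example vector from FIPS-197 Appendix C.1.
 */
#if 0
static void aes_block_example(void)
{
    static const unsigned char key[16] = {
        0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
        0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f
    };
    static const unsigned char pt[16] = {
        0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,
        0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff
    };
    unsigned char ct[16], back[16];
    AES_KEY ks;

    AES_set_encrypt_key(key, 128, &ks);
    AES_encrypt(pt, ct, &ks);     /* 69 c4 e0 d8 6a 7b 04 30 ... */
    AES_set_decrypt_key(key, 128, &ks);
    AES_decrypt(ct, back, &ks);   /* back matches pt */
}
#endif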
  623. #elif !defined(AES_ASM)
  624. /*-
  625. Te0[x] = S [x].[02, 01, 01, 03];
  626. Te1[x] = S [x].[03, 02, 01, 01];
  627. Te2[x] = S [x].[01, 03, 02, 01];
  628. Te3[x] = S [x].[01, 01, 03, 02];
  629. Td0[x] = Si[x].[0e, 09, 0d, 0b];
  630. Td1[x] = Si[x].[0b, 0e, 09, 0d];
  631. Td2[x] = Si[x].[0d, 0b, 0e, 09];
  632. Td3[x] = Si[x].[09, 0d, 0b, 0e];
  633. Td4[x] = Si[x].[01];
  634. */
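/*
 * (Note, not in the upstream file.)  Te1..Te3 are byte rotations of
 * Te0, and Td1..Td3 of Td0, e.g. Te1[x] == (Te0[x] >> 8) | (Te0[x] << 24).
 * Keeping all four copies spends extra table space so the round
 * function needs no rotations.
 */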
  635. static const u32 Te0[256] = {
  636. 0xc66363a5U, 0xf87c7c84U, 0xee777799U, 0xf67b7b8dU,
  637. 0xfff2f20dU, 0xd66b6bbdU, 0xde6f6fb1U, 0x91c5c554U,
  638. 0x60303050U, 0x02010103U, 0xce6767a9U, 0x562b2b7dU,
  639. 0xe7fefe19U, 0xb5d7d762U, 0x4dababe6U, 0xec76769aU,
  640. 0x8fcaca45U, 0x1f82829dU, 0x89c9c940U, 0xfa7d7d87U,
  641. 0xeffafa15U, 0xb25959ebU, 0x8e4747c9U, 0xfbf0f00bU,
  642. 0x41adadecU, 0xb3d4d467U, 0x5fa2a2fdU, 0x45afafeaU,
  643. 0x239c9cbfU, 0x53a4a4f7U, 0xe4727296U, 0x9bc0c05bU,
  644. 0x75b7b7c2U, 0xe1fdfd1cU, 0x3d9393aeU, 0x4c26266aU,
  645. 0x6c36365aU, 0x7e3f3f41U, 0xf5f7f702U, 0x83cccc4fU,
  646. 0x6834345cU, 0x51a5a5f4U, 0xd1e5e534U, 0xf9f1f108U,
  647. 0xe2717193U, 0xabd8d873U, 0x62313153U, 0x2a15153fU,
  648. 0x0804040cU, 0x95c7c752U, 0x46232365U, 0x9dc3c35eU,
  649. 0x30181828U, 0x379696a1U, 0x0a05050fU, 0x2f9a9ab5U,
  650. 0x0e070709U, 0x24121236U, 0x1b80809bU, 0xdfe2e23dU,
  651. 0xcdebeb26U, 0x4e272769U, 0x7fb2b2cdU, 0xea75759fU,
  652. 0x1209091bU, 0x1d83839eU, 0x582c2c74U, 0x341a1a2eU,
  653. 0x361b1b2dU, 0xdc6e6eb2U, 0xb45a5aeeU, 0x5ba0a0fbU,
  654. 0xa45252f6U, 0x763b3b4dU, 0xb7d6d661U, 0x7db3b3ceU,
  655. 0x5229297bU, 0xdde3e33eU, 0x5e2f2f71U, 0x13848497U,
  656. 0xa65353f5U, 0xb9d1d168U, 0x00000000U, 0xc1eded2cU,
  657. 0x40202060U, 0xe3fcfc1fU, 0x79b1b1c8U, 0xb65b5bedU,
  658. 0xd46a6abeU, 0x8dcbcb46U, 0x67bebed9U, 0x7239394bU,
  659. 0x944a4adeU, 0x984c4cd4U, 0xb05858e8U, 0x85cfcf4aU,
  660. 0xbbd0d06bU, 0xc5efef2aU, 0x4faaaae5U, 0xedfbfb16U,
  661. 0x864343c5U, 0x9a4d4dd7U, 0x66333355U, 0x11858594U,
  662. 0x8a4545cfU, 0xe9f9f910U, 0x04020206U, 0xfe7f7f81U,
  663. 0xa05050f0U, 0x783c3c44U, 0x259f9fbaU, 0x4ba8a8e3U,
  664. 0xa25151f3U, 0x5da3a3feU, 0x804040c0U, 0x058f8f8aU,
  665. 0x3f9292adU, 0x219d9dbcU, 0x70383848U, 0xf1f5f504U,
  666. 0x63bcbcdfU, 0x77b6b6c1U, 0xafdada75U, 0x42212163U,
  667. 0x20101030U, 0xe5ffff1aU, 0xfdf3f30eU, 0xbfd2d26dU,
  668. 0x81cdcd4cU, 0x180c0c14U, 0x26131335U, 0xc3ecec2fU,
  669. 0xbe5f5fe1U, 0x359797a2U, 0x884444ccU, 0x2e171739U,
  670. 0x93c4c457U, 0x55a7a7f2U, 0xfc7e7e82U, 0x7a3d3d47U,
  671. 0xc86464acU, 0xba5d5de7U, 0x3219192bU, 0xe6737395U,
  672. 0xc06060a0U, 0x19818198U, 0x9e4f4fd1U, 0xa3dcdc7fU,
  673. 0x44222266U, 0x542a2a7eU, 0x3b9090abU, 0x0b888883U,
  674. 0x8c4646caU, 0xc7eeee29U, 0x6bb8b8d3U, 0x2814143cU,
  675. 0xa7dede79U, 0xbc5e5ee2U, 0x160b0b1dU, 0xaddbdb76U,
  676. 0xdbe0e03bU, 0x64323256U, 0x743a3a4eU, 0x140a0a1eU,
  677. 0x924949dbU, 0x0c06060aU, 0x4824246cU, 0xb85c5ce4U,
  678. 0x9fc2c25dU, 0xbdd3d36eU, 0x43acacefU, 0xc46262a6U,
  679. 0x399191a8U, 0x319595a4U, 0xd3e4e437U, 0xf279798bU,
  680. 0xd5e7e732U, 0x8bc8c843U, 0x6e373759U, 0xda6d6db7U,
  681. 0x018d8d8cU, 0xb1d5d564U, 0x9c4e4ed2U, 0x49a9a9e0U,
  682. 0xd86c6cb4U, 0xac5656faU, 0xf3f4f407U, 0xcfeaea25U,
  683. 0xca6565afU, 0xf47a7a8eU, 0x47aeaee9U, 0x10080818U,
  684. 0x6fbabad5U, 0xf0787888U, 0x4a25256fU, 0x5c2e2e72U,
  685. 0x381c1c24U, 0x57a6a6f1U, 0x73b4b4c7U, 0x97c6c651U,
  686. 0xcbe8e823U, 0xa1dddd7cU, 0xe874749cU, 0x3e1f1f21U,
  687. 0x964b4bddU, 0x61bdbddcU, 0x0d8b8b86U, 0x0f8a8a85U,
  688. 0xe0707090U, 0x7c3e3e42U, 0x71b5b5c4U, 0xcc6666aaU,
  689. 0x904848d8U, 0x06030305U, 0xf7f6f601U, 0x1c0e0e12U,
  690. 0xc26161a3U, 0x6a35355fU, 0xae5757f9U, 0x69b9b9d0U,
  691. 0x17868691U, 0x99c1c158U, 0x3a1d1d27U, 0x279e9eb9U,
  692. 0xd9e1e138U, 0xebf8f813U, 0x2b9898b3U, 0x22111133U,
  693. 0xd26969bbU, 0xa9d9d970U, 0x078e8e89U, 0x339494a7U,
  694. 0x2d9b9bb6U, 0x3c1e1e22U, 0x15878792U, 0xc9e9e920U,
  695. 0x87cece49U, 0xaa5555ffU, 0x50282878U, 0xa5dfdf7aU,
  696. 0x038c8c8fU, 0x59a1a1f8U, 0x09898980U, 0x1a0d0d17U,
  697. 0x65bfbfdaU, 0xd7e6e631U, 0x844242c6U, 0xd06868b8U,
  698. 0x824141c3U, 0x299999b0U, 0x5a2d2d77U, 0x1e0f0f11U,
  699. 0x7bb0b0cbU, 0xa85454fcU, 0x6dbbbbd6U, 0x2c16163aU,
  700. };
  701. static const u32 Te1[256] = {
  702. 0xa5c66363U, 0x84f87c7cU, 0x99ee7777U, 0x8df67b7bU,
  703. 0x0dfff2f2U, 0xbdd66b6bU, 0xb1de6f6fU, 0x5491c5c5U,
  704. 0x50603030U, 0x03020101U, 0xa9ce6767U, 0x7d562b2bU,
  705. 0x19e7fefeU, 0x62b5d7d7U, 0xe64dababU, 0x9aec7676U,
  706. 0x458fcacaU, 0x9d1f8282U, 0x4089c9c9U, 0x87fa7d7dU,
  707. 0x15effafaU, 0xebb25959U, 0xc98e4747U, 0x0bfbf0f0U,
  708. 0xec41adadU, 0x67b3d4d4U, 0xfd5fa2a2U, 0xea45afafU,
  709. 0xbf239c9cU, 0xf753a4a4U, 0x96e47272U, 0x5b9bc0c0U,
  710. 0xc275b7b7U, 0x1ce1fdfdU, 0xae3d9393U, 0x6a4c2626U,
  711. 0x5a6c3636U, 0x417e3f3fU, 0x02f5f7f7U, 0x4f83ccccU,
  712. 0x5c683434U, 0xf451a5a5U, 0x34d1e5e5U, 0x08f9f1f1U,
  713. 0x93e27171U, 0x73abd8d8U, 0x53623131U, 0x3f2a1515U,
  714. 0x0c080404U, 0x5295c7c7U, 0x65462323U, 0x5e9dc3c3U,
  715. 0x28301818U, 0xa1379696U, 0x0f0a0505U, 0xb52f9a9aU,
  716. 0x090e0707U, 0x36241212U, 0x9b1b8080U, 0x3ddfe2e2U,
  717. 0x26cdebebU, 0x694e2727U, 0xcd7fb2b2U, 0x9fea7575U,
  718. 0x1b120909U, 0x9e1d8383U, 0x74582c2cU, 0x2e341a1aU,
  719. 0x2d361b1bU, 0xb2dc6e6eU, 0xeeb45a5aU, 0xfb5ba0a0U,
  720. 0xf6a45252U, 0x4d763b3bU, 0x61b7d6d6U, 0xce7db3b3U,
  721. 0x7b522929U, 0x3edde3e3U, 0x715e2f2fU, 0x97138484U,
  722. 0xf5a65353U, 0x68b9d1d1U, 0x00000000U, 0x2cc1ededU,
  723. 0x60402020U, 0x1fe3fcfcU, 0xc879b1b1U, 0xedb65b5bU,
  724. 0xbed46a6aU, 0x468dcbcbU, 0xd967bebeU, 0x4b723939U,
  725. 0xde944a4aU, 0xd4984c4cU, 0xe8b05858U, 0x4a85cfcfU,
  726. 0x6bbbd0d0U, 0x2ac5efefU, 0xe54faaaaU, 0x16edfbfbU,
  727. 0xc5864343U, 0xd79a4d4dU, 0x55663333U, 0x94118585U,
  728. 0xcf8a4545U, 0x10e9f9f9U, 0x06040202U, 0x81fe7f7fU,
  729. 0xf0a05050U, 0x44783c3cU, 0xba259f9fU, 0xe34ba8a8U,
  730. 0xf3a25151U, 0xfe5da3a3U, 0xc0804040U, 0x8a058f8fU,
  731. 0xad3f9292U, 0xbc219d9dU, 0x48703838U, 0x04f1f5f5U,
  732. 0xdf63bcbcU, 0xc177b6b6U, 0x75afdadaU, 0x63422121U,
  733. 0x30201010U, 0x1ae5ffffU, 0x0efdf3f3U, 0x6dbfd2d2U,
  734. 0x4c81cdcdU, 0x14180c0cU, 0x35261313U, 0x2fc3ececU,
  735. 0xe1be5f5fU, 0xa2359797U, 0xcc884444U, 0x392e1717U,
  736. 0x5793c4c4U, 0xf255a7a7U, 0x82fc7e7eU, 0x477a3d3dU,
  737. 0xacc86464U, 0xe7ba5d5dU, 0x2b321919U, 0x95e67373U,
  738. 0xa0c06060U, 0x98198181U, 0xd19e4f4fU, 0x7fa3dcdcU,
  739. 0x66442222U, 0x7e542a2aU, 0xab3b9090U, 0x830b8888U,
  740. 0xca8c4646U, 0x29c7eeeeU, 0xd36bb8b8U, 0x3c281414U,
  741. 0x79a7dedeU, 0xe2bc5e5eU, 0x1d160b0bU, 0x76addbdbU,
  742. 0x3bdbe0e0U, 0x56643232U, 0x4e743a3aU, 0x1e140a0aU,
  743. 0xdb924949U, 0x0a0c0606U, 0x6c482424U, 0xe4b85c5cU,
  744. 0x5d9fc2c2U, 0x6ebdd3d3U, 0xef43acacU, 0xa6c46262U,
  745. 0xa8399191U, 0xa4319595U, 0x37d3e4e4U, 0x8bf27979U,
  746. 0x32d5e7e7U, 0x438bc8c8U, 0x596e3737U, 0xb7da6d6dU,
  747. 0x8c018d8dU, 0x64b1d5d5U, 0xd29c4e4eU, 0xe049a9a9U,
  748. 0xb4d86c6cU, 0xfaac5656U, 0x07f3f4f4U, 0x25cfeaeaU,
  749. 0xafca6565U, 0x8ef47a7aU, 0xe947aeaeU, 0x18100808U,
  750. 0xd56fbabaU, 0x88f07878U, 0x6f4a2525U, 0x725c2e2eU,
  751. 0x24381c1cU, 0xf157a6a6U, 0xc773b4b4U, 0x5197c6c6U,
  752. 0x23cbe8e8U, 0x7ca1ddddU, 0x9ce87474U, 0x213e1f1fU,
  753. 0xdd964b4bU, 0xdc61bdbdU, 0x860d8b8bU, 0x850f8a8aU,
  754. 0x90e07070U, 0x427c3e3eU, 0xc471b5b5U, 0xaacc6666U,
  755. 0xd8904848U, 0x05060303U, 0x01f7f6f6U, 0x121c0e0eU,
  756. 0xa3c26161U, 0x5f6a3535U, 0xf9ae5757U, 0xd069b9b9U,
  757. 0x91178686U, 0x5899c1c1U, 0x273a1d1dU, 0xb9279e9eU,
  758. 0x38d9e1e1U, 0x13ebf8f8U, 0xb32b9898U, 0x33221111U,
  759. 0xbbd26969U, 0x70a9d9d9U, 0x89078e8eU, 0xa7339494U,
  760. 0xb62d9b9bU, 0x223c1e1eU, 0x92158787U, 0x20c9e9e9U,
  761. 0x4987ceceU, 0xffaa5555U, 0x78502828U, 0x7aa5dfdfU,
  762. 0x8f038c8cU, 0xf859a1a1U, 0x80098989U, 0x171a0d0dU,
  763. 0xda65bfbfU, 0x31d7e6e6U, 0xc6844242U, 0xb8d06868U,
  764. 0xc3824141U, 0xb0299999U, 0x775a2d2dU, 0x111e0f0fU,
  765. 0xcb7bb0b0U, 0xfca85454U, 0xd66dbbbbU, 0x3a2c1616U,
  766. };
  767. static const u32 Te2[256] = {
  768. 0x63a5c663U, 0x7c84f87cU, 0x7799ee77U, 0x7b8df67bU,
  769. 0xf20dfff2U, 0x6bbdd66bU, 0x6fb1de6fU, 0xc55491c5U,
  770. 0x30506030U, 0x01030201U, 0x67a9ce67U, 0x2b7d562bU,
  771. 0xfe19e7feU, 0xd762b5d7U, 0xabe64dabU, 0x769aec76U,
  772. 0xca458fcaU, 0x829d1f82U, 0xc94089c9U, 0x7d87fa7dU,
  773. 0xfa15effaU, 0x59ebb259U, 0x47c98e47U, 0xf00bfbf0U,
  774. 0xadec41adU, 0xd467b3d4U, 0xa2fd5fa2U, 0xafea45afU,
  775. 0x9cbf239cU, 0xa4f753a4U, 0x7296e472U, 0xc05b9bc0U,
  776. 0xb7c275b7U, 0xfd1ce1fdU, 0x93ae3d93U, 0x266a4c26U,
  777. 0x365a6c36U, 0x3f417e3fU, 0xf702f5f7U, 0xcc4f83ccU,
  778. 0x345c6834U, 0xa5f451a5U, 0xe534d1e5U, 0xf108f9f1U,
  779. 0x7193e271U, 0xd873abd8U, 0x31536231U, 0x153f2a15U,
  780. 0x040c0804U, 0xc75295c7U, 0x23654623U, 0xc35e9dc3U,
  781. 0x18283018U, 0x96a13796U, 0x050f0a05U, 0x9ab52f9aU,
  782. 0x07090e07U, 0x12362412U, 0x809b1b80U, 0xe23ddfe2U,
  783. 0xeb26cdebU, 0x27694e27U, 0xb2cd7fb2U, 0x759fea75U,
  784. 0x091b1209U, 0x839e1d83U, 0x2c74582cU, 0x1a2e341aU,
  785. 0x1b2d361bU, 0x6eb2dc6eU, 0x5aeeb45aU, 0xa0fb5ba0U,
  786. 0x52f6a452U, 0x3b4d763bU, 0xd661b7d6U, 0xb3ce7db3U,
  787. 0x297b5229U, 0xe33edde3U, 0x2f715e2fU, 0x84971384U,
  788. 0x53f5a653U, 0xd168b9d1U, 0x00000000U, 0xed2cc1edU,
  789. 0x20604020U, 0xfc1fe3fcU, 0xb1c879b1U, 0x5bedb65bU,
  790. 0x6abed46aU, 0xcb468dcbU, 0xbed967beU, 0x394b7239U,
  791. 0x4ade944aU, 0x4cd4984cU, 0x58e8b058U, 0xcf4a85cfU,
  792. 0xd06bbbd0U, 0xef2ac5efU, 0xaae54faaU, 0xfb16edfbU,
  793. 0x43c58643U, 0x4dd79a4dU, 0x33556633U, 0x85941185U,
  794. 0x45cf8a45U, 0xf910e9f9U, 0x02060402U, 0x7f81fe7fU,
  795. 0x50f0a050U, 0x3c44783cU, 0x9fba259fU, 0xa8e34ba8U,
  796. 0x51f3a251U, 0xa3fe5da3U, 0x40c08040U, 0x8f8a058fU,
  797. 0x92ad3f92U, 0x9dbc219dU, 0x38487038U, 0xf504f1f5U,
  798. 0xbcdf63bcU, 0xb6c177b6U, 0xda75afdaU, 0x21634221U,
  799. 0x10302010U, 0xff1ae5ffU, 0xf30efdf3U, 0xd26dbfd2U,
  800. 0xcd4c81cdU, 0x0c14180cU, 0x13352613U, 0xec2fc3ecU,
  801. 0x5fe1be5fU, 0x97a23597U, 0x44cc8844U, 0x17392e17U,
  802. 0xc45793c4U, 0xa7f255a7U, 0x7e82fc7eU, 0x3d477a3dU,
  803. 0x64acc864U, 0x5de7ba5dU, 0x192b3219U, 0x7395e673U,
  804. 0x60a0c060U, 0x81981981U, 0x4fd19e4fU, 0xdc7fa3dcU,
  805. 0x22664422U, 0x2a7e542aU, 0x90ab3b90U, 0x88830b88U,
  806. 0x46ca8c46U, 0xee29c7eeU, 0xb8d36bb8U, 0x143c2814U,
  807. 0xde79a7deU, 0x5ee2bc5eU, 0x0b1d160bU, 0xdb76addbU,
  808. 0xe03bdbe0U, 0x32566432U, 0x3a4e743aU, 0x0a1e140aU,
  809. 0x49db9249U, 0x060a0c06U, 0x246c4824U, 0x5ce4b85cU,
  810. 0xc25d9fc2U, 0xd36ebdd3U, 0xacef43acU, 0x62a6c462U,
  811. 0x91a83991U, 0x95a43195U, 0xe437d3e4U, 0x798bf279U,
  812. 0xe732d5e7U, 0xc8438bc8U, 0x37596e37U, 0x6db7da6dU,
  813. 0x8d8c018dU, 0xd564b1d5U, 0x4ed29c4eU, 0xa9e049a9U,
  814. 0x6cb4d86cU, 0x56faac56U, 0xf407f3f4U, 0xea25cfeaU,
  815. 0x65afca65U, 0x7a8ef47aU, 0xaee947aeU, 0x08181008U,
  816. 0xbad56fbaU, 0x7888f078U, 0x256f4a25U, 0x2e725c2eU,
  817. 0x1c24381cU, 0xa6f157a6U, 0xb4c773b4U, 0xc65197c6U,
  818. 0xe823cbe8U, 0xdd7ca1ddU, 0x749ce874U, 0x1f213e1fU,
  819. 0x4bdd964bU, 0xbddc61bdU, 0x8b860d8bU, 0x8a850f8aU,
  820. 0x7090e070U, 0x3e427c3eU, 0xb5c471b5U, 0x66aacc66U,
  821. 0x48d89048U, 0x03050603U, 0xf601f7f6U, 0x0e121c0eU,
  822. 0x61a3c261U, 0x355f6a35U, 0x57f9ae57U, 0xb9d069b9U,
  823. 0x86911786U, 0xc15899c1U, 0x1d273a1dU, 0x9eb9279eU,
  824. 0xe138d9e1U, 0xf813ebf8U, 0x98b32b98U, 0x11332211U,
  825. 0x69bbd269U, 0xd970a9d9U, 0x8e89078eU, 0x94a73394U,
  826. 0x9bb62d9bU, 0x1e223c1eU, 0x87921587U, 0xe920c9e9U,
  827. 0xce4987ceU, 0x55ffaa55U, 0x28785028U, 0xdf7aa5dfU,
  828. 0x8c8f038cU, 0xa1f859a1U, 0x89800989U, 0x0d171a0dU,
  829. 0xbfda65bfU, 0xe631d7e6U, 0x42c68442U, 0x68b8d068U,
  830. 0x41c38241U, 0x99b02999U, 0x2d775a2dU, 0x0f111e0fU,
  831. 0xb0cb7bb0U, 0x54fca854U, 0xbbd66dbbU, 0x163a2c16U,
  832. };
  833. static const u32 Te3[256] = {
  834. 0x6363a5c6U, 0x7c7c84f8U, 0x777799eeU, 0x7b7b8df6U,
  835. 0xf2f20dffU, 0x6b6bbdd6U, 0x6f6fb1deU, 0xc5c55491U,
  836. 0x30305060U, 0x01010302U, 0x6767a9ceU, 0x2b2b7d56U,
  837. 0xfefe19e7U, 0xd7d762b5U, 0xababe64dU, 0x76769aecU,
  838. 0xcaca458fU, 0x82829d1fU, 0xc9c94089U, 0x7d7d87faU,
  839. 0xfafa15efU, 0x5959ebb2U, 0x4747c98eU, 0xf0f00bfbU,
  840. 0xadadec41U, 0xd4d467b3U, 0xa2a2fd5fU, 0xafafea45U,
  841. 0x9c9cbf23U, 0xa4a4f753U, 0x727296e4U, 0xc0c05b9bU,
  842. 0xb7b7c275U, 0xfdfd1ce1U, 0x9393ae3dU, 0x26266a4cU,
  843. 0x36365a6cU, 0x3f3f417eU, 0xf7f702f5U, 0xcccc4f83U,
  844. 0x34345c68U, 0xa5a5f451U, 0xe5e534d1U, 0xf1f108f9U,
  845. 0x717193e2U, 0xd8d873abU, 0x31315362U, 0x15153f2aU,
  846. 0x04040c08U, 0xc7c75295U, 0x23236546U, 0xc3c35e9dU,
  847. 0x18182830U, 0x9696a137U, 0x05050f0aU, 0x9a9ab52fU,
  848. 0x0707090eU, 0x12123624U, 0x80809b1bU, 0xe2e23ddfU,
  849. 0xebeb26cdU, 0x2727694eU, 0xb2b2cd7fU, 0x75759feaU,
  850. 0x09091b12U, 0x83839e1dU, 0x2c2c7458U, 0x1a1a2e34U,
  851. 0x1b1b2d36U, 0x6e6eb2dcU, 0x5a5aeeb4U, 0xa0a0fb5bU,
  852. 0x5252f6a4U, 0x3b3b4d76U, 0xd6d661b7U, 0xb3b3ce7dU,
  853. 0x29297b52U, 0xe3e33eddU, 0x2f2f715eU, 0x84849713U,
  854. 0x5353f5a6U, 0xd1d168b9U, 0x00000000U, 0xeded2cc1U,
  855. 0x20206040U, 0xfcfc1fe3U, 0xb1b1c879U, 0x5b5bedb6U,
  856. 0x6a6abed4U, 0xcbcb468dU, 0xbebed967U, 0x39394b72U,
  857. 0x4a4ade94U, 0x4c4cd498U, 0x5858e8b0U, 0xcfcf4a85U,
  858. 0xd0d06bbbU, 0xefef2ac5U, 0xaaaae54fU, 0xfbfb16edU,
  859. 0x4343c586U, 0x4d4dd79aU, 0x33335566U, 0x85859411U,
  860. 0x4545cf8aU, 0xf9f910e9U, 0x02020604U, 0x7f7f81feU,
  861. 0x5050f0a0U, 0x3c3c4478U, 0x9f9fba25U, 0xa8a8e34bU,
  862. 0x5151f3a2U, 0xa3a3fe5dU, 0x4040c080U, 0x8f8f8a05U,
  863. 0x9292ad3fU, 0x9d9dbc21U, 0x38384870U, 0xf5f504f1U,
  864. 0xbcbcdf63U, 0xb6b6c177U, 0xdada75afU, 0x21216342U,
  865. 0x10103020U, 0xffff1ae5U, 0xf3f30efdU, 0xd2d26dbfU,
  866. 0xcdcd4c81U, 0x0c0c1418U, 0x13133526U, 0xecec2fc3U,
  867. 0x5f5fe1beU, 0x9797a235U, 0x4444cc88U, 0x1717392eU,
  868. 0xc4c45793U, 0xa7a7f255U, 0x7e7e82fcU, 0x3d3d477aU,
  869. 0x6464acc8U, 0x5d5de7baU, 0x19192b32U, 0x737395e6U,
  870. 0x6060a0c0U, 0x81819819U, 0x4f4fd19eU, 0xdcdc7fa3U,
  871. 0x22226644U, 0x2a2a7e54U, 0x9090ab3bU, 0x8888830bU,
  872. 0x4646ca8cU, 0xeeee29c7U, 0xb8b8d36bU, 0x14143c28U,
  873. 0xdede79a7U, 0x5e5ee2bcU, 0x0b0b1d16U, 0xdbdb76adU,
  874. 0xe0e03bdbU, 0x32325664U, 0x3a3a4e74U, 0x0a0a1e14U,
  875. 0x4949db92U, 0x06060a0cU, 0x24246c48U, 0x5c5ce4b8U,
  876. 0xc2c25d9fU, 0xd3d36ebdU, 0xacacef43U, 0x6262a6c4U,
  877. 0x9191a839U, 0x9595a431U, 0xe4e437d3U, 0x79798bf2U,
  878. 0xe7e732d5U, 0xc8c8438bU, 0x3737596eU, 0x6d6db7daU,
  879. 0x8d8d8c01U, 0xd5d564b1U, 0x4e4ed29cU, 0xa9a9e049U,
  880. 0x6c6cb4d8U, 0x5656faacU, 0xf4f407f3U, 0xeaea25cfU,
  881. 0x6565afcaU, 0x7a7a8ef4U, 0xaeaee947U, 0x08081810U,
  882. 0xbabad56fU, 0x787888f0U, 0x25256f4aU, 0x2e2e725cU,
  883. 0x1c1c2438U, 0xa6a6f157U, 0xb4b4c773U, 0xc6c65197U,
  884. 0xe8e823cbU, 0xdddd7ca1U, 0x74749ce8U, 0x1f1f213eU,
  885. 0x4b4bdd96U, 0xbdbddc61U, 0x8b8b860dU, 0x8a8a850fU,
  886. 0x707090e0U, 0x3e3e427cU, 0xb5b5c471U, 0x6666aaccU,
  887. 0x4848d890U, 0x03030506U, 0xf6f601f7U, 0x0e0e121cU,
  888. 0x6161a3c2U, 0x35355f6aU, 0x5757f9aeU, 0xb9b9d069U,
  889. 0x86869117U, 0xc1c15899U, 0x1d1d273aU, 0x9e9eb927U,
  890. 0xe1e138d9U, 0xf8f813ebU, 0x9898b32bU, 0x11113322U,
  891. 0x6969bbd2U, 0xd9d970a9U, 0x8e8e8907U, 0x9494a733U,
  892. 0x9b9bb62dU, 0x1e1e223cU, 0x87879215U, 0xe9e920c9U,
  893. 0xcece4987U, 0x5555ffaaU, 0x28287850U, 0xdfdf7aa5U,
  894. 0x8c8c8f03U, 0xa1a1f859U, 0x89898009U, 0x0d0d171aU,
  895. 0xbfbfda65U, 0xe6e631d7U, 0x4242c684U, 0x6868b8d0U,
  896. 0x4141c382U, 0x9999b029U, 0x2d2d775aU, 0x0f0f111eU,
  897. 0xb0b0cb7bU, 0x5454fca8U, 0xbbbbd66dU, 0x16163a2cU,
  898. };
  899. static const u32 Td0[256] = {
  900. 0x51f4a750U, 0x7e416553U, 0x1a17a4c3U, 0x3a275e96U,
  901. 0x3bab6bcbU, 0x1f9d45f1U, 0xacfa58abU, 0x4be30393U,
  902. 0x2030fa55U, 0xad766df6U, 0x88cc7691U, 0xf5024c25U,
  903. 0x4fe5d7fcU, 0xc52acbd7U, 0x26354480U, 0xb562a38fU,
  904. 0xdeb15a49U, 0x25ba1b67U, 0x45ea0e98U, 0x5dfec0e1U,
  905. 0xc32f7502U, 0x814cf012U, 0x8d4697a3U, 0x6bd3f9c6U,
  906. 0x038f5fe7U, 0x15929c95U, 0xbf6d7aebU, 0x955259daU,
  907. 0xd4be832dU, 0x587421d3U, 0x49e06929U, 0x8ec9c844U,
  908. 0x75c2896aU, 0xf48e7978U, 0x99583e6bU, 0x27b971ddU,
  909. 0xbee14fb6U, 0xf088ad17U, 0xc920ac66U, 0x7dce3ab4U,
  910. 0x63df4a18U, 0xe51a3182U, 0x97513360U, 0x62537f45U,
  911. 0xb16477e0U, 0xbb6bae84U, 0xfe81a01cU, 0xf9082b94U,
  912. 0x70486858U, 0x8f45fd19U, 0x94de6c87U, 0x527bf8b7U,
  913. 0xab73d323U, 0x724b02e2U, 0xe31f8f57U, 0x6655ab2aU,
  914. 0xb2eb2807U, 0x2fb5c203U, 0x86c57b9aU, 0xd33708a5U,
  915. 0x302887f2U, 0x23bfa5b2U, 0x02036abaU, 0xed16825cU,
  916. 0x8acf1c2bU, 0xa779b492U, 0xf307f2f0U, 0x4e69e2a1U,
  917. 0x65daf4cdU, 0x0605bed5U, 0xd134621fU, 0xc4a6fe8aU,
  918. 0x342e539dU, 0xa2f355a0U, 0x058ae132U, 0xa4f6eb75U,
  919. 0x0b83ec39U, 0x4060efaaU, 0x5e719f06U, 0xbd6e1051U,
  920. 0x3e218af9U, 0x96dd063dU, 0xdd3e05aeU, 0x4de6bd46U,
  921. 0x91548db5U, 0x71c45d05U, 0x0406d46fU, 0x605015ffU,
  922. 0x1998fb24U, 0xd6bde997U, 0x894043ccU, 0x67d99e77U,
  923. 0xb0e842bdU, 0x07898b88U, 0xe7195b38U, 0x79c8eedbU,
  924. 0xa17c0a47U, 0x7c420fe9U, 0xf8841ec9U, 0x00000000U,
  925. 0x09808683U, 0x322bed48U, 0x1e1170acU, 0x6c5a724eU,
  926. 0xfd0efffbU, 0x0f853856U, 0x3daed51eU, 0x362d3927U,
  927. 0x0a0fd964U, 0x685ca621U, 0x9b5b54d1U, 0x24362e3aU,
  928. 0x0c0a67b1U, 0x9357e70fU, 0xb4ee96d2U, 0x1b9b919eU,
  929. 0x80c0c54fU, 0x61dc20a2U, 0x5a774b69U, 0x1c121a16U,
  930. 0xe293ba0aU, 0xc0a02ae5U, 0x3c22e043U, 0x121b171dU,
  931. 0x0e090d0bU, 0xf28bc7adU, 0x2db6a8b9U, 0x141ea9c8U,
  932. 0x57f11985U, 0xaf75074cU, 0xee99ddbbU, 0xa37f60fdU,
  933. 0xf701269fU, 0x5c72f5bcU, 0x44663bc5U, 0x5bfb7e34U,
  934. 0x8b432976U, 0xcb23c6dcU, 0xb6edfc68U, 0xb8e4f163U,
  935. 0xd731dccaU, 0x42638510U, 0x13972240U, 0x84c61120U,
  936. 0x854a247dU, 0xd2bb3df8U, 0xaef93211U, 0xc729a16dU,
  937. 0x1d9e2f4bU, 0xdcb230f3U, 0x0d8652ecU, 0x77c1e3d0U,
  938. 0x2bb3166cU, 0xa970b999U, 0x119448faU, 0x47e96422U,
  939. 0xa8fc8cc4U, 0xa0f03f1aU, 0x567d2cd8U, 0x223390efU,
  940. 0x87494ec7U, 0xd938d1c1U, 0x8ccaa2feU, 0x98d40b36U,
  941. 0xa6f581cfU, 0xa57ade28U, 0xdab78e26U, 0x3fadbfa4U,
  942. 0x2c3a9de4U, 0x5078920dU, 0x6a5fcc9bU, 0x547e4662U,
  943. 0xf68d13c2U, 0x90d8b8e8U, 0x2e39f75eU, 0x82c3aff5U,
  944. 0x9f5d80beU, 0x69d0937cU, 0x6fd52da9U, 0xcf2512b3U,
  945. 0xc8ac993bU, 0x10187da7U, 0xe89c636eU, 0xdb3bbb7bU,
  946. 0xcd267809U, 0x6e5918f4U, 0xec9ab701U, 0x834f9aa8U,
  947. 0xe6956e65U, 0xaaffe67eU, 0x21bccf08U, 0xef15e8e6U,
  948. 0xbae79bd9U, 0x4a6f36ceU, 0xea9f09d4U, 0x29b07cd6U,
  949. 0x31a4b2afU, 0x2a3f2331U, 0xc6a59430U, 0x35a266c0U,
  950. 0x744ebc37U, 0xfc82caa6U, 0xe090d0b0U, 0x33a7d815U,
  951. 0xf104984aU, 0x41ecdaf7U, 0x7fcd500eU, 0x1791f62fU,
  952. 0x764dd68dU, 0x43efb04dU, 0xccaa4d54U, 0xe49604dfU,
  953. 0x9ed1b5e3U, 0x4c6a881bU, 0xc12c1fb8U, 0x4665517fU,
  954. 0x9d5eea04U, 0x018c355dU, 0xfa877473U, 0xfb0b412eU,
  955. 0xb3671d5aU, 0x92dbd252U, 0xe9105633U, 0x6dd64713U,
  956. 0x9ad7618cU, 0x37a10c7aU, 0x59f8148eU, 0xeb133c89U,
  957. 0xcea927eeU, 0xb761c935U, 0xe11ce5edU, 0x7a47b13cU,
  958. 0x9cd2df59U, 0x55f2733fU, 0x1814ce79U, 0x73c737bfU,
  959. 0x53f7cdeaU, 0x5ffdaa5bU, 0xdf3d6f14U, 0x7844db86U,
  960. 0xcaaff381U, 0xb968c43eU, 0x3824342cU, 0xc2a3405fU,
  961. 0x161dc372U, 0xbce2250cU, 0x283c498bU, 0xff0d9541U,
  962. 0x39a80171U, 0x080cb3deU, 0xd8b4e49cU, 0x6456c190U,
  963. 0x7bcb8461U, 0xd532b670U, 0x486c5c74U, 0xd0b85742U,
  964. };
  965. static const u32 Td1[256] = {
  966. 0x5051f4a7U, 0x537e4165U, 0xc31a17a4U, 0x963a275eU,
  967. 0xcb3bab6bU, 0xf11f9d45U, 0xabacfa58U, 0x934be303U,
  968. 0x552030faU, 0xf6ad766dU, 0x9188cc76U, 0x25f5024cU,
  969. 0xfc4fe5d7U, 0xd7c52acbU, 0x80263544U, 0x8fb562a3U,
  970. 0x49deb15aU, 0x6725ba1bU, 0x9845ea0eU, 0xe15dfec0U,
  971. 0x02c32f75U, 0x12814cf0U, 0xa38d4697U, 0xc66bd3f9U,
  972. 0xe7038f5fU, 0x9515929cU, 0xebbf6d7aU, 0xda955259U,
  973. 0x2dd4be83U, 0xd3587421U, 0x2949e069U, 0x448ec9c8U,
  974. 0x6a75c289U, 0x78f48e79U, 0x6b99583eU, 0xdd27b971U,
  975. 0xb6bee14fU, 0x17f088adU, 0x66c920acU, 0xb47dce3aU,
  976. 0x1863df4aU, 0x82e51a31U, 0x60975133U, 0x4562537fU,
  977. 0xe0b16477U, 0x84bb6baeU, 0x1cfe81a0U, 0x94f9082bU,
  978. 0x58704868U, 0x198f45fdU, 0x8794de6cU, 0xb7527bf8U,
  979. 0x23ab73d3U, 0xe2724b02U, 0x57e31f8fU, 0x2a6655abU,
  980. 0x07b2eb28U, 0x032fb5c2U, 0x9a86c57bU, 0xa5d33708U,
  981. 0xf2302887U, 0xb223bfa5U, 0xba02036aU, 0x5ced1682U,
  982. 0x2b8acf1cU, 0x92a779b4U, 0xf0f307f2U, 0xa14e69e2U,
  983. 0xcd65daf4U, 0xd50605beU, 0x1fd13462U, 0x8ac4a6feU,
  984. 0x9d342e53U, 0xa0a2f355U, 0x32058ae1U, 0x75a4f6ebU,
  985. 0x390b83ecU, 0xaa4060efU, 0x065e719fU, 0x51bd6e10U,
  986. 0xf93e218aU, 0x3d96dd06U, 0xaedd3e05U, 0x464de6bdU,
  987. 0xb591548dU, 0x0571c45dU, 0x6f0406d4U, 0xff605015U,
  988. 0x241998fbU, 0x97d6bde9U, 0xcc894043U, 0x7767d99eU,
  989. 0xbdb0e842U, 0x8807898bU, 0x38e7195bU, 0xdb79c8eeU,
  990. 0x47a17c0aU, 0xe97c420fU, 0xc9f8841eU, 0x00000000U,
  991. 0x83098086U, 0x48322bedU, 0xac1e1170U, 0x4e6c5a72U,
  992. 0xfbfd0effU, 0x560f8538U, 0x1e3daed5U, 0x27362d39U,
  993. 0x640a0fd9U, 0x21685ca6U, 0xd19b5b54U, 0x3a24362eU,
  994. 0xb10c0a67U, 0x0f9357e7U, 0xd2b4ee96U, 0x9e1b9b91U,
  995. 0x4f80c0c5U, 0xa261dc20U, 0x695a774bU, 0x161c121aU,
  996. 0x0ae293baU, 0xe5c0a02aU, 0x433c22e0U, 0x1d121b17U,
  997. 0x0b0e090dU, 0xadf28bc7U, 0xb92db6a8U, 0xc8141ea9U,
  998. 0x8557f119U, 0x4caf7507U, 0xbbee99ddU, 0xfda37f60U,
  999. 0x9ff70126U, 0xbc5c72f5U, 0xc544663bU, 0x345bfb7eU,
  1000. 0x768b4329U, 0xdccb23c6U, 0x68b6edfcU, 0x63b8e4f1U,
  1001. 0xcad731dcU, 0x10426385U, 0x40139722U, 0x2084c611U,
  1002. 0x7d854a24U, 0xf8d2bb3dU, 0x11aef932U, 0x6dc729a1U,
  1003. 0x4b1d9e2fU, 0xf3dcb230U, 0xec0d8652U, 0xd077c1e3U,
  1004. 0x6c2bb316U, 0x99a970b9U, 0xfa119448U, 0x2247e964U,
  1005. 0xc4a8fc8cU, 0x1aa0f03fU, 0xd8567d2cU, 0xef223390U,
  1006. 0xc787494eU, 0xc1d938d1U, 0xfe8ccaa2U, 0x3698d40bU,
  1007. 0xcfa6f581U, 0x28a57adeU, 0x26dab78eU, 0xa43fadbfU,
  1008. 0xe42c3a9dU, 0x0d507892U, 0x9b6a5fccU, 0x62547e46U,
  1009. 0xc2f68d13U, 0xe890d8b8U, 0x5e2e39f7U, 0xf582c3afU,
  1010. 0xbe9f5d80U, 0x7c69d093U, 0xa96fd52dU, 0xb3cf2512U,
  1011. 0x3bc8ac99U, 0xa710187dU, 0x6ee89c63U, 0x7bdb3bbbU,
  1012. 0x09cd2678U, 0xf46e5918U, 0x01ec9ab7U, 0xa8834f9aU,
  1013. 0x65e6956eU, 0x7eaaffe6U, 0x0821bccfU, 0xe6ef15e8U,
  1014. 0xd9bae79bU, 0xce4a6f36U, 0xd4ea9f09U, 0xd629b07cU,
  1015. 0xaf31a4b2U, 0x312a3f23U, 0x30c6a594U, 0xc035a266U,
  1016. 0x37744ebcU, 0xa6fc82caU, 0xb0e090d0U, 0x1533a7d8U,
  1017. 0x4af10498U, 0xf741ecdaU, 0x0e7fcd50U, 0x2f1791f6U,
  1018. 0x8d764dd6U, 0x4d43efb0U, 0x54ccaa4dU, 0xdfe49604U,
  1019. 0xe39ed1b5U, 0x1b4c6a88U, 0xb8c12c1fU, 0x7f466551U,
  1020. 0x049d5eeaU, 0x5d018c35U, 0x73fa8774U, 0x2efb0b41U,
  1021. 0x5ab3671dU, 0x5292dbd2U, 0x33e91056U, 0x136dd647U,
  1022. 0x8c9ad761U, 0x7a37a10cU, 0x8e59f814U, 0x89eb133cU,
  1023. 0xeecea927U, 0x35b761c9U, 0xede11ce5U, 0x3c7a47b1U,
  1024. 0x599cd2dfU, 0x3f55f273U, 0x791814ceU, 0xbf73c737U,
  1025. 0xea53f7cdU, 0x5b5ffdaaU, 0x14df3d6fU, 0x867844dbU,
  1026. 0x81caaff3U, 0x3eb968c4U, 0x2c382434U, 0x5fc2a340U,
  1027. 0x72161dc3U, 0x0cbce225U, 0x8b283c49U, 0x41ff0d95U,
  1028. 0x7139a801U, 0xde080cb3U, 0x9cd8b4e4U, 0x906456c1U,
  1029. 0x617bcb84U, 0x70d532b6U, 0x74486c5cU, 0x42d0b857U,
  1030. };
  1031. static const u32 Td2[256] = {
  1032. 0xa75051f4U, 0x65537e41U, 0xa4c31a17U, 0x5e963a27U,
  1033. 0x6bcb3babU, 0x45f11f9dU, 0x58abacfaU, 0x03934be3U,
  1034. 0xfa552030U, 0x6df6ad76U, 0x769188ccU, 0x4c25f502U,
  1035. 0xd7fc4fe5U, 0xcbd7c52aU, 0x44802635U, 0xa38fb562U,
  1036. 0x5a49deb1U, 0x1b6725baU, 0x0e9845eaU, 0xc0e15dfeU,
  1037. 0x7502c32fU, 0xf012814cU, 0x97a38d46U, 0xf9c66bd3U,
  1038. 0x5fe7038fU, 0x9c951592U, 0x7aebbf6dU, 0x59da9552U,
  1039. 0x832dd4beU, 0x21d35874U, 0x692949e0U, 0xc8448ec9U,
  1040. 0x896a75c2U, 0x7978f48eU, 0x3e6b9958U, 0x71dd27b9U,
  1041. 0x4fb6bee1U, 0xad17f088U, 0xac66c920U, 0x3ab47dceU,
  1042. 0x4a1863dfU, 0x3182e51aU, 0x33609751U, 0x7f456253U,
  1043. 0x77e0b164U, 0xae84bb6bU, 0xa01cfe81U, 0x2b94f908U,
  1044. 0x68587048U, 0xfd198f45U, 0x6c8794deU, 0xf8b7527bU,
  1045. 0xd323ab73U, 0x02e2724bU, 0x8f57e31fU, 0xab2a6655U,
  1046. 0x2807b2ebU, 0xc2032fb5U, 0x7b9a86c5U, 0x08a5d337U,
  1047. 0x87f23028U, 0xa5b223bfU, 0x6aba0203U, 0x825ced16U,
  1048. 0x1c2b8acfU, 0xb492a779U, 0xf2f0f307U, 0xe2a14e69U,
  1049. 0xf4cd65daU, 0xbed50605U, 0x621fd134U, 0xfe8ac4a6U,
  1050. 0x539d342eU, 0x55a0a2f3U, 0xe132058aU, 0xeb75a4f6U,
  1051. 0xec390b83U, 0xefaa4060U, 0x9f065e71U, 0x1051bd6eU,
  1052. 0x8af93e21U, 0x063d96ddU, 0x05aedd3eU, 0xbd464de6U,
  1053. 0x8db59154U, 0x5d0571c4U, 0xd46f0406U, 0x15ff6050U,
  1054. 0xfb241998U, 0xe997d6bdU, 0x43cc8940U, 0x9e7767d9U,
  1055. 0x42bdb0e8U, 0x8b880789U, 0x5b38e719U, 0xeedb79c8U,
  1056. 0x0a47a17cU, 0x0fe97c42U, 0x1ec9f884U, 0x00000000U,
  1057. 0x86830980U, 0xed48322bU, 0x70ac1e11U, 0x724e6c5aU,
  1058. 0xfffbfd0eU, 0x38560f85U, 0xd51e3daeU, 0x3927362dU,
  1059. 0xd9640a0fU, 0xa621685cU, 0x54d19b5bU, 0x2e3a2436U,
  1060. 0x67b10c0aU, 0xe70f9357U, 0x96d2b4eeU, 0x919e1b9bU,
  1061. 0xc54f80c0U, 0x20a261dcU, 0x4b695a77U, 0x1a161c12U,
  1062. 0xba0ae293U, 0x2ae5c0a0U, 0xe0433c22U, 0x171d121bU,
  1063. 0x0d0b0e09U, 0xc7adf28bU, 0xa8b92db6U, 0xa9c8141eU,
  1064. 0x198557f1U, 0x074caf75U, 0xddbbee99U, 0x60fda37fU,
  1065. 0x269ff701U, 0xf5bc5c72U, 0x3bc54466U, 0x7e345bfbU,
  1066. 0x29768b43U, 0xc6dccb23U, 0xfc68b6edU, 0xf163b8e4U,
  1067. 0xdccad731U, 0x85104263U, 0x22401397U, 0x112084c6U,
  1068. 0x247d854aU, 0x3df8d2bbU, 0x3211aef9U, 0xa16dc729U,
  1069. 0x2f4b1d9eU, 0x30f3dcb2U, 0x52ec0d86U, 0xe3d077c1U,
  1070. 0x166c2bb3U, 0xb999a970U, 0x48fa1194U, 0x642247e9U,
  1071. 0x8cc4a8fcU, 0x3f1aa0f0U, 0x2cd8567dU, 0x90ef2233U,
  1072. 0x4ec78749U, 0xd1c1d938U, 0xa2fe8ccaU, 0x0b3698d4U,
  1073. 0x81cfa6f5U, 0xde28a57aU, 0x8e26dab7U, 0xbfa43fadU,
  1074. 0x9de42c3aU, 0x920d5078U, 0xcc9b6a5fU, 0x4662547eU,
  1075. 0x13c2f68dU, 0xb8e890d8U, 0xf75e2e39U, 0xaff582c3U,
  1076. 0x80be9f5dU, 0x937c69d0U, 0x2da96fd5U, 0x12b3cf25U,
  1077. 0x993bc8acU, 0x7da71018U, 0x636ee89cU, 0xbb7bdb3bU,
  1078. 0x7809cd26U, 0x18f46e59U, 0xb701ec9aU, 0x9aa8834fU,
  1079. 0x6e65e695U, 0xe67eaaffU, 0xcf0821bcU, 0xe8e6ef15U,
  1080. 0x9bd9bae7U, 0x36ce4a6fU, 0x09d4ea9fU, 0x7cd629b0U,
  1081. 0xb2af31a4U, 0x23312a3fU, 0x9430c6a5U, 0x66c035a2U,
  1082. 0xbc37744eU, 0xcaa6fc82U, 0xd0b0e090U, 0xd81533a7U,
  1083. 0x984af104U, 0xdaf741ecU, 0x500e7fcdU, 0xf62f1791U,
  1084. 0xd68d764dU, 0xb04d43efU, 0x4d54ccaaU, 0x04dfe496U,
  1085. 0xb5e39ed1U, 0x881b4c6aU, 0x1fb8c12cU, 0x517f4665U,
  1086. 0xea049d5eU, 0x355d018cU, 0x7473fa87U, 0x412efb0bU,
  1087. 0x1d5ab367U, 0xd25292dbU, 0x5633e910U, 0x47136dd6U,
  1088. 0x618c9ad7U, 0x0c7a37a1U, 0x148e59f8U, 0x3c89eb13U,
  1089. 0x27eecea9U, 0xc935b761U, 0xe5ede11cU, 0xb13c7a47U,
  1090. 0xdf599cd2U, 0x733f55f2U, 0xce791814U, 0x37bf73c7U,
  1091. 0xcdea53f7U, 0xaa5b5ffdU, 0x6f14df3dU, 0xdb867844U,
  1092. 0xf381caafU, 0xc43eb968U, 0x342c3824U, 0x405fc2a3U,
  1093. 0xc372161dU, 0x250cbce2U, 0x498b283cU, 0x9541ff0dU,
  1094. 0x017139a8U, 0xb3de080cU, 0xe49cd8b4U, 0xc1906456U,
  1095. 0x84617bcbU, 0xb670d532U, 0x5c74486cU, 0x5742d0b8U,
  1096. };
static const u32 Td3[256] = {
    0xf4a75051U, 0x4165537eU, 0x17a4c31aU, 0x275e963aU,
    0xab6bcb3bU, 0x9d45f11fU, 0xfa58abacU, 0xe303934bU,
    0x30fa5520U, 0x766df6adU, 0xcc769188U, 0x024c25f5U,
    0xe5d7fc4fU, 0x2acbd7c5U, 0x35448026U, 0x62a38fb5U,
    0xb15a49deU, 0xba1b6725U, 0xea0e9845U, 0xfec0e15dU,
    0x2f7502c3U, 0x4cf01281U, 0x4697a38dU, 0xd3f9c66bU,
    0x8f5fe703U, 0x929c9515U, 0x6d7aebbfU, 0x5259da95U,
    0xbe832dd4U, 0x7421d358U, 0xe0692949U, 0xc9c8448eU,
    0xc2896a75U, 0x8e7978f4U, 0x583e6b99U, 0xb971dd27U,
    0xe14fb6beU, 0x88ad17f0U, 0x20ac66c9U, 0xce3ab47dU,
    0xdf4a1863U, 0x1a3182e5U, 0x51336097U, 0x537f4562U,
    0x6477e0b1U, 0x6bae84bbU, 0x81a01cfeU, 0x082b94f9U,
    0x48685870U, 0x45fd198fU, 0xde6c8794U, 0x7bf8b752U,
    0x73d323abU, 0x4b02e272U, 0x1f8f57e3U, 0x55ab2a66U,
    0xeb2807b2U, 0xb5c2032fU, 0xc57b9a86U, 0x3708a5d3U,
    0x2887f230U, 0xbfa5b223U, 0x036aba02U, 0x16825cedU,
    0xcf1c2b8aU, 0x79b492a7U, 0x07f2f0f3U, 0x69e2a14eU,
    0xdaf4cd65U, 0x05bed506U, 0x34621fd1U, 0xa6fe8ac4U,
    0x2e539d34U, 0xf355a0a2U, 0x8ae13205U, 0xf6eb75a4U,
    0x83ec390bU, 0x60efaa40U, 0x719f065eU, 0x6e1051bdU,
    0x218af93eU, 0xdd063d96U, 0x3e05aeddU, 0xe6bd464dU,
    0x548db591U, 0xc45d0571U, 0x06d46f04U, 0x5015ff60U,
    0x98fb2419U, 0xbde997d6U, 0x4043cc89U, 0xd99e7767U,
    0xe842bdb0U, 0x898b8807U, 0x195b38e7U, 0xc8eedb79U,
    0x7c0a47a1U, 0x420fe97cU, 0x841ec9f8U, 0x00000000U,
    0x80868309U, 0x2bed4832U, 0x1170ac1eU, 0x5a724e6cU,
    0x0efffbfdU, 0x8538560fU, 0xaed51e3dU, 0x2d392736U,
    0x0fd9640aU, 0x5ca62168U, 0x5b54d19bU, 0x362e3a24U,
    0x0a67b10cU, 0x57e70f93U, 0xee96d2b4U, 0x9b919e1bU,
    0xc0c54f80U, 0xdc20a261U, 0x774b695aU, 0x121a161cU,
    0x93ba0ae2U, 0xa02ae5c0U, 0x22e0433cU, 0x1b171d12U,
    0x090d0b0eU, 0x8bc7adf2U, 0xb6a8b92dU, 0x1ea9c814U,
    0xf1198557U, 0x75074cafU, 0x99ddbbeeU, 0x7f60fda3U,
    0x01269ff7U, 0x72f5bc5cU, 0x663bc544U, 0xfb7e345bU,
    0x4329768bU, 0x23c6dccbU, 0xedfc68b6U, 0xe4f163b8U,
    0x31dccad7U, 0x63851042U, 0x97224013U, 0xc6112084U,
    0x4a247d85U, 0xbb3df8d2U, 0xf93211aeU, 0x29a16dc7U,
    0x9e2f4b1dU, 0xb230f3dcU, 0x8652ec0dU, 0xc1e3d077U,
    0xb3166c2bU, 0x70b999a9U, 0x9448fa11U, 0xe9642247U,
    0xfc8cc4a8U, 0xf03f1aa0U, 0x7d2cd856U, 0x3390ef22U,
    0x494ec787U, 0x38d1c1d9U, 0xcaa2fe8cU, 0xd40b3698U,
    0xf581cfa6U, 0x7ade28a5U, 0xb78e26daU, 0xadbfa43fU,
    0x3a9de42cU, 0x78920d50U, 0x5fcc9b6aU, 0x7e466254U,
    0x8d13c2f6U, 0xd8b8e890U, 0x39f75e2eU, 0xc3aff582U,
    0x5d80be9fU, 0xd0937c69U, 0xd52da96fU, 0x2512b3cfU,
    0xac993bc8U, 0x187da710U, 0x9c636ee8U, 0x3bbb7bdbU,
    0x267809cdU, 0x5918f46eU, 0x9ab701ecU, 0x4f9aa883U,
    0x956e65e6U, 0xffe67eaaU, 0xbccf0821U, 0x15e8e6efU,
    0xe79bd9baU, 0x6f36ce4aU, 0x9f09d4eaU, 0xb07cd629U,
    0xa4b2af31U, 0x3f23312aU, 0xa59430c6U, 0xa266c035U,
    0x4ebc3774U, 0x82caa6fcU, 0x90d0b0e0U, 0xa7d81533U,
    0x04984af1U, 0xecdaf741U, 0xcd500e7fU, 0x91f62f17U,
    0x4dd68d76U, 0xefb04d43U, 0xaa4d54ccU, 0x9604dfe4U,
    0xd1b5e39eU, 0x6a881b4cU, 0x2c1fb8c1U, 0x65517f46U,
    0x5eea049dU, 0x8c355d01U, 0x877473faU, 0x0b412efbU,
    0x671d5ab3U, 0xdbd25292U, 0x105633e9U, 0xd647136dU,
    0xd7618c9aU, 0xa10c7a37U, 0xf8148e59U, 0x133c89ebU,
    0xa927eeceU, 0x61c935b7U, 0x1ce5ede1U, 0x47b13c7aU,
    0xd2df599cU, 0xf2733f55U, 0x14ce7918U, 0xc737bf73U,
    0xf7cdea53U, 0xfdaa5b5fU, 0x3d6f14dfU, 0x44db8678U,
    0xaff381caU, 0x68c43eb9U, 0x24342c38U, 0xa3405fc2U,
    0x1dc37216U, 0xe2250cbcU, 0x3c498b28U, 0x0d9541ffU,
    0xa8017139U, 0x0cb3de08U, 0xb4e49cd8U, 0x56c19064U,
    0xcb84617bU, 0x32b670d5U, 0x6c5c7448U, 0xb85742d0U,
};
static const u8 Td4[256] = {
    0x52U, 0x09U, 0x6aU, 0xd5U, 0x30U, 0x36U, 0xa5U, 0x38U,
    0xbfU, 0x40U, 0xa3U, 0x9eU, 0x81U, 0xf3U, 0xd7U, 0xfbU,
    0x7cU, 0xe3U, 0x39U, 0x82U, 0x9bU, 0x2fU, 0xffU, 0x87U,
    0x34U, 0x8eU, 0x43U, 0x44U, 0xc4U, 0xdeU, 0xe9U, 0xcbU,
    0x54U, 0x7bU, 0x94U, 0x32U, 0xa6U, 0xc2U, 0x23U, 0x3dU,
    0xeeU, 0x4cU, 0x95U, 0x0bU, 0x42U, 0xfaU, 0xc3U, 0x4eU,
    0x08U, 0x2eU, 0xa1U, 0x66U, 0x28U, 0xd9U, 0x24U, 0xb2U,
    0x76U, 0x5bU, 0xa2U, 0x49U, 0x6dU, 0x8bU, 0xd1U, 0x25U,
    0x72U, 0xf8U, 0xf6U, 0x64U, 0x86U, 0x68U, 0x98U, 0x16U,
    0xd4U, 0xa4U, 0x5cU, 0xccU, 0x5dU, 0x65U, 0xb6U, 0x92U,
    0x6cU, 0x70U, 0x48U, 0x50U, 0xfdU, 0xedU, 0xb9U, 0xdaU,
    0x5eU, 0x15U, 0x46U, 0x57U, 0xa7U, 0x8dU, 0x9dU, 0x84U,
    0x90U, 0xd8U, 0xabU, 0x00U, 0x8cU, 0xbcU, 0xd3U, 0x0aU,
    0xf7U, 0xe4U, 0x58U, 0x05U, 0xb8U, 0xb3U, 0x45U, 0x06U,
    0xd0U, 0x2cU, 0x1eU, 0x8fU, 0xcaU, 0x3fU, 0x0fU, 0x02U,
    0xc1U, 0xafU, 0xbdU, 0x03U, 0x01U, 0x13U, 0x8aU, 0x6bU,
    0x3aU, 0x91U, 0x11U, 0x41U, 0x4fU, 0x67U, 0xdcU, 0xeaU,
    0x97U, 0xf2U, 0xcfU, 0xceU, 0xf0U, 0xb4U, 0xe6U, 0x73U,
    0x96U, 0xacU, 0x74U, 0x22U, 0xe7U, 0xadU, 0x35U, 0x85U,
    0xe2U, 0xf9U, 0x37U, 0xe8U, 0x1cU, 0x75U, 0xdfU, 0x6eU,
    0x47U, 0xf1U, 0x1aU, 0x71U, 0x1dU, 0x29U, 0xc5U, 0x89U,
    0x6fU, 0xb7U, 0x62U, 0x0eU, 0xaaU, 0x18U, 0xbeU, 0x1bU,
    0xfcU, 0x56U, 0x3eU, 0x4bU, 0xc6U, 0xd2U, 0x79U, 0x20U,
    0x9aU, 0xdbU, 0xc0U, 0xfeU, 0x78U, 0xcdU, 0x5aU, 0xf4U,
    0x1fU, 0xddU, 0xa8U, 0x33U, 0x88U, 0x07U, 0xc7U, 0x31U,
    0xb1U, 0x12U, 0x10U, 0x59U, 0x27U, 0x80U, 0xecU, 0x5fU,
    0x60U, 0x51U, 0x7fU, 0xa9U, 0x19U, 0xb5U, 0x4aU, 0x0dU,
    0x2dU, 0xe5U, 0x7aU, 0x9fU, 0x93U, 0xc9U, 0x9cU, 0xefU,
    0xa0U, 0xe0U, 0x3bU, 0x4dU, 0xaeU, 0x2aU, 0xf5U, 0xb0U,
    0xc8U, 0xebU, 0xbbU, 0x3cU, 0x83U, 0x53U, 0x99U, 0x61U,
    0x17U, 0x2bU, 0x04U, 0x7eU, 0xbaU, 0x77U, 0xd6U, 0x26U,
    0xe1U, 0x69U, 0x14U, 0x63U, 0x55U, 0x21U, 0x0cU, 0x7dU,
};
static const u32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000, /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
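/*
 * Note: rcon[i] holds x^i in GF(2^8) (reduction polynomial 0x11b), placed in
 * the most significant byte of the word; doubling 0x80 gives 0x100 ^ 0x11b =
 * 0x1b, which is why the sequence wraps from 0x80000000 to 0x1B000000.
 */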
/**
 * Expand the cipher key into the encryption key schedule.
 */
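/*
 * Returns 0 on success, -1 if userKey or key is NULL, and -2 for an
 * unsupported key length (anything other than 128, 192 or 256 bits), per
 * the checks at the top of the function.
 */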
int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
                        AES_KEY *key)
{
    u32 *rk;
    int i = 0;
    u32 temp;
    if (!userKey || !key)
        return -1;
    if (bits != 128 && bits != 192 && bits != 256)
        return -2;
    rk = key->rd_key;
    if (bits == 128)
        key->rounds = 10;
    else if (bits == 192)
        key->rounds = 12;
    else
        key->rounds = 14;
    rk[0] = GETU32(userKey);
    rk[1] = GETU32(userKey + 4);
    rk[2] = GETU32(userKey + 8);
    rk[3] = GETU32(userKey + 12);
    if (bits == 128) {
        while (1) {
            temp = rk[3];
            rk[4] = rk[0] ^
                (Te2[(temp >> 16) & 0xff] & 0xff000000) ^
                (Te3[(temp >> 8) & 0xff] & 0x00ff0000) ^
                (Te0[(temp) & 0xff] & 0x0000ff00) ^
                (Te1[(temp >> 24)] & 0x000000ff) ^
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10) {
                return 0;
            }
            rk += 4;
        }
    }
    rk[4] = GETU32(userKey + 16);
    rk[5] = GETU32(userKey + 20);
    if (bits == 192) {
        while (1) {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
                (Te2[(temp >> 16) & 0xff] & 0xff000000) ^
                (Te3[(temp >> 8) & 0xff] & 0x00ff0000) ^
                (Te0[(temp) & 0xff] & 0x0000ff00) ^
                (Te1[(temp >> 24)] & 0x000000ff) ^
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8) {
                return 0;
            }
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
    }
    rk[6] = GETU32(userKey + 24);
    rk[7] = GETU32(userKey + 28);
    if (bits == 256) {
        while (1) {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
                (Te2[(temp >> 16) & 0xff] & 0xff000000) ^
                (Te3[(temp >> 8) & 0xff] & 0x00ff0000) ^
                (Te0[(temp) & 0xff] & 0x0000ff00) ^
                (Te1[(temp >> 24)] & 0x000000ff) ^
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7) {
                return 0;
            }
            temp = rk[11];
            rk[12] = rk[ 4] ^
                (Te2[(temp >> 24)] & 0xff000000) ^
                (Te3[(temp >> 16) & 0xff] & 0x00ff0000) ^
                (Te0[(temp >> 8) & 0xff] & 0x0000ff00) ^
                (Te1[(temp) & 0xff] & 0x000000ff);
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];
            rk += 8;
        }
    }
    return 0;
}
/**
 * Expand the cipher key into the decryption key schedule.
 */
int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
                        AES_KEY *key)
{
    u32 *rk;
    int i, j, status;
    u32 temp;
    /* first, start with an encryption schedule */
    status = AES_set_encrypt_key(userKey, bits, key);
    if (status < 0)
        return status;
    rk = key->rd_key;
    /* invert the order of the round keys: */
    for (i = 0, j = 4*(key->rounds); i < j; i += 4, j -= 4) {
        temp = rk[i    ]; rk[i    ] = rk[j    ]; rk[j    ] = temp;
        temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
        temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
        temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
    }
    /* apply the inverse MixColumn transform to all round keys but the first and the last: */
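    /*
     * Note: Te1[b] & 0xff is simply the forward S-box value S[b], and the Td
     * tables combine the inverse S-box with InvMixColumn, so Td0[Te1[b] & 0xff]
     * yields the InvMixColumn contribution of byte b; XOR-ing the four byte
     * positions below therefore applies InvMixColumn to each round-key word.
     */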
    for (i = 1; i < (key->rounds); i++) {
        rk += 4;
        rk[0] =
            Td0[Te1[(rk[0] >> 24)] & 0xff] ^
            Td1[Te1[(rk[0] >> 16) & 0xff] & 0xff] ^
            Td2[Te1[(rk[0] >> 8) & 0xff] & 0xff] ^
            Td3[Te1[(rk[0]) & 0xff] & 0xff];
        rk[1] =
            Td0[Te1[(rk[1] >> 24)] & 0xff] ^
            Td1[Te1[(rk[1] >> 16) & 0xff] & 0xff] ^
            Td2[Te1[(rk[1] >> 8) & 0xff] & 0xff] ^
            Td3[Te1[(rk[1]) & 0xff] & 0xff];
        rk[2] =
            Td0[Te1[(rk[2] >> 24)] & 0xff] ^
            Td1[Te1[(rk[2] >> 16) & 0xff] & 0xff] ^
            Td2[Te1[(rk[2] >> 8) & 0xff] & 0xff] ^
            Td3[Te1[(rk[2]) & 0xff] & 0xff];
        rk[3] =
            Td0[Te1[(rk[3] >> 24)] & 0xff] ^
            Td1[Te1[(rk[3] >> 16) & 0xff] & 0xff] ^
            Td2[Te1[(rk[3] >> 8) & 0xff] & 0xff] ^
            Td3[Te1[(rk[3]) & 0xff] & 0xff];
    }
    return 0;
}
/*
 * Encrypt a single block
 * in and out can overlap
 */
void AES_encrypt(const unsigned char *in, unsigned char *out,
                 const AES_KEY *key)
{
    const u32 *rk;
    u32 s0, s1, s2, s3, t0, t1, t2, t3;
#ifndef FULL_UNROLL
    int r;
#endif /* ?FULL_UNROLL */
    assert(in && out && key);
    rk = key->rd_key;
    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    s0 = GETU32(in) ^ rk[0];
    s1 = GETU32(in + 4) ^ rk[1];
    s2 = GETU32(in + 8) ^ rk[2];
    s3 = GETU32(in + 12) ^ rk[3];
#ifdef FULL_UNROLL
    /* round 1: */
    t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[ 4];
    t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[ 5];
    t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[ 6];
    t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[ 7];
    /* round 2: */
    s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[ 8];
    s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[ 9];
    s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[10];
    s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[11];
    /* round 3: */
    t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[12];
    t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[13];
    t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[14];
    t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[15];
    /* round 4: */
    s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[16];
    s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[17];
    s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[18];
    s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[19];
    /* round 5: */
    t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[20];
    t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[21];
    t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[22];
    t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[23];
    /* round 6: */
    s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[24];
    s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[25];
    s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[26];
    s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[27];
    /* round 7: */
    t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[28];
    t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[29];
    t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[30];
    t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[31];
    /* round 8: */
    s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[32];
    s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[33];
    s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[34];
    s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[35];
    /* round 9: */
    t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[36];
    t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[37];
    t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[38];
    t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[39];
    if (key->rounds > 10) {
        /* round 10: */
        s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[40];
        s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[41];
        s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[42];
        s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[43];
        /* round 11: */
        t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[44];
        t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[45];
        t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[46];
        t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[47];
        if (key->rounds > 12) {
            /* round 12: */
            s0 = Te0[t0 >> 24] ^ Te1[(t1 >> 16) & 0xff] ^ Te2[(t2 >> 8) & 0xff] ^ Te3[t3 & 0xff] ^ rk[48];
            s1 = Te0[t1 >> 24] ^ Te1[(t2 >> 16) & 0xff] ^ Te2[(t3 >> 8) & 0xff] ^ Te3[t0 & 0xff] ^ rk[49];
            s2 = Te0[t2 >> 24] ^ Te1[(t3 >> 16) & 0xff] ^ Te2[(t0 >> 8) & 0xff] ^ Te3[t1 & 0xff] ^ rk[50];
            s3 = Te0[t3 >> 24] ^ Te1[(t0 >> 16) & 0xff] ^ Te2[(t1 >> 8) & 0xff] ^ Te3[t2 & 0xff] ^ rk[51];
            /* round 13: */
            t0 = Te0[s0 >> 24] ^ Te1[(s1 >> 16) & 0xff] ^ Te2[(s2 >> 8) & 0xff] ^ Te3[s3 & 0xff] ^ rk[52];
            t1 = Te0[s1 >> 24] ^ Te1[(s2 >> 16) & 0xff] ^ Te2[(s3 >> 8) & 0xff] ^ Te3[s0 & 0xff] ^ rk[53];
            t2 = Te0[s2 >> 24] ^ Te1[(s3 >> 16) & 0xff] ^ Te2[(s0 >> 8) & 0xff] ^ Te3[s1 & 0xff] ^ rk[54];
            t3 = Te0[s3 >> 24] ^ Te1[(s0 >> 16) & 0xff] ^ Te2[(s1 >> 8) & 0xff] ^ Te3[s2 & 0xff] ^ rk[55];
        }
    }
    rk += key->rounds << 2;
#else /* !FULL_UNROLL */
    /*
     * Nr - 1 full rounds:
     */
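    /*
     * Each step below fuses SubBytes, ShiftRows and MixColumns into four
     * table lookups per output word (Te1..Te3 are byte rotations of Te0),
     * followed by the AddRoundKey XOR. The loop body handles two rounds per
     * pass and breaks halfway through the final pass so that the last round,
     * which omits MixColumns, can be applied separately below.
     */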
    r = key->rounds >> 1;
    for (;;) {
        t0 =
            Te0[(s0 >> 24)] ^
            Te1[(s1 >> 16) & 0xff] ^
            Te2[(s2 >> 8) & 0xff] ^
            Te3[(s3) & 0xff] ^
            rk[4];
        t1 =
            Te0[(s1 >> 24)] ^
            Te1[(s2 >> 16) & 0xff] ^
            Te2[(s3 >> 8) & 0xff] ^
            Te3[(s0) & 0xff] ^
            rk[5];
        t2 =
            Te0[(s2 >> 24)] ^
            Te1[(s3 >> 16) & 0xff] ^
            Te2[(s0 >> 8) & 0xff] ^
            Te3[(s1) & 0xff] ^
            rk[6];
        t3 =
            Te0[(s3 >> 24)] ^
            Te1[(s0 >> 16) & 0xff] ^
            Te2[(s1 >> 8) & 0xff] ^
            Te3[(s2) & 0xff] ^
            rk[7];
        rk += 8;
        if (--r == 0) {
            break;
        }
        s0 =
            Te0[(t0 >> 24)] ^
            Te1[(t1 >> 16) & 0xff] ^
            Te2[(t2 >> 8) & 0xff] ^
            Te3[(t3) & 0xff] ^
            rk[0];
        s1 =
            Te0[(t1 >> 24)] ^
            Te1[(t2 >> 16) & 0xff] ^
            Te2[(t3 >> 8) & 0xff] ^
            Te3[(t0) & 0xff] ^
            rk[1];
        s2 =
            Te0[(t2 >> 24)] ^
            Te1[(t3 >> 16) & 0xff] ^
            Te2[(t0 >> 8) & 0xff] ^
            Te3[(t1) & 0xff] ^
            rk[2];
        s3 =
            Te0[(t3 >> 24)] ^
            Te1[(t0 >> 16) & 0xff] ^
            Te2[(t1 >> 8) & 0xff] ^
            Te3[(t2) & 0xff] ^
            rk[3];
    }
#endif /* ?FULL_UNROLL */
    /*
     * apply last round and
     * map cipher state to byte array block:
     */
    s0 =
        (Te2[(t0 >> 24)] & 0xff000000) ^
        (Te3[(t1 >> 16) & 0xff] & 0x00ff0000) ^
        (Te0[(t2 >> 8) & 0xff] & 0x0000ff00) ^
        (Te1[(t3) & 0xff] & 0x000000ff) ^
        rk[0];
    PUTU32(out, s0);
    s1 =
        (Te2[(t1 >> 24)] & 0xff000000) ^
        (Te3[(t2 >> 16) & 0xff] & 0x00ff0000) ^
        (Te0[(t3 >> 8) & 0xff] & 0x0000ff00) ^
        (Te1[(t0) & 0xff] & 0x000000ff) ^
        rk[1];
    PUTU32(out + 4, s1);
    s2 =
        (Te2[(t2 >> 24)] & 0xff000000) ^
        (Te3[(t3 >> 16) & 0xff] & 0x00ff0000) ^
        (Te0[(t0 >> 8) & 0xff] & 0x0000ff00) ^
        (Te1[(t1) & 0xff] & 0x000000ff) ^
        rk[2];
    PUTU32(out + 8, s2);
    s3 =
        (Te2[(t3 >> 24)] & 0xff000000) ^
        (Te3[(t0 >> 16) & 0xff] & 0x00ff0000) ^
        (Te0[(t1 >> 8) & 0xff] & 0x0000ff00) ^
        (Te1[(t2) & 0xff] & 0x000000ff) ^
        rk[3];
    PUTU32(out + 12, s3);
}
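/*
 * Minimal usage sketch for the routines above (illustrative only; the
 * buffers and values named here are hypothetical, and AES_encrypt()
 * processes exactly one 16-byte block):
 *
 *     unsigned char key[16] = { 0 }, in[16] = { 0 }, out[16];
 *     AES_KEY ks;
 *     if (AES_set_encrypt_key(key, 128, &ks) == 0)
 *         AES_encrypt(in, out, &ks);
 *
 * Decryption mirrors this with AES_set_decrypt_key() and AES_decrypt().
 */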
/*
 * Decrypt a single block
 * in and out can overlap
 */
void AES_decrypt(const unsigned char *in, unsigned char *out,
                 const AES_KEY *key)
{
    const u32 *rk;
    u32 s0, s1, s2, s3, t0, t1, t2, t3;
#ifndef FULL_UNROLL
    int r;
#endif /* ?FULL_UNROLL */
    assert(in && out && key);
    rk = key->rd_key;
    /*
     * map byte array block to cipher state
     * and add initial round key:
     */
    s0 = GETU32(in) ^ rk[0];
    s1 = GETU32(in + 4) ^ rk[1];
    s2 = GETU32(in + 8) ^ rk[2];
    s3 = GETU32(in + 12) ^ rk[3];
#ifdef FULL_UNROLL
    /* round 1: */
    t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[ 4];
    t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[ 5];
    t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[ 6];
    t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[ 7];
    /* round 2: */
    s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[ 8];
    s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[ 9];
    s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[10];
    s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[11];
    /* round 3: */
    t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[12];
    t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[13];
    t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[14];
    t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[15];
    /* round 4: */
    s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[16];
    s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[17];
    s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[18];
    s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[19];
    /* round 5: */
    t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[20];
    t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[21];
    t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[22];
    t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[23];
    /* round 6: */
    s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[24];
    s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[25];
    s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[26];
    s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[27];
    /* round 7: */
    t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[28];
    t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[29];
    t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[30];
    t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[31];
    /* round 8: */
    s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[32];
    s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[33];
    s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[34];
    s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[35];
    /* round 9: */
    t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[36];
    t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[37];
    t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[38];
    t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[39];
    if (key->rounds > 10) {
        /* round 10: */
        s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[40];
        s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[41];
        s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[42];
        s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[43];
        /* round 11: */
        t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[44];
        t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[45];
        t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[46];
        t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[47];
        if (key->rounds > 12) {
            /* round 12: */
            s0 = Td0[t0 >> 24] ^ Td1[(t3 >> 16) & 0xff] ^ Td2[(t2 >> 8) & 0xff] ^ Td3[t1 & 0xff] ^ rk[48];
            s1 = Td0[t1 >> 24] ^ Td1[(t0 >> 16) & 0xff] ^ Td2[(t3 >> 8) & 0xff] ^ Td3[t2 & 0xff] ^ rk[49];
            s2 = Td0[t2 >> 24] ^ Td1[(t1 >> 16) & 0xff] ^ Td2[(t0 >> 8) & 0xff] ^ Td3[t3 & 0xff] ^ rk[50];
            s3 = Td0[t3 >> 24] ^ Td1[(t2 >> 16) & 0xff] ^ Td2[(t1 >> 8) & 0xff] ^ Td3[t0 & 0xff] ^ rk[51];
            /* round 13: */
            t0 = Td0[s0 >> 24] ^ Td1[(s3 >> 16) & 0xff] ^ Td2[(s2 >> 8) & 0xff] ^ Td3[s1 & 0xff] ^ rk[52];
            t1 = Td0[s1 >> 24] ^ Td1[(s0 >> 16) & 0xff] ^ Td2[(s3 >> 8) & 0xff] ^ Td3[s2 & 0xff] ^ rk[53];
            t2 = Td0[s2 >> 24] ^ Td1[(s1 >> 16) & 0xff] ^ Td2[(s0 >> 8) & 0xff] ^ Td3[s3 & 0xff] ^ rk[54];
            t3 = Td0[s3 >> 24] ^ Td1[(s2 >> 16) & 0xff] ^ Td2[(s1 >> 8) & 0xff] ^ Td3[s0 & 0xff] ^ rk[55];
        }
    }
    rk += key->rounds << 2;
#else /* !FULL_UNROLL */
    /*
     * Nr - 1 full rounds:
     */
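    /*
     * Same two-rounds-per-iteration structure as AES_encrypt(), but using the
     * Td tables and reading the state bytes in inverse-ShiftRows order
     * (s0, s3, s2, s1 for the first output word).
     */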
    r = key->rounds >> 1;
    for (;;) {
        t0 =
            Td0[(s0 >> 24)] ^
            Td1[(s3 >> 16) & 0xff] ^
            Td2[(s2 >> 8) & 0xff] ^
            Td3[(s1) & 0xff] ^
            rk[4];
        t1 =
            Td0[(s1 >> 24)] ^
            Td1[(s0 >> 16) & 0xff] ^
            Td2[(s3 >> 8) & 0xff] ^
            Td3[(s2) & 0xff] ^
            rk[5];
        t2 =
            Td0[(s2 >> 24)] ^
            Td1[(s1 >> 16) & 0xff] ^
            Td2[(s0 >> 8) & 0xff] ^
            Td3[(s3) & 0xff] ^
            rk[6];
        t3 =
            Td0[(s3 >> 24)] ^
            Td1[(s2 >> 16) & 0xff] ^
            Td2[(s1 >> 8) & 0xff] ^
            Td3[(s0) & 0xff] ^
            rk[7];
        rk += 8;
        if (--r == 0) {
            break;
        }
        s0 =
            Td0[(t0 >> 24)] ^
            Td1[(t3 >> 16) & 0xff] ^
            Td2[(t2 >> 8) & 0xff] ^
            Td3[(t1) & 0xff] ^
            rk[0];
        s1 =
            Td0[(t1 >> 24)] ^
            Td1[(t0 >> 16) & 0xff] ^
            Td2[(t3 >> 8) & 0xff] ^
            Td3[(t2) & 0xff] ^
            rk[1];
        s2 =
            Td0[(t2 >> 24)] ^
            Td1[(t1 >> 16) & 0xff] ^
            Td2[(t0 >> 8) & 0xff] ^
            Td3[(t3) & 0xff] ^
            rk[2];
        s3 =
            Td0[(t3 >> 24)] ^
            Td1[(t2 >> 16) & 0xff] ^
            Td2[(t1 >> 8) & 0xff] ^
            Td3[(t0) & 0xff] ^
            rk[3];
    }
#endif /* ?FULL_UNROLL */
    /*
     * apply last round and
     * map cipher state to byte array block:
     */
    s0 =
        ((u32)Td4[(t0 >> 24)] << 24) ^
        ((u32)Td4[(t3 >> 16) & 0xff] << 16) ^
        ((u32)Td4[(t2 >> 8) & 0xff] << 8) ^
        ((u32)Td4[(t1) & 0xff]) ^
        rk[0];
    PUTU32(out, s0);
    s1 =
        ((u32)Td4[(t1 >> 24)] << 24) ^
        ((u32)Td4[(t0 >> 16) & 0xff] << 16) ^
        ((u32)Td4[(t3 >> 8) & 0xff] << 8) ^
        ((u32)Td4[(t2) & 0xff]) ^
        rk[1];
    PUTU32(out + 4, s1);
    s2 =
        ((u32)Td4[(t2 >> 24)] << 24) ^
        ((u32)Td4[(t1 >> 16) & 0xff] << 16) ^
        ((u32)Td4[(t0 >> 8) & 0xff] << 8) ^
        ((u32)Td4[(t3) & 0xff]) ^
        rk[2];
    PUTU32(out + 8, s2);
    s3 =
        ((u32)Td4[(t3 >> 24)] << 24) ^
        ((u32)Td4[(t2 >> 16) & 0xff] << 16) ^
        ((u32)Td4[(t1 >> 8) & 0xff] << 8) ^
        ((u32)Td4[(t0) & 0xff]) ^
        rk[3];
    PUTU32(out + 12, s3);
}
#else /* AES_ASM */
static const u8 Te4[256] = {
    0x63U, 0x7cU, 0x77U, 0x7bU, 0xf2U, 0x6bU, 0x6fU, 0xc5U,
    0x30U, 0x01U, 0x67U, 0x2bU, 0xfeU, 0xd7U, 0xabU, 0x76U,
    0xcaU, 0x82U, 0xc9U, 0x7dU, 0xfaU, 0x59U, 0x47U, 0xf0U,
    0xadU, 0xd4U, 0xa2U, 0xafU, 0x9cU, 0xa4U, 0x72U, 0xc0U,
    0xb7U, 0xfdU, 0x93U, 0x26U, 0x36U, 0x3fU, 0xf7U, 0xccU,
    0x34U, 0xa5U, 0xe5U, 0xf1U, 0x71U, 0xd8U, 0x31U, 0x15U,
    0x04U, 0xc7U, 0x23U, 0xc3U, 0x18U, 0x96U, 0x05U, 0x9aU,
    0x07U, 0x12U, 0x80U, 0xe2U, 0xebU, 0x27U, 0xb2U, 0x75U,
    0x09U, 0x83U, 0x2cU, 0x1aU, 0x1bU, 0x6eU, 0x5aU, 0xa0U,
    0x52U, 0x3bU, 0xd6U, 0xb3U, 0x29U, 0xe3U, 0x2fU, 0x84U,
    0x53U, 0xd1U, 0x00U, 0xedU, 0x20U, 0xfcU, 0xb1U, 0x5bU,
    0x6aU, 0xcbU, 0xbeU, 0x39U, 0x4aU, 0x4cU, 0x58U, 0xcfU,
    0xd0U, 0xefU, 0xaaU, 0xfbU, 0x43U, 0x4dU, 0x33U, 0x85U,
    0x45U, 0xf9U, 0x02U, 0x7fU, 0x50U, 0x3cU, 0x9fU, 0xa8U,
    0x51U, 0xa3U, 0x40U, 0x8fU, 0x92U, 0x9dU, 0x38U, 0xf5U,
    0xbcU, 0xb6U, 0xdaU, 0x21U, 0x10U, 0xffU, 0xf3U, 0xd2U,
    0xcdU, 0x0cU, 0x13U, 0xecU, 0x5fU, 0x97U, 0x44U, 0x17U,
    0xc4U, 0xa7U, 0x7eU, 0x3dU, 0x64U, 0x5dU, 0x19U, 0x73U,
    0x60U, 0x81U, 0x4fU, 0xdcU, 0x22U, 0x2aU, 0x90U, 0x88U,
    0x46U, 0xeeU, 0xb8U, 0x14U, 0xdeU, 0x5eU, 0x0bU, 0xdbU,
    0xe0U, 0x32U, 0x3aU, 0x0aU, 0x49U, 0x06U, 0x24U, 0x5cU,
    0xc2U, 0xd3U, 0xacU, 0x62U, 0x91U, 0x95U, 0xe4U, 0x79U,
    0xe7U, 0xc8U, 0x37U, 0x6dU, 0x8dU, 0xd5U, 0x4eU, 0xa9U,
    0x6cU, 0x56U, 0xf4U, 0xeaU, 0x65U, 0x7aU, 0xaeU, 0x08U,
    0xbaU, 0x78U, 0x25U, 0x2eU, 0x1cU, 0xa6U, 0xb4U, 0xc6U,
    0xe8U, 0xddU, 0x74U, 0x1fU, 0x4bU, 0xbdU, 0x8bU, 0x8aU,
    0x70U, 0x3eU, 0xb5U, 0x66U, 0x48U, 0x03U, 0xf6U, 0x0eU,
    0x61U, 0x35U, 0x57U, 0xb9U, 0x86U, 0xc1U, 0x1dU, 0x9eU,
    0xe1U, 0xf8U, 0x98U, 0x11U, 0x69U, 0xd9U, 0x8eU, 0x94U,
    0x9bU, 0x1eU, 0x87U, 0xe9U, 0xceU, 0x55U, 0x28U, 0xdfU,
    0x8cU, 0xa1U, 0x89U, 0x0dU, 0xbfU, 0xe6U, 0x42U, 0x68U,
    0x41U, 0x99U, 0x2dU, 0x0fU, 0xb0U, 0x54U, 0xbbU, 0x16U
};
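/*
 * When AES_ASM is defined, only the key-schedule code below is compiled in C
 * (the block encrypt/decrypt routines are supplied elsewhere, typically by an
 * assembly implementation), so the plain forward S-box Te4 is used here
 * instead of the large Te/Td tables above.
 */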
static const u32 rcon[] = {
    0x01000000, 0x02000000, 0x04000000, 0x08000000,
    0x10000000, 0x20000000, 0x40000000, 0x80000000,
    0x1B000000, 0x36000000, /* for 128-bit blocks, Rijndael never uses more than 10 rcon values */
};
/**
 * Expand the cipher key into the encryption key schedule.
 */
int AES_set_encrypt_key(const unsigned char *userKey, const int bits,
                        AES_KEY *key)
{
    u32 *rk;
    int i = 0;
    u32 temp;
    if (!userKey || !key)
        return -1;
    if (bits != 128 && bits != 192 && bits != 256)
        return -2;
    rk = key->rd_key;
    if (bits == 128)
        key->rounds = 10;
    else if (bits == 192)
        key->rounds = 12;
    else
        key->rounds = 14;
    rk[0] = GETU32(userKey);
    rk[1] = GETU32(userKey + 4);
    rk[2] = GETU32(userKey + 8);
    rk[3] = GETU32(userKey + 12);
    if (bits == 128) {
        while (1) {
            temp = rk[3];
            rk[4] = rk[0] ^
                ((u32)Te4[(temp >> 16) & 0xff] << 24) ^
                ((u32)Te4[(temp >> 8) & 0xff] << 16) ^
                ((u32)Te4[(temp) & 0xff] << 8) ^
                ((u32)Te4[(temp >> 24)]) ^
                rcon[i];
            rk[5] = rk[1] ^ rk[4];
            rk[6] = rk[2] ^ rk[5];
            rk[7] = rk[3] ^ rk[6];
            if (++i == 10) {
                return 0;
            }
            rk += 4;
        }
    }
    rk[4] = GETU32(userKey + 16);
    rk[5] = GETU32(userKey + 20);
    if (bits == 192) {
        while (1) {
            temp = rk[ 5];
            rk[ 6] = rk[ 0] ^
                ((u32)Te4[(temp >> 16) & 0xff] << 24) ^
                ((u32)Te4[(temp >> 8) & 0xff] << 16) ^
                ((u32)Te4[(temp) & 0xff] << 8) ^
                ((u32)Te4[(temp >> 24)]) ^
                rcon[i];
            rk[ 7] = rk[ 1] ^ rk[ 6];
            rk[ 8] = rk[ 2] ^ rk[ 7];
            rk[ 9] = rk[ 3] ^ rk[ 8];
            if (++i == 8) {
                return 0;
            }
            rk[10] = rk[ 4] ^ rk[ 9];
            rk[11] = rk[ 5] ^ rk[10];
            rk += 6;
        }
    }
    rk[6] = GETU32(userKey + 24);
    rk[7] = GETU32(userKey + 28);
    if (bits == 256) {
        while (1) {
            temp = rk[ 7];
            rk[ 8] = rk[ 0] ^
                ((u32)Te4[(temp >> 16) & 0xff] << 24) ^
                ((u32)Te4[(temp >> 8) & 0xff] << 16) ^
                ((u32)Te4[(temp) & 0xff] << 8) ^
                ((u32)Te4[(temp >> 24)]) ^
                rcon[i];
            rk[ 9] = rk[ 1] ^ rk[ 8];
            rk[10] = rk[ 2] ^ rk[ 9];
            rk[11] = rk[ 3] ^ rk[10];
            if (++i == 7) {
                return 0;
            }
            temp = rk[11];
            rk[12] = rk[ 4] ^
                ((u32)Te4[(temp >> 24)] << 24) ^
                ((u32)Te4[(temp >> 16) & 0xff] << 16) ^
                ((u32)Te4[(temp >> 8) & 0xff] << 8) ^
                ((u32)Te4[(temp) & 0xff]);
            rk[13] = rk[ 5] ^ rk[12];
            rk[14] = rk[ 6] ^ rk[13];
            rk[15] = rk[ 7] ^ rk[14];
            rk += 8;
        }
    }
    return 0;
}
/**
 * Expand the cipher key into the decryption key schedule.
 */
int AES_set_decrypt_key(const unsigned char *userKey, const int bits,
                        AES_KEY *key)
{
    u32 *rk;
    int i, j, status;
    u32 temp;
    /* first, start with an encryption schedule */
    status = AES_set_encrypt_key(userKey, bits, key);
    if (status < 0)
        return status;
    rk = key->rd_key;
    /* invert the order of the round keys: */
    for (i = 0, j = 4*(key->rounds); i < j; i += 4, j -= 4) {
        temp = rk[i    ]; rk[i    ] = rk[j    ]; rk[j    ] = temp;
        temp = rk[i + 1]; rk[i + 1] = rk[j + 1]; rk[j + 1] = temp;
        temp = rk[i + 2]; rk[i + 2] = rk[j + 2]; rk[j + 2] = temp;
        temp = rk[i + 3]; rk[i + 3] = rk[j + 3]; rk[j + 3] = temp;
    }
    /* apply the inverse MixColumn transform to all round keys but the first and the last: */
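    /*
     * Without the Td tables, InvMixColumn is computed arithmetically on the
     * four packed bytes of each word: tp2/tp4/tp8 are successive xtime
     * doublings of tp1 (the 0x1b reduction applied per byte via the
     * 0x80808080 mask), and tpe, tp9, tpd, tpb are the GF(2^8) multiples by
     * 0x0e, 0x09, 0x0d and 0x0b that the rotations (or equivalent shift
     * pairs) below place into the correct byte positions.
     */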
    for (i = 1; i < (key->rounds); i++) {
        rk += 4;
        for (j = 0; j < 4; j++) {
            u32 tp1, tp2, tp4, tp8, tp9, tpb, tpd, tpe, m;
            tp1 = rk[j];
            m = tp1 & 0x80808080;
            tp2 = ((tp1 & 0x7f7f7f7f) << 1) ^
                ((m - (m >> 7)) & 0x1b1b1b1b);
            m = tp2 & 0x80808080;
            tp4 = ((tp2 & 0x7f7f7f7f) << 1) ^
                ((m - (m >> 7)) & 0x1b1b1b1b);
            m = tp4 & 0x80808080;
            tp8 = ((tp4 & 0x7f7f7f7f) << 1) ^
                ((m - (m >> 7)) & 0x1b1b1b1b);
            tp9 = tp8 ^ tp1;
            tpb = tp9 ^ tp2;
            tpd = tp9 ^ tp4;
            tpe = tp8 ^ tp4 ^ tp2;
#if defined(ROTATE)
            rk[j] = tpe ^ ROTATE(tpd,16) ^
                ROTATE(tp9,24) ^ ROTATE(tpb,8);
#else
            rk[j] = tpe ^ (tpd >> 16) ^ (tpd << 16) ^
                (tp9 >> 8) ^ (tp9 << 24) ^
                (tpb >> 24) ^ (tpb << 8);
#endif
        }
    }
    return 0;
}
#endif /* AES_ASM */