aeskey.c

/*
---------------------------------------------------------------------------
Copyright (c) 1998-2013, Brian Gladman, Worcester, UK. All rights reserved.

The redistribution and use of this software (with or without changes)
is allowed without the payment of fees or royalties provided that:

  source code distributions include the above copyright notice, this
  list of conditions and the following disclaimer;

  binary distributions include the above copyright notice, this list
  of conditions and the following disclaimer in their documentation.

This software is provided 'as is' with no explicit or implied warranties
in respect of its operation, including, but not limited to, correctness
and fitness for purpose.
---------------------------------------------------------------------------
Issue Date: 20/12/2007
*/
#include "aesopt.h"
#include "aestab.h"

#if defined(USE_INTEL_AES_IF_PRESENT)
#include "aes_ni.h"
#else
/* map names here to provide the external API ('name' -> 'aes_name') */
#define aes_xi(x) aes_##x
#endif
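/* e.g. aes_xi(encrypt_key128) below expands to the external name
   aes_encrypt_key128; when USE_INTEL_AES_IF_PRESENT is defined, aes_ni.h
   takes over this name mapping instead */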
#ifdef USE_VIA_ACE_IF_PRESENT
#include "aes_via_ace.h"
#endif

#if defined(__cplusplus)
extern "C" {
#endif

/* Initialise the key schedule from the user supplied key. The key
   length can be specified in bytes, with legal values of 16, 24
   and 32, or in bits, with legal values of 128, 192 and 256. These
   values correspond with Nk values of 4, 6 and 8 respectively.

   The following macros implement a single cycle in the key
   schedule generation process. The number of cycles needed
   for each cx->n_col and nk value is:

    nk =           4  5  6  7  8
    -----------------------------
    cx->n_col = 4 10  9  8  7  7
    cx->n_col = 5 14 11 10  9  9
    cx->n_col = 6 19 15 12 11 11
    cx->n_col = 7 21 19 16 13 14
    cx->n_col = 8 29 23 19 17 14
*/
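/* For the AES block size used here (four 32-bit columns), only the
   cx->n_col = 4 row applies: the code below runs 10 cycles of ke4 for a
   128-bit key, 8 cycles of ke6 (the last truncated to kef6) for 192 bits,
   and 7 cycles of ke8 (the last truncated to kef8) for 256 bits. */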
#if defined(REDUCE_CODE_SIZE)
#define ls_box ls_sub
uint32_t ls_sub(const uint32_t t, const uint32_t n);
#define inv_mcol im_sub
uint32_t im_sub(const uint32_t x);
#ifdef ENC_KS_UNROLL
#undef ENC_KS_UNROLL
#endif
#ifdef DEC_KS_UNROLL
#undef DEC_KS_UNROLL
#endif
#endif
#if(FUNCS_IN_C & ENC_KEYING_IN_C)

#if defined(AES_128) || defined(AES_VAR)

#define ke4(k, i) \
    { \
        k[4 * (i) + 4] = ss[0] ^= ls_box(ss[3], 3) ^ t_use(r, c)[i]; \
        k[4 * (i) + 5] = ss[1] ^= ss[0]; \
        k[4 * (i) + 6] = ss[2] ^= ss[1]; \
        k[4 * (i) + 7] = ss[3] ^= ss[2]; \
    }

AES_RETURN aes_xi(encrypt_key128)(const unsigned char* key, aes_encrypt_ctx cx[1]) {
    uint32_t ss[4];

    cx->ks[0] = ss[0] = word_in(key, 0);
    cx->ks[1] = ss[1] = word_in(key, 1);
    cx->ks[2] = ss[2] = word_in(key, 2);
    cx->ks[3] = ss[3] = word_in(key, 3);

#ifdef ENC_KS_UNROLL
    ke4(cx->ks, 0);
    ke4(cx->ks, 1);
    ke4(cx->ks, 2);
    ke4(cx->ks, 3);
    ke4(cx->ks, 4);
    ke4(cx->ks, 5);
    ke4(cx->ks, 6);
    ke4(cx->ks, 7);
    ke4(cx->ks, 8);
#else
    {
        uint32_t i;
        for(i = 0; i < 9; ++i) ke4(cx->ks, i);
    }
#endif
    ke4(cx->ks, 9);

    cx->inf.l = 0;
    cx->inf.b[0] = 10 * AES_BLOCK_SIZE;
#ifdef USE_VIA_ACE_IF_PRESENT
    if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff;
#endif
    return EXIT_SUCCESS;
}
#endif
#if defined(AES_192) || defined(AES_VAR)

#define kef6(k, i) \
    { \
        k[6 * (i) + 6] = ss[0] ^= ls_box(ss[5], 3) ^ t_use(r, c)[i]; \
        k[6 * (i) + 7] = ss[1] ^= ss[0]; \
        k[6 * (i) + 8] = ss[2] ^= ss[1]; \
        k[6 * (i) + 9] = ss[3] ^= ss[2]; \
    }

#define ke6(k, i) \
    { \
        kef6(k, i); \
        k[6 * (i) + 10] = ss[4] ^= ss[3]; \
        k[6 * (i) + 11] = ss[5] ^= ss[4]; \
    }

AES_RETURN aes_xi(encrypt_key192)(const unsigned char* key, aes_encrypt_ctx cx[1]) {
    uint32_t ss[6];

    cx->ks[0] = ss[0] = word_in(key, 0);
    cx->ks[1] = ss[1] = word_in(key, 1);
    cx->ks[2] = ss[2] = word_in(key, 2);
    cx->ks[3] = ss[3] = word_in(key, 3);
    cx->ks[4] = ss[4] = word_in(key, 4);
    cx->ks[5] = ss[5] = word_in(key, 5);

#ifdef ENC_KS_UNROLL
    ke6(cx->ks, 0);
    ke6(cx->ks, 1);
    ke6(cx->ks, 2);
    ke6(cx->ks, 3);
    ke6(cx->ks, 4);
    ke6(cx->ks, 5);
    ke6(cx->ks, 6);
#else
    {
        uint32_t i;
        for(i = 0; i < 7; ++i) ke6(cx->ks, i);
    }
#endif
    kef6(cx->ks, 7);

    cx->inf.l = 0;
    cx->inf.b[0] = 12 * AES_BLOCK_SIZE;
#ifdef USE_VIA_ACE_IF_PRESENT
    if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff;
#endif
    return EXIT_SUCCESS;
}
#endif
#if defined(AES_256) || defined(AES_VAR)

#define kef8(k, i) \
    { \
        k[8 * (i) + 8] = ss[0] ^= ls_box(ss[7], 3) ^ t_use(r, c)[i]; \
        k[8 * (i) + 9] = ss[1] ^= ss[0]; \
        k[8 * (i) + 10] = ss[2] ^= ss[1]; \
        k[8 * (i) + 11] = ss[3] ^= ss[2]; \
    }

#define ke8(k, i) \
    { \
        kef8(k, i); \
        k[8 * (i) + 12] = ss[4] ^= ls_box(ss[3], 0); \
        k[8 * (i) + 13] = ss[5] ^= ss[4]; \
        k[8 * (i) + 14] = ss[6] ^= ss[5]; \
        k[8 * (i) + 15] = ss[7] ^= ss[6]; \
    }

AES_RETURN aes_xi(encrypt_key256)(const unsigned char* key, aes_encrypt_ctx cx[1]) {
    uint32_t ss[8];

    cx->ks[0] = ss[0] = word_in(key, 0);
    cx->ks[1] = ss[1] = word_in(key, 1);
    cx->ks[2] = ss[2] = word_in(key, 2);
    cx->ks[3] = ss[3] = word_in(key, 3);
    cx->ks[4] = ss[4] = word_in(key, 4);
    cx->ks[5] = ss[5] = word_in(key, 5);
    cx->ks[6] = ss[6] = word_in(key, 6);
    cx->ks[7] = ss[7] = word_in(key, 7);

#ifdef ENC_KS_UNROLL
    ke8(cx->ks, 0);
    ke8(cx->ks, 1);
    ke8(cx->ks, 2);
    ke8(cx->ks, 3);
    ke8(cx->ks, 4);
    ke8(cx->ks, 5);
#else
    {
        uint32_t i;
        for(i = 0; i < 6; ++i) ke8(cx->ks, i);
    }
#endif
    kef8(cx->ks, 6);

    cx->inf.l = 0;
    cx->inf.b[0] = 14 * AES_BLOCK_SIZE;
#ifdef USE_VIA_ACE_IF_PRESENT
    if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff;
#endif
    return EXIT_SUCCESS;
}
#endif

#endif
#if(FUNCS_IN_C & DEC_KEYING_IN_C)

/* this is used to store the decryption round keys */
/* in forward or reverse order                     */

#ifdef AES_REV_DKS
#define v(n, i) ((n) - (i) + 2 * ((i)&3))
#else
#define v(n, i) (i)
#endif
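/* For example, with AES_REV_DKS defined and a 128-bit key (n = 40),
   v(40, i) maps round key words in groups of four: words 0..3 of the
   forward schedule are stored at 40..43, words 4..7 at 36..39, and so on
   down to words 40..43 at 0..3, so the decryption rounds can read the
   schedule in reverse order. */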
#if DEC_ROUND == NO_TABLES
#define ff(x) (x)
#else
#define ff(x) inv_mcol(x)
#if defined(dec_imvars)
#define d_vars dec_imvars
#endif
#endif

#if defined(AES_128) || defined(AES_VAR)

#define k4e(k, i) \
    { \
        k[v(40, (4 * (i)) + 4)] = ss[0] ^= ls_box(ss[3], 3) ^ t_use(r, c)[i]; \
        k[v(40, (4 * (i)) + 5)] = ss[1] ^= ss[0]; \
        k[v(40, (4 * (i)) + 6)] = ss[2] ^= ss[1]; \
        k[v(40, (4 * (i)) + 7)] = ss[3] ^= ss[2]; \
    }

#if 1

#define kdf4(k, i) \
    { \
        ss[0] = ss[0] ^ ss[2] ^ ss[1] ^ ss[3]; \
        ss[1] = ss[1] ^ ss[3]; \
        ss[2] = ss[2] ^ ss[3]; \
        ss[4] = ls_box(ss[(i + 3) % 4], 3) ^ t_use(r, c)[i]; \
        ss[i % 4] ^= ss[4]; \
        ss[4] ^= k[v(40, (4 * (i)))]; \
        k[v(40, (4 * (i)) + 4)] = ff(ss[4]); \
        ss[4] ^= k[v(40, (4 * (i)) + 1)]; \
        k[v(40, (4 * (i)) + 5)] = ff(ss[4]); \
        ss[4] ^= k[v(40, (4 * (i)) + 2)]; \
        k[v(40, (4 * (i)) + 6)] = ff(ss[4]); \
        ss[4] ^= k[v(40, (4 * (i)) + 3)]; \
        k[v(40, (4 * (i)) + 7)] = ff(ss[4]); \
    }

#define kd4(k, i) \
    { \
        ss[4] = ls_box(ss[(i + 3) % 4], 3) ^ t_use(r, c)[i]; \
        ss[i % 4] ^= ss[4]; \
        ss[4] = ff(ss[4]); \
        k[v(40, (4 * (i)) + 4)] = ss[4] ^= k[v(40, (4 * (i)))]; \
        k[v(40, (4 * (i)) + 5)] = ss[4] ^= k[v(40, (4 * (i)) + 1)]; \
        k[v(40, (4 * (i)) + 6)] = ss[4] ^= k[v(40, (4 * (i)) + 2)]; \
        k[v(40, (4 * (i)) + 7)] = ss[4] ^= k[v(40, (4 * (i)) + 3)]; \
    }

#define kdl4(k, i) \
    { \
        ss[4] = ls_box(ss[(i + 3) % 4], 3) ^ t_use(r, c)[i]; \
        ss[i % 4] ^= ss[4]; \
        k[v(40, (4 * (i)) + 4)] = (ss[0] ^= ss[1]) ^ ss[2] ^ ss[3]; \
        k[v(40, (4 * (i)) + 5)] = ss[1] ^ ss[3]; \
        k[v(40, (4 * (i)) + 6)] = ss[0]; \
        k[v(40, (4 * (i)) + 7)] = ss[1]; \
    }

#else

#define kdf4(k, i) \
    { \
        ss[0] ^= ls_box(ss[3], 3) ^ t_use(r, c)[i]; \
        k[v(40, (4 * (i)) + 4)] = ff(ss[0]); \
        ss[1] ^= ss[0]; \
        k[v(40, (4 * (i)) + 5)] = ff(ss[1]); \
        ss[2] ^= ss[1]; \
        k[v(40, (4 * (i)) + 6)] = ff(ss[2]); \
        ss[3] ^= ss[2]; \
        k[v(40, (4 * (i)) + 7)] = ff(ss[3]); \
    }

#define kd4(k, i) \
    { \
        ss[4] = ls_box(ss[3], 3) ^ t_use(r, c)[i]; \
        ss[0] ^= ss[4]; \
        ss[4] = ff(ss[4]); \
        k[v(40, (4 * (i)) + 4)] = ss[4] ^= k[v(40, (4 * (i)))]; \
        ss[1] ^= ss[0]; \
        k[v(40, (4 * (i)) + 5)] = ss[4] ^= k[v(40, (4 * (i)) + 1)]; \
        ss[2] ^= ss[1]; \
        k[v(40, (4 * (i)) + 6)] = ss[4] ^= k[v(40, (4 * (i)) + 2)]; \
        ss[3] ^= ss[2]; \
        k[v(40, (4 * (i)) + 7)] = ss[4] ^= k[v(40, (4 * (i)) + 3)]; \
    }

#define kdl4(k, i) \
    { \
        ss[0] ^= ls_box(ss[3], 3) ^ t_use(r, c)[i]; \
        k[v(40, (4 * (i)) + 4)] = ss[0]; \
        ss[1] ^= ss[0]; \
        k[v(40, (4 * (i)) + 5)] = ss[1]; \
        ss[2] ^= ss[1]; \
        k[v(40, (4 * (i)) + 6)] = ss[2]; \
        ss[3] ^= ss[2]; \
        k[v(40, (4 * (i)) + 7)] = ss[3]; \
    }

#endif

AES_RETURN aes_xi(decrypt_key128)(const unsigned char* key, aes_decrypt_ctx cx[1]) {
    uint32_t ss[5];
#if defined(d_vars)
    d_vars;
#endif

    cx->ks[v(40, (0))] = ss[0] = word_in(key, 0);
    cx->ks[v(40, (1))] = ss[1] = word_in(key, 1);
    cx->ks[v(40, (2))] = ss[2] = word_in(key, 2);
    cx->ks[v(40, (3))] = ss[3] = word_in(key, 3);

#ifdef DEC_KS_UNROLL
    kdf4(cx->ks, 0);
    kd4(cx->ks, 1);
    kd4(cx->ks, 2);
    kd4(cx->ks, 3);
    kd4(cx->ks, 4);
    kd4(cx->ks, 5);
    kd4(cx->ks, 6);
    kd4(cx->ks, 7);
    kd4(cx->ks, 8);
    kdl4(cx->ks, 9);
#else
    {
        uint32_t i;
        for(i = 0; i < 10; ++i) k4e(cx->ks, i);
#if !(DEC_ROUND == NO_TABLES)
        for(i = N_COLS; i < 10 * N_COLS; ++i) cx->ks[i] = inv_mcol(cx->ks[i]);
#endif
    }
#endif

    cx->inf.l = 0;
    cx->inf.b[0] = 10 * AES_BLOCK_SIZE;
#ifdef USE_VIA_ACE_IF_PRESENT
    if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff;
#endif
    return EXIT_SUCCESS;
}
#endif
#if defined(AES_192) || defined(AES_VAR)

#define k6ef(k, i) \
    { \
        k[v(48, (6 * (i)) + 6)] = ss[0] ^= ls_box(ss[5], 3) ^ t_use(r, c)[i]; \
        k[v(48, (6 * (i)) + 7)] = ss[1] ^= ss[0]; \
        k[v(48, (6 * (i)) + 8)] = ss[2] ^= ss[1]; \
        k[v(48, (6 * (i)) + 9)] = ss[3] ^= ss[2]; \
    }

#define k6e(k, i) \
    { \
        k6ef(k, i); \
        k[v(48, (6 * (i)) + 10)] = ss[4] ^= ss[3]; \
        k[v(48, (6 * (i)) + 11)] = ss[5] ^= ss[4]; \
    }

#define kdf6(k, i) \
    { \
        ss[0] ^= ls_box(ss[5], 3) ^ t_use(r, c)[i]; \
        k[v(48, (6 * (i)) + 6)] = ff(ss[0]); \
        ss[1] ^= ss[0]; \
        k[v(48, (6 * (i)) + 7)] = ff(ss[1]); \
        ss[2] ^= ss[1]; \
        k[v(48, (6 * (i)) + 8)] = ff(ss[2]); \
        ss[3] ^= ss[2]; \
        k[v(48, (6 * (i)) + 9)] = ff(ss[3]); \
        ss[4] ^= ss[3]; \
        k[v(48, (6 * (i)) + 10)] = ff(ss[4]); \
        ss[5] ^= ss[4]; \
        k[v(48, (6 * (i)) + 11)] = ff(ss[5]); \
    }

#define kd6(k, i) \
    { \
        ss[6] = ls_box(ss[5], 3) ^ t_use(r, c)[i]; \
        ss[0] ^= ss[6]; \
        ss[6] = ff(ss[6]); \
        k[v(48, (6 * (i)) + 6)] = ss[6] ^= k[v(48, (6 * (i)))]; \
        ss[1] ^= ss[0]; \
        k[v(48, (6 * (i)) + 7)] = ss[6] ^= k[v(48, (6 * (i)) + 1)]; \
        ss[2] ^= ss[1]; \
        k[v(48, (6 * (i)) + 8)] = ss[6] ^= k[v(48, (6 * (i)) + 2)]; \
        ss[3] ^= ss[2]; \
        k[v(48, (6 * (i)) + 9)] = ss[6] ^= k[v(48, (6 * (i)) + 3)]; \
        ss[4] ^= ss[3]; \
        k[v(48, (6 * (i)) + 10)] = ss[6] ^= k[v(48, (6 * (i)) + 4)]; \
        ss[5] ^= ss[4]; \
        k[v(48, (6 * (i)) + 11)] = ss[6] ^= k[v(48, (6 * (i)) + 5)]; \
    }

#define kdl6(k, i) \
    { \
        ss[0] ^= ls_box(ss[5], 3) ^ t_use(r, c)[i]; \
        k[v(48, (6 * (i)) + 6)] = ss[0]; \
        ss[1] ^= ss[0]; \
        k[v(48, (6 * (i)) + 7)] = ss[1]; \
        ss[2] ^= ss[1]; \
        k[v(48, (6 * (i)) + 8)] = ss[2]; \
        ss[3] ^= ss[2]; \
        k[v(48, (6 * (i)) + 9)] = ss[3]; \
    }

AES_RETURN aes_xi(decrypt_key192)(const unsigned char* key, aes_decrypt_ctx cx[1]) {
    uint32_t ss[7];
#if defined(d_vars)
    d_vars;
#endif

    cx->ks[v(48, (0))] = ss[0] = word_in(key, 0);
    cx->ks[v(48, (1))] = ss[1] = word_in(key, 1);
    cx->ks[v(48, (2))] = ss[2] = word_in(key, 2);
    cx->ks[v(48, (3))] = ss[3] = word_in(key, 3);

#ifdef DEC_KS_UNROLL
    ss[4] = word_in(key, 4);
    ss[5] = word_in(key, 5);
    cx->ks[v(48, (4))] = ff(ss[4]);
    cx->ks[v(48, (5))] = ff(ss[5]);
    kdf6(cx->ks, 0);
    kd6(cx->ks, 1);
    kd6(cx->ks, 2);
    kd6(cx->ks, 3);
    kd6(cx->ks, 4);
    kd6(cx->ks, 5);
    kd6(cx->ks, 6);
    kdl6(cx->ks, 7);
#else
    cx->ks[v(48, (4))] = ss[4] = word_in(key, 4);
    cx->ks[v(48, (5))] = ss[5] = word_in(key, 5);
    {
        uint32_t i;
        for(i = 0; i < 7; ++i) k6e(cx->ks, i);
        k6ef(cx->ks, 7);
#if !(DEC_ROUND == NO_TABLES)
        for(i = N_COLS; i < 12 * N_COLS; ++i) cx->ks[i] = inv_mcol(cx->ks[i]);
#endif
    }
#endif

    cx->inf.l = 0;
    cx->inf.b[0] = 12 * AES_BLOCK_SIZE;
#ifdef USE_VIA_ACE_IF_PRESENT
    if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff;
#endif
    return EXIT_SUCCESS;
}
#endif
#if defined(AES_256) || defined(AES_VAR)

#define k8ef(k, i) \
    { \
        k[v(56, (8 * (i)) + 8)] = ss[0] ^= ls_box(ss[7], 3) ^ t_use(r, c)[i]; \
        k[v(56, (8 * (i)) + 9)] = ss[1] ^= ss[0]; \
        k[v(56, (8 * (i)) + 10)] = ss[2] ^= ss[1]; \
        k[v(56, (8 * (i)) + 11)] = ss[3] ^= ss[2]; \
    }

#define k8e(k, i) \
    { \
        k8ef(k, i); \
        k[v(56, (8 * (i)) + 12)] = ss[4] ^= ls_box(ss[3], 0); \
        k[v(56, (8 * (i)) + 13)] = ss[5] ^= ss[4]; \
        k[v(56, (8 * (i)) + 14)] = ss[6] ^= ss[5]; \
        k[v(56, (8 * (i)) + 15)] = ss[7] ^= ss[6]; \
    }

#define kdf8(k, i) \
    { \
        ss[0] ^= ls_box(ss[7], 3) ^ t_use(r, c)[i]; \
        k[v(56, (8 * (i)) + 8)] = ff(ss[0]); \
        ss[1] ^= ss[0]; \
        k[v(56, (8 * (i)) + 9)] = ff(ss[1]); \
        ss[2] ^= ss[1]; \
        k[v(56, (8 * (i)) + 10)] = ff(ss[2]); \
        ss[3] ^= ss[2]; \
        k[v(56, (8 * (i)) + 11)] = ff(ss[3]); \
        ss[4] ^= ls_box(ss[3], 0); \
        k[v(56, (8 * (i)) + 12)] = ff(ss[4]); \
        ss[5] ^= ss[4]; \
        k[v(56, (8 * (i)) + 13)] = ff(ss[5]); \
        ss[6] ^= ss[5]; \
        k[v(56, (8 * (i)) + 14)] = ff(ss[6]); \
        ss[7] ^= ss[6]; \
        k[v(56, (8 * (i)) + 15)] = ff(ss[7]); \
    }

#define kd8(k, i) \
    { \
        ss[8] = ls_box(ss[7], 3) ^ t_use(r, c)[i]; \
        ss[0] ^= ss[8]; \
        ss[8] = ff(ss[8]); \
        k[v(56, (8 * (i)) + 8)] = ss[8] ^= k[v(56, (8 * (i)))]; \
        ss[1] ^= ss[0]; \
        k[v(56, (8 * (i)) + 9)] = ss[8] ^= k[v(56, (8 * (i)) + 1)]; \
        ss[2] ^= ss[1]; \
        k[v(56, (8 * (i)) + 10)] = ss[8] ^= k[v(56, (8 * (i)) + 2)]; \
        ss[3] ^= ss[2]; \
        k[v(56, (8 * (i)) + 11)] = ss[8] ^= k[v(56, (8 * (i)) + 3)]; \
        ss[8] = ls_box(ss[3], 0); \
        ss[4] ^= ss[8]; \
        ss[8] = ff(ss[8]); \
        k[v(56, (8 * (i)) + 12)] = ss[8] ^= k[v(56, (8 * (i)) + 4)]; \
        ss[5] ^= ss[4]; \
        k[v(56, (8 * (i)) + 13)] = ss[8] ^= k[v(56, (8 * (i)) + 5)]; \
        ss[6] ^= ss[5]; \
        k[v(56, (8 * (i)) + 14)] = ss[8] ^= k[v(56, (8 * (i)) + 6)]; \
        ss[7] ^= ss[6]; \
        k[v(56, (8 * (i)) + 15)] = ss[8] ^= k[v(56, (8 * (i)) + 7)]; \
    }

#define kdl8(k, i) \
    { \
        ss[0] ^= ls_box(ss[7], 3) ^ t_use(r, c)[i]; \
        k[v(56, (8 * (i)) + 8)] = ss[0]; \
        ss[1] ^= ss[0]; \
        k[v(56, (8 * (i)) + 9)] = ss[1]; \
        ss[2] ^= ss[1]; \
        k[v(56, (8 * (i)) + 10)] = ss[2]; \
        ss[3] ^= ss[2]; \
        k[v(56, (8 * (i)) + 11)] = ss[3]; \
    }

AES_RETURN aes_xi(decrypt_key256)(const unsigned char* key, aes_decrypt_ctx cx[1]) {
    uint32_t ss[9];
#if defined(d_vars)
    d_vars;
#endif

    cx->ks[v(56, (0))] = ss[0] = word_in(key, 0);
    cx->ks[v(56, (1))] = ss[1] = word_in(key, 1);
    cx->ks[v(56, (2))] = ss[2] = word_in(key, 2);
    cx->ks[v(56, (3))] = ss[3] = word_in(key, 3);

#ifdef DEC_KS_UNROLL
    ss[4] = word_in(key, 4);
    ss[5] = word_in(key, 5);
    ss[6] = word_in(key, 6);
    ss[7] = word_in(key, 7);
    cx->ks[v(56, (4))] = ff(ss[4]);
    cx->ks[v(56, (5))] = ff(ss[5]);
    cx->ks[v(56, (6))] = ff(ss[6]);
    cx->ks[v(56, (7))] = ff(ss[7]);
    kdf8(cx->ks, 0);
    kd8(cx->ks, 1);
    kd8(cx->ks, 2);
    kd8(cx->ks, 3);
    kd8(cx->ks, 4);
    kd8(cx->ks, 5);
    kdl8(cx->ks, 6);
#else
    cx->ks[v(56, (4))] = ss[4] = word_in(key, 4);
    cx->ks[v(56, (5))] = ss[5] = word_in(key, 5);
    cx->ks[v(56, (6))] = ss[6] = word_in(key, 6);
    cx->ks[v(56, (7))] = ss[7] = word_in(key, 7);
    {
        uint32_t i;
        for(i = 0; i < 6; ++i) k8e(cx->ks, i);
        k8ef(cx->ks, 6);
#if !(DEC_ROUND == NO_TABLES)
        for(i = N_COLS; i < 14 * N_COLS; ++i) cx->ks[i] = inv_mcol(cx->ks[i]);
#endif
    }
#endif

    cx->inf.l = 0;
    cx->inf.b[0] = 14 * AES_BLOCK_SIZE;
#ifdef USE_VIA_ACE_IF_PRESENT
    if(VIA_ACE_AVAILABLE) cx->inf.b[1] = 0xff;
#endif
    return EXIT_SUCCESS;
}
#endif

#endif
#if defined(AES_VAR)

AES_RETURN aes_encrypt_key(const unsigned char* key, int key_len, aes_encrypt_ctx cx[1]) {
    switch(key_len) {
        case 16:
        case 128:
            return aes_encrypt_key128(key, cx);
        case 24:
        case 192:
            return aes_encrypt_key192(key, cx);
        case 32:
        case 256:
            return aes_encrypt_key256(key, cx);
        default:
            return EXIT_FAILURE;
    }
}

AES_RETURN aes_decrypt_key(const unsigned char* key, int key_len, aes_decrypt_ctx cx[1]) {
    switch(key_len) {
        case 16:
        case 128:
            return aes_decrypt_key128(key, cx);
        case 24:
        case 192:
            return aes_decrypt_key192(key, cx);
        case 32:
        case 256:
            return aes_decrypt_key256(key, cx);
        default:
            return EXIT_FAILURE;
    }
}
#endif
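/* A minimal usage sketch for the variable key length entry points above
   (illustrative only and excluded from the build; the wrapper function and
   its caller are hypothetical). The key length may be given in bytes (16,
   24, 32) or in bits (128, 192, 256); any other value returns EXIT_FAILURE. */
#if 0
static int example_schedule_256(const unsigned char key[32]) {
    aes_encrypt_ctx ecx[1];
    aes_decrypt_ctx dcx[1];
    if(aes_encrypt_key(key, 256, ecx) != EXIT_SUCCESS) /* length given in bits  */
        return EXIT_FAILURE;
    if(aes_decrypt_key(key, 32, dcx) != EXIT_SUCCESS)  /* length given in bytes */
        return EXIT_FAILURE;
    return EXIT_SUCCESS; /* both contexts now hold 14-round key schedules */
}
#endif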
#if defined(__cplusplus)
}
#endif