1 /* Rijndael (AES) for GnuPG
2 * Copyright (C) 2000, 2001, 2002, 2003, 2007,
3 * 2008, 2011, 2012 Free Software Foundation, Inc.
4 *
5 * This file is part of Libgcrypt.
6 *
7 * Libgcrypt is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU Lesser General Public License as
9 * published by the Free Software Foundation; either version 2.1 of
10 * the License, or (at your option) any later version.
11 *
12 * Libgcrypt is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this program; if not, see <http://www.gnu.org/licenses/>.
19 *******************************************************************
20 * The code here is based on the optimized implementation taken from
21 * http://www.esat.kuleuven.ac.be/~rijmen/rijndael/ on Oct 2, 2000,
22 * which carries this notice:
23 *------------------------------------------
24 * rijndael-alg-fst.c v2.3 April '2000
25 *
26 * Optimised ANSI C code
27 *
28 * authors: v1.0: Antoon Bosselaers
29 * v2.0: Vincent Rijmen
30 * v2.3: Paulo Barreto
31 *
32 * This code is placed in the public domain.
33 *------------------------------------------
34 *
35 * The SP800-38a document is available at:
36 * http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf
37 *
38 */
39
40 #include <config.h>
41 #include <stdio.h>
42 #include <stdlib.h>
43 #include <string.h> /* for memcmp() */
44
45 #include "types.h" /* for byte and u32 typedefs */
46 #include "g10lib.h"
47 #include "cipher.h"
48 #include "bufhelp.h"
49 #include "cipher-selftest.h"
50 #include "rijndael-internal.h"
51 #include "./cipher-internal.h"
52
53
54 #ifdef USE_AMD64_ASM
55 /* AMD64 assembly implementations of AES */
56 extern unsigned int _gcry_aes_amd64_encrypt_block(const void *keysched_enc,
57 unsigned char *out,
58 const unsigned char *in,
59 int rounds,
60 const void *encT);
61
62 extern unsigned int _gcry_aes_amd64_decrypt_block(const void *keysched_dec,
63 unsigned char *out,
64 const unsigned char *in,
65 int rounds,
66 const void *decT);
67 #endif /*USE_AMD64_ASM*/
68
69 #ifdef USE_AESNI
70 /* AES-NI (AMD64 & i386) accelerated implementations of AES */
71 extern void _gcry_aes_aesni_do_setkey(RIJNDAEL_context *ctx, const byte *key);
72 extern void _gcry_aes_aesni_prepare_decryption(RIJNDAEL_context *ctx);
73
74 extern unsigned int _gcry_aes_aesni_encrypt (const RIJNDAEL_context *ctx,
75 unsigned char *dst,
76 const unsigned char *src);
77 extern unsigned int _gcry_aes_aesni_decrypt (const RIJNDAEL_context *ctx,
78 unsigned char *dst,
79 const unsigned char *src);
80 extern void _gcry_aes_aesni_cfb_enc (void *context, unsigned char *iv,
81 void *outbuf_arg, const void *inbuf_arg,
82 size_t nblocks);
83 extern void _gcry_aes_aesni_cbc_enc (void *context, unsigned char *iv,
84 void *outbuf_arg, const void *inbuf_arg,
85 size_t nblocks, int cbc_mac);
86 extern void _gcry_aes_aesni_ctr_enc (void *context, unsigned char *ctr,
87 void *outbuf_arg, const void *inbuf_arg,
88 size_t nblocks);
89 extern void _gcry_aes_aesni_cfb_dec (void *context, unsigned char *iv,
90 void *outbuf_arg, const void *inbuf_arg,
91 size_t nblocks);
92 extern void _gcry_aes_aesni_cbc_dec (void *context, unsigned char *iv,
93 void *outbuf_arg, const void *inbuf_arg,
94 size_t nblocks);
95 extern size_t _gcry_aes_aesni_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
96 const void *inbuf_arg, size_t nblocks,
97 int encrypt);
98 extern size_t _gcry_aes_aesni_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg,
99 size_t nblocks);
100 extern void _gcry_aes_aesni_xts_crypt (void *context, unsigned char *tweak,
101 void *outbuf_arg, const void *inbuf_arg,
102 size_t nblocks, int encrypt);
103 #endif
104
105 #ifdef USE_SSSE3
106 /* SSSE3 (AMD64) vector permutation implementation of AES */
107 extern void _gcry_aes_ssse3_do_setkey(RIJNDAEL_context *ctx, const byte *key);
108 extern void _gcry_aes_ssse3_prepare_decryption(RIJNDAEL_context *ctx);
109
110 extern unsigned int _gcry_aes_ssse3_encrypt (const RIJNDAEL_context *ctx,
111 unsigned char *dst,
112 const unsigned char *src);
113 extern unsigned int _gcry_aes_ssse3_decrypt (const RIJNDAEL_context *ctx,
114 unsigned char *dst,
115 const unsigned char *src);
116 extern void _gcry_aes_ssse3_cfb_enc (void *context, unsigned char *iv,
117 void *outbuf_arg, const void *inbuf_arg,
118 size_t nblocks);
119 extern void _gcry_aes_ssse3_cbc_enc (void *context, unsigned char *iv,
120 void *outbuf_arg, const void *inbuf_arg,
121 size_t nblocks,
122 int cbc_mac);
123 extern void _gcry_aes_ssse3_ctr_enc (void *context, unsigned char *ctr,
124 void *outbuf_arg, const void *inbuf_arg,
125 size_t nblocks);
126 extern void _gcry_aes_ssse3_cfb_dec (void *context, unsigned char *iv,
127 void *outbuf_arg, const void *inbuf_arg,
128 size_t nblocks);
129 extern void _gcry_aes_ssse3_cbc_dec (void *context, unsigned char *iv,
130 void *outbuf_arg, const void *inbuf_arg,
131 size_t nblocks);
132 extern size_t _gcry_aes_ssse3_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
133 const void *inbuf_arg, size_t nblocks,
134 int encrypt);
135 extern size_t _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg,
136 size_t nblocks);
137 #endif
138
139 #ifdef USE_PADLOCK
140 extern unsigned int _gcry_aes_padlock_encrypt (const RIJNDAEL_context *ctx,
141 unsigned char *bx,
142 const unsigned char *ax);
143 extern unsigned int _gcry_aes_padlock_decrypt (const RIJNDAEL_context *ctx,
144 unsigned char *bx,
145 const unsigned char *ax);
146 extern void _gcry_aes_padlock_prepare_decryption (RIJNDAEL_context *ctx);
147 #endif
148
149 #ifdef USE_ARM_ASM
150 /* ARM assembly implementations of AES */
151 extern unsigned int _gcry_aes_arm_encrypt_block(const void *keysched_enc,
152 unsigned char *out,
153 const unsigned char *in,
154 int rounds,
155 const void *encT);
156
157 extern unsigned int _gcry_aes_arm_decrypt_block(const void *keysched_dec,
158 unsigned char *out,
159 const unsigned char *in,
160 int rounds,
161 const void *decT);
162 #endif /*USE_ARM_ASM*/
163
164 #ifdef USE_ARM_CE
165 /* ARMv8 Crypto Extension implementations of AES */
166 extern void _gcry_aes_armv8_ce_setkey(RIJNDAEL_context *ctx, const byte *key);
167 extern void _gcry_aes_armv8_ce_prepare_decryption(RIJNDAEL_context *ctx);
168
169 extern unsigned int _gcry_aes_armv8_ce_encrypt(const RIJNDAEL_context *ctx,
170 unsigned char *dst,
171 const unsigned char *src);
172 extern unsigned int _gcry_aes_armv8_ce_decrypt(const RIJNDAEL_context *ctx,
173 unsigned char *dst,
174 const unsigned char *src);
175
176 extern void _gcry_aes_armv8_ce_cfb_enc (void *context, unsigned char *iv,
177 void *outbuf_arg, const void *inbuf_arg,
178 size_t nblocks);
179 extern void _gcry_aes_armv8_ce_cbc_enc (void *context, unsigned char *iv,
180 void *outbuf_arg, const void *inbuf_arg,
181 size_t nblocks,
182 int cbc_mac);
183 extern void _gcry_aes_armv8_ce_ctr_enc (void *context, unsigned char *ctr,
184 void *outbuf_arg, const void *inbuf_arg,
185 size_t nblocks);
186 extern void _gcry_aes_armv8_ce_cfb_dec (void *context, unsigned char *iv,
187 void *outbuf_arg, const void *inbuf_arg,
188 size_t nblocks);
189 extern void _gcry_aes_armv8_ce_cbc_dec (void *context, unsigned char *iv,
190 void *outbuf_arg, const void *inbuf_arg,
191 size_t nblocks);
192 extern size_t _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
193 const void *inbuf_arg, size_t nblocks,
194 int encrypt);
195 extern size_t _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c,
196 const void *abuf_arg, size_t nblocks);
197 extern void _gcry_aes_armv8_ce_xts_crypt (void *context, unsigned char *tweak,
198 void *outbuf_arg,
199 const void *inbuf_arg,
200 size_t nblocks, int encrypt);
#endif /*USE_ARM_CE*/
202
203 #ifdef USE_PPC_CRYPTO
204 /* PowerPC Crypto implementations of AES */
205 extern void _gcry_aes_ppc8_setkey(RIJNDAEL_context *ctx, const byte *key);
206 extern void _gcry_aes_ppc8_prepare_decryption(RIJNDAEL_context *ctx);
207
208 extern unsigned int _gcry_aes_ppc8_encrypt(const RIJNDAEL_context *ctx,
209 unsigned char *dst,
210 const unsigned char *src);
211 extern unsigned int _gcry_aes_ppc8_decrypt(const RIJNDAEL_context *ctx,
212 unsigned char *dst,
213 const unsigned char *src);
214
215 extern void _gcry_aes_ppc8_cfb_enc (void *context, unsigned char *iv,
216 void *outbuf_arg, const void *inbuf_arg,
217 size_t nblocks);
218 extern void _gcry_aes_ppc8_cbc_enc (void *context, unsigned char *iv,
219 void *outbuf_arg, const void *inbuf_arg,
220 size_t nblocks, int cbc_mac);
221 extern void _gcry_aes_ppc8_ctr_enc (void *context, unsigned char *ctr,
222 void *outbuf_arg, const void *inbuf_arg,
223 size_t nblocks);
224 extern void _gcry_aes_ppc8_cfb_dec (void *context, unsigned char *iv,
225 void *outbuf_arg, const void *inbuf_arg,
226 size_t nblocks);
227 extern void _gcry_aes_ppc8_cbc_dec (void *context, unsigned char *iv,
228 void *outbuf_arg, const void *inbuf_arg,
229 size_t nblocks);
230
231 extern size_t _gcry_aes_ppc8_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
232 const void *inbuf_arg, size_t nblocks,
233 int encrypt);
234 extern size_t _gcry_aes_ppc8_ocb_auth (gcry_cipher_hd_t c,
235 const void *abuf_arg, size_t nblocks);
236
237 extern void _gcry_aes_ppc8_xts_crypt (void *context, unsigned char *tweak,
238 void *outbuf_arg,
239 const void *inbuf_arg,
240 size_t nblocks, int encrypt);
241 #endif /*USE_PPC_CRYPTO*/
242
243 #ifdef USE_PPC_CRYPTO_WITH_PPC9LE
244 /* Power9 little-endian crypto implementations of AES */
245 extern unsigned int _gcry_aes_ppc9le_encrypt(const RIJNDAEL_context *ctx,
246 unsigned char *dst,
247 const unsigned char *src);
248 extern unsigned int _gcry_aes_ppc9le_decrypt(const RIJNDAEL_context *ctx,
249 unsigned char *dst,
250 const unsigned char *src);
251
252 extern void _gcry_aes_ppc9le_cfb_enc (void *context, unsigned char *iv,
253 void *outbuf_arg, const void *inbuf_arg,
254 size_t nblocks);
255 extern void _gcry_aes_ppc9le_cbc_enc (void *context, unsigned char *iv,
256 void *outbuf_arg, const void *inbuf_arg,
257 size_t nblocks, int cbc_mac);
258 extern void _gcry_aes_ppc9le_ctr_enc (void *context, unsigned char *ctr,
259 void *outbuf_arg, const void *inbuf_arg,
260 size_t nblocks);
261 extern void _gcry_aes_ppc9le_cfb_dec (void *context, unsigned char *iv,
262 void *outbuf_arg, const void *inbuf_arg,
263 size_t nblocks);
264 extern void _gcry_aes_ppc9le_cbc_dec (void *context, unsigned char *iv,
265 void *outbuf_arg, const void *inbuf_arg,
266 size_t nblocks);
267
268 extern size_t _gcry_aes_ppc9le_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
269 const void *inbuf_arg, size_t nblocks,
270 int encrypt);
271 extern size_t _gcry_aes_ppc9le_ocb_auth (gcry_cipher_hd_t c,
272 const void *abuf_arg, size_t nblocks);
273
274 extern void _gcry_aes_ppc9le_xts_crypt (void *context, unsigned char *tweak,
275 void *outbuf_arg,
276 const void *inbuf_arg,
277 size_t nblocks, int encrypt);
278 #endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/
279
280 #ifdef USE_S390X_CRYPTO
281 /* zSeries crypto implementations of AES */
282 extern int _gcry_aes_s390x_setup_acceleration(RIJNDAEL_context *ctx,
283 unsigned int keylen,
284 unsigned int hwfeatures,
285 cipher_bulk_ops_t *bulk_ops);
286 extern void _gcry_aes_s390x_setkey(RIJNDAEL_context *ctx, const byte *key);
287 extern void _gcry_aes_s390x_prepare_decryption(RIJNDAEL_context *ctx);
288
289 extern unsigned int _gcry_aes_s390x_encrypt(const RIJNDAEL_context *ctx,
290 unsigned char *dst,
291 const unsigned char *src);
292 extern unsigned int _gcry_aes_s390x_decrypt(const RIJNDAEL_context *ctx,
293 unsigned char *dst,
294 const unsigned char *src);
295
296 #endif /*USE_S390X_CRYPTO*/
297
298 static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
299 const unsigned char *ax);
300 static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
301 const unsigned char *ax);
302
303 static void _gcry_aes_cfb_enc (void *context, unsigned char *iv,
304 void *outbuf, const void *inbuf,
305 size_t nblocks);
306 static void _gcry_aes_cfb_dec (void *context, unsigned char *iv,
307 void *outbuf_arg, const void *inbuf_arg,
308 size_t nblocks);
309 static void _gcry_aes_cbc_enc (void *context, unsigned char *iv,
310 void *outbuf_arg, const void *inbuf_arg,
311 size_t nblocks, int cbc_mac);
312 static void _gcry_aes_cbc_dec (void *context, unsigned char *iv,
313 void *outbuf_arg, const void *inbuf_arg,
314 size_t nblocks);
315 static void _gcry_aes_ctr_enc (void *context, unsigned char *ctr,
316 void *outbuf_arg, const void *inbuf_arg,
317 size_t nblocks);
318 static size_t _gcry_aes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
319 const void *inbuf_arg, size_t nblocks,
320 int encrypt);
321 static size_t _gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg,
322 size_t nblocks);
323 static void _gcry_aes_xts_crypt (void *context, unsigned char *tweak,
324 void *outbuf_arg, const void *inbuf_arg,
325 size_t nblocks, int encrypt);
326
327
328 /* All the numbers. */
329 #include "rijndael-tables.h"
330
331
332
333
334 /* Function prototypes. */
335 static const char *selftest(void);
336 static void prepare_decryption(RIJNDAEL_context *ctx);
337
338
339
340 /* Prefetching for encryption/decryption tables. */
/* Prefetch the LEN bytes at TAB into the data cache by touching one
 * byte per (assumed 32-byte) cache line.  The volatile qualifier keeps
 * the otherwise dead reads from being optimized away.  The first loop
 * is unrolled eight lines deep; the second handles the remainder.  */
static inline void prefetch_table(const volatile byte *tab, size_t len)
{
  size_t i;

  /* Guard against LEN == 0: "len - 1" below would wrap around and
   * read far out of bounds.  */
  if (len == 0)
    return;

  for (i = 0; len - i >= 8 * 32; i += 8 * 32)
    {
      (void)tab[i + 0 * 32];
      (void)tab[i + 1 * 32];
      (void)tab[i + 2 * 32];
      (void)tab[i + 3 * 32];
      (void)tab[i + 4 * 32];
      (void)tab[i + 5 * 32];
      (void)tab[i + 6 * 32];
      (void)tab[i + 7 * 32];
    }
  for (; i < len; i += 32)
    {
      (void)tab[i];
    }

  /* Touch the final byte in case LEN is not a multiple of the stride.  */
  (void)tab[len - 1];
}
363
prefetch_enc(void)364 static void prefetch_enc(void)
365 {
366 /* Modify counters to trigger copy-on-write and unsharing if physical pages
367 * of look-up table are shared between processes. Modifying counters also
368 * causes checksums for pages to change and hint same-page merging algorithm
369 * that these pages are frequently changing. */
370 enc_tables.counter_head++;
371 enc_tables.counter_tail++;
372
373 /* Prefetch look-up tables to cache. */
374 prefetch_table((const void *)&enc_tables, sizeof(enc_tables));
375 }
376
prefetch_dec(void)377 static void prefetch_dec(void)
378 {
379 /* Modify counters to trigger copy-on-write and unsharing if physical pages
380 * of look-up table are shared between processes. Modifying counters also
381 * causes checksums for pages to change and hint same-page merging algorithm
382 * that these pages are frequently changing. */
383 dec_tables.counter_head++;
384 dec_tables.counter_tail++;
385
386 /* Prefetch look-up tables to cache. */
387 prefetch_table((const void *)&dec_tables, sizeof(dec_tables));
388 }
389
390
391
392 /* Perform the key setup. */
static gcry_err_code_t
do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen,
           cipher_bulk_ops_t *bulk_ops)
{
  static int initialized = 0;
  static const char *selftest_failed = 0;
  void (*hw_setkey)(RIJNDAEL_context *ctx, const byte *key) = NULL;
  int rounds;
  int i,j, r, t, rconpointer = 0;
  int KC;
  unsigned int hwfeatures;

  /* The on-the-fly self tests are only run in non-fips mode. In fips
     mode explicit self-tests are required.  Actually the on-the-fly
     self-tests are not fully thread-safe and it might happen that a
     failed self-test won't get noticed in another thread.

     FIXME: We might want to have a central registry of succeeded
     self-tests. */
  if (!fips_mode () && !initialized)
    {
      initialized = 1;
      selftest_failed = selftest ();
      if (selftest_failed)
        log_error ("%s\n", selftest_failed );
    }
  if (selftest_failed)
    return GPG_ERR_SELFTEST_FAILED;

  /* Derive the round count and the number of 32-bit key columns (KC)
     from the key length; only the three AES key sizes are accepted.  */
  if( keylen == 128/8 )
    {
      rounds = 10;
      KC = 4;
    }
  else if ( keylen == 192/8 )
    {
      rounds = 12;
      KC = 6;
    }
  else if ( keylen == 256/8 )
    {
      rounds = 14;
      KC = 8;
    }
  else
    return GPG_ERR_INV_KEYLEN;

  ctx->rounds = rounds;
  hwfeatures = _gcry_get_hw_features ();

  /* A new key invalidates any previously derived decryption schedule.  */
  ctx->decryption_prepared = 0;

  /* Setup default bulk encryption routines.  */
  memset (bulk_ops, 0, sizeof(*bulk_ops));
  bulk_ops->cfb_enc = _gcry_aes_cfb_enc;
  bulk_ops->cfb_dec = _gcry_aes_cfb_dec;
  bulk_ops->cbc_enc = _gcry_aes_cbc_enc;
  bulk_ops->cbc_dec = _gcry_aes_cbc_dec;
  bulk_ops->ctr_enc = _gcry_aes_ctr_enc;
  bulk_ops->ocb_crypt = _gcry_aes_ocb_crypt;
  bulk_ops->ocb_auth = _gcry_aes_ocb_auth;
  bulk_ops->xts_crypt = _gcry_aes_xts_crypt;

  /* Silence the "unused variable" warning when no accelerated
     implementation is compiled in.  */
  (void)hwfeatures;

  /* Pick the best available implementation; the order of this chain is
     the preference order.  Each branch installs block encrypt/decrypt
     functions and, where available, overrides the bulk routines.  */
  if (0)
    {
      ;
    }
#ifdef USE_AESNI
  else if (hwfeatures & HWF_INTEL_AESNI)
    {
      hw_setkey = _gcry_aes_aesni_do_setkey;
      ctx->encrypt_fn = _gcry_aes_aesni_encrypt;
      ctx->decrypt_fn = _gcry_aes_aesni_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_aesni_prepare_decryption;
      ctx->use_avx = !!(hwfeatures & HWF_INTEL_AVX);
      ctx->use_avx2 = !!(hwfeatures & HWF_INTEL_AVX2);

      /* Setup AES-NI bulk encryption routines.  */
      bulk_ops->cfb_enc = _gcry_aes_aesni_cfb_enc;
      bulk_ops->cfb_dec = _gcry_aes_aesni_cfb_dec;
      bulk_ops->cbc_enc = _gcry_aes_aesni_cbc_enc;
      bulk_ops->cbc_dec = _gcry_aes_aesni_cbc_dec;
      bulk_ops->ctr_enc = _gcry_aes_aesni_ctr_enc;
      bulk_ops->ocb_crypt = _gcry_aes_aesni_ocb_crypt;
      bulk_ops->ocb_auth = _gcry_aes_aesni_ocb_auth;
      bulk_ops->xts_crypt = _gcry_aes_aesni_xts_crypt;
    }
#endif
#ifdef USE_PADLOCK
  /* Padlock only handles 128-bit keys; larger keys fall through to the
     next implementation.  */
  else if (hwfeatures & HWF_PADLOCK_AES && keylen == 128/8)
    {
      ctx->encrypt_fn = _gcry_aes_padlock_encrypt;
      ctx->decrypt_fn = _gcry_aes_padlock_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_padlock_prepare_decryption;
      memcpy (ctx->padlockkey, key, keylen);
    }
#endif
#ifdef USE_SSSE3
  else if (hwfeatures & HWF_INTEL_SSSE3)
    {
      hw_setkey = _gcry_aes_ssse3_do_setkey;
      ctx->encrypt_fn = _gcry_aes_ssse3_encrypt;
      ctx->decrypt_fn = _gcry_aes_ssse3_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_ssse3_prepare_decryption;

      /* Setup SSSE3 bulk encryption routines.  */
      bulk_ops->cfb_enc = _gcry_aes_ssse3_cfb_enc;
      bulk_ops->cfb_dec = _gcry_aes_ssse3_cfb_dec;
      bulk_ops->cbc_enc = _gcry_aes_ssse3_cbc_enc;
      bulk_ops->cbc_dec = _gcry_aes_ssse3_cbc_dec;
      bulk_ops->ctr_enc = _gcry_aes_ssse3_ctr_enc;
      bulk_ops->ocb_crypt = _gcry_aes_ssse3_ocb_crypt;
      bulk_ops->ocb_auth = _gcry_aes_ssse3_ocb_auth;
    }
#endif
#ifdef USE_ARM_CE
  else if (hwfeatures & HWF_ARM_AES)
    {
      hw_setkey = _gcry_aes_armv8_ce_setkey;
      ctx->encrypt_fn = _gcry_aes_armv8_ce_encrypt;
      ctx->decrypt_fn = _gcry_aes_armv8_ce_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_armv8_ce_prepare_decryption;

      /* Setup ARM-CE bulk encryption routines.  */
      bulk_ops->cfb_enc = _gcry_aes_armv8_ce_cfb_enc;
      bulk_ops->cfb_dec = _gcry_aes_armv8_ce_cfb_dec;
      bulk_ops->cbc_enc = _gcry_aes_armv8_ce_cbc_enc;
      bulk_ops->cbc_dec = _gcry_aes_armv8_ce_cbc_dec;
      bulk_ops->ctr_enc = _gcry_aes_armv8_ce_ctr_enc;
      bulk_ops->ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt;
      bulk_ops->ocb_auth = _gcry_aes_armv8_ce_ocb_auth;
      bulk_ops->xts_crypt = _gcry_aes_armv8_ce_xts_crypt;
    }
#endif
#ifdef USE_PPC_CRYPTO_WITH_PPC9LE
  /* POWER9 little-endian variant; note it shares the PPC8 key setup
     and decryption preparation.  */
  else if ((hwfeatures & HWF_PPC_VCRYPTO) && (hwfeatures & HWF_PPC_ARCH_3_00))
    {
      hw_setkey = _gcry_aes_ppc8_setkey;
      ctx->encrypt_fn = _gcry_aes_ppc9le_encrypt;
      ctx->decrypt_fn = _gcry_aes_ppc9le_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_ppc8_prepare_decryption;

      /* Setup PPC9LE bulk encryption routines.  */
      bulk_ops->cfb_enc = _gcry_aes_ppc9le_cfb_enc;
      bulk_ops->cfb_dec = _gcry_aes_ppc9le_cfb_dec;
      bulk_ops->cbc_enc = _gcry_aes_ppc9le_cbc_enc;
      bulk_ops->cbc_dec = _gcry_aes_ppc9le_cbc_dec;
      bulk_ops->ctr_enc = _gcry_aes_ppc9le_ctr_enc;
      bulk_ops->ocb_crypt = _gcry_aes_ppc9le_ocb_crypt;
      bulk_ops->ocb_auth = _gcry_aes_ppc9le_ocb_auth;
      bulk_ops->xts_crypt = _gcry_aes_ppc9le_xts_crypt;
    }
#endif
#ifdef USE_PPC_CRYPTO
  else if (hwfeatures & HWF_PPC_VCRYPTO)
    {
      hw_setkey = _gcry_aes_ppc8_setkey;
      ctx->encrypt_fn = _gcry_aes_ppc8_encrypt;
      ctx->decrypt_fn = _gcry_aes_ppc8_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_ppc8_prepare_decryption;

      /* Setup PPC8 bulk encryption routines.  */
      bulk_ops->cfb_enc = _gcry_aes_ppc8_cfb_enc;
      bulk_ops->cfb_dec = _gcry_aes_ppc8_cfb_dec;
      bulk_ops->cbc_enc = _gcry_aes_ppc8_cbc_enc;
      bulk_ops->cbc_dec = _gcry_aes_ppc8_cbc_dec;
      bulk_ops->ctr_enc = _gcry_aes_ppc8_ctr_enc;
      bulk_ops->ocb_crypt = _gcry_aes_ppc8_ocb_crypt;
      bulk_ops->ocb_auth = _gcry_aes_ppc8_ocb_auth;
      bulk_ops->xts_crypt = _gcry_aes_ppc8_xts_crypt;
    }
#endif
#ifdef USE_S390X_CRYPTO
  /* The s390x helper installs its own bulk routines as a side effect
     when it reports acceleration as available.  */
  else if (_gcry_aes_s390x_setup_acceleration (ctx, keylen, hwfeatures,
					       bulk_ops))
    {
      hw_setkey = _gcry_aes_s390x_setkey;
      ctx->encrypt_fn = _gcry_aes_s390x_encrypt;
      ctx->decrypt_fn = _gcry_aes_s390x_decrypt;
      ctx->prefetch_enc_fn = NULL;
      ctx->prefetch_dec_fn = NULL;
      ctx->prepare_decryption = _gcry_aes_s390x_prepare_decryption;
    }
#endif
  else
    {
      /* Portable table-based fallback; the prefetch hooks mitigate
         cache-timing leaks of the look-up tables.  */
      ctx->encrypt_fn = do_encrypt;
      ctx->decrypt_fn = do_decrypt;
      ctx->prefetch_enc_fn = prefetch_enc;
      ctx->prefetch_dec_fn = prefetch_dec;
      ctx->prepare_decryption = prepare_decryption;
    }

  /* NB: We don't yet support Padlock hardware key generation.  */

  if (hw_setkey)
    {
      hw_setkey (ctx, key);
    }
  else
    {
      /* Software key expansion (FIPS-197 style).  sbox[i * 4] reads
         byte 1 of 32-bit encT entry i, which holds the plain S-box
         value -- hence the "+ 1" and the "* 4" strides below.  */
      const byte *sbox = ((const byte *)encT) + 1;
      union
        {
          PROPERLY_ALIGNED_TYPE dummy;
          byte data[MAXKC][4];
          u32 data32[MAXKC];
        } tkk[2];
#define k      tkk[0].data
#define k_u32  tkk[0].data32
#define tk     tkk[1].data
#define tk_u32 tkk[1].data32
#define W      (ctx->keyschenc)
#define W_u32  (ctx->keyschenc32)

      prefetch_enc();

      /* Load the raw key bytes into the KC four-byte columns of k.  */
      for (i = 0; i < keylen; i++)
        {
          k[i >> 2][i & 3] = key[i];
        }

      for (j = KC-1; j >= 0; j--)
        {
          tk_u32[j] = k_u32[j];
        }
      r = 0;
      t = 0;
      /* Copy values into round key array. */
      for (j = 0; (j < KC) && (r < rounds + 1); )
        {
          for (; (j < KC) && (t < 4); j++, t++)
            {
              W_u32[r][t] = le_bswap32(tk_u32[j]);
            }
          if (t == 4)
            {
              r++;
              t = 0;
            }
        }

      while (r < rounds + 1)
        {
          /* While not enough round key material calculated calculate
             new values.  RotWord + SubWord on the last column, then
             XOR in the round constant.  */
          tk[0][0] ^= sbox[tk[KC-1][1] * 4];
          tk[0][1] ^= sbox[tk[KC-1][2] * 4];
          tk[0][2] ^= sbox[tk[KC-1][3] * 4];
          tk[0][3] ^= sbox[tk[KC-1][0] * 4];
          tk[0][0] ^= rcon[rconpointer++];

          if (KC != 8)
            {
              for (j = 1; j < KC; j++)
                {
                  tk_u32[j] ^= tk_u32[j-1];
                }
            }
          else
            {
              /* AES-256 (KC == 8): an additional S-box substitution is
                 applied to the middle column of the key block.  */
              for (j = 1; j < KC/2; j++)
                {
                  tk_u32[j] ^= tk_u32[j-1];
                }
              tk[KC/2][0] ^= sbox[tk[KC/2 - 1][0] * 4];
              tk[KC/2][1] ^= sbox[tk[KC/2 - 1][1] * 4];
              tk[KC/2][2] ^= sbox[tk[KC/2 - 1][2] * 4];
              tk[KC/2][3] ^= sbox[tk[KC/2 - 1][3] * 4];
              for (j = KC/2 + 1; j < KC; j++)
                {
                  tk_u32[j] ^= tk_u32[j-1];
                }
            }

          /* Copy values into round key array. */
          for (j = 0; (j < KC) && (r < rounds + 1); )
            {
              for (; (j < KC) && (t < 4); j++, t++)
                {
                  W_u32[r][t] = le_bswap32(tk_u32[j]);
                }
              if (t == 4)
                {
                  r++;
                  t = 0;
                }
            }
        }
#undef W
#undef tk
#undef k
#undef W_u32
#undef tk_u32
#undef k_u32
      /* Burn the temporary key material from the stack.  */
      wipememory(&tkk, sizeof(tkk));
    }

  return 0;
}
707
708
709 static gcry_err_code_t
rijndael_setkey(void * context,const byte * key,const unsigned keylen,cipher_bulk_ops_t * bulk_ops)710 rijndael_setkey (void *context, const byte *key, const unsigned keylen,
711 cipher_bulk_ops_t *bulk_ops)
712 {
713 RIJNDAEL_context *ctx = context;
714 return do_setkey (ctx, key, keylen, bulk_ops);
715 }
716
717
718 /* Make a decryption key from an encryption key. */
719 static void
prepare_decryption(RIJNDAEL_context * ctx)720 prepare_decryption( RIJNDAEL_context *ctx )
721 {
722 const byte *sbox = ((const byte *)encT) + 1;
723 int r;
724
725 prefetch_enc();
726 prefetch_dec();
727
728 ctx->keyschdec32[0][0] = ctx->keyschenc32[0][0];
729 ctx->keyschdec32[0][1] = ctx->keyschenc32[0][1];
730 ctx->keyschdec32[0][2] = ctx->keyschenc32[0][2];
731 ctx->keyschdec32[0][3] = ctx->keyschenc32[0][3];
732
733 for (r = 1; r < ctx->rounds; r++)
734 {
735 u32 *wi = ctx->keyschenc32[r];
736 u32 *wo = ctx->keyschdec32[r];
737 u32 wt;
738
739 wt = wi[0];
740 wo[0] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
741 ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
742 ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
743 ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
744
745 wt = wi[1];
746 wo[1] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
747 ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
748 ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
749 ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
750
751 wt = wi[2];
752 wo[2] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
753 ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
754 ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
755 ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
756
757 wt = wi[3];
758 wo[3] = rol(decT[sbox[(byte)(wt >> 0) * 4]], 8 * 0)
759 ^ rol(decT[sbox[(byte)(wt >> 8) * 4]], 8 * 1)
760 ^ rol(decT[sbox[(byte)(wt >> 16) * 4]], 8 * 2)
761 ^ rol(decT[sbox[(byte)(wt >> 24) * 4]], 8 * 3);
762 }
763
764 ctx->keyschdec32[r][0] = ctx->keyschenc32[r][0];
765 ctx->keyschdec32[r][1] = ctx->keyschenc32[r][1];
766 ctx->keyschdec32[r][2] = ctx->keyschenc32[r][2];
767 ctx->keyschdec32[r][3] = ctx->keyschenc32[r][3];
768 }
769
770
771 #if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM)
772 /* Encrypt one block. A and B may be the same. */
static unsigned int
do_encrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b,
               const unsigned char *a)
{
#define rk (ctx->keyschenc32)
  /* sbox[i * 4] reads byte 1 of 32-bit encT entry i, which holds the
     plain S-box value -- used only for the final round, which has no
     MixColumns step.  */
  const byte *sbox = ((const byte *)encT) + 1;
  int rounds = ctx->rounds;
  int r;
  u32 sa[4];
  u32 sb[4];

  /* Load the input block as four little-endian state words.  */
  sb[0] = buf_get_le32(a + 0);
  sb[1] = buf_get_le32(a + 4);
  sb[2] = buf_get_le32(a + 8);
  sb[3] = buf_get_le32(a + 12);

  /* Initial AddRoundKey with round key 0.  */
  sa[0] = sb[0] ^ rk[0][0];
  sa[1] = sb[1] ^ rk[0][1];
  sa[2] = sb[2] ^ rk[0][2];
  sa[3] = sb[3] ^ rk[0][3];

  /* Round 1, fully unrolled: each state byte indexes the combined
     SubBytes/ShiftRows/MixColumns table encT, rotated into its byte
     position; the sb[]/sa[] index shuffle implements ShiftRows.  */
  sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
  sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
  sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
  sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
  sa[0] = rk[1][0] ^ sb[0];

  sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
  sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
  sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
  sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
  sa[1] = rk[1][1] ^ sb[1];

  sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
  sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
  sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
  sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
  sa[2] = rk[1][2] ^ sb[2];

  sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
  sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
  sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
  sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
  sa[3] = rk[1][3] ^ sb[3];

  /* Middle rounds, processed two per loop iteration (rounds is always
     even for AES, so the loop ends with r == rounds).  */
  for (r = 2; r < rounds; r++)
    {
      sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
      sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
      sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
      sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
      sa[0] = rk[r][0] ^ sb[0];

      sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
      sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
      sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
      sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
      sa[1] = rk[r][1] ^ sb[1];

      sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
      sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
      sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
      sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
      sa[2] = rk[r][2] ^ sb[2];

      sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
      sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
      sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
      sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
      sa[3] = rk[r][3] ^ sb[3];

      r++;

      sb[0] = rol(encT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
      sb[3] = rol(encT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
      sb[2] = rol(encT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
      sb[1] = rol(encT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
      sa[0] = rk[r][0] ^ sb[0];

      sb[1] ^= rol(encT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
      sa[0] ^= rol(encT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
      sb[3] ^= rol(encT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
      sb[2] ^= rol(encT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
      sa[1] = rk[r][1] ^ sb[1];

      sb[2] ^= rol(encT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
      sa[1] ^= rol(encT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
      sa[0] ^= rol(encT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
      sb[3] ^= rol(encT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
      sa[2] = rk[r][2] ^ sb[2];

      sb[3] ^= rol(encT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
      sa[2] ^= rol(encT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
      sa[1] ^= rol(encT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
      sa[0] ^= rol(encT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
      sa[3] = rk[r][3] ^ sb[3];
    }

  /* Last round is special: plain S-box lookups only, since the final
     round omits MixColumns.  */

  sb[0] = ((u32)sbox[(byte)(sa[0] >> (0 * 8)) * 4]) << (0 * 8);
  sb[3] = ((u32)sbox[(byte)(sa[0] >> (1 * 8)) * 4]) << (1 * 8);
  sb[2] = ((u32)sbox[(byte)(sa[0] >> (2 * 8)) * 4]) << (2 * 8);
  sb[1] = ((u32)sbox[(byte)(sa[0] >> (3 * 8)) * 4]) << (3 * 8);
  sa[0] = rk[r][0] ^ sb[0];

  sb[1] ^= ((u32)sbox[(byte)(sa[1] >> (0 * 8)) * 4]) << (0 * 8);
  sa[0] ^= ((u32)sbox[(byte)(sa[1] >> (1 * 8)) * 4]) << (1 * 8);
  sb[3] ^= ((u32)sbox[(byte)(sa[1] >> (2 * 8)) * 4]) << (2 * 8);
  sb[2] ^= ((u32)sbox[(byte)(sa[1] >> (3 * 8)) * 4]) << (3 * 8);
  sa[1] = rk[r][1] ^ sb[1];

  sb[2] ^= ((u32)sbox[(byte)(sa[2] >> (0 * 8)) * 4]) << (0 * 8);
  sa[1] ^= ((u32)sbox[(byte)(sa[2] >> (1 * 8)) * 4]) << (1 * 8);
  sa[0] ^= ((u32)sbox[(byte)(sa[2] >> (2 * 8)) * 4]) << (2 * 8);
  sb[3] ^= ((u32)sbox[(byte)(sa[2] >> (3 * 8)) * 4]) << (3 * 8);
  sa[2] = rk[r][2] ^ sb[2];

  sb[3] ^= ((u32)sbox[(byte)(sa[3] >> (0 * 8)) * 4]) << (0 * 8);
  sa[2] ^= ((u32)sbox[(byte)(sa[3] >> (1 * 8)) * 4]) << (1 * 8);
  sa[1] ^= ((u32)sbox[(byte)(sa[3] >> (2 * 8)) * 4]) << (2 * 8);
  sa[0] ^= ((u32)sbox[(byte)(sa[3] >> (3 * 8)) * 4]) << (3 * 8);
  sa[3] = rk[r][3] ^ sb[3];

  /* Store the state as the little-endian output block.  */
  buf_put_le32(b + 0, sa[0]);
  buf_put_le32(b + 4, sa[1]);
  buf_put_le32(b + 8, sa[2]);
  buf_put_le32(b + 12, sa[3]);
#undef rk

  /* Estimated number of stack bytes for the caller to burn.  */
  return (56 + 2*sizeof(int));
}
905 #endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/
906
907
/* Encrypt one 16-byte block AX into BX using the expanded encryption
   key schedule in CTX.  BX and AX may overlap.  Dispatches to the
   fastest generic implementation compiled in (AMD64 asm, ARM asm, or
   the portable C version).  Returns the stack burn depth in bytes. */
static unsigned int
do_encrypt (const RIJNDAEL_context *ctx,
            unsigned char *bx, const unsigned char *ax)
{
#ifdef USE_AMD64_ASM
  return _gcry_aes_amd64_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds,
                                       enc_tables.T);
#elif defined(USE_ARM_ASM)
  return _gcry_aes_arm_encrypt_block(ctx->keyschenc, bx, ax, ctx->rounds,
                                     enc_tables.T);
#else
  return do_encrypt_fn (ctx, bx, ax);
#endif /* !USE_ARM_ASM && !USE_AMD64_ASM*/
}
922
923
924 static unsigned int
rijndael_encrypt(void * context,byte * b,const byte * a)925 rijndael_encrypt (void *context, byte *b, const byte *a)
926 {
927 RIJNDAEL_context *ctx = context;
928
929 if (ctx->prefetch_enc_fn)
930 ctx->prefetch_enc_fn();
931
932 return ctx->encrypt_fn (ctx, b, a);
933 }
934
935
936 /* Bulk encryption of complete blocks in CFB mode. Caller needs to
937 make sure that IV is aligned on an unsigned long boundary. This
938 function is only intended for the bulk encryption feature of
939 cipher.c. */
940 static void
_gcry_aes_cfb_enc(void * context,unsigned char * iv,void * outbuf_arg,const void * inbuf_arg,size_t nblocks)941 _gcry_aes_cfb_enc (void *context, unsigned char *iv,
942 void *outbuf_arg, const void *inbuf_arg,
943 size_t nblocks)
944 {
945 RIJNDAEL_context *ctx = context;
946 unsigned char *outbuf = outbuf_arg;
947 const unsigned char *inbuf = inbuf_arg;
948 unsigned int burn_depth = 0;
949 rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
950
951 if (ctx->prefetch_enc_fn)
952 ctx->prefetch_enc_fn();
953
954 for ( ;nblocks; nblocks-- )
955 {
956 /* Encrypt the IV. */
957 burn_depth = encrypt_fn (ctx, iv, iv);
958 /* XOR the input with the IV and store input into IV. */
959 cipher_block_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE);
960 outbuf += BLOCKSIZE;
961 inbuf += BLOCKSIZE;
962 }
963
964 if (burn_depth)
965 _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
966 }
967
968
969 /* Bulk encryption of complete blocks in CBC mode. Caller needs to
970 make sure that IV is aligned on an unsigned long boundary. This
971 function is only intended for the bulk encryption feature of
972 cipher.c. */
973 static void
_gcry_aes_cbc_enc(void * context,unsigned char * iv,void * outbuf_arg,const void * inbuf_arg,size_t nblocks,int cbc_mac)974 _gcry_aes_cbc_enc (void *context, unsigned char *iv,
975 void *outbuf_arg, const void *inbuf_arg,
976 size_t nblocks, int cbc_mac)
977 {
978 RIJNDAEL_context *ctx = context;
979 unsigned char *outbuf = outbuf_arg;
980 const unsigned char *inbuf = inbuf_arg;
981 unsigned char *last_iv;
982 unsigned int burn_depth = 0;
983 rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
984
985 if (ctx->prefetch_enc_fn)
986 ctx->prefetch_enc_fn();
987
988 last_iv = iv;
989
990 for ( ;nblocks; nblocks-- )
991 {
992 cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE);
993
994 burn_depth = encrypt_fn (ctx, outbuf, outbuf);
995
996 last_iv = outbuf;
997 inbuf += BLOCKSIZE;
998 if (!cbc_mac)
999 outbuf += BLOCKSIZE;
1000 }
1001
1002 if (last_iv != iv)
1003 cipher_block_cpy (iv, last_iv, BLOCKSIZE);
1004
1005 if (burn_depth)
1006 _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1007 }
1008
1009
1010 /* Bulk encryption of complete blocks in CTR mode. Caller needs to
1011 make sure that CTR is aligned on a 16 byte boundary if AESNI; the
1012 minimum alignment is for an u32. This function is only intended
1013 for the bulk encryption feature of cipher.c. CTR is expected to be
1014 of size BLOCKSIZE. */
1015 static void
_gcry_aes_ctr_enc(void * context,unsigned char * ctr,void * outbuf_arg,const void * inbuf_arg,size_t nblocks)1016 _gcry_aes_ctr_enc (void *context, unsigned char *ctr,
1017 void *outbuf_arg, const void *inbuf_arg,
1018 size_t nblocks)
1019 {
1020 RIJNDAEL_context *ctx = context;
1021 unsigned char *outbuf = outbuf_arg;
1022 const unsigned char *inbuf = inbuf_arg;
1023 unsigned int burn_depth = 0;
1024 union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } tmp;
1025 rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1026
1027 if (ctx->prefetch_enc_fn)
1028 ctx->prefetch_enc_fn();
1029
1030 for ( ;nblocks; nblocks-- )
1031 {
1032 /* Encrypt the counter. */
1033 burn_depth = encrypt_fn (ctx, tmp.x1, ctr);
1034 /* XOR the input with the encrypted counter and store in output. */
1035 cipher_block_xor(outbuf, tmp.x1, inbuf, BLOCKSIZE);
1036 outbuf += BLOCKSIZE;
1037 inbuf += BLOCKSIZE;
1038 /* Increment the counter. */
1039 cipher_block_add(ctr, 1, BLOCKSIZE);
1040 }
1041
1042 wipememory(&tmp, sizeof(tmp));
1043
1044 if (burn_depth)
1045 _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1046 }
1047
1048
1049
1050 #if !defined(USE_ARM_ASM) && !defined(USE_AMD64_ASM)
/* Decrypt one block.  A and B may be the same.
   Portable C implementation using the little-endian decryption
   lookup table DECT and the inverse S-box for the final round.
   Rounds are processed two at a time inside the main loop (the loop
   counts R down from ROUNDS-1, consuming two key-schedule rows per
   iteration); round 1 and the special last round (rk[0]) are peeled
   off after the loop.  Returns the stack burn depth in bytes. */
static unsigned int
do_decrypt_fn (const RIJNDAEL_context *ctx, unsigned char *b,
               const unsigned char *a)
{
#define rk (ctx->keyschdec32)
  int rounds = ctx->rounds;
  int r;
  u32 sa[4];  /* Current state columns (little-endian words). */
  u32 sb[4];  /* Scratch for the next state while SA is consumed. */

  sb[0] = buf_get_le32(a + 0);
  sb[1] = buf_get_le32(a + 4);
  sb[2] = buf_get_le32(a + 8);
  sb[3] = buf_get_le32(a + 12);

  /* Initial AddRoundKey with the last key-schedule row. */
  sa[0] = sb[0] ^ rk[rounds][0];
  sa[1] = sb[1] ^ rk[rounds][1];
  sa[2] = sb[2] ^ rk[rounds][2];
  sa[3] = sb[3] ^ rk[rounds][3];

  for (r = rounds - 1; r > 1; r--)
    {
      /* One full inverse round: each output word combines one byte
         from each input word via the combined InvSubBytes +
         InvMixColumns table, rotated into byte position. */
      sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
      sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
      sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
      sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
      sa[0] = rk[r][0] ^ sb[0];

      sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
      sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
      sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
      sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
      sa[1] = rk[r][1] ^ sb[1];

      sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
      sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
      sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
      sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
      sa[2] = rk[r][2] ^ sb[2];

      sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
      sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
      sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
      sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
      sa[3] = rk[r][3] ^ sb[3];

      r--;

      /* Second unrolled round of this loop iteration. */
      sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
      sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
      sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
      sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
      sa[0] = rk[r][0] ^ sb[0];

      sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
      sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
      sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
      sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
      sa[1] = rk[r][1] ^ sb[1];

      sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
      sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
      sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
      sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
      sa[2] = rk[r][2] ^ sb[2];

      sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
      sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
      sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
      sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
      sa[3] = rk[r][3] ^ sb[3];
    }

  /* Round 1, peeled off so the loop can run two rounds at a time. */
  sb[0] = rol(decT[(byte)(sa[0] >> (0 * 8))], (0 * 8));
  sb[1] = rol(decT[(byte)(sa[0] >> (1 * 8))], (1 * 8));
  sb[2] = rol(decT[(byte)(sa[0] >> (2 * 8))], (2 * 8));
  sb[3] = rol(decT[(byte)(sa[0] >> (3 * 8))], (3 * 8));
  sa[0] = rk[1][0] ^ sb[0];

  sb[1] ^= rol(decT[(byte)(sa[1] >> (0 * 8))], (0 * 8));
  sb[2] ^= rol(decT[(byte)(sa[1] >> (1 * 8))], (1 * 8));
  sb[3] ^= rol(decT[(byte)(sa[1] >> (2 * 8))], (2 * 8));
  sa[0] ^= rol(decT[(byte)(sa[1] >> (3 * 8))], (3 * 8));
  sa[1] = rk[1][1] ^ sb[1];

  sb[2] ^= rol(decT[(byte)(sa[2] >> (0 * 8))], (0 * 8));
  sb[3] ^= rol(decT[(byte)(sa[2] >> (1 * 8))], (1 * 8));
  sa[0] ^= rol(decT[(byte)(sa[2] >> (2 * 8))], (2 * 8));
  sa[1] ^= rol(decT[(byte)(sa[2] >> (3 * 8))], (3 * 8));
  sa[2] = rk[1][2] ^ sb[2];

  sb[3] ^= rol(decT[(byte)(sa[3] >> (0 * 8))], (0 * 8));
  sa[0] ^= rol(decT[(byte)(sa[3] >> (1 * 8))], (1 * 8));
  sa[1] ^= rol(decT[(byte)(sa[3] >> (2 * 8))], (2 * 8));
  sa[2] ^= rol(decT[(byte)(sa[3] >> (3 * 8))], (3 * 8));
  sa[3] = rk[1][3] ^ sb[3];

  /* Last round is special: InvSubBytes only (no InvMixColumns),
     done via the plain inverse S-box, then the final AddRoundKey. */
  sb[0] = (u32)inv_sbox[(byte)(sa[0] >> (0 * 8))] << (0 * 8);
  sb[1] = (u32)inv_sbox[(byte)(sa[0] >> (1 * 8))] << (1 * 8);
  sb[2] = (u32)inv_sbox[(byte)(sa[0] >> (2 * 8))] << (2 * 8);
  sb[3] = (u32)inv_sbox[(byte)(sa[0] >> (3 * 8))] << (3 * 8);
  sa[0] = sb[0] ^ rk[0][0];

  sb[1] ^= (u32)inv_sbox[(byte)(sa[1] >> (0 * 8))] << (0 * 8);
  sb[2] ^= (u32)inv_sbox[(byte)(sa[1] >> (1 * 8))] << (1 * 8);
  sb[3] ^= (u32)inv_sbox[(byte)(sa[1] >> (2 * 8))] << (2 * 8);
  sa[0] ^= (u32)inv_sbox[(byte)(sa[1] >> (3 * 8))] << (3 * 8);
  sa[1] = sb[1] ^ rk[0][1];

  sb[2] ^= (u32)inv_sbox[(byte)(sa[2] >> (0 * 8))] << (0 * 8);
  sb[3] ^= (u32)inv_sbox[(byte)(sa[2] >> (1 * 8))] << (1 * 8);
  sa[0] ^= (u32)inv_sbox[(byte)(sa[2] >> (2 * 8))] << (2 * 8);
  sa[1] ^= (u32)inv_sbox[(byte)(sa[2] >> (3 * 8))] << (3 * 8);
  sa[2] = sb[2] ^ rk[0][2];

  sb[3] ^= (u32)inv_sbox[(byte)(sa[3] >> (0 * 8))] << (0 * 8);
  sa[0] ^= (u32)inv_sbox[(byte)(sa[3] >> (1 * 8))] << (1 * 8);
  sa[1] ^= (u32)inv_sbox[(byte)(sa[3] >> (2 * 8))] << (2 * 8);
  sa[2] ^= (u32)inv_sbox[(byte)(sa[3] >> (3 * 8))] << (3 * 8);
  sa[3] = sb[3] ^ rk[0][3];

  buf_put_le32(b + 0, sa[0]);
  buf_put_le32(b + 4, sa[1]);
  buf_put_le32(b + 8, sa[2]);
  buf_put_le32(b + 12, sa[3]);
#undef rk

  /* Estimated stack usage reported for later burning. */
  return (56+2*sizeof(int));
}
1182 #endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/
1183
1184
/* Decrypt one block.  AX and BX may be the same.
   Dispatches to the fastest generic implementation compiled in
   (AMD64 asm, ARM asm, or the portable C version).  Caller must have
   prepared the decryption key schedule (keyschdec) beforehand.
   Returns the stack burn depth in bytes. */
static unsigned int
do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx,
            const unsigned char *ax)
{
#ifdef USE_AMD64_ASM
  return _gcry_aes_amd64_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds,
                                       dec_tables.T);
#elif defined(USE_ARM_ASM)
  return _gcry_aes_arm_decrypt_block(ctx->keyschdec, bx, ax, ctx->rounds,
                                     dec_tables.T);
#else
  return do_decrypt_fn (ctx, bx, ax);
#endif /*!USE_ARM_ASM && !USE_AMD64_ASM*/
}
1200
1201
1202 static inline void
check_decryption_preparation(RIJNDAEL_context * ctx)1203 check_decryption_preparation (RIJNDAEL_context *ctx)
1204 {
1205 if ( !ctx->decryption_prepared )
1206 {
1207 ctx->prepare_decryption ( ctx );
1208 ctx->decryption_prepared = 1;
1209 }
1210 }
1211
1212
1213 static unsigned int
rijndael_decrypt(void * context,byte * b,const byte * a)1214 rijndael_decrypt (void *context, byte *b, const byte *a)
1215 {
1216 RIJNDAEL_context *ctx = context;
1217
1218 check_decryption_preparation (ctx);
1219
1220 if (ctx->prefetch_dec_fn)
1221 ctx->prefetch_dec_fn();
1222
1223 return ctx->decrypt_fn (ctx, b, a);
1224 }
1225
1226
1227 /* Bulk decryption of complete blocks in CFB mode. Caller needs to
1228 make sure that IV is aligned on an unsigned long boundary. This
1229 function is only intended for the bulk encryption feature of
1230 cipher.c. */
1231 static void
_gcry_aes_cfb_dec(void * context,unsigned char * iv,void * outbuf_arg,const void * inbuf_arg,size_t nblocks)1232 _gcry_aes_cfb_dec (void *context, unsigned char *iv,
1233 void *outbuf_arg, const void *inbuf_arg,
1234 size_t nblocks)
1235 {
1236 RIJNDAEL_context *ctx = context;
1237 unsigned char *outbuf = outbuf_arg;
1238 const unsigned char *inbuf = inbuf_arg;
1239 unsigned int burn_depth = 0;
1240 rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;
1241
1242 if (ctx->prefetch_enc_fn)
1243 ctx->prefetch_enc_fn();
1244
1245 for ( ;nblocks; nblocks-- )
1246 {
1247 burn_depth = encrypt_fn (ctx, iv, iv);
1248 cipher_block_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE);
1249 outbuf += BLOCKSIZE;
1250 inbuf += BLOCKSIZE;
1251 }
1252
1253 if (burn_depth)
1254 _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1255 }
1256
1257
1258 /* Bulk decryption of complete blocks in CBC mode. Caller needs to
1259 make sure that IV is aligned on an unsigned long boundary. This
1260 function is only intended for the bulk encryption feature of
1261 cipher.c. */
1262 static void
_gcry_aes_cbc_dec(void * context,unsigned char * iv,void * outbuf_arg,const void * inbuf_arg,size_t nblocks)1263 _gcry_aes_cbc_dec (void *context, unsigned char *iv,
1264 void *outbuf_arg, const void *inbuf_arg,
1265 size_t nblocks)
1266 {
1267 RIJNDAEL_context *ctx = context;
1268 unsigned char *outbuf = outbuf_arg;
1269 const unsigned char *inbuf = inbuf_arg;
1270 unsigned int burn_depth = 0;
1271 unsigned char savebuf[BLOCKSIZE] ATTR_ALIGNED_16;
1272 rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn;
1273
1274 check_decryption_preparation (ctx);
1275
1276 if (ctx->prefetch_dec_fn)
1277 ctx->prefetch_dec_fn();
1278
1279 for ( ;nblocks; nblocks-- )
1280 {
1281 /* INBUF is needed later and it may be identical to OUTBUF, so store
1282 the intermediate result to SAVEBUF. */
1283
1284 burn_depth = decrypt_fn (ctx, savebuf, inbuf);
1285
1286 cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE);
1287 inbuf += BLOCKSIZE;
1288 outbuf += BLOCKSIZE;
1289 }
1290
1291 wipememory(savebuf, sizeof(savebuf));
1292
1293 if (burn_depth)
1294 _gcry_burn_stack (burn_depth + 4 * sizeof(void *));
1295 }
1296
1297
1298
/* Bulk encryption/decryption of complete blocks in OCB mode.
   Updates the running offset (c->u_iv.iv), the plaintext checksum
   (c->u_ctr.ctr) and the data block counter.  Always returns 0
   (all blocks processed). */
static size_t
_gcry_aes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg,
                     const void *inbuf_arg, size_t nblocks, int encrypt)
{
  RIJNDAEL_context *ctx = (void *)&c->context.c;
  unsigned char *outbuf = outbuf_arg;
  const unsigned char *inbuf = inbuf_arg;
  unsigned int burn_depth = 0;

  if (encrypt)
    {
      /* Aligned scratch block; also addressable as u32 words. */
      union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp;
      rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;

      if (ctx->prefetch_enc_fn)
        ctx->prefetch_enc_fn();

      for ( ;nblocks; nblocks-- )
        {
          u64 i = ++c->u_mode.ocb.data_nblocks;
          const unsigned char *l = ocb_get_l(c, i);

          /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
          cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE);
          /* Work on a copy so INBUF may alias OUTBUF. */
          cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE);
          /* Checksum_i = Checksum_{i-1} xor P_i */
          cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE);
          /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
          burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1);
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
          cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE);

          inbuf += BLOCKSIZE;
          outbuf += BLOCKSIZE;
        }
    }
  else
    {
      union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp;
      rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn;

      check_decryption_preparation (ctx);

      if (ctx->prefetch_dec_fn)
        ctx->prefetch_dec_fn();

      for ( ;nblocks; nblocks-- )
        {
          u64 i = ++c->u_mode.ocb.data_nblocks;
          const unsigned char *l = ocb_get_l(c, i);

          /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
          cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE);
          cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE);
          /* P_i = Offset_i xor DECIPHER(K, C_i xor Offset_i) */
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
          burn_depth = decrypt_fn (ctx, l_tmp.x1, l_tmp.x1);
          cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE);
          /* Checksum_i = Checksum_{i-1} xor P_i */
          cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE);
          cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE);

          inbuf += BLOCKSIZE;
          outbuf += BLOCKSIZE;
        }
    }

  if (burn_depth)
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));

  return 0;
}
1373
1374
/* Bulk authentication of complete blocks in OCB mode.  Folds the
   additional authenticated data ABUF into the running AAD sum
   (c->u_mode.ocb.aad_sum) using the AAD offset chain.  Always
   returns 0 (all blocks processed). */
static size_t
_gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, size_t nblocks)
{
  RIJNDAEL_context *ctx = (void *)&c->context.c;
  const unsigned char *abuf = abuf_arg;
  unsigned int burn_depth = 0;
  /* Aligned scratch block; also addressable as u32 words. */
  union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp;
  rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn;

  if (ctx->prefetch_enc_fn)
    ctx->prefetch_enc_fn();

  for ( ;nblocks; nblocks-- )
    {
      u64 i = ++c->u_mode.ocb.aad_nblocks;
      const unsigned char *l = ocb_get_l(c, i);

      /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */
      cipher_block_xor_1 (c->u_mode.ocb.aad_offset, l, BLOCKSIZE);
      /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */
      cipher_block_xor (l_tmp.x1, c->u_mode.ocb.aad_offset, abuf,
                        BLOCKSIZE);
      burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1);
      cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp.x1, BLOCKSIZE);

      abuf += BLOCKSIZE;
    }

  /* Scratch held keyed intermediate values; erase it. */
  wipememory(&l_tmp, sizeof(l_tmp));

  if (burn_depth)
    _gcry_burn_stack (burn_depth + 4 * sizeof(void *));

  return 0;
}
1411
1412
/* Bulk encryption/decryption of complete blocks in XTS mode.
   TWEAK is the 16-byte tweak value, updated in place so that a
   follow-up call continues the sequence.  The tweak is kept in two
   little-endian 64-bit halves and multiplied by x in GF(2^128) with
   the reduction polynomial 0x87 after each block. */
static void
_gcry_aes_xts_crypt (void *context, unsigned char *tweak,
                     void *outbuf_arg, const void *inbuf_arg,
                     size_t nblocks, int encrypt)
{
  RIJNDAEL_context *ctx = context;
  unsigned char *outbuf = outbuf_arg;
  const unsigned char *inbuf = inbuf_arg;
  unsigned int burn_depth = 0;
  rijndael_cryptfn_t crypt_fn;
  u64 tweak_lo, tweak_hi, tweak_next_lo, tweak_next_hi, tmp_lo, tmp_hi, carry;

  /* Select the cipher direction; decryption additionally needs the
     decryption key schedule prepared. */
  if (encrypt)
    {
      if (ctx->prefetch_enc_fn)
        ctx->prefetch_enc_fn();

      crypt_fn = ctx->encrypt_fn;
    }
  else
    {
      check_decryption_preparation (ctx);

      if (ctx->prefetch_dec_fn)
        ctx->prefetch_dec_fn();

      crypt_fn = ctx->decrypt_fn;
    }

  tweak_next_lo = buf_get_le64 (tweak + 0);
  tweak_next_hi = buf_get_le64 (tweak + 8);

  while (nblocks)
    {
      tweak_lo = tweak_next_lo;
      tweak_hi = tweak_next_hi;

      /* Xor-Encrypt/Decrypt-Xor block. */
      tmp_lo = buf_get_le64 (inbuf + 0) ^ tweak_lo;
      tmp_hi = buf_get_le64 (inbuf + 8) ^ tweak_hi;

      buf_put_le64 (outbuf + 0, tmp_lo);
      buf_put_le64 (outbuf + 8, tmp_hi);

      /* Generate next tweak: multiply by x in GF(2^128); CARRY is
         0x87 iff the top bit shifted out (branch-free reduction). */
      carry = -(tweak_next_hi >> 63) & 0x87;
      tweak_next_hi = (tweak_next_hi << 1) + (tweak_next_lo >> 63);
      tweak_next_lo = (tweak_next_lo << 1) ^ carry;

      burn_depth = crypt_fn (ctx, outbuf, outbuf);

      buf_put_le64 (outbuf + 0, buf_get_le64 (outbuf + 0) ^ tweak_lo);
      buf_put_le64 (outbuf + 8, buf_get_le64 (outbuf + 8) ^ tweak_hi);

      outbuf += GCRY_XTS_BLOCK_LEN;
      inbuf += GCRY_XTS_BLOCK_LEN;
      nblocks--;
    }

  /* Hand the next tweak back to the caller. */
  buf_put_le64 (tweak + 0, tweak_next_lo);
  buf_put_le64 (tweak + 8, tweak_next_hi);

  if (burn_depth)
    _gcry_burn_stack (burn_depth + 5 * sizeof(void *));
}
1479
1480
/* Run the self-tests for AES 128.  Returns NULL on success, or a
   static error string.  Performs one known-answer encryption and the
   corresponding round-trip decryption. */
static const char*
selftest_basic_128 (void)
{
  RIJNDAEL_context *ctx;
  unsigned char *ctxmem;   /* Raw allocation backing CTX; freed here. */
  unsigned char scratch[16];
  cipher_bulk_ops_t bulk_ops;

  /* The test vectors are from the AES supplied ones; more or less
     randomly taken from ecb_tbl.txt (I=42,81,14) */
#if 1
  static const unsigned char plaintext_128[16] =
    {
      0x01,0x4B,0xAF,0x22,0x78,0xA6,0x9D,0x33,
      0x1D,0x51,0x80,0x10,0x36,0x43,0xE9,0x9A
    };
  static const unsigned char key_128[16] =
    {
      0xE8,0xE9,0xEA,0xEB,0xED,0xEE,0xEF,0xF0,
      0xF2,0xF3,0xF4,0xF5,0xF7,0xF8,0xF9,0xFA
    };
  static const unsigned char ciphertext_128[16] =
    {
      0x67,0x43,0xC3,0xD1,0x51,0x9A,0xB4,0xF2,
      0xCD,0x9A,0x78,0xAB,0x09,0xA5,0x11,0xBD
    };
#else
  /* Test vectors from fips-197, appendix C. */
# warning debug test vectors in use
  static const unsigned char plaintext_128[16] =
    {
      0x00,0x11,0x22,0x33,0x44,0x55,0x66,0x77,
      0x88,0x99,0xaa,0xbb,0xcc,0xdd,0xee,0xff
    };
  static const unsigned char key_128[16] =
    {
      0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,
      0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
      /* 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, */
      /* 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c */
    };
  static const unsigned char ciphertext_128[16] =
    {
      0x69,0xc4,0xe0,0xd8,0x6a,0x7b,0x04,0x30,
      0xd8,0xcd,0xb7,0x80,0x70,0xb4,0xc5,0x5a
    };
#endif

  /* Because gcc/ld can only align the CTX struct on 8 bytes on the
     stack, we need to allocate that context on the heap.  */
  ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem);
  if (!ctx)
    return "failed to allocate memory";

  rijndael_setkey (ctx, key_128, sizeof (key_128), &bulk_ops);
  rijndael_encrypt (ctx, scratch, plaintext_128);
  if (memcmp (scratch, ciphertext_128, sizeof (ciphertext_128)))
    {
      xfree (ctxmem);
      return "AES-128 test encryption failed.";
    }
  /* Decrypt in place; the result must round-trip to the plaintext. */
  rijndael_decrypt (ctx, scratch, scratch);
  xfree (ctxmem);
  if (memcmp (scratch, plaintext_128, sizeof (plaintext_128)))
    return "AES-128 test decryption failed.";

  return NULL;
}
1550
1551 /* Run the self-tests for AES 192. Returns NULL on success. */
1552 static const char*
selftest_basic_192(void)1553 selftest_basic_192 (void)
1554 {
1555 RIJNDAEL_context *ctx;
1556 unsigned char *ctxmem;
1557 unsigned char scratch[16];
1558 cipher_bulk_ops_t bulk_ops;
1559
1560 static unsigned char plaintext_192[16] =
1561 {
1562 0x76,0x77,0x74,0x75,0xF1,0xF2,0xF3,0xF4,
1563 0xF8,0xF9,0xE6,0xE7,0x77,0x70,0x71,0x72
1564 };
1565 static unsigned char key_192[24] =
1566 {
1567 0x04,0x05,0x06,0x07,0x09,0x0A,0x0B,0x0C,
1568 0x0E,0x0F,0x10,0x11,0x13,0x14,0x15,0x16,
1569 0x18,0x19,0x1A,0x1B,0x1D,0x1E,0x1F,0x20
1570 };
1571 static const unsigned char ciphertext_192[16] =
1572 {
1573 0x5D,0x1E,0xF2,0x0D,0xCE,0xD6,0xBC,0xBC,
1574 0x12,0x13,0x1A,0xC7,0xC5,0x47,0x88,0xAA
1575 };
1576
1577 ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem);
1578 if (!ctx)
1579 return "failed to allocate memory";
1580 rijndael_setkey (ctx, key_192, sizeof(key_192), &bulk_ops);
1581 rijndael_encrypt (ctx, scratch, plaintext_192);
1582 if (memcmp (scratch, ciphertext_192, sizeof (ciphertext_192)))
1583 {
1584 xfree (ctxmem);
1585 return "AES-192 test encryption failed.";
1586 }
1587 rijndael_decrypt (ctx, scratch, scratch);
1588 xfree (ctxmem);
1589 if (memcmp (scratch, plaintext_192, sizeof (plaintext_192)))
1590 return "AES-192 test decryption failed.";
1591
1592 return NULL;
1593 }
1594
1595
1596 /* Run the self-tests for AES 256. Returns NULL on success. */
1597 static const char*
selftest_basic_256(void)1598 selftest_basic_256 (void)
1599 {
1600 RIJNDAEL_context *ctx;
1601 unsigned char *ctxmem;
1602 unsigned char scratch[16];
1603 cipher_bulk_ops_t bulk_ops;
1604
1605 static unsigned char plaintext_256[16] =
1606 {
1607 0x06,0x9A,0x00,0x7F,0xC7,0x6A,0x45,0x9F,
1608 0x98,0xBA,0xF9,0x17,0xFE,0xDF,0x95,0x21
1609 };
1610 static unsigned char key_256[32] =
1611 {
1612 0x08,0x09,0x0A,0x0B,0x0D,0x0E,0x0F,0x10,
1613 0x12,0x13,0x14,0x15,0x17,0x18,0x19,0x1A,
1614 0x1C,0x1D,0x1E,0x1F,0x21,0x22,0x23,0x24,
1615 0x26,0x27,0x28,0x29,0x2B,0x2C,0x2D,0x2E
1616 };
1617 static const unsigned char ciphertext_256[16] =
1618 {
1619 0x08,0x0E,0x95,0x17,0xEB,0x16,0x77,0x71,
1620 0x9A,0xCF,0x72,0x80,0x86,0x04,0x0A,0xE3
1621 };
1622
1623 ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem);
1624 if (!ctx)
1625 return "failed to allocate memory";
1626 rijndael_setkey (ctx, key_256, sizeof(key_256), &bulk_ops);
1627 rijndael_encrypt (ctx, scratch, plaintext_256);
1628 if (memcmp (scratch, ciphertext_256, sizeof (ciphertext_256)))
1629 {
1630 xfree (ctxmem);
1631 return "AES-256 test encryption failed.";
1632 }
1633 rijndael_decrypt (ctx, scratch, scratch);
1634 xfree (ctxmem);
1635 if (memcmp (scratch, plaintext_256, sizeof (plaintext_256)))
1636 return "AES-256 test decryption failed.";
1637
1638 return NULL;
1639 }
1640
1641
1642 /* Run the self-tests for AES-CTR-128, tests IV increment of bulk CTR
1643 encryption. Returns NULL on success. */
1644 static const char*
selftest_ctr_128(void)1645 selftest_ctr_128 (void)
1646 {
1647 const int nblocks = 8+1;
1648 const int blocksize = BLOCKSIZE;
1649 const int context_size = sizeof(RIJNDAEL_context);
1650
1651 return _gcry_selftest_helper_ctr("AES", &rijndael_setkey,
1652 &rijndael_encrypt, nblocks, blocksize, context_size);
1653 }
1654
1655
1656 /* Run the self-tests for AES-CBC-128, tests bulk CBC decryption.
1657 Returns NULL on success. */
1658 static const char*
selftest_cbc_128(void)1659 selftest_cbc_128 (void)
1660 {
1661 const int nblocks = 8+2;
1662 const int blocksize = BLOCKSIZE;
1663 const int context_size = sizeof(RIJNDAEL_context);
1664
1665 return _gcry_selftest_helper_cbc("AES", &rijndael_setkey,
1666 &rijndael_encrypt, nblocks, blocksize, context_size);
1667 }
1668
1669
1670 /* Run the self-tests for AES-CFB-128, tests bulk CFB decryption.
1671 Returns NULL on success. */
1672 static const char*
selftest_cfb_128(void)1673 selftest_cfb_128 (void)
1674 {
1675 const int nblocks = 8+2;
1676 const int blocksize = BLOCKSIZE;
1677 const int context_size = sizeof(RIJNDAEL_context);
1678
1679 return _gcry_selftest_helper_cfb("AES", &rijndael_setkey,
1680 &rijndael_encrypt, nblocks, blocksize, context_size);
1681 }
1682
1683
/* Run all the self-tests and return NULL on success.  This function
   is used for the on-the-fly self-tests.  Tests run in order and the
   first failure's error string is returned. */
static const char *
selftest (void)
{
  const char *r;

  r = selftest_basic_128 ();
  if (!r)
    r = selftest_basic_192 ();
  if (!r)
    r = selftest_basic_256 ();
  if (!r)
    r = selftest_ctr_128 ();
  if (!r)
    r = selftest_cbc_128 ();
  if (!r)
    r = selftest_cfb_128 ();

  return r;
}
1707
1708
/* SP800-38a.pdf for AES-128.  Runs the CFB128-AES128 (F.3.13) or
   OFB-AES128 known-answer tests through the public cipher API,
   depending on REQUESTED_MODE.  Encrypts and decrypts four chained
   blocks per vector set and checks both directions.  Returns NULL on
   success or a static error string. */
static const char *
selftest_fips_128_38a (int requested_mode)
{
  static const struct tv
  {
    int mode;
    const unsigned char key[16];
    const unsigned char iv[16];
    struct
    {
      const unsigned char input[16];
      const unsigned char output[16];
    } data[4];
  } tv[2] =
    {
      {
        GCRY_CIPHER_MODE_CFB,  /* F.3.13, CFB128-AES128 */
        { 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6,
          0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c },
        { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
          0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
        {
          { { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
              0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a },
            { 0x3b, 0x3f, 0xd9, 0x2e, 0xb7, 0x2d, 0xad, 0x20,
              0x33, 0x34, 0x49, 0xf8, 0xe8, 0x3c, 0xfb, 0x4a } },

          { { 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
              0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51 },
            { 0xc8, 0xa6, 0x45, 0x37, 0xa0, 0xb3, 0xa9, 0x3f,
              0xcd, 0xe3, 0xcd, 0xad, 0x9f, 0x1c, 0xe5, 0x8b } },

          { { 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
              0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef },
            { 0x26, 0x75, 0x1f, 0x67, 0xa3, 0xcb, 0xb1, 0x40,
              0xb1, 0x80, 0x8c, 0xf1, 0x87, 0xa4, 0xf4, 0xdf } },

          { { 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
              0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
            { 0xc0, 0x4b, 0x05, 0x35, 0x7c, 0x5d, 0x1c, 0x0e,
              0xea, 0xc4, 0xc6, 0x6f, 0x9f, 0xf7, 0xf2, 0xe6 } }
        }
      },
      {
        GCRY_CIPHER_MODE_OFB,
        { 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6,
          0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c },
        { 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
          0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f },
        {
          { { 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96,
              0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a },
            { 0x3b, 0x3f, 0xd9, 0x2e, 0xb7, 0x2d, 0xad, 0x20,
              0x33, 0x34, 0x49, 0xf8, 0xe8, 0x3c, 0xfb, 0x4a } },

          { { 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c,
              0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51 },
            { 0x77, 0x89, 0x50, 0x8d, 0x16, 0x91, 0x8f, 0x03,
              0xf5, 0x3c, 0x52, 0xda, 0xc5, 0x4e, 0xd8, 0x25 } },

          { { 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11,
              0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef },
            { 0x97, 0x40, 0x05, 0x1e, 0x9c, 0x5f, 0xec, 0xf6,
              0x43, 0x44, 0xf7, 0xa8, 0x22, 0x60, 0xed, 0xcc } },

          { { 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17,
              0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 },
            { 0x30, 0x4c, 0x65, 0x28, 0xf6, 0x59, 0xc7, 0x78,
              0x66, 0xa5, 0x10, 0xd9, 0xc1, 0xd6, 0xae, 0x5e } },
        }
      }
    };
  unsigned char scratch[16];
  gpg_error_t err;
  int tvi, idx;
  gcry_cipher_hd_t hdenc = NULL;
  gcry_cipher_hd_t hddec = NULL;

  /* Close both handles (NULL-safe) and bail out with error string A. */
#define Fail(a) do {           \
    _gcry_cipher_close (hdenc);  \
    _gcry_cipher_close (hddec);  \
    return a;                    \
  } while (0)

  gcry_assert (sizeof tv[0].data[0].input == sizeof scratch);
  gcry_assert (sizeof tv[0].data[0].output == sizeof scratch);

  /* Locate the vector set for the requested mode. */
  for (tvi=0; tvi < DIM (tv); tvi++)
    if (tv[tvi].mode == requested_mode)
      break;
  if (tvi == DIM (tv))
    Fail ("no test data for this mode");

  err = _gcry_cipher_open (&hdenc, GCRY_CIPHER_AES, tv[tvi].mode, 0);
  if (err)
    Fail ("open");
  err = _gcry_cipher_open (&hddec, GCRY_CIPHER_AES, tv[tvi].mode, 0);
  if (err)
    Fail ("open");
  err = _gcry_cipher_setkey (hdenc, tv[tvi].key,  sizeof tv[tvi].key);
  if (!err)
    err = _gcry_cipher_setkey (hddec, tv[tvi].key, sizeof tv[tvi].key);
  if (err)
    Fail ("set key");
  err = _gcry_cipher_setiv (hdenc, tv[tvi].iv, sizeof tv[tvi].iv);
  if (!err)
    err = _gcry_cipher_setiv (hddec, tv[tvi].iv, sizeof tv[tvi].iv);
  if (err)
    Fail ("set IV");
  /* The four data blocks are chained: the mode state carries over
     from one block to the next, as in the SP800-38A examples. */
  for (idx=0; idx < DIM (tv[tvi].data); idx++)
    {
      err = _gcry_cipher_encrypt (hdenc, scratch, sizeof scratch,
                                  tv[tvi].data[idx].input,
                                  sizeof tv[tvi].data[idx].input);
      if (err)
        Fail ("encrypt command");
      if (memcmp (scratch, tv[tvi].data[idx].output, sizeof scratch))
        Fail ("encrypt mismatch");
      err = _gcry_cipher_decrypt (hddec, scratch, sizeof scratch,
                                  tv[tvi].data[idx].output,
                                  sizeof tv[tvi].data[idx].output);
      if (err)
        Fail ("decrypt command");
      if (memcmp (scratch, tv[tvi].data[idx].input, sizeof scratch))
        Fail ("decrypt mismatch");
    }

#undef Fail
  _gcry_cipher_close (hdenc);
  _gcry_cipher_close (hddec);
  return NULL;
}
1842
1843
1844 /* Complete selftest for AES-128 with all modes and driver code. */
1845 static gpg_err_code_t
selftest_fips_128(int extended,selftest_report_func_t report)1846 selftest_fips_128 (int extended, selftest_report_func_t report)
1847 {
1848 const char *what;
1849 const char *errtxt;
1850
1851 what = "low-level";
1852 errtxt = selftest_basic_128 ();
1853 if (errtxt)
1854 goto failed;
1855
1856 if (extended)
1857 {
1858 what = "cfb";
1859 errtxt = selftest_fips_128_38a (GCRY_CIPHER_MODE_CFB);
1860 if (errtxt)
1861 goto failed;
1862
1863 what = "ofb";
1864 errtxt = selftest_fips_128_38a (GCRY_CIPHER_MODE_OFB);
1865 if (errtxt)
1866 goto failed;
1867 }
1868
1869 return 0; /* Succeeded. */
1870
1871 failed:
1872 if (report)
1873 report ("cipher", GCRY_CIPHER_AES128, what, errtxt);
1874 return GPG_ERR_SELFTEST_FAILED;
1875 }
1876
1877 /* Complete selftest for AES-192. */
1878 static gpg_err_code_t
selftest_fips_192(int extended,selftest_report_func_t report)1879 selftest_fips_192 (int extended, selftest_report_func_t report)
1880 {
1881 const char *what;
1882 const char *errtxt;
1883
1884 (void)extended; /* No extended tests available. */
1885
1886 what = "low-level";
1887 errtxt = selftest_basic_192 ();
1888 if (errtxt)
1889 goto failed;
1890
1891
1892 return 0; /* Succeeded. */
1893
1894 failed:
1895 if (report)
1896 report ("cipher", GCRY_CIPHER_AES192, what, errtxt);
1897 return GPG_ERR_SELFTEST_FAILED;
1898 }
1899
1900
1901 /* Complete selftest for AES-256. */
1902 static gpg_err_code_t
selftest_fips_256(int extended,selftest_report_func_t report)1903 selftest_fips_256 (int extended, selftest_report_func_t report)
1904 {
1905 const char *what;
1906 const char *errtxt;
1907
1908 (void)extended; /* No extended tests available. */
1909
1910 what = "low-level";
1911 errtxt = selftest_basic_256 ();
1912 if (errtxt)
1913 goto failed;
1914
1915 return 0; /* Succeeded. */
1916
1917 failed:
1918 if (report)
1919 report ("cipher", GCRY_CIPHER_AES256, what, errtxt);
1920 return GPG_ERR_SELFTEST_FAILED;
1921 }
1922
1923
1924
1925 /* Run a full self-test for ALGO and return 0 on success. */
1926 static gpg_err_code_t
run_selftests(int algo,int extended,selftest_report_func_t report)1927 run_selftests (int algo, int extended, selftest_report_func_t report)
1928 {
1929 gpg_err_code_t ec;
1930
1931 switch (algo)
1932 {
1933 case GCRY_CIPHER_AES128:
1934 ec = selftest_fips_128 (extended, report);
1935 break;
1936 case GCRY_CIPHER_AES192:
1937 ec = selftest_fips_192 (extended, report);
1938 break;
1939 case GCRY_CIPHER_AES256:
1940 ec = selftest_fips_256 (extended, report);
1941 break;
1942 default:
1943 ec = GPG_ERR_CIPHER_ALGO;
1944 break;
1945
1946 }
1947 return ec;
1948 }
1949
1950
1951
1952
/* NULL-terminated list of alias names under which the default
   (128 bit key) AES cipher may be looked up.  */
static const char *rijndael_names[] =
  {
    "RIJNDAEL",
    "AES128",
    "AES-128",
    NULL
  };
1960
/* ASN.1 OIDs for AES-128, one per cipher mode, from the NIST
   algorithm arc 2.16.840.1.101.3.4.1 (aes).  NULL-terminated.  */
static gcry_cipher_oid_spec_t rijndael_oids[] =
  {
    { "2.16.840.1.101.3.4.1.1", GCRY_CIPHER_MODE_ECB },
    { "2.16.840.1.101.3.4.1.2", GCRY_CIPHER_MODE_CBC },
    { "2.16.840.1.101.3.4.1.3", GCRY_CIPHER_MODE_OFB },
    { "2.16.840.1.101.3.4.1.4", GCRY_CIPHER_MODE_CFB },
    { NULL }
  };
1969
/* Registration record for AES-128: algorithm id and flags, names and
   OIDs, block size 16 bytes, key length 128 bits, context size, and
   the setkey/encrypt/decrypt/selftest entry points.  (Field order
   follows gcry_cipher_spec_t; the two NULLs presumably are optional
   hooks unused here — confirm against cipher.h.)  */
gcry_cipher_spec_t _gcry_cipher_spec_aes =
  {
    GCRY_CIPHER_AES, {0, 1},
    "AES", rijndael_names, rijndael_oids, 16, 128,
    sizeof (RIJNDAEL_context),
    rijndael_setkey, rijndael_encrypt, rijndael_decrypt,
    NULL, NULL,
    run_selftests
  };
1979
1980
/* NULL-terminated list of alias names for AES with 192 bit keys.  */
static const char *rijndael192_names[] =
  {
    "RIJNDAEL192",
    "AES-192",
    NULL
  };
1987
/* ASN.1 OIDs for AES-192, one per cipher mode, from the NIST
   algorithm arc 2.16.840.1.101.3.4.1 (aes).  NULL-terminated.  */
static gcry_cipher_oid_spec_t rijndael192_oids[] =
  {
    { "2.16.840.1.101.3.4.1.21", GCRY_CIPHER_MODE_ECB },
    { "2.16.840.1.101.3.4.1.22", GCRY_CIPHER_MODE_CBC },
    { "2.16.840.1.101.3.4.1.23", GCRY_CIPHER_MODE_OFB },
    { "2.16.840.1.101.3.4.1.24", GCRY_CIPHER_MODE_CFB },
    { NULL }
  };
1996
/* Registration record for AES-192: same entry points and 16 byte
   block size as AES-128, but a 192 bit key length.  (Field order
   follows gcry_cipher_spec_t — confirm against cipher.h.)  */
gcry_cipher_spec_t _gcry_cipher_spec_aes192 =
  {
    GCRY_CIPHER_AES192, {0, 1},
    "AES192", rijndael192_names, rijndael192_oids, 16, 192,
    sizeof (RIJNDAEL_context),
    rijndael_setkey, rijndael_encrypt, rijndael_decrypt,
    NULL, NULL,
    run_selftests
  };
2006
2007
/* NULL-terminated list of alias names for AES with 256 bit keys.  */
static const char *rijndael256_names[] =
  {
    "RIJNDAEL256",
    "AES-256",
    NULL
  };
2014
/* ASN.1 OIDs for AES-256, one per cipher mode, from the NIST
   algorithm arc 2.16.840.1.101.3.4.1 (aes).  NULL-terminated.  */
static gcry_cipher_oid_spec_t rijndael256_oids[] =
  {
    { "2.16.840.1.101.3.4.1.41", GCRY_CIPHER_MODE_ECB },
    { "2.16.840.1.101.3.4.1.42", GCRY_CIPHER_MODE_CBC },
    { "2.16.840.1.101.3.4.1.43", GCRY_CIPHER_MODE_OFB },
    { "2.16.840.1.101.3.4.1.44", GCRY_CIPHER_MODE_CFB },
    { NULL }
  };
2023
/* Registration record for AES-256: same entry points and 16 byte
   block size as AES-128, but a 256 bit key length.  (Field order
   follows gcry_cipher_spec_t — confirm against cipher.h.)  */
gcry_cipher_spec_t _gcry_cipher_spec_aes256 =
  {
    GCRY_CIPHER_AES256, {0, 1},
    "AES256", rijndael256_names, rijndael256_oids, 16, 256,
    sizeof (RIJNDAEL_context),
    rijndael_setkey, rijndael_encrypt, rijndael_decrypt,
    NULL, NULL,
    run_selftests
  };
2033