/*
 * Copyright 2017-2019 The OpenSSL Project Authors. All Rights Reserved.
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include "internal/cryptlib.h"
#ifndef OPENSSL_NO_ARIA
# include <openssl/evp.h>
# include <openssl/modes.h>
# include <openssl/rand.h>
# include <openssl/rand_drbg.h>
# include "crypto/aria.h"
# include "crypto/evp.h"
# include "modes_local.h"
# include "evp_local.h"

/* ARIA subkey Structure */
typedef struct {
    ARIA_KEY ks;
} EVP_ARIA_KEY;

/* ARIA GCM context */
typedef struct {
    union {
        double align;
        ARIA_KEY ks;
    } ks;                       /* ARIA subkey to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
} EVP_ARIA_GCM_CTX;

/* ARIA CCM context */
typedef struct {
    union {
        double align;
        ARIA_KEY ks;
    } ks;                       /* ARIA key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_ARIA_CCM_CTX;

/* The subkey for ARIA is generated. */
static int aria_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                         const unsigned char *iv, int enc)
{
    int ret;
    int mode = EVP_CIPHER_CTX_mode(ctx);

    if (enc || (mode != EVP_CIPH_ECB_MODE && mode != EVP_CIPH_CBC_MODE))
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   EVP_CIPHER_CTX_get_cipher_data(ctx));
    else
        ret = aria_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   EVP_CIPHER_CTX_get_cipher_data(ctx));
    if (ret < 0) {
        EVPerr(EVP_F_ARIA_INIT_KEY, EVP_R_ARIA_KEY_SETUP_FAILED);
        return 0;
    }
    return 1;
}

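/*
 * Thin wrappers that adapt the single-block aria_encrypt() routine to the
 * generic CRYPTO_*128 mode helpers from crypto/modes.
 */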
static void aria_cbc_encrypt(const unsigned char *in, unsigned char *out,
                             size_t len, const ARIA_KEY *key,
                             unsigned char *ivec, const int enc)
{
    if (enc)
        CRYPTO_cbc128_encrypt(in, out, len, key, ivec,
                              (block128_f) aria_encrypt);
    else
        CRYPTO_cbc128_decrypt(in, out, len, key, ivec,
                              (block128_f) aria_encrypt);
}

static void aria_cfb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_encrypt(in, out, length, key, ivec, num, enc,
                          (block128_f) aria_encrypt);
}

static void aria_cfb1_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_1_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}

static void aria_cfb8_encrypt(const unsigned char *in, unsigned char *out,
                              size_t length, const ARIA_KEY *key,
                              unsigned char *ivec, int *num, const int enc)
{
    CRYPTO_cfb128_8_encrypt(in, out, length, key, ivec, num, enc,
                            (block128_f) aria_encrypt);
}

static void aria_ecb_encrypt(const unsigned char *in, unsigned char *out,
                             const ARIA_KEY *key, const int enc)
{
    aria_encrypt(in, out, key);
}

static void aria_ofb128_encrypt(const unsigned char *in, unsigned char *out,
                                size_t length, const ARIA_KEY *key,
                                unsigned char *ivec, int *num)
{
    CRYPTO_ofb128_encrypt(in, out, length, key, ivec, num,
                          (block128_f) aria_encrypt);
}

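/*
 * The IMPLEMENT_BLOCK_CIPHER macro (from the EVP-internal headers included
 * above) expands to the ECB, CBC, CFB128 and OFB EVP_CIPHER definitions and
 * their EVP_aria_*() accessors for each key size.
 */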
IMPLEMENT_BLOCK_CIPHER(aria_128, ks, aria, EVP_ARIA_KEY,
                       NID_aria_128, 16, 16, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_192, ks, aria, EVP_ARIA_KEY,
                       NID_aria_192, 16, 24, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)
IMPLEMENT_BLOCK_CIPHER(aria_256, ks, aria, EVP_ARIA_KEY,
                       NID_aria_256, 16, 32, 16, 128,
                       0, aria_init_key, NULL,
                       EVP_CIPHER_set_asn1_iv,
                       EVP_CIPHER_get_asn1_iv,
                       NULL)

# define IMPLEMENT_ARIA_CFBR(ksize,cbits) \
                IMPLEMENT_CFBR(aria,aria,EVP_ARIA_KEY,ks,ksize,cbits,16,0)
IMPLEMENT_ARIA_CFBR(128,1)
IMPLEMENT_ARIA_CFBR(192,1)
IMPLEMENT_ARIA_CFBR(256,1)
IMPLEMENT_ARIA_CFBR(128,8)
IMPLEMENT_ARIA_CFBR(192,8)
IMPLEMENT_ARIA_CFBR(256,8)

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aria_init_key,                  \
        aria_##mode##_cipher,           \
        NULL,                           \
        sizeof(EVP_ARIA_KEY),           \
        NULL,NULL,NULL,NULL };          \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return &aria_##keylen##_##mode; }

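/*
 * CTR mode: the context IV holds the counter block and the context scratch
 * buffer holds the encrypted counter (keystream) block.
 */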
static int aria_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    unsigned int num = EVP_CIPHER_CTX_num(ctx);
    EVP_ARIA_KEY *dat = EVP_C_DATA(EVP_ARIA_KEY, ctx);

    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                          EVP_CIPHER_CTX_iv_noconst(ctx),
                          EVP_CIPHER_CTX_buf_noconst(ctx), &num,
                          (block128_f) aria_encrypt);
    EVP_CIPHER_CTX_set_num(ctx, num);
    return 1;
}

BLOCK_CIPHER_generic(NID_aria, 128, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 192, 1, 16, ctr, ctr, CTR, 0)
BLOCK_CIPHER_generic(NID_aria, 256, 1, 16, ctr, ctr, CTR, 0)

/* Authenticated cipher modes (GCM/CCM) */

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}

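/*
 * key and/or iv may be NULL: a NULL key leaves the existing key schedule in
 * place and a NULL iv defers IV setup until one is supplied later.
 */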
static int aria_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (!iv && !key)
        return 1;
    if (key) {
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aria_encrypt);
        if (ret < 0) {
            EVPerr(EVP_F_ARIA_GCM_INIT_KEY, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }

        /*
         * If we have an IV we can set it directly, otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is set use the IV, otherwise copy it for later */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

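/*
 * Handles the GCM-specific ctrl commands: IV length and tag get/set, fixed
 * IV and invocation-field handling for TLS, TLS AAD processing and context
 * copy.
 */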
static int aria_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        gctx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
                OPENSSL_free(gctx->iv);
            if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
                EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (arg <= 0 || arg > 16 || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg <= 0 || arg > 16 || !EVP_CIPHER_CTX_encrypting(c)
            || gctx->taglen < 0)
            return 0;
        memcpy(ptr, EVP_CIPHER_CTX_buf_noconst(c), arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (EVP_CIPHER_CTX_encrypting(c)
            && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0
            || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < EVP_GCM_TLS_TAG_LEN)
                    return 0;
                len -= EVP_GCM_TLS_TAG_LEN;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_GCM_CTX *gctx_out = EVP_C_DATA(EVP_ARIA_GCM_CTX, out);
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == EVP_CIPHER_CTX_iv_noconst(c))
                gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
            else {
                if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
                    EVPerr(EVP_F_ARIA_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;

    }
}

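/*
 * TLS record handler: the buffer must contain explicit IV || payload || tag
 * and is processed in place, using the AAD saved by EVP_CTRL_AEAD_TLS1_AAD.
 */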
static int aria_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);
    int rv = -1;

    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CIPHER_CTX_encrypting(ctx) ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        /* Encrypt payload */
        if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
            goto err;
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
            goto err;
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx),
                          EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(EVP_CIPHER_CTX_buf_noconst(ctx), in + len,
                          EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

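/*
 * General-purpose GCM entry point: out == NULL feeds AAD, in == NULL
 * finalises (verifying or producing the tag), otherwise data is
 * encrypted or decrypted.
 */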
static int aria_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aria_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                return -1;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                return -1;
        }
        return len;
    }
    if (!EVP_CIPHER_CTX_encrypting(ctx)) {
        if (gctx->taglen < 0)
            return -1;
        if (CRYPTO_gcm128_finish(&gctx->gcm,
                                 EVP_CIPHER_CTX_buf_noconst(ctx),
                                 gctx->taglen) != 0)
            return -1;
        gctx->iv_set = 0;
        return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, EVP_CIPHER_CTX_buf_noconst(ctx), 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
}

static int aria_gcm_cleanup(EVP_CIPHER_CTX *ctx)
{
    EVP_ARIA_GCM_CTX *gctx = EVP_C_DATA(EVP_ARIA_GCM_CTX, ctx);

    if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(ctx))
        OPENSSL_free(gctx->iv);

    return 1;
}

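/* As for GCM, a NULL key or iv leaves the corresponding state untouched. */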
static int aria_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    int ret;
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        ret = aria_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aria_encrypt);
        if (ret < 0) {
            EVPerr(EVP_F_ARIA_CCM_INIT_KEY, EVP_R_ARIA_KEY_SETUP_FAILED);
            return 0;
        }
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

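/*
 * Handles the CCM-specific ctrl commands: the L and M (nonce and tag length)
 * parameters, fixed IV for TLS, TLS AAD processing, tag retrieval and
 * context copy.
 */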
static int aria_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, c);

    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        cctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        cctx->tls_aad_len = arg;
        {
            uint16_t len =
                EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
                | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
            /* Correct length for explicit IV */
            if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
                return 0;
            len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!EVP_CIPHER_CTX_encrypting(c)) {
                if (len < cctx->M)
                    return 0;
                len -= cctx->M;
            }
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
            EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return cctx->M;

    case EVP_CTRL_CCM_SET_IV_FIXED:
        /* Sanity check length */
        if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
            return 0;
        /* Just copy to first part of IV */
        memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = 15 - cctx->L;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        arg = 15 - arg;
        /* fall thru */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (EVP_CIPHER_CTX_encrypting(c) && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_ARIA_CCM_CTX *cctx_out = EVP_C_DATA(EVP_ARIA_CCM_CTX, out);
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

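/*
 * TLS record handler for CCM: explicit IV || payload || tag, processed in
 * place; the nonce is rebuilt from the fixed IV and the record's explicit IV.
 */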
static int aria_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                               const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* Encrypt/decrypt must be performed in place */
    if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
        return -1;
    /* If encrypting set explicit IV from sequence number (start of AAD) */
    if (EVP_CIPHER_CTX_encrypting(ctx))
        memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
               EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Get rest of IV from explicit IV */
    memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
           EVP_CCM_TLS_EXPLICIT_IV_LEN);
    /* Correct length value */
    len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
                            len))
        return -1;
    /* Use saved AAD */
    CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
    /* Fix buffer to point to payload */
    in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
                      : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
            return -1;
        return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
    } else {
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len, cctx->str)
                      : !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, in + len, cctx->M))
                    return len;
            }
        }
        OPENSSL_cleanse(out, len);
        return -1;
    }
}

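/*
 * General-purpose CCM entry point. CCM needs the total message length up
 * front, so a call with both in and out NULL sets the length, out == NULL
 * feeds AAD, and the data pass then encrypts or decrypts and sets or checks
 * the tag.
 */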
static int aria_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_ARIA_CCM_CTX *cctx = EVP_C_DATA(EVP_ARIA_CCM_CTX, ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;

    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aria_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len, cctx->str)
                      : CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

#define aria_ccm_cleanup NULL

#define ARIA_AUTH_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                         | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_FLAG_AEAD_CIPHER \
                         | EVP_CIPH_CUSTOM_IV_LENGTH)

#define BLOCK_CIPHER_aead(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aria_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,       \
        blocksize, keylen/8, ivlen,     \
        ARIA_AUTH_FLAGS|EVP_CIPH_##MODE##_MODE, \
        aria_##mode##_init_key,         \
        aria_##mode##_cipher,           \
        aria_##mode##_cleanup,          \
        sizeof(EVP_ARIA_##MODE##_CTX),  \
        NULL,NULL,aria_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aria_##keylen##_##mode(void) \
{ return (EVP_CIPHER*)&aria_##keylen##_##mode; }

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, gcm, gcm, GCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, gcm, gcm, GCM, 0)

BLOCK_CIPHER_aead(NID_aria, 128, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 192, 1, 12, ccm, ccm, CCM, 0)
BLOCK_CIPHER_aead(NID_aria, 256, 1, 12, ccm, ccm, CCM, 0)

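/*-
 * Illustrative sketch (not part of the library) of how a caller might drive
 * the ARIA-GCM cipher above through the public EVP interface; key, iv, aad,
 * pt, ct and their lengths are assumed to be supplied by the caller and
 * error handling is omitted for brevity:
 *
 *     EVP_CIPHER_CTX *ectx = EVP_CIPHER_CTX_new();
 *     int outl, tmpl;
 *     unsigned char tag[16];
 *
 *     EVP_EncryptInit_ex(ectx, EVP_aria_256_gcm(), NULL, NULL, NULL);
 *     EVP_CIPHER_CTX_ctrl(ectx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
 *     EVP_EncryptInit_ex(ectx, NULL, NULL, key, iv);
 *     EVP_EncryptUpdate(ectx, NULL, &outl, aad, aadlen);
 *     EVP_EncryptUpdate(ectx, ct, &outl, pt, ptlen);
 *     EVP_EncryptFinal_ex(ectx, ct + outl, &tmpl);
 *     EVP_CIPHER_CTX_ctrl(ectx, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);
 *     EVP_CIPHER_CTX_free(ectx);
 */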
#endif