1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or https://opensource.org/licenses/CDDL-1.0.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22 /*
23 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
25 */
26
27 #include <sys/zfs_context.h>
28 #include <sys/crypto/common.h>
29 #include <sys/crypto/spi.h>
30 #include <sys/crypto/icp.h>
31 #include <sys/sha2.h>
32 #include <sha2/sha2_impl.h>
33
34 /*
35 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
36 * by KCF to one of the entry points.
37 */
38
39 #define PROV_SHA2_CTX(ctx) ((sha2_ctx_t *)(ctx)->cc_provider_private)
40 #define PROV_SHA2_HMAC_CTX(ctx) ((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
41
/*
 * Extract the digest length passed as a mechanism parameter.  cm_param may
 * be unaligned, so fall back to memcpy() when the pointer is not suitably
 * aligned for a direct ulong_t load.  Wrapped in do/while (0) so the macro
 * expands to a single statement and composes safely with unbraced if/else.
 */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) do {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);		\
	else {								\
		ulong_t tmp_ulong;					\
		memcpy(&tmp_ulong, (m)->cm_param, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
} while (0)
52
/*
 * One-shot digest of a key: used to shrink HMAC keys that are longer than
 * the algorithm's block size.  do/while (0) keeps the expansion a single
 * statement so the macro is safe inside unbraced if/else.
 */
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) do {	\
	SHA2Init(mech, ctx);					\
	SHA2Update(ctx, key, len);				\
	SHA2Final(digest, ctx);					\
} while (0)
58
59 /*
60 * Mechanism info structure passed to KCF during registration.
61 */
62 static const crypto_mech_info_t sha2_mech_info_tab[] = {
63 /* SHA256 */
64 {SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
65 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
66 /* SHA256-HMAC */
67 {SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
68 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
69 /* SHA256-HMAC GENERAL */
70 {SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
71 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
72 /* SHA384 */
73 {SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
74 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
75 /* SHA384-HMAC */
76 {SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
77 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
78 /* SHA384-HMAC GENERAL */
79 {SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
80 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
81 /* SHA512 */
82 {SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
83 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
84 /* SHA512-HMAC */
85 {SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
86 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
87 /* SHA512-HMAC GENERAL */
88 {SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
89 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
90 };
91
/* Digest (plain SHA2) entry points, wired into sha2_digest_ops below. */
static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_atomic(crypto_mechanism_t *, crypto_data_t *,
    crypto_data_t *);

static const crypto_digest_ops_t sha2_digest_ops = {
	.digest_init = sha2_digest_init,
	.digest = sha2_digest,
	.digest_update = sha2_digest_update,
	.digest_final = sha2_digest_final,
	.digest_atomic = sha2_digest_atomic
};

/* MAC (SHA2-HMAC) entry points, wired into sha2_mac_ops below. */
static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static int sha2_mac_verify_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_mac_ops_t sha2_mac_ops = {
	.mac_init = sha2_mac_init,
	.mac = NULL,
	.mac_update = sha2_mac_update,
	.mac_final = sha2_mac_final,
	.mac_atomic = sha2_mac_atomic,
	.mac_verify_atomic = sha2_mac_verify_atomic
};

/* Context-template / context-teardown entry points. */
static int sha2_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int sha2_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t sha2_ctx_ops = {
	.create_ctx_template = sha2_create_ctx_template,
	.free_context = sha2_free_context
};

/* Aggregate ops vector handed to KCF (cipher ops slot unused). */
static const crypto_ops_t sha2_crypto_ops = {
	&sha2_digest_ops,
	NULL,
	&sha2_mac_ops,
	&sha2_ctx_ops,
};

/* Provider descriptor registered with KCF in sha2_mod_init(). */
static const crypto_provider_info_t sha2_prov_info = {
	"SHA2 Software Provider",
	&sha2_crypto_ops,
	sizeof (sha2_mech_info_tab) / sizeof (crypto_mech_info_t),
	sha2_mech_info_tab
};

/* Handle returned by KCF on registration; 0 means "not registered". */
static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
149
150 int
sha2_mod_init(void)151 sha2_mod_init(void)
152 {
153 int ret;
154
155 /*
156 * Register with KCF. If the registration fails, log an
157 * error but do not uninstall the module, since the functionality
158 * provided by misc/sha2 should still be available.
159 */
160 if ((ret = crypto_register_provider(&sha2_prov_info,
161 &sha2_prov_handle)) != CRYPTO_SUCCESS)
162 cmn_err(CE_WARN, "sha2 _init: "
163 "crypto_register_provider() failed (0x%x)", ret);
164
165 return (0);
166 }
167
168 int
sha2_mod_fini(void)169 sha2_mod_fini(void)
170 {
171 int ret = 0;
172
173 if (sha2_prov_handle != 0) {
174 if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
175 CRYPTO_SUCCESS) {
176 cmn_err(CE_WARN,
177 "sha2 _fini: crypto_unregister_provider() "
178 "failed (0x%x)", ret);
179 return (EBUSY);
180 }
181 sha2_prov_handle = 0;
182 }
183
184 return (ret);
185 }
186
187 /*
188 * KCF software provider digest entry points.
189 */
190
191 static int
sha2_digest_init(crypto_ctx_t * ctx,crypto_mechanism_t * mechanism)192 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism)
193 {
194
195 /*
196 * Allocate and initialize SHA2 context.
197 */
198 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t), KM_SLEEP);
199 if (ctx->cc_provider_private == NULL)
200 return (CRYPTO_HOST_MEMORY);
201
202 PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
203 SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
204
205 return (CRYPTO_SUCCESS);
206 }
207
208 /*
209 * Helper SHA2 digest update function for uio data.
210 */
211 static int
sha2_digest_update_uio(SHA2_CTX * sha2_ctx,crypto_data_t * data)212 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
213 {
214 off_t offset = data->cd_offset;
215 size_t length = data->cd_length;
216 uint_t vec_idx = 0;
217 size_t cur_len;
218
219 /* we support only kernel buffer */
220 if (zfs_uio_segflg(data->cd_uio) != UIO_SYSSPACE)
221 return (CRYPTO_ARGUMENTS_BAD);
222
223 /*
224 * Jump to the first iovec containing data to be
225 * digested.
226 */
227 offset = zfs_uio_index_at_offset(data->cd_uio, offset, &vec_idx);
228 if (vec_idx == zfs_uio_iovcnt(data->cd_uio)) {
229 /*
230 * The caller specified an offset that is larger than the
231 * total size of the buffers it provided.
232 */
233 return (CRYPTO_DATA_LEN_RANGE);
234 }
235
236 /*
237 * Now do the digesting on the iovecs.
238 */
239 while (vec_idx < zfs_uio_iovcnt(data->cd_uio) && length > 0) {
240 cur_len = MIN(zfs_uio_iovlen(data->cd_uio, vec_idx) -
241 offset, length);
242
243 SHA2Update(sha2_ctx, (uint8_t *)zfs_uio_iovbase(data->cd_uio,
244 vec_idx) + offset, cur_len);
245 length -= cur_len;
246 vec_idx++;
247 offset = 0;
248 }
249
250 if (vec_idx == zfs_uio_iovcnt(data->cd_uio) && length > 0) {
251 /*
252 * The end of the specified iovec's was reached but
253 * the length requested could not be processed, i.e.
254 * The caller requested to digest more data than it provided.
255 */
256 return (CRYPTO_DATA_LEN_RANGE);
257 }
258
259 return (CRYPTO_SUCCESS);
260 }
261
262 /*
263 * Helper SHA2 digest final function for uio data.
264 * digest_len is the length of the desired digest. If digest_len
265 * is smaller than the default SHA2 digest length, the caller
266 * must pass a scratch buffer, digest_scratch, which must
267 * be at least the algorithm's digest length bytes.
268 */
269 static int
sha2_digest_final_uio(SHA2_CTX * sha2_ctx,crypto_data_t * digest,ulong_t digest_len,uchar_t * digest_scratch)270 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
271 ulong_t digest_len, uchar_t *digest_scratch)
272 {
273 off_t offset = digest->cd_offset;
274 uint_t vec_idx = 0;
275
276 /* we support only kernel buffer */
277 if (zfs_uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
278 return (CRYPTO_ARGUMENTS_BAD);
279
280 /*
281 * Jump to the first iovec containing ptr to the digest to
282 * be returned.
283 */
284 offset = zfs_uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
285 if (vec_idx == zfs_uio_iovcnt(digest->cd_uio)) {
286 /*
287 * The caller specified an offset that is
288 * larger than the total size of the buffers
289 * it provided.
290 */
291 return (CRYPTO_DATA_LEN_RANGE);
292 }
293
294 if (offset + digest_len <=
295 zfs_uio_iovlen(digest->cd_uio, vec_idx)) {
296 /*
297 * The computed SHA2 digest will fit in the current
298 * iovec.
299 */
300 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
301 (digest_len != SHA256_DIGEST_LENGTH)) ||
302 ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
303 (digest_len != SHA512_DIGEST_LENGTH))) {
304 /*
305 * The caller requested a short digest. Digest
306 * into a scratch buffer and return to
307 * the user only what was requested.
308 */
309 SHA2Final(digest_scratch, sha2_ctx);
310
311 memcpy((uchar_t *)
312 zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
313 digest_scratch, digest_len);
314 } else {
315 SHA2Final((uchar_t *)zfs_uio_iovbase(digest->
316 cd_uio, vec_idx) + offset,
317 sha2_ctx);
318
319 }
320 } else {
321 /*
322 * The computed digest will be crossing one or more iovec's.
323 * This is bad performance-wise but we need to support it.
324 * Allocate a small scratch buffer on the stack and
325 * copy it piece meal to the specified digest iovec's.
326 */
327 uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
328 off_t scratch_offset = 0;
329 size_t length = digest_len;
330 size_t cur_len;
331
332 SHA2Final(digest_tmp, sha2_ctx);
333
334 while (vec_idx < zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
335 cur_len =
336 MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
337 offset, length);
338 memcpy(
339 zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
340 digest_tmp + scratch_offset,
341 cur_len);
342
343 length -= cur_len;
344 vec_idx++;
345 scratch_offset += cur_len;
346 offset = 0;
347 }
348
349 if (vec_idx == zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
350 /*
351 * The end of the specified iovec's was reached but
352 * the length requested could not be processed, i.e.
353 * The caller requested to digest more data than it
354 * provided.
355 */
356 return (CRYPTO_DATA_LEN_RANGE);
357 }
358 }
359
360 return (CRYPTO_SUCCESS);
361 }
362
/*
 * Single-part digest: consume all of `data`, finalize into `digest`, and
 * tear down the context.  On CRYPTO_BUFFER_TOO_SMALL the context survives
 * so the caller can retry with a larger buffer; on any other outcome the
 * context is freed and cd_length reflects the digest length (or 0 on
 * failure).
 */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the mechanism recorded at init time to its digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 *
	 * NOTE(review): the uio path passes a NULL scratch buffer; see the
	 * short-digest note in sha2_digest_final_uio() regarding SHA384.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}
448
449 static int
sha2_digest_update(crypto_ctx_t * ctx,crypto_data_t * data)450 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data)
451 {
452 int ret = CRYPTO_SUCCESS;
453
454 ASSERT(ctx->cc_provider_private != NULL);
455
456 /*
457 * Do the SHA2 update on the specified input data.
458 */
459 switch (data->cd_format) {
460 case CRYPTO_DATA_RAW:
461 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
462 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
463 data->cd_length);
464 break;
465 case CRYPTO_DATA_UIO:
466 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
467 data);
468 break;
469 default:
470 ret = CRYPTO_ARGUMENTS_BAD;
471 }
472
473 return (ret);
474 }
475
/*
 * Multi-part digest final: write the digest into `digest` and free the
 * context.  As in sha2_digest(), a too-small output buffer returns
 * CRYPTO_BUFFER_TOO_SMALL with the required length and keeps the context
 * alive; any other path frees it.
 */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Map the mechanism recorded at init time to its digest size. */
	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 *
	 * NOTE(review): the uio path passes a NULL scratch buffer; see the
	 * short-digest note in sha2_digest_final_uio() regarding SHA384.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
536
537 static int
sha2_digest_atomic(crypto_mechanism_t * mechanism,crypto_data_t * data,crypto_data_t * digest)538 sha2_digest_atomic(crypto_mechanism_t *mechanism, crypto_data_t *data,
539 crypto_data_t *digest)
540 {
541 int ret = CRYPTO_SUCCESS;
542 SHA2_CTX sha2_ctx;
543 uint32_t sha_digest_len;
544
545 /*
546 * Do the SHA inits.
547 */
548
549 SHA2Init(mechanism->cm_type, &sha2_ctx);
550
551 switch (data->cd_format) {
552 case CRYPTO_DATA_RAW:
553 SHA2Update(&sha2_ctx, (uint8_t *)data->
554 cd_raw.iov_base + data->cd_offset, data->cd_length);
555 break;
556 case CRYPTO_DATA_UIO:
557 ret = sha2_digest_update_uio(&sha2_ctx, data);
558 break;
559 default:
560 ret = CRYPTO_ARGUMENTS_BAD;
561 }
562
563 /*
564 * Do the SHA updates on the specified input data.
565 */
566
567 if (ret != CRYPTO_SUCCESS) {
568 /* the update failed, bail */
569 digest->cd_length = 0;
570 return (ret);
571 }
572
573 if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
574 sha_digest_len = SHA256_DIGEST_LENGTH;
575 else
576 sha_digest_len = SHA512_DIGEST_LENGTH;
577
578 /*
579 * Do a SHA2 final, must be done separately since the digest
580 * type can be different than the input data type.
581 */
582 switch (digest->cd_format) {
583 case CRYPTO_DATA_RAW:
584 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
585 digest->cd_offset, &sha2_ctx);
586 break;
587 case CRYPTO_DATA_UIO:
588 ret = sha2_digest_final_uio(&sha2_ctx, digest,
589 sha_digest_len, NULL);
590 break;
591 default:
592 ret = CRYPTO_ARGUMENTS_BAD;
593 }
594
595 if (ret == CRYPTO_SUCCESS)
596 digest->cd_length = sha_digest_len;
597 else
598 digest->cd_length = 0;
599
600 return (ret);
601 }
602
603 /*
604 * KCF software provider mac entry points.
605 *
606 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
607 *
608 * Init:
609 * The initialization routine initializes what we denote
610 * as the inner and outer contexts by doing
611 * - for inner context: SHA2(key XOR ipad)
612 * - for outer context: SHA2(key XOR opad)
613 *
614 * Update:
615 * Each subsequent SHA2 HMAC update will result in an
616 * update of the inner context with the specified data.
617 *
618 * Final:
619 * The SHA2 HMAC final will do a SHA2 final operation on the
620 * inner context, and the resulting digest will be used
621 * as the data for an update on the outer context. Last
622 * but not least, a SHA2 final on the outer context will
623 * be performed to obtain the SHA2 HMAC digest to return
624 * to the user.
625 */
626
627 /*
628 * Initialize a SHA2-HMAC context.
629 */
630 static void
sha2_mac_init_ctx(sha2_hmac_ctx_t * ctx,void * keyval,uint_t length_in_bytes)631 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
632 {
633 uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
634 uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
635 int i, block_size, blocks_per_int64;
636
637 /* Determine the block size */
638 if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
639 block_size = SHA256_HMAC_BLOCK_SIZE;
640 blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
641 } else {
642 block_size = SHA512_HMAC_BLOCK_SIZE;
643 blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
644 }
645
646 (void) memset(ipad, 0, block_size);
647 (void) memset(opad, 0, block_size);
648
649 if (keyval != NULL) {
650 (void) memcpy(ipad, keyval, length_in_bytes);
651 (void) memcpy(opad, keyval, length_in_bytes);
652 } else {
653 ASSERT0(length_in_bytes);
654 }
655
656 /* XOR key with ipad (0x36) and opad (0x5c) */
657 for (i = 0; i < blocks_per_int64; i ++) {
658 ipad[i] ^= 0x3636363636363636;
659 opad[i] ^= 0x5c5c5c5c5c5c5c5c;
660 }
661
662 /* perform SHA2 on ipad */
663 SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
664 SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
665
666 /* perform SHA2 on opad */
667 SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
668 SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
669 }
670
671 /*
672 */
673 static int
sha2_mac_init(crypto_ctx_t * ctx,crypto_mechanism_t * mechanism,crypto_key_t * key,crypto_spi_ctx_template_t ctx_template)674 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
675 crypto_key_t *key, crypto_spi_ctx_template_t ctx_template)
676 {
677 int ret = CRYPTO_SUCCESS;
678 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
679 uint_t sha_digest_len, sha_hmac_block_size;
680
681 /*
682 * Set the digest length and block size to values appropriate to the
683 * mechanism
684 */
685 switch (mechanism->cm_type) {
686 case SHA256_HMAC_MECH_INFO_TYPE:
687 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
688 sha_digest_len = SHA256_DIGEST_LENGTH;
689 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
690 break;
691 case SHA384_HMAC_MECH_INFO_TYPE:
692 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
693 case SHA512_HMAC_MECH_INFO_TYPE:
694 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
695 sha_digest_len = SHA512_DIGEST_LENGTH;
696 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
697 break;
698 default:
699 return (CRYPTO_MECHANISM_INVALID);
700 }
701
702 ctx->cc_provider_private =
703 kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
704 if (ctx->cc_provider_private == NULL)
705 return (CRYPTO_HOST_MEMORY);
706
707 PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
708 if (ctx_template != NULL) {
709 /* reuse context template */
710 memcpy(PROV_SHA2_HMAC_CTX(ctx), ctx_template,
711 sizeof (sha2_hmac_ctx_t));
712 } else {
713 /* no context template, compute context */
714 if (keylen_in_bytes > sha_hmac_block_size) {
715 uchar_t digested_key[SHA512_DIGEST_LENGTH];
716 sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
717
718 /*
719 * Hash the passed-in key to get a smaller key.
720 * The inner context is used since it hasn't been
721 * initialized yet.
722 */
723 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
724 &hmac_ctx->hc_icontext,
725 key->ck_data, keylen_in_bytes, digested_key);
726 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
727 digested_key, sha_digest_len);
728 } else {
729 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
730 key->ck_data, keylen_in_bytes);
731 }
732 }
733
734 /*
735 * Get the mechanism parameters, if applicable.
736 */
737 if (mechanism->cm_type % 3 == 2) {
738 if (mechanism->cm_param == NULL ||
739 mechanism->cm_param_len != sizeof (ulong_t)) {
740 ret = CRYPTO_MECHANISM_PARAM_INVALID;
741 } else {
742 PROV_SHA2_GET_DIGEST_LEN(mechanism,
743 PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
744 if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
745 sha_digest_len)
746 ret = CRYPTO_MECHANISM_PARAM_INVALID;
747 }
748 }
749
750 if (ret != CRYPTO_SUCCESS) {
751 memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
752 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
753 ctx->cc_provider_private = NULL;
754 }
755
756 return (ret);
757 }
758
759 static int
sha2_mac_update(crypto_ctx_t * ctx,crypto_data_t * data)760 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data)
761 {
762 int ret = CRYPTO_SUCCESS;
763
764 ASSERT(ctx->cc_provider_private != NULL);
765
766 /*
767 * Do a SHA2 update of the inner context using the specified
768 * data.
769 */
770 switch (data->cd_format) {
771 case CRYPTO_DATA_RAW:
772 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
773 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
774 data->cd_length);
775 break;
776 case CRYPTO_DATA_UIO:
777 ret = sha2_digest_update_uio(
778 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
779 break;
780 default:
781 ret = CRYPTO_ARGUMENTS_BAD;
782 }
783
784 return (ret);
785 }
786
/*
 * KCF mac_final entry point: finish the inner hash, feed its digest to
 * the outer context, and emit the HMAC (possibly truncated, for the
 * *_HMAC_GENERAL mechanisms) into `mac`.  The context survives a
 * CRYPTO_BUFFER_TOO_SMALL probe; every other path zeroizes and frees it.
 */
static int
sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	uint32_t digest_len, sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Set the digest lengths to values appropriate to the mechanism */
	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		break;
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		/* hc_digest_len was validated in sha2_mac_init() */
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	default:
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
		mac->cd_length = digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 */
	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
	    sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computing
	 * digest in the users buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
			memcpy((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		/* `digest` doubles as the scratch buffer for short MACs. */
		ret = sha2_digest_final_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		mac->cd_length = digest_len;
	else
		mac->cd_length = 0;

	/* Zeroize key-derived state before releasing the memory. */
	memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
883
/*
 * Absorb `data` (raw or uio) into the inner context of a stack-allocated
 * HMAC context; sets `ret` on failure.  Wrapped in do/while (0) so the
 * macro expands to a single statement and composes safely with unbraced
 * if/else.
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) do {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
} while (0)
898
/*
 * KCF mac_atomic entry point: compute a complete HMAC in one call using a
 * stack-allocated context, from either a context template or the raw key.
 * Handles *_HMAC_GENERAL truncation and the HMAC-SHA384 quirk where the
 * outer hash consumes only 48 bytes of the inner digest.
 */
static int
sha2_mac_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 *
			 * NOTE(review): cm_type / 3 relies on the mechanism
			 * constant layout mapping an HMAC mech to its plain
			 * digest mech — verify against the mech type enum.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/* get the mechanism parameters, if applicable */
	if ((mechanism->cm_type % 3) == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
	 * bytes of the inner hash value.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the users buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
			memcpy((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		/* `digest` doubles as the scratch buffer for short MACs. */
		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * NOTE(review): the success path returns here without the memset
	 * performed on the bail path, leaving the key-derived stack context
	 * un-zeroized — confirm whether that is intentional.
	 */
	if (ret == CRYPTO_SUCCESS) {
		mac->cd_length = digest_len;
		return (CRYPTO_SUCCESS);
	}
bail:
	memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
1030
1031 static int
sha2_mac_verify_atomic(crypto_mechanism_t * mechanism,crypto_key_t * key,crypto_data_t * data,crypto_data_t * mac,crypto_spi_ctx_template_t ctx_template)1032 sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
1033 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1034 crypto_spi_ctx_template_t ctx_template)
1035 {
1036 int ret = CRYPTO_SUCCESS;
1037 uchar_t digest[SHA512_DIGEST_LENGTH];
1038 sha2_hmac_ctx_t sha2_hmac_ctx;
1039 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1040 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1041
1042 /*
1043 * Set the digest length and block size to values appropriate to the
1044 * mechanism
1045 */
1046 switch (mechanism->cm_type) {
1047 case SHA256_HMAC_MECH_INFO_TYPE:
1048 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1049 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1050 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1051 break;
1052 case SHA384_HMAC_MECH_INFO_TYPE:
1053 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1054 case SHA512_HMAC_MECH_INFO_TYPE:
1055 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1056 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1057 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1058 break;
1059 default:
1060 return (CRYPTO_MECHANISM_INVALID);
1061 }
1062
1063 if (ctx_template != NULL) {
1064 /* reuse context template */
1065 memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
1066 } else {
1067 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1068 /* no context template, initialize context */
1069 if (keylen_in_bytes > sha_hmac_block_size) {
1070 /*
1071 * Hash the passed-in key to get a smaller key.
1072 * The inner context is used since it hasn't been
1073 * initialized yet.
1074 */
1075 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1076 &sha2_hmac_ctx.hc_icontext,
1077 key->ck_data, keylen_in_bytes, digest);
1078 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1079 sha_digest_len);
1080 } else {
1081 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1082 keylen_in_bytes);
1083 }
1084 }
1085
1086 /* get the mechanism parameters, if applicable */
1087 if (mechanism->cm_type % 3 == 2) {
1088 if (mechanism->cm_param == NULL ||
1089 mechanism->cm_param_len != sizeof (ulong_t)) {
1090 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1091 goto bail;
1092 }
1093 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1094 if (digest_len > sha_digest_len) {
1095 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1096 goto bail;
1097 }
1098 }
1099
1100 if (mac->cd_length != digest_len) {
1101 ret = CRYPTO_INVALID_MAC;
1102 goto bail;
1103 }
1104
1105 /* do a SHA2 update of the inner context using the specified data */
1106 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1107 if (ret != CRYPTO_SUCCESS)
1108 /* the update failed, free context and bail */
1109 goto bail;
1110
1111 /* do a SHA2 final on the inner context */
1112 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1113
1114 /*
1115 * Do an SHA2 update on the outer context, feeding the inner
1116 * digest as data.
1117 *
1118 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1119 * bytes of the inner hash value.
1120 */
1121 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1122 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1123 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1124 SHA384_DIGEST_LENGTH);
1125 else
1126 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1127
1128 /*
1129 * Do a SHA2 final on the outer context, storing the computed
1130 * digest in the users buffer.
1131 */
1132 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1133
1134 /*
1135 * Compare the computed digest against the expected digest passed
1136 * as argument.
1137 */
1138
1139 switch (mac->cd_format) {
1140
1141 case CRYPTO_DATA_RAW:
1142 if (memcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1143 mac->cd_offset, digest_len) != 0)
1144 ret = CRYPTO_INVALID_MAC;
1145 break;
1146
1147 case CRYPTO_DATA_UIO: {
1148 off_t offset = mac->cd_offset;
1149 uint_t vec_idx = 0;
1150 off_t scratch_offset = 0;
1151 size_t length = digest_len;
1152 size_t cur_len;
1153
1154 /* we support only kernel buffer */
1155 if (zfs_uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
1156 return (CRYPTO_ARGUMENTS_BAD);
1157
1158 /* jump to the first iovec containing the expected digest */
1159 offset = zfs_uio_index_at_offset(mac->cd_uio, offset, &vec_idx);
1160 if (vec_idx == zfs_uio_iovcnt(mac->cd_uio)) {
1161 /*
1162 * The caller specified an offset that is
1163 * larger than the total size of the buffers
1164 * it provided.
1165 */
1166 ret = CRYPTO_DATA_LEN_RANGE;
1167 break;
1168 }
1169
1170 /* do the comparison of computed digest vs specified one */
1171 while (vec_idx < zfs_uio_iovcnt(mac->cd_uio) && length > 0) {
1172 cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
1173 offset, length);
1174
1175 if (memcmp(digest + scratch_offset,
1176 zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
1177 cur_len) != 0) {
1178 ret = CRYPTO_INVALID_MAC;
1179 break;
1180 }
1181
1182 length -= cur_len;
1183 vec_idx++;
1184 scratch_offset += cur_len;
1185 offset = 0;
1186 }
1187 break;
1188 }
1189
1190 default:
1191 ret = CRYPTO_ARGUMENTS_BAD;
1192 }
1193
1194 return (ret);
1195 bail:
1196 memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
1197 mac->cd_length = 0;
1198 return (ret);
1199 }
1200
1201 /*
1202 * KCF software provider context management entry points.
1203 */
1204
1205 static int
sha2_create_ctx_template(crypto_mechanism_t * mechanism,crypto_key_t * key,crypto_spi_ctx_template_t * ctx_template,size_t * ctx_template_size)1206 sha2_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
1207 crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size)
1208 {
1209 sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1210 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1211 uint32_t sha_digest_len, sha_hmac_block_size;
1212
1213 /*
1214 * Set the digest length and block size to values appropriate to the
1215 * mechanism
1216 */
1217 switch (mechanism->cm_type) {
1218 case SHA256_HMAC_MECH_INFO_TYPE:
1219 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1220 sha_digest_len = SHA256_DIGEST_LENGTH;
1221 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1222 break;
1223 case SHA384_HMAC_MECH_INFO_TYPE:
1224 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1225 case SHA512_HMAC_MECH_INFO_TYPE:
1226 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1227 sha_digest_len = SHA512_DIGEST_LENGTH;
1228 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1229 break;
1230 default:
1231 return (CRYPTO_MECHANISM_INVALID);
1232 }
1233
1234 /*
1235 * Allocate and initialize SHA2 context.
1236 */
1237 sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
1238 if (sha2_hmac_ctx_tmpl == NULL)
1239 return (CRYPTO_HOST_MEMORY);
1240
1241 sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1242
1243 if (keylen_in_bytes > sha_hmac_block_size) {
1244 uchar_t digested_key[SHA512_DIGEST_LENGTH];
1245
1246 /*
1247 * Hash the passed-in key to get a smaller key.
1248 * The inner context is used since it hasn't been
1249 * initialized yet.
1250 */
1251 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1252 &sha2_hmac_ctx_tmpl->hc_icontext,
1253 key->ck_data, keylen_in_bytes, digested_key);
1254 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1255 sha_digest_len);
1256 } else {
1257 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1258 keylen_in_bytes);
1259 }
1260
1261 *ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1262 *ctx_template_size = sizeof (sha2_hmac_ctx_t);
1263
1264 return (CRYPTO_SUCCESS);
1265 }
1266
1267 static int
sha2_free_context(crypto_ctx_t * ctx)1268 sha2_free_context(crypto_ctx_t *ctx)
1269 {
1270 uint_t ctx_len;
1271
1272 if (ctx->cc_provider_private == NULL)
1273 return (CRYPTO_SUCCESS);
1274
1275 /*
1276 * We have to free either SHA2 or SHA2-HMAC contexts, which
1277 * have different lengths.
1278 *
1279 * Note: Below is dependent on the mechanism ordering.
1280 */
1281
1282 if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1283 ctx_len = sizeof (sha2_ctx_t);
1284 else
1285 ctx_len = sizeof (sha2_hmac_ctx_t);
1286
1287 memset(ctx->cc_provider_private, 0, ctx_len);
1288 kmem_free(ctx->cc_provider_private, ctx_len);
1289 ctx->cc_provider_private = NULL;
1290
1291 return (CRYPTO_SUCCESS);
1292 }
1293