// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

static const struct crypto_type crypto_ahash_type;

struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	u32 flags;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

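/* Map an ahash transform back to the ahash_alg that implements it. */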
static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
			    halg);
}

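/*
 * Map the current page of the walk and return the number of bytes that
 * may be processed from it.  If the offset is unaligned for the
 * algorithm, the step is shortened so the next one starts on an
 * alignment boundary.
 */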
static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;

	if (offset & alignmask) {
		unsigned int unaligned = alignmask + 1 - (offset & alignmask);

		if (nbytes > unaligned)
			nbytes = unaligned;
	}

	walk->entrylen -= nbytes;
	return nbytes;
}

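/* Begin walking a new scatterlist entry, clamped to the bytes remaining. */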
static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

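/*
 * Finish the current step of a hash walk.  Returns the size of the next
 * chunk to process, zero once the walk is complete, or a negative errno
 * if @err was set by the caller.
 */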
int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;

	walk->data -= walk->offset;

	if (walk->entrylen && (walk->offset & alignmask) && !err) {
		unsigned int nbytes;

		walk->offset = ALIGN(walk->offset, alignmask + 1);
		nbytes = min(walk->entrylen,
			     (unsigned int)(PAGE_SIZE - walk->offset));
		if (nbytes) {
			walk->entrylen -= nbytes;
			walk->data += walk->offset;
			return nbytes;
		}
	}

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

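/*
 * Start a hash walk over the request's source scatterlist.  Returns the
 * size of the first chunk, or zero if the request has no data.
 */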
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);
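
/*
 * A minimal caller-side sketch of the walk API; process() stands in for
 * whatever a driver does with each mapped chunk and is not a real helper:
 *
 *	struct crypto_hash_walk walk;
 *	int nbytes;
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, 0))
 *		process(walk.data, nbytes);
 */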
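/*
 * Bounce an unaligned key through a freshly allocated buffer that
 * satisfies the algorithm's alignment mask, then wipe the copy.
 */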
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm)
{
	const struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (tfm->setkey != ahash_nosetkey &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

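/*
 * Set the key for an ahash transform, bouncing unaligned keys through an
 * aligned buffer.  On failure the tfm is marked as needing a key again.
 */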
int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = ahash_setkey_unaligned(tfm, key, keylen);
	else
		err = tfm->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		ahash_set_needkey(tfm);
		return err;
	}

	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

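/*
 * Save the original request into a self-contained sub-request.  The
 * sub-request is allocated as a single block laid out as:
 *
 *	struct ahash_request || private ctx (reqsize) || aligned result (ds)
 *
 * so that an unaligned req->result can be substituted with an aligned
 * buffer.  When @has_state is set, the hash state is carried over via
 * export/import.
 */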
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;
	subreq_size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;
	result = PTR_ALIGN(result, alignmask + 1);

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

static void ahash_op_unaligned_done(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		goto out;

	/* First copy the subrequest's result back into req->result. */
	ahash_restore_req(areq, err);

out:
	/* Complete the ORIGINAL request. */
	ahash_request_complete(areq, err);
}

static int ahash_op_unaligned(struct ahash_request *req,
			      int (*op)(struct ahash_request *),
			      bool has_state)
{
	int err;

	err = ahash_save_req(req, ahash_op_unaligned_done, has_state);
	if (err)
		return err;

	err = op(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	ahash_restore_req(req, err);

	return err;
}

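/*
 * Run @op on the request, transparently falling back to an aligned
 * sub-request when req->result does not satisfy the alignment mask.
 */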
static int crypto_ahash_op(struct ahash_request *req,
			   int (*op)(struct ahash_request *),
			   bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int err;

	if ((unsigned long)req->result & alignmask)
		err = ahash_op_unaligned(req, op, has_state);
	else
		err = op(req);

	return crypto_hash_errstat(crypto_hash_alg_common(tfm), err);
}

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&hash_get_stat(alg)->hash_cnt);

	return crypto_ahash_op(req, tfm->final, true);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = hash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	return crypto_ahash_op(req, tfm->finup, true);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = hash_get_stat(alg);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(req->nbytes, &istat->hash_tlen);
	}

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return crypto_hash_errstat(alg, -ENOKEY);

	return crypto_ahash_op(req, tfm->digest, false);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
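
/*
 * A minimal synchronous-caller sketch of the one-shot digest path,
 * assuming sg already describes the data and out has room for the
 * digest (error handling elided):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, sg, out, nbytes);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */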

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_reqtfm(req)->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

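/*
 * Default ->finup implementation for drivers that only provide ->update
 * and ->final: run both steps against a saved copy of the request so the
 * original survives asynchronous completion.
 */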
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = tfm->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

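/*
 * Initialise the per-tfm ahash ops.  shash algorithms are transparently
 * wrapped via the async shash adapter instead.
 */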
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;
	hash->export = alg->export;
	hash->import = alg->import;

	if (alg->setkey) {
		hash->setkey = alg->setkey;
		ahash_set_needkey(hash);
	}

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type != &crypto_ahash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static int __maybe_unused crypto_ahash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "ahash");
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_ahash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

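/*
 * Clone a hash transform.  Keyless algorithms carry no key-derived
 * state, so taking a reference on the existing tfm suffices; keyed
 * algorithms get a fresh tfm with the ops copied across.
 */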
struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->init = hash->init;
	nhash->update = hash->update;
	nhash->final = hash->final;
	nhash->finup = hash->finup;
	nhash->digest = hash->digest;
	nhash->export = hash->export;
	nhash->import = hash->import;
	nhash->setkey = hash->setkey;
	nhash->reqsize = hash->reqsize;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_clone_shash_ops_async(nhash, hash);

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

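/*
 * An illustrative (not complete) driver-side registration sketch; the
 * my_* names are placeholders, and a real driver also fills in
 * cra_ctxsize, cra_module and its tfm callbacks:
 *
 *	static struct ahash_alg my_alg = {
 *		.init	= my_init,
 *		.update	= my_update,
 *		.final	= my_final,
 *		.digest	= my_digest,
 *		.halg	= {
 *			.digestsize	= SHA256_DIGEST_SIZE,
 *			.statesize	= sizeof(struct my_state),
 *			.base	= {
 *				.cra_name		= "sha256",
 *				.cra_driver_name	= "sha256-mydev",
 *				.cra_priority		= 300,
 *				.cra_flags		= CRYPTO_ALG_ASYNC,
 *				.cra_blocksize		= SHA256_BLOCK_SIZE,
 *			},
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&my_alg);
 */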
int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type != &crypto_ahash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != NULL;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");