// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	err = shash->setkey(tfm, key, keylen);
	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	return crypto_shash_alg(desc->tfm)->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	return crypto_shash_alg(desc->tfm)->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
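
/*
 * Usage sketch (illustrative, not part of this file): incremental hashing
 * splits a one-shot digest into init/update/final steps, which suits data
 * that arrives in pieces.  The desc must wrap an allocated shash tfm, and
 * the buffer names below (part1, part2, out) are hypothetical.
 *
 *	err = crypto_shash_init(desc);
 *	if (!err)
 *		err = crypto_shash_update(desc, part1, part1_len);
 *	if (!err)
 *		err = crypto_shash_update(desc, part2, part2_len);
 *	if (!err)
 *		err = crypto_shash_final(desc, out);
 */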

static int shash_default_finup(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->update(desc, data, len) ?:
	       shash->final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	return crypto_shash_alg(desc->tfm)->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_default_digest(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->init(desc) ?:
	       shash->finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_shash_alg(tfm)->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
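
/*
 * Usage sketch (illustrative, not part of this file): a caller that only
 * needs a one-shot digest and keeps no shash_desc of its own can let
 * crypto_shash_tfm_digest() manage the on-stack descriptor.  The algorithm
 * name "sha256" and the helper name are just examples; @out must hold
 * crypto_shash_digestsize(tfm) bytes.
 *
 *	static int example_digest(const u8 *data, unsigned int len, u8 *out)
 *	{
 *		struct crypto_shash *tfm;
 *		int err;
 *
 *		tfm = crypto_alloc_shash("sha256", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *		err = crypto_shash_tfm_digest(tfm, data, len, out);
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 */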

int crypto_shash_export(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (shash->export)
		return shash->export(desc, out);

	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_export);

int crypto_shash_import(struct shash_desc *desc, const void *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (shash->import)
		return shash->import(desc, in);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
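
/*
 * Usage sketch (illustrative, not part of this file): export/import lets a
 * caller checkpoint a partially hashed stream and resume it later on a
 * descriptor backed by the same algorithm.  The state buffer must hold at
 * least crypto_shash_statesize(tfm) bytes; desc, desc2, buf and rest below
 * are hypothetical names.
 *
 *	u8 state[HASH_MAX_STATESIZE];
 *
 *	err = crypto_shash_update(desc, buf, len);
 *	if (!err)
 *		err = crypto_shash_export(desc, state);
 *	...
 *	err = crypto_shash_import(desc2, state);
 *	if (!err)
 *		err = crypto_shash_finup(desc2, rest, rest_len, out);
 */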

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
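
/*
 * Usage sketch (illustrative, not part of this file): typical caller
 * lifecycle for a keyed hash using an on-stack descriptor.  The algorithm
 * name "hmac(sha256)" and the variables key, keylen, data, len and out are
 * examples only.
 *
 *	struct crypto_shash *tfm;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *	if (!err) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		err = crypto_shash_digest(desc, data, len, out);
 *		shash_desc_zero(desc);
 *	}
 *	crypto_free_shash(tfm);
 */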

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	/* alignmask is not useful for hashes, so it is not supported. */
	if (base->cra_alignmask)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/*
	 * Handle missing optional functions.  For each one we can either
	 * install a default here, or we can leave the pointer as NULL and check
	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
	 * when the default behavior is desired.  For ->finup and ->digest we
	 * install defaults, since for optimal performance algorithms should
	 * implement these anyway.  On the other hand, for ->import and
	 * ->export the common case and best performance comes from the simple
	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
	 * leave them NULL and provide the memcpy with no indirect call.
	 */
	if (!alg->finup)
		alg->finup = shash_default_finup;
	if (!alg->digest)
		alg->digest = shash_default_digest;
	if (!alg->export)
		alg->halg.statesize = alg->descsize;
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
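
/*
 * Registration sketch (illustrative, not part of this file): a minimal
 * unkeyed shash driver only has to provide ->init, ->update and ->final;
 * shash_prepare_alg() fills in ->finup, ->digest and ->setkey defaults and
 * derives ->statesize from ->descsize when no ->export/->import pair is
 * given.  All "example_*" names and size macros below are hypothetical.
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= EXAMPLE_DIGEST_SIZE,
 *		.descsize	= sizeof(struct example_state),
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.base		= {
 *			.cra_name	 = "example",
 *			.cra_driver_name = "example-generic",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = EXAMPLE_BLOCK_SIZE,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&example_alg);
 */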

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");