// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linear symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers.
 *
 * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/netlink.h>
#include "skcipher.h"

static inline struct crypto_lskcipher *__crypto_lskcipher_cast(
	struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_lskcipher, base);
}

static inline struct lskcipher_alg *__crypto_lskcipher_alg(
	struct crypto_alg *alg)
{
	return container_of(alg, struct lskcipher_alg, co.base);
}

static inline struct crypto_istat_cipher *lskcipher_get_stat(
	struct lskcipher_alg *alg)
{
	return skcipher_get_stat_common(&alg->co);
}

static inline int crypto_lskcipher_errstat(struct lskcipher_alg *alg, int err)
{
	struct crypto_istat_cipher *istat = lskcipher_get_stat(alg);

	if (!IS_ENABLED(CONFIG_CRYPTO_STATS))
		return err;

	if (err)
		atomic64_inc(&istat->err_cnt);

	return err;
}

/* Bounce the key through an aligned buffer before handing it to ->setkey(). */
static int lskcipher_setkey_unaligned(struct crypto_lskcipher *tfm,
				      const u8 *key, unsigned int keylen)
{
	unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
	struct lskcipher_alg *cipher = crypto_lskcipher_alg(tfm);
	u8 *buffer, *alignbuffer;
	unsigned long absize;
	int ret;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cipher->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
	struct lskcipher_alg *cipher = crypto_lskcipher_alg(tfm);

	if (keylen < cipher->co.min_keysize || keylen > cipher->co.max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return lskcipher_setkey_unaligned(tfm, key, keylen);
	else
		return cipher->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_setkey);

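/*
 * Illustrative sketch, not part of this file: a typical caller of
 * crypto_lskcipher_setkey().  "tfm", "key" and "keylen" are assumed
 * to exist.  A keylen outside the algorithm's [min_keysize,
 * max_keysize] range is rejected with -EINVAL, and an unaligned key
 * pointer is handled transparently via the bounce-buffer path above.
 *
 *	err = crypto_lskcipher_setkey(tfm, key, keylen);
 *	if (err)
 *		return err;
 */
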
/*
 * Process an unaligned request by copying the data through a pair of
 * bounce buffers: one page for the data and one for the IV and state.
 */
static int crypto_lskcipher_crypt_unaligned(
	struct crypto_lskcipher *tfm, const u8 *src, u8 *dst, unsigned len,
	u8 *iv, int (*crypt)(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv, u32 flags))
{
	unsigned statesize = crypto_lskcipher_statesize(tfm);
	unsigned ivsize = crypto_lskcipher_ivsize(tfm);
	unsigned bs = crypto_lskcipher_blocksize(tfm);
	unsigned cs = crypto_lskcipher_chunksize(tfm);
	int err;
	u8 *tiv;
	u8 *p;

	BUILD_BUG_ON(MAX_CIPHER_BLOCKSIZE > PAGE_SIZE ||
		     MAX_CIPHER_ALIGNMASK >= PAGE_SIZE);

	tiv = kmalloc(PAGE_SIZE, GFP_ATOMIC);
	if (!tiv)
		return -ENOMEM;

	memcpy(tiv, iv, ivsize + statesize);

	p = kmalloc(PAGE_SIZE, GFP_ATOMIC);
	err = -ENOMEM;
	if (!p)
		goto out;

	while (len >= bs) {
		unsigned chunk = min((unsigned)PAGE_SIZE, len);

		/* Round partial pages down to a multiple of the chunk size. */
		if (chunk > cs)
			chunk &= ~(cs - 1);

		memcpy(p, src, chunk);
		err = crypt(tfm, p, p, chunk, tiv, CRYPTO_LSKCIPHER_FLAG_FINAL);
		if (err)
			goto out;

		memcpy(dst, p, chunk);
		src += chunk;
		dst += chunk;
		len -= chunk;
	}

	err = len ? -EINVAL : 0;

out:
	memcpy(iv, tiv, ivsize + statesize);
	kfree_sensitive(p);
	kfree_sensitive(tiv);
	return err;
}

static int crypto_lskcipher_crypt(struct crypto_lskcipher *tfm, const u8 *src,
				  u8 *dst, unsigned len, u8 *iv,
				  int (*crypt)(struct crypto_lskcipher *tfm,
					       const u8 *src, u8 *dst,
					       unsigned len, u8 *iv,
					       u32 flags))
{
	unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);
	int ret;

	/* Take the bounce-buffer path if any of the pointers is unaligned. */
	if (((unsigned long)src | (unsigned long)dst | (unsigned long)iv) &
	    alignmask) {
		ret = crypto_lskcipher_crypt_unaligned(tfm, src, dst, len, iv,
						       crypt);
		goto out;
	}

	ret = crypt(tfm, src, dst, len, iv, CRYPTO_LSKCIPHER_FLAG_FINAL);

out:
	return crypto_lskcipher_errstat(alg, ret);
}

int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv)
{
	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_cipher *istat = lskcipher_get_stat(alg);

		atomic64_inc(&istat->encrypt_cnt);
		atomic64_add(len, &istat->encrypt_tlen);
	}

	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->encrypt);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt);

int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
			     u8 *dst, unsigned len, u8 *iv)
{
	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_cipher *istat = lskcipher_get_stat(alg);

		atomic64_inc(&istat->decrypt_cnt);
		atomic64_add(len, &istat->decrypt_tlen);
	}

	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->decrypt);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_decrypt);

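/*
 * Illustrative sketch, not part of this file: a one-shot encrypt and
 * decrypt over virtually contiguous buffers, assuming a tfm that has
 * already been allocated and keyed.  The IV buffer must hold ivsize
 * plus statesize bytes for the chosen algorithm; "src", "dst" and
 * "len" are hypothetical.
 *
 *	u8 iv[16] = { };	// ivsize + statesize for the chosen alg
 *	int err;
 *
 *	err = crypto_lskcipher_encrypt(tfm, src, dst, len, iv);
 *	if (!err) {
 *		memset(iv, 0, sizeof(iv));	// restart the chaining state
 *		err = crypto_lskcipher_decrypt(tfm, dst, dst, len, iv);
 *	}
 */
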
static int crypto_lskcipher_crypt_sg(struct skcipher_request *req,
				     int (*crypt)(struct crypto_lskcipher *tfm,
						  const u8 *src, u8 *dst,
						  unsigned len, u8 *ivs,
						  u32 flags))
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
	u8 *ivs = skcipher_request_ctx(req);
	struct crypto_lskcipher *tfm = *ctx;
	struct skcipher_walk walk;
	unsigned ivsize;
	u32 flags;
	int err;

	ivsize = crypto_lskcipher_ivsize(tfm);
	ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(skcipher) + 1);
	memcpy(ivs, req->iv, ivsize);

	flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	if (req->base.flags & CRYPTO_SKCIPHER_REQ_CONT)
		flags |= CRYPTO_LSKCIPHER_FLAG_CONT;

	if (!(req->base.flags & CRYPTO_SKCIPHER_REQ_NOTFINAL))
		flags |= CRYPTO_LSKCIPHER_FLAG_FINAL;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		err = crypt(tfm, walk.src.virt.addr, walk.dst.virt.addr,
			    walk.nbytes, ivs,
			    flags & ~(walk.nbytes == walk.total ?
			    0 : CRYPTO_LSKCIPHER_FLAG_FINAL));
		err = skcipher_walk_done(&walk, err);
		flags |= CRYPTO_LSKCIPHER_FLAG_CONT;
	}

	memcpy(req->iv, ivs, ivsize);

	return err;
}

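/*
 * Note on the flag sequence seen by the underlying algorithm: when the
 * walk above is split into several chunks, the low-level hook is
 * invoked once per chunk, roughly as sketched below (illustrative
 * only):
 *
 *	crypt(tfm, src0, dst0, n0, ivs, flags);
 *	crypt(tfm, src1, dst1, n1, ivs, flags | CRYPTO_LSKCIPHER_FLAG_CONT);
 *	...
 *	crypt(tfm, srcN, dstN, nN, ivs, flags | CRYPTO_LSKCIPHER_FLAG_CONT |
 *					CRYPTO_LSKCIPHER_FLAG_FINAL);
 *
 * CRYPTO_LSKCIPHER_FLAG_FINAL is masked off for every call except the
 * one that consumes the tail of the request, and is omitted entirely
 * when the caller set CRYPTO_SKCIPHER_REQ_NOTFINAL.
 */
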
int crypto_lskcipher_encrypt_sg(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(*ctx);

	return crypto_lskcipher_crypt_sg(req, alg->encrypt);
}

int crypto_lskcipher_decrypt_sg(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(*ctx);

	return crypto_lskcipher_crypt_sg(req, alg->decrypt);
}

static void crypto_lskcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher *skcipher = __crypto_lskcipher_cast(tfm);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(skcipher);

	alg->exit(skcipher);
}

static int crypto_lskcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher *skcipher = __crypto_lskcipher_cast(tfm);
	struct lskcipher_alg *alg = crypto_lskcipher_alg(skcipher);

	if (alg->exit)
		skcipher->base.exit = crypto_lskcipher_exit_tfm;

	if (alg->init)
		return alg->init(skcipher);

	return 0;
}

static void crypto_lskcipher_free_instance(struct crypto_instance *inst)
{
	struct lskcipher_instance *skcipher =
		container_of(inst, struct lskcipher_instance, s.base);

	skcipher->free(skcipher);
}

static void __maybe_unused crypto_lskcipher_show(
	struct seq_file *m, struct crypto_alg *alg)
{
	struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);

	seq_printf(m, "type         : lskcipher\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "min keysize  : %u\n", skcipher->co.min_keysize);
	seq_printf(m, "max keysize  : %u\n", skcipher->co.max_keysize);
	seq_printf(m, "ivsize       : %u\n", skcipher->co.ivsize);
	seq_printf(m, "chunksize    : %u\n", skcipher->co.chunksize);
	seq_printf(m, "statesize    : %u\n", skcipher->co.statesize);
}

static int __maybe_unused crypto_lskcipher_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);
	struct crypto_report_blkcipher rblkcipher;

	memset(&rblkcipher, 0, sizeof(rblkcipher));

	strscpy(rblkcipher.type, "lskcipher", sizeof(rblkcipher.type));
	strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));

	rblkcipher.blocksize = alg->cra_blocksize;
	rblkcipher.min_keysize = skcipher->co.min_keysize;
	rblkcipher.max_keysize = skcipher->co.max_keysize;
	rblkcipher.ivsize = skcipher->co.ivsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
		       sizeof(rblkcipher), &rblkcipher);
}

static int __maybe_unused crypto_lskcipher_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);
	struct crypto_istat_cipher *istat;
	struct crypto_stat_cipher rcipher;

	istat = lskcipher_get_stat(skcipher);

	memset(&rcipher, 0, sizeof(rcipher));

	strscpy(rcipher.type, "cipher", sizeof(rcipher.type));

	rcipher.stat_encrypt_cnt = atomic64_read(&istat->encrypt_cnt);
	rcipher.stat_encrypt_tlen = atomic64_read(&istat->encrypt_tlen);
	rcipher.stat_decrypt_cnt = atomic64_read(&istat->decrypt_cnt);
	rcipher.stat_decrypt_tlen = atomic64_read(&istat->decrypt_tlen);
	rcipher.stat_err_cnt = atomic64_read(&istat->err_cnt);

	return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
}

static const struct crypto_type crypto_lskcipher_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_lskcipher_init_tfm,
	.free = crypto_lskcipher_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_lskcipher_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_lskcipher_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_lskcipher_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_LSKCIPHER,
	.tfmsize = offsetof(struct crypto_lskcipher, base),
};

static void crypto_lskcipher_exit_tfm_sg(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_lskcipher(*ctx);
}

int crypto_init_lskcipher_ops_sg(struct crypto_tfm *tfm)
{
	struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_lskcipher *skcipher;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	skcipher = crypto_create_tfm(calg, &crypto_lskcipher_type);
	if (IS_ERR(skcipher)) {
		crypto_mod_put(calg);
		return PTR_ERR(skcipher);
	}

	*ctx = skcipher;
	tfm->exit = crypto_lskcipher_exit_tfm_sg;

	return 0;
}

int crypto_grab_lskcipher(struct crypto_lskcipher_spawn *spawn,
			  struct crypto_instance *inst,
			  const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_lskcipher_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_lskcipher);

struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
						u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_lskcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_lskcipher);

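/*
 * Illustrative sketch, not part of this file: allocating and freeing
 * an lskcipher transform by algorithm name.  "ecb(aes)" is only an
 * example; any registered lskcipher algorithm may be requested.
 *
 *	struct crypto_lskcipher *tfm;
 *
 *	tfm = crypto_alloc_lskcipher("ecb(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_lskcipher(tfm);
 */
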
static int lskcipher_prepare_alg(struct lskcipher_alg *alg)
{
	struct crypto_alg *base = &alg->co.base;
	int err;

	err = skcipher_prepare_alg_common(&alg->co);
	if (err)
		return err;

	/* The chunk size must be a power of two. */
	if (alg->co.chunksize & (alg->co.chunksize - 1))
		return -EINVAL;

	base->cra_type = &crypto_lskcipher_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_LSKCIPHER;

	return 0;
}

int crypto_register_lskcipher(struct lskcipher_alg *alg)
{
	struct crypto_alg *base = &alg->co.base;
	int err;

	err = lskcipher_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_lskcipher);

void crypto_unregister_lskcipher(struct lskcipher_alg *alg)
{
	crypto_unregister_alg(&alg->co.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_lskcipher);

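/*
 * Illustrative sketch, not part of this file: a module registering a
 * single lskcipher algorithm.  Every "myalg_*" name is hypothetical;
 * the .co fields are the ones validated by lskcipher_prepare_alg()
 * above.
 *
 *	static struct lskcipher_alg myalg_alg = {
 *		.setkey		= myalg_setkey,
 *		.encrypt	= myalg_encrypt,
 *		.decrypt	= myalg_decrypt,
 *		.co.min_keysize	= 16,
 *		.co.max_keysize	= 32,
 *		.co.base = {
 *			.cra_name		= "myalg",
 *			.cra_driver_name	= "myalg-generic",
 *			.cra_priority		= 100,
 *			.cra_blocksize		= 16,
 *			.cra_module		= THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init myalg_mod_init(void)
 *	{
 *		return crypto_register_lskcipher(&myalg_alg);
 *	}
 *
 *	static void __exit myalg_mod_exit(void)
 *	{
 *		crypto_unregister_lskcipher(&myalg_alg);
 *	}
 */
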
int crypto_register_lskciphers(struct lskcipher_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_lskcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_lskcipher(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_lskciphers);

void crypto_unregister_lskciphers(struct lskcipher_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_lskcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_lskciphers);

int lskcipher_register_instance(struct crypto_template *tmpl,
				struct lskcipher_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = lskcipher_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, lskcipher_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(lskcipher_register_instance);

static int lskcipher_setkey_simple(struct crypto_lskcipher *tfm, const u8 *key,
				   unsigned int keylen)
{
	struct crypto_lskcipher *cipher = lskcipher_cipher_simple(tfm);

	crypto_lskcipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
	crypto_lskcipher_set_flags(cipher, crypto_lskcipher_get_flags(tfm) &
				   CRYPTO_TFM_REQ_MASK);
	return crypto_lskcipher_setkey(cipher, key, keylen);
}

static int lskcipher_init_tfm_simple(struct crypto_lskcipher *tfm)
{
	struct lskcipher_instance *inst = lskcipher_alg_instance(tfm);
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);
	struct crypto_lskcipher_spawn *spawn;
	struct crypto_lskcipher *cipher;

	spawn = lskcipher_instance_ctx(inst);
	cipher = crypto_spawn_lskcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	*ctx = cipher;
	return 0;
}

static void lskcipher_exit_tfm_simple(struct crypto_lskcipher *tfm)
{
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

	crypto_free_lskcipher(*ctx);
}

static void lskcipher_free_instance_simple(struct lskcipher_instance *inst)
{
	crypto_drop_lskcipher(lskcipher_instance_ctx(inst));
	kfree(inst);
}

/**
 * lskcipher_alloc_instance_simple - allocate instance of simple block cipher
 *
 * Allocate an lskcipher_instance for a simple block cipher mode of operation,
 * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
 * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
 * alignmask, and priority are set from the underlying cipher but can be
 * overridden if needed.  The tfm context defaults to
 * struct crypto_lskcipher *, and default ->setkey(), ->init(), and
 * ->exit() methods are installed.
 *
 * @tmpl: the template being instantiated
 * @tb: the template parameters
 *
 * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
 *	   needs to register the instance.
 */
struct lskcipher_instance *lskcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	struct lskcipher_instance *inst;
	struct crypto_lskcipher_spawn *spawn;
	char ecb_name[CRYPTO_MAX_ALG_NAME];
	struct lskcipher_alg *cipher_alg;
	const char *cipher_name;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_LSKCIPHER, &mask);
	if (err)
		return ERR_PTR(err);

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return ERR_CAST(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = lskcipher_instance_ctx(inst);
	err = crypto_grab_lskcipher(spawn,
				    lskcipher_crypto_instance(inst),
				    cipher_name, 0, mask);

	/*
	 * If the cipher does not exist as an lskcipher in its own right,
	 * retry with the name wrapped in "ecb()", unless this template is
	 * ecb itself.
	 */
	ecb_name[0] = 0;
	if (err == -ENOENT && memcmp(tmpl->name, "ecb", 4)) {
		err = -ENAMETOOLONG;
		if (snprintf(ecb_name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_lskcipher(spawn,
					    lskcipher_crypto_instance(inst),
					    ecb_name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	cipher_alg = crypto_lskcipher_spawn_alg(spawn);

	err = crypto_inst_setname(lskcipher_crypto_instance(inst), tmpl->name,
				  &cipher_alg->co.base);
	if (err)
		goto err_free_inst;

	if (ecb_name[0]) {
		int len;

		/* Strip the "ecb(...)" wrapper off the resolved cra_name. */
		err = -EINVAL;
		len = strscpy(ecb_name, &cipher_alg->co.base.cra_name[4],
			      sizeof(ecb_name));
		if (len < 2)
			goto err_free_inst;

		if (ecb_name[len - 1] != ')')
			goto err_free_inst;

		ecb_name[len - 1] = 0;

		err = -ENAMETOOLONG;
		if (snprintf(inst->alg.co.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, ecb_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		if (strcmp(ecb_name, cipher_name) &&
		    snprintf(inst->alg.co.base.cra_driver_name,
			     CRYPTO_MAX_ALG_NAME,
			     "%s(%s)", tmpl->name, cipher_name) >=
		    CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;
	} else {
		/* Don't allow nesting. */
		err = -ELOOP;
		if ((cipher_alg->co.base.cra_flags & CRYPTO_ALG_INSTANCE))
			goto err_free_inst;
	}

	/* The underlying cipher must be IV-less; the mode adds its own IV. */
	err = -EINVAL;
	if (cipher_alg->co.ivsize)
		goto err_free_inst;

	inst->free = lskcipher_free_instance_simple;

	/* Default algorithm properties, can be overridden */
	inst->alg.co.base.cra_blocksize = cipher_alg->co.base.cra_blocksize;
	inst->alg.co.base.cra_alignmask = cipher_alg->co.base.cra_alignmask;
	inst->alg.co.base.cra_priority = cipher_alg->co.base.cra_priority;
	inst->alg.co.min_keysize = cipher_alg->co.min_keysize;
	inst->alg.co.max_keysize = cipher_alg->co.max_keysize;
	inst->alg.co.ivsize = cipher_alg->co.base.cra_blocksize;
	inst->alg.co.statesize = cipher_alg->co.statesize;

	/* Use struct crypto_lskcipher * by default, can be overridden */
	inst->alg.co.base.cra_ctxsize = sizeof(struct crypto_lskcipher *);
	inst->alg.setkey = lskcipher_setkey_simple;
	inst->alg.init = lskcipher_init_tfm_simple;
	inst->alg.exit = lskcipher_exit_tfm_simple;

	return inst;

err_free_inst:
	lskcipher_free_instance_simple(inst);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(lskcipher_alloc_instance_simple);
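
/*
 * Illustrative sketch, not part of this file: a template create() hook
 * built on lskcipher_alloc_instance_simple(), in the style of a simple
 * mode such as ecb.  "mymode_encrypt" and "mymode_decrypt" are
 * hypothetical.
 *
 *	static int mymode_create(struct crypto_template *tmpl,
 *				 struct rtattr **tb)
 *	{
 *		struct lskcipher_instance *inst;
 *		int err;
 *
 *		inst = lskcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = mymode_encrypt;
 *		inst->alg.decrypt = mymode_decrypt;
 *
 *		err = lskcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */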