// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017,2019
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

/*
 * Key blobs smaller or larger than these limits are rejected
 * by the common code even before the individual setkey function
 * is called. As paes can handle different kinds of key blobs
 * and padding is also possible, the limits need to be generous.
 */
#define PAES_MIN_KEYSIZE 64
#define PAES_MAX_KEYSIZE 256

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
	/*
	 * Small keys are stored in keybuf; larger keys are kept in
	 * separately allocated memory. In both cases, key points to
	 * the memory where the key material is stored. The code
	 * distinguishes the two cases by checking keylen against
	 * sizeof(keybuf). See the two following helper functions.
	 */
	u8 *key;
	u8 keybuf[128];
	unsigned int keylen;
};

static inline int _copy_key_to_kb(struct key_blob *kb,
				  const u8 *key,
				  unsigned int keylen)
{
	if (keylen <= sizeof(kb->keybuf)) {
		kb->key = kb->keybuf;
	} else {
		kb->key = kmalloc(keylen, GFP_KERNEL);
		if (!kb->key)
			return -ENOMEM;
	}
	memcpy(kb->key, key, keylen);
	kb->keylen = keylen;

	return 0;
}

static inline void _free_kb_keybuf(struct key_blob *kb)
{
	if (kb->key && kb->key != kb->keybuf &&
	    kb->keylen > sizeof(kb->keybuf)) {
		kfree(kb->key);
		kb->key = NULL;
	}
}

struct s390_paes_ctx {
	struct key_blob kb;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct key_blob kb[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

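/*
 * Convert the raw key material in the key blob into a CPACF
 * protected key via the pkey layer.
 */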
static inline int __paes_convert_key(struct key_blob *kb,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

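/*
 * (Re-)derive the protected key from the key blob and select the
 * CPACF KM function code that matches the resulting key type.
 */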
static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_init(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb.key = NULL;

	return 0;
}

static void ecb_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
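		/*
		 * If the instruction processed fewer than n bytes, the
		 * protected key has likely become unusable (e.g. after
		 * the wrapping key changed); convert it again and retry.
		 */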
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_paes_alg = {
	.cra_name		=	"ecb(paes)",
	.cra_driver_name	=	"ecb-paes-s390",
	.cra_priority		=	401,	/* combo: aes + ecb + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_init		=	ecb_paes_init,
	.cra_exit		=	ecb_paes_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	PAES_MIN_KEYSIZE,
			.max_keysize		=	PAES_MAX_KEYSIZE,
			.setkey			=	ecb_paes_set_key,
			.encrypt		=	ecb_paes_encrypt,
			.decrypt		=	ecb_paes_decrypt,
		}
	}
};

static int cbc_paes_init(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb.key = NULL;

	return 0;
}

static void cbc_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
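	/*
	 * The KMC instruction takes the chaining value (IV) and the
	 * protected key in a single parameter block.
	 */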
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		=	"cbc(paes)",
	.cra_driver_name	=	"cbc-paes-s390",
	.cra_priority		=	402,	/* ecb-paes-s390 + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_init		=	cbc_paes_init,
	.cra_exit		=	cbc_paes_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	PAES_MIN_KEYSIZE,
			.max_keysize		=	PAES_MAX_KEYSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_paes_set_key,
			.encrypt		=	cbc_paes_encrypt,
			.decrypt		=	cbc_paes_decrypt,
		}
	}
};

static int xts_paes_init(struct crypto_tfm *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb[0].key = NULL;
	ctx->kb[1].key = NULL;

	return 0;
}

static void xts_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
}

static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->kb[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int xts_key_len)
{
	int rc;
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len, key_len;

	if (xts_key_len % 2)
		return -EINVAL;

	key_len = xts_key_len / 2;

	_free_kb_keybuf(&ctx->kb[0]);
	_free_kb_keybuf(&ctx->kb[1]);
	rc = _copy_key_to_kb(&ctx->kb[0], in_key, key_len);
	if (rc)
		return rc;
	rc = _copy_key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
	if (rc)
		return rc;

	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies that the key length is not odd and that
	 * the two key halves are not identical. This check can be done
	 * on the two protected keys as well.
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2 * ckey_len);
}

static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
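	/*
	 * PCC derives the initial XTS parameter (the xts field below)
	 * from the tweak using the second protected key; KM then
	 * encrypts or decrypts with the first key and that parameter.
	 */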
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
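	/*
	 * An AES-128 protected key is only 48 bytes, so the parameter
	 * block is right-aligned within the 64-byte key field: it
	 * starts at offset 16 and the remaining fields line up.
	 */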
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
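		/*
		 * On a partial completion the protected keys have likely
		 * become unusable; convert them again and recompute the
		 * PCC parameters before retrying.
		 */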
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		=	"xts(paes)",
	.cra_driver_name	=	"xts-paes-s390",
	.cra_priority		=	402,	/* ecb-paes-s390 + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_pxts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_init		=	xts_paes_init,
	.cra_exit		=	xts_paes_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * PAES_MIN_KEYSIZE,
			.max_keysize		=	2 * PAES_MAX_KEYSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_paes_set_key,
			.encrypt		=	xts_paes_encrypt,
			.decrypt		=	xts_paes_decrypt,
		}
	}
};

static int ctr_paes_init(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->kb.key = NULL;

	return 0;
}

static void ctr_paes_exit(struct crypto_tfm *tfm)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
}

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->kb, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	int rc;
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	_free_kb_keybuf(&ctx->kb);
	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
	if (rc)
		return rc;

	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

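/*
 * Fill the counter block page with a sequence of counter values
 * derived from iv. Returns the number of bytes covered, i.e. the
 * number of counter blocks times AES_BLOCK_SIZE.
 */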
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

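	/*
	 * Use the shared counter block page only if the lock can be
	 * taken without sleeping; otherwise fall back to processing
	 * one block at a time via walk->iv.
	 */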
	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2 * AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
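		/*
		 * kmctr processed k bytes: if the counter page was used,
		 * copy the last counter consumed back to walk->iv, then
		 * advance the IV to the next counter value.
		 */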
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/* final block may be < AES_BLOCK_SIZE, copy only nbytes */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		=	"ctr(paes)",
	.cra_driver_name	=	"ctr-paes-s390",
	.cra_priority		=	402,	/* ecb-paes-s390 + 1 */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_init		=	ctr_paes_init,
	.cra_exit		=	ctr_paes_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	PAES_MIN_KEYSIZE,
			.max_keysize		=	PAES_MAX_KEYSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_paes_set_key,
			.encrypt		=	ctr_paes_encrypt,
			.decrypt		=	ctr_paes_decrypt,
		}
	}
};

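/*
 * The algorithms below are registered conditionally in
 * paes_s390_init(), so on module exit only unregister those that
 * actually made it onto the crypto list.
 */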
static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");