// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <asm/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;
	u8 blocksz;
	u32 ivmask;
	u32 ctrinit;

	__le32 key[16];
	u32 nonce;
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;

	struct crypto_cipher *hkaes;
	struct crypto_aead *fback;
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated with the request */
	unsigned int rdescs;
	bool needs_inv;
	int  nr_src, nr_dst;
};

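/*
 * Fill the per-packet IV into the command descriptor token area. Depending
 * on the mode this is a nonce+IV+counter triplet (CTR), a counter plus
 * 96-bit nonce (ChaCha20) or the raw IV (block modes). Returns the number
 * of 32-bit token words consumed.
 */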
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}

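/*
 * Build the instruction token for a plain skcipher request: load the IV,
 * then emit a single DIRECTION instruction that sends the whole payload
 * through the crypto engine and out to the result buffer.
 */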
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}

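/*
 * Load the AEAD IV into the command descriptor token area, covering the
 * RFC3686/ESP nonce+IV+counter layout, the 96-bit GCM/ChaCha20 layout and
 * plain CBC IVs.
 */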
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

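/*
 * Build the (potentially long) instruction token for an AEAD request:
 * IV load, optional CCM B0 block construction, AAD and payload DIRECTION
 * instructions, plus ICV insert (encrypt) or retrieve+verify (decrypt)
 * instructions. The final token length is patched into the command
 * descriptor at the end.
 */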
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}

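/*
 * Set the AES key for a plain skcipher transform. If the engine may have
 * cached an older context record (EIP197 transform record cache), flag it
 * for invalidation when the key actually changed.
 */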
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

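/*
 * Set the combined authenc() key: split it into encryption and
 * authentication parts, validate the cipher key, derive the HMAC
 * ipad/opad state through the matching safexcel hash algorithm, and
 * store everything in the context.
 */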
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL, i;
	const char *alg;

	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_DES:
		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_SM4:
		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) !=
			    ((u32 *)keys.enckey)[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		alg = "safexcel-sha1";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		alg = "safexcel-sha224";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		alg = "safexcel-sha256";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		alg = "safexcel-sha384";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		alg = "safexcel-sha512";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
		alg = "safexcel-sm3";
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
				 alg, ctx->state_sz))
		goto badkey;

	/* Now copy the keys into the context */
	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
	ctx->key_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

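/*
 * Fill in the control words of the first command descriptor: operation
 * type and direction, key/digest context size and the cipher and hash
 * algorithm selection bits the engine expects.
 */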
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take into account the ipad+opad digests */
		if (ctx->xcm) {
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}

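/*
 * Handle the result descriptors of a completed cipher request: collect
 * status, unmap the DMA buffers and, for CBC encryption, copy the last
 * output block back into the request IV for chaining.
 */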
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

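/*
 * Build and queue all command and result descriptors for one cipher or
 * AEAD request: map the scatterlists, emit one command descriptor per
 * source segment (with the token attached to the first one) and one
 * result descriptor per destination segment, rolling everything back
 * on error.
 */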
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of an in-place
		 * operation as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember the actual input length; the source buffer length may
	 * be updated below in case of an in-place operation.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}

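/*
 * Handle the result of a context invalidation request: free the context
 * record if the transform is being torn down, otherwise re-queue the
 * original request on a freshly selected ring.
 */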
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

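/*
 * Emit the single command/result descriptor pair that asks the engine to
 * invalidate its cached copy of this transform's context record.
 */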
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}

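/*
 * Synchronously invalidate the context record: queue a dummy request
 * flagged with needs_inv and wait for its completion callback. Only
 * needed on engines with a transform record cache (EIP197).
 */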
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

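/*
 * Common entry point for encrypt/decrypt: allocate the per-transform
 * context record on first use, mark a pending invalidation if the key
 * changed, then enqueue the request on the transform's ring.
 */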
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT);
}

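/*
 * Common skcipher transform init: set the request context size, hook up
 * the send/result handlers and pick default IV handling options.
 */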
static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->base.priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
1598 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1599 		if (memcmp(ctx->key, key, len))
1600 			ctx->base.needs_inv = true;
1601 
1602 	memcpy(ctx->key, key, len);
1603 	ctx->key_len = len;
1604 
1605 	return 0;
1606 }
1607 
safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm * tfm)1608 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1609 {
1610 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1611 
1612 	safexcel_skcipher_cra_init(tfm);
1613 	ctx->alg  = SAFEXCEL_3DES;
1614 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1615 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1616 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1617 	return 0;
1618 }
1619 
1620 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1621 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1622 	.algo_mask = SAFEXCEL_ALG_DES,
1623 	.alg.skcipher = {
1624 		.setkey = safexcel_des3_ede_setkey,
1625 		.encrypt = safexcel_encrypt,
1626 		.decrypt = safexcel_decrypt,
1627 		.min_keysize = DES3_EDE_KEY_SIZE,
1628 		.max_keysize = DES3_EDE_KEY_SIZE,
1629 		.ivsize = DES3_EDE_BLOCK_SIZE,
1630 		.base = {
1631 			.cra_name = "cbc(des3_ede)",
1632 			.cra_driver_name = "safexcel-cbc-des3_ede",
1633 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1634 			.cra_flags = CRYPTO_ALG_ASYNC |
1635 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1636 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1637 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1638 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1639 			.cra_alignmask = 0,
1640 			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
1641 			.cra_exit = safexcel_skcipher_cra_exit,
1642 			.cra_module = THIS_MODULE,
1643 		},
1644 	},
1645 };
1646 
1647 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1648 {
1649 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1650 
1651 	safexcel_skcipher_cra_init(tfm);
1652 	ctx->alg  = SAFEXCEL_3DES;
1653 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1654 	ctx->blocksz = 0;
1655 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1656 	return 0;
1657 }
1658 
1659 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1660 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1661 	.algo_mask = SAFEXCEL_ALG_DES,
1662 	.alg.skcipher = {
1663 		.setkey = safexcel_des3_ede_setkey,
1664 		.encrypt = safexcel_encrypt,
1665 		.decrypt = safexcel_decrypt,
1666 		.min_keysize = DES3_EDE_KEY_SIZE,
1667 		.max_keysize = DES3_EDE_KEY_SIZE,
1668 		.base = {
1669 			.cra_name = "ecb(des3_ede)",
1670 			.cra_driver_name = "safexcel-ecb-des3_ede",
1671 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1672 			.cra_flags = CRYPTO_ALG_ASYNC |
1673 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1674 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1675 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1676 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1677 			.cra_alignmask = 0,
1678 			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
1679 			.cra_exit = safexcel_skcipher_cra_exit,
1680 			.cra_module = THIS_MODULE,
1681 		},
1682 	},
1683 };
1684 
1685 static int safexcel_aead_encrypt(struct aead_request *req)
1686 {
1687 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1688 
1689 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1690 }
1691 
1692 static int safexcel_aead_decrypt(struct aead_request *req)
1693 {
1694 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1695 
1696 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1697 }
1698 
1699 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1700 {
1701 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1702 	struct safexcel_alg_template *tmpl =
1703 		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1704 			     alg.aead.base);
1705 
1706 	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1707 				sizeof(struct safexcel_cipher_req));
1708 
1709 	ctx->base.priv = tmpl->priv;
1710 
1711 	ctx->alg  = SAFEXCEL_AES; /* default */
1712 	ctx->blocksz = AES_BLOCK_SIZE;
1713 	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1714 	ctx->ctrinit = 1;
1715 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1716 	ctx->aead = true;
1717 	ctx->base.send = safexcel_aead_send;
1718 	ctx->base.handle_result = safexcel_aead_handle_result;
1719 	return 0;
1720 }
1721 
1722 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1723 {
1724 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1725 
1726 	safexcel_aead_cra_init(tfm);
1727 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1728 	ctx->state_sz = SHA1_DIGEST_SIZE;
1729 	return 0;
1730 }
1731 
1732 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1733 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1734 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1735 	.alg.aead = {
1736 		.setkey = safexcel_aead_setkey,
1737 		.encrypt = safexcel_aead_encrypt,
1738 		.decrypt = safexcel_aead_decrypt,
1739 		.ivsize = AES_BLOCK_SIZE,
1740 		.maxauthsize = SHA1_DIGEST_SIZE,
1741 		.base = {
1742 			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1743 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1744 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1745 			.cra_flags = CRYPTO_ALG_ASYNC |
1746 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1747 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1748 			.cra_blocksize = AES_BLOCK_SIZE,
1749 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1750 			.cra_alignmask = 0,
1751 			.cra_init = safexcel_aead_sha1_cra_init,
1752 			.cra_exit = safexcel_aead_cra_exit,
1753 			.cra_module = THIS_MODULE,
1754 		},
1755 	},
1756 };
1757 
1758 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1759 {
1760 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1761 
1762 	safexcel_aead_cra_init(tfm);
1763 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1764 	ctx->state_sz = SHA256_DIGEST_SIZE;
1765 	return 0;
1766 }
1767 
1768 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1769 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1770 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1771 	.alg.aead = {
1772 		.setkey = safexcel_aead_setkey,
1773 		.encrypt = safexcel_aead_encrypt,
1774 		.decrypt = safexcel_aead_decrypt,
1775 		.ivsize = AES_BLOCK_SIZE,
1776 		.maxauthsize = SHA256_DIGEST_SIZE,
1777 		.base = {
1778 			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1779 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1780 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1781 			.cra_flags = CRYPTO_ALG_ASYNC |
1782 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1783 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1784 			.cra_blocksize = AES_BLOCK_SIZE,
1785 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1786 			.cra_alignmask = 0,
1787 			.cra_init = safexcel_aead_sha256_cra_init,
1788 			.cra_exit = safexcel_aead_cra_exit,
1789 			.cra_module = THIS_MODULE,
1790 		},
1791 	},
1792 };
1793 
1794 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1795 {
1796 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1797 
1798 	safexcel_aead_cra_init(tfm);
1799 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1800 	ctx->state_sz = SHA256_DIGEST_SIZE;
1801 	return 0;
1802 }
1803 
1804 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1805 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1806 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1807 	.alg.aead = {
1808 		.setkey = safexcel_aead_setkey,
1809 		.encrypt = safexcel_aead_encrypt,
1810 		.decrypt = safexcel_aead_decrypt,
1811 		.ivsize = AES_BLOCK_SIZE,
1812 		.maxauthsize = SHA224_DIGEST_SIZE,
1813 		.base = {
1814 			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1815 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1816 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1817 			.cra_flags = CRYPTO_ALG_ASYNC |
1818 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1819 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1820 			.cra_blocksize = AES_BLOCK_SIZE,
1821 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1822 			.cra_alignmask = 0,
1823 			.cra_init = safexcel_aead_sha224_cra_init,
1824 			.cra_exit = safexcel_aead_cra_exit,
1825 			.cra_module = THIS_MODULE,
1826 		},
1827 	},
1828 };
1829 
1830 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1831 {
1832 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1833 
1834 	safexcel_aead_cra_init(tfm);
1835 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1836 	ctx->state_sz = SHA512_DIGEST_SIZE;
1837 	return 0;
1838 }
1839 
1840 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1841 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1842 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1843 	.alg.aead = {
1844 		.setkey = safexcel_aead_setkey,
1845 		.encrypt = safexcel_aead_encrypt,
1846 		.decrypt = safexcel_aead_decrypt,
1847 		.ivsize = AES_BLOCK_SIZE,
1848 		.maxauthsize = SHA512_DIGEST_SIZE,
1849 		.base = {
1850 			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1851 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1852 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1853 			.cra_flags = CRYPTO_ALG_ASYNC |
1854 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1855 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1856 			.cra_blocksize = AES_BLOCK_SIZE,
1857 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1858 			.cra_alignmask = 0,
1859 			.cra_init = safexcel_aead_sha512_cra_init,
1860 			.cra_exit = safexcel_aead_cra_exit,
1861 			.cra_module = THIS_MODULE,
1862 		},
1863 	},
1864 };
1865 
1866 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1867 {
1868 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1869 
1870 	safexcel_aead_cra_init(tfm);
1871 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1872 	ctx->state_sz = SHA512_DIGEST_SIZE;
1873 	return 0;
1874 }
1875 
1876 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1877 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1878 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1879 	.alg.aead = {
1880 		.setkey = safexcel_aead_setkey,
1881 		.encrypt = safexcel_aead_encrypt,
1882 		.decrypt = safexcel_aead_decrypt,
1883 		.ivsize = AES_BLOCK_SIZE,
1884 		.maxauthsize = SHA384_DIGEST_SIZE,
1885 		.base = {
1886 			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1887 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1888 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1889 			.cra_flags = CRYPTO_ALG_ASYNC |
1890 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1891 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1892 			.cra_blocksize = AES_BLOCK_SIZE,
1893 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1894 			.cra_alignmask = 0,
1895 			.cra_init = safexcel_aead_sha384_cra_init,
1896 			.cra_exit = safexcel_aead_cra_exit,
1897 			.cra_module = THIS_MODULE,
1898 		},
1899 	},
1900 };
1901 
1902 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1903 {
1904 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1905 
1906 	safexcel_aead_sha1_cra_init(tfm);
1907 	ctx->alg = SAFEXCEL_3DES; /* override default */
1908 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1909 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1910 	return 0;
1911 }
1912 
1913 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1914 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1915 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1916 	.alg.aead = {
1917 		.setkey = safexcel_aead_setkey,
1918 		.encrypt = safexcel_aead_encrypt,
1919 		.decrypt = safexcel_aead_decrypt,
1920 		.ivsize = DES3_EDE_BLOCK_SIZE,
1921 		.maxauthsize = SHA1_DIGEST_SIZE,
1922 		.base = {
1923 			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1924 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1925 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1926 			.cra_flags = CRYPTO_ALG_ASYNC |
1927 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1928 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1929 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1930 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1931 			.cra_alignmask = 0,
1932 			.cra_init = safexcel_aead_sha1_des3_cra_init,
1933 			.cra_exit = safexcel_aead_cra_exit,
1934 			.cra_module = THIS_MODULE,
1935 		},
1936 	},
1937 };
1938 
1939 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1940 {
1941 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1942 
1943 	safexcel_aead_sha256_cra_init(tfm);
1944 	ctx->alg = SAFEXCEL_3DES; /* override default */
1945 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1946 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1947 	return 0;
1948 }
1949 
1950 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1951 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1952 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1953 	.alg.aead = {
1954 		.setkey = safexcel_aead_setkey,
1955 		.encrypt = safexcel_aead_encrypt,
1956 		.decrypt = safexcel_aead_decrypt,
1957 		.ivsize = DES3_EDE_BLOCK_SIZE,
1958 		.maxauthsize = SHA256_DIGEST_SIZE,
1959 		.base = {
1960 			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1961 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1962 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1963 			.cra_flags = CRYPTO_ALG_ASYNC |
1964 				     CRYPTO_ALG_ALLOCATES_MEMORY |
1965 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1966 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1967 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1968 			.cra_alignmask = 0,
1969 			.cra_init = safexcel_aead_sha256_des3_cra_init,
1970 			.cra_exit = safexcel_aead_cra_exit,
1971 			.cra_module = THIS_MODULE,
1972 		},
1973 	},
1974 };
1975 
1976 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1977 {
1978 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1979 
1980 	safexcel_aead_sha224_cra_init(tfm);
1981 	ctx->alg = SAFEXCEL_3DES; /* override default */
1982 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1983 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1984 	return 0;
1985 }
1986 
1987 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1988 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1989 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1990 	.alg.aead = {
1991 		.setkey = safexcel_aead_setkey,
1992 		.encrypt = safexcel_aead_encrypt,
1993 		.decrypt = safexcel_aead_decrypt,
1994 		.ivsize = DES3_EDE_BLOCK_SIZE,
1995 		.maxauthsize = SHA224_DIGEST_SIZE,
1996 		.base = {
1997 			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1998 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1999 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2000 			.cra_flags = CRYPTO_ALG_ASYNC |
2001 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2002 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2003 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2004 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2005 			.cra_alignmask = 0,
2006 			.cra_init = safexcel_aead_sha224_des3_cra_init,
2007 			.cra_exit = safexcel_aead_cra_exit,
2008 			.cra_module = THIS_MODULE,
2009 		},
2010 	},
2011 };
2012 
2013 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2014 {
2015 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2016 
2017 	safexcel_aead_sha512_cra_init(tfm);
2018 	ctx->alg = SAFEXCEL_3DES; /* override default */
2019 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2020 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2021 	return 0;
2022 }
2023 
2024 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2025 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2026 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2027 	.alg.aead = {
2028 		.setkey = safexcel_aead_setkey,
2029 		.encrypt = safexcel_aead_encrypt,
2030 		.decrypt = safexcel_aead_decrypt,
2031 		.ivsize = DES3_EDE_BLOCK_SIZE,
2032 		.maxauthsize = SHA512_DIGEST_SIZE,
2033 		.base = {
2034 			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2035 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2036 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2037 			.cra_flags = CRYPTO_ALG_ASYNC |
2038 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2039 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2040 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2041 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2042 			.cra_alignmask = 0,
2043 			.cra_init = safexcel_aead_sha512_des3_cra_init,
2044 			.cra_exit = safexcel_aead_cra_exit,
2045 			.cra_module = THIS_MODULE,
2046 		},
2047 	},
2048 };
2049 
2050 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2051 {
2052 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2053 
2054 	safexcel_aead_sha384_cra_init(tfm);
2055 	ctx->alg = SAFEXCEL_3DES; /* override default */
2056 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2057 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2058 	return 0;
2059 }
2060 
2061 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2062 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2063 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2064 	.alg.aead = {
2065 		.setkey = safexcel_aead_setkey,
2066 		.encrypt = safexcel_aead_encrypt,
2067 		.decrypt = safexcel_aead_decrypt,
2068 		.ivsize = DES3_EDE_BLOCK_SIZE,
2069 		.maxauthsize = SHA384_DIGEST_SIZE,
2070 		.base = {
2071 			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2072 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2073 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2074 			.cra_flags = CRYPTO_ALG_ASYNC |
2075 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2076 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2077 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2078 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2079 			.cra_alignmask = 0,
2080 			.cra_init = safexcel_aead_sha384_des3_cra_init,
2081 			.cra_exit = safexcel_aead_cra_exit,
2082 			.cra_module = THIS_MODULE,
2083 		},
2084 	},
2085 };
2086 
2087 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2088 {
2089 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2090 
2091 	safexcel_aead_sha1_cra_init(tfm);
2092 	ctx->alg = SAFEXCEL_DES; /* override default */
2093 	ctx->blocksz = DES_BLOCK_SIZE;
2094 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2095 	return 0;
2096 }
2097 
2098 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2099 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2100 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2101 	.alg.aead = {
2102 		.setkey = safexcel_aead_setkey,
2103 		.encrypt = safexcel_aead_encrypt,
2104 		.decrypt = safexcel_aead_decrypt,
2105 		.ivsize = DES_BLOCK_SIZE,
2106 		.maxauthsize = SHA1_DIGEST_SIZE,
2107 		.base = {
2108 			.cra_name = "authenc(hmac(sha1),cbc(des))",
2109 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2110 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2111 			.cra_flags = CRYPTO_ALG_ASYNC |
2112 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2113 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2114 			.cra_blocksize = DES_BLOCK_SIZE,
2115 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2116 			.cra_alignmask = 0,
2117 			.cra_init = safexcel_aead_sha1_des_cra_init,
2118 			.cra_exit = safexcel_aead_cra_exit,
2119 			.cra_module = THIS_MODULE,
2120 		},
2121 	},
2122 };
2123 
2124 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2125 {
2126 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2127 
2128 	safexcel_aead_sha256_cra_init(tfm);
2129 	ctx->alg = SAFEXCEL_DES; /* override default */
2130 	ctx->blocksz = DES_BLOCK_SIZE;
2131 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2132 	return 0;
2133 }
2134 
2135 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2136 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2137 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2138 	.alg.aead = {
2139 		.setkey = safexcel_aead_setkey,
2140 		.encrypt = safexcel_aead_encrypt,
2141 		.decrypt = safexcel_aead_decrypt,
2142 		.ivsize = DES_BLOCK_SIZE,
2143 		.maxauthsize = SHA256_DIGEST_SIZE,
2144 		.base = {
2145 			.cra_name = "authenc(hmac(sha256),cbc(des))",
2146 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2147 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2148 			.cra_flags = CRYPTO_ALG_ASYNC |
2149 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2150 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2151 			.cra_blocksize = DES_BLOCK_SIZE,
2152 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2153 			.cra_alignmask = 0,
2154 			.cra_init = safexcel_aead_sha256_des_cra_init,
2155 			.cra_exit = safexcel_aead_cra_exit,
2156 			.cra_module = THIS_MODULE,
2157 		},
2158 	},
2159 };
2160 
2161 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2162 {
2163 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2164 
2165 	safexcel_aead_sha224_cra_init(tfm);
2166 	ctx->alg = SAFEXCEL_DES; /* override default */
2167 	ctx->blocksz = DES_BLOCK_SIZE;
2168 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2169 	return 0;
2170 }
2171 
2172 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2173 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2174 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2175 	.alg.aead = {
2176 		.setkey = safexcel_aead_setkey,
2177 		.encrypt = safexcel_aead_encrypt,
2178 		.decrypt = safexcel_aead_decrypt,
2179 		.ivsize = DES_BLOCK_SIZE,
2180 		.maxauthsize = SHA224_DIGEST_SIZE,
2181 		.base = {
2182 			.cra_name = "authenc(hmac(sha224),cbc(des))",
2183 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2184 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2185 			.cra_flags = CRYPTO_ALG_ASYNC |
2186 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2187 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2188 			.cra_blocksize = DES_BLOCK_SIZE,
2189 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2190 			.cra_alignmask = 0,
2191 			.cra_init = safexcel_aead_sha224_des_cra_init,
2192 			.cra_exit = safexcel_aead_cra_exit,
2193 			.cra_module = THIS_MODULE,
2194 		},
2195 	},
2196 };
2197 
2198 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2199 {
2200 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2201 
2202 	safexcel_aead_sha512_cra_init(tfm);
2203 	ctx->alg = SAFEXCEL_DES; /* override default */
2204 	ctx->blocksz = DES_BLOCK_SIZE;
2205 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2206 	return 0;
2207 }
2208 
2209 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2210 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2211 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2212 	.alg.aead = {
2213 		.setkey = safexcel_aead_setkey,
2214 		.encrypt = safexcel_aead_encrypt,
2215 		.decrypt = safexcel_aead_decrypt,
2216 		.ivsize = DES_BLOCK_SIZE,
2217 		.maxauthsize = SHA512_DIGEST_SIZE,
2218 		.base = {
2219 			.cra_name = "authenc(hmac(sha512),cbc(des))",
2220 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2221 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2222 			.cra_flags = CRYPTO_ALG_ASYNC |
2223 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2224 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2225 			.cra_blocksize = DES_BLOCK_SIZE,
2226 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2227 			.cra_alignmask = 0,
2228 			.cra_init = safexcel_aead_sha512_des_cra_init,
2229 			.cra_exit = safexcel_aead_cra_exit,
2230 			.cra_module = THIS_MODULE,
2231 		},
2232 	},
2233 };
2234 
2235 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2236 {
2237 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2238 
2239 	safexcel_aead_sha384_cra_init(tfm);
2240 	ctx->alg = SAFEXCEL_DES; /* override default */
2241 	ctx->blocksz = DES_BLOCK_SIZE;
2242 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2243 	return 0;
2244 }
2245 
2246 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2247 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2248 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2249 	.alg.aead = {
2250 		.setkey = safexcel_aead_setkey,
2251 		.encrypt = safexcel_aead_encrypt,
2252 		.decrypt = safexcel_aead_decrypt,
2253 		.ivsize = DES_BLOCK_SIZE,
2254 		.maxauthsize = SHA384_DIGEST_SIZE,
2255 		.base = {
2256 			.cra_name = "authenc(hmac(sha384),cbc(des))",
2257 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2258 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2259 			.cra_flags = CRYPTO_ALG_ASYNC |
2260 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2261 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2262 			.cra_blocksize = DES_BLOCK_SIZE,
2263 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2264 			.cra_alignmask = 0,
2265 			.cra_init = safexcel_aead_sha384_des_cra_init,
2266 			.cra_exit = safexcel_aead_cra_exit,
2267 			.cra_module = THIS_MODULE,
2268 		},
2269 	},
2270 };
2271 
2272 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2273 {
2274 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2275 
2276 	safexcel_aead_sha1_cra_init(tfm);
2277 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2278 	return 0;
2279 }
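
/*
 * For reference: the CTR_LOAD transforms below follow the RFC 3686
 * counter-block convention (4-byte nonce | 8-byte per-request IV | 32-bit
 * big-endian counter starting at 1), matching what safexcel_skcipher_iv()
 * programs into the command token. A software sketch of that block
 * (illustrative only, not used by the driver):
 */
#if 0
static void example_rfc3686_block(u8 block[AES_BLOCK_SIZE],
				  const u8 nonce[4], const u8 iv[8])
{
	memcpy(block, nonce, 4);            /* salt from the end of the key */
	memcpy(block + 4, iv, 8);           /* per-request IV */
	put_unaligned_be32(1, block + 12);  /* counter starts at 1 */
}
#endif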
2280 
2281 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2282 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2283 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2284 	.alg.aead = {
2285 		.setkey = safexcel_aead_setkey,
2286 		.encrypt = safexcel_aead_encrypt,
2287 		.decrypt = safexcel_aead_decrypt,
2288 		.ivsize = CTR_RFC3686_IV_SIZE,
2289 		.maxauthsize = SHA1_DIGEST_SIZE,
2290 		.base = {
2291 			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2292 			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2293 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2294 			.cra_flags = CRYPTO_ALG_ASYNC |
2295 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2296 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2297 			.cra_blocksize = 1,
2298 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2299 			.cra_alignmask = 0,
2300 			.cra_init = safexcel_aead_sha1_ctr_cra_init,
2301 			.cra_exit = safexcel_aead_cra_exit,
2302 			.cra_module = THIS_MODULE,
2303 		},
2304 	},
2305 };
2306 
2307 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2308 {
2309 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2310 
2311 	safexcel_aead_sha256_cra_init(tfm);
2312 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2313 	return 0;
2314 }
2315 
2316 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2317 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2318 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2319 	.alg.aead = {
2320 		.setkey = safexcel_aead_setkey,
2321 		.encrypt = safexcel_aead_encrypt,
2322 		.decrypt = safexcel_aead_decrypt,
2323 		.ivsize = CTR_RFC3686_IV_SIZE,
2324 		.maxauthsize = SHA256_DIGEST_SIZE,
2325 		.base = {
2326 			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2327 			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2328 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2329 			.cra_flags = CRYPTO_ALG_ASYNC |
2330 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2331 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2332 			.cra_blocksize = 1,
2333 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2334 			.cra_alignmask = 0,
2335 			.cra_init = safexcel_aead_sha256_ctr_cra_init,
2336 			.cra_exit = safexcel_aead_cra_exit,
2337 			.cra_module = THIS_MODULE,
2338 		},
2339 	},
2340 };
2341 
2342 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2343 {
2344 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2345 
2346 	safexcel_aead_sha224_cra_init(tfm);
2347 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2348 	return 0;
2349 }
2350 
2351 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2352 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2353 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2354 	.alg.aead = {
2355 		.setkey = safexcel_aead_setkey,
2356 		.encrypt = safexcel_aead_encrypt,
2357 		.decrypt = safexcel_aead_decrypt,
2358 		.ivsize = CTR_RFC3686_IV_SIZE,
2359 		.maxauthsize = SHA224_DIGEST_SIZE,
2360 		.base = {
2361 			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2362 			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2363 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2364 			.cra_flags = CRYPTO_ALG_ASYNC |
2365 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2366 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2367 			.cra_blocksize = 1,
2368 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2369 			.cra_alignmask = 0,
2370 			.cra_init = safexcel_aead_sha224_ctr_cra_init,
2371 			.cra_exit = safexcel_aead_cra_exit,
2372 			.cra_module = THIS_MODULE,
2373 		},
2374 	},
2375 };
2376 
2377 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2378 {
2379 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2380 
2381 	safexcel_aead_sha512_cra_init(tfm);
2382 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2383 	return 0;
2384 }
2385 
2386 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2387 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2388 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2389 	.alg.aead = {
2390 		.setkey = safexcel_aead_setkey,
2391 		.encrypt = safexcel_aead_encrypt,
2392 		.decrypt = safexcel_aead_decrypt,
2393 		.ivsize = CTR_RFC3686_IV_SIZE,
2394 		.maxauthsize = SHA512_DIGEST_SIZE,
2395 		.base = {
2396 			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2397 			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2398 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2399 			.cra_flags = CRYPTO_ALG_ASYNC |
2400 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2401 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2402 			.cra_blocksize = 1,
2403 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2404 			.cra_alignmask = 0,
2405 			.cra_init = safexcel_aead_sha512_ctr_cra_init,
2406 			.cra_exit = safexcel_aead_cra_exit,
2407 			.cra_module = THIS_MODULE,
2408 		},
2409 	},
2410 };
2411 
2412 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2413 {
2414 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2415 
2416 	safexcel_aead_sha384_cra_init(tfm);
2417 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2418 	return 0;
2419 }
2420 
2421 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2422 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2423 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2424 	.alg.aead = {
2425 		.setkey = safexcel_aead_setkey,
2426 		.encrypt = safexcel_aead_encrypt,
2427 		.decrypt = safexcel_aead_decrypt,
2428 		.ivsize = CTR_RFC3686_IV_SIZE,
2429 		.maxauthsize = SHA384_DIGEST_SIZE,
2430 		.base = {
2431 			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2432 			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2433 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2434 			.cra_flags = CRYPTO_ALG_ASYNC |
2435 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2436 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2437 			.cra_blocksize = 1,
2438 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2439 			.cra_alignmask = 0,
2440 			.cra_init = safexcel_aead_sha384_ctr_cra_init,
2441 			.cra_exit = safexcel_aead_cra_exit,
2442 			.cra_module = THIS_MODULE,
2443 		},
2444 	},
2445 };
2446 
2447 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2448 					   const u8 *key, unsigned int len)
2449 {
2450 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2451 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2452 	struct safexcel_crypto_priv *priv = ctx->base.priv;
2453 	struct crypto_aes_ctx aes;
2454 	int ret, i;
2455 	unsigned int keylen;
2456 
2457 	/* Check for illegal XTS keys */
2458 	ret = xts_verify_key(ctfm, key, len);
2459 	if (ret)
2460 		return ret;
2461 
2462 	/* Only half of the key data is cipher key */
2463 	keylen = (len >> 1);
2464 	ret = aes_expandkey(&aes, key, keylen);
2465 	if (ret)
2466 		return ret;
2467 
2468 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2469 		for (i = 0; i < keylen / sizeof(u32); i++) {
2470 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2471 				ctx->base.needs_inv = true;
2472 				break;
2473 			}
2474 		}
2475 	}
2476 
2477 	for (i = 0; i < keylen / sizeof(u32); i++)
2478 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2479 
2480 	/* The other half is the tweak key */
2481 	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2482 	if (ret)
2483 		return ret;
2484 
2485 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2486 		for (i = 0; i < keylen / sizeof(u32); i++) {
2487 			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2488 			    aes.key_enc[i]) {
2489 				ctx->base.needs_inv = true;
2490 				break;
2491 			}
2492 		}
2493 	}
2494 
2495 	for (i = 0; i < keylen / sizeof(u32); i++)
2496 		ctx->key[i + keylen / sizeof(u32)] =
2497 			cpu_to_le32(aes.key_enc[i]);
2498 
2499 	ctx->key_len = keylen << 1;
2500 
2501 	memzero_explicit(&aes, sizeof(aes));
2502 	return 0;
2503 }
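
/*
 * The XTS key blob is two AES keys of equal size glued together; the setkey
 * above programs the first half as the cipher key and the second half as
 * the tweak key. A hypothetical software-side view of the same split
 * (names are illustrative):
 */
#if 0
static void example_xts_key_split(const u8 *key, unsigned int len,
				  const u8 **cipher_key,
				  const u8 **tweak_key, unsigned int *half)
{
	*half = len / 2;		/* e.g. 64-byte blob -> two AES-256 keys */
	*cipher_key = key;		/* data encryption key */
	*tweak_key = key + *half;	/* tweak (sector number) key */
}
#endif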
2504 
2505 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2506 {
2507 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2508 
2509 	safexcel_skcipher_cra_init(tfm);
2510 	ctx->alg  = SAFEXCEL_AES;
2511 	ctx->blocksz = AES_BLOCK_SIZE;
2512 	ctx->xts  = 1;
2513 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2514 	return 0;
2515 }
2516 
2517 static int safexcel_encrypt_xts(struct skcipher_request *req)
2518 {
2519 	if (req->cryptlen < XTS_BLOCK_SIZE)
2520 		return -EINVAL;
2521 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2522 				  SAFEXCEL_ENCRYPT);
2523 }
2524 
2525 static int safexcel_decrypt_xts(struct skcipher_request *req)
2526 {
2527 	if (req->cryptlen < XTS_BLOCK_SIZE)
2528 		return -EINVAL;
2529 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2530 				  SAFEXCEL_DECRYPT);
2531 }
2532 
2533 struct safexcel_alg_template safexcel_alg_xts_aes = {
2534 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2535 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2536 	.alg.skcipher = {
2537 		.setkey = safexcel_skcipher_aesxts_setkey,
2538 		.encrypt = safexcel_encrypt_xts,
2539 		.decrypt = safexcel_decrypt_xts,
2540 		/* XTS actually uses 2 AES keys glued together */
2541 		.min_keysize = AES_MIN_KEY_SIZE * 2,
2542 		.max_keysize = AES_MAX_KEY_SIZE * 2,
2543 		.ivsize = XTS_BLOCK_SIZE,
2544 		.base = {
2545 			.cra_name = "xts(aes)",
2546 			.cra_driver_name = "safexcel-xts-aes",
2547 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2548 			.cra_flags = CRYPTO_ALG_ASYNC |
2549 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2550 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2551 			.cra_blocksize = XTS_BLOCK_SIZE,
2552 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2553 			.cra_alignmask = 0,
2554 			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2555 			.cra_exit = safexcel_skcipher_cra_exit,
2556 			.cra_module = THIS_MODULE,
2557 		},
2558 	},
2559 };
2560 
2561 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2562 				    unsigned int len)
2563 {
2564 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2565 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2566 	struct safexcel_crypto_priv *priv = ctx->base.priv;
2567 	struct crypto_aes_ctx aes;
2568 	u32 hashkey[AES_BLOCK_SIZE >> 2];
2569 	int ret, i;
2570 
2571 	ret = aes_expandkey(&aes, key, len);
2572 	if (ret) {
2573 		memzero_explicit(&aes, sizeof(aes));
2574 		return ret;
2575 	}
2576 
2577 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2578 		for (i = 0; i < len / sizeof(u32); i++) {
2579 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2580 				ctx->base.needs_inv = true;
2581 				break;
2582 			}
2583 		}
2584 	}
2585 
2586 	for (i = 0; i < len / sizeof(u32); i++)
2587 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2588 
2589 	ctx->key_len = len;
2590 
2591 	/* Compute hash key by encrypting zeroes with cipher key */
2592 	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2593 	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2594 				CRYPTO_TFM_REQ_MASK);
2595 	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2596 	if (ret)
2597 		return ret;
2598 
2599 	memset(hashkey, 0, AES_BLOCK_SIZE);
2600 	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2601 
2602 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2603 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2604 			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2605 				ctx->base.needs_inv = true;
2606 				break;
2607 			}
2608 		}
2609 	}
2610 
2611 	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2612 		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2613 
2614 	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2615 	memzero_explicit(&aes, sizeof(aes));
2616 	return 0;
2617 }
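
/*
 * The GHASH hash key is H = AES-K(0^128), which is why the setkey above runs
 * one software AES block encryption over an all-zeroes block via ctx->hkaes.
 * A standalone sketch of the same computation (illustrative only; this
 * helper does not exist in the driver):
 */
#if 0
static int example_ghash_key(const u8 *key, unsigned int len,
			     u8 hashkey[AES_BLOCK_SIZE])
{
	struct crypto_cipher *aes = crypto_alloc_cipher("aes", 0, 0);
	int ret;

	if (IS_ERR(aes))
		return PTR_ERR(aes);
	ret = crypto_cipher_setkey(aes, key, len);
	if (!ret) {
		memset(hashkey, 0, AES_BLOCK_SIZE);
		/* H = E_K(0^128), consumed big-endian by the engine */
		crypto_cipher_encrypt_one(aes, hashkey, hashkey);
	}
	crypto_free_cipher(aes);
	return ret;
}
#endif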
2618 
2619 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2620 {
2621 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2622 
2623 	safexcel_aead_cra_init(tfm);
2624 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2625 	ctx->state_sz = GHASH_BLOCK_SIZE;
2626 	ctx->xcm = EIP197_XCM_MODE_GCM;
2627 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2628 
2629 	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2630 	return PTR_ERR_OR_ZERO(ctx->hkaes);
2631 }
2632 
2633 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2634 {
2635 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2636 
2637 	crypto_free_cipher(ctx->hkaes);
2638 	safexcel_aead_cra_exit(tfm);
2639 }
2640 
2641 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2642 					 unsigned int authsize)
2643 {
2644 	return crypto_gcm_check_authsize(authsize);
2645 }
2646 
2647 struct safexcel_alg_template safexcel_alg_gcm = {
2648 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2649 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2650 	.alg.aead = {
2651 		.setkey = safexcel_aead_gcm_setkey,
2652 		.setauthsize = safexcel_aead_gcm_setauthsize,
2653 		.encrypt = safexcel_aead_encrypt,
2654 		.decrypt = safexcel_aead_decrypt,
2655 		.ivsize = GCM_AES_IV_SIZE,
2656 		.maxauthsize = GHASH_DIGEST_SIZE,
2657 		.base = {
2658 			.cra_name = "gcm(aes)",
2659 			.cra_driver_name = "safexcel-gcm-aes",
2660 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2661 			.cra_flags = CRYPTO_ALG_ASYNC |
2662 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2663 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2664 			.cra_blocksize = 1,
2665 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2666 			.cra_alignmask = 0,
2667 			.cra_init = safexcel_aead_gcm_cra_init,
2668 			.cra_exit = safexcel_aead_gcm_cra_exit,
2669 			.cra_module = THIS_MODULE,
2670 		},
2671 	},
2672 };
2673 
2674 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2675 				    unsigned int len)
2676 {
2677 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2678 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2679 	struct safexcel_crypto_priv *priv = ctx->base.priv;
2680 	struct crypto_aes_ctx aes;
2681 	int ret, i;
2682 
2683 	ret = aes_expandkey(&aes, key, len);
2684 	if (ret) {
2685 		memzero_explicit(&aes, sizeof(aes));
2686 		return ret;
2687 	}
2688 
2689 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2690 		for (i = 0; i < len / sizeof(u32); i++) {
2691 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2692 				ctx->base.needs_inv = true;
2693 				break;
2694 			}
2695 		}
2696 	}
2697 
2698 	for (i = 0; i < len / sizeof(u32); i++) {
2699 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2700 		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2701 			cpu_to_be32(aes.key_enc[i]);
2702 	}
2703 
2704 	ctx->key_len = len;
2705 	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2706 
2707 	if (len == AES_KEYSIZE_192)
2708 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2709 	else if (len == AES_KEYSIZE_256)
2710 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2711 	else
2712 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2713 
2714 	memzero_explicit(&aes, sizeof(aes));
2715 	return 0;
2716 }
2717 
2718 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2719 {
2720 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2721 
2722 	safexcel_aead_cra_init(tfm);
2723 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2724 	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2725 	ctx->xcm = EIP197_XCM_MODE_CCM;
2726 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2727 	ctx->ctrinit = 0;
2728 	return 0;
2729 }
2730 
2731 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2732 					 unsigned int authsize)
2733 {
2734 	/* Borrowed from crypto/ccm.c */
2735 	switch (authsize) {
2736 	case 4:
2737 	case 6:
2738 	case 8:
2739 	case 10:
2740 	case 12:
2741 	case 14:
2742 	case 16:
2743 		break;
2744 	default:
2745 		return -EINVAL;
2746 	}
2747 
2748 	return 0;
2749 }
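
/*
 * RFC 3610 permits even tag lengths from 4 to 16 bytes; the switch above
 * spells those out. An equivalent closed-form check would be (sketch only):
 */
#if 0
static int example_ccm_authsize_ok(unsigned int authsize)
{
	/* reject odd sizes and anything outside [4, 16] */
	return (authsize < 4 || authsize > 16 || (authsize & 1)) ?
	       -EINVAL : 0;
}
#endif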
2750 
2751 static int safexcel_ccm_encrypt(struct aead_request *req)
2752 {
2753 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2754 
2755 	if (req->iv[0] < 1 || req->iv[0] > 7)
2756 		return -EINVAL;
2757 
2758 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2759 }
2760 
2761 static int safexcel_ccm_decrypt(struct aead_request *req)
2762 {
2763 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2764 
2765 	if (req->iv[0] < 1 || req->iv[0] > 7)
2766 		return -EINVAL;
2767 
2768 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2769 }
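
/*
 * The iv[0] checks above validate the CCM flags byte: iv[0] holds L' = L - 1,
 * where L is the size in bytes of the message-length field and RFC 3610
 * requires 2 <= L <= 8, hence 1 <= iv[0] <= 7. Illustrative helper
 * (hypothetical, not part of the driver):
 */
#if 0
static bool example_ccm_l_valid(const u8 *iv)
{
	/* length-field size L = iv[0] + 1 must be in [2, 8] */
	return iv[0] >= 1 && iv[0] <= 7;
}
#endif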
2770 
2771 struct safexcel_alg_template safexcel_alg_ccm = {
2772 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2773 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2774 	.alg.aead = {
2775 		.setkey = safexcel_aead_ccm_setkey,
2776 		.setauthsize = safexcel_aead_ccm_setauthsize,
2777 		.encrypt = safexcel_ccm_encrypt,
2778 		.decrypt = safexcel_ccm_decrypt,
2779 		.ivsize = AES_BLOCK_SIZE,
2780 		.maxauthsize = AES_BLOCK_SIZE,
2781 		.base = {
2782 			.cra_name = "ccm(aes)",
2783 			.cra_driver_name = "safexcel-ccm-aes",
2784 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2785 			.cra_flags = CRYPTO_ALG_ASYNC |
2786 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2787 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2788 			.cra_blocksize = 1,
2789 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2790 			.cra_alignmask = 0,
2791 			.cra_init = safexcel_aead_ccm_cra_init,
2792 			.cra_exit = safexcel_aead_cra_exit,
2793 			.cra_module = THIS_MODULE,
2794 		},
2795 	},
2796 };
2797 
2798 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2799 				     const u8 *key)
2800 {
2801 	struct safexcel_crypto_priv *priv = ctx->base.priv;
2802 
2803 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2804 		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2805 			ctx->base.needs_inv = true;
2806 
2807 	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2808 	ctx->key_len = CHACHA_KEY_SIZE;
2809 }
2810 
2811 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2812 					     const u8 *key, unsigned int len)
2813 {
2814 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2815 
2816 	if (len != CHACHA_KEY_SIZE)
2817 		return -EINVAL;
2818 
2819 	safexcel_chacha20_setkey(ctx, key);
2820 
2821 	return 0;
2822 }
2823 
2824 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2825 {
2826 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2827 
2828 	safexcel_skcipher_cra_init(tfm);
2829 	ctx->alg  = SAFEXCEL_CHACHA20;
2830 	ctx->ctrinit = 0;
2831 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2832 	return 0;
2833 }
2834 
2835 struct safexcel_alg_template safexcel_alg_chacha20 = {
2836 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2837 	.algo_mask = SAFEXCEL_ALG_CHACHA20,
2838 	.alg.skcipher = {
2839 		.setkey = safexcel_skcipher_chacha20_setkey,
2840 		.encrypt = safexcel_encrypt,
2841 		.decrypt = safexcel_decrypt,
2842 		.min_keysize = CHACHA_KEY_SIZE,
2843 		.max_keysize = CHACHA_KEY_SIZE,
2844 		.ivsize = CHACHA_IV_SIZE,
2845 		.base = {
2846 			.cra_name = "chacha20",
2847 			.cra_driver_name = "safexcel-chacha20",
2848 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2849 			.cra_flags = CRYPTO_ALG_ASYNC |
2850 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2851 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2852 			.cra_blocksize = 1,
2853 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2854 			.cra_alignmask = 0,
2855 			.cra_init = safexcel_skcipher_chacha20_cra_init,
2856 			.cra_exit = safexcel_skcipher_cra_exit,
2857 			.cra_module = THIS_MODULE,
2858 		},
2859 	},
2860 };
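
/*
 * The 16-byte ChaCha20 IV used by this transform packs a 32-bit
 * little-endian block counter in iv[0..3] followed by a 96-bit nonce in
 * iv[4..15]; safexcel_skcipher_iv() splits it accordingly when building the
 * command token. A caller-side sketch of that layout (hypothetical helper,
 * assuming the generic chacha20 IV convention):
 */
#if 0
static void example_chacha_iv(u8 iv[CHACHA_IV_SIZE], u32 counter,
			      const u8 nonce[12])
{
	put_unaligned_le32(counter, iv);	/* iv[0..3]: block counter */
	memcpy(iv + 4, nonce, 12);		/* iv[4..15]: nonce */
}
#endif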
2861 
2862 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2863 				    const u8 *key, unsigned int len)
2864 {
2865 	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2866 
2867 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2868 	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2869 		/* ESP variant has nonce appended to key */
2870 		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2871 		ctx->nonce = *(u32 *)(key + len);
2872 	}
2873 	if (len != CHACHA_KEY_SIZE)
2874 		return -EINVAL;
2875 
2876 	safexcel_chacha20_setkey(ctx, key);
2877 
2878 	return 0;
2879 }
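
/*
 * For the rfc7539esp variant the key blob is the 32-byte ChaCha20 key with
 * the 4-byte ESP salt/nonce appended, which the setkey above peels off into
 * ctx->nonce. A hypothetical blob-building sketch:
 */
#if 0
static void example_chachapoly_esp_key(u8 blob[CHACHA_KEY_SIZE + 4],
				       const u8 key[CHACHA_KEY_SIZE],
				       const u8 salt[4])
{
	memcpy(blob, key, CHACHA_KEY_SIZE);	 /* cipher key */
	memcpy(blob + CHACHA_KEY_SIZE, salt, 4); /* implicit ESP nonce */
}
#endif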
2880 
2881 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2882 					 unsigned int authsize)
2883 {
2884 	if (authsize != POLY1305_DIGEST_SIZE)
2885 		return -EINVAL;
2886 	return 0;
2887 }
2888 
2889 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2890 					  enum safexcel_cipher_direction dir)
2891 {
2892 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2893 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2894 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2895 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2896 	struct aead_request *subreq = aead_request_ctx(req);
2897 	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2898 	int ret = 0;
2899 
2900 	/*
2901 	 * Instead of wasting time detecting umpteen silly corner cases,
2902 	 * just dump all "small" requests to the fallback implementation.
2903 	 * HW would not be faster on such small requests anyway.
2904 	 */
2905 	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2906 		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2907 		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
2908 		return safexcel_queue_req(&req->base, creq, dir);
2909 	}
2910 
2911 	/* HW cannot do full (AAD+payload) zero length, use fallback */
2912 	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2913 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2914 		/* ESP variant has nonce appended to the key */
2915 		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2916 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2917 					 CHACHA_KEY_SIZE +
2918 					 EIP197_AEAD_IPSEC_NONCE_SIZE);
2919 	} else {
2920 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2921 					 CHACHA_KEY_SIZE);
2922 	}
2923 	if (ret) {
2924 		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2925 		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2926 					    CRYPTO_TFM_REQ_MASK);
2927 		return ret;
2928 	}
2929 
2930 	aead_request_set_tfm(subreq, ctx->fback);
2931 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2932 				  req->base.data);
2933 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2934 			       req->iv);
2935 	aead_request_set_ad(subreq, req->assoclen);
2936 
2937 	return (dir == SAFEXCEL_ENCRYPT) ?
2938 		crypto_aead_encrypt(subreq) :
2939 		crypto_aead_decrypt(subreq);
2940 }
2941 
2942 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2943 {
2944 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2945 }
2946 
2947 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2948 {
2949 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2950 }
2951 
2952 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2953 {
2954 	struct crypto_aead *aead = __crypto_aead_cast(tfm);
2955 	struct aead_alg *alg = crypto_aead_alg(aead);
2956 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2957 
2958 	safexcel_aead_cra_init(tfm);
2959 
2960 	/* Allocate fallback implementation */
2961 	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2962 				       CRYPTO_ALG_ASYNC |
2963 				       CRYPTO_ALG_NEED_FALLBACK);
2964 	if (IS_ERR(ctx->fback))
2965 		return PTR_ERR(ctx->fback);
2966 
2967 	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2968 					  sizeof(struct aead_request) +
2969 					  crypto_aead_reqsize(ctx->fback)));
2970 
2971 	return 0;
2972 }
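
/*
 * Note on the reqsize above: the per-request context must be able to hold
 * either this driver's own state or a complete fallback aead_request
 * stacked on top, so the larger of the two is reserved. That is what lets
 * safexcel_aead_chachapoly_crypt() place its subrequest directly in
 * aead_request_ctx(req) without a separate allocation.
 */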
2973 
2974 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2975 {
2976 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2977 
2978 	safexcel_aead_fallback_cra_init(tfm);
2979 	ctx->alg  = SAFEXCEL_CHACHA20;
2980 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2981 		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2982 	ctx->ctrinit = 0;
2983 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2984 	ctx->state_sz = 0; /* Precomputed by HW */
2985 	return 0;
2986 }
2987 
2988 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2989 {
2990 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2991 
2992 	crypto_free_aead(ctx->fback);
2993 	safexcel_aead_cra_exit(tfm);
2994 }
2995 
2996 struct safexcel_alg_template safexcel_alg_chachapoly = {
2997 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2998 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2999 	.alg.aead = {
3000 		.setkey = safexcel_aead_chachapoly_setkey,
3001 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
3002 		.encrypt = safexcel_aead_chachapoly_encrypt,
3003 		.decrypt = safexcel_aead_chachapoly_decrypt,
3004 		.ivsize = CHACHAPOLY_IV_SIZE,
3005 		.maxauthsize = POLY1305_DIGEST_SIZE,
3006 		.base = {
3007 			.cra_name = "rfc7539(chacha20,poly1305)",
3008 			.cra_driver_name = "safexcel-chacha20-poly1305",
3009 			/* +1 to put it above HW chacha + SW poly */
3010 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3011 			.cra_flags = CRYPTO_ALG_ASYNC |
3012 				     CRYPTO_ALG_ALLOCATES_MEMORY |
3013 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3014 				     CRYPTO_ALG_NEED_FALLBACK,
3015 			.cra_blocksize = 1,
3016 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3017 			.cra_alignmask = 0,
3018 			.cra_init = safexcel_aead_chachapoly_cra_init,
3019 			.cra_exit = safexcel_aead_fallback_cra_exit,
3020 			.cra_module = THIS_MODULE,
3021 		},
3022 	},
3023 };
3024 
3025 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3026 {
3027 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3028 	int ret;
3029 
3030 	ret = safexcel_aead_chachapoly_cra_init(tfm);
3031 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3032 	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3033 	return ret;
3034 }
3035 
3036 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3037 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3038 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3039 	.alg.aead = {
3040 		.setkey = safexcel_aead_chachapoly_setkey,
3041 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
3042 		.encrypt = safexcel_aead_chachapoly_encrypt,
3043 		.decrypt = safexcel_aead_chachapoly_decrypt,
3044 		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3045 		.maxauthsize = POLY1305_DIGEST_SIZE,
3046 		.base = {
3047 			.cra_name = "rfc7539esp(chacha20,poly1305)",
3048 			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
3049 			/* +1 to put it above HW chacha + SW poly */
3050 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3051 			.cra_flags = CRYPTO_ALG_ASYNC |
3052 				     CRYPTO_ALG_ALLOCATES_MEMORY |
3053 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3054 				     CRYPTO_ALG_NEED_FALLBACK,
3055 			.cra_blocksize = 1,
3056 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3057 			.cra_alignmask = 0,
3058 			.cra_init = safexcel_aead_chachapolyesp_cra_init,
3059 			.cra_exit = safexcel_aead_fallback_cra_exit,
3060 			.cra_module = THIS_MODULE,
3061 		},
3062 	},
3063 };
3064 
3065 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3066 					const u8 *key, unsigned int len)
3067 {
3068 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3069 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3070 	struct safexcel_crypto_priv *priv = ctx->base.priv;
3071 
3072 	if (len != SM4_KEY_SIZE)
3073 		return -EINVAL;
3074 
3075 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3076 		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3077 			ctx->base.needs_inv = true;
3078 
3079 	memcpy(ctx->key, key, SM4_KEY_SIZE);
3080 	ctx->key_len = SM4_KEY_SIZE;
3081 
3082 	return 0;
3083 }
3084 
3085 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3086 {
3087 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3088 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3089 		return -EINVAL;
3090 	else
3091 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3092 					  SAFEXCEL_ENCRYPT);
3093 }
3094 
3095 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3096 {
3097 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3098 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3099 		return -EINVAL;
3100 	else
3101 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3102 					  SAFEXCEL_DECRYPT);
3103 }
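
/*
 * Since SM4_BLOCK_SIZE is a power of two, the masking above is the usual
 * branch-free equivalent of a modulo test (illustrative sketch):
 */
#if 0
static bool example_is_full_blocks(unsigned int len)
{
	/* (len & (SM4_BLOCK_SIZE - 1)) equals len % SM4_BLOCK_SIZE here */
	return (len & (SM4_BLOCK_SIZE - 1)) == 0;
}
#endif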
3104 
3105 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3106 {
3107 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3108 
3109 	safexcel_skcipher_cra_init(tfm);
3110 	ctx->alg  = SAFEXCEL_SM4;
3111 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3112 	ctx->blocksz = 0;
3113 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3114 	return 0;
3115 }
3116 
3117 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3118 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3119 	.algo_mask = SAFEXCEL_ALG_SM4,
3120 	.alg.skcipher = {
3121 		.setkey = safexcel_skcipher_sm4_setkey,
3122 		.encrypt = safexcel_sm4_blk_encrypt,
3123 		.decrypt = safexcel_sm4_blk_decrypt,
3124 		.min_keysize = SM4_KEY_SIZE,
3125 		.max_keysize = SM4_KEY_SIZE,
3126 		.base = {
3127 			.cra_name = "ecb(sm4)",
3128 			.cra_driver_name = "safexcel-ecb-sm4",
3129 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3130 			.cra_flags = CRYPTO_ALG_ASYNC |
3131 				     CRYPTO_ALG_ALLOCATES_MEMORY |
3132 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3133 			.cra_blocksize = SM4_BLOCK_SIZE,
3134 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3135 			.cra_alignmask = 0,
3136 			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3137 			.cra_exit = safexcel_skcipher_cra_exit,
3138 			.cra_module = THIS_MODULE,
3139 		},
3140 	},
3141 };
3142 
3143 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3144 {
3145 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3146 
3147 	safexcel_skcipher_cra_init(tfm);
3148 	ctx->alg  = SAFEXCEL_SM4;
3149 	ctx->blocksz = SM4_BLOCK_SIZE;
3150 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3151 	return 0;
3152 }
3153 
3154 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3155 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3156 	.algo_mask = SAFEXCEL_ALG_SM4,
3157 	.alg.skcipher = {
3158 		.setkey = safexcel_skcipher_sm4_setkey,
3159 		.encrypt = safexcel_sm4_blk_encrypt,
3160 		.decrypt = safexcel_sm4_blk_decrypt,
3161 		.min_keysize = SM4_KEY_SIZE,
3162 		.max_keysize = SM4_KEY_SIZE,
3163 		.ivsize = SM4_BLOCK_SIZE,
3164 		.base = {
3165 			.cra_name = "cbc(sm4)",
3166 			.cra_driver_name = "safexcel-cbc-sm4",
3167 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3168 			.cra_flags = CRYPTO_ALG_ASYNC |
3169 				     CRYPTO_ALG_ALLOCATES_MEMORY |
3170 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3171 			.cra_blocksize = SM4_BLOCK_SIZE,
3172 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3173 			.cra_alignmask = 0,
3174 			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3175 			.cra_exit = safexcel_skcipher_cra_exit,
3176 			.cra_module = THIS_MODULE,
3177 		},
3178 	},
3179 };
3180 
safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm * tfm)3181 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3182 {
3183 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3184 
3185 	safexcel_skcipher_cra_init(tfm);
3186 	ctx->alg  = SAFEXCEL_SM4;
3187 	ctx->blocksz = SM4_BLOCK_SIZE;
3188 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3189 	return 0;
3190 }
3191 
3192 struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3193 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3194 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3195 	.alg.skcipher = {
3196 		.setkey = safexcel_skcipher_sm4_setkey,
3197 		.encrypt = safexcel_encrypt,
3198 		.decrypt = safexcel_decrypt,
3199 		.min_keysize = SM4_KEY_SIZE,
3200 		.max_keysize = SM4_KEY_SIZE,
3201 		.ivsize = SM4_BLOCK_SIZE,
3202 		.base = {
3203 			.cra_name = "ofb(sm4)",
3204 			.cra_driver_name = "safexcel-ofb-sm4",
3205 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3206 			.cra_flags = CRYPTO_ALG_ASYNC |
3207 				     CRYPTO_ALG_ALLOCATES_MEMORY |
3208 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3209 			.cra_blocksize = 1,
3210 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3211 			.cra_alignmask = 0,
3212 			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3213 			.cra_exit = safexcel_skcipher_cra_exit,
3214 			.cra_module = THIS_MODULE,
3215 		},
3216 	},
3217 };
3218 
safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm * tfm)3219 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3220 {
3221 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3222 
3223 	safexcel_skcipher_cra_init(tfm);
3224 	ctx->alg  = SAFEXCEL_SM4;
3225 	ctx->blocksz = SM4_BLOCK_SIZE;
3226 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3227 	return 0;
3228 }
3229 
3230 struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3231 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3232 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3233 	.alg.skcipher = {
3234 		.setkey = safexcel_skcipher_sm4_setkey,
3235 		.encrypt = safexcel_encrypt,
3236 		.decrypt = safexcel_decrypt,
3237 		.min_keysize = SM4_KEY_SIZE,
3238 		.max_keysize = SM4_KEY_SIZE,
3239 		.ivsize = SM4_BLOCK_SIZE,
3240 		.base = {
3241 			.cra_name = "cfb(sm4)",
3242 			.cra_driver_name = "safexcel-cfb-sm4",
3243 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3244 			.cra_flags = CRYPTO_ALG_ASYNC |
3245 				     CRYPTO_ALG_ALLOCATES_MEMORY |
3246 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3247 			.cra_blocksize = 1,
3248 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3249 			.cra_alignmask = 0,
3250 			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3251 			.cra_exit = safexcel_skcipher_cra_exit,
3252 			.cra_module = THIS_MODULE,
3253 		},
3254 	},
3255 };

static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	len -= CTR_RFC3686_NONCE_SIZE;

	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
}
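
/*
 * Sketch of the RFC3686 convention assumed by the setkey above (defined
 * by the generic rfc3686 wrapper, not by this driver):
 *
 *	setkey input:   [ 16-byte SM4 key ][ 4-byte nonce ]
 *	per-request IV: [ 8-byte IV ]
 *
 * Nonce, IV and a 32-bit block counter (starting at 1 per RFC3686)
 * together form the 16-byte counter block fed to the cipher.
 */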

static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/*
	 * Workaround for HW bug: EIP96 4.3 does not report blocksize error.
	 * On decrypt, cryptlen covers ciphertext plus the auth tag, so
	 * subtract the tag size before checking block alignment.
	 */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
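
/*
 * Note on the fallback plumbing above: the request context is reused as
 * the fallback's aead_request, which presumes the transform's reqsize
 * was sized for ctx->fback when it was allocated (in the fallback
 * cra_init, outside this excerpt).
 */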

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}
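
/*
 * Sketch of the RFC4106 convention assumed by the setkey above: the
 * trailing 4 key bytes are the salt (CTR_RFC3686_NONCE_SIZE is reused
 * here only because both happen to be 4 bytes):
 *
 *	setkey input:   [ AES key (16/24/32 bytes) ][ 4-byte salt ]
 *	per-request IV: [ 8-byte explicit IV ]
 *
 * Salt plus explicit IV form the 12-byte GCM nonce.
 */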

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* RFC4543 (GMAC) mandates a full-size 16-byte ICV */
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First nonce byte = L' = L - 1 = 3 for RFC4309 (4-byte counter) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
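
/*
 * Resulting ctx->nonce layout (a sketch of the CCM construction per
 * RFC 3610 / RFC 4309, as assumed here):
 *
 *	byte 0:    L' = L - 1 = 3 (4-byte counter field)
 *	bytes 1-3: 3-byte salt from the end of the key
 *
 * Completed at request time with the 8-byte per-packet IV, this yields
 * the 11-byte CCM nonce (15 - L = 11).
 */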

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
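
/*
 * The 16/20 byte assoclen check above mirrors crypto/ccm.c: for IPsec
 * ESP the AAD is the 8-byte ESP header (SPI + sequence number), or 12
 * bytes with extended sequence numbers, plus the 8-byte IV that the
 * rfc4309 convention counts as part of the associated data.
 */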

static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};