1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7 
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
28 
29 #include "safexcel.h"
30 
31 enum safexcel_cipher_direction {
32 	SAFEXCEL_ENCRYPT,
33 	SAFEXCEL_DECRYPT,
34 };
35 
36 enum safexcel_cipher_alg {
37 	SAFEXCEL_DES,
38 	SAFEXCEL_3DES,
39 	SAFEXCEL_AES,
40 	SAFEXCEL_CHACHA20,
41 	SAFEXCEL_SM4,
42 };
43 
44 struct safexcel_cipher_ctx {
45 	struct safexcel_context base;
46 	struct safexcel_crypto_priv *priv;
47 
48 	u32 mode;
49 	enum safexcel_cipher_alg alg;
50 	u8 aead; /* !=0=AEAD, 2=IPsec ESP AEAD, 3=IPsec ESP GMAC */
51 	u8 xcm;  /* 0=authenc, 1=GCM, 2=CCM */
52 	u8 aadskip;
53 	u8 blocksz;
54 	u32 ivmask;
55 	u32 ctrinit;
56 
57 	__le32 key[16];
58 	u32 nonce;
59 	unsigned int key_len, xts;
60 
61 	/* Everything below is AEAD-specific */
62 	u32 hash_alg;
63 	u32 state_sz;
64 	__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
65 	__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
66 
67 	struct crypto_cipher *hkaes;
68 	struct crypto_aead *fback;
69 };
70 
71 struct safexcel_cipher_req {
72 	enum safexcel_cipher_direction direction;
73 	/* Number of result descriptors associated with the request */
74 	unsigned int rdescs;
75 	bool needs_inv;
76 	int  nr_src, nr_dst;
77 };
78 
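/*
 * Load the request IV into the command descriptor control token.
 * CTR (rfc3686) mode builds a nonce/IV/counter block, ChaCha20 takes the
 * 96-bit nonce plus the 32-bit counter from the front of the IV, and all
 * other modes copy the raw IV. Returns the number of 32-bit token words
 * consumed.
 */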
79 static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
80 				struct safexcel_command_desc *cdesc)
81 {
82 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
83 		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
84 		/* 32 bit nonce */
85 		cdesc->control_data.token[0] = ctx->nonce;
86 		/* 64 bit IV part */
87 		memcpy(&cdesc->control_data.token[1], iv, 8);
88 		/* 32 bit counter, start at 0 or 1 (big endian!) */
89 		cdesc->control_data.token[3] =
90 			(__force u32)cpu_to_be32(ctx->ctrinit);
91 		return 4;
92 	}
93 	if (ctx->alg == SAFEXCEL_CHACHA20) {
94 		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
95 		/* 96 bit nonce part */
96 		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
97 		/* 32 bit counter */
98 		cdesc->control_data.token[3] = *(u32 *)iv;
99 		return 4;
100 	}
101 
102 	cdesc->control_data.options |= ctx->ivmask;
103 	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
104 	return ctx->blocksz / sizeof(u32);
105 }
106 
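/*
 * Build the single processing instruction for a plain skcipher request:
 * run 'length' bytes through the crypto engine and emit them as output,
 * with the IV loaded by safexcel_skcipher_iv(). When the IV already fills
 * all 4 token words, the instruction spills over into the additional
 * token area (atoken); otherwise it fits in the command descriptor and is
 * padded with a NOP.
 */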
107 static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
108 				    struct safexcel_command_desc *cdesc,
109 				    struct safexcel_token *atoken,
110 				    u32 length)
111 {
112 	struct safexcel_token *token;
113 	int ivlen;
114 
115 	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
116 	if (ivlen == 4) {
117 		/* No space in cdesc, instruction moves to atoken */
118 		cdesc->additional_cdata_size = 1;
119 		token = atoken;
120 	} else {
121 		/* Everything fits in cdesc */
122 		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
123 		/* Need to pad with NOP */
124 		eip197_noop_token(&token[1]);
125 	}
126 
127 	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
128 	token->packet_length = length;
129 	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
130 		      EIP197_TOKEN_STAT_LAST_HASH;
131 	token->instructions = EIP197_TOKEN_INS_LAST |
132 			      EIP197_TOKEN_INS_TYPE_CRYPTO |
133 			      EIP197_TOKEN_INS_TYPE_OUTPUT;
134 }
135 
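/*
 * Load the IV for AEAD modes: CTR and IPsec ESP use the stored nonce plus
 * the 64-bit IV and a big-endian counter, GCM and ChaCha20-Poly1305 use a
 * 96-bit IV plus counter, and CBC copies the full IV block.
 */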
136 static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
137 			     struct safexcel_command_desc *cdesc)
138 {
139 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
140 	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
141 		/* 32 bit nonce */
142 		cdesc->control_data.token[0] = ctx->nonce;
143 		/* 64 bit IV part */
144 		memcpy(&cdesc->control_data.token[1], iv, 8);
145 		/* 32 bit counter, start at 0 or 1 (big endian!) */
146 		cdesc->control_data.token[3] =
147 			(__force u32)cpu_to_be32(ctx->ctrinit);
148 		return;
149 	}
150 	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
151 		/* 96 bit IV part */
152 		memcpy(&cdesc->control_data.token[0], iv, 12);
153 		/* 32 bit counter, start at 0 or 1 (big endian!) */
154 		cdesc->control_data.token[3] =
155 			(__force u32)cpu_to_be32(ctx->ctrinit);
156 		return;
157 	}
158 	/* CBC */
159 	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
160 }
161 
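/*
 * Build the AEAD instruction token sequence: hash the (adjusted) AAD,
 * optionally skip over the ESP IV, process the payload and, for
 * encryption, insert the ICV - or retrieve and verify it for decryption.
 * CCM additionally constructs the B0 block and AAD length field for the
 * CBC-MAC and pads AAD and payload out to the AES block size. The total
 * token size is written back into the command descriptor at the end.
 */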
162 static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
163 				struct safexcel_command_desc *cdesc,
164 				struct safexcel_token *atoken,
165 				enum safexcel_cipher_direction direction,
166 				u32 cryptlen, u32 assoclen, u32 digestsize)
167 {
168 	struct safexcel_token *aadref;
169 	int atoksize = 2; /* Start with minimum size */
170 	int assocadj = assoclen - ctx->aadskip, aadalign;
171 
172 	/* Always 4 dwords of embedded IV for AEAD modes */
173 	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
174 
175 	if (direction == SAFEXCEL_DECRYPT)
176 		cryptlen -= digestsize;
177 
178 	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
179 		/* Construct IV block B0 for the CBC-MAC */
180 		u8 *final_iv = (u8 *)cdesc->control_data.token;
181 		u8 *cbcmaciv = (u8 *)&atoken[1];
182 		__le32 *aadlen = (__le32 *)&atoken[5];
183 
184 		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
185 			/* Length + nonce */
186 			cdesc->control_data.token[0] = ctx->nonce;
187 			/* Fixup flags byte */
188 			*(__le32 *)cbcmaciv =
189 				cpu_to_le32(ctx->nonce |
190 					    ((assocadj > 0) << 6) |
191 					    ((digestsize - 2) << 2));
192 			/* 64 bit IV part */
193 			memcpy(&cdesc->control_data.token[1], iv, 8);
194 			memcpy(cbcmaciv + 4, iv, 8);
195 			/* Start counter at 0 */
196 			cdesc->control_data.token[3] = 0;
197 			/* Message length */
198 			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
199 		} else {
200 			/* Variable length IV part */
201 			memcpy(final_iv, iv, 15 - iv[0]);
202 			memcpy(cbcmaciv, iv, 15 - iv[0]);
203 			/* Start variable length counter at 0 */
204 			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
205 			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
206 			/* fixup flags byte */
207 			cbcmaciv[0] |= ((assocadj > 0) << 6) |
208 				       ((digestsize - 2) << 2);
209 			/* insert lower 2 bytes of message length */
210 			cbcmaciv[14] = cryptlen >> 8;
211 			cbcmaciv[15] = cryptlen & 255;
212 		}
213 
214 		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
215 		atoken->packet_length = AES_BLOCK_SIZE +
216 					((assocadj > 0) << 1);
217 		atoken->stat = 0;
218 		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
219 				       EIP197_TOKEN_INS_TYPE_HASH;
220 
221 		if (likely(assocadj)) {
222 			*aadlen = cpu_to_le32((assocadj >> 8) |
223 					      (assocadj & 255) << 8);
224 			atoken += 6;
225 			atoksize += 7;
226 		} else {
227 			atoken += 5;
228 			atoksize += 6;
229 		}
230 
231 		/* Process AAD data */
232 		aadref = atoken;
233 		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
234 		atoken->packet_length = assocadj;
235 		atoken->stat = 0;
236 		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
237 		atoken++;
238 
239 		/* For CCM only, align AAD data towards hash engine */
240 		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
241 		aadalign = (assocadj + 2) & 15;
242 		atoken->packet_length = assocadj && aadalign ?
243 						16 - aadalign :
244 						0;
245 		if (likely(cryptlen)) {
246 			atoken->stat = 0;
247 			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
248 		} else {
249 			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
250 			atoken->instructions = EIP197_TOKEN_INS_LAST |
251 					       EIP197_TOKEN_INS_TYPE_HASH;
252 		}
253 	} else {
254 		safexcel_aead_iv(ctx, iv, cdesc);
255 
256 		/* Process AAD data */
257 		aadref = atoken;
258 		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
259 		atoken->packet_length = assocadj;
260 		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
261 		atoken->instructions = EIP197_TOKEN_INS_LAST |
262 				       EIP197_TOKEN_INS_TYPE_HASH;
263 	}
264 	atoken++;
265 
266 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
267 		/* For ESP mode (and not GMAC), skip over the IV */
268 		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
269 		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
270 		atoken->stat = 0;
271 		atoken->instructions = 0;
272 		atoken++;
273 		atoksize++;
274 	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
275 			    direction == SAFEXCEL_DECRYPT)) {
276 		/* Poly-chacha decryption needs a dummy NOP here ... */
277 		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
278 		atoken->packet_length = 16; /* According to Op Manual */
279 		atoken->stat = 0;
280 		atoken->instructions = 0;
281 		atoken++;
282 		atoksize++;
283 	}
284 
285 	if (ctx->xcm) {
286 		/* For GCM and CCM, obtain enc(Y0) */
287 		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
288 		atoken->packet_length = 0;
289 		atoken->stat = 0;
290 		atoken->instructions = AES_BLOCK_SIZE;
291 		atoken++;
292 
293 		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
294 		atoken->packet_length = AES_BLOCK_SIZE;
295 		atoken->stat = 0;
296 		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
297 				       EIP197_TOKEN_INS_TYPE_CRYPTO;
298 		atoken++;
299 		atoksize += 2;
300 	}
301 
302 	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
303 		/* Fixup stat field for AAD direction instruction */
304 		aadref->stat = 0;
305 
306 		/* Process crypto data */
307 		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
308 		atoken->packet_length = cryptlen;
309 
310 		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
311 			/* Fixup instruction field for AAD dir instruction */
312 			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
313 
314 			/* Do not send to crypt engine in case of GMAC */
315 			atoken->instructions = EIP197_TOKEN_INS_LAST |
316 					       EIP197_TOKEN_INS_TYPE_HASH |
317 					       EIP197_TOKEN_INS_TYPE_OUTPUT;
318 		} else {
319 			atoken->instructions = EIP197_TOKEN_INS_LAST |
320 					       EIP197_TOKEN_INS_TYPE_CRYPTO |
321 					       EIP197_TOKEN_INS_TYPE_HASH |
322 					       EIP197_TOKEN_INS_TYPE_OUTPUT;
323 		}
324 
325 		cryptlen &= 15;
326 		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
327 			atoken->stat = 0;
328 			/* For CCM only, pad crypto data to the hash engine */
329 			atoken++;
330 			atoksize++;
331 			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
332 			atoken->packet_length = 16 - cryptlen;
333 			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
334 			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
335 		} else {
336 			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
337 		}
338 		atoken++;
339 		atoksize++;
340 	}
341 
342 	if (direction == SAFEXCEL_ENCRYPT) {
343 		/* Append ICV */
344 		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
345 		atoken->packet_length = digestsize;
346 		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
347 			       EIP197_TOKEN_STAT_LAST_PACKET;
348 		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
349 				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
350 	} else {
351 		/* Extract ICV */
352 		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
353 		atoken->packet_length = digestsize;
354 		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
355 			       EIP197_TOKEN_STAT_LAST_PACKET;
356 		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
357 		atoken++;
358 		atoksize++;
359 
360 		/* Verify ICV */
361 		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
362 		atoken->packet_length = digestsize |
363 					EIP197_TOKEN_HASH_RESULT_VERIFY;
364 		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
365 			       EIP197_TOKEN_STAT_LAST_PACKET;
366 		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
367 	}
368 
369 	/* Fixup length of the token in the command descriptor */
370 	cdesc->additional_cdata_size = atoksize;
371 }
372 
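/*
 * Set the AES key for skcipher use. aes_expandkey() is used to validate
 * the key length; only the first len/4 key words (i.e. the raw key) are
 * stored in the context, and a context record invalidation is flagged if
 * a cached record held a different key.
 */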
373 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
374 					const u8 *key, unsigned int len)
375 {
376 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
377 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
378 	struct safexcel_crypto_priv *priv = ctx->priv;
379 	struct crypto_aes_ctx aes;
380 	int ret, i;
381 
382 	ret = aes_expandkey(&aes, key, len);
383 	if (ret)
384 		return ret;
385 
386 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
387 		for (i = 0; i < len / sizeof(u32); i++) {
388 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
389 				ctx->base.needs_inv = true;
390 				break;
391 			}
392 		}
393 	}
394 
395 	for (i = 0; i < len / sizeof(u32); i++)
396 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
397 
398 	ctx->key_len = len;
399 
400 	memzero_explicit(&aes, sizeof(aes));
401 	return 0;
402 }
403 
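/*
 * authenc() AEAD setkey: split the combined blob into encryption and
 * authentication keys, peel off the rfc3686 nonce when in CTR mode,
 * verify the cipher key for the selected algorithm, and precompute the
 * HMAC ipad/opad digests via the safexcel ahash implementation before
 * copying everything into the context.
 */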
404 static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
405 				unsigned int len)
406 {
407 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
408 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
409 	struct safexcel_ahash_export_state istate, ostate;
410 	struct safexcel_crypto_priv *priv = ctx->priv;
411 	struct crypto_authenc_keys keys;
412 	struct crypto_aes_ctx aes;
413 	int err = -EINVAL, i;
414 
415 	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
416 		goto badkey;
417 
418 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
419 		/* Must have at least space for the nonce here */
420 		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
421 			goto badkey;
422 		/* last 4 bytes of key are the nonce! */
423 		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
424 				      CTR_RFC3686_NONCE_SIZE);
425 		/* exclude the nonce here */
426 		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
427 	}
428 
429 	/* Encryption key */
430 	switch (ctx->alg) {
431 	case SAFEXCEL_DES:
432 		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
433 		if (unlikely(err))
434 			goto badkey;
435 		break;
436 	case SAFEXCEL_3DES:
437 		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
438 		if (unlikely(err))
439 			goto badkey;
440 		break;
441 	case SAFEXCEL_AES:
442 		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
443 		if (unlikely(err))
444 			goto badkey;
445 		break;
446 	case SAFEXCEL_SM4:
447 		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
448 			goto badkey;
449 		break;
450 	default:
451 		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
452 		goto badkey;
453 	}
454 
455 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
456 		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
457 			if (le32_to_cpu(ctx->key[i]) !=
458 			    ((u32 *)keys.enckey)[i]) {
459 				ctx->base.needs_inv = true;
460 				break;
461 			}
462 		}
463 	}
464 
465 	/* Auth key */
466 	switch (ctx->hash_alg) {
467 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
468 		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
469 					 keys.authkeylen, &istate, &ostate))
470 			goto badkey;
471 		break;
472 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
473 		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
474 					 keys.authkeylen, &istate, &ostate))
475 			goto badkey;
476 		break;
477 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
478 		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
479 					 keys.authkeylen, &istate, &ostate))
480 			goto badkey;
481 		break;
482 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
483 		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
484 					 keys.authkeylen, &istate, &ostate))
485 			goto badkey;
486 		break;
487 	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
488 		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
489 					 keys.authkeylen, &istate, &ostate))
490 			goto badkey;
491 		break;
492 	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
493 		if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
494 					 keys.authkeylen, &istate, &ostate))
495 			goto badkey;
496 		break;
497 	default:
498 		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
499 		goto badkey;
500 	}
501 
502 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
503 	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
504 	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
505 		ctx->base.needs_inv = true;
506 
507 	/* Now copy the keys into the context */
508 	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
509 		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
510 	ctx->key_len = keys.enckeylen;
511 
512 	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
513 	memcpy(ctx->opad, &ostate.state, ctx->state_sz);
514 
515 	memzero_explicit(&keys, sizeof(keys));
516 	return 0;
517 
518 badkey:
519 	memzero_explicit(&keys, sizeof(keys));
520 	return err;
521 }
522 
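/*
 * Fill in the control words (control0/control1) of the first command
 * descriptor: cipher mode, context size (key plus any hash state),
 * operation type (encrypt/decrypt, hash-then-crypt vs. crypt-then-hash)
 * and the cipher/hash algorithm identifiers.
 */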
523 static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
524 				    struct crypto_async_request *async,
525 				    struct safexcel_cipher_req *sreq,
526 				    struct safexcel_command_desc *cdesc)
527 {
528 	struct safexcel_crypto_priv *priv = ctx->priv;
529 	int ctrl_size = ctx->key_len / sizeof(u32);
530 
531 	cdesc->control_data.control1 = ctx->mode;
532 
533 	if (ctx->aead) {
534 		/* Take into account the ipad+opad digests */
535 		if (ctx->xcm) {
536 			ctrl_size += ctx->state_sz / sizeof(u32);
537 			cdesc->control_data.control0 =
538 				CONTEXT_CONTROL_KEY_EN |
539 				CONTEXT_CONTROL_DIGEST_XCM |
540 				ctx->hash_alg |
541 				CONTEXT_CONTROL_SIZE(ctrl_size);
542 		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
543 			/* Chacha20-Poly1305 */
544 			cdesc->control_data.control0 =
545 				CONTEXT_CONTROL_KEY_EN |
546 				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
547 				(sreq->direction == SAFEXCEL_ENCRYPT ?
548 					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
549 					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
550 				ctx->hash_alg |
551 				CONTEXT_CONTROL_SIZE(ctrl_size);
552 			return 0;
553 		} else {
554 			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
555 			cdesc->control_data.control0 =
556 				CONTEXT_CONTROL_KEY_EN |
557 				CONTEXT_CONTROL_DIGEST_HMAC |
558 				ctx->hash_alg |
559 				CONTEXT_CONTROL_SIZE(ctrl_size);
560 		}
561 
562 		if (sreq->direction == SAFEXCEL_ENCRYPT &&
563 		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
564 		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
565 			cdesc->control_data.control0 |=
566 				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
567 		else if (sreq->direction == SAFEXCEL_ENCRYPT)
568 			cdesc->control_data.control0 |=
569 				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
570 		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
571 			cdesc->control_data.control0 |=
572 				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
573 		else
574 			cdesc->control_data.control0 |=
575 				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
576 	} else {
577 		if (sreq->direction == SAFEXCEL_ENCRYPT)
578 			cdesc->control_data.control0 =
579 				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
580 				CONTEXT_CONTROL_KEY_EN |
581 				CONTEXT_CONTROL_SIZE(ctrl_size);
582 		else
583 			cdesc->control_data.control0 =
584 				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
585 				CONTEXT_CONTROL_KEY_EN |
586 				CONTEXT_CONTROL_SIZE(ctrl_size);
587 	}
588 
589 	if (ctx->alg == SAFEXCEL_DES) {
590 		cdesc->control_data.control0 |=
591 			CONTEXT_CONTROL_CRYPTO_ALG_DES;
592 	} else if (ctx->alg == SAFEXCEL_3DES) {
593 		cdesc->control_data.control0 |=
594 			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
595 	} else if (ctx->alg == SAFEXCEL_AES) {
596 		switch (ctx->key_len >> ctx->xts) {
597 		case AES_KEYSIZE_128:
598 			cdesc->control_data.control0 |=
599 				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
600 			break;
601 		case AES_KEYSIZE_192:
602 			cdesc->control_data.control0 |=
603 				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
604 			break;
605 		case AES_KEYSIZE_256:
606 			cdesc->control_data.control0 |=
607 				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
608 			break;
609 		default:
610 			dev_err(priv->dev, "aes keysize not supported: %u\n",
611 				ctx->key_len >> ctx->xts);
612 			return -EINVAL;
613 		}
614 	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
615 		cdesc->control_data.control0 |=
616 			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
617 	} else if (ctx->alg == SAFEXCEL_SM4) {
618 		cdesc->control_data.control0 |=
619 			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
620 	}
621 
622 	return 0;
623 }
624 
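/*
 * Completion handler for a normal (non-invalidation) request: pop and
 * error-check all result descriptors, unmap the source/destination
 * scatterlists and, for CBC skcipher encryption, copy the last output
 * block back into req->iv so it holds the output IV.
 */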
625 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
626 				      struct crypto_async_request *async,
627 				      struct scatterlist *src,
628 				      struct scatterlist *dst,
629 				      unsigned int cryptlen,
630 				      struct safexcel_cipher_req *sreq,
631 				      bool *should_complete, int *ret)
632 {
633 	struct skcipher_request *areq = skcipher_request_cast(async);
634 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
635 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
636 	struct safexcel_result_desc *rdesc;
637 	int ndesc = 0;
638 
639 	*ret = 0;
640 
641 	if (unlikely(!sreq->rdescs))
642 		return 0;
643 
644 	while (sreq->rdescs--) {
645 		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
646 		if (IS_ERR(rdesc)) {
647 			dev_err(priv->dev,
648 				"cipher: result: could not retrieve the result descriptor\n");
649 			*ret = PTR_ERR(rdesc);
650 			break;
651 		}
652 
653 		if (likely(!*ret))
654 			*ret = safexcel_rdesc_check_errors(priv, rdesc);
655 
656 		ndesc++;
657 	}
658 
659 	safexcel_complete(priv, ring);
660 
661 	if (src == dst) {
662 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
663 	} else {
664 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
665 		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
666 	}
667 
668 	/*
669 	 * Update IV in req from last crypto output block for CBC modes
670 	 */
671 	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
672 	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
673 		/* For encrypt take the last output block */
674 		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
675 				   crypto_skcipher_ivsize(skcipher),
676 				   (cryptlen -
677 				    crypto_skcipher_ivsize(skcipher)));
678 	}
679 
680 	*should_complete = true;
681 
682 	return ndesc;
683 }
684 
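/*
 * Translate one crypto request into command (input) and result (output)
 * descriptor chains on the given ring. The context record receives the
 * key and, for AEAD, the ipad/opad state. Zero-length input gets a one
 * byte dummy command descriptor because the engine cannot handle empty
 * packets, and result descriptors skip over the AAD area, which the
 * engine does not write back. The number of descriptors used is returned
 * via *commands and *results.
 */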
685 static int safexcel_send_req(struct crypto_async_request *base, int ring,
686 			     struct safexcel_cipher_req *sreq,
687 			     struct scatterlist *src, struct scatterlist *dst,
688 			     unsigned int cryptlen, unsigned int assoclen,
689 			     unsigned int digestsize, u8 *iv, int *commands,
690 			     int *results)
691 {
692 	struct skcipher_request *areq = skcipher_request_cast(base);
693 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
694 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
695 	struct safexcel_crypto_priv *priv = ctx->priv;
696 	struct safexcel_command_desc *cdesc;
697 	struct safexcel_command_desc *first_cdesc = NULL;
698 	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
699 	struct scatterlist *sg;
700 	unsigned int totlen;
701 	unsigned int totlen_src = cryptlen + assoclen;
702 	unsigned int totlen_dst = totlen_src;
703 	struct safexcel_token *atoken;
704 	int n_cdesc = 0, n_rdesc = 0;
705 	int queued, i, ret = 0;
706 	bool first = true;
707 
708 	sreq->nr_src = sg_nents_for_len(src, totlen_src);
709 
710 	if (ctx->aead) {
711 		/*
712 		 * For AEAD, the auth tag is appended to the output for encrypt
713 		 * and stripped from the output for decrypt!
714 		 */
715 		if (sreq->direction == SAFEXCEL_DECRYPT)
716 			totlen_dst -= digestsize;
717 		else
718 			totlen_dst += digestsize;
719 
720 		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
721 		       ctx->ipad, ctx->state_sz);
722 		if (!ctx->xcm)
723 			memcpy(ctx->base.ctxr->data + (ctx->key_len +
724 			       ctx->state_sz) / sizeof(u32), ctx->opad,
725 			       ctx->state_sz);
726 	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
727 		   (sreq->direction == SAFEXCEL_DECRYPT)) {
728 		/*
729 		 * Save IV from last crypto input word for CBC modes in decrypt
730 		 * direction. Need to do this first in case of an in-place operation
731 		 * as it will be overwritten.
732 		 */
733 		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
734 				   crypto_skcipher_ivsize(skcipher),
735 				   (totlen_src -
736 				    crypto_skcipher_ivsize(skcipher)));
737 	}
738 
739 	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
740 
741 	/*
742 	 * Remember actual input length, source buffer length may be
743 	 * updated in case of an in-place operation below.
744 	 */
745 	totlen = totlen_src;
746 	queued = totlen_src;
747 
748 	if (src == dst) {
749 		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
750 		sreq->nr_dst = sreq->nr_src;
751 		if (unlikely((totlen_src || totlen_dst) &&
752 		    (sreq->nr_src <= 0))) {
753 			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
754 				max(totlen_src, totlen_dst));
755 			return -EINVAL;
756 		}
757 		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
758 	} else {
759 		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
760 			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
761 				totlen_src);
762 			return -EINVAL;
763 		}
764 		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
765 
766 		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
767 			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
768 				totlen_dst);
769 			dma_unmap_sg(priv->dev, src, sreq->nr_src,
770 				     DMA_TO_DEVICE);
771 			return -EINVAL;
772 		}
773 		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
774 	}
775 
776 	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
777 
778 	if (!totlen) {
779 		/*
780 		 * The EIP97 cannot deal with zero length input packets!
781 		 * So stuff a dummy command descriptor indicating a 1 byte
782 		 * (dummy) input packet, using the context record as source.
783 		 */
784 		first_cdesc = safexcel_add_cdesc(priv, ring,
785 						 1, 1, ctx->base.ctxr_dma,
786 						 1, 1, ctx->base.ctxr_dma,
787 						 &atoken);
788 		if (IS_ERR(first_cdesc)) {
789 			/* No space left in the command descriptor ring */
790 			ret = PTR_ERR(first_cdesc);
791 			goto cdesc_rollback;
792 		}
793 		n_cdesc = 1;
794 		goto skip_cdesc;
795 	}
796 
797 	/* command descriptors */
798 	for_each_sg(src, sg, sreq->nr_src, i) {
799 		int len = sg_dma_len(sg);
800 
801 		/* Do not overflow the request */
802 		if (queued < len)
803 			len = queued;
804 
805 		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
806 					   !(queued - len),
807 					   sg_dma_address(sg), len, totlen,
808 					   ctx->base.ctxr_dma, &atoken);
809 		if (IS_ERR(cdesc)) {
810 			/* No space left in the command descriptor ring */
811 			ret = PTR_ERR(cdesc);
812 			goto cdesc_rollback;
813 		}
814 
815 		if (!n_cdesc)
816 			first_cdesc = cdesc;
817 
818 		n_cdesc++;
819 		queued -= len;
820 		if (!queued)
821 			break;
822 	}
823 skip_cdesc:
824 	/* Add context control words and token to first command descriptor */
825 	safexcel_context_control(ctx, base, sreq, first_cdesc);
826 	if (ctx->aead)
827 		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
828 				    sreq->direction, cryptlen,
829 				    assoclen, digestsize);
830 	else
831 		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
832 					cryptlen);
833 
834 	/* result descriptors */
835 	for_each_sg(dst, sg, sreq->nr_dst, i) {
836 		bool last = (i == sreq->nr_dst - 1);
837 		u32 len = sg_dma_len(sg);
838 
839 		/* only allow the part of the buffer we know we need */
840 		if (len > totlen_dst)
841 			len = totlen_dst;
842 		if (unlikely(!len))
843 			break;
844 		totlen_dst -= len;
845 
846 		/* skip over AAD space in buffer - not written */
847 		if (assoclen) {
848 			if (assoclen >= len) {
849 				assoclen -= len;
850 				continue;
851 			}
852 			rdesc = safexcel_add_rdesc(priv, ring, first, last,
853 						   sg_dma_address(sg) +
854 						   assoclen,
855 						   len - assoclen);
856 			assoclen = 0;
857 		} else {
858 			rdesc = safexcel_add_rdesc(priv, ring, first, last,
859 						   sg_dma_address(sg),
860 						   len);
861 		}
862 		if (IS_ERR(rdesc)) {
863 			/* No space left in the result descriptor ring */
864 			ret = PTR_ERR(rdesc);
865 			goto rdesc_rollback;
866 		}
867 		if (first) {
868 			first_rdesc = rdesc;
869 			first = false;
870 		}
871 		n_rdesc++;
872 	}
873 
874 	if (unlikely(first)) {
875 		/*
876 		 * Special case: AEAD decrypt with only AAD data.
877 		 * In this case there is NO output data from the engine,
878 		 * but the engine still needs a result descriptor!
879 		 * Create a dummy one just for catching the result token.
880 		 */
881 		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
882 		if (IS_ERR(rdesc)) {
883 			/* No space left in the result descriptor ring */
884 			ret = PTR_ERR(rdesc);
885 			goto rdesc_rollback;
886 		}
887 		first_rdesc = rdesc;
888 		n_rdesc = 1;
889 	}
890 
891 	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
892 
893 	*commands = n_cdesc;
894 	*results = n_rdesc;
895 	return 0;
896 
897 rdesc_rollback:
898 	for (i = 0; i < n_rdesc; i++)
899 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
900 cdesc_rollback:
901 	for (i = 0; i < n_cdesc; i++)
902 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
903 
904 	if (src == dst) {
905 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
906 	} else {
907 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
908 		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
909 	}
910 
911 	return ret;
912 }
913 
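/*
 * Completion path for a context invalidation request: on final tfm
 * teardown the context record is freed, otherwise the original request is
 * re-queued on a (possibly different) ring now that the stale cache entry
 * is gone.
 */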
914 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
915 				      int ring,
916 				      struct crypto_async_request *base,
917 				      struct safexcel_cipher_req *sreq,
918 				      bool *should_complete, int *ret)
919 {
920 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
921 	struct safexcel_result_desc *rdesc;
922 	int ndesc = 0, enq_ret;
923 
924 	*ret = 0;
925 
926 	if (unlikely(!sreq->rdescs))
927 		return 0;
928 
929 	while (sreq->rdescs--) {
930 		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
931 		if (IS_ERR(rdesc)) {
932 			dev_err(priv->dev,
933 				"cipher: invalidate: could not retrieve the result descriptor\n");
934 			*ret = PTR_ERR(rdesc);
935 			break;
936 		}
937 
938 		if (likely(!*ret))
939 			*ret = safexcel_rdesc_check_errors(priv, rdesc);
940 
941 		ndesc++;
942 	}
943 
944 	safexcel_complete(priv, ring);
945 
946 	if (ctx->base.exit_inv) {
947 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
948 			      ctx->base.ctxr_dma);
949 
950 		*should_complete = true;
951 
952 		return ndesc;
953 	}
954 
955 	ring = safexcel_select_ring(priv);
956 	ctx->base.ring = ring;
957 
958 	spin_lock_bh(&priv->ring[ring].queue_lock);
959 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
960 	spin_unlock_bh(&priv->ring[ring].queue_lock);
961 
962 	if (enq_ret != -EINPROGRESS)
963 		*ret = enq_ret;
964 
965 	queue_work(priv->ring[ring].workqueue,
966 		   &priv->ring[ring].work_data.work);
967 
968 	*should_complete = false;
969 
970 	return ndesc;
971 }
972 
973 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
974 					   int ring,
975 					   struct crypto_async_request *async,
976 					   bool *should_complete, int *ret)
977 {
978 	struct skcipher_request *req = skcipher_request_cast(async);
979 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
980 	int err;
981 
982 	if (sreq->needs_inv) {
983 		sreq->needs_inv = false;
984 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
985 						 should_complete, ret);
986 	} else {
987 		err = safexcel_handle_req_result(priv, ring, async, req->src,
988 						 req->dst, req->cryptlen, sreq,
989 						 should_complete, ret);
990 	}
991 
992 	return err;
993 }
994 
995 static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
996 				       int ring,
997 				       struct crypto_async_request *async,
998 				       bool *should_complete, int *ret)
999 {
1000 	struct aead_request *req = aead_request_cast(async);
1001 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1002 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1003 	int err;
1004 
1005 	if (sreq->needs_inv) {
1006 		sreq->needs_inv = false;
1007 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
1008 						 should_complete, ret);
1009 	} else {
1010 		err = safexcel_handle_req_result(priv, ring, async, req->src,
1011 						 req->dst,
1012 						 req->cryptlen + crypto_aead_authsize(tfm),
1013 						 sreq, should_complete, ret);
1014 	}
1015 
1016 	return err;
1017 }
1018 
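/*
 * Emit a single command/result descriptor pair that invalidates the
 * cached context record for this tfm.
 */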
1019 static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1020 				    int ring, int *commands, int *results)
1021 {
1022 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1023 	struct safexcel_crypto_priv *priv = ctx->priv;
1024 	int ret;
1025 
1026 	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1027 	if (unlikely(ret))
1028 		return ret;
1029 
1030 	*commands = 1;
1031 	*results = 1;
1032 
1033 	return 0;
1034 }
1035 
1036 static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1037 				  int *commands, int *results)
1038 {
1039 	struct skcipher_request *req = skcipher_request_cast(async);
1040 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1041 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1042 	struct safexcel_crypto_priv *priv = ctx->priv;
1043 	int ret;
1044 
1045 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1046 
1047 	if (sreq->needs_inv) {
1048 		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1049 	} else {
1050 		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1051 		u8 input_iv[AES_BLOCK_SIZE];
1052 
1053 		/*
1054 		 * Save the input IV in case of CBC decrypt mode, as it will
1055 		 * be overwritten with the output IV prior to use!
1056 		 */
1057 		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1058 
1059 		ret = safexcel_send_req(async, ring, sreq, req->src,
1060 					req->dst, req->cryptlen, 0, 0, input_iv,
1061 					commands, results);
1062 	}
1063 
1064 	sreq->rdescs = *results;
1065 	return ret;
1066 }
1067 
1068 static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1069 			      int *commands, int *results)
1070 {
1071 	struct aead_request *req = aead_request_cast(async);
1072 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1073 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1074 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1075 	struct safexcel_crypto_priv *priv = ctx->priv;
1076 	int ret;
1077 
1078 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1079 
1080 	if (sreq->needs_inv)
1081 		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1082 	else
1083 		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1084 					req->cryptlen, req->assoclen,
1085 					crypto_aead_authsize(tfm), req->iv,
1086 					commands, results);
1087 	sreq->rdescs = *results;
1088 	return ret;
1089 }
1090 
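/*
 * Synchronously invalidate the context record on tfm teardown: queue a
 * dummy request flagged with needs_inv/exit_inv on the context's current
 * ring and wait for its completion callback; the record itself is freed
 * in the invalidation result handler.
 */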
1091 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1092 				    struct crypto_async_request *base,
1093 				    struct safexcel_cipher_req *sreq,
1094 				    struct safexcel_inv_result *result)
1095 {
1096 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1097 	struct safexcel_crypto_priv *priv = ctx->priv;
1098 	int ring = ctx->base.ring;
1099 
1100 	init_completion(&result->completion);
1101 
1102 	ctx = crypto_tfm_ctx(base->tfm);
1103 	ctx->base.exit_inv = true;
1104 	sreq->needs_inv = true;
1105 
1106 	spin_lock_bh(&priv->ring[ring].queue_lock);
1107 	crypto_enqueue_request(&priv->ring[ring].queue, base);
1108 	spin_unlock_bh(&priv->ring[ring].queue_lock);
1109 
1110 	queue_work(priv->ring[ring].workqueue,
1111 		   &priv->ring[ring].work_data.work);
1112 
1113 	wait_for_completion(&result->completion);
1114 
1115 	if (result->error) {
1116 		dev_warn(priv->dev,
1117 			"cipher: sync: invalidate: completion error %d\n",
1118 			 result->error);
1119 		return result->error;
1120 	}
1121 
1122 	return 0;
1123 }
1124 
1125 static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1126 {
1127 	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1128 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1129 	struct safexcel_inv_result result = {};
1130 
1131 	memset(req, 0, sizeof(struct skcipher_request));
1132 
1133 	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1134 				      safexcel_inv_complete, &result);
1135 	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1136 
1137 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1138 }
1139 
1140 static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1141 {
1142 	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1143 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1144 	struct safexcel_inv_result result = {};
1145 
1146 	memset(req, 0, sizeof(struct aead_request));
1147 
1148 	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1149 				  safexcel_inv_complete, &result);
1150 	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1151 
1152 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1153 }
1154 
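/*
 * Common entry point for encrypt/decrypt: allocate the DMA context record
 * on first use, flag an invalidation if a cached record has gone stale,
 * then enqueue the request and kick the ring worker.
 */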
1155 static int safexcel_queue_req(struct crypto_async_request *base,
1156 			struct safexcel_cipher_req *sreq,
1157 			enum safexcel_cipher_direction dir)
1158 {
1159 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1160 	struct safexcel_crypto_priv *priv = ctx->priv;
1161 	int ret, ring;
1162 
1163 	sreq->needs_inv = false;
1164 	sreq->direction = dir;
1165 
1166 	if (ctx->base.ctxr) {
1167 		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1168 			sreq->needs_inv = true;
1169 			ctx->base.needs_inv = false;
1170 		}
1171 	} else {
1172 		ctx->base.ring = safexcel_select_ring(priv);
1173 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1174 						 EIP197_GFP_FLAGS(*base),
1175 						 &ctx->base.ctxr_dma);
1176 		if (!ctx->base.ctxr)
1177 			return -ENOMEM;
1178 	}
1179 
1180 	ring = ctx->base.ring;
1181 
1182 	spin_lock_bh(&priv->ring[ring].queue_lock);
1183 	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1184 	spin_unlock_bh(&priv->ring[ring].queue_lock);
1185 
1186 	queue_work(priv->ring[ring].workqueue,
1187 		   &priv->ring[ring].work_data.work);
1188 
1189 	return ret;
1190 }
1191 
1192 static int safexcel_encrypt(struct skcipher_request *req)
1193 {
1194 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1195 			SAFEXCEL_ENCRYPT);
1196 }
1197 
1198 static int safexcel_decrypt(struct skcipher_request *req)
1199 {
1200 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1201 			SAFEXCEL_DECRYPT);
1202 }
1203 
1204 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1205 {
1206 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1207 	struct safexcel_alg_template *tmpl =
1208 		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1209 			     alg.skcipher.base);
1210 
1211 	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1212 				    sizeof(struct safexcel_cipher_req));
1213 
1214 	ctx->priv = tmpl->priv;
1215 
1216 	ctx->base.send = safexcel_skcipher_send;
1217 	ctx->base.handle_result = safexcel_skcipher_handle_result;
1218 	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1219 	ctx->ctrinit = 1;
1220 	return 0;
1221 }
1222 
1223 static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1224 {
1225 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1226 
1227 	memzero_explicit(ctx->key, sizeof(ctx->key));
1228 
1229 	/* context not allocated, skip invalidation */
1230 	if (!ctx->base.ctxr)
1231 		return -ENOMEM;
1232 
1233 	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1234 	return 0;
1235 }
1236 
1237 static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1238 {
1239 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1240 	struct safexcel_crypto_priv *priv = ctx->priv;
1241 	int ret;
1242 
1243 	if (safexcel_cipher_cra_exit(tfm))
1244 		return;
1245 
1246 	if (priv->flags & EIP197_TRC_CACHE) {
1247 		ret = safexcel_skcipher_exit_inv(tfm);
1248 		if (ret)
1249 			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1250 				 ret);
1251 	} else {
1252 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1253 			      ctx->base.ctxr_dma);
1254 	}
1255 }
1256 
1257 static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1258 {
1259 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1260 	struct safexcel_crypto_priv *priv = ctx->priv;
1261 	int ret;
1262 
1263 	if (safexcel_cipher_cra_exit(tfm))
1264 		return;
1265 
1266 	if (priv->flags & EIP197_TRC_CACHE) {
1267 		ret = safexcel_aead_exit_inv(tfm);
1268 		if (ret)
1269 			dev_warn(priv->dev, "aead: invalidation error %d\n",
1270 				 ret);
1271 	} else {
1272 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1273 			      ctx->base.ctxr_dma);
1274 	}
1275 }
1276 
1277 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1278 {
1279 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1280 
1281 	safexcel_skcipher_cra_init(tfm);
1282 	ctx->alg  = SAFEXCEL_AES;
1283 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1284 	ctx->blocksz = 0;
1285 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1286 	return 0;
1287 }
1288 
1289 struct safexcel_alg_template safexcel_alg_ecb_aes = {
1290 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1291 	.algo_mask = SAFEXCEL_ALG_AES,
1292 	.alg.skcipher = {
1293 		.setkey = safexcel_skcipher_aes_setkey,
1294 		.encrypt = safexcel_encrypt,
1295 		.decrypt = safexcel_decrypt,
1296 		.min_keysize = AES_MIN_KEY_SIZE,
1297 		.max_keysize = AES_MAX_KEY_SIZE,
1298 		.base = {
1299 			.cra_name = "ecb(aes)",
1300 			.cra_driver_name = "safexcel-ecb-aes",
1301 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1302 			.cra_flags = CRYPTO_ALG_ASYNC |
1303 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1304 			.cra_blocksize = AES_BLOCK_SIZE,
1305 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1306 			.cra_alignmask = 0,
1307 			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
1308 			.cra_exit = safexcel_skcipher_cra_exit,
1309 			.cra_module = THIS_MODULE,
1310 		},
1311 	},
1312 };
1313 
1314 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1315 {
1316 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1317 
1318 	safexcel_skcipher_cra_init(tfm);
1319 	ctx->alg  = SAFEXCEL_AES;
1320 	ctx->blocksz = AES_BLOCK_SIZE;
1321 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1322 	return 0;
1323 }
1324 
1325 struct safexcel_alg_template safexcel_alg_cbc_aes = {
1326 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1327 	.algo_mask = SAFEXCEL_ALG_AES,
1328 	.alg.skcipher = {
1329 		.setkey = safexcel_skcipher_aes_setkey,
1330 		.encrypt = safexcel_encrypt,
1331 		.decrypt = safexcel_decrypt,
1332 		.min_keysize = AES_MIN_KEY_SIZE,
1333 		.max_keysize = AES_MAX_KEY_SIZE,
1334 		.ivsize = AES_BLOCK_SIZE,
1335 		.base = {
1336 			.cra_name = "cbc(aes)",
1337 			.cra_driver_name = "safexcel-cbc-aes",
1338 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1339 			.cra_flags = CRYPTO_ALG_ASYNC |
1340 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1341 			.cra_blocksize = AES_BLOCK_SIZE,
1342 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1343 			.cra_alignmask = 0,
1344 			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
1345 			.cra_exit = safexcel_skcipher_cra_exit,
1346 			.cra_module = THIS_MODULE,
1347 		},
1348 	},
1349 };
1350 
1351 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1352 {
1353 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1354 
1355 	safexcel_skcipher_cra_init(tfm);
1356 	ctx->alg  = SAFEXCEL_AES;
1357 	ctx->blocksz = AES_BLOCK_SIZE;
1358 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1359 	return 0;
1360 }
1361 
1362 struct safexcel_alg_template safexcel_alg_cfb_aes = {
1363 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1364 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1365 	.alg.skcipher = {
1366 		.setkey = safexcel_skcipher_aes_setkey,
1367 		.encrypt = safexcel_encrypt,
1368 		.decrypt = safexcel_decrypt,
1369 		.min_keysize = AES_MIN_KEY_SIZE,
1370 		.max_keysize = AES_MAX_KEY_SIZE,
1371 		.ivsize = AES_BLOCK_SIZE,
1372 		.base = {
1373 			.cra_name = "cfb(aes)",
1374 			.cra_driver_name = "safexcel-cfb-aes",
1375 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1376 			.cra_flags = CRYPTO_ALG_ASYNC |
1377 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1378 			.cra_blocksize = 1,
1379 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1380 			.cra_alignmask = 0,
1381 			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
1382 			.cra_exit = safexcel_skcipher_cra_exit,
1383 			.cra_module = THIS_MODULE,
1384 		},
1385 	},
1386 };
1387 
1388 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1389 {
1390 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1391 
1392 	safexcel_skcipher_cra_init(tfm);
1393 	ctx->alg  = SAFEXCEL_AES;
1394 	ctx->blocksz = AES_BLOCK_SIZE;
1395 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1396 	return 0;
1397 }
1398 
1399 struct safexcel_alg_template safexcel_alg_ofb_aes = {
1400 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1401 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1402 	.alg.skcipher = {
1403 		.setkey = safexcel_skcipher_aes_setkey,
1404 		.encrypt = safexcel_encrypt,
1405 		.decrypt = safexcel_decrypt,
1406 		.min_keysize = AES_MIN_KEY_SIZE,
1407 		.max_keysize = AES_MAX_KEY_SIZE,
1408 		.ivsize = AES_BLOCK_SIZE,
1409 		.base = {
1410 			.cra_name = "ofb(aes)",
1411 			.cra_driver_name = "safexcel-ofb-aes",
1412 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1413 			.cra_flags = CRYPTO_ALG_ASYNC |
1414 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1415 			.cra_blocksize = 1,
1416 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1417 			.cra_alignmask = 0,
1418 			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
1419 			.cra_exit = safexcel_skcipher_cra_exit,
1420 			.cra_module = THIS_MODULE,
1421 		},
1422 	},
1423 };
1424 
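/*
 * rfc3686(ctr(aes)) setkey: the last 4 key bytes are the per-tfm nonce,
 * e.g. a 20-byte blob is a 16-byte AES-128 key followed by the 32-bit
 * nonce that ends up in ctx->nonce.
 */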
1425 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1426 					   const u8 *key, unsigned int len)
1427 {
1428 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1429 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1430 	struct safexcel_crypto_priv *priv = ctx->priv;
1431 	struct crypto_aes_ctx aes;
1432 	int ret, i;
1433 	unsigned int keylen;
1434 
1435 	/* last 4 bytes of key are the nonce! */
1436 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1437 	/* exclude the nonce here */
1438 	keylen = len - CTR_RFC3686_NONCE_SIZE;
1439 	ret = aes_expandkey(&aes, key, keylen);
1440 	if (ret)
1441 		return ret;
1442 
1443 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1444 		for (i = 0; i < keylen / sizeof(u32); i++) {
1445 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1446 				ctx->base.needs_inv = true;
1447 				break;
1448 			}
1449 		}
1450 	}
1451 
1452 	for (i = 0; i < keylen / sizeof(u32); i++)
1453 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1454 
1455 	ctx->key_len = keylen;
1456 
1457 	memzero_explicit(&aes, sizeof(aes));
1458 	return 0;
1459 }
1460 
1461 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1462 {
1463 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1464 
1465 	safexcel_skcipher_cra_init(tfm);
1466 	ctx->alg  = SAFEXCEL_AES;
1467 	ctx->blocksz = AES_BLOCK_SIZE;
1468 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1469 	return 0;
1470 }
1471 
1472 struct safexcel_alg_template safexcel_alg_ctr_aes = {
1473 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1474 	.algo_mask = SAFEXCEL_ALG_AES,
1475 	.alg.skcipher = {
1476 		.setkey = safexcel_skcipher_aesctr_setkey,
1477 		.encrypt = safexcel_encrypt,
1478 		.decrypt = safexcel_decrypt,
1479 		/* Add nonce size */
1480 		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1481 		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1482 		.ivsize = CTR_RFC3686_IV_SIZE,
1483 		.base = {
1484 			.cra_name = "rfc3686(ctr(aes))",
1485 			.cra_driver_name = "safexcel-ctr-aes",
1486 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1487 			.cra_flags = CRYPTO_ALG_ASYNC |
1488 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1489 			.cra_blocksize = 1,
1490 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1491 			.cra_alignmask = 0,
1492 			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
1493 			.cra_exit = safexcel_skcipher_cra_exit,
1494 			.cra_module = THIS_MODULE,
1495 		},
1496 	},
1497 };
1498 
1499 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1500 			       unsigned int len)
1501 {
1502 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1503 	struct safexcel_crypto_priv *priv = ctx->priv;
1504 	int ret;
1505 
1506 	ret = verify_skcipher_des_key(ctfm, key);
1507 	if (ret)
1508 		return ret;
1509 
1510 	/* if context exists and key changed, need to invalidate it */
1511 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1512 		if (memcmp(ctx->key, key, len))
1513 			ctx->base.needs_inv = true;
1514 
1515 	memcpy(ctx->key, key, len);
1516 	ctx->key_len = len;
1517 
1518 	return 0;
1519 }
1520 
1521 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1522 {
1523 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1524 
1525 	safexcel_skcipher_cra_init(tfm);
1526 	ctx->alg  = SAFEXCEL_DES;
1527 	ctx->blocksz = DES_BLOCK_SIZE;
1528 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1529 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1530 	return 0;
1531 }
1532 
1533 struct safexcel_alg_template safexcel_alg_cbc_des = {
1534 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1535 	.algo_mask = SAFEXCEL_ALG_DES,
1536 	.alg.skcipher = {
1537 		.setkey = safexcel_des_setkey,
1538 		.encrypt = safexcel_encrypt,
1539 		.decrypt = safexcel_decrypt,
1540 		.min_keysize = DES_KEY_SIZE,
1541 		.max_keysize = DES_KEY_SIZE,
1542 		.ivsize = DES_BLOCK_SIZE,
1543 		.base = {
1544 			.cra_name = "cbc(des)",
1545 			.cra_driver_name = "safexcel-cbc-des",
1546 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1547 			.cra_flags = CRYPTO_ALG_ASYNC |
1548 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1549 			.cra_blocksize = DES_BLOCK_SIZE,
1550 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1551 			.cra_alignmask = 0,
1552 			.cra_init = safexcel_skcipher_des_cbc_cra_init,
1553 			.cra_exit = safexcel_skcipher_cra_exit,
1554 			.cra_module = THIS_MODULE,
1555 		},
1556 	},
1557 };
1558 
1559 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1560 {
1561 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1562 
1563 	safexcel_skcipher_cra_init(tfm);
1564 	ctx->alg  = SAFEXCEL_DES;
1565 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1566 	ctx->blocksz = 0;
1567 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1568 	return 0;
1569 }
1570 
1571 struct safexcel_alg_template safexcel_alg_ecb_des = {
1572 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1573 	.algo_mask = SAFEXCEL_ALG_DES,
1574 	.alg.skcipher = {
1575 		.setkey = safexcel_des_setkey,
1576 		.encrypt = safexcel_encrypt,
1577 		.decrypt = safexcel_decrypt,
1578 		.min_keysize = DES_KEY_SIZE,
1579 		.max_keysize = DES_KEY_SIZE,
1580 		.base = {
1581 			.cra_name = "ecb(des)",
1582 			.cra_driver_name = "safexcel-ecb-des",
1583 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1584 			.cra_flags = CRYPTO_ALG_ASYNC |
1585 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1586 			.cra_blocksize = DES_BLOCK_SIZE,
1587 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1588 			.cra_alignmask = 0,
1589 			.cra_init = safexcel_skcipher_des_ecb_cra_init,
1590 			.cra_exit = safexcel_skcipher_cra_exit,
1591 			.cra_module = THIS_MODULE,
1592 		},
1593 	},
1594 };
1595 
1596 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1597 				   const u8 *key, unsigned int len)
1598 {
1599 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1600 	struct safexcel_crypto_priv *priv = ctx->priv;
1601 	int err;
1602 
1603 	err = verify_skcipher_des3_key(ctfm, key);
1604 	if (err)
1605 		return err;
1606 
1607 	/* if context exists and key changed, need to invalidate it */
1608 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1609 		if (memcmp(ctx->key, key, len))
1610 			ctx->base.needs_inv = true;
1611 
1612 	memcpy(ctx->key, key, len);
1613 	ctx->key_len = len;
1614 
1615 	return 0;
1616 }
1617 
1618 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1619 {
1620 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1621 
1622 	safexcel_skcipher_cra_init(tfm);
1623 	ctx->alg  = SAFEXCEL_3DES;
1624 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1625 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1626 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1627 	return 0;
1628 }
1629 
1630 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1631 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1632 	.algo_mask = SAFEXCEL_ALG_DES,
1633 	.alg.skcipher = {
1634 		.setkey = safexcel_des3_ede_setkey,
1635 		.encrypt = safexcel_encrypt,
1636 		.decrypt = safexcel_decrypt,
1637 		.min_keysize = DES3_EDE_KEY_SIZE,
1638 		.max_keysize = DES3_EDE_KEY_SIZE,
1639 		.ivsize = DES3_EDE_BLOCK_SIZE,
1640 		.base = {
1641 			.cra_name = "cbc(des3_ede)",
1642 			.cra_driver_name = "safexcel-cbc-des3_ede",
1643 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1644 			.cra_flags = CRYPTO_ALG_ASYNC |
1645 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1646 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1647 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1648 			.cra_alignmask = 0,
1649 			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
1650 			.cra_exit = safexcel_skcipher_cra_exit,
1651 			.cra_module = THIS_MODULE,
1652 		},
1653 	},
1654 };
1655 
1656 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1657 {
1658 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1659 
1660 	safexcel_skcipher_cra_init(tfm);
1661 	ctx->alg  = SAFEXCEL_3DES;
1662 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1663 	ctx->blocksz = 0;
1664 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1665 	return 0;
1666 }
1667 
1668 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1669 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1670 	.algo_mask = SAFEXCEL_ALG_DES,
1671 	.alg.skcipher = {
1672 		.setkey = safexcel_des3_ede_setkey,
1673 		.encrypt = safexcel_encrypt,
1674 		.decrypt = safexcel_decrypt,
1675 		.min_keysize = DES3_EDE_KEY_SIZE,
1676 		.max_keysize = DES3_EDE_KEY_SIZE,
1677 		.base = {
1678 			.cra_name = "ecb(des3_ede)",
1679 			.cra_driver_name = "safexcel-ecb-des3_ede",
1680 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1681 			.cra_flags = CRYPTO_ALG_ASYNC |
1682 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1683 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1684 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1685 			.cra_alignmask = 0,
1686 			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
1687 			.cra_exit = safexcel_skcipher_cra_exit,
1688 			.cra_module = THIS_MODULE,
1689 		},
1690 	},
1691 };
1692 
1693 static int safexcel_aead_encrypt(struct aead_request *req)
1694 {
1695 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1696 
1697 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1698 }
1699 
1700 static int safexcel_aead_decrypt(struct aead_request *req)
1701 {
1702 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1703 
1704 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1705 }
1706 
1707 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1708 {
1709 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1710 	struct safexcel_alg_template *tmpl =
1711 		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1712 			     alg.aead.base);
1713 
1714 	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1715 				sizeof(struct safexcel_cipher_req));
1716 
1717 	ctx->priv = tmpl->priv;
1718 
1719 	ctx->alg  = SAFEXCEL_AES; /* default */
1720 	ctx->blocksz = AES_BLOCK_SIZE;
1721 	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1722 	ctx->ctrinit = 1;
1723 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1724 	ctx->aead = true;
1725 	ctx->base.send = safexcel_aead_send;
1726 	ctx->base.handle_result = safexcel_aead_handle_result;
1727 	return 0;
1728 }
1729 
1730 static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1731 {
1732 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1733 
1734 	safexcel_aead_cra_init(tfm);
1735 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1736 	ctx->state_sz = SHA1_DIGEST_SIZE;
1737 	return 0;
1738 }
1739 
1740 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1741 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1742 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1743 	.alg.aead = {
1744 		.setkey = safexcel_aead_setkey,
1745 		.encrypt = safexcel_aead_encrypt,
1746 		.decrypt = safexcel_aead_decrypt,
1747 		.ivsize = AES_BLOCK_SIZE,
1748 		.maxauthsize = SHA1_DIGEST_SIZE,
1749 		.base = {
1750 			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1751 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1752 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1753 			.cra_flags = CRYPTO_ALG_ASYNC |
1754 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1755 			.cra_blocksize = AES_BLOCK_SIZE,
1756 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1757 			.cra_alignmask = 0,
1758 			.cra_init = safexcel_aead_sha1_cra_init,
1759 			.cra_exit = safexcel_aead_cra_exit,
1760 			.cra_module = THIS_MODULE,
1761 		},
1762 	},
1763 };
1764 
1765 static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1766 {
1767 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1768 
1769 	safexcel_aead_cra_init(tfm);
1770 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1771 	ctx->state_sz = SHA256_DIGEST_SIZE;
1772 	return 0;
1773 }
1774 
1775 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1776 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1777 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1778 	.alg.aead = {
1779 		.setkey = safexcel_aead_setkey,
1780 		.encrypt = safexcel_aead_encrypt,
1781 		.decrypt = safexcel_aead_decrypt,
1782 		.ivsize = AES_BLOCK_SIZE,
1783 		.maxauthsize = SHA256_DIGEST_SIZE,
1784 		.base = {
1785 			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1786 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1787 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1788 			.cra_flags = CRYPTO_ALG_ASYNC |
1789 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1790 			.cra_blocksize = AES_BLOCK_SIZE,
1791 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1792 			.cra_alignmask = 0,
1793 			.cra_init = safexcel_aead_sha256_cra_init,
1794 			.cra_exit = safexcel_aead_cra_exit,
1795 			.cra_module = THIS_MODULE,
1796 		},
1797 	},
1798 };
1799 
1800 static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1801 {
1802 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1803 
1804 	safexcel_aead_cra_init(tfm);
1805 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1806 	ctx->state_sz = SHA256_DIGEST_SIZE;
1807 	return 0;
1808 }
1809 
1810 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1811 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1812 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1813 	.alg.aead = {
1814 		.setkey = safexcel_aead_setkey,
1815 		.encrypt = safexcel_aead_encrypt,
1816 		.decrypt = safexcel_aead_decrypt,
1817 		.ivsize = AES_BLOCK_SIZE,
1818 		.maxauthsize = SHA224_DIGEST_SIZE,
1819 		.base = {
1820 			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1821 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1822 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1823 			.cra_flags = CRYPTO_ALG_ASYNC |
1824 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1825 			.cra_blocksize = AES_BLOCK_SIZE,
1826 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1827 			.cra_alignmask = 0,
1828 			.cra_init = safexcel_aead_sha224_cra_init,
1829 			.cra_exit = safexcel_aead_cra_exit,
1830 			.cra_module = THIS_MODULE,
1831 		},
1832 	},
1833 };
1834 
1835 static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1836 {
1837 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1838 
1839 	safexcel_aead_cra_init(tfm);
1840 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1841 	ctx->state_sz = SHA512_DIGEST_SIZE;
1842 	return 0;
1843 }
1844 
1845 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1846 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1847 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1848 	.alg.aead = {
1849 		.setkey = safexcel_aead_setkey,
1850 		.encrypt = safexcel_aead_encrypt,
1851 		.decrypt = safexcel_aead_decrypt,
1852 		.ivsize = AES_BLOCK_SIZE,
1853 		.maxauthsize = SHA512_DIGEST_SIZE,
1854 		.base = {
1855 			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1856 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1857 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1858 			.cra_flags = CRYPTO_ALG_ASYNC |
1859 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1860 			.cra_blocksize = AES_BLOCK_SIZE,
1861 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1862 			.cra_alignmask = 0,
1863 			.cra_init = safexcel_aead_sha512_cra_init,
1864 			.cra_exit = safexcel_aead_cra_exit,
1865 			.cra_module = THIS_MODULE,
1866 		},
1867 	},
1868 };
1869 
1870 static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1871 {
1872 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1873 
1874 	safexcel_aead_cra_init(tfm);
1875 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1876 	ctx->state_sz = SHA512_DIGEST_SIZE;
1877 	return 0;
1878 }
1879 
1880 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1881 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1882 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1883 	.alg.aead = {
1884 		.setkey = safexcel_aead_setkey,
1885 		.encrypt = safexcel_aead_encrypt,
1886 		.decrypt = safexcel_aead_decrypt,
1887 		.ivsize = AES_BLOCK_SIZE,
1888 		.maxauthsize = SHA384_DIGEST_SIZE,
1889 		.base = {
1890 			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1891 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1892 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1893 			.cra_flags = CRYPTO_ALG_ASYNC |
1894 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1895 			.cra_blocksize = AES_BLOCK_SIZE,
1896 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1897 			.cra_alignmask = 0,
1898 			.cra_init = safexcel_aead_sha384_cra_init,
1899 			.cra_exit = safexcel_aead_cra_exit,
1900 			.cra_module = THIS_MODULE,
1901 		},
1902 	},
1903 };
1904 
1905 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1906 {
1907 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1908 
1909 	safexcel_aead_sha1_cra_init(tfm);
1910 	ctx->alg = SAFEXCEL_3DES; /* override default */
1911 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1912 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1913 	return 0;
1914 }
1915 
1916 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1917 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1918 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1919 	.alg.aead = {
1920 		.setkey = safexcel_aead_setkey,
1921 		.encrypt = safexcel_aead_encrypt,
1922 		.decrypt = safexcel_aead_decrypt,
1923 		.ivsize = DES3_EDE_BLOCK_SIZE,
1924 		.maxauthsize = SHA1_DIGEST_SIZE,
1925 		.base = {
1926 			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1927 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1928 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1929 			.cra_flags = CRYPTO_ALG_ASYNC |
1930 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1931 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1932 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1933 			.cra_alignmask = 0,
1934 			.cra_init = safexcel_aead_sha1_des3_cra_init,
1935 			.cra_exit = safexcel_aead_cra_exit,
1936 			.cra_module = THIS_MODULE,
1937 		},
1938 	},
1939 };
1940 
1941 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1942 {
1943 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1944 
1945 	safexcel_aead_sha256_cra_init(tfm);
1946 	ctx->alg = SAFEXCEL_3DES; /* override default */
1947 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1948 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1949 	return 0;
1950 }
1951 
1952 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1953 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1954 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1955 	.alg.aead = {
1956 		.setkey = safexcel_aead_setkey,
1957 		.encrypt = safexcel_aead_encrypt,
1958 		.decrypt = safexcel_aead_decrypt,
1959 		.ivsize = DES3_EDE_BLOCK_SIZE,
1960 		.maxauthsize = SHA256_DIGEST_SIZE,
1961 		.base = {
1962 			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1963 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1964 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1965 			.cra_flags = CRYPTO_ALG_ASYNC |
1966 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1967 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1968 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1969 			.cra_alignmask = 0,
1970 			.cra_init = safexcel_aead_sha256_des3_cra_init,
1971 			.cra_exit = safexcel_aead_cra_exit,
1972 			.cra_module = THIS_MODULE,
1973 		},
1974 	},
1975 };
1976 
1977 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1978 {
1979 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1980 
1981 	safexcel_aead_sha224_cra_init(tfm);
1982 	ctx->alg = SAFEXCEL_3DES; /* override default */
1983 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1984 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1985 	return 0;
1986 }
1987 
1988 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1989 	.type = SAFEXCEL_ALG_TYPE_AEAD,
1990 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1991 	.alg.aead = {
1992 		.setkey = safexcel_aead_setkey,
1993 		.encrypt = safexcel_aead_encrypt,
1994 		.decrypt = safexcel_aead_decrypt,
1995 		.ivsize = DES3_EDE_BLOCK_SIZE,
1996 		.maxauthsize = SHA224_DIGEST_SIZE,
1997 		.base = {
1998 			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1999 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
2000 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2001 			.cra_flags = CRYPTO_ALG_ASYNC |
2002 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2003 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2004 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2005 			.cra_alignmask = 0,
2006 			.cra_init = safexcel_aead_sha224_des3_cra_init,
2007 			.cra_exit = safexcel_aead_cra_exit,
2008 			.cra_module = THIS_MODULE,
2009 		},
2010 	},
2011 };
2012 
2013 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2014 {
2015 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2016 
2017 	safexcel_aead_sha512_cra_init(tfm);
2018 	ctx->alg = SAFEXCEL_3DES; /* override default */
2019 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2020 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2021 	return 0;
2022 }
2023 
2024 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2025 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2026 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2027 	.alg.aead = {
2028 		.setkey = safexcel_aead_setkey,
2029 		.encrypt = safexcel_aead_encrypt,
2030 		.decrypt = safexcel_aead_decrypt,
2031 		.ivsize = DES3_EDE_BLOCK_SIZE,
2032 		.maxauthsize = SHA512_DIGEST_SIZE,
2033 		.base = {
2034 			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2035 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2036 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2037 			.cra_flags = CRYPTO_ALG_ASYNC |
2038 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2039 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2040 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2041 			.cra_alignmask = 0,
2042 			.cra_init = safexcel_aead_sha512_des3_cra_init,
2043 			.cra_exit = safexcel_aead_cra_exit,
2044 			.cra_module = THIS_MODULE,
2045 		},
2046 	},
2047 };
2048 
2049 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2050 {
2051 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2052 
2053 	safexcel_aead_sha384_cra_init(tfm);
2054 	ctx->alg = SAFEXCEL_3DES; /* override default */
2055 	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2056 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2057 	return 0;
2058 }
2059 
2060 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2061 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2062 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2063 	.alg.aead = {
2064 		.setkey = safexcel_aead_setkey,
2065 		.encrypt = safexcel_aead_encrypt,
2066 		.decrypt = safexcel_aead_decrypt,
2067 		.ivsize = DES3_EDE_BLOCK_SIZE,
2068 		.maxauthsize = SHA384_DIGEST_SIZE,
2069 		.base = {
2070 			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2071 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2072 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2073 			.cra_flags = CRYPTO_ALG_ASYNC |
2074 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2075 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2076 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2077 			.cra_alignmask = 0,
2078 			.cra_init = safexcel_aead_sha384_des3_cra_init,
2079 			.cra_exit = safexcel_aead_cra_exit,
2080 			.cra_module = THIS_MODULE,
2081 		},
2082 	},
2083 };
2084 
2085 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2086 {
2087 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2088 
2089 	safexcel_aead_sha1_cra_init(tfm);
2090 	ctx->alg = SAFEXCEL_DES; /* override default */
2091 	ctx->blocksz = DES_BLOCK_SIZE;
2092 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2093 	return 0;
2094 }
2095 
2096 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2097 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2098 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2099 	.alg.aead = {
2100 		.setkey = safexcel_aead_setkey,
2101 		.encrypt = safexcel_aead_encrypt,
2102 		.decrypt = safexcel_aead_decrypt,
2103 		.ivsize = DES_BLOCK_SIZE,
2104 		.maxauthsize = SHA1_DIGEST_SIZE,
2105 		.base = {
2106 			.cra_name = "authenc(hmac(sha1),cbc(des))",
2107 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2108 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2109 			.cra_flags = CRYPTO_ALG_ASYNC |
2110 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2111 			.cra_blocksize = DES_BLOCK_SIZE,
2112 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2113 			.cra_alignmask = 0,
2114 			.cra_init = safexcel_aead_sha1_des_cra_init,
2115 			.cra_exit = safexcel_aead_cra_exit,
2116 			.cra_module = THIS_MODULE,
2117 		},
2118 	},
2119 };
2120 
2121 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2122 {
2123 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2124 
2125 	safexcel_aead_sha256_cra_init(tfm);
2126 	ctx->alg = SAFEXCEL_DES; /* override default */
2127 	ctx->blocksz = DES_BLOCK_SIZE;
2128 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2129 	return 0;
2130 }
2131 
2132 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2133 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2134 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2135 	.alg.aead = {
2136 		.setkey = safexcel_aead_setkey,
2137 		.encrypt = safexcel_aead_encrypt,
2138 		.decrypt = safexcel_aead_decrypt,
2139 		.ivsize = DES_BLOCK_SIZE,
2140 		.maxauthsize = SHA256_DIGEST_SIZE,
2141 		.base = {
2142 			.cra_name = "authenc(hmac(sha256),cbc(des))",
2143 			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2144 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2145 			.cra_flags = CRYPTO_ALG_ASYNC |
2146 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2147 			.cra_blocksize = DES_BLOCK_SIZE,
2148 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2149 			.cra_alignmask = 0,
2150 			.cra_init = safexcel_aead_sha256_des_cra_init,
2151 			.cra_exit = safexcel_aead_cra_exit,
2152 			.cra_module = THIS_MODULE,
2153 		},
2154 	},
2155 };
2156 
2157 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2158 {
2159 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2160 
2161 	safexcel_aead_sha224_cra_init(tfm);
2162 	ctx->alg = SAFEXCEL_DES; /* override default */
2163 	ctx->blocksz = DES_BLOCK_SIZE;
2164 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2165 	return 0;
2166 }
2167 
2168 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2169 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2170 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2171 	.alg.aead = {
2172 		.setkey = safexcel_aead_setkey,
2173 		.encrypt = safexcel_aead_encrypt,
2174 		.decrypt = safexcel_aead_decrypt,
2175 		.ivsize = DES_BLOCK_SIZE,
2176 		.maxauthsize = SHA224_DIGEST_SIZE,
2177 		.base = {
2178 			.cra_name = "authenc(hmac(sha224),cbc(des))",
2179 			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2180 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2181 			.cra_flags = CRYPTO_ALG_ASYNC |
2182 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2183 			.cra_blocksize = DES_BLOCK_SIZE,
2184 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2185 			.cra_alignmask = 0,
2186 			.cra_init = safexcel_aead_sha224_des_cra_init,
2187 			.cra_exit = safexcel_aead_cra_exit,
2188 			.cra_module = THIS_MODULE,
2189 		},
2190 	},
2191 };
2192 
2193 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2194 {
2195 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2196 
2197 	safexcel_aead_sha512_cra_init(tfm);
2198 	ctx->alg = SAFEXCEL_DES; /* override default */
2199 	ctx->blocksz = DES_BLOCK_SIZE;
2200 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2201 	return 0;
2202 }
2203 
2204 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2205 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2206 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2207 	.alg.aead = {
2208 		.setkey = safexcel_aead_setkey,
2209 		.encrypt = safexcel_aead_encrypt,
2210 		.decrypt = safexcel_aead_decrypt,
2211 		.ivsize = DES_BLOCK_SIZE,
2212 		.maxauthsize = SHA512_DIGEST_SIZE,
2213 		.base = {
2214 			.cra_name = "authenc(hmac(sha512),cbc(des))",
2215 			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2216 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2217 			.cra_flags = CRYPTO_ALG_ASYNC |
2218 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2219 			.cra_blocksize = DES_BLOCK_SIZE,
2220 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2221 			.cra_alignmask = 0,
2222 			.cra_init = safexcel_aead_sha512_des_cra_init,
2223 			.cra_exit = safexcel_aead_cra_exit,
2224 			.cra_module = THIS_MODULE,
2225 		},
2226 	},
2227 };
2228 
2229 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2230 {
2231 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2232 
2233 	safexcel_aead_sha384_cra_init(tfm);
2234 	ctx->alg = SAFEXCEL_DES; /* override default */
2235 	ctx->blocksz = DES_BLOCK_SIZE;
2236 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2237 	return 0;
2238 }
2239 
2240 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2241 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2242 	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2243 	.alg.aead = {
2244 		.setkey = safexcel_aead_setkey,
2245 		.encrypt = safexcel_aead_encrypt,
2246 		.decrypt = safexcel_aead_decrypt,
2247 		.ivsize = DES_BLOCK_SIZE,
2248 		.maxauthsize = SHA384_DIGEST_SIZE,
2249 		.base = {
2250 			.cra_name = "authenc(hmac(sha384),cbc(des))",
2251 			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2252 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2253 			.cra_flags = CRYPTO_ALG_ASYNC |
2254 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2255 			.cra_blocksize = DES_BLOCK_SIZE,
2256 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2257 			.cra_alignmask = 0,
2258 			.cra_init = safexcel_aead_sha384_des_cra_init,
2259 			.cra_exit = safexcel_aead_cra_exit,
2260 			.cra_module = THIS_MODULE,
2261 		},
2262 	},
2263 };
2264 
2265 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2266 {
2267 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2268 
2269 	safexcel_aead_sha1_cra_init(tfm);
2270 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2271 	return 0;
2272 }
2273 
2274 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2275 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2276 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2277 	.alg.aead = {
2278 		.setkey = safexcel_aead_setkey,
2279 		.encrypt = safexcel_aead_encrypt,
2280 		.decrypt = safexcel_aead_decrypt,
2281 		.ivsize = CTR_RFC3686_IV_SIZE,
2282 		.maxauthsize = SHA1_DIGEST_SIZE,
2283 		.base = {
2284 			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2285 			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2286 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2287 			.cra_flags = CRYPTO_ALG_ASYNC |
2288 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2289 			.cra_blocksize = 1,
2290 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2291 			.cra_alignmask = 0,
2292 			.cra_init = safexcel_aead_sha1_ctr_cra_init,
2293 			.cra_exit = safexcel_aead_cra_exit,
2294 			.cra_module = THIS_MODULE,
2295 		},
2296 	},
2297 };
2298 
2299 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2300 {
2301 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2302 
2303 	safexcel_aead_sha256_cra_init(tfm);
2304 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2305 	return 0;
2306 }
2307 
2308 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2309 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2310 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2311 	.alg.aead = {
2312 		.setkey = safexcel_aead_setkey,
2313 		.encrypt = safexcel_aead_encrypt,
2314 		.decrypt = safexcel_aead_decrypt,
2315 		.ivsize = CTR_RFC3686_IV_SIZE,
2316 		.maxauthsize = SHA256_DIGEST_SIZE,
2317 		.base = {
2318 			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2319 			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2320 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2321 			.cra_flags = CRYPTO_ALG_ASYNC |
2322 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2323 			.cra_blocksize = 1,
2324 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2325 			.cra_alignmask = 0,
2326 			.cra_init = safexcel_aead_sha256_ctr_cra_init,
2327 			.cra_exit = safexcel_aead_cra_exit,
2328 			.cra_module = THIS_MODULE,
2329 		},
2330 	},
2331 };
2332 
2333 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2334 {
2335 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2336 
2337 	safexcel_aead_sha224_cra_init(tfm);
2338 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2339 	return 0;
2340 }
2341 
2342 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2343 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2344 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2345 	.alg.aead = {
2346 		.setkey = safexcel_aead_setkey,
2347 		.encrypt = safexcel_aead_encrypt,
2348 		.decrypt = safexcel_aead_decrypt,
2349 		.ivsize = CTR_RFC3686_IV_SIZE,
2350 		.maxauthsize = SHA224_DIGEST_SIZE,
2351 		.base = {
2352 			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2353 			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2354 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2355 			.cra_flags = CRYPTO_ALG_ASYNC |
2356 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2357 			.cra_blocksize = 1,
2358 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2359 			.cra_alignmask = 0,
2360 			.cra_init = safexcel_aead_sha224_ctr_cra_init,
2361 			.cra_exit = safexcel_aead_cra_exit,
2362 			.cra_module = THIS_MODULE,
2363 		},
2364 	},
2365 };
2366 
2367 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2368 {
2369 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2370 
2371 	safexcel_aead_sha512_cra_init(tfm);
2372 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2373 	return 0;
2374 }
2375 
2376 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2377 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2378 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2379 	.alg.aead = {
2380 		.setkey = safexcel_aead_setkey,
2381 		.encrypt = safexcel_aead_encrypt,
2382 		.decrypt = safexcel_aead_decrypt,
2383 		.ivsize = CTR_RFC3686_IV_SIZE,
2384 		.maxauthsize = SHA512_DIGEST_SIZE,
2385 		.base = {
2386 			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2387 			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2388 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2389 			.cra_flags = CRYPTO_ALG_ASYNC |
2390 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2391 			.cra_blocksize = 1,
2392 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2393 			.cra_alignmask = 0,
2394 			.cra_init = safexcel_aead_sha512_ctr_cra_init,
2395 			.cra_exit = safexcel_aead_cra_exit,
2396 			.cra_module = THIS_MODULE,
2397 		},
2398 	},
2399 };
2400 
2401 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2402 {
2403 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2404 
2405 	safexcel_aead_sha384_cra_init(tfm);
2406 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2407 	return 0;
2408 }
2409 
2410 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2411 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2412 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2413 	.alg.aead = {
2414 		.setkey = safexcel_aead_setkey,
2415 		.encrypt = safexcel_aead_encrypt,
2416 		.decrypt = safexcel_aead_decrypt,
2417 		.ivsize = CTR_RFC3686_IV_SIZE,
2418 		.maxauthsize = SHA384_DIGEST_SIZE,
2419 		.base = {
2420 			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2421 			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2422 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2423 			.cra_flags = CRYPTO_ALG_ASYNC |
2424 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2425 			.cra_blocksize = 1,
2426 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2427 			.cra_alignmask = 0,
2428 			.cra_init = safexcel_aead_sha384_ctr_cra_init,
2429 			.cra_exit = safexcel_aead_cra_exit,
2430 			.cra_module = THIS_MODULE,
2431 		},
2432 	},
2433 };
2434 
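/*
 * xts(aes) keys are two equal-sized AES keys glued together: the first
 * half is the data (cipher) key, the second half the tweak key. A rough
 * sketch of the layout written into the context below, e.g. for a
 * 64-byte key:
 *
 *   key bytes  0..31  ->  ctx->key[0..7]    data key (AES-256)
 *   key bytes 32..63  ->  ctx->key[8..15]   tweak key
 *
 * ctx->key_len ends up as the combined length of both halves.
 */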
2435 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2436 					   const u8 *key, unsigned int len)
2437 {
2438 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2439 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2440 	struct safexcel_crypto_priv *priv = ctx->priv;
2441 	struct crypto_aes_ctx aes;
2442 	int ret, i;
2443 	unsigned int keylen;
2444 
2445 	/* Check for illegal XTS keys */
2446 	ret = xts_verify_key(ctfm, key, len);
2447 	if (ret)
2448 		return ret;
2449 
2450 	/* Only half of the key data is the cipher key */
2451 	keylen = (len >> 1);
2452 	ret = aes_expandkey(&aes, key, keylen);
2453 	if (ret)
2454 		return ret;
2455 
2456 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2457 		for (i = 0; i < keylen / sizeof(u32); i++) {
2458 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2459 				ctx->base.needs_inv = true;
2460 				break;
2461 			}
2462 		}
2463 	}
2464 
2465 	for (i = 0; i < keylen / sizeof(u32); i++)
2466 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2467 
2468 	/* The other half is the tweak key */
2469 	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2470 	if (ret)
2471 		return ret;
2472 
2473 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2474 		for (i = 0; i < keylen / sizeof(u32); i++) {
2475 			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2476 			    aes.key_enc[i]) {
2477 				ctx->base.needs_inv = true;
2478 				break;
2479 			}
2480 		}
2481 	}
2482 
2483 	for (i = 0; i < keylen / sizeof(u32); i++)
2484 		ctx->key[i + keylen / sizeof(u32)] =
2485 			cpu_to_le32(aes.key_enc[i]);
2486 
2487 	ctx->key_len = keylen << 1;
2488 
2489 	memzero_explicit(&aes, sizeof(aes));
2490 	return 0;
2491 }
2492 
2493 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2494 {
2495 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2496 
2497 	safexcel_skcipher_cra_init(tfm);
2498 	ctx->alg  = SAFEXCEL_AES;
2499 	ctx->blocksz = AES_BLOCK_SIZE;
2500 	ctx->xts  = 1;
2501 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2502 	return 0;
2503 }
2504 
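/*
 * XTS needs at least one full cipher block of payload (ciphertext
 * stealing only covers a partial *trailing* block), so sub-block
 * requests are rejected in software before they reach the engine.
 */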
2505 static int safexcel_encrypt_xts(struct skcipher_request *req)
2506 {
2507 	if (req->cryptlen < XTS_BLOCK_SIZE)
2508 		return -EINVAL;
2509 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2510 				  SAFEXCEL_ENCRYPT);
2511 }
2512 
2513 static int safexcel_decrypt_xts(struct skcipher_request *req)
2514 {
2515 	if (req->cryptlen < XTS_BLOCK_SIZE)
2516 		return -EINVAL;
2517 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2518 				  SAFEXCEL_DECRYPT);
2519 }
2520 
2521 struct safexcel_alg_template safexcel_alg_xts_aes = {
2522 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2523 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2524 	.alg.skcipher = {
2525 		.setkey = safexcel_skcipher_aesxts_setkey,
2526 		.encrypt = safexcel_encrypt_xts,
2527 		.decrypt = safexcel_decrypt_xts,
2528 		/* XTS actually uses 2 AES keys glued together */
2529 		.min_keysize = AES_MIN_KEY_SIZE * 2,
2530 		.max_keysize = AES_MAX_KEY_SIZE * 2,
2531 		.ivsize = XTS_BLOCK_SIZE,
2532 		.base = {
2533 			.cra_name = "xts(aes)",
2534 			.cra_driver_name = "safexcel-xts-aes",
2535 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2536 			.cra_flags = CRYPTO_ALG_ASYNC |
2537 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2538 			.cra_blocksize = XTS_BLOCK_SIZE,
2539 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2540 			.cra_alignmask = 0,
2541 			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2542 			.cra_exit = safexcel_skcipher_cra_exit,
2543 			.cra_module = THIS_MODULE,
2544 		},
2545 	},
2546 };
2547 
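/*
 * gcm(aes) setkey: besides programming the raw AES key, derive the GHASH
 * hash key H = E_K(0^128) by encrypting an all-zero block with the
 * software AES cipher allocated at cra_init time (ctx->hkaes), and store
 * it big-endian in ctx->ipad for the engine.
 */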
2548 static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2549 				    unsigned int len)
2550 {
2551 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2552 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2553 	struct safexcel_crypto_priv *priv = ctx->priv;
2554 	struct crypto_aes_ctx aes;
2555 	u32 hashkey[AES_BLOCK_SIZE >> 2];
2556 	int ret, i;
2557 
2558 	ret = aes_expandkey(&aes, key, len);
2559 	if (ret) {
2560 		memzero_explicit(&aes, sizeof(aes));
2561 		return ret;
2562 	}
2563 
2564 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2565 		for (i = 0; i < len / sizeof(u32); i++) {
2566 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2567 				ctx->base.needs_inv = true;
2568 				break;
2569 			}
2570 		}
2571 	}
2572 
2573 	for (i = 0; i < len / sizeof(u32); i++)
2574 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2575 
2576 	ctx->key_len = len;
2577 
2578 	/* Compute the hash key by encrypting zeroes with the cipher key */
2579 	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2580 	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2581 				CRYPTO_TFM_REQ_MASK);
2582 	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2583 	if (ret)
2584 		return ret;
2585 
2586 	memset(hashkey, 0, AES_BLOCK_SIZE);
2587 	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2588 
2589 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2590 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2591 			if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
2592 				ctx->base.needs_inv = true;
2593 				break;
2594 			}
2595 		}
2596 	}
2597 
2598 	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2599 		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
2600 
2601 	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2602 	memzero_explicit(&aes, sizeof(aes));
2603 	return 0;
2604 }
2605 
2606 static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2607 {
2608 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2609 
2610 	safexcel_aead_cra_init(tfm);
2611 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2612 	ctx->state_sz = GHASH_BLOCK_SIZE;
2613 	ctx->xcm = EIP197_XCM_MODE_GCM;
2614 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2615 
2616 	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2617 	return PTR_ERR_OR_ZERO(ctx->hkaes);
2618 }
2619 
2620 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2621 {
2622 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2623 
2624 	crypto_free_cipher(ctx->hkaes);
2625 	safexcel_aead_cra_exit(tfm);
2626 }
2627 
2628 static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2629 					 unsigned int authsize)
2630 {
2631 	return crypto_gcm_check_authsize(authsize);
2632 }
2633 
2634 struct safexcel_alg_template safexcel_alg_gcm = {
2635 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2636 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2637 	.alg.aead = {
2638 		.setkey = safexcel_aead_gcm_setkey,
2639 		.setauthsize = safexcel_aead_gcm_setauthsize,
2640 		.encrypt = safexcel_aead_encrypt,
2641 		.decrypt = safexcel_aead_decrypt,
2642 		.ivsize = GCM_AES_IV_SIZE,
2643 		.maxauthsize = GHASH_DIGEST_SIZE,
2644 		.base = {
2645 			.cra_name = "gcm(aes)",
2646 			.cra_driver_name = "safexcel-gcm-aes",
2647 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2648 			.cra_flags = CRYPTO_ALG_ASYNC |
2649 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2650 			.cra_blocksize = 1,
2651 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2652 			.cra_alignmask = 0,
2653 			.cra_init = safexcel_aead_gcm_cra_init,
2654 			.cra_exit = safexcel_aead_gcm_cra_exit,
2655 			.cra_module = THIS_MODULE,
2656 		},
2657 	},
2658 };
2659 
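/*
 * ccm(aes) setkey: the AES key is stored twice, once in ctx->key for the
 * cipher itself and once (big-endian) in ctx->ipad at an offset of
 * 2 * AES_BLOCK_SIZE, with state_sz sized to 2 * AES_BLOCK_SIZE + keylen
 * to match. The XCBC "hash" variant (128/192/256) is selected purely by
 * key length.
 */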
2660 static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2661 				    unsigned int len)
2662 {
2663 	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2664 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2665 	struct safexcel_crypto_priv *priv = ctx->priv;
2666 	struct crypto_aes_ctx aes;
2667 	int ret, i;
2668 
2669 	ret = aes_expandkey(&aes, key, len);
2670 	if (ret) {
2671 		memzero_explicit(&aes, sizeof(aes));
2672 		return ret;
2673 	}
2674 
2675 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2676 		for (i = 0; i < len / sizeof(u32); i++) {
2677 			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2678 				ctx->base.needs_inv = true;
2679 				break;
2680 			}
2681 		}
2682 	}
2683 
2684 	for (i = 0; i < len / sizeof(u32); i++) {
2685 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2686 		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2687 			cpu_to_be32(aes.key_enc[i]);
2688 	}
2689 
2690 	ctx->key_len = len;
2691 	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2692 
2693 	if (len == AES_KEYSIZE_192)
2694 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2695 	else if (len == AES_KEYSIZE_256)
2696 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2697 	else
2698 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2699 
2700 	memzero_explicit(&aes, sizeof(aes));
2701 	return 0;
2702 }
2703 
2704 static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2705 {
2706 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2707 
2708 	safexcel_aead_cra_init(tfm);
2709 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2710 	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2711 	ctx->xcm = EIP197_XCM_MODE_CCM;
2712 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2713 	ctx->ctrinit = 0;
2714 	return 0;
2715 }
2716 
2717 static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2718 					 unsigned int authsize)
2719 {
2720 	/* Borrowed from crypto/ccm.c */
2721 	switch (authsize) {
2722 	case 4:
2723 	case 6:
2724 	case 8:
2725 	case 10:
2726 	case 12:
2727 	case 14:
2728 	case 16:
2729 		break;
2730 	default:
2731 		return -EINVAL;
2732 	}
2733 
2734 	return 0;
2735 }
2736 
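/*
 * The iv[0] check below presumably mirrors crypto/ccm.c: iv[0] carries
 * L', the number of CCM length octets minus one, and RFC 3610 only
 * allows 2 <= L <= 8, i.e. 1 <= L' <= 7.
 */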
2737 static int safexcel_ccm_encrypt(struct aead_request *req)
2738 {
2739 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2740 
2741 	if (req->iv[0] < 1 || req->iv[0] > 7)
2742 		return -EINVAL;
2743 
2744 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2745 }
2746 
2747 static int safexcel_ccm_decrypt(struct aead_request *req)
2748 {
2749 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2750 
2751 	if (req->iv[0] < 1 || req->iv[0] > 7)
2752 		return -EINVAL;
2753 
2754 	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2755 }
2756 
2757 struct safexcel_alg_template safexcel_alg_ccm = {
2758 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2759 	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2760 	.alg.aead = {
2761 		.setkey = safexcel_aead_ccm_setkey,
2762 		.setauthsize = safexcel_aead_ccm_setauthsize,
2763 		.encrypt = safexcel_ccm_encrypt,
2764 		.decrypt = safexcel_ccm_decrypt,
2765 		.ivsize = AES_BLOCK_SIZE,
2766 		.maxauthsize = AES_BLOCK_SIZE,
2767 		.base = {
2768 			.cra_name = "ccm(aes)",
2769 			.cra_driver_name = "safexcel-ccm-aes",
2770 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2771 			.cra_flags = CRYPTO_ALG_ASYNC |
2772 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2773 			.cra_blocksize = 1,
2774 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2775 			.cra_alignmask = 0,
2776 			.cra_init = safexcel_aead_ccm_cra_init,
2777 			.cra_exit = safexcel_aead_cra_exit,
2778 			.cra_module = THIS_MODULE,
2779 		},
2780 	},
2781 };
2782 
2783 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2784 				     const u8 *key)
2785 {
2786 	struct safexcel_crypto_priv *priv = ctx->priv;
2787 
2788 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2789 		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2790 			ctx->base.needs_inv = true;
2791 
2792 	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2793 	ctx->key_len = CHACHA_KEY_SIZE;
2794 }
2795 
2796 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2797 					     const u8 *key, unsigned int len)
2798 {
2799 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2800 
2801 	if (len != CHACHA_KEY_SIZE)
2802 		return -EINVAL;
2803 
2804 	safexcel_chacha20_setkey(ctx, key);
2805 
2806 	return 0;
2807 }
2808 
2809 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2810 {
2811 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2812 
2813 	safexcel_skcipher_cra_init(tfm);
2814 	ctx->alg  = SAFEXCEL_CHACHA20;
2815 	ctx->ctrinit = 0;
2816 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2817 	return 0;
2818 }
2819 
2820 struct safexcel_alg_template safexcel_alg_chacha20 = {
2821 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2822 	.algo_mask = SAFEXCEL_ALG_CHACHA20,
2823 	.alg.skcipher = {
2824 		.setkey = safexcel_skcipher_chacha20_setkey,
2825 		.encrypt = safexcel_encrypt,
2826 		.decrypt = safexcel_decrypt,
2827 		.min_keysize = CHACHA_KEY_SIZE,
2828 		.max_keysize = CHACHA_KEY_SIZE,
2829 		.ivsize = CHACHA_IV_SIZE,
2830 		.base = {
2831 			.cra_name = "chacha20",
2832 			.cra_driver_name = "safexcel-chacha20",
2833 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2834 			.cra_flags = CRYPTO_ALG_ASYNC |
2835 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2836 			.cra_blocksize = 1,
2837 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2838 			.cra_alignmask = 0,
2839 			.cra_init = safexcel_skcipher_chacha20_cra_init,
2840 			.cra_exit = safexcel_skcipher_cra_exit,
2841 			.cra_module = THIS_MODULE,
2842 		},
2843 	},
2844 };
2845 
2846 static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2847 				    const u8 *key, unsigned int len)
2848 {
2849 	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2850 
2851 	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2852 	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2853 		/* ESP variant has nonce appended to key */
2854 		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2855 		ctx->nonce = *(u32 *)(key + len);
2856 	}
2857 	if (len != CHACHA_KEY_SIZE)
2858 		return -EINVAL;
2859 
2860 	safexcel_chacha20_setkey(ctx, key);
2861 
2862 	return 0;
2863 }
2864 
2865 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2866 					 unsigned int authsize)
2867 {
2868 	if (authsize != POLY1305_DIGEST_SIZE)
2869 		return -EINVAL;
2870 	return 0;
2871 }
2872 
2873 static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2874 					  enum safexcel_cipher_direction dir)
2875 {
2876 	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2877 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2878 	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2879 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2880 	struct aead_request *subreq = aead_request_ctx(req);
2881 	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2882 	int ret = 0;
2883 
2884 	/*
2885 	 * Instead of wasting time detecting umpteen silly corner cases,
2886 	 * just dump all "small" requests to the fallback implementation.
2887 	 * HW would not be faster on such small requests anyway.
2888 	 */
2889 	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2890 		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2891 		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
2892 		return safexcel_queue_req(&req->base, creq, dir);
2893 	}
2894 
2895 	/* HW cannot do full (AAD+payload) zero length, use fallback */
2896 	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2897 	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2898 		/* ESP variant has nonce appended to the key */
2899 		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2900 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2901 					 CHACHA_KEY_SIZE +
2902 					 EIP197_AEAD_IPSEC_NONCE_SIZE);
2903 	} else {
2904 		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2905 					 CHACHA_KEY_SIZE);
2906 	}
2907 	if (ret) {
2908 		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2909 		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2910 					    CRYPTO_TFM_REQ_MASK);
2911 		return ret;
2912 	}
2913 
2914 	aead_request_set_tfm(subreq, ctx->fback);
2915 	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2916 				  req->base.data);
2917 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2918 			       req->iv);
2919 	aead_request_set_ad(subreq, req->assoclen);
2920 
2921 	return (dir ==  SAFEXCEL_ENCRYPT) ?
2922 		crypto_aead_encrypt(subreq) :
2923 		crypto_aead_decrypt(subreq);
2924 }
2925 
2926 static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2927 {
2928 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2929 }
2930 
2931 static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2932 {
2933 	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2934 }
2935 
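/*
 * Fallback AEAD tfms share the request context with the hardware path:
 * the reqsize is bumped to whichever is larger, the driver's own
 * safexcel_cipher_req or a full aead_request for the fallback, so
 * aead_request_ctx() can double as the fallback subrequest in the
 * crypt handlers above and below.
 */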
2936 static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2937 {
2938 	struct crypto_aead *aead = __crypto_aead_cast(tfm);
2939 	struct aead_alg *alg = crypto_aead_alg(aead);
2940 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2941 
2942 	safexcel_aead_cra_init(tfm);
2943 
2944 	/* Allocate fallback implementation */
2945 	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2946 				       CRYPTO_ALG_ASYNC |
2947 				       CRYPTO_ALG_NEED_FALLBACK);
2948 	if (IS_ERR(ctx->fback))
2949 		return PTR_ERR(ctx->fback);
2950 
2951 	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2952 					  sizeof(struct aead_request) +
2953 					  crypto_aead_reqsize(ctx->fback)));
2954 
2955 	return 0;
2956 }
2957 
2958 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2959 {
2960 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2961 
2962 	safexcel_aead_fallback_cra_init(tfm);
2963 	ctx->alg  = SAFEXCEL_CHACHA20;
2964 	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2965 		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2966 	ctx->ctrinit = 0;
2967 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2968 	ctx->state_sz = 0; /* Precomputed by HW */
2969 	return 0;
2970 }
2971 
2972 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2973 {
2974 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2975 
2976 	crypto_free_aead(ctx->fback);
2977 	safexcel_aead_cra_exit(tfm);
2978 }
2979 
2980 struct safexcel_alg_template safexcel_alg_chachapoly = {
2981 	.type = SAFEXCEL_ALG_TYPE_AEAD,
2982 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2983 	.alg.aead = {
2984 		.setkey = safexcel_aead_chachapoly_setkey,
2985 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2986 		.encrypt = safexcel_aead_chachapoly_encrypt,
2987 		.decrypt = safexcel_aead_chachapoly_decrypt,
2988 		.ivsize = CHACHAPOLY_IV_SIZE,
2989 		.maxauthsize = POLY1305_DIGEST_SIZE,
2990 		.base = {
2991 			.cra_name = "rfc7539(chacha20,poly1305)",
2992 			.cra_driver_name = "safexcel-chacha20-poly1305",
2993 			/* +1 to put it above HW chacha + SW poly */
2994 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2995 			.cra_flags = CRYPTO_ALG_ASYNC |
2996 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2997 				     CRYPTO_ALG_NEED_FALLBACK,
2998 			.cra_blocksize = 1,
2999 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3000 			.cra_alignmask = 0,
3001 			.cra_init = safexcel_aead_chachapoly_cra_init,
3002 			.cra_exit = safexcel_aead_fallback_cra_exit,
3003 			.cra_module = THIS_MODULE,
3004 		},
3005 	},
3006 };
3007 
3008 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3009 {
3010 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3011 	int ret;
3012 
3013 	ret = safexcel_aead_chachapoly_cra_init(tfm);
3014 	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3015 	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3016 	return ret;
3017 }
3018 
3019 struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3020 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3021 	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3022 	.alg.aead = {
3023 		.setkey = safexcel_aead_chachapoly_setkey,
3024 		.setauthsize = safexcel_aead_chachapoly_setauthsize,
3025 		.encrypt = safexcel_aead_chachapoly_encrypt,
3026 		.decrypt = safexcel_aead_chachapoly_decrypt,
3027 		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3028 		.maxauthsize = POLY1305_DIGEST_SIZE,
3029 		.base = {
3030 			.cra_name = "rfc7539esp(chacha20,poly1305)",
3031 			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
3032 			/* +1 to put it above HW chacha + SW poly */
3033 			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3034 			.cra_flags = CRYPTO_ALG_ASYNC |
3035 				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3036 				     CRYPTO_ALG_NEED_FALLBACK,
3037 			.cra_blocksize = 1,
3038 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3039 			.cra_alignmask = 0,
3040 			.cra_init = safexcel_aead_chachapolyesp_cra_init,
3041 			.cra_exit = safexcel_aead_fallback_cra_exit,
3042 			.cra_module = THIS_MODULE,
3043 		},
3044 	},
3045 };
3046 
3047 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3048 					const u8 *key, unsigned int len)
3049 {
3050 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3051 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3052 	struct safexcel_crypto_priv *priv = ctx->priv;
3053 
3054 	if (len != SM4_KEY_SIZE)
3055 		return -EINVAL;
3056 
3057 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3058 		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3059 			ctx->base.needs_inv = true;
3060 
3061 	memcpy(ctx->key, key, SM4_KEY_SIZE);
3062 	ctx->key_len = SM4_KEY_SIZE;
3063 
3064 	return 0;
3065 }
3066 
3067 static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3068 {
3069 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3070 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3071 		return -EINVAL;
3072 	else
3073 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3074 					  SAFEXCEL_ENCRYPT);
3075 }
3076 
3077 static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3078 {
3079 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3080 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3081 		return -EINVAL;
3082 	else
3083 		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3084 					  SAFEXCEL_DECRYPT);
3085 }
3086 
3087 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3088 {
3089 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3090 
3091 	safexcel_skcipher_cra_init(tfm);
3092 	ctx->alg  = SAFEXCEL_SM4;
3093 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3094 	ctx->blocksz = 0;
3095 	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3096 	return 0;
3097 }
3098 
3099 struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3100 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3101 	.algo_mask = SAFEXCEL_ALG_SM4,
3102 	.alg.skcipher = {
3103 		.setkey = safexcel_skcipher_sm4_setkey,
3104 		.encrypt = safexcel_sm4_blk_encrypt,
3105 		.decrypt = safexcel_sm4_blk_decrypt,
3106 		.min_keysize = SM4_KEY_SIZE,
3107 		.max_keysize = SM4_KEY_SIZE,
3108 		.base = {
3109 			.cra_name = "ecb(sm4)",
3110 			.cra_driver_name = "safexcel-ecb-sm4",
3111 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3112 			.cra_flags = CRYPTO_ALG_ASYNC |
3113 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3114 			.cra_blocksize = SM4_BLOCK_SIZE,
3115 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3116 			.cra_alignmask = 0,
3117 			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3118 			.cra_exit = safexcel_skcipher_cra_exit,
3119 			.cra_module = THIS_MODULE,
3120 		},
3121 	},
3122 };
3123 
3124 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3125 {
3126 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3127 
3128 	safexcel_skcipher_cra_init(tfm);
3129 	ctx->alg  = SAFEXCEL_SM4;
3130 	ctx->blocksz = SM4_BLOCK_SIZE;
3131 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3132 	return 0;
3133 }
3134 
3135 struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3136 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3137 	.algo_mask = SAFEXCEL_ALG_SM4,
3138 	.alg.skcipher = {
3139 		.setkey = safexcel_skcipher_sm4_setkey,
3140 		.encrypt = safexcel_sm4_blk_encrypt,
3141 		.decrypt = safexcel_sm4_blk_decrypt,
3142 		.min_keysize = SM4_KEY_SIZE,
3143 		.max_keysize = SM4_KEY_SIZE,
3144 		.ivsize = SM4_BLOCK_SIZE,
3145 		.base = {
3146 			.cra_name = "cbc(sm4)",
3147 			.cra_driver_name = "safexcel-cbc-sm4",
3148 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3149 			.cra_flags = CRYPTO_ALG_ASYNC |
3150 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3151 			.cra_blocksize = SM4_BLOCK_SIZE,
3152 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3153 			.cra_alignmask = 0,
3154 			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3155 			.cra_exit = safexcel_skcipher_cra_exit,
3156 			.cra_module = THIS_MODULE,
3157 		},
3158 	},
3159 };
3160 
3161 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3162 {
3163 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3164 
3165 	safexcel_skcipher_cra_init(tfm);
3166 	ctx->alg  = SAFEXCEL_SM4;
3167 	ctx->blocksz = SM4_BLOCK_SIZE;
3168 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3169 	return 0;
3170 }
3171 
3172 struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3173 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3174 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3175 	.alg.skcipher = {
3176 		.setkey = safexcel_skcipher_sm4_setkey,
3177 		.encrypt = safexcel_encrypt,
3178 		.decrypt = safexcel_decrypt,
3179 		.min_keysize = SM4_KEY_SIZE,
3180 		.max_keysize = SM4_KEY_SIZE,
3181 		.ivsize = SM4_BLOCK_SIZE,
3182 		.base = {
3183 			.cra_name = "ofb(sm4)",
3184 			.cra_driver_name = "safexcel-ofb-sm4",
3185 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3186 			.cra_flags = CRYPTO_ALG_ASYNC |
3187 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3188 			.cra_blocksize = 1,
3189 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3190 			.cra_alignmask = 0,
3191 			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3192 			.cra_exit = safexcel_skcipher_cra_exit,
3193 			.cra_module = THIS_MODULE,
3194 		},
3195 	},
3196 };
3197 
3198 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3199 {
3200 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3201 
3202 	safexcel_skcipher_cra_init(tfm);
3203 	ctx->alg  = SAFEXCEL_SM4;
3204 	ctx->blocksz = SM4_BLOCK_SIZE;
3205 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3206 	return 0;
3207 }
3208 
3209 struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3210 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3211 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3212 	.alg.skcipher = {
3213 		.setkey = safexcel_skcipher_sm4_setkey,
3214 		.encrypt = safexcel_encrypt,
3215 		.decrypt = safexcel_decrypt,
3216 		.min_keysize = SM4_KEY_SIZE,
3217 		.max_keysize = SM4_KEY_SIZE,
3218 		.ivsize = SM4_BLOCK_SIZE,
3219 		.base = {
3220 			.cra_name = "cfb(sm4)",
3221 			.cra_driver_name = "safexcel-cfb-sm4",
3222 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3223 			.cra_flags = CRYPTO_ALG_ASYNC |
3224 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3225 			.cra_blocksize = 1,
3226 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3227 			.cra_alignmask = 0,
3228 			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3229 			.cra_exit = safexcel_skcipher_cra_exit,
3230 			.cra_module = THIS_MODULE,
3231 		},
3232 	},
3233 };
3234 
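/*
 * rfc3686(ctr(sm4)) keys carry the 4-byte nonce after the cipher key,
 * which is why min/max_keysize below add CTR_RFC3686_NONCE_SIZE. A rough
 * sketch of how a 20-byte key is consumed:
 *
 *   key bytes  0..15  ->  SM4 cipher key (via safexcel_skcipher_sm4_setkey)
 *   key bytes 16..19  ->  ctx->nonce     (RFC 3686 nonce)
 */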
3235 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3236 					   const u8 *key, unsigned int len)
3237 {
3238 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3239 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3240 
3241 	/* last 4 bytes of key are the nonce! */
3242 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3243 	/* exclude the nonce here */
3244 	len -= CTR_RFC3686_NONCE_SIZE;
3245 
3246 	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3247 }
3248 
3249 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3250 {
3251 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3252 
3253 	safexcel_skcipher_cra_init(tfm);
3254 	ctx->alg  = SAFEXCEL_SM4;
3255 	ctx->blocksz = SM4_BLOCK_SIZE;
3256 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3257 	return 0;
3258 }
3259 
3260 struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3261 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3262 	.algo_mask = SAFEXCEL_ALG_SM4,
3263 	.alg.skcipher = {
3264 		.setkey = safexcel_skcipher_sm4ctr_setkey,
3265 		.encrypt = safexcel_encrypt,
3266 		.decrypt = safexcel_decrypt,
3267 		/* Add nonce size */
3268 		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3269 		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3270 		.ivsize = CTR_RFC3686_IV_SIZE,
3271 		.base = {
3272 			.cra_name = "rfc3686(ctr(sm4))",
3273 			.cra_driver_name = "safexcel-ctr-sm4",
3274 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3275 			.cra_flags = CRYPTO_ALG_ASYNC |
3276 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3277 			.cra_blocksize = 1,
3278 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3279 			.cra_alignmask = 0,
3280 			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3281 			.cra_exit = safexcel_skcipher_cra_exit,
3282 			.cra_module = THIS_MODULE,
3283 		},
3284 	},
3285 };
3286 
3287 static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3288 {
3289 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3290 	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3291 		return -EINVAL;
3292 
3293 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3294 				  SAFEXCEL_ENCRYPT);
3295 }
3296 
3297 static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3298 {
3299 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3300 
3301 	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3302 	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3303 		return -EINVAL;
3304 
3305 	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3306 				  SAFEXCEL_DECRYPT);
3307 }
3308 
3309 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3310 {
3311 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3312 
3313 	safexcel_aead_cra_init(tfm);
3314 	ctx->alg = SAFEXCEL_SM4;
3315 	ctx->blocksz = SM4_BLOCK_SIZE;
3316 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3317 	ctx->state_sz = SHA1_DIGEST_SIZE;
3318 	return 0;
3319 }
3320 
3321 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3322 	.type = SAFEXCEL_ALG_TYPE_AEAD,
3323 	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3324 	.alg.aead = {
3325 		.setkey = safexcel_aead_setkey,
3326 		.encrypt = safexcel_aead_sm4_blk_encrypt,
3327 		.decrypt = safexcel_aead_sm4_blk_decrypt,
3328 		.ivsize = SM4_BLOCK_SIZE,
3329 		.maxauthsize = SHA1_DIGEST_SIZE,
3330 		.base = {
3331 			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
3332 			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3333 			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3334 			.cra_flags = CRYPTO_ALG_ASYNC |
3335 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3336 			.cra_blocksize = SM4_BLOCK_SIZE,
3337 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3338 			.cra_alignmask = 0,
3339 			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3340 			.cra_exit = safexcel_aead_cra_exit,
3341 			.cra_module = THIS_MODULE,
3342 		},
3343 	},
3344 };
3345 
static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

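/* Redirect a request to the software fallback transform */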
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

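/*
 * Set up the engine context for SM4-CBC with HMAC-SM3 authentication,
 * including the software fallback used for zero length requests.
 */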
static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

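/* Like the SM4-CBC/HMAC-SHA1 setup, but with the cipher in CTR mode */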
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

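/* Like the SM4-CBC/HMAC-SM3 setup, but with the cipher in CTR mode */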
static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

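/*
 * An RFC4106 key is the AES-GCM key followed by a 4 byte salt, which is
 * stored as the nonce; strip the salt before programming the actual key.
 */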
static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

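/* GCM for IPsec ESP: flag the context and skip the explicit IV in the AAD */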
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

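/* RFC4543 (GMAC) only allows the full 16 byte GHASH tag */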
static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

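/* GMAC variant of GCM: the payload is authenticated but not encrypted */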
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

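/*
 * An RFC4309 key is the AES-CCM key followed by a 3 byte salt; the salt
 * is stored in the nonce together with the CCM flags byte (L).
 */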
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

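/*
 * The ESP AAD is either 16 bytes (SPI + 32 bit sequence number + 8 byte IV)
 * or 20 bytes (with a 64 bit extended sequence number).
 */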
static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

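/* CCM for IPsec ESP: flag the context and skip the explicit IV in the AAD */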
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
