// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */
#include <linux/device.h>
#include <crypto/scatterwalk.h>
#include "rk3288_crypto.h"

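/*
 * Request-level decrypt flag. It is ORed into rctx->mode, which is later
 * written verbatim to the AES/TDES control register, so BIT(0) is assumed
 * to match the hardware's encrypt/decrypt select bit in both register
 * layouts.
 */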
#define RK_CRYPTO_DEC			BIT(0)

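/*
 * Decide whether a request can run on the hardware at all. The DMA engine
 * needs 32-bit aligned scatterlist entries whose lengths are block-size
 * multiples and identical on the source and destination sides; anything
 * else (including zero-length requests) is punted to the software fallback,
 * with a per-reason statistics counter recorded for debugging.
 */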
static bool rk_cipher_need_fallback(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
	struct scatterlist *sgs, *sgd;
	unsigned int stodo, dtodo, len;
	unsigned int bs = crypto_skcipher_blocksize(tfm);

	if (!req->cryptlen)
		return true;

	len = req->cryptlen;
	sgs = req->src;
	sgd = req->dst;
	while (sgs && sgd) {
		if (!IS_ALIGNED(sgs->offset, sizeof(u32))) {
			algt->stat_fb_align++;
			return true;
		}
		if (!IS_ALIGNED(sgd->offset, sizeof(u32))) {
			algt->stat_fb_align++;
			return true;
		}
		stodo = min(len, sgs->length);
		if (stodo % bs) {
			algt->stat_fb_len++;
			return true;
		}
		dtodo = min(len, sgd->length);
		if (dtodo % bs) {
			algt->stat_fb_len++;
			return true;
		}
		if (stodo != dtodo) {
			algt->stat_fb_sgdiff++;
			return true;
		}
		len -= stodo;
		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}
	return false;
}

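/*
 * Run the request through the pre-allocated software fallback tfm, keeping
 * the caller's completion callback and flags so the fallback path is
 * indistinguishable from the hardware path for the API user.
 */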
static int rk_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct rk_cipher_ctx *op = crypto_skcipher_ctx(tfm);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
	int err;

	algt->stat_fb++;

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->mode & RK_CRYPTO_DEC)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

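/*
 * Entry point shared by all the encrypt/decrypt callbacks below: either
 * divert the request to the fallback, or pick a hardware instance and queue
 * the request on its crypto engine.
 */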
static int rk_cipher_handle_req(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_crypto_info *rkc;
	struct crypto_engine *engine;

	if (rk_cipher_need_fallback(req))
		return rk_cipher_fallback(req);

	rkc = get_rk_crypto();

	engine = rkc->engine;
	rctx->dev = rkc;

	return crypto_transfer_skcipher_request_to_engine(engine, req);
}

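/*
 * The setkey handlers keep a copy of the key in the context for later
 * programming into the hardware and also propagate it to the fallback tfm,
 * so both paths always agree on the key material.
 */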
static int rk_aes_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int rk_des_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int rk_tdes_setkey(struct crypto_skcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

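/*
 * Thin wrappers that only record the algorithm/chaining-mode bits (plus
 * RK_CRYPTO_DEC for decryption) in the request context before handing off
 * to rk_cipher_handle_req(). For DES-ECB the mode word is simply 0.
 */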
static int rk_aes_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_cipher_handle_req(req);
}

static int rk_aes_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_aes_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_cipher_handle_req(req);
}

static int rk_aes_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = 0;
	return rk_cipher_handle_req(req);
}

static int rk_des_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_cipher_handle_req(req);
}

static int rk_des_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		     RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

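/*
 * Program key, mode and byte-swap settings into either the TDES or the AES
 * register bank, selected by the transform's block size, then unmask the
 * block-DMA done/error interrupts.
 */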
static void rk_cipher_hw_init(struct rk_crypto_info *dev, struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	u32 block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);

	if (block == DES_BLOCK_SIZE) {
		rctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			      RK_CRYPTO_TDES_BYTESWAP_KEY |
			      RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, rctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_KEY1_0, ctx->key, ctx->keylen);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		rctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			      RK_CRYPTO_AES_KEY_CHANGE |
			      RK_CRYPTO_AES_BYTESWAP_KEY |
			      RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			rctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			rctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, rctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_KEY_0, ctx->key, ctx->keylen);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

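/*
 * Kick one DMA transfer: source address, length (in 32-bit words, see the
 * todo / 4 at the call site) and destination address, then set the
 * block-start bit together with what appears to be its write-enable mask in
 * the upper half of the control register.
 */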
static void crypto_dma_start(struct rk_crypto_info *dev,
			     struct scatterlist *sgs,
			     struct scatterlist *sgd, unsigned int todo)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd));
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

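/*
 * Engine worker: walk the source/destination scatterlists one entry at a
 * time, DMA-mapping each pair, programming the hardware and waiting for the
 * completion raised by the interrupt handler. CBC chaining across entries
 * is done in software: for encryption the last ciphertext block of the
 * previous destination entry becomes the next IV; for decryption the last
 * ciphertext block of the previous *source* entry is used, saved before the
 * transfer in case src == dst would overwrite it.
 */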
static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sgs, *sgd;
	int err = 0;
	int ivsize = crypto_skcipher_ivsize(tfm);
	int offset;
	u8 iv[AES_BLOCK_SIZE];
	u8 biv[AES_BLOCK_SIZE];
	u8 *ivtouse = areq->iv;
	unsigned int len = areq->cryptlen;
	unsigned int todo;
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
	struct rk_crypto_info *rkc = rctx->dev;

	err = pm_runtime_resume_and_get(rkc->dev);
	if (err)
		return err;

	algt->stat_req++;
	rkc->nreq++;

	if (areq->iv && ivsize > 0) {
		if (rctx->mode & RK_CRYPTO_DEC) {
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
						 offset, ivsize, 0);
		}
	}

	sgs = areq->src;
	sgd = areq->dst;

	while (sgs && sgd && len) {
		if (!sgs->length) {
			sgs = sg_next(sgs);
			sgd = sg_next(sgd);
			continue;
		}
		if (rctx->mode & RK_CRYPTO_DEC) {
			/* back up the last block of source, to be used as the IV for the next step */
			offset = sgs->length - ivsize;
			scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0);
		}
		if (sgs == sgd) {
			err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_iv;
			}
		} else {
			err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_iv;
			}
			err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_sgs;
			}
		}
		err = 0;
		rk_cipher_hw_init(rkc, areq);
		if (ivsize) {
			if (ivsize == DES_BLOCK_SIZE)
				memcpy_toio(rkc->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
			else
				memcpy_toio(rkc->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
		}
		reinit_completion(&rkc->complete);
		rkc->status = 0;

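		/* The DMA length register counts 32-bit words, hence todo / 4. */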
		todo = min(sg_dma_len(sgs), len);
		len -= todo;
		crypto_dma_start(rkc, sgs, sgd, todo / 4);
		wait_for_completion_interruptible_timeout(&rkc->complete,
							  msecs_to_jiffies(2000));
		if (!rkc->status) {
			dev_err(rkc->dev, "DMA timeout\n");
			err = -EFAULT;
			goto theend;
		}
		if (sgs == sgd) {
			dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
		} else {
			dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
			dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
		}
		if (rctx->mode & RK_CRYPTO_DEC) {
			memcpy(iv, biv, ivsize);
			ivtouse = iv;
		} else {
			offset = sgd->length - ivsize;
			scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0);
			ivtouse = iv;
		}
		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}

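	/*
	 * Provide the next-IV semantics the skcipher API expects: on
	 * decryption restore the ciphertext block saved before it could be
	 * overwritten, on encryption copy out the final ciphertext block.
	 */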
	if (areq->iv && ivsize > 0) {
		offset = areq->cryptlen - ivsize;
		if (rctx->mode & RK_CRYPTO_DEC) {
			memcpy(areq->iv, rctx->backup_iv, ivsize);
			memzero_explicit(rctx->backup_iv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend:
	pm_runtime_put_autosuspend(rkc->dev);

	local_bh_disable();
	crypto_finalize_skcipher_request(engine, areq, err);
	local_bh_enable();
	return 0;

theend_sgs:
	/* Only reached when mapping sgd failed, so src was mapped separately. */
	dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
theend_iv:
	return err;
}

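/*
 * Allocate the software fallback up front so the data path never has to
 * fail on allocation, and size the request context to hold the fallback's
 * request as well.
 */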
static int rk_cipher_tfm_init(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(algt->dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	tfm->reqsize = sizeof(struct rk_cipher_rctx) +
		crypto_skcipher_reqsize(ctx->fallback_tfm);

	ctx->enginectx.op.do_one_request = rk_cipher_run;

	return 0;
}

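/* Wipe the key material before the context memory is freed. */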
static void rk_cipher_tfm_exit(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	memzero_explicit(ctx->key, ctx->keylen);
	crypto_free_skcipher(ctx->fallback_tfm);
}

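/*
 * Algorithm templates registered with the crypto API: ECB and CBC for AES,
 * DES and 3DES-EDE. All are marked ASYNC and NEED_FALLBACK to match the
 * engine-based and fallback handling above.
 */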
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x0f,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= rk_aes_setkey,
		.encrypt		= rk_aes_ecb_encrypt,
		.decrypt		= rk_aes_ecb_decrypt,
	}
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x0f,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= rk_aes_setkey,
		.encrypt		= rk_aes_cbc_encrypt,
		.decrypt		= rk_aes_cbc_decrypt,
	}
};

struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.base.cra_name		= "ecb(des)",
		.base.cra_driver_name	= "ecb-des-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.setkey			= rk_des_setkey,
		.encrypt		= rk_des_ecb_encrypt,
		.decrypt		= rk_des_ecb_decrypt,
	}
};

struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.base.cra_name		= "cbc(des)",
		.base.cra_driver_name	= "cbc-des-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= rk_des_setkey,
		.encrypt		= rk_des_cbc_encrypt,
		.decrypt		= rk_des_cbc_decrypt,
	}
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "ecb-des3-ede-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= rk_tdes_setkey,
		.encrypt		= rk_des3_ede_ecb_encrypt,
		.decrypt		= rk_des3_ede_ecb_decrypt,
	}
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "cbc-des3-ede-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= rk_tdes_setkey,
		.encrypt		= rk_des3_ede_cbc_encrypt,
		.decrypt		= rk_des3_ede_cbc_decrypt,
	}
};