/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/scatterwalk.h>
#include <linux/crypto.h>
#include <linux/module.h>

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
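	 * e.g. a 16 byte (128 bit) key gives 6 + 16/4 = 10 rounds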
	 */
	return 6 + ctx->key_length / 4;
}

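/*
 * Low-level CCM primitives, implemented in NEON assembly (in the
 * companion aes-ce-ccm-core.S). 'rk' is the expanded key schedule and
 * 'rounds' the round count returned by num_rounds().
 */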
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = crypto_aes_expand_key(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
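	/*
	 * CCM requires an even tag size of at least 4 bytes; the upper
	 * bound (AES_BLOCK_SIZE) is enforced by the API via .maxauthsize.
	 */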
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

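	/* zero the counter field: req->iv now holds the initial CTR block A0 */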
	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}
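	/*
	 * (encoding per RFC 3610: AAD lengths below 0xff00 use a 2 byte tag,
	 *  larger ones the 0xff 0xfe marker plus a 4 byte big-endian length;
	 *  the 10 byte form for lengths >= 2^32 cannot occur here, since
	 *  assoclen is a u32)
	 */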

	ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, &macp, ctx->key_enc,
			     num_rounds(ctx));
	scatterwalk_start(&walk, req->assoc);

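	/* walk the scatterlist, feeding the AAD to the MAC chunk by chunk */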
	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ce_aes_ccm_auth_data(mac, p, n, &macp, ctx->key_enc,
				     num_rounds(ctx));
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct blkcipher_desc desc = { .info = req->iv };
	struct blkcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

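	/* the asm code uses at most 6 NEON registers, so preserve only those */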
	kernel_neon_begin_partial(6);

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	blkcipher_walk_init(&walk, req->dst, req->src, len);
	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
					     AES_BLOCK_SIZE);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

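		/*
		 * All but the last call must process whole blocks only, so
		 * hold back any partial tail for the next iteration; the
		 * final chunk (walk.nbytes == len) may end in a partial
		 * block, which the asm code handles itself.
		 */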
		if (walk.nbytes == len)
			tail = 0;

		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		len -= walk.nbytes - tail;
		err = blkcipher_walk_done(&desc, &walk, tail);
	}
	if (!err)
		ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

	kernel_neon_end();

	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct blkcipher_desc desc = { .info = req->iv };
	struct blkcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	kernel_neon_begin_partial(6);

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	blkcipher_walk_init(&walk, req->dst, req->src, len);
	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
					     AES_BLOCK_SIZE);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == len)
			tail = 0;

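		/*
		 * note that decryption also uses ctx->key_enc: CCM only ever
		 * runs AES in the forward direction (CTR for the payload,
		 * CBC-MAC for the tag)
		 */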
		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		len -= walk.nbytes - tail;
		err = blkcipher_walk_done(&desc, &walk, tail);
	}
	if (!err)
		ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

	kernel_neon_end();

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src, req->cryptlen - authsize,
				 authsize, 0);

	if (memcmp(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct crypto_alg ccm_aes_alg = {
	.cra_name		= "ccm(aes)",
	.cra_driver_name	= "ccm-aes-ce",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_AEAD,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
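	/*
	 * blocksize is 1 since CCM is a CTR based mode; alignmask 7 makes
	 * the API hand 8 byte aligned buffers to the NEON code
	 */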
	.cra_type		= &crypto_aead_type,
	.cra_module		= THIS_MODULE,
	.cra_aead = {
		.ivsize		= AES_BLOCK_SIZE,
		.maxauthsize	= AES_BLOCK_SIZE,
		.setkey		= ccm_setkey,
		.setauthsize	= ccm_setauthsize,
		.encrypt	= ccm_encrypt,
		.decrypt	= ccm_decrypt,
	}
};

static int __init aes_mod_init(void)
{
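	/* only register if the CPU implements the AES instructions */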
	if (!(elf_hwcap & HWCAP_AES))
		return -ENODEV;
	return crypto_register_alg(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_alg(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("ccm(aes)");