// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
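	 * (e.g. a 16 byte/128 bit key gives 6 + 16/4 = 10 rounds)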
	 */
	return 6 + ctx->key_length / 4;
}

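/*
 * The low-level CCM primitives below are implemented in assembly (in
 * aes-ce-ccm-core.S) using the ARMv8 Crypto Extensions instructions.
 */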
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}

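/*
 * RFC 3610 permits tag lengths of 4, 6, 8, 10, 12, 14 or 16 bytes, so
 * reject odd or too-small values here; the upper bound is enforced via
 * .maxauthsize in the algorithm definition below.
 */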
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

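/*
 * Assemble the B0 block that primes the CBC-MAC: the flags byte, the
 * nonce taken from the IV and the message length in the trailing L
 * bytes. For example, iv[0] == 3 means L == 4, so IV bytes 1..11 carry
 * the nonce and maciv bytes 12..15 receive the big-endian msglen.
 */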
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even though the CCM spec allows L values of up to 8, the Linux
	 * crypto API uses a u32 to represent msglen, so the top 4 bytes are
	 * always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
			     num_rounds(key));
}

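/*
 * Fold the associated data into the MAC. Per RFC 3610, the AAD is
 * prefixed with an encoding of its length: two bytes if it is shorter
 * than 0xff00 (2^16 - 2^8), otherwise the marker 0xff 0xfe followed by
 * the length as a four-byte big-endian value.
 */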
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		n = min_t(u32, n, SZ_4K); /* yield NEON at least every 4k */
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);

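		/*
		 * Briefly drop the NEON context whenever the remaining
		 * length crosses a 4 KiB boundary, so that pending
		 * preemption is not held off for the whole AAD walk.
		 */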
		if (len / SZ_4K > (len - n) / SZ_4K) {
			kernel_neon_end();
			kernel_neon_begin();
		}

		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

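/*
 * CCM encryption: prime the CBC-MAC with the B0 block (and the AAD, if
 * any), CTR-encrypt the plaintext while folding it into the MAC, then
 * encrypt the MAC with counter block 0 to produce the tag, which is
 * appended to the ciphertext.
 */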
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

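	/*
	 * Each pass through the walk handles whole blocks only, leaving a
	 * partial tail for the next step; the final step processes all
	 * remaining bytes, including a partial last block, before the tag
	 * is computed.
	 */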
	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == walk.total)
			tail = 0;

		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
			if (unlikely(err))
				return err;
			if (unlikely(walk.nbytes))
				kernel_neon_begin();
		}
	} while (walk.nbytes);

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

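/*
 * CCM decryption mirrors the encrypt path, except that the expected tag
 * is read from the end of the source and compared with the computed one
 * using crypto_memneq(), which runs in constant time to avoid leaking
 * the position of the first mismatching byte.
 */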
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);
	if (unlikely(err))
		return err;

	kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	do {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		if (walk.nbytes == walk.total)
			tail = 0;

		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		if (walk.nbytes == walk.total)
			ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

		kernel_neon_end();

		if (walk.nbytes) {
			err = skcipher_walk_done(&walk, tail);
			if (unlikely(err))
				return err;
			if (unlikely(walk.nbytes))
				kernel_neon_begin();
		}
	} while (walk.nbytes);

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

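/*
 * cra_blocksize is 1 since CCM is CTR based and places no alignment
 * requirements on the input length; the priority of 300 is meant to
 * make this driver preferred over the generic ccm(aes) template.
 */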
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");