/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

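/*
 * Precomputed key material: H and its powers H^2, H^3 and H^4, each in the
 * shifted/reflected form produced by ghash_reflect() below, so the NEON
 * code can fold several blocks into a single reduction.
 */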
struct ghash_key {
	u64	h[2];
	u64	h2[2];
	u64	h3[2];
	u64	h4[2];
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

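/*
 * NEON assembly routines (in the accompanying ghash-ce-core.S): the p64
 * variant uses the 64x64->128 polynomial multiply (vmull.p64) provided by
 * the ARMv8 Crypto Extensions, while the p8 variant synthesizes the same
 * multiplication from vmull.p8 on CPUs that lack PMULL support.
 */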
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);

static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
				  struct ghash_key const *k,
				  const char *head);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

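/*
 * Buffer the input into GHASH_BLOCK_SIZE chunks: a leftover partial block
 * is kept in ctx->buf and handed to the NEON routine as the 'head' of the
 * next call, so the kernel_neon_begin()/end() bracket is entered at most
 * once per update.
 */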
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		kernel_neon_begin();
		pmull_ghash_update(blocks, ctx->digest, src, key,
				   partial ? ctx->buf : NULL);
		kernel_neon_end();
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

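/*
 * Zero-pad and process any remaining partial block, then emit the digest.
 * The two 64-bit halves are held in reverse order internally, so the high
 * word (digest[1]) is written out first.
 */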
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		kernel_neon_begin();
		pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL);
		kernel_neon_end();
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

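/*
 * Convert a key from the byte-swapped GHASH convention into the form the
 * NEON code expects: the 128-bit value shifted left by one bit, with any
 * carry out of the top bit folded back in via the reflected GHASH
 * reduction polynomial (the 0xc2... constant).
 */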
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

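/*
 * Derive H from the raw key, then compute H^2, H^3 and H^4 using the
 * generic GF(2^128) multiply, so the NEON code can aggregate several
 * blocks per reduction.
 */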
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);
	be128 h, k;

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(&k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h, &k);

	h = k;
	gf128mul_lle(&h, &k);
	ghash_reflect(key->h2, &h);

	gf128mul_lle(&h, &k);
	ghash_reflect(key->h3, &h);

	gf128mul_lle(&h, &k);
	ghash_reflect(key->h4, &h);

	return 0;
}

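/*
 * The synchronous shash can only run when NEON is usable, so it is
 * registered as an internal algorithm (priority 0, CRYPTO_ALG_INTERNAL):
 * it must not be selected directly, only through the cryptd-backed async
 * wrapper below.
 */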
static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
	.base			= {
		.cra_name	= "__ghash",
		.cra_driver_name = "__driver-ghash-ce",
		.cra_priority	= 0,
		.cra_flags	= CRYPTO_ALG_INTERNAL,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_key),
		.cra_module	= THIS_MODULE,
	},
};

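/*
 * Async frontend: NEON registers cannot be used from all contexts, so when
 * SIMD is unavailable the request is deferred to cryptd, which reruns it
 * from process context; otherwise the inner shash is invoked directly.
 */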
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	desc->flags = req->base.flags;
	return crypto_shash_init(desc);
}

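/*
 * Bounce to cryptd not only when SIMD is unusable, but also when cryptd
 * already has requests queued while we are in atomic context, so that
 * requests are not reordered with respect to those still in its queue.
 */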
static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!may_use_simd() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!may_use_simd() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!may_use_simd() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		desc->flags = req->base.flags;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	return err;
}

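/*
 * Instantiate the internal cryptd transform that wraps the NEON-only
 * shash, and size the request context so a cryptd request can be embedded
 * in it.
 */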
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__driver-ghash-ce",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};

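/*
 * NEON is a hard requirement; the PMULL hwcap merely selects the faster
 * vmull.p64 code path over the vmull.p8 fallback.
 */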
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL)
		pmull_ghash_update = pmull_ghash_update_p64;
	else
		pmull_ghash_update = pmull_ghash_update_p8;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
	return err;
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);