xref: /linux/arch/powerpc/crypto/sha256-spe-glue.c (revision 44f57d78)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for SHA-256 implementation for SPE instructions (PPC)
 *
 * Based on the generic implementation. The assembler module takes care
 * of the SPE registers so it can run from interrupt context.
 *
 * Copyright (c) 2015 Markus Stockhausen <stockhausen@collogia.de>
 */

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <asm/byteorder.h>
#include <asm/switch_to.h>
#include <linux/hardirq.h>

/*
 * MAX_BYTES defines the number of bytes that are allowed to be processed
 * between preempt_disable() and preempt_enable(). SHA256 takes ~2,000
 * operations per 64 bytes. e500 cores can issue two arithmetic instructions
 * per clock cycle using one 32/64 bit unit (SU1) and one 32 bit unit (SU2).
 * Thus 1KB of input data needs an estimated maximum of 18,000 cycles,
 * headroom for cache misses included. Even on the low-end model clocked
 * at 667 MHz this amounts to a critical time window of less than 27 us.
 */
#define MAX_BYTES 1024
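/*
 * Back-of-the-envelope check of the estimate above:
 *   1024 bytes / 64 bytes per block        = 16 blocks
 *   16 blocks * ~2,000 operations          = ~32,000 operations
 *   ~32,000 ops / 2 instructions per cycle = ~16,000 cycles (+ headroom)
 *   ~18,000 cycles / 667 MHz               = ~27 us with preemption disabled
 */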

extern void ppc_spe_sha256_transform(u32 *state, const u8 *src, u32 blocks);

static void spe_begin(void)
{
	/* We just start SPE operations and will save SPE registers later. */
	preempt_disable();
	enable_kernel_spe();
}

static void spe_end(void)
{
	disable_kernel_spe();
	/* reenable preemption */
	preempt_enable();
}
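
/*
 * Every call to ppc_spe_sha256_transform() below is bracketed by
 * spe_begin()/spe_end(), and the update path hands over at most MAX_BYTES
 * per window, which bounds the time spent with preemption disabled.
 */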

static inline void ppc_sha256_clear_context(struct sha256_state *sctx)
{
	int count = sizeof(struct sha256_state) >> 2;
	u32 *ptr = (u32 *)sctx;

	/* make sure we can clear the fast way */
	BUILD_BUG_ON(sizeof(struct sha256_state) % 4);
	do { *ptr++ = 0; } while (--count);
}

static int ppc_spe_sha256_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SHA256_H0;
	sctx->state[1] = SHA256_H1;
	sctx->state[2] = SHA256_H2;
	sctx->state[3] = SHA256_H3;
	sctx->state[4] = SHA256_H4;
	sctx->state[5] = SHA256_H5;
	sctx->state[6] = SHA256_H6;
	sctx->state[7] = SHA256_H7;
	sctx->count = 0;

	return 0;
}

static int ppc_spe_sha224_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SHA224_H0;
	sctx->state[1] = SHA224_H1;
	sctx->state[2] = SHA224_H2;
	sctx->state[3] = SHA224_H3;
	sctx->state[4] = SHA224_H4;
	sctx->state[5] = SHA224_H5;
	sctx->state[6] = SHA224_H6;
	sctx->state[7] = SHA224_H7;
	sctx->count = 0;

	return 0;
}

static int ppc_spe_sha256_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	const unsigned int offset = sctx->count & 0x3f;
	const unsigned int avail = 64 - offset;
	unsigned int bytes;
	const u8 *src = data;
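
	/* not enough data to complete a 64 byte block: just buffer it */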
	if (avail > len) {
		sctx->count += len;
		memcpy((char *)sctx->buf + offset, src, len);
		return 0;
	}

	sctx->count += len;

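	/* fill up and process a previously buffered partial block first */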
	if (offset) {
		memcpy((char *)sctx->buf + offset, src, avail);

		spe_begin();
		ppc_spe_sha256_transform(sctx->state, (const u8 *)sctx->buf, 1);
		spe_end();

		len -= avail;
		src += avail;
	}

	while (len > 63) {
		/* cut input data into smaller blocks */
		bytes = (len > MAX_BYTES) ? MAX_BYTES : len;
		bytes = bytes & ~0x3f;

		spe_begin();
		ppc_spe_sha256_transform(sctx->state, src, bytes >> 6);
		spe_end();

		src += bytes;
		len -= bytes;
	}

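	/* buffer the remaining bytes (less than one block) for the next call */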
	memcpy((char *)sctx->buf, src, len);
	return 0;
}

static int ppc_spe_sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	const unsigned int offset = sctx->count & 0x3f;
	char *p = (char *)sctx->buf + offset;
	int padlen;
	__be64 *pbits = (__be64 *)(((char *)&sctx->buf) + 56);
	__be32 *dst = (__be32 *)out;

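	/*
	 * SHA-256 padding: append a 0x80 byte, zero-fill, and store the
	 * message length in bits as a 64 bit big-endian value in the last
	 * 8 bytes of a block. 55 = 64 - 1 (0x80 byte) - 8 (length field);
	 * if padlen goes negative the length no longer fits and an extra
	 * block is needed.
	 */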
	padlen = 55 - offset;
	*p++ = 0x80;

	spe_begin();

	if (padlen < 0) {
		memset(p, 0x00, padlen + sizeof(u64));
		ppc_spe_sha256_transform(sctx->state, sctx->buf, 1);
		p = (char *)sctx->buf;
		padlen = 56;
	}

	memset(p, 0, padlen);
	*pbits = cpu_to_be64(sctx->count << 3);
	ppc_spe_sha256_transform(sctx->state, sctx->buf, 1);

	spe_end();

	dst[0] = cpu_to_be32(sctx->state[0]);
	dst[1] = cpu_to_be32(sctx->state[1]);
	dst[2] = cpu_to_be32(sctx->state[2]);
	dst[3] = cpu_to_be32(sctx->state[3]);
	dst[4] = cpu_to_be32(sctx->state[4]);
	dst[5] = cpu_to_be32(sctx->state[5]);
	dst[6] = cpu_to_be32(sctx->state[6]);
	dst[7] = cpu_to_be32(sctx->state[7]);

	ppc_sha256_clear_context(sctx);
	return 0;
}

static int ppc_spe_sha224_final(struct shash_desc *desc, u8 *out)
{
	u32 D[SHA256_DIGEST_SIZE >> 2];
	__be32 *dst = (__be32 *)out;

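	/*
	 * SHA-224 shares the SHA-256 compression function and differs only
	 * in its initial values and in truncating the digest to 224 bits,
	 * so finalize as SHA-256 and copy out the first seven words.
	 */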
	ppc_spe_sha256_final(desc, (u8 *)D);

	/* avoid bytewise memcpy */
	dst[0] = D[0];
	dst[1] = D[1];
	dst[2] = D[2];
	dst[3] = D[3];
	dst[4] = D[4];
	dst[5] = D[5];
	dst[6] = D[6];

	/* clear sensitive data */
	memzero_explicit(D, SHA256_DIGEST_SIZE);
	return 0;
}

static int ppc_spe_sha256_export(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));
	return 0;
}

static int ppc_spe_sha256_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));
	return 0;
}

static struct shash_alg algs[2] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	ppc_spe_sha256_init,
	.update		=	ppc_spe_sha256_update,
	.final		=	ppc_spe_sha256_final,
	.export		=	ppc_spe_sha256_export,
	.import		=	ppc_spe_sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name=	"sha256-ppc-spe",
		.cra_priority	=	300,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	ppc_spe_sha224_init,
	.update		=	ppc_spe_sha256_update,
	.final		=	ppc_spe_sha224_final,
	.export		=	ppc_spe_sha256_export,
	.import		=	ppc_spe_sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name=	"sha224-ppc-spe",
		.cra_priority	=	300,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
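
/*
 * Illustrative sketch only: how a caller reaches these algorithms through
 * the generic shash API. The crypto core selects the "sha256"/"sha224"
 * implementation with the highest cra_priority, so this driver is preferred
 * over the generic C code once the module is loaded. The function name
 * below is hypothetical and not part of this driver.
 */
static int __maybe_unused sha256_spe_usage_example(const u8 *data,
						   unsigned int len, u8 *digest)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* one-shot digest over the whole buffer */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, digest);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}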

static int __init ppc_spe_sha256_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit ppc_spe_sha256_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(ppc_spe_sha256_mod_init);
module_exit(ppc_spe_sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm, SPE optimized");

MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ppc-spe");
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ppc-spe");