// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * SHA-3, as specified in
 * https://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
 *
 * SHA-3 code by Jeff Garzik <jeff@garzik.org>
 *               Ard Biesheuvel <ard.biesheuvel@linaro.org>
 */
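/*
 * Example usage (a minimal sketch, not part of the original file): hashing a
 * caller-supplied buffer "data" of "len" bytes with the "sha3-256" shash that
 * this module registers.  Error handling is trimmed for brevity.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha3-256", 0, 0);
 *	u8 out[SHA3_256_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, out);
 *		crypto_free_shash(tfm);
 *	}
 */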
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/sha3.h>
#include <asm/unaligned.h>

/*
 * On some 32-bit architectures (h8300), GCC ends up using
 * over 1 KB of stack if we inline the round calculation into the loop
 * in keccakf(). On the other hand, on 64-bit architectures with plenty
 * of [64-bit wide] general purpose registers, not inlining it severely
 * hurts performance. So let's use 64-bitness as a heuristic to decide
 * whether to inline or not.
 */
#ifdef CONFIG_64BIT
#define SHA3_INLINE	inline
#else
#define SHA3_INLINE	noinline
#endif

#define KECCAK_ROUNDS 24

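/* Iota-step round constants for Keccak-f[1600], one per round (FIPS 202). */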
static const u64 keccakf_rndc[24] = {
	0x0000000000000001ULL, 0x0000000000008082ULL, 0x800000000000808aULL,
	0x8000000080008000ULL, 0x000000000000808bULL, 0x0000000080000001ULL,
	0x8000000080008081ULL, 0x8000000000008009ULL, 0x000000000000008aULL,
	0x0000000000000088ULL, 0x0000000080008009ULL, 0x000000008000000aULL,
	0x000000008000808bULL, 0x800000000000008bULL, 0x8000000000008089ULL,
	0x8000000000008003ULL, 0x8000000000008002ULL, 0x8000000000000080ULL,
	0x000000000000800aULL, 0x800000008000000aULL, 0x8000000080008081ULL,
	0x8000000000008080ULL, 0x0000000080000001ULL, 0x8000000080008008ULL
};

/*
 * One round of the Keccak-f[1600] permutation (theta, rho, pi, chi);
 * the iota step is applied by the caller, keccakf().
 */

static SHA3_INLINE void keccakf_round(u64 st[25])
{
	u64 t[5], tt, bc[5];

	/* Theta */
	bc[0] = st[0] ^ st[5] ^ st[10] ^ st[15] ^ st[20];
	bc[1] = st[1] ^ st[6] ^ st[11] ^ st[16] ^ st[21];
	bc[2] = st[2] ^ st[7] ^ st[12] ^ st[17] ^ st[22];
	bc[3] = st[3] ^ st[8] ^ st[13] ^ st[18] ^ st[23];
	bc[4] = st[4] ^ st[9] ^ st[14] ^ st[19] ^ st[24];

	t[0] = bc[4] ^ rol64(bc[1], 1);
	t[1] = bc[0] ^ rol64(bc[2], 1);
	t[2] = bc[1] ^ rol64(bc[3], 1);
	t[3] = bc[2] ^ rol64(bc[4], 1);
	t[4] = bc[3] ^ rol64(bc[0], 1);

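	/*
	 * Lane (0, 0) is neither moved nor rotated by Rho/Pi, so its theta
	 * XOR is applied here; the theta XORs for the remaining lanes are
	 * folded into the Rho Pi pass below.
	 */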
	st[0] ^= t[0];

	/* Rho Pi */
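	/*
	 * Combined Rho (per-lane rotation) and Pi (lane permutation): the 24
	 * moved lanes form a single cycle, so tt saves the original st[1],
	 * which is consumed by the last assignment in the chain.
	 */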
	tt = st[1];
	st[ 1] = rol64(st[ 6] ^ t[1], 44);
	st[ 6] = rol64(st[ 9] ^ t[4], 20);
	st[ 9] = rol64(st[22] ^ t[2], 61);
	st[22] = rol64(st[14] ^ t[4], 39);
	st[14] = rol64(st[20] ^ t[0], 18);
	st[20] = rol64(st[ 2] ^ t[2], 62);
	st[ 2] = rol64(st[12] ^ t[2], 43);
	st[12] = rol64(st[13] ^ t[3], 25);
	st[13] = rol64(st[19] ^ t[4],  8);
	st[19] = rol64(st[23] ^ t[3], 56);
	st[23] = rol64(st[15] ^ t[0], 41);
	st[15] = rol64(st[ 4] ^ t[4], 27);
	st[ 4] = rol64(st[24] ^ t[4], 14);
	st[24] = rol64(st[21] ^ t[1],  2);
	st[21] = rol64(st[ 8] ^ t[3], 55);
	st[ 8] = rol64(st[16] ^ t[1], 45);
	st[16] = rol64(st[ 5] ^ t[0], 36);
	st[ 5] = rol64(st[ 3] ^ t[3], 28);
	st[ 3] = rol64(st[18] ^ t[3], 21);
	st[18] = rol64(st[17] ^ t[2], 15);
	st[17] = rol64(st[11] ^ t[1], 10);
	st[11] = rol64(st[ 7] ^ t[2],  6);
	st[ 7] = rol64(st[10] ^ t[0],  3);
	st[10] = rol64(    tt ^ t[1],  1);

	/* Chi */
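	/*
	 * Chi: within each row, st[x] ^= ~st[x + 1] & st[x + 2] (indices mod
	 * 5); the results are staged in bc[] so every lane sees the row's
	 * pre-chi values.
	 */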
	bc[ 0] = ~st[ 1] & st[ 2];
	bc[ 1] = ~st[ 2] & st[ 3];
	bc[ 2] = ~st[ 3] & st[ 4];
	bc[ 3] = ~st[ 4] & st[ 0];
	bc[ 4] = ~st[ 0] & st[ 1];
	st[ 0] ^= bc[ 0];
	st[ 1] ^= bc[ 1];
	st[ 2] ^= bc[ 2];
	st[ 3] ^= bc[ 3];
	st[ 4] ^= bc[ 4];

	bc[ 0] = ~st[ 6] & st[ 7];
	bc[ 1] = ~st[ 7] & st[ 8];
	bc[ 2] = ~st[ 8] & st[ 9];
	bc[ 3] = ~st[ 9] & st[ 5];
	bc[ 4] = ~st[ 5] & st[ 6];
	st[ 5] ^= bc[ 0];
	st[ 6] ^= bc[ 1];
	st[ 7] ^= bc[ 2];
	st[ 8] ^= bc[ 3];
	st[ 9] ^= bc[ 4];

	bc[ 0] = ~st[11] & st[12];
	bc[ 1] = ~st[12] & st[13];
	bc[ 2] = ~st[13] & st[14];
	bc[ 3] = ~st[14] & st[10];
	bc[ 4] = ~st[10] & st[11];
	st[10] ^= bc[ 0];
	st[11] ^= bc[ 1];
	st[12] ^= bc[ 2];
	st[13] ^= bc[ 3];
	st[14] ^= bc[ 4];

	bc[ 0] = ~st[16] & st[17];
	bc[ 1] = ~st[17] & st[18];
	bc[ 2] = ~st[18] & st[19];
	bc[ 3] = ~st[19] & st[15];
	bc[ 4] = ~st[15] & st[16];
	st[15] ^= bc[ 0];
	st[16] ^= bc[ 1];
	st[17] ^= bc[ 2];
	st[18] ^= bc[ 3];
	st[19] ^= bc[ 4];

	bc[ 0] = ~st[21] & st[22];
	bc[ 1] = ~st[22] & st[23];
	bc[ 2] = ~st[23] & st[24];
	bc[ 3] = ~st[24] & st[20];
	bc[ 4] = ~st[20] & st[21];
	st[20] ^= bc[ 0];
	st[21] ^= bc[ 1];
	st[22] ^= bc[ 2];
	st[23] ^= bc[ 3];
	st[24] ^= bc[ 4];
}

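/* Apply the full 24-round Keccak-f[1600] permutation to the 5x5 lane state. */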
static void keccakf(u64 st[25])
{
	int round;

	for (round = 0; round < KECCAK_ROUNDS; round++) {
		keccakf_round(st);
		/* Iota */
		st[0] ^= keccakf_rndc[round];
	}
}

int crypto_sha3_init(struct shash_desc *desc)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);

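	/*
	 * The sponge rate (rsiz) is the 200-byte state minus a capacity of
	 * twice the digest size, as specified by FIPS 202; rsizw is the same
	 * rate counted in 64-bit words.
	 */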
	sctx->rsiz = 200 - 2 * digest_size;
	sctx->rsizw = sctx->rsiz / 8;
	sctx->partial = 0;

	memset(sctx->st, 0, sizeof(sctx->st));
	return 0;
}
EXPORT_SYMBOL(crypto_sha3_init);

int crypto_sha3_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int done;
	const u8 *src;

	done = 0;
	src = data;

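	/*
	 * Absorb: once at least one full rate-sized block is available
	 * (buffered partial bytes plus new data), XOR rsiz-byte chunks into
	 * the state and permute; whatever is left over is stashed in
	 * sctx->buf for the next update or the final padding.
	 */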
	if ((sctx->partial + len) > (sctx->rsiz - 1)) {
		if (sctx->partial) {
			done = -sctx->partial;
			memcpy(sctx->buf + sctx->partial, data,
			       done + sctx->rsiz);
			src = sctx->buf;
		}

		do {
			unsigned int i;

			for (i = 0; i < sctx->rsizw; i++)
				sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
			keccakf(sctx->st);

			done += sctx->rsiz;
			src = data + done;
		} while (done + (sctx->rsiz - 1) < len);

		sctx->partial = 0;
	}
	memcpy(sctx->buf + sctx->partial, src, len - done);
	sctx->partial += (len - done);

	return 0;
}
EXPORT_SYMBOL(crypto_sha3_update);

int crypto_sha3_final(struct shash_desc *desc, u8 *out)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int i, inlen = sctx->partial;
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
	__le64 *digest = (__le64 *)out;

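	/*
	 * SHA-3 padding: the 0b01 domain-separation suffix followed by the
	 * pad10*1 rule.  On a byte-aligned message this is 0x06 right after
	 * the data and 0x80 ORed into the last byte of the rate block.
	 */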
	sctx->buf[inlen++] = 0x06;
	memset(sctx->buf + inlen, 0, sctx->rsiz - inlen);
	sctx->buf[sctx->rsiz - 1] |= 0x80;

	for (i = 0; i < sctx->rsizw; i++)
		sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);

	keccakf(sctx->st);

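	/*
	 * Squeeze: every SHA-3 digest fits in a single rate-sized block, so
	 * one permutation is enough.  Lanes are written out little-endian;
	 * SHA3-224's 28-byte digest needs a trailing 32-bit word.
	 */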
	for (i = 0; i < digest_size / 8; i++)
		put_unaligned_le64(sctx->st[i], digest++);

	if (digest_size & 4)
		put_unaligned_le32(sctx->st[i], (__le32 *)digest);

	memset(sctx, 0, sizeof(*sctx));
	return 0;
}
EXPORT_SYMBOL(crypto_sha3_final);

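/*
 * Generic shash implementations of the four SHA-3 digest sizes; each
 * algorithm's block size is its sponge rate in bytes.
 */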
static struct shash_alg algs[] = { {
	.digestsize		= SHA3_224_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-224",
	.base.cra_driver_name	= "sha3-224-generic",
	.base.cra_blocksize	= SHA3_224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_256_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-256",
	.base.cra_driver_name	= "sha3-256-generic",
	.base.cra_blocksize	= SHA3_256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_384_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-384",
	.base.cra_driver_name	= "sha3-384-generic",
	.base.cra_blocksize	= SHA3_384_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_512_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-512",
	.base.cra_driver_name	= "sha3-512-generic",
	.base.cra_blocksize	= SHA3_512_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };

static int __init sha3_generic_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha3_generic_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

subsys_initcall(sha3_generic_mod_init);
module_exit(sha3_generic_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-3 Secure Hash Algorithm");

MODULE_ALIAS_CRYPTO("sha3-224");
MODULE_ALIAS_CRYPTO("sha3-224-generic");
MODULE_ALIAS_CRYPTO("sha3-256");
MODULE_ALIAS_CRYPTO("sha3-256-generic");
MODULE_ALIAS_CRYPTO("sha3-384");
MODULE_ALIAS_CRYPTO("sha3-384-generic");
MODULE_ALIAS_CRYPTO("sha3-512");
MODULE_ALIAS_CRYPTO("sha3-512-generic");