/*	$OpenBSD: rmd160.c,v 1.3 2001/09/26 21:40:13 markus Exp $	*/
/*-
 * Copyright (c) 2001 Markus Friedl.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Preneel, Bosselaers, Dobbertin, "The Cryptographic Hash Function RIPEMD-160",
 * RSA Laboratories, CryptoBytes, Volume 3, Number 2, Autumn 1997,
 * ftp://ftp.rsasecurity.com/pub/cryptobytes/crypto3n2.pdf
 */

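/*
 * Typical use (sketch only; `data' and `len' stand for the caller's input):
 *
 *	RMD160_CTX ctx;
 *	u_char digest[20];
 *
 *	RMD160Init(&ctx);
 *	RMD160Update(&ctx, data, len);
 *	RMD160Final(digest, &ctx);
 */
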
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/endian.h>
#include <opencrypto/rmd160.h>

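/*
 * Store a 64-bit (resp. 32-bit) quantity in little-endian byte order;
 * RIPEMD-160 serializes both its length field and its digest this way.
 */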
#define PUT_64BIT_LE(cp, value) do { \
	(cp)[7] = (value) >> 56; \
	(cp)[6] = (value) >> 48; \
	(cp)[5] = (value) >> 40; \
	(cp)[4] = (value) >> 32; \
	(cp)[3] = (value) >> 24; \
	(cp)[2] = (value) >> 16; \
	(cp)[1] = (value) >> 8; \
	(cp)[0] = (value); } while (0)

#define PUT_32BIT_LE(cp, value) do { \
	(cp)[3] = (value) >> 24; \
	(cp)[2] = (value) >> 16; \
	(cp)[1] = (value) >> 8; \
	(cp)[0] = (value); } while (0)

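/*
 * Initial chaining values (H0..H4) and the per-round additive constants
 * for the left (K0..K4) and right (KK0..KK4) computation lines.
 */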
#define	H0	0x67452301U
#define	H1	0xEFCDAB89U
#define	H2	0x98BADCFEU
#define	H3	0x10325476U
#define	H4	0xC3D2E1F0U

#define	K0	0x00000000U
#define	K1	0x5A827999U
#define	K2	0x6ED9EBA1U
#define	K3	0x8F1BBCDCU
#define	K4	0xA953FD4EU

#define	KK0	0x50A28BE6U
#define	KK1	0x5C4DD124U
#define	KK2	0x6D703EF3U
#define	KK3	0x7A6D76E9U
#define	KK4	0x00000000U

/* rotate x left n bits.  */
#define ROL(n, x) (((x) << (n)) | ((x) >> (32-(n))))

#define F0(x, y, z) ((x) ^ (y) ^ (z))
#define F1(x, y, z) (((x) & (y)) | ((~x) & (z)))
#define F2(x, y, z) (((x) | (~y)) ^ (z))
#define F3(x, y, z) (((x) & (z)) | ((y) & (~z)))
#define F4(x, y, z) ((x) ^ ((y) | (~z)))

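/*
 * R is one RIPEMD-160 step: add the boolean function Fj of (b,c,d), the
 * message word X(rj) and the round constant Kj into a, rotate the sum
 * left by sj bits, add e, and rotate c left by 10 bits.
 */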
#define R(a, b, c, d, e, Fj, Kj, sj, rj) \
	do { \
		a = ROL(sj, a + Fj(b,c,d) + X(rj) + Kj) + e; \
		c = ROL(10, c); \
	} while(0)

#define X(i)	x[i]

static u_char PADDING[64] = {
	0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

void
RMD160Init(RMD160_CTX *ctx)
{
	ctx->count = 0;
	ctx->state[0] = H0;
	ctx->state[1] = H1;
	ctx->state[2] = H2;
	ctx->state[3] = H3;
	ctx->state[4] = H4;
}

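/*
 * Absorb len bytes of input.  Partial data is collected in the 64-byte
 * block buffer; every completed block is run through RMD160Transform.
 */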
void
RMD160Update(RMD160_CTX *ctx, const u_char *input, uint32_t len)
{
	uint32_t have, off, need;

	have = (ctx->count/8) % 64;
	need = 64 - have;
	ctx->count += 8 * len;
	off = 0;

	if (len >= need) {
		if (have) {
			memcpy(ctx->buffer + have, input, need);
			RMD160Transform(ctx->state, ctx->buffer);
			off = need;
			have = 0;
		}
		/* now the buffer is empty */
		while (off + 64 <= len) {
			RMD160Transform(ctx->state, input+off);
			off += 64;
		}
	}
	if (off < len)
		memcpy(ctx->buffer + have, input+off, len-off);
}

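/*
 * Append the padding (a 0x80 byte, zeroes, then the 64-bit little-endian
 * bit count), write the 20-byte digest if requested and wipe the context.
 */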
void
RMD160Final(u_char digest[20], RMD160_CTX *ctx)
{
	int i;
	u_char size[8];
	uint32_t padlen;

	PUT_64BIT_LE(size, ctx->count);

	/*
	 * pad to 64 byte blocks, at least one byte from PADDING plus 8 bytes
	 * for the size
	 */
	padlen = 64 - ((ctx->count/8) % 64);
	if (padlen < 1 + 8)
		padlen += 64;
	RMD160Update(ctx, PADDING, padlen - 8);		/* padlen - 8 <= 64 */
	RMD160Update(ctx, size, 8);

	if (digest != NULL)
		for (i = 0; i < 5; i++)
			PUT_32BIT_LE(digest + i*4, ctx->state[i]);

	memset(ctx, 0, sizeof (*ctx));
}

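/*
 * Compress one 64-byte block into the chaining state.  The block is
 * processed by two independent lines (the "left" line with constants
 * K0..K4 and the "right" line with KK0..KK4), each five rounds of
 * sixteen steps, and the two results are folded back into state[].
 */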
void
RMD160Transform(uint32_t state[5], const u_char block[64])
{
	uint32_t a, b, c, d, e, aa, bb, cc, dd, ee, t, x[16];

#if BYTE_ORDER == LITTLE_ENDIAN
	memcpy(x, block, 64);
#else
	int i;

	for (i = 0; i < 16; i++)
		x[i] = bswap32(*(const uint32_t*)(block+i*4));
#endif

	a = state[0];
	b = state[1];
	c = state[2];
	d = state[3];
	e = state[4];

	/* Round 1 */
	R(a, b, c, d, e, F0, K0, 11,  0);
	R(e, a, b, c, d, F0, K0, 14,  1);
	R(d, e, a, b, c, F0, K0, 15,  2);
	R(c, d, e, a, b, F0, K0, 12,  3);
	R(b, c, d, e, a, F0, K0,  5,  4);
	R(a, b, c, d, e, F0, K0,  8,  5);
	R(e, a, b, c, d, F0, K0,  7,  6);
	R(d, e, a, b, c, F0, K0,  9,  7);
	R(c, d, e, a, b, F0, K0, 11,  8);
	R(b, c, d, e, a, F0, K0, 13,  9);
	R(a, b, c, d, e, F0, K0, 14, 10);
	R(e, a, b, c, d, F0, K0, 15, 11);
	R(d, e, a, b, c, F0, K0,  6, 12);
	R(c, d, e, a, b, F0, K0,  7, 13);
	R(b, c, d, e, a, F0, K0,  9, 14);
	R(a, b, c, d, e, F0, K0,  8, 15); /* #15 */
	/* Round 2 */
	R(e, a, b, c, d, F1, K1,  7,  7);
	R(d, e, a, b, c, F1, K1,  6,  4);
	R(c, d, e, a, b, F1, K1,  8, 13);
	R(b, c, d, e, a, F1, K1, 13,  1);
	R(a, b, c, d, e, F1, K1, 11, 10);
	R(e, a, b, c, d, F1, K1,  9,  6);
	R(d, e, a, b, c, F1, K1,  7, 15);
	R(c, d, e, a, b, F1, K1, 15,  3);
	R(b, c, d, e, a, F1, K1,  7, 12);
	R(a, b, c, d, e, F1, K1, 12,  0);
	R(e, a, b, c, d, F1, K1, 15,  9);
	R(d, e, a, b, c, F1, K1,  9,  5);
	R(c, d, e, a, b, F1, K1, 11,  2);
	R(b, c, d, e, a, F1, K1,  7, 14);
	R(a, b, c, d, e, F1, K1, 13, 11);
	R(e, a, b, c, d, F1, K1, 12,  8); /* #31 */
	/* Round 3 */
	R(d, e, a, b, c, F2, K2, 11,  3);
	R(c, d, e, a, b, F2, K2, 13, 10);
	R(b, c, d, e, a, F2, K2,  6, 14);
	R(a, b, c, d, e, F2, K2,  7,  4);
	R(e, a, b, c, d, F2, K2, 14,  9);
	R(d, e, a, b, c, F2, K2,  9, 15);
	R(c, d, e, a, b, F2, K2, 13,  8);
	R(b, c, d, e, a, F2, K2, 15,  1);
	R(a, b, c, d, e, F2, K2, 14,  2);
	R(e, a, b, c, d, F2, K2,  8,  7);
	R(d, e, a, b, c, F2, K2, 13,  0);
	R(c, d, e, a, b, F2, K2,  6,  6);
	R(b, c, d, e, a, F2, K2,  5, 13);
	R(a, b, c, d, e, F2, K2, 12, 11);
	R(e, a, b, c, d, F2, K2,  7,  5);
	R(d, e, a, b, c, F2, K2,  5, 12); /* #47 */
	/* Round 4 */
	R(c, d, e, a, b, F3, K3, 11,  1);
	R(b, c, d, e, a, F3, K3, 12,  9);
	R(a, b, c, d, e, F3, K3, 14, 11);
	R(e, a, b, c, d, F3, K3, 15, 10);
	R(d, e, a, b, c, F3, K3, 14,  0);
	R(c, d, e, a, b, F3, K3, 15,  8);
	R(b, c, d, e, a, F3, K3,  9, 12);
	R(a, b, c, d, e, F3, K3,  8,  4);
	R(e, a, b, c, d, F3, K3,  9, 13);
	R(d, e, a, b, c, F3, K3, 14,  3);
	R(c, d, e, a, b, F3, K3,  5,  7);
	R(b, c, d, e, a, F3, K3,  6, 15);
	R(a, b, c, d, e, F3, K3,  8, 14);
	R(e, a, b, c, d, F3, K3,  6,  5);
	R(d, e, a, b, c, F3, K3,  5,  6);
	R(c, d, e, a, b, F3, K3, 12,  2); /* #63 */
	/* Round 5 */
	R(b, c, d, e, a, F4, K4,  9,  4);
	R(a, b, c, d, e, F4, K4, 15,  0);
	R(e, a, b, c, d, F4, K4,  5,  5);
	R(d, e, a, b, c, F4, K4, 11,  9);
	R(c, d, e, a, b, F4, K4,  6,  7);
	R(b, c, d, e, a, F4, K4,  8, 12);
	R(a, b, c, d, e, F4, K4, 13,  2);
	R(e, a, b, c, d, F4, K4, 12, 10);
	R(d, e, a, b, c, F4, K4,  5, 14);
	R(c, d, e, a, b, F4, K4, 12,  1);
	R(b, c, d, e, a, F4, K4, 13,  3);
	R(a, b, c, d, e, F4, K4, 14,  8);
	R(e, a, b, c, d, F4, K4, 11, 11);
	R(d, e, a, b, c, F4, K4,  8,  6);
	R(c, d, e, a, b, F4, K4,  5, 15);
	R(b, c, d, e, a, F4, K4,  6, 13); /* #79 */

	aa = a; bb = b; cc = c; dd = d; ee = e;

	a = state[0];
	b = state[1];
	c = state[2];
	d = state[3];
	e = state[4];

	/* Parallel round 1 */
	R(a, b, c, d, e, F4, KK0,  8,  5);
	R(e, a, b, c, d, F4, KK0,  9, 14);
	R(d, e, a, b, c, F4, KK0,  9,  7);
	R(c, d, e, a, b, F4, KK0, 11,  0);
	R(b, c, d, e, a, F4, KK0, 13,  9);
	R(a, b, c, d, e, F4, KK0, 15,  2);
	R(e, a, b, c, d, F4, KK0, 15, 11);
	R(d, e, a, b, c, F4, KK0,  5,  4);
	R(c, d, e, a, b, F4, KK0,  7, 13);
	R(b, c, d, e, a, F4, KK0,  7,  6);
	R(a, b, c, d, e, F4, KK0,  8, 15);
	R(e, a, b, c, d, F4, KK0, 11,  8);
	R(d, e, a, b, c, F4, KK0, 14,  1);
	R(c, d, e, a, b, F4, KK0, 14, 10);
	R(b, c, d, e, a, F4, KK0, 12,  3);
	R(a, b, c, d, e, F4, KK0,  6, 12); /* #15 */
	/* Parallel round 2 */
	R(e, a, b, c, d, F3, KK1,  9,  6);
	R(d, e, a, b, c, F3, KK1, 13, 11);
	R(c, d, e, a, b, F3, KK1, 15,  3);
	R(b, c, d, e, a, F3, KK1,  7,  7);
	R(a, b, c, d, e, F3, KK1, 12,  0);
	R(e, a, b, c, d, F3, KK1,  8, 13);
	R(d, e, a, b, c, F3, KK1,  9,  5);
	R(c, d, e, a, b, F3, KK1, 11, 10);
	R(b, c, d, e, a, F3, KK1,  7, 14);
	R(a, b, c, d, e, F3, KK1,  7, 15);
	R(e, a, b, c, d, F3, KK1, 12,  8);
	R(d, e, a, b, c, F3, KK1,  7, 12);
	R(c, d, e, a, b, F3, KK1,  6,  4);
	R(b, c, d, e, a, F3, KK1, 15,  9);
	R(a, b, c, d, e, F3, KK1, 13,  1);
	R(e, a, b, c, d, F3, KK1, 11,  2); /* #31 */
	/* Parallel round 3 */
	R(d, e, a, b, c, F2, KK2,  9, 15);
	R(c, d, e, a, b, F2, KK2,  7,  5);
	R(b, c, d, e, a, F2, KK2, 15,  1);
	R(a, b, c, d, e, F2, KK2, 11,  3);
	R(e, a, b, c, d, F2, KK2,  8,  7);
	R(d, e, a, b, c, F2, KK2,  6, 14);
	R(c, d, e, a, b, F2, KK2,  6,  6);
	R(b, c, d, e, a, F2, KK2, 14,  9);
	R(a, b, c, d, e, F2, KK2, 12, 11);
	R(e, a, b, c, d, F2, KK2, 13,  8);
	R(d, e, a, b, c, F2, KK2,  5, 12);
	R(c, d, e, a, b, F2, KK2, 14,  2);
	R(b, c, d, e, a, F2, KK2, 13, 10);
	R(a, b, c, d, e, F2, KK2, 13,  0);
	R(e, a, b, c, d, F2, KK2,  7,  4);
	R(d, e, a, b, c, F2, KK2,  5, 13); /* #47 */
	/* Parallel round 4 */
	R(c, d, e, a, b, F1, KK3, 15,  8);
	R(b, c, d, e, a, F1, KK3,  5,  6);
	R(a, b, c, d, e, F1, KK3,  8,  4);
	R(e, a, b, c, d, F1, KK3, 11,  1);
	R(d, e, a, b, c, F1, KK3, 14,  3);
	R(c, d, e, a, b, F1, KK3, 14, 11);
	R(b, c, d, e, a, F1, KK3,  6, 15);
	R(a, b, c, d, e, F1, KK3, 14,  0);
	R(e, a, b, c, d, F1, KK3,  6,  5);
	R(d, e, a, b, c, F1, KK3,  9, 12);
	R(c, d, e, a, b, F1, KK3, 12,  2);
	R(b, c, d, e, a, F1, KK3,  9, 13);
	R(a, b, c, d, e, F1, KK3, 12,  9);
	R(e, a, b, c, d, F1, KK3,  5,  7);
	R(d, e, a, b, c, F1, KK3, 15, 10);
	R(c, d, e, a, b, F1, KK3,  8, 14); /* #63 */
	/* Parallel round 5 */
	R(b, c, d, e, a, F0, KK4,  8, 12);
	R(a, b, c, d, e, F0, KK4,  5, 15);
	R(e, a, b, c, d, F0, KK4, 12, 10);
	R(d, e, a, b, c, F0, KK4,  9,  4);
	R(c, d, e, a, b, F0, KK4, 12,  1);
	R(b, c, d, e, a, F0, KK4,  5,  5);
	R(a, b, c, d, e, F0, KK4, 14,  8);
	R(e, a, b, c, d, F0, KK4,  6,  7);
	R(d, e, a, b, c, F0, KK4,  8,  6);
	R(c, d, e, a, b, F0, KK4, 13,  2);
	R(b, c, d, e, a, F0, KK4,  6, 13);
	R(a, b, c, d, e, F0, KK4,  5, 14);
	R(e, a, b, c, d, F0, KK4, 15,  0);
	R(d, e, a, b, c, F0, KK4, 13,  3);
	R(c, d, e, a, b, F0, KK4, 11,  9);
	R(b, c, d, e, a, F0, KK4, 11, 11); /* #79 */

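	/* Fold the left (a..e) and right (aa..ee) lines back into the state. */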
	t =        state[1] + cc + d;
	state[1] = state[2] + dd + e;
	state[2] = state[3] + ee + a;
	state[3] = state[4] + aa + b;
	state[4] = state[0] + bb + c;
	state[0] = t;
}