1 /* 2 * Copyright 2005-2016 The OpenSSL Project Authors. All Rights Reserved. 3 * 4 * Licensed under the OpenSSL license (the "License"). You may not use 5 * this file except in compliance with the License. You can obtain a copy 6 * in the file LICENSE in the source distribution or at 7 * https://www.openssl.org/source/license.html 8 */ 9 10 /** 11 * The Whirlpool hashing function. 12 * 13 * See 14 * P.S.L.M. Barreto, V. Rijmen, 15 * ``The Whirlpool hashing function,'' 16 * NESSIE submission, 2000 (tweaked version, 2001), 17 * <https://www.cosic.esat.kuleuven.ac.be/nessie/workshop/submissions/whirlpool.zip> 18 * 19 * Based on "@version 3.0 (2003.03.12)" by Paulo S.L.M. Barreto and 20 * Vincent Rijmen. Lookup "reference implementations" on 21 * <http://planeta.terra.com.br/informatica/paulobarreto/> 22 * 23 * ============================================================================= 24 * 25 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ''AS IS'' AND ANY EXPRESS 26 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 27 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 28 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE 29 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 30 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 31 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 32 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 33 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 34 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, 35 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *
 */

#include "wp_locl.h"
#include <string.h>

/*
 * Local fixed-width aliases: u8 is an octet, u64 a 64-bit unsigned
 * integer (compiler-specific spelling selected below).
 */
typedef unsigned char u8;
#if (defined(_WIN32) || defined(_WIN64)) && !defined(__MINGW32)
typedef unsigned __int64 u64;
#elif defined(__arch64__)
typedef unsigned long u64;
#else
typedef unsigned long long u64;
#endif

#define ROUNDS  10

#define STRICT_ALIGNMENT
#if !defined(PEDANTIC) && (defined(__i386) || defined(__i386__) || \
                           defined(__x86_64) || defined(__x86_64__) || \
                           defined(_M_IX86) || defined(_M_AMD64) || \
                           defined(_M_X64))
/*
 * Well, formally there're couple of other architectures, which permit
 * unaligned loads, specifically those not crossing cache lines, IA-64 and
 * PowerPC...
 */
# undef STRICT_ALIGNMENT
#endif

#undef SMALL_REGISTER_BANK
#if defined(__i386) || defined(__i386__) || defined(_M_IX86)
# define SMALL_REGISTER_BANK
# if defined(WHIRLPOOL_ASM)
#  ifndef OPENSSL_SMALL_FOOTPRINT
/*
 * it appears that for elder non-MMX
 * CPUs this is actually faster!
 */
#   define OPENSSL_SMALL_FOOTPRINT
#  endif
/*
 * Divert to the MMX assembler implementation when bit 23 (MMX) of the
 * ia32 capability vector is set; otherwise break out and run the C code.
 */
#  define GO_FOR_MMX(ctx,inp,num)     do {                     \
        extern unsigned long OPENSSL_ia32cap_P[];              \
        void whirlpool_block_mmx(void *,const void *,size_t);  \
        if (!(OPENSSL_ia32cap_P[0] & (1<<23)))  break;         \
        whirlpool_block_mmx(ctx->H.c,inp,num);  return;        \
        } while (0)
# endif
#endif

#undef ROTATE
#ifndef PEDANTIC
# if defined(_MSC_VER)
#  if defined(_WIN64)           /* applies to both IA-64 and AMD64 */
#   pragma intrinsic(_rotl64)
#   define ROTATE(a,n) _rotl64((a),n)
#  endif
# elif defined(__GNUC__) && __GNUC__>=2
#  if defined(__x86_64) || defined(__x86_64__)
#   if defined(L_ENDIAN)
#    define ROTATE(a,n)       ({ u64 ret; asm ("rolq %1,%0"   \
                                   : "=r"(ret) : "J"(n),"0"(a) : "cc"); ret; })
#   elif defined(B_ENDIAN)
       /*
        * Most will argue that x86_64 is always little-endian. Well, yes, but
        * then we have stratus.com who has modified gcc to "emulate"
        * big-endian on x86. Is there evidence that they [or somebody else]
        * won't do same for x86_64? Naturally no. And this line is waiting
        * ready for that brave soul:-)
        */
#    define ROTATE(a,n)       ({ u64 ret; asm ("rorq %1,%0"   \
                                   : "=r"(ret) : "J"(n),"0"(a) : "cc"); ret; })
#   endif
#  elif defined(__ia64) || defined(__ia64__)
#   if defined(L_ENDIAN)
#    define ROTATE(a,n)       ({ u64 ret; asm ("shrp %0=%1,%1,%2" \
                                   : "=r"(ret) : "r"(a),"M"(64-(n))); ret; })
#   elif defined(B_ENDIAN)
#    define ROTATE(a,n)       ({ u64 ret; asm ("shrp %0=%1,%1,%2" \
                                   : "=r"(ret) : "r"(a),"M"(n)); ret; })
#   endif
#  endif
# endif
#endif

#if defined(OPENSSL_SMALL_FOOTPRINT)
# if !defined(ROTATE)
#  if defined(L_ENDIAN)         /* little-endians have to rotate left */
#   define ROTATE(i,n)       ((i)<<(n) ^ (i)>>(64-n))
#  elif defined(B_ENDIAN)       /* big-endians have to rotate right */
#   define ROTATE(i,n)       ((i)>>(n) ^ (i)<<(64-n))
#  endif
# endif
# if defined(ROTATE) && !defined(STRICT_ALIGNMENT)
#  define STRICT_ALIGNMENT      /* ensure smallest table size */
# endif
#endif

/*
 * Table size depends on STRICT_ALIGNMENT and whether or not endian-
 * specific ROTATE macro is defined. If STRICT_ALIGNMENT is not
 * defined, which is normally the case on x86[_64] CPUs, the table is
 * 4KB large unconditionally. Otherwise if ROTATE is defined, the
 * table is 2KB large, and otherwise - 16KB. 2KB table requires a
 * whole bunch of additional rotations, but I'm willing to "trade,"
 * because 16KB table certainly trashes L1 cache. I wish all CPUs
 * could handle unaligned load as 4KB table doesn't trash the cache,
 * nor does it require additional rotations.
 */
/*
 * Note that every Cn macro expands as two loads: one byte load and
 * one quadword load. One can argue that that many single-byte loads
 * is too excessive, as one could load a quadword and "milk" it for
 * eight 8-bit values instead. Well, yes, but in order to do so *and*
 * avoid excessive loads you have to accommodate a handful of 64-bit
 * values in the register bank and issue a bunch of shifts and mask.
 * It's a tradeoff: loads vs. shift and mask in big register bank[!].
 * On most CPUs eight single-byte loads are faster and I let other
 * ones to depend on smart compiler to fold byte loads if beneficial.
 * Hand-coded assembler would be another alternative:-)
 */
#ifdef STRICT_ALIGNMENT
# if defined(ROTATE)
/* 2KB table: a single copy, rotated on the fly by the Cn macros. */
#  define N   1
#  define LL(c0,c1,c2,c3,c4,c5,c6,c7) c0,c1,c2,c3,c4,c5,c6,c7
#  define C0(K,i) (Cx.q[K.c[(i)*8+0]])
#  define C1(K,i) ROTATE(Cx.q[K.c[(i)*8+1]],8)
#  define C2(K,i) ROTATE(Cx.q[K.c[(i)*8+2]],16)
#  define C3(K,i) ROTATE(Cx.q[K.c[(i)*8+3]],24)
#  define C4(K,i) ROTATE(Cx.q[K.c[(i)*8+4]],32)
#  define C5(K,i) ROTATE(Cx.q[K.c[(i)*8+5]],40)
#  define C6(K,i) ROTATE(Cx.q[K.c[(i)*8+6]],48)
#  define C7(K,i) ROTATE(Cx.q[K.c[(i)*8+7]],56)
# else
/* 16KB table: eight byte-rotated copies of every entry. */
#  define N   8
#  define LL(c0,c1,c2,c3,c4,c5,c6,c7) c0,c1,c2,c3,c4,c5,c6,c7, \
                                      c7,c0,c1,c2,c3,c4,c5,c6, \
                                      c6,c7,c0,c1,c2,c3,c4,c5, \
                                      c5,c6,c7,c0,c1,c2,c3,c4, \
                                      c4,c5,c6,c7,c0,c1,c2,c3, \
                                      c3,c4,c5,c6,c7,c0,c1,c2, \
                                      c2,c3,c4,c5,c6,c7,c0,c1, \
                                      c1,c2,c3,c4,c5,c6,c7,c0
#  define C0(K,i) (Cx.q[0+8*K.c[(i)*8+0]])
#  define C1(K,i) (Cx.q[1+8*K.c[(i)*8+1]])
#  define C2(K,i) (Cx.q[2+8*K.c[(i)*8+2]])
#  define C3(K,i) (Cx.q[3+8*K.c[(i)*8+3]])
#  define C4(K,i) (Cx.q[4+8*K.c[(i)*8+4]])
#  define C5(K,i) (Cx.q[5+8*K.c[(i)*8+5]])
#  define C6(K,i) (Cx.q[6+8*K.c[(i)*8+6]])
#  define C7(K,i) (Cx.q[7+8*K.c[(i)*8+7]])
# endif
#else
/* 4KB table: two copies, read at unaligned byte offsets. */
# define N    2
# define LL(c0,c1,c2,c3,c4,c5,c6,c7) c0,c1,c2,c3,c4,c5,c6,c7, \
                                     c0,c1,c2,c3,c4,c5,c6,c7
# define C0(K,i)        (((u64*)(Cx.c+0))[2*K.c[(i)*8+0]])
# define C1(K,i)        (((u64*)(Cx.c+7))[2*K.c[(i)*8+1]])
# define C2(K,i)        (((u64*)(Cx.c+6))[2*K.c[(i)*8+2]])
# define C3(K,i)        (((u64*)(Cx.c+5))[2*K.c[(i)*8+3]])
# define C4(K,i)        (((u64*)(Cx.c+4))[2*K.c[(i)*8+4]])
# define C5(K,i)        (((u64*)(Cx.c+3))[2*K.c[(i)*8+5]])
# define C6(K,i)        (((u64*)(Cx.c+2))[2*K.c[(i)*8+6]])
# define C7(K,i)        (((u64*)(Cx.c+1))[2*K.c[(i)*8+7]])
#endif

/*
 * Cx holds the N layout-specific copies of the 256-entry lookup table
 * (substitution box composed with the diffusion matrix), followed by
 * the ROUNDS round-constant words addressed via the RC macro below.
 */
static const
    union {
    u8 c[(256 * N + ROUNDS) * sizeof(u64)];
    u64 q[(256 * N + ROUNDS)];
} Cx = {
        {
            /* Note endian-neutral representation:-) */
            LL(0x18, 0x18, 0x60, 0x18, 0xc0, 0x78, 0x30, 0xd8),
            LL(0x23, 0x23, 0x8c, 0x23, 0x05, 0xaf, 0x46, 0x26),
            LL(0xc6, 0xc6, 0x3f, 0xc6, 0x7e, 0xf9, 0x91, 0xb8),
            LL(0xe8, 0xe8, 0x87, 0xe8, 0x13, 0x6f, 0xcd, 0xfb),
            LL(0x87, 0x87, 0x26, 0x87, 0x4c, 0xa1, 0x13, 0xcb),
            LL(0xb8, 0xb8, 0xda, 0xb8, 0xa9, 0x62, 0x6d, 0x11),
            LL(0x01, 0x01, 0x04, 0x01, 0x08, 0x05, 0x02, 0x09),
            LL(0x4f, 0x4f, 0x21, 0x4f, 0x42, 0x6e, 0x9e, 0x0d),
            LL(0x36, 0x36, 0xd8, 0x36, 0xad, 0xee, 0x6c, 0x9b),
            LL(0xa6, 0xa6, 0xa2, 0xa6, 0x59, 0x04, 0x51, 0xff),
            LL(0xd2, 0xd2, 0x6f, 0xd2, 0xde, 0xbd, 0xb9, 0x0c),
            LL(0xf5, 0xf5, 0xf3, 0xf5, 0xfb, 0x06, 0xf7, 0x0e),
            LL(0x79, 0x79, 0xf9, 0x79, 0xef, 0x80, 0xf2, 0x96),
            LL(0x6f, 0x6f, 0xa1, 0x6f, 0x5f, 0xce, 0xde, 0x30),
            LL(0x91, 0x91, 0x7e, 0x91, 0xfc, 0xef, 0x3f, 0x6d),
            LL(0x52, 0x52, 0x55, 0x52, 0xaa, 0x07, 0xa4, 0xf8),
            LL(0x60, 0x60, 0x9d, 0x60, 0x27, 0xfd, 0xc0, 0x47),
            LL(0xbc, 0xbc, 0xca, 0xbc, 0x89, 0x76, 0x65, 0x35),
            LL(0x9b, 0x9b, 0x56, 0x9b, 0xac, 0xcd, 0x2b, 0x37),
            LL(0x8e, 0x8e, 0x02, 0x8e, 0x04, 0x8c, 0x01, 0x8a),
            LL(0xa3, 0xa3, 0xb6, 0xa3, 0x71, 0x15, 0x5b, 0xd2),
            LL(0x0c, 0x0c, 0x30, 0x0c, 0x60, 0x3c, 0x18, 0x6c),
            LL(0x7b, 0x7b, 0xf1, 0x7b, 0xff, 0x8a, 0xf6, 0x84),
            LL(0x35, 0x35, 0xd4, 0x35, 0xb5, 0xe1, 0x6a, 0x80),
            LL(0x1d, 0x1d, 0x74, 0x1d, 0xe8, 0x69,
0x3a, 0xf5),
            /* Cx table interior (entries 0x19-0xee of the 256-entry box) */
            LL(0xe0, 0xe0, 0xa7, 0xe0, 0x53, 0x47, 0xdd, 0xb3),
            LL(0xd7, 0xd7, 0x7b, 0xd7, 0xf6, 0xac, 0xb3, 0x21),
            LL(0xc2, 0xc2, 0x2f, 0xc2, 0x5e, 0xed, 0x99, 0x9c),
            LL(0x2e, 0x2e, 0xb8, 0x2e, 0x6d, 0x96, 0x5c, 0x43),
            LL(0x4b, 0x4b, 0x31, 0x4b, 0x62, 0x7a, 0x96, 0x29),
            LL(0xfe, 0xfe, 0xdf, 0xfe, 0xa3, 0x21, 0xe1, 0x5d),
            LL(0x57, 0x57, 0x41, 0x57, 0x82, 0x16, 0xae, 0xd5),
            LL(0x15, 0x15, 0x54, 0x15, 0xa8, 0x41, 0x2a, 0xbd),
            LL(0x77, 0x77, 0xc1, 0x77, 0x9f, 0xb6, 0xee, 0xe8),
            LL(0x37, 0x37, 0xdc, 0x37, 0xa5, 0xeb, 0x6e, 0x92),
            LL(0xe5, 0xe5, 0xb3, 0xe5, 0x7b, 0x56, 0xd7, 0x9e),
            LL(0x9f, 0x9f, 0x46, 0x9f, 0x8c, 0xd9, 0x23, 0x13),
            LL(0xf0, 0xf0, 0xe7, 0xf0, 0xd3, 0x17, 0xfd, 0x23),
            LL(0x4a, 0x4a, 0x35, 0x4a, 0x6a, 0x7f, 0x94, 0x20),
            LL(0xda, 0xda, 0x4f, 0xda, 0x9e, 0x95, 0xa9, 0x44),
            LL(0x58, 0x58, 0x7d, 0x58, 0xfa, 0x25, 0xb0, 0xa2),
            LL(0xc9, 0xc9, 0x03, 0xc9, 0x06, 0xca, 0x8f, 0xcf),
            LL(0x29, 0x29, 0xa4, 0x29, 0x55, 0x8d, 0x52, 0x7c),
            LL(0x0a, 0x0a, 0x28, 0x0a, 0x50, 0x22, 0x14, 0x5a),
            LL(0xb1, 0xb1, 0xfe, 0xb1, 0xe1, 0x4f, 0x7f, 0x50),
            LL(0xa0, 0xa0, 0xba, 0xa0, 0x69, 0x1a, 0x5d, 0xc9),
            LL(0x6b, 0x6b, 0xb1, 0x6b, 0x7f, 0xda, 0xd6, 0x14),
            LL(0x85, 0x85, 0x2e, 0x85, 0x5c, 0xab, 0x17, 0xd9),
            LL(0xbd, 0xbd, 0xce, 0xbd, 0x81, 0x73, 0x67, 0x3c),
            LL(0x5d, 0x5d, 0x69, 0x5d, 0xd2, 0x34, 0xba, 0x8f),
            LL(0x10, 0x10, 0x40, 0x10, 0x80, 0x50, 0x20, 0x90),
            LL(0xf4, 0xf4, 0xf7, 0xf4, 0xf3, 0x03, 0xf5, 0x07),
            LL(0xcb, 0xcb, 0x0b, 0xcb, 0x16, 0xc0, 0x8b, 0xdd),
            LL(0x3e, 0x3e, 0xf8, 0x3e, 0xed, 0xc6, 0x7c, 0xd3),
            LL(0x05, 0x05, 0x14, 0x05, 0x28, 0x11, 0x0a, 0x2d),
            LL(0x67, 0x67, 0x81, 0x67, 0x1f, 0xe6, 0xce, 0x78),
            LL(0xe4, 0xe4, 0xb7, 0xe4, 0x73, 0x53, 0xd5, 0x97),
            LL(0x27, 0x27, 0x9c, 0x27, 0x25, 0xbb, 0x4e, 0x02),
            LL(0x41, 0x41, 0x19, 0x41, 0x32, 0x58, 0x82, 0x73),
            LL(0x8b, 0x8b, 0x16, 0x8b, 0x2c, 0x9d, 0x0b, 0xa7),
            LL(0xa7, 0xa7, 0xa6, 0xa7, 0x51, 0x01, 0x53, 0xf6),
            LL(0x7d, 0x7d, 0xe9, 0x7d, 0xcf, 0x94, 0xfa, 0xb2),
            LL(0x95, 0x95, 0x6e, 0x95, 0xdc, 0xfb, 0x37, 0x49),
            LL(0xd8, 0xd8, 0x47, 0xd8, 0x8e, 0x9f, 0xad, 0x56),
            LL(0xfb, 0xfb, 0xcb, 0xfb, 0x8b, 0x30, 0xeb, 0x70),
            LL(0xee, 0xee, 0x9f, 0xee, 0x23, 0x71, 0xc1, 0xcd),
            LL(0x7c, 0x7c, 0xed, 0x7c, 0xc7, 0x91, 0xf8, 0xbb),
            LL(0x66, 0x66, 0x85, 0x66, 0x17, 0xe3, 0xcc, 0x71),
            LL(0xdd, 0xdd, 0x53, 0xdd, 0xa6, 0x8e, 0xa7, 0x7b),
            LL(0x17, 0x17, 0x5c, 0x17, 0xb8, 0x4b, 0x2e, 0xaf),
            LL(0x47, 0x47, 0x01, 0x47, 0x02, 0x46, 0x8e, 0x45),
            LL(0x9e, 0x9e, 0x42, 0x9e, 0x84, 0xdc, 0x21, 0x1a),
            LL(0xca, 0xca, 0x0f, 0xca, 0x1e, 0xc5, 0x89, 0xd4),
            LL(0x2d, 0x2d, 0xb4, 0x2d, 0x75, 0x99, 0x5a, 0x58),
            LL(0xbf, 0xbf, 0xc6, 0xbf, 0x91, 0x79, 0x63, 0x2e),
            LL(0x07, 0x07, 0x1c, 0x07, 0x38, 0x1b, 0x0e, 0x3f),
            LL(0xad, 0xad, 0x8e, 0xad, 0x01, 0x23, 0x47, 0xac),
            LL(0x5a, 0x5a, 0x75, 0x5a, 0xea, 0x2f, 0xb4, 0xb0),
            LL(0x83, 0x83, 0x36, 0x83, 0x6c, 0xb5, 0x1b, 0xef),
            LL(0x33, 0x33, 0xcc, 0x33, 0x85, 0xff, 0x66, 0xb6),
            LL(0x63, 0x63, 0x91, 0x63, 0x3f, 0xf2, 0xc6, 0x5c),
            LL(0x02, 0x02, 0x08, 0x02, 0x10, 0x0a, 0x04, 0x12),
            LL(0xaa, 0xaa, 0x92, 0xaa, 0x39, 0x38, 0x49, 0x93),
            LL(0x71, 0x71, 0xd9, 0x71, 0xaf, 0xa8, 0xe2, 0xde),
            LL(0xc8, 0xc8, 0x07, 0xc8, 0x0e, 0xcf, 0x8d, 0xc6),
            LL(0x19, 0x19, 0x64, 0x19, 0xc8, 0x7d, 0x32, 0xd1),
            LL(0x49, 0x49, 0x39, 0x49, 0x72, 0x70, 0x92, 0x3b),
            LL(0xd9, 0xd9, 0x43, 0xd9, 0x86, 0x9a, 0xaf, 0x5f),
            LL(0xf2, 0xf2, 0xef, 0xf2, 0xc3, 0x1d, 0xf9, 0x31),
            LL(0xe3, 0xe3, 0xab, 0xe3, 0x4b, 0x48, 0xdb, 0xa8),
            LL(0x5b, 0x5b, 0x71, 0x5b, 0xe2, 0x2a, 0xb6, 0xb9),
            LL(0x88, 0x88, 0x1a, 0x88, 0x34, 0x92, 0x0d, 0xbc),
            LL(0x9a, 0x9a, 0x52, 0x9a, 0xa4, 0xc8, 0x29, 0x3e),
            LL(0x26, 0x26, 0x98, 0x26, 0x2d, 0xbe, 0x4c, 0x0b),
            LL(0x32, 0x32, 0xc8, 0x32, 0x8d, 0xfa, 0x64, 0xbf),
            LL(0xb0, 0xb0, 0xfa, 0xb0, 0xe9, 0x4a, 0x7d, 0x59),
            LL(0xe9, 0xe9, 0x83, 0xe9, 0x1b, 0x6a, 0xcf, 0xf2),
            LL(0x0f, 0x0f, 0x3c, 0x0f, 0x78, 0x33, 0x1e, 0x77),
            LL(0xd5, 0xd5, 0x73, 0xd5, 0xe6, 0xa6, 0xb7, 0x33),
            LL(0x80, 0x80, 0x3a, 0x80, 0x74, 0xba, 0x1d, 0xf4),
            LL(0xbe, 0xbe, 0xc2, 0xbe, 0x99, 0x7c, 0x61, 0x27),
            LL(0xcd, 0xcd, 0x13, 0xcd, 0x26, 0xde, 0x87, 0xeb),
            LL(0x34, 0x34, 0xd0, 0x34, 0xbd, 0xe4, 0x68, 0x89),
            LL(0x48, 0x48, 0x3d, 0x48, 0x7a, 0x75, 0x90, 0x32),
            LL(0xff, 0xff, 0xdb, 0xff, 0xab, 0x24, 0xe3, 0x54),
            LL(0x7a, 0x7a, 0xf5, 0x7a, 0xf7, 0x8f, 0xf4, 0x8d),
            LL(0x90, 0x90, 0x7a, 0x90, 0xf4, 0xea, 0x3d, 0x64),
            LL(0x5f, 0x5f, 0x61, 0x5f, 0xc2, 0x3e, 0xbe, 0x9d),
            LL(0x20, 0x20, 0x80, 0x20, 0x1d, 0xa0, 0x40, 0x3d),
            LL(0x68, 0x68, 0xbd, 0x68, 0x67, 0xd5, 0xd0, 0x0f),
            LL(0x1a, 0x1a, 0x68, 0x1a, 0xd0, 0x72, 0x34, 0xca),
            LL(0xae, 0xae, 0x82, 0xae, 0x19, 0x2c, 0x41, 0xb7),
            LL(0xb4, 0xb4, 0xea, 0xb4, 0xc9, 0x5e, 0x75, 0x7d),
            LL(0x54, 0x54, 0x4d, 0x54, 0x9a, 0x19, 0xa8, 0xce),
            LL(0x93, 0x93, 0x76, 0x93, 0xec, 0xe5, 0x3b, 0x7f),
            LL(0x22, 0x22, 0x88, 0x22, 0x0d, 0xaa, 0x44, 0x2f),
            LL(0x64, 0x64, 0x8d, 0x64, 0x07, 0xe9, 0xc8, 0x63),
            LL(0xf1, 0xf1, 0xe3, 0xf1, 0xdb, 0x12, 0xff, 0x2a),
            LL(0x73, 0x73, 0xd1, 0x73, 0xbf, 0xa2, 0xe6, 0xcc),
            LL(0x12, 0x12, 0x48, 0x12, 0x90, 0x5a, 0x24, 0x82),
            LL(0x40, 0x40, 0x1d, 0x40, 0x3a, 0x5d, 0x80, 0x7a),
            LL(0x08, 0x08, 0x20, 0x08, 0x40, 0x28, 0x10, 0x48),
            LL(0xc3, 0xc3, 0x2b, 0xc3, 0x56, 0xe8, 0x9b, 0x95),
            LL(0xec, 0xec, 0x97, 0xec, 0x33, 0x7b, 0xc5, 0xdf),
            LL(0xdb, 0xdb, 0x4b, 0xdb, 0x96, 0x90, 0xab, 0x4d),
            LL(0xa1, 0xa1, 0xbe, 0xa1, 0x61, 0x1f, 0x5f, 0xc0),
            LL(0x8d, 0x8d, 0x0e, 0x8d, 0x1c, 0x83, 0x07, 0x91),
            LL(0x3d, 0x3d, 0xf4, 0x3d, 0xf5, 0xc9, 0x7a, 0xc8),
            LL(0x97, 0x97, 0x66, 0x97, 0xcc, 0xf1, 0x33, 0x5b),
            LL(0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00),
            LL(0xcf, 0xcf, 0x1b, 0xcf, 0x36, 0xd4, 0x83, 0xf9),
            LL(0x2b, 0x2b, 0xac, 0x2b, 0x45, 0x87, 0x56, 0x6e),
            LL(0x76, 0x76, 0xc5, 0x76, 0x97, 0xb3, 0xec, 0xe1),
            LL(0x82, 0x82, 0x32, 0x82, 0x64, 0xb0, 0x19, 0xe6),
            LL(0xd6, 0xd6, 0x7f, 0xd6, 0xfe, 0xa9, 0xb1, 0x28),
            LL(0x1b, 0x1b, 0x6c, 0x1b, 0xd8, 0x77, 0x36, 0xc3),
            LL(0xb5, 0xb5, 0xee, 0xb5, 0xc1, 0x5b, 0x77, 0x74),
            LL(0xaf, 0xaf, 0x86, 0xaf, 0x11, 0x29, 0x43, 0xbe),
            LL(0x6a, 0x6a, 0xb5, 0x6a, 0x77, 0xdf, 0xd4, 0x1d),
            LL(0x50, 0x50, 0x5d, 0x50, 0xba, 0x0d, 0xa0, 0xea),
            LL(0x45, 0x45, 0x09, 0x45, 0x12, 0x4c, 0x8a, 0x57),
            LL(0xf3, 0xf3, 0xeb, 0xf3, 0xcb, 0x18, 0xfb, 0x38),
            LL(0x30, 0x30, 0xc0, 0x30, 0x9d, 0xf0, 0x60, 0xad),
            LL(0xef, 0xef, 0x9b, 0xef, 0x2b, 0x74, 0xc3, 0xc4),
            LL(0x3f, 0x3f, 0xfc, 0x3f, 0xe5, 0xc3, 0x7e, 0xda),
            LL(0x55, 0x55, 0x49, 0x55, 0x92, 0x1c, 0xaa, 0xc7),
            LL(0xa2, 0xa2, 0xb2, 0xa2, 0x79, 0x10, 0x59, 0xdb),
            LL(0xea, 0xea, 0x8f, 0xea, 0x03, 0x65, 0xc9, 0xe9),
            LL(0x65, 0x65, 0x89, 0x65, 0x0f, 0xec, 0xca, 0x6a),
            LL(0xba, 0xba, 0xd2, 0xba, 0xb9, 0x68, 0x69, 0x03),
            LL(0x2f, 0x2f, 0xbc, 0x2f, 0x65, 0x93, 0x5e, 0x4a),
            LL(0xc0, 0xc0, 0x27, 0xc0, 0x4e, 0xe7, 0x9d, 0x8e),
            LL(0xde, 0xde, 0x5f, 0xde, 0xbe, 0x81, 0xa1, 0x60),
            LL(0x1c, 0x1c, 0x70, 0x1c, 0xe0, 0x6c, 0x38, 0xfc),
            LL(0xfd, 0xfd, 0xd3, 0xfd, 0xbb, 0x2e, 0xe7, 0x46),
            LL(0x4d, 0x4d, 0x29, 0x4d, 0x52, 0x64, 0x9a, 0x1f),
            LL(0x92, 0x92, 0x72, 0x92, 0xe4, 0xe0, 0x39, 0x76),
            LL(0x75, 0x75, 0xc9, 0x75, 0x8f, 0xbc, 0xea, 0xfa),
            LL(0x06, 0x06, 0x18, 0x06, 0x30, 0x1e, 0x0c, 0x36),
            LL(0x8a, 0x8a, 0x12, 0x8a, 0x24, 0x98, 0x09, 0xae),
            LL(0xb2, 0xb2, 0xf2, 0xb2, 0xf9, 0x40, 0x79, 0x4b),
            LL(0xe6, 0xe6, 0xbf, 0xe6, 0x63, 0x59, 0xd1, 0x85),
            LL(0x0e, 0x0e, 0x38, 0x0e, 0x70, 0x36, 0x1c, 0x7e),
            LL(0x1f, 0x1f, 0x7c, 0x1f, 0xf8, 0x63, 0x3e, 0xe7),
            LL(0x62, 0x62, 0x95, 0x62, 0x37, 0xf7, 0xc4, 0x55),
            LL(0xd4, 0xd4, 0x77, 0xd4, 0xee, 0xa3, 0xb5, 0x3a),
            LL(0xa8, 0xa8, 0x9a, 0xa8, 0x29, 0x32, 0x4d, 0x81),
            LL(0x96, 0x96, 0x62, 0x96, 0xc4, 0xf4, 0x31, 0x52),
            LL(0xf9, 0xf9, 0xc3, 0xf9, 0x9b, 0x3a, 0xef, 0x62),
            LL(0xc5, 0xc5, 0x33, 0xc5, 0x66, 0xf6, 0x97, 0xa3),
            LL(0x25, 0x25, 0x94, 0x25, 0x35, 0xb1, 0x4a, 0x10),
            LL(0x59, 0x59, 0x79, 0x59, 0xf2, 0x20, 0xb2, 0xab),
            LL(0x84, 0x84, 0x2a, 0x84, 0x54, 0xae, 0x15, 0xd0),
            LL(0x72, 0x72, 0xd5, 0x72, 0xb7, 0xa7, 0xe4, 0xc5),
            LL(0x39, 0x39, 0xe4, 0x39, 0xd5, 0xdd, 0x72, 0xec),
            LL(0x4c, 0x4c, 0x2d, 0x4c, 0x5a, 0x61, 0x98, 0x16),
            LL(0x5e, 0x5e, 0x65, 0x5e, 0xca, 0x3b, 0xbc, 0x94),
            LL(0x78, 0x78, 0xfd, 0x78, 0xe7, 0x85, 0xf0, 0x9f),
            LL(0x38, 0x38, 0xe0, 0x38, 0xdd, 0xd8, 0x70, 0xe5),
            LL(0x8c, 0x8c, 0x0a, 0x8c, 0x14, 0x86, 0x05, 0x98),
            LL(0xd1, 0xd1, 0x63, 0xd1, 0xc6, 0xb2, 0xbf, 0x17),
            LL(0xa5, 0xa5, 0xae, 0xa5, 0x41, 0x0b, 0x57, 0xe4),
            LL(0xe2, 0xe2, 0xaf, 0xe2, 0x43, 0x4d, 0xd9, 0xa1),
            LL(0x61, 0x61, 0x99, 0x61, 0x2f, 0xf8, 0xc2, 0x4e),
            LL(0xb3, 0xb3, 0xf6, 0xb3, 0xf1, 0x45, 0x7b, 0x42),
            LL(0x21, 0x21, 0x84, 0x21, 0x15, 0xa5, 0x42, 0x34),
            LL(0x9c, 0x9c, 0x4a, 0x9c, 0x94, 0xd6, 0x25, 0x08),
            LL(0x1e, 0x1e, 0x78, 0x1e, 0xf0, 0x66, 0x3c, 0xee),
            LL(0x43, 0x43, 0x11, 0x43, 0x22, 0x52, 0x86, 0x61),
            LL(0xc7, 0xc7, 0x3b, 0xc7, 0x76, 0xfc, 0x93, 0xb1),
            LL(0xfc, 0xfc, 0xd7, 0xfc, 0xb3, 0x2b, 0xe5, 0x4f),
            LL(0x04, 0x04, 0x10, 0x04, 0x20, 0x14, 0x08, 0x24),
            LL(0x51, 0x51, 0x59, 0x51, 0xb2, 0x08, 0xa2, 0xe3),
            LL(0x99, 0x99, 0x5e, 0x99, 0xbc, 0xc7, 0x2f, 0x25),
            LL(0x6d, 0x6d, 0xa9, 0x6d, 0x4f, 0xc4, 0xda, 0x22),
            LL(0x0d, 0x0d, 0x34, 0x0d, 0x68, 0x39, 0x1a, 0x65),
            LL(0xfa, 0xfa, 0xcf, 0xfa, 0x83, 0x35, 0xe9, 0x79),
            LL(0xdf, 0xdf, 0x5b, 0xdf, 0xb6, 0x84, 0xa3, 0x69),
            LL(0x7e, 0x7e, 0xe5, 0x7e, 0xd7, 0x9b, 0xfc, 0xa9),
            LL(0x24, 0x24, 0x90, 0x24, 0x3d, 0xb4, 0x48, 0x19),
            LL(0x3b, 0x3b, 0xec, 0x3b, 0xc5, 0xd7, 0x76, 0xfe),
            LL(0xab, 0xab, 0x96, 0xab, 0x31, 0x3d, 0x4b, 0x9a),
            LL(0xce, 0xce, 0x1f, 0xce, 0x3e, 0xd1, 0x81, 0xf0),
            LL(0x11, 0x11, 0x44, 0x11, 0x88, 0x55, 0x22, 0x99),
            LL(0x8f, 0x8f, 0x06, 0x8f, 0x0c, 0x89, 0x03, 0x83),
            LL(0x4e, 0x4e, 0x25, 0x4e, 0x4a, 0x6b, 0x9c, 0x04),
            LL(0xb7, 0xb7, 0xe6, 0xb7, 0xd1, 0x51, 0x73, 0x66),
            LL(0xeb, 0xeb, 0x8b, 0xeb, 0x0b, 0x60, 0xcb, 0xe0),
            LL(0x3c, 0x3c, 0xf0, 0x3c, 0xfd, 0xcc, 0x78, 0xc1),
            LL(0x81, 0x81, 0x3e, 0x81, 0x7c, 0xbf, 0x1f, 0xfd),
            LL(0x94, 0x94, 0x6a, 0x94, 0xd4, 0xfe, 0x35, 0x40),
            LL(0xf7, 0xf7, 0xfb, 0xf7, 0xeb, 0x0c, 0xf3, 0x1c),
            LL(0xb9, 0xb9, 0xde, 0xb9, 0xa1, 0x67, 0x6f, 0x18),
            LL(0x13, 0x13, 0x4c, 0x13, 0x98, 0x5f, 0x26, 0x8b),
            LL(0x2c, 0x2c, 0xb0, 0x2c, 0x7d, 0x9c, 0x58, 0x51),
            LL(0xd3, 0xd3, 0x6b, 0xd3, 0xd6, 0xb8, 0xbb, 0x05),
            LL(0xe7, 0xe7, 0xbb, 0xe7, 0x6b, 0x5c, 0xd3, 0x8c),
            LL(0x6e, 0x6e, 0xa5, 0x6e, 0x57, 0xcb, 0xdc, 0x39),
            LL(0xc4, 0xc4, 0x37, 0xc4, 0x6e, 0xf3, 0x95, 0xaa),
            LL(0x03, 0x03, 0x0c, 0x03, 0x18, 0x0f, 0x06, 0x1b),
            LL(0x56, 0x56, 0x45, 0x56, 0x8a, 0x13, 0xac, 0xdc),
            LL(0x44, 0x44, 0x0d, 0x44, 0x1a, 0x49, 0x88, 0x5e),
            LL(0x7f, 0x7f, 0xe1, 0x7f, 0xdf, 0x9e, 0xfe, 0xa0),
            LL(0xa9, 0xa9, 0x9e, 0xa9, 0x21, 0x37, 0x4f, 0x88),
            LL(0x2a, 0x2a, 0xa8, 0x2a, 0x4d, 0x82, 0x54, 0x67),
            LL(0xbb, 0xbb, 0xd6, 0xbb, 0xb1, 0x6d, 0x6b, 0x0a),
            LL(0xc1, 0xc1, 0x23, 0xc1, 0x46, 0xe2, 0x9f, 0x87),
            LL(0x53, 0x53, 0x51, 0x53, 0xa2, 0x02, 0xa6, 0xf1),
            LL(0xdc, 0xdc, 0x57, 0xdc, 0xae, 0x8b, 0xa5, 0x72),
            LL(0x0b, 0x0b, 0x2c, 0x0b, 0x58, 0x27, 0x16, 0x53),
            LL(0x9d, 0x9d, 0x4e, 0x9d, 0x9c, 0xd3, 0x27, 0x01),
            LL(0x6c, 0x6c, 0xad, 0x6c, 0x47, 0xc1, 0xd8, 0x2b),
            LL(0x31, 0x31, 0xc4, 0x31, 0x95, 0xf5, 0x62, 0xa4),
            LL(0x74, 0x74, 0xcd, 0x74, 0x87, 0xb9, 0xe8, 0xf3),
            LL(0xf6, 0xf6, 0xff, 0xf6, 0xe3, 0x09, 0xf1, 0x15),
            LL(0x46, 0x46, 0x05, 0x46, 0x0a, 0x43, 0x8c, 0x4c),
            LL(0xac, 0xac, 0x8a, 0xac, 0x09, 0x26, 0x45, 0xa5),
            LL(0x89, 0x89, 0x1e, 0x89, 0x3c, 0x97, 0x0f, 0xb5),
            LL(0x14, 0x14, 0x50, 0x14, 0xa0, 0x44,
0x28, 0xb4),
            /* Final entries of the 256-entry table. */
            LL(0xe1, 0xe1, 0xa3, 0xe1, 0x5b, 0x42, 0xdf, 0xba),
            LL(0x16, 0x16, 0x58, 0x16, 0xb0, 0x4e, 0x2c, 0xa6),
            LL(0x3a, 0x3a, 0xe8, 0x3a, 0xcd, 0xd2, 0x74, 0xf7),
            LL(0x69, 0x69, 0xb9, 0x69, 0x6f, 0xd0, 0xd2, 0x06),
            LL(0x09, 0x09, 0x24, 0x09, 0x48, 0x2d, 0x12, 0x41),
            LL(0x70, 0x70, 0xdd, 0x70, 0xa7, 0xad, 0xe0, 0xd7),
            LL(0xb6, 0xb6, 0xe2, 0xb6, 0xd9, 0x54, 0x71, 0x6f),
            LL(0xd0, 0xd0, 0x67, 0xd0, 0xce, 0xb7, 0xbd, 0x1e),
            LL(0xed, 0xed, 0x93, 0xed, 0x3b, 0x7e, 0xc7, 0xd6),
            LL(0xcc, 0xcc, 0x17, 0xcc, 0x2e, 0xdb, 0x85, 0xe2),
            LL(0x42, 0x42, 0x15, 0x42, 0x2a, 0x57, 0x84, 0x68),
            LL(0x98, 0x98, 0x5a, 0x98, 0xb4, 0xc2, 0x2d, 0x2c),
            LL(0xa4, 0xa4, 0xaa, 0xa4, 0x49, 0x0e, 0x55, 0xed),
            LL(0x28, 0x28, 0xa0, 0x28, 0x5d, 0x88, 0x50, 0x75),
            LL(0x5c, 0x5c, 0x6d, 0x5c, 0xda, 0x31, 0xb8, 0x86),
            LL(0xf8, 0xf8, 0xc7, 0xf8, 0x93, 0x3f, 0xed, 0x6b),
            LL(0x86, 0x86, 0x22, 0x86, 0x44, 0xa4, 0x11, 0xc2),
/* RC points at the round constants stored right after the 256*N table. */
#define RC      (&(Cx.q[256*N]))
            0x18, 0x23, 0xc6, 0xe8, 0x87, 0xb8, 0x01, 0x4f,
            /* rc[ROUNDS] */
            0x36, 0xa6, 0xd2, 0xf5, 0x79, 0x6f, 0x91, 0x52, 0x60, 0xbc, 0x9b,
            0x8e, 0xa3, 0x0c, 0x7b, 0x35, 0x1d, 0xe0, 0xd7, 0xc2, 0x2e, 0x4b,
            0xfe, 0x57, 0x15, 0x77, 0x37, 0xe5, 0x9f, 0xf0, 0x4a, 0xda, 0x58,
            0xc9, 0x29, 0x0a, 0xb1, 0xa0, 0x6b, 0x85, 0xbd, 0x5d, 0x10, 0xf4,
            0xcb, 0x3e, 0x05, 0x67, 0xe4, 0x27, 0x41, 0x8b, 0xa7, 0x7d, 0x95,
            0xd8, 0xfb, 0xee, 0x7c, 0x66, 0xdd, 0x17, 0x47, 0x9e, 0xca, 0x2d,
            0xbf, 0x07, 0xad, 0x5a, 0x83, 0x33
    }
};

/*-
 * Whirlpool compression function.  Absorbs n 64-byte blocks at inp into
 * the 512-bit chaining value ctx->H using the Miyaguchi-Preneel
 * construction: H = H ^ m ^ W[H](m), where W is the 10-round Whirlpool
 * block cipher keyed with the current chaining value.
 *
 * Three variants are compiled depending on the macros configured above:
 *  - OPENSSL_SMALL_FOOTPRINT: compact loops over word index i;
 *  - SMALL_REGISTER_BANK: row-at-a-time unrolled (x86, few registers);
 *  - default: column-at-a-time fully unrolled (big register banks).
 * All three compute the identical result.
 */
void whirlpool_block(WHIRLPOOL_CTX *ctx, const void *inp, size_t n)
{
    int r;
    const u8 *p = inp;
    union {
        u64 q[8];
        u8 c[64];
    } S, K, *H = (void *)ctx->H.q;  /* S = cipher state, K = round key */

#ifdef GO_FOR_MMX
    GO_FOR_MMX(ctx, inp, n);        /* diverts to MMX assembler when available */
#endif
    do {
#ifdef OPENSSL_SMALL_FOOTPRINT
        u64 L[8];
        int i;

        /* K = H (initial round key), S = m ^ K (cipher input). */
        for (i = 0; i < 64; i++)
            S.c[i] = (K.c[i] = H->c[i]) ^ p[i];
        for (r = 0; r < ROUNDS; r++) {
            /* Key schedule: K = rho[rc[r]](K). */
            for (i = 0; i < 8; i++) {
                /* Round constant enters key word 0 only. */
                L[i] = i ? 0 : RC[r];
                L[i] ^= C0(K, i) ^ C1(K, (i - 1) & 7) ^
                    C2(K, (i - 2) & 7) ^ C3(K, (i - 3) & 7) ^
                    C4(K, (i - 4) & 7) ^ C5(K, (i - 5) & 7) ^
                    C6(K, (i - 6) & 7) ^ C7(K, (i - 7) & 7);
            }
            memcpy(K.q, L, 64);
            /* State round: S = rho[K](S); L still holds K, hence "^=". */
            for (i = 0; i < 8; i++) {
                L[i] ^= C0(S, i) ^ C1(S, (i - 1) & 7) ^
                    C2(S, (i - 2) & 7) ^ C3(S, (i - 3) & 7) ^
                    C4(S, (i - 4) & 7) ^ C5(S, (i - 5) & 7) ^
                    C6(S, (i - 6) & 7) ^ C7(S, (i - 7) & 7);
            }
            memcpy(S.q, L, 64);
        }
        /* Miyaguchi-Preneel feed-forward. */
        for (i = 0; i < 64; i++)
            H->c[i] ^= S.c[i] ^ p[i];
#else
        u64 L0, L1, L2, L3, L4, L5, L6, L7;

# ifdef STRICT_ALIGNMENT
        if ((size_t)p & 7) {
            /* Input not 8-byte aligned: stage it bytewise first. */
            memcpy(S.c, p, 64);
            S.q[0] ^= (K.q[0] = H->q[0]);
            S.q[1] ^= (K.q[1] = H->q[1]);
            S.q[2] ^= (K.q[2] = H->q[2]);
            S.q[3] ^= (K.q[3] = H->q[3]);
            S.q[4] ^= (K.q[4] = H->q[4]);
            S.q[5] ^= (K.q[5] = H->q[5]);
            S.q[6] ^= (K.q[6] = H->q[6]);
            S.q[7] ^= (K.q[7] = H->q[7]);
        } else
# endif
        {
            /*
             * NOTE(review): the u64 view of the input byte stream relies on
             * the platform tolerating such access (STRICT_ALIGNMENT gates
             * the misaligned case above); strict-aliasing concerns are
             * inherited from the original code -- confirm compiler flags.
             */
            const u64 *pa = (const u64 *)p;
            S.q[0] = (K.q[0] = H->q[0]) ^ pa[0];
            S.q[1] = (K.q[1] = H->q[1]) ^ pa[1];
            S.q[2] = (K.q[2] = H->q[2]) ^ pa[2];
            S.q[3] = (K.q[3] = H->q[3]) ^ pa[3];
            S.q[4] = (K.q[4] = H->q[4]) ^ pa[4];
            S.q[5] = (K.q[5] = H->q[5]) ^ pa[5];
            S.q[6] = (K.q[6] = H->q[6]) ^ pa[6];
            S.q[7] = (K.q[7] = H->q[7]) ^ pa[7];
        }

        for (r = 0; r < ROUNDS; r++) {
# ifdef SMALL_REGISTER_BANK
            /* Key schedule round: each L word gathers one table entry
             * per key column; rc[r] enters word 0 only. */
            L0 = C0(K, 0) ^ C1(K, 7) ^ C2(K, 6) ^ C3(K, 5) ^
                C4(K, 4) ^ C5(K, 3) ^ C6(K, 2) ^ C7(K, 1) ^ RC[r];
            L1 = C0(K, 1) ^ C1(K, 0) ^ C2(K, 7) ^ C3(K, 6) ^
                C4(K, 5) ^ C5(K, 4) ^ C6(K, 3) ^ C7(K, 2);
            L2 = C0(K, 2) ^ C1(K, 1) ^ C2(K, 0) ^ C3(K, 7) ^
                C4(K, 6) ^ C5(K, 5) ^ C6(K, 4) ^ C7(K, 3);
            L3 = C0(K, 3) ^ C1(K, 2) ^ C2(K, 1) ^ C3(K, 0) ^
                C4(K, 7) ^ C5(K, 6) ^ C6(K, 5) ^ C7(K, 4);
            L4 = C0(K, 4) ^ C1(K, 3) ^ C2(K, 2) ^ C3(K, 1) ^
                C4(K, 0) ^ C5(K, 7) ^ C6(K, 6) ^ C7(K, 5);
            L5 = C0(K, 5) ^ C1(K, 4) ^ C2(K, 3) ^ C3(K, 2) ^
                C4(K, 1) ^ C5(K, 0) ^ C6(K, 7) ^ C7(K, 6);
            L6 = C0(K, 6) ^ C1(K, 5) ^ C2(K, 4) ^ C3(K, 3) ^
                C4(K, 2) ^ C5(K, 1) ^ C6(K, 0) ^ C7(K, 7);
            L7 = C0(K, 7) ^ C1(K, 6) ^ C2(K, 5) ^ C3(K, 4) ^
                C4(K, 3) ^ C5(K, 2) ^ C6(K, 1) ^ C7(K, 0);

            K.q[0] = L0;
            K.q[1] = L1;
            K.q[2] = L2;
            K.q[3] = L3;
            K.q[4] = L4;
            K.q[5] = L5;
            K.q[6] = L6;
            K.q[7] = L7;

            /* State round: L still holds the new round key, hence "^=". */
            L0 ^= C0(S, 0) ^ C1(S, 7) ^ C2(S, 6) ^ C3(S, 5) ^
                C4(S, 4) ^ C5(S, 3) ^ C6(S, 2) ^ C7(S, 1);
            L1 ^= C0(S, 1) ^ C1(S, 0) ^ C2(S, 7) ^ C3(S, 6) ^
                C4(S, 5) ^ C5(S, 4) ^ C6(S, 3) ^ C7(S, 2);
            L2 ^= C0(S, 2) ^ C1(S, 1) ^ C2(S, 0) ^ C3(S, 7) ^
                C4(S, 6) ^ C5(S, 5) ^ C6(S, 4) ^ C7(S, 3);
            L3 ^= C0(S, 3) ^ C1(S, 2) ^ C2(S, 1) ^ C3(S, 0) ^
                C4(S, 7) ^ C5(S, 6) ^ C6(S, 5) ^ C7(S, 4);
            L4 ^= C0(S, 4) ^ C1(S, 3) ^ C2(S, 2) ^ C3(S, 1) ^
                C4(S, 0) ^ C5(S, 7) ^ C6(S, 6) ^ C7(S, 5);
            L5 ^= C0(S, 5) ^ C1(S, 4) ^ C2(S, 3) ^ C3(S, 2) ^
                C4(S, 1) ^ C5(S, 0) ^ C6(S, 7) ^ C7(S, 6);
            L6 ^= C0(S, 6) ^ C1(S, 5) ^ C2(S, 4) ^ C3(S, 3) ^
                C4(S, 2) ^ C5(S, 1) ^ C6(S, 0) ^ C7(S, 7);
            L7 ^= C0(S, 7) ^ C1(S, 6) ^ C2(S, 5) ^ C3(S, 4) ^
                C4(S, 3) ^ C5(S, 2) ^ C6(S, 1) ^ C7(S, 0);

            S.q[0] = L0;
            S.q[1] = L1;
            S.q[2] = L2;
            S.q[3] = L3;
            S.q[4] = L4;
            S.q[5] = L5;
            S.q[6] = L6;
            S.q[7] = L7;
# else
            /* Column-at-a-time schedule: all eight L words stay live,
             * and each input column i contributes to every L word. */
            L0 = C0(K, 0);
            L1 = C1(K, 0);
            L2 = C2(K, 0);
            L3 = C3(K, 0);
            L4 = C4(K, 0);
            L5 = C5(K, 0);
            L6 = C6(K, 0);
            L7 = C7(K, 0);
            L0 ^= RC[r];

            L1 ^= C0(K, 1);
            L2 ^= C1(K, 1);
            L3 ^= C2(K, 1);
            L4 ^= C3(K, 1);
            L5 ^= C4(K, 1);
            L6 ^= C5(K, 1);
            L7 ^= C6(K, 1);
            L0 ^= C7(K, 1);

            L2 ^= C0(K, 2);
            L3 ^= C1(K, 2);
            L4 ^= C2(K, 2);
            L5 ^= C3(K, 2);
            L6 ^= C4(K, 2);
            L7 ^= C5(K, 2);
            L0 ^= C6(K, 2);
            L1 ^= C7(K, 2);

            L3 ^= C0(K, 3);
            L4 ^= C1(K, 3);
            L5 ^= C2(K, 3);
            L6 ^= C3(K, 3);
            L7 ^= C4(K, 3);
            L0 ^= C5(K, 3);
            L1 ^= C6(K, 3);
            L2 ^= C7(K, 3);

            L4 ^= C0(K, 4);
            L5 ^= C1(K, 4);
            L6 ^= C2(K, 4);
            L7 ^= C3(K, 4);
            L0 ^= C4(K, 4);
            L1 ^= C5(K, 4);
            L2 ^= C6(K, 4);
            L3 ^= C7(K, 4);

            L5 ^= C0(K, 5);
            L6 ^= C1(K, 5);
            L7 ^= C2(K, 5);
            L0 ^= C3(K, 5);
            L1 ^= C4(K, 5);
            L2 ^= C5(K, 5);
            L3 ^= C6(K, 5);
            L4 ^= C7(K, 5);

            L6 ^= C0(K, 6);
            L7 ^= C1(K, 6);
            L0 ^= C2(K, 6);
            L1 ^= C3(K, 6);
            L2 ^= C4(K, 6);
            L3 ^= C5(K, 6);
            L4 ^= C6(K, 6);
            L5 ^= C7(K, 6);

            L7 ^= C0(K, 7);
            L0 ^= C1(K, 7);
            L1 ^= C2(K, 7);
            L2 ^= C3(K, 7);
            L3 ^= C4(K, 7);
            L4 ^= C5(K, 7);
            L5 ^= C6(K, 7);
            L6 ^= C7(K, 7);

            K.q[0] = L0;
            K.q[1] = L1;
            K.q[2] = L2;
            K.q[3] = L3;
            K.q[4] = L4;
            K.q[5] = L5;
            K.q[6] = L6;
            K.q[7] = L7;

            /* State round, same column order; "^=" folds in the new key. */
            L0 ^= C0(S, 0);
            L1 ^= C1(S, 0);
            L2 ^= C2(S, 0);
            L3 ^= C3(S, 0);
            L4 ^= C4(S, 0);
            L5 ^= C5(S, 0);
            L6 ^= C6(S, 0);
            L7 ^= C7(S, 0);

            L1 ^= C0(S, 1);
            L2 ^= C1(S, 1);
            L3 ^= C2(S, 1);
            L4 ^= C3(S, 1);
            L5 ^= C4(S, 1);
            L6 ^= C5(S, 1);
            L7 ^= C6(S, 1);
            L0 ^= C7(S, 1);

            L2 ^= C0(S, 2);
            L3 ^= C1(S, 2);
            L4 ^= C2(S, 2);
            L5 ^= C3(S, 2);
            L6 ^= C4(S, 2);
            L7 ^= C5(S, 2);
            L0 ^= C6(S, 2);
            L1 ^= C7(S, 2);

            L3 ^= C0(S, 3);
            L4 ^= C1(S, 3);
            L5 ^= C2(S, 3);
            L6 ^= C3(S, 3);
            L7 ^= C4(S, 3);
            L0 ^= C5(S, 3);
            L1 ^= C6(S, 3);
            L2 ^= C7(S, 3);

            L4 ^= C0(S, 4);
            L5 ^= C1(S, 4);
            L6 ^= C2(S, 4);
            L7 ^= C3(S, 4);
            L0 ^= C4(S, 4);
            L1 ^= C5(S, 4);
            L2 ^= C6(S, 4);
            L3 ^= C7(S, 4);

            L5 ^= C0(S, 5);
            L6 ^= C1(S, 5);
            L7 ^= C2(S, 5);
            L0 ^= C3(S, 5);
            L1 ^= C4(S, 5);
            L2 ^= C5(S, 5);
            L3 ^= C6(S, 5);
            L4 ^= C7(S, 5);

            L6 ^= C0(S, 6);
            L7 ^= C1(S, 6);
            L0 ^= C2(S, 6);
            L1 ^= C3(S, 6);
            L2 ^= C4(S, 6);
            L3 ^= C5(S, 6);
            L4 ^= C6(S, 6);
            L5 ^= C7(S, 6);

            L7 ^= C0(S, 7);
            L0 ^= C1(S, 7);
            L1 ^= C2(S, 7);
            L2 ^= C3(S, 7);
            L3 ^= C4(S, 7);
            L4 ^= C5(S, 7);
            L5 ^= C6(S, 7);
            L6 ^= C7(S, 7);

            S.q[0] = L0;
            S.q[1] = L1;
            S.q[2] = L2;
            S.q[3] = L3;
            S.q[4] = L4;
            S.q[5] = L5;
            S.q[6] = L6;
            S.q[7] = L7;
# endif
        }

        /* Miyaguchi-Preneel feed-forward: H ^= S ^ m. */
# ifdef STRICT_ALIGNMENT
        if ((size_t)p & 7) {
            int i;
            for (i = 0; i < 64; i++)
                H->c[i] ^= S.c[i] ^ p[i];
        } else
# endif
        {
            const u64 *pa = (const u64 *)p;
            H->q[0] ^= S.q[0] ^ pa[0];
            H->q[1] ^= S.q[1] ^ pa[1];
            H->q[2] ^= S.q[2] ^ pa[2];
            H->q[3] ^= S.q[3] ^ pa[3];
            H->q[4] ^= S.q[4] ^ pa[4];
            H->q[5] ^= S.q[5] ^ pa[5];
            H->q[6] ^= S.q[6] ^ pa[6];
            H->q[7] ^= S.q[7] ^ pa[7];
        }
#endif
        p += 64;                /* advance to the next 64-byte block */
    } while (--n);
}