/*
 * Carry-less multiply operations.
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Copyright (C) 2023 Linaro, Ltd.
 */

#include "qemu/osdep.h"
#include "crypto/clmul.h"

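/*
 * Perform eight 8x8->8 carry-less multiplies, one per byte lane of
 * n and m, keeping only the low 8 bits of each 16-bit product.
 * E.g. within one lane, 0x03 clmul 0x05 = 0x05 ^ (0x05 << 1) = 0x0f.
 */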
uint64_t clmul_8x8_low(uint64_t n, uint64_t m)
{
    uint64_t r = 0;

    for (int i = 0; i < 8; ++i) {
        /* Expand bit 0 of each byte of n into a 0x00/0xff byte mask. */
        uint64_t mask = (n & 0x0101010101010101ull) * 0xff;
        r ^= m & mask;
        /* Shift each byte of m left, dropping carries across lanes. */
        m = (m << 1) & 0xfefefefefefefefeull;
        n >>= 1;
    }
    return r;
}

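/*
 * Perform four 8x8->16 carry-less multiplies.  The inputs must hold
 * an 8-bit multiplicand in the low byte of each 16-bit lane, with the
 * high byte zero, so that shifted products stay within their lane.
 */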
static uint64_t clmul_8x4_even_int(uint64_t n, uint64_t m)
{
    uint64_t r = 0;

    for (int i = 0; i < 8; ++i) {
        /* Expand bit 0 of each 16-bit lane of n into a 16-bit mask. */
        uint64_t mask = (n & 0x0001000100010001ull) * 0xffff;
        r ^= m & mask;
        n >>= 1;
        m <<= 1;
    }
    return r;
}

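/*
 * Perform four 8x8->16 carry-less multiplies on the even bytes
 * (the low byte of each 16-bit lane) of n and m.
 */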
uint64_t clmul_8x4_even(uint64_t n, uint64_t m)
{
    n &= 0x00ff00ff00ff00ffull;
    m &= 0x00ff00ff00ff00ffull;
    return clmul_8x4_even_int(n, m);
}

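/*
 * Perform four 8x8->16 carry-less multiplies on the odd bytes
 * (the high byte of each 16-bit lane) of n and m.
 */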
uint64_t clmul_8x4_odd(uint64_t n, uint64_t m)
{
    return clmul_8x4_even(n >> 8, m >> 8);
}

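/*
 * Spread the four bytes of a 32-bit value into the low bytes of
 * four 16-bit lanes of a 64-bit value.
 */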
static uint64_t unpack_8_to_16(uint64_t x)
{
    return  (x & 0x000000ff)
         | ((x & 0x0000ff00) << 8)
         | ((x & 0x00ff0000) << 16)
         | ((x & 0xff000000) << 24);
}

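/*
 * Perform four 8x8->16 carry-less multiplies on the four bytes of
 * n and m, returning the four 16-bit products packed into 64 bits.
 */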
uint64_t clmul_8x4_packed(uint32_t n, uint32_t m)
{
    return clmul_8x4_even_int(unpack_8_to_16(n), unpack_8_to_16(m));
}

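/*
 * Perform two 16x16->32 carry-less multiplies on the even 16-bit
 * units (the low half of each 32-bit lane) of n and m.
 */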
uint64_t clmul_16x2_even(uint64_t n, uint64_t m)
{
    uint64_t r = 0;

    n &= 0x0000ffff0000ffffull;
    m &= 0x0000ffff0000ffffull;

    for (int i = 0; i < 16; ++i) {
        /* Expand bit 0 of each 32-bit lane of n into a 32-bit mask. */
        uint64_t mask = (n & 0x0000000100000001ull) * 0xffffffffull;
        r ^= m & mask;
        n >>= 1;
        m <<= 1;
    }
    return r;
}

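/*
 * Perform two 16x16->32 carry-less multiplies on the odd 16-bit
 * units (the high half of each 32-bit lane) of n and m.
 */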
uint64_t clmul_16x2_odd(uint64_t n, uint64_t m)
{
    return clmul_16x2_even(n >> 16, m >> 16);
}

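/*
 * Perform one 32x32->64 carry-less multiply.
 */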
uint64_t clmul_32(uint32_t n, uint32_t m32)
{
    uint64_t r = 0;
    uint64_t m = m32;

    for (int i = 0; i < 32; ++i) {
        r ^= n & 1 ? m : 0;
        n >>= 1;
        m <<= 1;
    }
    return r;
}

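/*
 * Perform one 64x64->128 carry-less multiply.  This is the generic
 * fallback behind clmul_64; a host with a carry-less multiply
 * instruction may provide an accelerated version instead.
 */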
Int128 clmul_64_gen(uint64_t n, uint64_t m)
{
    uint64_t rl = 0, rh = 0;

    /* Bit 0 can only influence the low 64-bit result.  */
    if (n & 1) {
        rl = m;
    }

    for (int i = 1; i < 64; ++i) {
        /* mask is all-ones if bit i of n is set, else zero. */
        uint64_t mask = -((n >> i) & 1);
        rl ^= (m << i) & mask;
        rh ^= (m >> (64 - i)) & mask;
    }
    return int128_make128(rl, rh);
}
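
/*
 * Minimal self-test sketch: the CLMUL_SELF_TEST guard and the main()
 * below are illustrative only, not part of the QEMU build.  It checks
 * clmul_32 against the worked 0x03 x 0x05 example above and verifies
 * that clmul_64_gen agrees on the same small inputs.
 */
#ifdef CLMUL_SELF_TEST
#include <assert.h>

int main(void)
{
    /* Carry-less 3 x 5: 0b101 ^ (0b101 << 1) = 0b1111 = 15. */
    assert(clmul_32(3, 5) == 15);

    /* Small inputs cannot set the high 64 bits of the product. */
    Int128 r = clmul_64_gen(3, 5);
    assert(int128_getlo(r) == 15);
    assert(int128_gethi(r) == 0);
    return 0;
}
#endif /* CLMUL_SELF_TEST */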