xref: /qemu/crypto/xts.c (revision 6f0dd6c5)
/*
 * QEMU Crypto XTS cipher mode
 *
 * Copyright (c) 2015-2016 Red Hat, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * This code is originally derived from public domain / WTFPL code in
 * the LibTomCrypt cryptographic library http://libtom.org. The XTS code
 * was donated by Elliptic Semiconductor Inc (www.ellipticsemi.com)
 * to the LibTom Projects.
 *
 */

#include "qemu/osdep.h"
#include "qemu/bswap.h"
#include "crypto/xts.h"

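/*
 * One 16-byte XTS block (or tweak value), addressable either as raw
 * bytes or as two 64-bit words so that the XOR and tweak
 * multiplication helpers below can operate a word at a time.
 */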
typedef union {
    uint8_t b[XTS_BLOCK_SIZE];
    uint64_t u[2];
} xts_uint128;

static inline void xts_uint128_xor(xts_uint128 *D,
                                   const xts_uint128 *S1,
                                   const xts_uint128 *S2)
{
    D->u[0] = S1->u[0] ^ S2->u[0];
    D->u[1] = S1->u[1] ^ S2->u[1];
}

static inline void xts_uint128_cpu_to_les(xts_uint128 *v)
{
    cpu_to_le64s(&v->u[0]);
    cpu_to_le64s(&v->u[1]);
}

static inline void xts_uint128_le_to_cpus(xts_uint128 *v)
{
    le64_to_cpus(&v->u[0]);
    le64_to_cpus(&v->u[1]);
}

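/*
 * Multiply the tweak by x (alpha) in GF(2^128), using the XTS
 * reduction polynomial x^128 + x^7 + x^2 + x + 1 (hence the 0x87
 * constant).  The tweak is stored in little-endian byte order in
 * memory, so it is converted to host order, shifted left by one bit
 * as a 128-bit value with reduction on carry-out of the top bit,
 * then converted back.
 */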
static void xts_mult_x(xts_uint128 *I)
{
    uint64_t tt;

    xts_uint128_le_to_cpus(I);

    tt = I->u[0] >> 63;
    I->u[0] <<= 1;

    if (I->u[1] >> 63) {
        I->u[0] ^= 0x87;
    }
    I->u[1] <<= 1;
    I->u[1] |= tt;

    xts_uint128_cpu_to_les(I);
}


/**
 * xts_tweak_encdec:
 * @ctx: the cipher context
 * @func: the cipher function
 * @src: buffer providing the input text of XTS_BLOCK_SIZE bytes
 * @dst: buffer to receive the output text of XTS_BLOCK_SIZE bytes
 * @iv: the initialization vector tweak of XTS_BLOCK_SIZE bytes
 *
 * Encrypt/decrypt data with a tweak
 */
static inline void xts_tweak_encdec(const void *ctx,
                                    xts_cipher_func *func,
                                    const xts_uint128 *src,
                                    xts_uint128 *dst,
                                    xts_uint128 *iv)
{
    /* tweak encrypt block i */
    xts_uint128_xor(dst, src, iv);

    func(ctx, XTS_BLOCK_SIZE, dst->b, dst->b);

    xts_uint128_xor(dst, dst, iv);

    /* LFSR the tweak */
    xts_mult_x(iv);
}


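/*
 * Decrypt all full blocks with the per-block tweak, handle any
 * trailing partial block with ciphertext stealing, then decrypt the
 * advanced tweak value back into the iv buffer.
 */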
void xts_decrypt(const void *datactx,
                 const void *tweakctx,
                 xts_cipher_func *encfunc,
                 xts_cipher_func *decfunc,
                 uint8_t *iv,
                 size_t length,
                 uint8_t *dst,
                 const uint8_t *src)
{
    xts_uint128 PP, CC, T;
    unsigned long i, m, mo, lim;

    /* get number of blocks */
    m = length >> 4;
    mo = length & 15;

    /* must have at least one full block */
    g_assert(m != 0);

    if (mo == 0) {
        lim = m;
    } else {
        lim = m - 1;
    }

    /* encrypt the iv */
    encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);

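    /*
     * If both buffers are 64-bit aligned, process them in place as
     * xts_uint128 values; otherwise bounce each block through a
     * local copy to avoid unaligned loads and stores.
     */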
    if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
        QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
        xts_uint128 *S = (xts_uint128 *)src;
        xts_uint128 *D = (xts_uint128 *)dst;
        for (i = 0; i < lim; i++, S++, D++) {
            xts_tweak_encdec(datactx, decfunc, S, D, &T);
        }
    } else {
        xts_uint128 D;

        for (i = 0; i < lim; i++) {
            memcpy(&D, src, XTS_BLOCK_SIZE);
            xts_tweak_encdec(datactx, decfunc, &D, &D, &T);
            memcpy(dst, &D, XTS_BLOCK_SIZE);
            src += XTS_BLOCK_SIZE;
            dst += XTS_BLOCK_SIZE;
        }
    }

    /*
     * If length is not a multiple of XTS_BLOCK_SIZE, handle the
     * trailing partial block with ciphertext stealing
     */
    if (mo > 0) {
        xts_uint128 S, D;
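        /*
         * The ciphertext stored in the last full-block position was
         * produced with the tweak for block m (T multiplied by x
         * once more), while the stolen trailing bytes came from the
         * block encrypted with the tweak for block m-1, so the two
         * tweaks are applied in the opposite order to encryption.
         */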
        memcpy(&CC, &T, XTS_BLOCK_SIZE);
        xts_mult_x(&CC);

        /* PP = tweak decrypt block m-1 */
        memcpy(&S, src, XTS_BLOCK_SIZE);
        xts_tweak_encdec(datactx, decfunc, &S, &PP, &CC);

        /* Pm = first length % XTS_BLOCK_SIZE bytes of PP */
        for (i = 0; i < mo; i++) {
            CC.b[i] = src[XTS_BLOCK_SIZE + i];
            dst[XTS_BLOCK_SIZE + i] = PP.b[i];
        }
        for (; i < XTS_BLOCK_SIZE; i++) {
            CC.b[i] = PP.b[i];
        }

        /* Pm-1 = tweak decrypt CC */
        xts_tweak_encdec(datactx, decfunc, &CC, &D, &T);
        memcpy(dst, &D, XTS_BLOCK_SIZE);
    }

    /* Decrypt the final tweak back into iv */
    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T.b);
}


void xts_encrypt(const void *datactx,
                 const void *tweakctx,
                 xts_cipher_func *encfunc,
                 xts_cipher_func *decfunc,
                 uint8_t *iv,
                 size_t length,
                 uint8_t *dst,
                 const uint8_t *src)
{
    xts_uint128 PP, CC, T;
    unsigned long i, m, mo, lim;

    /* get number of blocks */
    m = length >> 4;
    mo = length & 15;

    /* must have at least one full block */
    g_assert(m != 0);

    if (mo == 0) {
        lim = m;
    } else {
        lim = m - 1;
    }

    /* encrypt the iv */
    encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);

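    /*
     * Fast path when both buffers are 64-bit aligned; otherwise
     * bounce each block through a local copy.
     */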
    if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
        QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
        xts_uint128 *S = (xts_uint128 *)src;
        xts_uint128 *D = (xts_uint128 *)dst;
        for (i = 0; i < lim; i++, S++, D++) {
            xts_tweak_encdec(datactx, encfunc, S, D, &T);
        }
    } else {
        xts_uint128 D;

        for (i = 0; i < lim; i++) {
            memcpy(&D, src, XTS_BLOCK_SIZE);
            xts_tweak_encdec(datactx, encfunc, &D, &D, &T);
            memcpy(dst, &D, XTS_BLOCK_SIZE);

            dst += XTS_BLOCK_SIZE;
            src += XTS_BLOCK_SIZE;
        }
    }

    /*
     * If length is not a multiple of XTS_BLOCK_SIZE, handle the
     * trailing partial block with ciphertext stealing
     */
    if (mo > 0) {
        xts_uint128 S, D;
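        /*
         * Ciphertext stealing: encrypt the last full plaintext
         * block, emit its first mo bytes as the trailing partial
         * ciphertext, pad the partial plaintext block with the
         * remaining bytes of CC, and encrypt that with the next
         * tweak to form the last full ciphertext block.
         */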
        /* CC = tweak encrypt block m-1 */
        memcpy(&S, src, XTS_BLOCK_SIZE);
        xts_tweak_encdec(datactx, encfunc, &S, &CC, &T);

        /* Cm = first length % XTS_BLOCK_SIZE bytes of CC */
        for (i = 0; i < mo; i++) {
            PP.b[i] = src[XTS_BLOCK_SIZE + i];
            dst[XTS_BLOCK_SIZE + i] = CC.b[i];
        }

        for (; i < XTS_BLOCK_SIZE; i++) {
            PP.b[i] = CC.b[i];
        }

        /* Cm-1 = tweak encrypt PP */
        xts_tweak_encdec(datactx, encfunc, &PP, &D, &T);
        memcpy(dst, &D, XTS_BLOCK_SIZE);
    }

    /* Decrypt the final tweak back into iv */
    decfunc(tweakctx, XTS_BLOCK_SIZE, iv, T.b);
}
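
/*
 * Usage sketch: a minimal illustration of how a caller might drive
 * xts_encrypt()/xts_decrypt().  The aes_ecb_enc()/aes_ecb_dec()
 * wrappers and the two key contexts are hypothetical placeholders
 * for whatever ECB-capable block cipher the caller supplies; only
 * the xts_cipher_func shape they must follow and the
 * xts_encrypt()/xts_decrypt() calls themselves come from this file.
 * Note length must be at least XTS_BLOCK_SIZE.
 *
 *   static void aes_ecb_enc(const void *ctx, size_t length,
 *                           uint8_t *dst, const uint8_t *src)
 *   {
 *       ... ECB-encrypt length bytes from src to dst with the key
 *           held in ctx ...
 *   }
 *
 *   static void aes_ecb_dec(const void *ctx, size_t length,
 *                           uint8_t *dst, const uint8_t *src)
 *   {
 *       ... matching ECB decryption ...
 *   }
 *
 *   uint8_t iv[XTS_BLOCK_SIZE];   (e.g. little-endian sector number)
 *
 *   xts_encrypt(&data_key_ctx, &tweak_key_ctx,
 *               aes_ecb_enc, aes_ecb_dec,
 *               iv, sector_size, ciphertext, plaintext);
 *   xts_decrypt(&data_key_ctx, &tweak_key_ctx,
 *               aes_ecb_enc, aes_ecb_dec,
 *               iv, sector_size, plaintext, ciphertext);
 */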