/*-
 * Copyright (c) 2010 Konstantin Belousov <kib@FreeBSD.org>
 * Copyright (c) 2010 Pawel Jakub Dawidek <pjd@FreeBSD.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: src/sys/crypto/aesni/aesni_wrap.c,v 1.7 2010/11/27 15:41:44 kib Exp $
 */

#include <sys/param.h>
#include <sys/libkern.h>
#include <sys/malloc.h>
#include <sys/proc.h>
#include <sys/systm.h>
#include <dev/crypto/aesni/aesni.h>

MALLOC_DECLARE(M_AESNI);

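/*
 * CBC encryption: each plaintext block is chained with the previous
 * ciphertext block (the IV for the first block), so the loop is
 * inherently serial; aesni_enc() folds `ivp' into the plaintext
 * before encrypting.
 */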
void
aesni_encrypt_cbc(int rounds, const void *key_schedule, size_t len,
    const uint8_t *from, uint8_t *to, const uint8_t iv[AES_BLOCK_LEN])
{
	const uint8_t *ivp;
	size_t i;

	len /= AES_BLOCK_LEN;
	ivp = iv;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, ivp);
		ivp = to;
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

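/*
 * ECB helpers: each block is processed independently, so no IV is
 * passed down; `len' is taken as a whole number of AES blocks.
 */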
void
aesni_encrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_enc(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

void
aesni_decrypt_ecb(int rounds, const void *key_schedule, size_t len,
    const uint8_t from[AES_BLOCK_LEN], uint8_t to[AES_BLOCK_LEN])
{
	size_t i;

	len /= AES_BLOCK_LEN;
	for (i = 0; i < len; i++) {
		aesni_dec(rounds - 1, key_schedule, from, to, NULL);
		from += AES_BLOCK_LEN;
		to += AES_BLOCK_LEN;
	}
}

#define	AES_XTS_BLOCKSIZE	16
#define	AES_XTS_IVSIZE		8
#define	AES_XTS_ALPHA		0x87	/* GF(2^128) generator polynomial */

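/*
 * XTS per-block step (XEX construction): XOR the input with the current
 * tweak, run the block cipher, XOR the output with the same tweak, then
 * advance the tweak for the next block by multiplying it by x (alpha)
 * in GF(2^128).
 */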
static void
aesni_crypt_xts_block(int rounds, const void *key_schedule, uint8_t *tweak,
    const uint8_t *from, uint8_t *to, int do_encrypt)
{
	uint8_t block[AES_XTS_BLOCKSIZE];
	u_int i, carry_in, carry_out;

	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
		block[i] = from[i] ^ tweak[i];

	if (do_encrypt)
		aesni_enc(rounds - 1, key_schedule, block, to, NULL);
	else
		aesni_dec(rounds - 1, key_schedule, block, to, NULL);

	for (i = 0; i < AES_XTS_BLOCKSIZE; i++)
		to[i] ^= tweak[i];

	/* Multiply the tweak by alpha (x) in GF(2^128), LSB first. */
	carry_in = 0;
	for (i = 0; i < AES_XTS_BLOCKSIZE; i++) {
		carry_out = tweak[i] & 0x80;
		tweak[i] = (tweak[i] << 1) | (carry_in ? 1 : 0);
		carry_in = carry_out;
	}
	if (carry_in)
		tweak[0] ^= AES_XTS_ALPHA;
	bzero(block, sizeof(block));
}

static void
aesni_crypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN], int do_encrypt)
{
	uint8_t tweak[AES_XTS_BLOCKSIZE];
	uint64_t blocknum;
	size_t i;

	/*
	 * Prepare tweak as E_k2(IV).  IV is specified as LE representation
	 * of a 64-bit block number which we allow to be passed in directly.
	 */
	bcopy(iv, &blocknum, AES_XTS_IVSIZE);
	for (i = 0; i < AES_XTS_IVSIZE; i++) {
		tweak[i] = blocknum & 0xff;
		blocknum >>= 8;
	}
	/* Last 64 bits of IV are always zero. */
	bzero(tweak + AES_XTS_IVSIZE, AES_XTS_IVSIZE);
	aesni_enc(rounds - 1, tweak_schedule, tweak, tweak, NULL);

	len /= AES_XTS_BLOCKSIZE;
	for (i = 0; i < len; i++) {
		aesni_crypt_xts_block(rounds, data_schedule, tweak, from, to,
		    do_encrypt);
		from += AES_XTS_BLOCKSIZE;
		to += AES_XTS_BLOCKSIZE;
	}

	bzero(tweak, sizeof(tweak));
}

static void
aesni_encrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 1);
}

static void
aesni_decrypt_xts(int rounds, const void *data_schedule,
    const void *tweak_schedule, size_t len, const uint8_t *from, uint8_t *to,
    const uint8_t iv[AES_BLOCK_LEN])
{

	aesni_crypt_xts(rounds, data_schedule, tweak_schedule, len, from, to,
	    iv, 0);
}

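/*
 * Compute the key schedules for a session.  `keylen' is in bits; XTS
 * keys are double length (a data key followed by a tweak key), which is
 * why 256 selects AES-128 and 512 selects AES-256 below.
 */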
static int
aesni_cipher_setup_common(struct aesni_session *ses, const uint8_t *key,
    int keylen)
{

	switch (ses->algo) {
	case CRYPTO_AES_CBC:
		switch (keylen) {
		case 128:
			ses->rounds = AES128_ROUNDS;
			break;
		case 192:
			ses->rounds = AES192_ROUNDS;
			break;
		case 256:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	case CRYPTO_AES_XTS:
		switch (keylen) {
		case 256:
			ses->rounds = AES128_ROUNDS;
			break;
		case 512:
			ses->rounds = AES256_ROUNDS;
			break;
		default:
			return (EINVAL);
		}
		break;
	default:
		return (EINVAL);
	}

	aesni_set_enckey(key, ses->enc_schedule, ses->rounds);
	aesni_set_deckey(ses->enc_schedule, ses->dec_schedule, ses->rounds);
	if (ses->algo == CRYPTO_AES_CBC)
		karc4random_buf(ses->iv, sizeof(ses->iv));
	else /* if (ses->algo == CRYPTO_AES_XTS) */ {
		aesni_set_enckey(key + keylen / 16, ses->xts_schedule,
		    ses->rounds);
	}

	return (0);
}

int
aesni_cipher_setup(struct aesni_session *ses, struct cryptoini *encini)
{
	int error = 0;
#if 0
	struct thread *td;
	int saved_ctx;
#endif

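/*
 * The disabled blocks preserve the FreeBSD fpu_kern_enter()/fpu_kern_leave()
 * bracketing around the AES-NI code; presumably this port saves kernel FPU
 * state by other means.
 */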
#if 0
	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
		saved_ctx = 1;
	} else {
		error = 0;
		saved_ctx = 0;
	}
#endif
	if (error == 0) {
		error = aesni_cipher_setup_common(ses, encini->cri_key,
		    encini->cri_klen);
#if 0
		if (saved_ctx)
			fpu_kern_leave(td, &ses->fpu_ctx);
#endif
	}
	return (error);
}

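/*
 * Run one encryption/decryption descriptor: gather the payload into a
 * contiguous buffer if necessary, cipher it in place, then copy the
 * result back into the request's buffer.
 */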
int
aesni_cipher_process(struct aesni_session *ses, struct cryptodesc *enccrd,
    struct cryptop *crp)
{
	uint8_t *buf;
	int error = 0, allocated;
#if 0
	struct thread *td;
	int saved_ctx;
#endif

	buf = aesni_cipher_alloc(enccrd, crp, &allocated);
	if (buf == NULL)
		return (ENOMEM);

#if 0
	td = curthread;
	if (!is_fpu_kern_thread(0)) {
		error = fpu_kern_enter(td, &ses->fpu_ctx, FPU_KERN_NORMAL);
		if (error != 0)
			goto out;
		saved_ctx = 1;
	} else {
		saved_ctx = 0;
		error = 0;
	}
#endif

	if ((enccrd->crd_flags & CRD_F_KEY_EXPLICIT) != 0) {
		error = aesni_cipher_setup_common(ses, enccrd->crd_key,
		    enccrd->crd_klen);
		if (error != 0)
			goto out;
	}

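	/*
	 * Encryption: take the IV from the descriptor if one was supplied
	 * explicitly, otherwise use the session IV, and inject it into the
	 * output unless the caller placed it there already.
	 */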
	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0) {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		if ((enccrd->crd_flags & CRD_F_IV_PRESENT) == 0)
			crypto_copyback(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_encrypt_cbc(ses->rounds, ses->enc_schedule,
			    enccrd->crd_len, buf, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_encrypt_xts(ses->rounds, ses->enc_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	} else {
		if ((enccrd->crd_flags & CRD_F_IV_EXPLICIT) != 0)
			bcopy(enccrd->crd_iv, ses->iv, AES_BLOCK_LEN);
		else
			crypto_copydata(crp->crp_flags, crp->crp_buf,
			    enccrd->crd_inject, AES_BLOCK_LEN, ses->iv);
		if (ses->algo == CRYPTO_AES_CBC) {
			aesni_decrypt_cbc(ses->rounds, ses->dec_schedule,
			    enccrd->crd_len, buf, ses->iv);
		} else /* if (ses->algo == CRYPTO_AES_XTS) */ {
			aesni_decrypt_xts(ses->rounds, ses->dec_schedule,
			    ses->xts_schedule, enccrd->crd_len, buf, buf,
			    ses->iv);
		}
	}
#if 0
	if (saved_ctx)
		fpu_kern_leave(td, &ses->fpu_ctx);
#endif
	if (allocated)
		crypto_copyback(crp->crp_flags, crp->crp_buf, enccrd->crd_skip,
		    enccrd->crd_len, buf);
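	/*
	 * After encrypting, save the last ciphertext block as the IV for
	 * a subsequent request.
	 */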
	if ((enccrd->crd_flags & CRD_F_ENCRYPT) != 0)
		crypto_copydata(crp->crp_flags, crp->crp_buf,
		    enccrd->crd_skip + enccrd->crd_len - AES_BLOCK_LEN,
		    AES_BLOCK_LEN, ses->iv);
out:
	if (allocated) {
		bzero(buf, enccrd->crd_len);
		kfree(buf, M_AESNI);
	}
	return (error);
}