/*
 * Copyright (C) 2011-2016 Free Software Foundation, Inc.
 * Copyright (C) 2016-2018 Red Hat, Inc.
 *
 * Author: Nikos Mavrogiannopoulos
 *
 * This file is part of GnuTLS.
 *
 * The GnuTLS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>
 *
 */

/*
 * The following code is an implementation of the AES-GCM cipher
 * using the AES and NEON instruction sets.
 */

#include "errors.h"
#include "gnutls_int.h"
#include <gnutls/crypto.h>
#include <aes-aarch64.h>
#include <aarch64-common.h>
#include <nettle/memxor.h>
#include <nettle/macros.h>
#include <byteswap.h>

#define GCM_BLOCK_SIZE 16
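/* Increment the rightmost 32 bits of the counter block as a big-endian
 * integer, matching GCM's inc32() (NIST SP 800-38D); INCREMENT comes
 * from nettle/macros.h. */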
#define INC32(block) INCREMENT(4, block + GCM_BLOCK_SIZE - 4)

/* GCM mode */

typedef struct {
        uint64_t hi, lo;
} u128;

/* This is the gcm128 structure used in openssl. It
 * is compatible with the included assembly code.
 */
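/* Field roles, as inferred from their use below: Yi: counter block;
 * EKi: encrypted-counter scratch (unused here, kept for layout
 * compatibility); EK0: E(K, Y0), XORed into the tag; len: byte counts
 * of AAD (u[0]) and ciphertext (u[1]); Xi: GHASH accumulator; H: hash
 * subkey; Htable: precomputed multiples of H for the assembly GHASH. */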
struct gcm128_context {
        union {
                uint64_t u[2];
                uint32_t d[4];
                uint8_t c[16];
        } Yi, EKi, EK0, len, Xi, H;
        u128 Htable[16];
};

struct aes_gcm_ctx {
        AES_KEY expanded_key;
        struct gcm128_context gcm;
        unsigned finished;
        unsigned auth_finished;
};

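/* GHASH primitives implemented in the included ARMv8 Crypto Extensions
 * assembly (derived from OpenSSL's ghashv8-armx module). */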
void gcm_init_v8(u128 Htable[16], const uint64_t Xi[2]);
void gcm_ghash_v8(uint64_t Xi[2], const u128 Htable[16],
                  const uint8_t *inp, size_t len);
void gcm_gmult_v8(uint64_t Xi[2], const u128 Htable[16]);

static void aes_gcm_deinit(void *_ctx)
{
        struct aes_gcm_ctx *ctx = _ctx;

        zeroize_temp_key(ctx, sizeof(*ctx));
        gnutls_free(ctx);
}

static int
aes_gcm_cipher_init(gnutls_cipher_algorithm_t algorithm, void **_ctx,
                    int enc)
{
        /* we use key size to distinguish */
        if (algorithm != GNUTLS_CIPHER_AES_128_GCM &&
            algorithm != GNUTLS_CIPHER_AES_192_GCM &&
            algorithm != GNUTLS_CIPHER_AES_256_GCM)
                return GNUTLS_E_INVALID_REQUEST;

        *_ctx = gnutls_calloc(1, sizeof(struct aes_gcm_ctx));
        if (*_ctx == NULL) {
                gnutls_assert();
                return GNUTLS_E_MEMORY_ERROR;
        }

        return 0;
}

static int
aes_gcm_cipher_setkey(void *_ctx, const void *userkey, size_t keysize)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int ret;

        CHECK_AES_KEYSIZE(keysize);

        ret = aes_v8_set_encrypt_key(userkey, keysize * 8,
                                     ALIGN16(&ctx->expanded_key));
        if (ret != 0)
                return gnutls_assert_val(GNUTLS_E_ENCRYPTION_FAILED);

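        /* The context is calloc()ed, so H is all-zero here and this
         * computes the hash subkey H = E(K, 0^128); the byte swaps
         * below put H in the bit order gcm_init_v8() expects. */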
        aes_v8_encrypt(ctx->gcm.H.c, ctx->gcm.H.c, ALIGN16(&ctx->expanded_key));

        ctx->gcm.H.u[0] = bswap_64(ctx->gcm.H.u[0]);
        ctx->gcm.H.u[1] = bswap_64(ctx->gcm.H.u[1]);

        gcm_init_v8(ctx->gcm.Htable, ctx->gcm.H.u);

        return 0;
}

static int aes_gcm_setiv(void *_ctx, const void *iv, size_t iv_size)
{
        struct aes_gcm_ctx *ctx = _ctx;

        if (iv_size != GCM_BLOCK_SIZE - 4)
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        memset(ctx->gcm.Xi.c, 0, sizeof(ctx->gcm.Xi.c));
        memset(ctx->gcm.len.c, 0, sizeof(ctx->gcm.len.c));

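        /* Only 96-bit IVs are accepted, so the initial counter block
         * is Y0 = IV || 0x00000001 (SP 800-38D). */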
        memcpy(ctx->gcm.Yi.c, iv, GCM_BLOCK_SIZE - 4);
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 4] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 3] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 2] = 0;
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 1;

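        /* EK0 = E(K, Y0) is saved to mask the tag; the counter then
         * starts at 2 for the first data block. */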
        aes_v8_encrypt(ctx->gcm.Yi.c, ctx->gcm.EK0.c,
                       ALIGN16(&ctx->expanded_key));
        ctx->gcm.Yi.c[GCM_BLOCK_SIZE - 1] = 2;
        ctx->finished = 0;
        ctx->auth_finished = 0;
        return 0;
}

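/* GHASH update: whole blocks go through the assembly routine; a
 * trailing partial block is zero-padded implicitly, since only its
 * bytes are XORed into Xi before the multiply. */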
static void
gcm_ghash(struct aes_gcm_ctx *ctx, const uint8_t *src, size_t src_size)
{
        size_t rest = src_size % GCM_BLOCK_SIZE;
        size_t aligned_size = src_size - rest;

        if (aligned_size > 0)
                gcm_ghash_v8(ctx->gcm.Xi.u, ctx->gcm.Htable, src,
                             aligned_size);

        if (rest > 0) {
                memxor(ctx->gcm.Xi.c, src + aligned_size, rest);
                gcm_gmult_v8(ctx->gcm.Xi.u, ctx->gcm.Htable);
        }
}

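/* CTR encryption for in == out: the keystream must go into a temporary
 * block so the input is not overwritten before it is XORed; memxor3()
 * reads tmp and in, then writes out. */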
static void
ctr32_encrypt_blocks_inplace(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             const unsigned char ivec[16])
{
        unsigned i;
        uint8_t ctr[16];
        uint8_t tmp[16];

        memcpy(ctr, ivec, 16);

        for (i = 0; i < blocks; i++) {
                aes_v8_encrypt(ctr, tmp, key);
                memxor3(out, tmp, in, 16);

                out += 16;
                in += 16;
                INC32(ctr);
        }
}

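/* CTR pass for non-overlapping buffers: write the keystream to out,
 * then XOR the input into it in place. */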
static void
ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                     size_t blocks, const AES_KEY *key,
                     const unsigned char ivec[16])
{
        unsigned i;
        uint8_t ctr[16];

        if (in == out)
                return ctr32_encrypt_blocks_inplace(in, out, blocks, key,
                                                    ivec);

        memcpy(ctr, ivec, 16);

        for (i = 0; i < blocks; i++) {
                aes_v8_encrypt(ctr, out, key);
                memxor(out, in, 16);

                out += 16;
                in += 16;
                INC32(ctr);
        }
}

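/* Encrypt the final partial block: widen it to a full block in a
 * temporary, run a single CTR block, and copy back only the 'length'
 * bytes that belong to the message. */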
static inline void
ctr_encrypt_last(struct aes_gcm_ctx *ctx, const uint8_t *src,
                 uint8_t *dst, size_t pos, size_t length)
{
        uint8_t tmp[GCM_BLOCK_SIZE];
        uint8_t out[GCM_BLOCK_SIZE];

        memcpy(tmp, &src[pos], length);
        ctr32_encrypt_blocks(tmp, out, 1,
                             ALIGN16(&ctx->expanded_key),
                             ctx->gcm.Yi.c);

        memcpy(&dst[pos], out, length);
}

static int
aes_gcm_encrypt(void *_ctx, const void *src, size_t src_size,
                void *dst, size_t length)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int blocks = src_size / GCM_BLOCK_SIZE;
        int exp_blocks = blocks * GCM_BLOCK_SIZE;
        int rest = src_size - (exp_blocks);
        uint32_t counter;

        if (unlikely(ctx->finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        if (blocks > 0) {
                ctr32_encrypt_blocks(src, dst,
                                     blocks,
                                     ALIGN16(&ctx->expanded_key),
                                     ctx->gcm.Yi.c);

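                /* ctr32_encrypt_blocks() does not write the advanced
                 * counter back into ivec, so bump the stored counter
                 * here. */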
                counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
                counter += blocks;
                _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
        }

        if (rest > 0) { /* last incomplete block */
                ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
                ctx->finished = 1;
        }

        gcm_ghash(ctx, dst, src_size);
        ctx->gcm.len.u[1] += src_size;

        return 0;
}

static int
aes_gcm_decrypt(void *_ctx, const void *src, size_t src_size,
                void *dst, size_t dst_size)
{
        struct aes_gcm_ctx *ctx = _ctx;
        int blocks = src_size / GCM_BLOCK_SIZE;
        int exp_blocks = blocks * GCM_BLOCK_SIZE;
        int rest = src_size - (exp_blocks);
        uint32_t counter;

        if (unlikely(ctx->finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

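        /* When decrypting, GHASH runs over the ciphertext (src)
         * before the counter-mode pass. */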
        gcm_ghash(ctx, src, src_size);
        ctx->gcm.len.u[1] += src_size;

        if (blocks > 0) {
                ctr32_encrypt_blocks(src, dst,
                                     blocks,
                                     ALIGN16(&ctx->expanded_key),
                                     ctx->gcm.Yi.c);

                counter = _gnutls_read_uint32(ctx->gcm.Yi.c + 12);
                counter += blocks;
                _gnutls_write_uint32(counter, ctx->gcm.Yi.c + 12);
        }

        if (rest > 0) { /* last incomplete block */
                ctr_encrypt_last(ctx, src, dst, exp_blocks, rest);
                ctx->finished = 1;
        }

        return 0;
}

static int aes_gcm_auth(void *_ctx, const void *src, size_t src_size)
{
        struct aes_gcm_ctx *ctx = _ctx;

        if (unlikely(ctx->auth_finished))
                return gnutls_assert_val(GNUTLS_E_INVALID_REQUEST);

        gcm_ghash(ctx, src, src_size);
        ctx->gcm.len.u[0] += src_size;

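        /* A partial block ends the AAD phase: gcm_ghash() has already
         * zero-padded it into Xi, so no further AAD may follow. */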
        if (src_size % GCM_BLOCK_SIZE != 0)
                ctx->auth_finished = 1;

        return 0;
}

static void aes_gcm_tag(void *_ctx, void *tag, size_t tagsize)
{
        struct aes_gcm_ctx *ctx = _ctx;
        uint8_t buffer[GCM_BLOCK_SIZE];
        uint64_t alen, clen;

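        /* The final GHASH block is len(A) || len(C): the 64-bit
         * big-endian bit lengths of the AAD and the ciphertext. */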
        alen = ctx->gcm.len.u[0] * 8;
        clen = ctx->gcm.len.u[1] * 8;

        _gnutls_write_uint64(alen, buffer);
        _gnutls_write_uint64(clen, &buffer[8]);

        gcm_ghash_v8(ctx->gcm.Xi.u, ctx->gcm.Htable, buffer,
                     GCM_BLOCK_SIZE);

        ctx->gcm.Xi.u[0] ^= ctx->gcm.EK0.u[0];
        ctx->gcm.Xi.u[1] ^= ctx->gcm.EK0.u[1];

        memcpy(tag, ctx->gcm.Xi.c, MIN(GCM_BLOCK_SIZE, tagsize));
}

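/* Pull in the generic AEAD entry points (aes_gcm_aead_encrypt and
 * aes_gcm_aead_decrypt) shared with the x86 back end, built on the
 * setiv/auth/encrypt/tag primitives above. */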
#include "../x86/aes-gcm-aead.h"

const gnutls_crypto_cipher_st _gnutls_aes_gcm_aarch64 = {
        .init = aes_gcm_cipher_init,
        .setkey = aes_gcm_cipher_setkey,
        .setiv = aes_gcm_setiv,
        .aead_encrypt = aes_gcm_aead_encrypt,
        .aead_decrypt = aes_gcm_aead_decrypt,
        .encrypt = aes_gcm_encrypt,
        .decrypt = aes_gcm_decrypt,
        .deinit = aes_gcm_deinit,
        .tag = aes_gcm_tag,
        .auth = aes_gcm_auth,
};