// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package aes

import (
	"crypto/cipher"
	subtleoverlap "crypto/internal/subtle"
	"crypto/subtle"
	"encoding/binary"
	"errors"
	"internal/cpu"
)

// This file contains two implementations of AES-GCM. The first implementation
// (gcmAsm) uses the KMCTR instruction to encrypt using AES in counter mode and
// the KIMD instruction for GHASH. The second implementation (gcmKMA) uses the
// newer KMA instruction which performs both operations.
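//
// Neither implementation is called directly; both are reached through the
// generic crypto/cipher API. A minimal usage sketch (key, nonce, plaintext
// and additionalData are assumed to be caller-supplied values of valid
// length):
//
//	block, err := aes.NewCipher(key)
//	if err != nil {
//		// handle error
//	}
//	aead, err := cipher.NewGCM(block) // dispatches to NewGCM below via gcmAble
//	if err != nil {
//		// handle error
//	}
//	ciphertext := aead.Seal(nil, nonce, plaintext, additionalData)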

// gcmCount represents a 16-byte big-endian count value.
type gcmCount [16]byte

// inc increments the rightmost 32-bits of the count value by 1.
func (x *gcmCount) inc() {
	binary.BigEndian.PutUint32(x[len(x)-4:], binary.BigEndian.Uint32(x[len(x)-4:])+1)
}

// gcmLengths writes len0 || len1 as big-endian values to a 16-byte array.
func gcmLengths(len0, len1 uint64) [16]byte {
	v := [16]byte{}
	binary.BigEndian.PutUint64(v[0:], len0)
	binary.BigEndian.PutUint64(v[8:], len1)
	return v
}

// gcmHashKey represents the 16-byte hash key required by the GHASH algorithm.
type gcmHashKey [16]byte

type gcmAsm struct {
	block     *aesCipherAsm
	hashKey   gcmHashKey
	nonceSize int
	tagSize   int
}

const (
	gcmBlockSize         = 16
	gcmTagSize           = 16
	gcmMinimumTagSize    = 12 // NIST SP 800-38D recommends tags with 12 or more bytes.
	gcmStandardNonceSize = 12
)

var errOpen = errors.New("cipher: message authentication failed")

// Assert that aesCipherAsm implements the gcmAble interface.
var _ gcmAble = (*aesCipherAsm)(nil)

// NewGCM returns the AES cipher wrapped in Galois Counter Mode. This is only
// called by crypto/cipher.NewGCM via the gcmAble interface.
func (c *aesCipherAsm) NewGCM(nonceSize, tagSize int) (cipher.AEAD, error) {
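	// The GHASH hash key is the encryption of the all-zero block.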
	var hk gcmHashKey
	c.Encrypt(hk[:], hk[:])
	g := gcmAsm{
		block:     c,
		hashKey:   hk,
		nonceSize: nonceSize,
		tagSize:   tagSize,
	}
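	// Prefer the KMA implementation when the CPU supports it.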
	if cpu.S390X.HasAESGCM {
		g := gcmKMA{g}
		return &g, nil
	}
	return &g, nil
}

func (g *gcmAsm) NonceSize() int {
	return g.nonceSize
}

func (g *gcmAsm) Overhead() int {
	return g.tagSize
}

// sliceForAppend takes a slice and a requested number of bytes. It returns a
// slice with the contents of the given slice followed by that many bytes and a
// second slice that aliases into it and contains only the extra bytes. If the
// original slice has sufficient capacity then no allocation is performed.
func sliceForAppend(in []byte, n int) (head, tail []byte) {
	if total := len(in) + n; cap(in) >= total {
		head = in[:total]
	} else {
		head = make([]byte, total)
		copy(head, in)
	}
	tail = head[len(in):]
	return
}

// ghash uses the GHASH algorithm to hash data with the given key. The initial
// hash value is given by hash which will be updated with the new hash value.
// The length of data must be a multiple of 16-bytes.
//go:noescape
func ghash(key *gcmHashKey, hash *[16]byte, data []byte)

// paddedGHASH pads data with zeroes until its length is a multiple of
// 16-bytes. It then calculates a new value for hash using the GHASH algorithm.
func (g *gcmAsm) paddedGHASH(hash *[16]byte, data []byte) {
	siz := len(data) &^ 0xf // align size to 16-bytes
	if siz > 0 {
		ghash(&g.hashKey, hash, data[:siz])
		data = data[siz:]
	}
	if len(data) > 0 {
		var s [16]byte
		copy(s[:], data)
		ghash(&g.hashKey, hash, s[:])
	}
}

// cryptBlocksGCM encrypts src using AES in counter mode using the given
// function code and key. The rightmost 32-bits of the counter are incremented
// between each block as required by the GCM spec. The initial counter value
// is given by cnt, which is updated with the value of the next counter value
// to use.
//
// The lengths of both dst and buf must be greater than or equal to the length
// of src. buf may be partially or completely overwritten during the execution
// of the function.
//go:noescape
func cryptBlocksGCM(fn code, key, dst, src, buf []byte, cnt *gcmCount)

// counterCrypt encrypts src using AES in counter mode and places the result
// into dst. cnt is the initial count value and will be updated with the next
// count value. The length of dst must be greater than or equal to the length
// of src.
func (g *gcmAsm) counterCrypt(dst, src []byte, cnt *gcmCount) {
	// Copying src into a buffer improves performance on some models when
	// src and dst point to the same underlying array. We also need a
	// buffer for counter values.
	var ctrbuf, srcbuf [2048]byte
	for len(src) >= 16 {
		siz := len(src)
		if len(src) > len(ctrbuf) {
			siz = len(ctrbuf)
		}
		siz &^= 0xf // align siz to 16-bytes
		copy(srcbuf[:], src[:siz])
		cryptBlocksGCM(g.block.function, g.block.key, dst[:siz], srcbuf[:siz], ctrbuf[:], cnt)
		src = src[siz:]
		dst = dst[siz:]
	}
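	// Any trailing partial block is handled by XORing it with a single
	// encrypted counter block.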
	if len(src) > 0 {
		var x [16]byte
		g.block.Encrypt(x[:], cnt[:])
		for i := range src {
			dst[i] = src[i] ^ x[i]
		}
		cnt.inc()
	}
}

// deriveCounter computes the initial GCM counter state from the given nonce.
// See NIST SP 800-38D, section 7.1.
func (g *gcmAsm) deriveCounter(nonce []byte) gcmCount {
	// GCM has two modes of operation with respect to the initial counter
	// state: a "fast path" for 96-bit (12-byte) nonces, and a "slow path"
	// for nonces of other lengths. For a 96-bit nonce, the nonce, along
	// with a four-byte big-endian counter starting at one, is used
	// directly as the starting counter. For other nonce sizes, the counter
	// is computed by passing it through the GHASH function.
	var counter gcmCount
	if len(nonce) == gcmStandardNonceSize {
		copy(counter[:], nonce)
		counter[gcmBlockSize-1] = 1
	} else {
		var hash [16]byte
		g.paddedGHASH(&hash, nonce)
		lens := gcmLengths(0, uint64(len(nonce))*8)
		g.paddedGHASH(&hash, lens[:])
		copy(counter[:], hash[:])
	}
	return counter
}

// auth calculates GHASH(ciphertext, additionalData), masks the result with
// tagMask and writes the result to out.
func (g *gcmAsm) auth(out, ciphertext, additionalData []byte, tagMask *[gcmTagSize]byte) {
	var hash [16]byte
	g.paddedGHASH(&hash, additionalData)
	g.paddedGHASH(&hash, ciphertext)
	lens := gcmLengths(uint64(len(additionalData))*8, uint64(len(ciphertext))*8)
	g.paddedGHASH(&hash, lens[:])

	copy(out, hash[:])
	for i := range out {
		out[i] ^= tagMask[i]
	}
}

// Seal encrypts and authenticates plaintext. See the cipher.AEAD interface for
// details.
func (g *gcmAsm) Seal(dst, nonce, plaintext, data []byte) []byte {
	if len(nonce) != g.nonceSize {
		panic("crypto/cipher: incorrect nonce length given to GCM")
	}
	if uint64(len(plaintext)) > ((1<<32)-2)*BlockSize {
		panic("crypto/cipher: message too large for GCM")
	}

	ret, out := sliceForAppend(dst, len(plaintext)+g.tagSize)
	if subtleoverlap.InexactOverlap(out[:len(plaintext)], plaintext) {
		panic("crypto/cipher: invalid buffer overlap")
	}

	counter := g.deriveCounter(nonce)

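	// The tag mask is the encryption of the initial counter block (J0);
	// it is XORed into the GHASH output to produce the final tag.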
	var tagMask [gcmBlockSize]byte
	g.block.Encrypt(tagMask[:], counter[:])
	counter.inc()

	var tagOut [gcmTagSize]byte
	g.counterCrypt(out, plaintext, &counter)
	g.auth(tagOut[:], out[:len(plaintext)], data, &tagMask)
	copy(out[len(plaintext):], tagOut[:])

	return ret
}

// Open authenticates and decrypts ciphertext. See the cipher.AEAD interface
// for details.
func (g *gcmAsm) Open(dst, nonce, ciphertext, data []byte) ([]byte, error) {
	if len(nonce) != g.nonceSize {
		panic("crypto/cipher: incorrect nonce length given to GCM")
	}
	// Sanity check to prevent the authentication from always succeeding if an implementation
	// leaves tagSize uninitialized, for example.
	if g.tagSize < gcmMinimumTagSize {
		panic("crypto/cipher: incorrect GCM tag size")
	}
	if len(ciphertext) < g.tagSize {
		return nil, errOpen
	}
	if uint64(len(ciphertext)) > ((1<<32)-2)*uint64(BlockSize)+uint64(g.tagSize) {
		return nil, errOpen
	}

	tag := ciphertext[len(ciphertext)-g.tagSize:]
	ciphertext = ciphertext[:len(ciphertext)-g.tagSize]

	counter := g.deriveCounter(nonce)

	var tagMask [gcmBlockSize]byte
	g.block.Encrypt(tagMask[:], counter[:])
	counter.inc()

	var expectedTag [gcmTagSize]byte
	g.auth(expectedTag[:], ciphertext, data, &tagMask)

	ret, out := sliceForAppend(dst, len(ciphertext))
	if subtleoverlap.InexactOverlap(out, ciphertext) {
		panic("crypto/cipher: invalid buffer overlap")
	}

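	// The tag is checked in constant time before any plaintext is written
	// to out.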
	if subtle.ConstantTimeCompare(expectedTag[:g.tagSize], tag) != 1 {
		// The AESNI code decrypts and authenticates concurrently, and
		// so overwrites dst in the event of a tag mismatch. That
		// behavior is mimicked here in order to be consistent across
		// platforms.
		for i := range out {
			out[i] = 0
		}
		return nil, errOpen
	}

	g.counterCrypt(out, ciphertext, &counter)
	return ret, nil
}

// gcmKMA implements the cipher.AEAD interface using the KMA instruction. It
// should only be used if cpu.S390X.HasAESGCM is true.
type gcmKMA struct {
	gcmAsm
}

// flags for the KMA instruction
const (
	kmaHS      = 1 << 10 // hash subkey supplied
	kmaLAAD    = 1 << 9  // last series of additional authenticated data
	kmaLPC     = 1 << 8  // last series of plaintext or ciphertext blocks
	kmaDecrypt = 1 << 7  // decrypt
)

// kmaGCM executes the encryption or decryption operation given by fn. The tag
// will be calculated and written to tag. cnt should contain the current
// counter state and will be overwritten with the updated counter state.
// TODO(mundaym): could pass in hash subkey
//go:noescape
func kmaGCM(fn code, key, dst, src, aad []byte, tag *[16]byte, cnt *gcmCount)

// Seal encrypts and authenticates plaintext. See the cipher.AEAD interface for
// details.
func (g *gcmKMA) Seal(dst, nonce, plaintext, data []byte) []byte {
	if len(nonce) != g.nonceSize {
		panic("crypto/cipher: incorrect nonce length given to GCM")
	}
	if uint64(len(plaintext)) > ((1<<32)-2)*BlockSize {
		panic("crypto/cipher: message too large for GCM")
	}

	ret, out := sliceForAppend(dst, len(plaintext)+g.tagSize)
	if subtleoverlap.InexactOverlap(out[:len(plaintext)], plaintext) {
		panic("crypto/cipher: invalid buffer overlap")
	}

	counter := g.deriveCounter(nonce)
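	// All of the AAD and plaintext is passed to KMA in a single call, so
	// both "last" flags are set.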
	fc := g.block.function | kmaLAAD | kmaLPC

	var tag [gcmTagSize]byte
	kmaGCM(fc, g.block.key, out[:len(plaintext)], plaintext, data, &tag, &counter)
	copy(out[len(plaintext):], tag[:])

	return ret
}

// Open authenticates and decrypts ciphertext. See the cipher.AEAD interface
// for details.
func (g *gcmKMA) Open(dst, nonce, ciphertext, data []byte) ([]byte, error) {
	if len(nonce) != g.nonceSize {
		panic("crypto/cipher: incorrect nonce length given to GCM")
	}
	if len(ciphertext) < g.tagSize {
		return nil, errOpen
	}
	if uint64(len(ciphertext)) > ((1<<32)-2)*uint64(BlockSize)+uint64(g.tagSize) {
		return nil, errOpen
	}

	tag := ciphertext[len(ciphertext)-g.tagSize:]
	ciphertext = ciphertext[:len(ciphertext)-g.tagSize]
	ret, out := sliceForAppend(dst, len(ciphertext))
	if subtleoverlap.InexactOverlap(out, ciphertext) {
		panic("crypto/cipher: invalid buffer overlap")
	}

	if g.tagSize < gcmMinimumTagSize {
		panic("crypto/cipher: incorrect GCM tag size")
	}

	counter := g.deriveCounter(nonce)
	fc := g.block.function | kmaLAAD | kmaLPC | kmaDecrypt

	var expectedTag [gcmTagSize]byte
	kmaGCM(fc, g.block.key, out[:len(ciphertext)], ciphertext, data, &expectedTag, &counter)

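	// KMA computes the tag as part of the decryption operation, so out
	// already holds the plaintext when the tag is checked.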
	if subtle.ConstantTimeCompare(expectedTag[:g.tagSize], tag) != 1 {
		// The AESNI code decrypts and authenticates concurrently, and
		// so overwrites dst in the event of a tag mismatch. That
		// behavior is mimicked here in order to be consistent across
		// platforms.
		for i := range out {
			out[i] = 0
		}
		return nil, errOpen
	}

	return ret, nil
}