package toml

import (
	"bytes"
	"fmt"
	"reflect"
	"testing"
	"text/tabwriter"
)

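// testFlow lexes input and fails the test unless the produced tokens match
// expectedFlow exactly (type, value, and position).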
func testFlow(t *testing.T, input string, expectedFlow []token) {
	tokens := lexToml([]byte(input))

	if !reflect.DeepEqual(tokens, expectedFlow) {
		diffFlowsColumnsFatal(t, expectedFlow, tokens)
	}
}

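// diffFlowsColumnsFatal prints the expected and actual token streams side by
// side (value, type T, position P), marking extra (+), missing (-), and
// differing (x) tokens, then fails the test.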
func diffFlowsColumnsFatal(t *testing.T, expectedFlow []token, actualFlow []token) {
	max := len(expectedFlow)
	if len(actualFlow) > max {
		max = len(actualFlow)
	}

	b := &bytes.Buffer{}
	w := tabwriter.NewWriter(b, 0, 0, 1, ' ', tabwriter.Debug)

	fmt.Fprintln(w, "expected\tT\tP\tactual\tT\tP\tdiff")

	for i := 0; i < max; i++ {
		expected := ""
		expectedType := ""
		expectedPos := ""
		if i < len(expectedFlow) {
			expected = fmt.Sprintf("%s", expectedFlow[i])
			expectedType = fmt.Sprintf("%s", expectedFlow[i].typ)
			expectedPos = expectedFlow[i].Position.String()
		}
		actual := ""
		actualType := ""
		actualPos := ""
		if i < len(actualFlow) {
			actual = fmt.Sprintf("%s", actualFlow[i])
			actualType = fmt.Sprintf("%s", actualFlow[i].typ)
			actualPos = actualFlow[i].Position.String()
		}
		different := ""
		if i >= len(expectedFlow) {
			different = "+"
		} else if i >= len(actualFlow) {
			different = "-"
		} else if !reflect.DeepEqual(expectedFlow[i], actualFlow[i]) {
			different = "x"
		}
		fmt.Fprintf(w, "%s\t%s\t%s\t%s\t%s\t%s\t%s\n", expected, expectedType, expectedPos, actual, actualType, actualPos, different)
	}
	w.Flush()
	t.Fatalf("Different flows:\n%s", b.String())
}

func TestValidKeyGroup(t *testing.T) {
	testFlow(t, "[hello world]", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{1, 14}, tokenEOF, ""},
	})
}

func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
	testFlow(t, `[ j . "ʞ" . l . 'ɯ' ]`, []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l . 'ɯ' `},
		{Position{1, 21}, tokenRightBracket, "]"},
		{Position{1, 22}, tokenEOF, ""},
	})
}

func TestNestedQuotedUnicodeKeyAssign(t *testing.T) {
	testFlow(t, ` j . "ʞ" . l . 'ɯ' = 3`, []token{
		{Position{1, 2}, tokenKey, `j . "ʞ" . l . 'ɯ'`},
		{Position{1, 20}, tokenEqual, "="},
		{Position{1, 22}, tokenInteger, "3"},
		{Position{1, 23}, tokenEOF, ""},
	})
}

func TestUnclosedKeyGroup(t *testing.T) {
	testFlow(t, "[hello world", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenError, "unclosed table key"},
	})
}

func TestComment(t *testing.T) {
	testFlow(t, "# blahblah", []token{
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestKeyGroupComment(t *testing.T) {
	testFlow(t, "[hello world] # blahblah", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{1, 25}, tokenEOF, ""},
	})
}

func TestMultipleKeyGroupsComment(t *testing.T) {
	testFlow(t, "[hello world] # blahblah\n[test]", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{2, 1}, tokenLeftBracket, "["},
		{Position{2, 2}, tokenKeyGroup, "test"},
		{Position{2, 6}, tokenRightBracket, "]"},
		{Position{2, 7}, tokenEOF, ""},
	})
}

func TestSimpleWindowsCRLF(t *testing.T) {
	testFlow(t, "a=4\r\nb=2", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 2}, tokenEqual, "="},
		{Position{1, 3}, tokenInteger, "4"},
		{Position{2, 1}, tokenKey, "b"},
		{Position{2, 2}, tokenEqual, "="},
		{Position{2, 3}, tokenInteger, "2"},
		{Position{2, 4}, tokenEOF, ""},
	})
}

func TestBasicKey(t *testing.T) {
	testFlow(t, "hello", []token{
		{Position{1, 1}, tokenKey, "hello"},
		{Position{1, 6}, tokenEOF, ""},
	})
}

func TestBasicKeyWithUnderscore(t *testing.T) {
	testFlow(t, "hello_hello", []token{
		{Position{1, 1}, tokenKey, "hello_hello"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestBasicKeyWithDash(t *testing.T) {
	testFlow(t, "hello-world", []token{
		{Position{1, 1}, tokenKey, "hello-world"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestBasicKeyWithUppercaseMix(t *testing.T) {
	testFlow(t, "helloHELLOHello", []token{
		{Position{1, 1}, tokenKey, "helloHELLOHello"},
		{Position{1, 16}, tokenEOF, ""},
	})
}

func TestBasicKeyWithInternationalCharacters(t *testing.T) {
	testFlow(t, "'héllÖ'", []token{
		{Position{1, 1}, tokenKey, "'héllÖ'"},
		{Position{1, 8}, tokenEOF, ""},
	})
}

func TestBasicKeyAndEqual(t *testing.T) {
	testFlow(t, "hello =", []token{
		{Position{1, 1}, tokenKey, "hello"},
		{Position{1, 7}, tokenEqual, "="},
		{Position{1, 8}, tokenEOF, ""},
	})
}

func TestKeyWithSharpAndEqual(t *testing.T) {
	testFlow(t, "key#name = 5", []token{
		{Position{1, 1}, tokenError, "keys cannot contain # character"},
	})
}

func TestKeyWithSymbolsAndEqual(t *testing.T) {
	testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
		{Position{1, 1}, tokenError, "keys cannot contain ~ character"},
	})
}

func TestKeyEqualStringEscape(t *testing.T) {
	testFlow(t, `foo = "hello\""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello\""},
		{Position{1, 16}, tokenEOF, ""},
	})
}

func TestKeyEqualStringUnfinished(t *testing.T) {
	testFlow(t, `foo = "bar`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unclosed string"},
	})
}

func TestKeyEqualString(t *testing.T) {
	testFlow(t, `foo = "bar"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "bar"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestKeyEqualTrue(t *testing.T) {
	testFlow(t, "foo = true", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenTrue, "true"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestKeyEqualFalse(t *testing.T) {
	testFlow(t, "foo = false", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFalse, "false"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestArrayNestedString(t *testing.T) {
	testFlow(t, `a = [ ["hello", "world"] ]`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 9}, tokenString, "hello"},
		{Position{1, 15}, tokenComma, ","},
		{Position{1, 18}, tokenString, "world"},
		{Position{1, 24}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenRightBracket, "]"},
		{Position{1, 27}, tokenEOF, ""},
	})
}

func TestArrayNestedInts(t *testing.T) {
	testFlow(t, "a = [ [42, 21], [10] ]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenInteger, "42"},
		{Position{1, 10}, tokenComma, ","},
		{Position{1, 12}, tokenInteger, "21"},
		{Position{1, 14}, tokenRightBracket, "]"},
		{Position{1, 15}, tokenComma, ","},
		{Position{1, 17}, tokenLeftBracket, "["},
		{Position{1, 18}, tokenInteger, "10"},
		{Position{1, 20}, tokenRightBracket, "]"},
		{Position{1, 22}, tokenRightBracket, "]"},
		{Position{1, 23}, tokenEOF, ""},
	})
}

func TestArrayInts(t *testing.T) {
	testFlow(t, "a = [ 42, 21, 10, ]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{1, 9}, tokenComma, ","},
		{Position{1, 11}, tokenInteger, "21"},
		{Position{1, 13}, tokenComma, ","},
		{Position{1, 15}, tokenInteger, "10"},
		{Position{1, 17}, tokenComma, ","},
		{Position{1, 19}, tokenRightBracket, "]"},
		{Position{1, 20}, tokenEOF, ""},
	})
}

func TestMultilineArrayComments(t *testing.T) {
	testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 6}, tokenInteger, "1"},
		{Position{1, 7}, tokenComma, ","},
		{Position{2, 1}, tokenInteger, "2"},
		{Position{2, 2}, tokenComma, ","},
		{Position{3, 1}, tokenInteger, "3"},
		{Position{3, 2}, tokenComma, ","},
		{Position{4, 1}, tokenRightBracket, "]"},
		{Position{4, 2}, tokenEOF, ""},
	})
}

func TestNestedArraysComment(t *testing.T) {
	toml := `
someArray = [
# does not work
["entry1"]
]`
	testFlow(t, toml, []token{
		{Position{2, 1}, tokenKey, "someArray"},
		{Position{2, 11}, tokenEqual, "="},
		{Position{2, 13}, tokenLeftBracket, "["},
		{Position{4, 1}, tokenLeftBracket, "["},
		{Position{4, 3}, tokenString, "entry1"},
		{Position{4, 10}, tokenRightBracket, "]"},
		{Position{5, 1}, tokenRightBracket, "]"},
		{Position{5, 2}, tokenEOF, ""},
	})
}

func TestKeyEqualArrayBools(t *testing.T) {
	testFlow(t, "foo = [true, false, true]", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenTrue, "true"},
		{Position{1, 12}, tokenComma, ","},
		{Position{1, 14}, tokenFalse, "false"},
		{Position{1, 19}, tokenComma, ","},
		{Position{1, 21}, tokenTrue, "true"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenEOF, ""},
	})
}

func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
	testFlow(t, "foo = [true, false, true] # YEAH", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenTrue, "true"},
		{Position{1, 12}, tokenComma, ","},
		{Position{1, 14}, tokenFalse, "false"},
		{Position{1, 19}, tokenComma, ","},
		{Position{1, 21}, tokenTrue, "true"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 33}, tokenEOF, ""},
	})
}

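// TestKeyEqualDate covers date and time lexing: local dates, local times,
// local and offset date-times, plus the associated error messages.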
func TestKeyEqualDate(t *testing.T) {
	t.Run("local date time", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T07:32:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "07:32:00"},
			{Position{1, 26}, tokenEOF, ""},
		})
	})

	t.Run("local date time space", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 07:32:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "07:32:00"},
			{Position{1, 26}, tokenEOF, ""},
		})
	})

	t.Run("local date time fraction", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T00:32:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
			{Position{1, 33}, tokenEOF, ""},
		})
	})

	t.Run("local date time fraction space", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:32:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
			{Position{1, 33}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time utc", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "07:32:00"},
			{Position{1, 26}, tokenTimeOffset, "Z"},
			{Position{1, 27}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time -07:00", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00"},
			{Position{1, 26}, tokenTimeOffset, "-07:00"},
			{Position{1, 32}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time fractions -07:00", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
			{Position{1, 33}, tokenTimeOffset, "-07:00"},
			{Position{1, 39}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time space separated utc", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 07:32:00Z", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "07:32:00"},
			{Position{1, 26}, tokenTimeOffset, "Z"},
			{Position{1, 27}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time space separated offset", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:32:00-07:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00"},
			{Position{1, 26}, tokenTimeOffset, "-07:00"},
			{Position{1, 32}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time space separated fraction offset", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:32:00.999999-07:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00.999999"},
			{Position{1, 33}, tokenTimeOffset, "-07:00"},
			{Position{1, 39}, tokenEOF, ""},
		})
	})

	t.Run("local date", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 17}, tokenEOF, ""},
		})
	})

	t.Run("local time", func(t *testing.T) {
		testFlow(t, "foo = 07:32:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalTime, "07:32:00"},
			{Position{1, 15}, tokenEOF, ""},
		})
	})

	t.Run("local time fraction", func(t *testing.T) {
		testFlow(t, "foo = 00:32:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalTime, "00:32:00.999999"},
			{Position{1, 22}, tokenEOF, ""},
		})
	})

	t.Run("local time invalid minute digit", func(t *testing.T) {
		testFlow(t, "foo = 00:3x:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenError, "invalid minute digit in time: x"},
		})
	})

	t.Run("local time invalid minute/second digit", func(t *testing.T) {
		testFlow(t, "foo = 00:30x00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenError, "time minute/second separator should be :, not x"},
		})
	})

	t.Run("local time invalid second digit", func(t *testing.T) {
		testFlow(t, "foo = 00:30:x0.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenError, "invalid second digit in time: x"},
		})
	})

	t.Run("local time invalid fraction", func(t *testing.T) {
		testFlow(t, "foo = 00:30:00.F", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenError, "expected at least one digit in time's fraction, not F"},
		})
	})

	t.Run("local date-time invalid minute digit", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:3x:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenError, "invalid minute digit in time: x"},
		})
	})

	t.Run("local date-time invalid hour digit", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T0x:30:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenError, "invalid hour digit in time: x"},
		})
	})

	t.Run("local date-time invalid hour/minute separator", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27T00x30:00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenError, "time hour/minute separator should be :, not x"},
		})
	})

	t.Run("local date-time invalid minute/second digit", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:30x00.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenError, "time minute/second separator should be :, not x"},
		})
	})

	t.Run("local date-time invalid second digit", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:30:x0.999999", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenError, "invalid second digit in time: x"},
		})
	})

	t.Run("local date-time invalid fraction", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:30:00.F", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenError, "expected at least one digit in time's fraction, not F"},
		})
	})

	t.Run("local date-time invalid month-date separator", func(t *testing.T) {
		testFlow(t, "foo = 1979-05X27 00:30:00.F", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenError, "expected - to separate month of a date, not X"},
		})
	})

	t.Run("local date-time extra whitespace", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27  ", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 19}, tokenEOF, ""},
		})
	})

	t.Run("local date-time extra whitespace", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27     ", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 22}, tokenEOF, ""},
		})
	})

	t.Run("offset date-time invalid offset hour digit", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:32:00-0x:00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00"},
			{Position{1, 26}, tokenError, "invalid hour digit in time offset: x"},
		})
	})

	t.Run("offset date-time invalid offset separator", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:32:00-07x00", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00"},
			{Position{1, 26}, tokenError, "time offset hour/minute separator should be :, not x"},
		})
	})

	t.Run("offset date-time invalid offset minute digit", func(t *testing.T) {
		testFlow(t, "foo = 1979-05-27 00:32:00-07:x0", []token{
			{Position{1, 1}, tokenKey, "foo"},
			{Position{1, 5}, tokenEqual, "="},
			{Position{1, 7}, tokenLocalDate, "1979-05-27"},
			{Position{1, 18}, tokenLocalTime, "00:32:00"},
			{Position{1, 26}, tokenError, "invalid minute digit in time offset: x"},
		})
	})
}

func TestFloatEndingWithDot(t *testing.T) {
	testFlow(t, "foo = 42.", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "float cannot end with a dot"},
	})
}

func TestFloatWithTwoDots(t *testing.T) {
	testFlow(t, "foo = 4.2.", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "cannot have two dots in one float"},
	})
}

func TestFloatWithExponent1(t *testing.T) {
	testFlow(t, "a = 5e+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "5e+22"},
		{Position{1, 10}, tokenEOF, ""},
	})
}

func TestFloatWithExponent2(t *testing.T) {
	testFlow(t, "a = 5E+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "5E+22"},
		{Position{1, 10}, tokenEOF, ""},
	})
}

func TestFloatWithExponent3(t *testing.T) {
	testFlow(t, "a = -5e+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "-5e+22"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestFloatWithExponent4(t *testing.T) {
	testFlow(t, "a = -5e-22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "-5e-22"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestFloatWithExponent5(t *testing.T) {
	testFlow(t, "a = 6.626e-34", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "6.626e-34"},
		{Position{1, 14}, tokenEOF, ""},
	})
}

func TestInvalidEscapeSequence(t *testing.T) {
	testFlow(t, `foo = "\x"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
	})
}

func TestNestedArrays(t *testing.T) {
	testFlow(t, "foo = [[[]]]", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenLeftBracket, "["},
		{Position{1, 9}, tokenLeftBracket, "["},
		{Position{1, 10}, tokenRightBracket, "]"},
		{Position{1, 11}, tokenRightBracket, "]"},
		{Position{1, 12}, tokenRightBracket, "]"},
		{Position{1, 13}, tokenEOF, ""},
	})
}

func TestKeyEqualNumber(t *testing.T) {
	testFlow(t, "foo = 42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{1, 9}, tokenEOF, ""},
	})

	testFlow(t, "foo = +42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "+42"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = -42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "-42"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = 4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "4.2"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = +4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "+4.2"},
		{Position{1, 11}, tokenEOF, ""},
	})

	testFlow(t, "foo = -4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "-4.2"},
		{Position{1, 11}, tokenEOF, ""},
	})

	testFlow(t, "foo = 1_000", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "1_000"},
		{Position{1, 12}, tokenEOF, ""},
	})

	testFlow(t, "foo = 5_349_221", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "5_349_221"},
		{Position{1, 16}, tokenEOF, ""},
	})

	testFlow(t, "foo = 1_2_3_4_5", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
		{Position{1, 16}, tokenEOF, ""},
	})

	testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
		{Position{1, 1}, tokenKey, "flt8"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
		{Position{1, 33}, tokenEOF, ""},
	})

	testFlow(t, "foo = +", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "no digit in that number"},
	})
}

func TestMultiline(t *testing.T) {
	testFlow(t, "foo = 42\nbar=21", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{2, 1}, tokenKey, "bar"},
		{Position{2, 4}, tokenEqual, "="},
		{Position{2, 5}, tokenInteger, "21"},
		{Position{2, 7}, tokenEOF, ""},
	})
}

func TestKeyEqualStringUnicodeEscape(t *testing.T) {
	testFlow(t, `foo = "hello \u2665"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello ♥"},
		{Position{1, 21}, tokenEOF, ""},
	})
	testFlow(t, `foo = "hello \U000003B4"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello δ"},
		{Position{1, 25}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\uabcd"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\uabcd"},
		{Position{1, 15}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\uABCD"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\uABCD"},
		{Position{1, 15}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\U000bcdef"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\U000bcdef"},
		{Position{1, 19}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\U000BCDEF"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\U000BCDEF"},
		{Position{1, 19}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\u2"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unfinished unicode escape"},
	})
	testFlow(t, `foo = "\U2"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unfinished unicode escape"},
	})
}

func TestKeyEqualStringNoEscape(t *testing.T) {
	testFlow(t, "foo = \"hello \u0002\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unescaped control character U+0002"},
	})
	testFlow(t, "foo = \"hello \u001F\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unescaped control character U+001F"},
	})
}

func TestLiteralString(t *testing.T) {
	testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
		{Position{1, 34}, tokenEOF, ""},
	})
	testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
		{Position{1, 35}, tokenEOF, ""},
	})
	testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
		{Position{1, 34}, tokenEOF, ""},
	})
	testFlow(t, `foo = '<\i\c*\s*>'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `<\i\c*\s*>`},
		{Position{1, 19}, tokenEOF, ""},
	})
	testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unclosed string"},
	})
}

func TestMultilineLiteralString(t *testing.T) {
	testFlow(t, `foo = '''hello 'literal' world'''`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, `hello 'literal' world`},
		{Position{1, 34}, tokenEOF, ""},
	})

	testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
		{Position{4, 9}, tokenEOF, ""},
	})
	testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
		{Position{4, 9}, tokenEOF, ""},
	})
}

func TestMultilineString(t *testing.T) {
	testFlow(t, `foo = """hello "literal" world"""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, `hello "literal" world`},
		{Position{1, 34}, tokenEOF, ""},
	})

	testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\"literal\"world"},
		{Position{4, 9}, tokenEOF, ""},
	})

	testFlow(t, "foo = \"\"\"\\\n    \\\n    \\\n    hello\\\nmultiline\\\nworld\"\"\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, "hellomultilineworld"},
		{Position{6, 9}, tokenEOF, ""},
	})

	testFlow(t, `foo = """hello	world"""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, "hello\tworld"},
		{Position{1, 24}, tokenEOF, ""},
	})

	testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n  fox jumps over \\\n    the lazy dog.\"\"\"", []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
		{Position{6, 21}, tokenEOF, ""},
	})

	testFlow(t, "key2 = \"\"\"\\\n       The quick brown \\\n       fox jumps over \\\n       the lazy dog.\\\n       \"\"\"", []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
		{Position{5, 11}, tokenEOF, ""},
	})

	testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
		{Position{1, 41}, tokenEOF, ""},
	})

	testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
		{Position{3, 20}, tokenEOF, ""},
	})
}

func TestUnicodeString(t *testing.T) {
	testFlow(t, `foo = "hello ♥ world"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello ♥ world"},
		{Position{1, 22}, tokenEOF, ""},
	})
}

func TestEscapeInString(t *testing.T) {
	testFlow(t, `foo = "\b\f\/"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\b\f/"},
		{Position{1, 15}, tokenEOF, ""},
	})
}

func TestTabInString(t *testing.T) {
	testFlow(t, `foo = "hello	world"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello\tworld"},
		{Position{1, 20}, tokenEOF, ""},
	})
}

func TestKeyGroupArray(t *testing.T) {
	testFlow(t, "[[foo]]", []token{
		{Position{1, 1}, tokenDoubleLeftBracket, "[["},
		{Position{1, 3}, tokenKeyGroupArray, "foo"},
		{Position{1, 6}, tokenDoubleRightBracket, "]]"},
		{Position{1, 8}, tokenEOF, ""},
	})
}

func TestQuotedKey(t *testing.T) {
	testFlow(t, "\"a b\" = 42", []token{
		{Position{1, 1}, tokenKey, "\"a b\""},
		{Position{1, 7}, tokenEqual, "="},
		{Position{1, 9}, tokenInteger, "42"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestQuotedKeyTab(t *testing.T) {
	testFlow(t, "\"num\tber\" = 123", []token{
		{Position{1, 1}, tokenKey, "\"num\tber\""},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "123"},
		{Position{1, 16}, tokenEOF, ""},
	})
}

func TestKeyNewline(t *testing.T) {
	testFlow(t, "a\n= 4", []token{
		{Position{1, 1}, tokenError, "keys cannot contain new lines"},
	})
}

func TestInvalidFloat(t *testing.T) {
	testFlow(t, "a=7e1_", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 2}, tokenEqual, "="},
		{Position{1, 3}, tokenFloat, "7e1_"},
		{Position{1, 7}, tokenEOF, ""},
	})
}

func TestLexUnknownRvalue(t *testing.T) {
	testFlow(t, `a = !b`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenError, "no value can start with !"},
	})

	testFlow(t, `a = \b`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenError, `no value can start with \`},
	})
}

func TestLexInlineTableEmpty(t *testing.T) {
	testFlow(t, `foo = {}`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 8}, tokenRightCurlyBrace, "}"},
		{Position{1, 9}, tokenEOF, ""},
	})
}

func TestLexInlineTableBareKey(t *testing.T) {
	testFlow(t, `foo = { bar = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "bar"},
		{Position{1, 13}, tokenEqual, "="},
		{Position{1, 16}, tokenString, "baz"},
		{Position{1, 21}, tokenRightCurlyBrace, "}"},
		{Position{1, 22}, tokenEOF, ""},
	})
}

func TestLexInlineTableBareKeyDash(t *testing.T) {
	testFlow(t, `foo = { -bar = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "-bar"},
		{Position{1, 14}, tokenEqual, "="},
		{Position{1, 17}, tokenString, "baz"},
		{Position{1, 22}, tokenRightCurlyBrace, "}"},
		{Position{1, 23}, tokenEOF, ""},
	})
}

func TestLexInlineTableBareKeyInArray(t *testing.T) {
	testFlow(t, `foo = [{ -bar_ = "baz" }]`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenLeftCurlyBrace, "{"},
		{Position{1, 10}, tokenKey, "-bar_"},
		{Position{1, 16}, tokenEqual, "="},
		{Position{1, 19}, tokenString, "baz"},
		{Position{1, 24}, tokenRightCurlyBrace, "}"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenEOF, ""},
	})
}

func TestLexInlineTableError1(t *testing.T) {
	testFlow(t, `foo = { 123 = 0 ]`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "123"},
		{Position{1, 13}, tokenEqual, "="},
		{Position{1, 15}, tokenInteger, "0"},
		{Position{1, 17}, tokenRightBracket, "]"},
		{Position{1, 18}, tokenError, "cannot have ']' here"},
	})
}

func TestLexInlineTableError2(t *testing.T) {
	testFlow(t, `foo = { 123 = 0 }}`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "123"},
		{Position{1, 13}, tokenEqual, "="},
		{Position{1, 15}, tokenInteger, "0"},
		{Position{1, 17}, tokenRightCurlyBrace, "}"},
		{Position{1, 18}, tokenRightCurlyBrace, "}"},
		{Position{1, 19}, tokenError, "cannot have '}' here"},
	})
}

func TestLexInlineTableDottedKey1(t *testing.T) {
	testFlow(t, `foo = { a = 0, 123.45abc = 0 }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "a"},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "0"},
		{Position{1, 14}, tokenComma, ","},
		{Position{1, 16}, tokenKey, "123.45abc"},
		{Position{1, 26}, tokenEqual, "="},
		{Position{1, 28}, tokenInteger, "0"},
		{Position{1, 30}, tokenRightCurlyBrace, "}"},
		{Position{1, 31}, tokenEOF, ""},
	})
}

func TestLexInlineTableDottedKey2(t *testing.T) {
	testFlow(t, `foo = { a = 0, '123'.'45abc' = 0 }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "a"},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "0"},
		{Position{1, 14}, tokenComma, ","},
		{Position{1, 16}, tokenKey, "'123'.'45abc'"},
		{Position{1, 30}, tokenEqual, "="},
		{Position{1, 32}, tokenInteger, "0"},
		{Position{1, 34}, tokenRightCurlyBrace, "}"},
		{Position{1, 35}, tokenEOF, ""},
	})
}

func TestLexInlineTableDottedKey3(t *testing.T) {
	testFlow(t, `foo = { a = 0, "123"."45ʎǝʞ" = 0 }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "a"},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "0"},
		{Position{1, 14}, tokenComma, ","},
		{Position{1, 16}, tokenKey, `"123"."45ʎǝʞ"`},
		{Position{1, 30}, tokenEqual, "="},
		{Position{1, 32}, tokenInteger, "0"},
		{Position{1, 34}, tokenRightCurlyBrace, "}"},
		{Position{1, 35}, tokenEOF, ""},
	})
}

func TestLexInlineTableBareKeyWithComma(t *testing.T) {
	testFlow(t, `foo = { -bar1 = "baz", -bar_ = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "-bar1"},
		{Position{1, 15}, tokenEqual, "="},
		{Position{1, 18}, tokenString, "baz"},
		{Position{1, 22}, tokenComma, ","},
		{Position{1, 24}, tokenKey, "-bar_"},
		{Position{1, 30}, tokenEqual, "="},
		{Position{1, 33}, tokenString, "baz"},
		{Position{1, 38}, tokenRightCurlyBrace, "}"},
		{Position{1, 39}, tokenEOF, ""},
	})
}

func TestLexInlineTableBareKeyUnderscore(t *testing.T) {
	testFlow(t, `foo = { _bar = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "_bar"},
		{Position{1, 14}, tokenEqual, "="},
		{Position{1, 17}, tokenString, "baz"},
		{Position{1, 22}, tokenRightCurlyBrace, "}"},
		{Position{1, 23}, tokenEOF, ""},
	})
}

func TestLexInlineTableQuotedKey(t *testing.T) {
	testFlow(t, `foo = { "bar" = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "\"bar\""},
		{Position{1, 15}, tokenEqual, "="},
		{Position{1, 18}, tokenString, "baz"},
		{Position{1, 23}, tokenRightCurlyBrace, "}"},
		{Position{1, 24}, tokenEOF, ""},
	})
}

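// BenchmarkLexer measures lexToml on a small, representative TOML document.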
func BenchmarkLexer(b *testing.B) {
	sample := `title = "Hugo: A Fast and Flexible Website Generator"
baseurl = "http://gohugo.io/"
MetaDataFormat = "yaml"
pluralizeListTitles = false

[params]
  description = "Documentation of Hugo, a fast and flexible static site generator built with love by spf13, bep and friends in Go"
  author = "Steve Francia (spf13) and friends"
  release = "0.22-DEV"

[[menu.main]]
	name = "Download Hugo"
	pre = "<i class='fa fa-download'></i>"
	url = "https://github.com/spf13/hugo/releases"
	weight = -200
`
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		lexToml([]byte(sample))
	}
}