1package toml
2
3import (
4	"reflect"
5	"testing"
6)
7
8func testFlow(t *testing.T, input string, expectedFlow []token) {
9	tokens := lexToml([]byte(input))
10	if !reflect.DeepEqual(tokens, expectedFlow) {
11		t.Fatalf("Different flows.\nExpected:\n%v\nGot:\n%v", expectedFlow, tokens)
12	}
13}
14
// TestValidKeyGroup lexes a bare table header "[hello world]" into
// bracket, key-group, and EOF tokens with exact positions.
func TestValidKeyGroup(t *testing.T) {
	testFlow(t, "[hello world]", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{1, 14}, tokenEOF, ""},
	})
}
23
// TestNestedQuotedUnicodeKeyGroup checks that a dotted table header mixing
// bare, basic-quoted, and literal-quoted unicode parts is kept verbatim
// (including surrounding spaces) in a single key-group token.
func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
	testFlow(t, `[ j . "ʞ" . l . 'ɯ' ]`, []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l . 'ɯ' `},
		{Position{1, 21}, tokenRightBracket, "]"},
		{Position{1, 22}, tokenEOF, ""},
	})
}
32
// TestNestedQuotedUnicodeKeyAssign checks the same dotted unicode key used
// on the left of an assignment: leading/trailing spaces are trimmed from
// the key token but interior spacing is preserved.
func TestNestedQuotedUnicodeKeyAssign(t *testing.T) {
	testFlow(t, ` j . "ʞ" . l . 'ɯ' = 3`, []token{
		{Position{1, 2}, tokenKey, `j . "ʞ" . l . 'ɯ'`},
		{Position{1, 20}, tokenEqual, "="},
		{Position{1, 22}, tokenInteger, "3"},
		{Position{1, 23}, tokenEOF, ""},
	})
}
41
// TestUnclosedKeyGroup checks that a table header missing its closing
// bracket produces an error token (and no EOF token after it).
func TestUnclosedKeyGroup(t *testing.T) {
	testFlow(t, "[hello world", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenError, "unclosed table key"},
	})
}
48
// TestComment checks that a comment-only line produces no tokens besides
// EOF, positioned just past the comment text.
func TestComment(t *testing.T) {
	testFlow(t, "# blahblah", []token{
		{Position{1, 11}, tokenEOF, ""},
	})
}
54
// TestKeyGroupComment checks that a trailing comment after a table header
// is discarded; EOF lands at the comment's start column.
func TestKeyGroupComment(t *testing.T) {
	testFlow(t, "[hello world] # blahblah", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{1, 25}, tokenEOF, ""},
	})
}
63
// TestMultipleKeyGroupsComment checks that a comment after one table
// header does not swallow the following line's header, and that line
// numbers advance across the newline.
func TestMultipleKeyGroupsComment(t *testing.T) {
	testFlow(t, "[hello world] # blahblah\n[test]", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{2, 1}, tokenLeftBracket, "["},
		{Position{2, 2}, tokenKeyGroup, "test"},
		{Position{2, 6}, tokenRightBracket, "]"},
		{Position{2, 7}, tokenEOF, ""},
	})
}
75
// TestSimpleWindowsCRLF checks that a CRLF line ending is treated as a
// single line break: the second assignment starts at line 2, column 1.
func TestSimpleWindowsCRLF(t *testing.T) {
	testFlow(t, "a=4\r\nb=2", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 2}, tokenEqual, "="},
		{Position{1, 3}, tokenInteger, "4"},
		{Position{2, 1}, tokenKey, "b"},
		{Position{2, 2}, tokenEqual, "="},
		{Position{2, 3}, tokenInteger, "2"},
		{Position{2, 4}, tokenEOF, ""},
	})
}
87
// TestBasicKey lexes a bare key with no assignment.
func TestBasicKey(t *testing.T) {
	testFlow(t, "hello", []token{
		{Position{1, 1}, tokenKey, "hello"},
		{Position{1, 6}, tokenEOF, ""},
	})
}
94
// TestBasicKeyWithUnderscore checks that underscores are legal in bare keys.
func TestBasicKeyWithUnderscore(t *testing.T) {
	testFlow(t, "hello_hello", []token{
		{Position{1, 1}, tokenKey, "hello_hello"},
		{Position{1, 12}, tokenEOF, ""},
	})
}
101
// TestBasicKeyWithDash checks that dashes are legal in bare keys.
func TestBasicKeyWithDash(t *testing.T) {
	testFlow(t, "hello-world", []token{
		{Position{1, 1}, tokenKey, "hello-world"},
		{Position{1, 12}, tokenEOF, ""},
	})
}
108
// TestBasicKeyWithUppercaseMix checks that mixed-case bare keys lex intact.
func TestBasicKeyWithUppercaseMix(t *testing.T) {
	testFlow(t, "helloHELLOHello", []token{
		{Position{1, 1}, tokenKey, "helloHELLOHello"},
		{Position{1, 16}, tokenEOF, ""},
	})
}
115
// TestBasicKeyWithInternationalCharacters checks that a literal-quoted key
// with non-ASCII letters is kept verbatim, quotes included, and that the
// EOF column counts runes rather than bytes (column 8 for 7 runes).
func TestBasicKeyWithInternationalCharacters(t *testing.T) {
	testFlow(t, "'héllÖ'", []token{
		{Position{1, 1}, tokenKey, "'héllÖ'"},
		{Position{1, 8}, tokenEOF, ""},
	})
}
122
// TestBasicKeyAndEqual lexes a key followed by '=' with no value.
func TestBasicKeyAndEqual(t *testing.T) {
	testFlow(t, "hello =", []token{
		{Position{1, 1}, tokenKey, "hello"},
		{Position{1, 7}, tokenEqual, "="},
		{Position{1, 8}, tokenEOF, ""},
	})
}
130
// TestKeyWithSharpAndEqual checks that '#' inside a bare key is rejected
// with a specific error token.
func TestKeyWithSharpAndEqual(t *testing.T) {
	testFlow(t, "key#name = 5", []token{
		{Position{1, 1}, tokenError, "keys cannot contain # character"},
	})
}
136
// TestKeyWithSymbolsAndEqual checks that the first illegal symbol in a
// bare key ('~' here) is the one reported in the error.
func TestKeyWithSymbolsAndEqual(t *testing.T) {
	testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
		{Position{1, 1}, tokenError, "keys cannot contain ~ character"},
	})
}
142
// TestKeyEqualStringEscape checks that \" inside a basic string is decoded
// to a literal quote in the string token's value.
func TestKeyEqualStringEscape(t *testing.T) {
	testFlow(t, `foo = "hello\""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello\""},
		{Position{1, 16}, tokenEOF, ""},
	})
}
151
// TestKeyEqualStringUnfinished checks that an unterminated basic string
// yields an error token at the string's content position.
func TestKeyEqualStringUnfinished(t *testing.T) {
	testFlow(t, `foo = "bar`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unclosed string"},
	})
}
159
// TestKeyEqualString lexes a plain key = "string" assignment; the string
// token's value excludes the surrounding quotes.
func TestKeyEqualString(t *testing.T) {
	testFlow(t, `foo = "bar"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "bar"},
		{Position{1, 12}, tokenEOF, ""},
	})
}
168
// TestKeyEqualTrue checks that the bare word true lexes as a boolean token.
func TestKeyEqualTrue(t *testing.T) {
	testFlow(t, "foo = true", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenTrue, "true"},
		{Position{1, 11}, tokenEOF, ""},
	})
}
177
// TestKeyEqualFalse checks that the bare word false lexes as a boolean token.
func TestKeyEqualFalse(t *testing.T) {
	testFlow(t, "foo = false", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFalse, "false"},
		{Position{1, 12}, tokenEOF, ""},
	})
}
186
// TestArrayNestedString lexes an array containing one nested array of
// strings, checking bracket pairing, comma placement, and positions.
func TestArrayNestedString(t *testing.T) {
	testFlow(t, `a = [ ["hello", "world"] ]`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 9}, tokenString, "hello"},
		{Position{1, 15}, tokenComma, ","},
		{Position{1, 18}, tokenString, "world"},
		{Position{1, 24}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenRightBracket, "]"},
		{Position{1, 27}, tokenEOF, ""},
	})
}
201
// TestArrayNestedInts lexes an array of two integer sub-arrays, checking
// that commas separate both elements and sub-arrays.
func TestArrayNestedInts(t *testing.T) {
	testFlow(t, "a = [ [42, 21], [10] ]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenInteger, "42"},
		{Position{1, 10}, tokenComma, ","},
		{Position{1, 12}, tokenInteger, "21"},
		{Position{1, 14}, tokenRightBracket, "]"},
		{Position{1, 15}, tokenComma, ","},
		{Position{1, 17}, tokenLeftBracket, "["},
		{Position{1, 18}, tokenInteger, "10"},
		{Position{1, 20}, tokenRightBracket, "]"},
		{Position{1, 22}, tokenRightBracket, "]"},
		{Position{1, 23}, tokenEOF, ""},
	})
}
220
// TestArrayInts lexes a flat integer array with a trailing comma, which
// TOML permits before the closing bracket.
func TestArrayInts(t *testing.T) {
	testFlow(t, "a = [ 42, 21, 10, ]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{1, 9}, tokenComma, ","},
		{Position{1, 11}, tokenInteger, "21"},
		{Position{1, 13}, tokenComma, ","},
		{Position{1, 15}, tokenInteger, "10"},
		{Position{1, 17}, tokenComma, ","},
		{Position{1, 19}, tokenRightBracket, "]"},
		{Position{1, 20}, tokenEOF, ""},
	})
}
236
// TestMultilineArrayComments checks that end-of-line comments inside a
// multi-line array are dropped and elements keep per-line positions.
func TestMultilineArrayComments(t *testing.T) {
	testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 6}, tokenInteger, "1"},
		{Position{1, 7}, tokenComma, ","},
		{Position{2, 1}, tokenInteger, "2"},
		{Position{2, 2}, tokenComma, ","},
		{Position{3, 1}, tokenInteger, "3"},
		{Position{3, 2}, tokenComma, ","},
		{Position{4, 1}, tokenRightBracket, "]"},
		{Position{4, 2}, tokenEOF, ""},
	})
}
252
// TestNestedArraysComment checks that a full-line comment inside an array
// is skipped entirely; the nested array that follows starts at line 4.
func TestNestedArraysComment(t *testing.T) {
	toml := `
someArray = [
# does not work
["entry1"]
]`
	testFlow(t, toml, []token{
		{Position{2, 1}, tokenKey, "someArray"},
		{Position{2, 11}, tokenEqual, "="},
		{Position{2, 13}, tokenLeftBracket, "["},
		{Position{4, 1}, tokenLeftBracket, "["},
		{Position{4, 3}, tokenString, "entry1"},
		{Position{4, 10}, tokenRightBracket, "]"},
		{Position{5, 1}, tokenRightBracket, "]"},
		{Position{5, 2}, tokenEOF, ""},
	})
}
270
// TestKeyEqualArrayBools lexes an array of boolean literals.
func TestKeyEqualArrayBools(t *testing.T) {
	testFlow(t, "foo = [true, false, true]", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenTrue, "true"},
		{Position{1, 12}, tokenComma, ","},
		{Position{1, 14}, tokenFalse, "false"},
		{Position{1, 19}, tokenComma, ","},
		{Position{1, 21}, tokenTrue, "true"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenEOF, ""},
	})
}
285
// TestKeyEqualArrayBoolsWithComments is the same as TestKeyEqualArrayBools
// plus a trailing comment; only the EOF column moves (past the comment).
func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
	testFlow(t, "foo = [true, false, true] # YEAH", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenTrue, "true"},
		{Position{1, 12}, tokenComma, ","},
		{Position{1, 14}, tokenFalse, "false"},
		{Position{1, 19}, tokenComma, ","},
		{Position{1, 21}, tokenTrue, "true"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 33}, tokenEOF, ""},
	})
}
300
301func TestDateRegexp(t *testing.T) {
302	cases := map[string]string{
303		"basic":               "1979-05-27T07:32:00Z",
304		"offset":              "1979-05-27T00:32:00-07:00",
305		"nano precision":      "1979-05-27T00:32:00.999999-07:00",
306		"basic-no-T":          "1979-05-27 07:32:00Z",
307		"offset-no-T":         "1979-05-27 00:32:00-07:00",
308		"nano precision-no-T": "1979-05-27 00:32:00.999999-07:00",
309		"no-tz":               "1979-05-27T07:32:00",
310		"no-tz-nano":          "1979-05-27T00:32:00.999999",
311		"no-tz-no-t":          "1979-05-27 07:32:00",
312		"no-tz-no-t-nano":     "1979-05-27 00:32:00.999999",
313		"date-no-tz":          "1979-05-27",
314		"time-no-tz":          "07:32:00",
315		"time-no-tz-nano":     "00:32:00.999999",
316	}
317
318	for name, value := range cases {
319		if dateRegexp.FindString(value) == "" {
320			t.Error("failed date regexp test", name)
321		}
322	}
323	if dateRegexp.FindString("1979-05-27 07:32:00Z") == "" {
324		t.Error("space delimiter lexing")
325	}
326}
327
// TestKeyEqualDate lexes datetime values in four shapes: Zulu, numeric
// offset, fractional seconds with offset, and space-delimited Zulu. The
// whole datetime becomes a single date token.
func TestKeyEqualDate(t *testing.T) {
	testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
		{Position{1, 27}, tokenEOF, ""},
	})
	testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
		{Position{1, 32}, tokenEOF, ""},
	})
	testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
		{Position{1, 39}, tokenEOF, ""},
	})
	testFlow(t, "foo = 1979-05-27 07:32:00Z", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27 07:32:00Z"},
		{Position{1, 27}, tokenEOF, ""},
	})
}
354
// TestFloatEndingWithDot checks that "42." is rejected: a float must have
// digits on both sides of the dot.
func TestFloatEndingWithDot(t *testing.T) {
	testFlow(t, "foo = 42.", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "float cannot end with a dot"},
	})
}
362
// TestFloatWithTwoDots checks that a second dot in a float is an error.
func TestFloatWithTwoDots(t *testing.T) {
	testFlow(t, "foo = 4.2.", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "cannot have two dots in one float"},
	})
}
370
// TestFloatWithExponent1 checks lowercase 'e' exponent notation.
func TestFloatWithExponent1(t *testing.T) {
	testFlow(t, "a = 5e+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "5e+22"},
		{Position{1, 10}, tokenEOF, ""},
	})
}
379
// TestFloatWithExponent2 checks uppercase 'E' exponent notation.
func TestFloatWithExponent2(t *testing.T) {
	testFlow(t, "a = 5E+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "5E+22"},
		{Position{1, 10}, tokenEOF, ""},
	})
}
388
// TestFloatWithExponent3 checks a negative mantissa with positive exponent.
func TestFloatWithExponent3(t *testing.T) {
	testFlow(t, "a = -5e+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "-5e+22"},
		{Position{1, 11}, tokenEOF, ""},
	})
}
397
// TestFloatWithExponent4 checks a negative mantissa with negative exponent.
func TestFloatWithExponent4(t *testing.T) {
	testFlow(t, "a = -5e-22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "-5e-22"},
		{Position{1, 11}, tokenEOF, ""},
	})
}
406
// TestFloatWithExponent5 checks a fractional mantissa combined with an
// exponent.
func TestFloatWithExponent5(t *testing.T) {
	testFlow(t, "a = 6.626e-34", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "6.626e-34"},
		{Position{1, 14}, tokenEOF, ""},
	})
}
415
416func TestInvalidEsquapeSequence(t *testing.T) {
417	testFlow(t, `foo = "\x"`, []token{
418		{Position{1, 1}, tokenKey, "foo"},
419		{Position{1, 5}, tokenEqual, "="},
420		{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
421	})
422}
423
// TestNestedArrays lexes triply-nested empty arrays, checking that bracket
// tokens pair up correctly with no intervening value tokens.
func TestNestedArrays(t *testing.T) {
	testFlow(t, "foo = [[[]]]", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenLeftBracket, "["},
		{Position{1, 9}, tokenLeftBracket, "["},
		{Position{1, 10}, tokenRightBracket, "]"},
		{Position{1, 11}, tokenRightBracket, "]"},
		{Position{1, 12}, tokenRightBracket, "]"},
		{Position{1, 13}, tokenEOF, ""},
	})
}
437
// TestKeyEqualNumber covers numeric value lexing: signed/unsigned integers
// and floats, underscore digit separators, and the error for a bare sign
// with no digits. Signs and underscores are preserved in the token value.
func TestKeyEqualNumber(t *testing.T) {
	testFlow(t, "foo = 42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{1, 9}, tokenEOF, ""},
	})

	testFlow(t, "foo = +42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "+42"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = -42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "-42"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = 4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "4.2"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = +4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "+4.2"},
		{Position{1, 11}, tokenEOF, ""},
	})

	testFlow(t, "foo = -4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "-4.2"},
		{Position{1, 11}, tokenEOF, ""},
	})

	// Underscore separators stay in the raw token value; conversion to a
	// numeric value happens later, outside the lexer.
	testFlow(t, "foo = 1_000", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "1_000"},
		{Position{1, 12}, tokenEOF, ""},
	})

	testFlow(t, "foo = 5_349_221", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "5_349_221"},
		{Position{1, 16}, tokenEOF, ""},
	})

	testFlow(t, "foo = 1_2_3_4_5", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
		{Position{1, 16}, tokenEOF, ""},
	})

	testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
		{Position{1, 1}, tokenKey, "flt8"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
		{Position{1, 33}, tokenEOF, ""},
	})

	// A sign with no digits is a lexing error.
	testFlow(t, "foo = +", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "no digit in that number"},
	})
}
515
// TestMultiline checks two assignments on consecutive lines; the second
// line's tokens restart at column 1 of line 2.
func TestMultiline(t *testing.T) {
	testFlow(t, "foo = 42\nbar=21", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{2, 1}, tokenKey, "bar"},
		{Position{2, 4}, tokenEqual, "="},
		{Position{2, 5}, tokenInteger, "21"},
		{Position{2, 7}, tokenEOF, ""},
	})
}
527
528func TestKeyEqualStringUnicodeEscape(t *testing.T) {
529	testFlow(t, `foo = "hello \u2665"`, []token{
530		{Position{1, 1}, tokenKey, "foo"},
531		{Position{1, 5}, tokenEqual, "="},
532		{Position{1, 8}, tokenString, "hello"},
533		{Position{1, 21}, tokenEOF, ""},
534	})
535	testFlow(t, `foo = "hello \U000003B4"`, []token{
536		{Position{1, 1}, tokenKey, "foo"},
537		{Position{1, 5}, tokenEqual, "="},
538		{Position{1, 8}, tokenString, "hello δ"},
539		{Position{1, 25}, tokenEOF, ""},
540	})
541	testFlow(t, `foo = "\uabcd"`, []token{
542		{Position{1, 1}, tokenKey, "foo"},
543		{Position{1, 5}, tokenEqual, "="},
544		{Position{1, 8}, tokenString, "\uabcd"},
545		{Position{1, 15}, tokenEOF, ""},
546	})
547	testFlow(t, `foo = "\uABCD"`, []token{
548		{Position{1, 1}, tokenKey, "foo"},
549		{Position{1, 5}, tokenEqual, "="},
550		{Position{1, 8}, tokenString, "\uABCD"},
551		{Position{1, 15}, tokenEOF, ""},
552	})
553	testFlow(t, `foo = "\U000bcdef"`, []token{
554		{Position{1, 1}, tokenKey, "foo"},
555		{Position{1, 5}, tokenEqual, "="},
556		{Position{1, 8}, tokenString, "\U000bcdef"},
557		{Position{1, 19}, tokenEOF, ""},
558	})
559	testFlow(t, `foo = "\U000BCDEF"`, []token{
560		{Position{1, 1}, tokenKey, "foo"},
561		{Position{1, 5}, tokenEqual, "="},
562		{Position{1, 8}, tokenString, "\U000BCDEF"},
563		{Position{1, 19}, tokenEOF, ""},
564	})
565	testFlow(t, `foo = "\u2"`, []token{
566		{Position{1, 1}, tokenKey, "foo"},
567		{Position{1, 5}, tokenEqual, "="},
568		{Position{1, 8}, tokenError, "unfinished unicode escape"},
569	})
570	testFlow(t, `foo = "\U2"`, []token{
571		{Position{1, 1}, tokenKey, "foo"},
572		{Position{1, 5}, tokenEqual, "="},
573		{Position{1, 8}, tokenError, "unfinished unicode escape"},
574	})
575}
576
// TestKeyEqualStringNoEscape checks that raw control characters (U+0002
// and U+001F, the boundary of the control range) inside a basic string are
// rejected with an error naming the offending code point.
func TestKeyEqualStringNoEscape(t *testing.T) {
	testFlow(t, "foo = \"hello \u0002\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unescaped control character U+0002"},
	})
	testFlow(t, "foo = \"hello \u001F\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unescaped control character U+001F"},
	})
}
589
// TestLiteralString checks single-quoted literal strings: backslashes and
// double quotes pass through undecoded, and an unterminated literal string
// is an error.
func TestLiteralString(t *testing.T) {
	testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
		{Position{1, 34}, tokenEOF, ""},
	})
	testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
		{Position{1, 35}, tokenEOF, ""},
	})
	testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
		{Position{1, 34}, tokenEOF, ""},
	})
	testFlow(t, `foo = '<\i\c*\s*>'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `<\i\c*\s*>`},
		{Position{1, 19}, tokenEOF, ""},
	})
	// Missing the closing quote.
	testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unclosed string"},
	})
}
621
// TestMultilineLiteralString checks '''...''' strings: embedded single
// quotes are allowed, a newline immediately after the opening delimiter is
// trimmed, and CRLF line endings are preserved inside the value.
func TestMultilineLiteralString(t *testing.T) {
	testFlow(t, `foo = '''hello 'literal' world'''`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, `hello 'literal' world`},
		{Position{1, 34}, tokenEOF, ""},
	})

	// Leading newline after ''' is trimmed; the string token starts on line 2.
	testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
		{Position{4, 9}, tokenEOF, ""},
	})
	// CRLF variant: interior \r\n sequences are kept verbatim.
	testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
		{Position{4, 9}, tokenEOF, ""},
	})
}
643
// TestMultilineString checks """...""" basic strings: embedded quotes,
// line-ending backslashes that swallow the newline and following
// whitespace, literal tabs, blank-line handling after a trailing
// backslash, and the trimmed newline right after the opening delimiter.
func TestMultilineString(t *testing.T) {
	testFlow(t, `foo = """hello "literal" world"""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, `hello "literal" world`},
		{Position{1, 34}, tokenEOF, ""},
	})

	// Trailing backslash joins lines, eating the newline (CRLF or LF).
	testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\"literal\"world"},
		{Position{4, 9}, tokenEOF, ""},
	})

	// Backslash also eats the following line's leading whitespace.
	testFlow(t, "foo = \"\"\"\\\n    \\\n    \\\n    hello\\\nmultiline\\\nworld\"\"\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, "hellomultilineworld"},
		{Position{6, 9}, tokenEOF, ""},
	})

	// A raw tab inside a multi-line basic string is legal.
	testFlow(t, `foo = """hello	world"""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, "hello\tworld"},
		{Position{1, 24}, tokenEOF, ""},
	})

	// Blank lines after a trailing backslash are swallowed too.
	testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n  fox jumps over \\\n    the lazy dog.\"\"\"", []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
		{Position{6, 21}, tokenEOF, ""},
	})

	testFlow(t, "key2 = \"\"\"\\\n       The quick brown \\\n       fox jumps over \\\n       the lazy dog.\\\n       \"\"\"", []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
		{Position{5, 11}, tokenEOF, ""},
	})

	// \n inside a one-line basic string is decoded to a real newline.
	testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
		{Position{1, 41}, tokenEOF, ""},
	})

	// Interior newlines (no trailing backslash) are preserved.
	testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
		{Position{1, 1}, tokenKey, "key2"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
		{Position{3, 20}, tokenEOF, ""},
	})
}
701
// TestUnicodeString checks that a raw multi-byte rune inside a basic
// string is preserved and that columns count runes, not bytes.
func TestUnicodeString(t *testing.T) {
	testFlow(t, `foo = "hello ♥ world"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello ♥ world"},
		{Position{1, 22}, tokenEOF, ""},
	})
}
710
// TestEscapeInString checks that \b, \f and \/ escapes decode to
// backspace, form feed, and a plain slash respectively.
func TestEscapeInString(t *testing.T) {
	testFlow(t, `foo = "\b\f\/"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\b\f/"},
		{Position{1, 15}, tokenEOF, ""},
	})
}
719
// TestTabInString checks that a raw (unescaped) tab is accepted inside a
// one-line basic string.
func TestTabInString(t *testing.T) {
	testFlow(t, `foo = "hello	world"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello\tworld"},
		{Position{1, 20}, tokenEOF, ""},
	})
}
728
// TestKeyGroupArray checks that [[...]] lexes as double-bracket tokens
// around a key-group-array token (array-of-tables header).
func TestKeyGroupArray(t *testing.T) {
	testFlow(t, "[[foo]]", []token{
		{Position{1, 1}, tokenDoubleLeftBracket, "[["},
		{Position{1, 3}, tokenKeyGroupArray, "foo"},
		{Position{1, 6}, tokenDoubleRightBracket, "]]"},
		{Position{1, 8}, tokenEOF, ""},
	})
}
737
// TestQuotedKey checks that a quoted key containing a space keeps its
// quotes in the key token's value.
func TestQuotedKey(t *testing.T) {
	testFlow(t, "\"a b\" = 42", []token{
		{Position{1, 1}, tokenKey, "\"a b\""},
		{Position{1, 7}, tokenEqual, "="},
		{Position{1, 9}, tokenInteger, "42"},
		{Position{1, 11}, tokenEOF, ""},
	})
}
746
// TestQuotedKeyTab checks that a raw tab is accepted inside a quoted key
// and counts as a single column for position tracking.
func TestQuotedKeyTab(t *testing.T) {
	testFlow(t, "\"num\tber\" = 123", []token{
		{Position{1, 1}, tokenKey, "\"num\tber\""},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "123"},
		{Position{1, 16}, tokenEOF, ""},
	})
}
755
// TestKeyNewline checks that a newline between a key and '=' is an error.
func TestKeyNewline(t *testing.T) {
	testFlow(t, "a\n= 4", []token{
		{Position{1, 1}, tokenError, "keys cannot contain new lines"},
	})
}
761
// TestInvalidFloat documents current behavior for "7e1_": despite the test
// name, the lexer emits a float token with the trailing underscore intact
// rather than an error.
// NOTE(review): rejection of the malformed separator presumably happens in
// a later parsing stage — confirm against the parser before relying on it.
func TestInvalidFloat(t *testing.T) {
	testFlow(t, "a=7e1_", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 2}, tokenEqual, "="},
		{Position{1, 3}, tokenFloat, "7e1_"},
		{Position{1, 7}, tokenEOF, ""},
	})
}
770
// TestLexUnknownRvalue checks that characters that cannot begin any TOML
// value ('!' and '\') are reported with the offending character named.
func TestLexUnknownRvalue(t *testing.T) {
	testFlow(t, `a = !b`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenError, "no value can start with !"},
	})

	testFlow(t, `a = \b`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenError, `no value can start with \`},
	})
}
784
// TestLexInlineTableEmpty lexes an empty inline table {}.
func TestLexInlineTableEmpty(t *testing.T) {
	testFlow(t, `foo = {}`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 8}, tokenRightCurlyBrace, "}"},
		{Position{1, 9}, tokenEOF, ""},
	})
}
794
// TestLexInlineTableBareKey lexes an inline table with a single bare-key
// assignment.
func TestLexInlineTableBareKey(t *testing.T) {
	testFlow(t, `foo = { bar = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "bar"},
		{Position{1, 13}, tokenEqual, "="},
		{Position{1, 16}, tokenString, "baz"},
		{Position{1, 21}, tokenRightCurlyBrace, "}"},
		{Position{1, 22}, tokenEOF, ""},
	})
}
807
// TestLexInlineTableBareKeyDash checks that a bare key may start with a
// dash inside an inline table.
func TestLexInlineTableBareKeyDash(t *testing.T) {
	testFlow(t, `foo = { -bar = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "-bar"},
		{Position{1, 14}, tokenEqual, "="},
		{Position{1, 17}, tokenString, "baz"},
		{Position{1, 22}, tokenRightCurlyBrace, "}"},
		{Position{1, 23}, tokenEOF, ""},
	})
}
820
// TestLexInlineTableBareKeyInArray lexes an inline table nested inside an
// array, with a dash/underscore bare key.
func TestLexInlineTableBareKeyInArray(t *testing.T) {
	testFlow(t, `foo = [{ -bar_ = "baz" }]`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenLeftCurlyBrace, "{"},
		{Position{1, 10}, tokenKey, "-bar_"},
		{Position{1, 16}, tokenEqual, "="},
		{Position{1, 19}, tokenString, "baz"},
		{Position{1, 24}, tokenRightCurlyBrace, "}"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenEOF, ""},
	})
}
835
// TestLexInlineTableError1 checks that ']' used to close an inline table
// is tokenized, then flagged as an error.
func TestLexInlineTableError1(t *testing.T) {
	testFlow(t, `foo = { 123 = 0 ]`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "123"},
		{Position{1, 13}, tokenEqual, "="},
		{Position{1, 15}, tokenInteger, "0"},
		{Position{1, 17}, tokenRightBracket, "]"},
		{Position{1, 18}, tokenError, "cannot have ']' here"},
	})
}
848
// TestLexInlineTableError2 checks that an extra '}' after a properly
// closed inline table is an error.
func TestLexInlineTableError2(t *testing.T) {
	testFlow(t, `foo = { 123 = 0 }}`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "123"},
		{Position{1, 13}, tokenEqual, "="},
		{Position{1, 15}, tokenInteger, "0"},
		{Position{1, 17}, tokenRightCurlyBrace, "}"},
		{Position{1, 18}, tokenRightCurlyBrace, "}"},
		{Position{1, 19}, tokenError, "cannot have '}' here"},
	})
}
862
// TestLexInlineTableDottedKey1 checks that a dotted bare key inside an
// inline table lexes as one key token including the dot.
func TestLexInlineTableDottedKey1(t *testing.T) {
	testFlow(t, `foo = { a = 0, 123.45abc = 0 }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "a"},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "0"},
		{Position{1, 14}, tokenComma, ","},
		{Position{1, 16}, tokenKey, "123.45abc"},
		{Position{1, 26}, tokenEqual, "="},
		{Position{1, 28}, tokenInteger, "0"},
		{Position{1, 30}, tokenRightCurlyBrace, "}"},
		{Position{1, 31}, tokenEOF, ""},
	})
}
879
// TestLexInlineTableDottedKey2 checks a dotted key made of literal-quoted
// parts; quotes are preserved in the key token.
func TestLexInlineTableDottedKey2(t *testing.T) {
	testFlow(t, `foo = { a = 0, '123'.'45abc' = 0 }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "a"},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "0"},
		{Position{1, 14}, tokenComma, ","},
		{Position{1, 16}, tokenKey, "'123'.'45abc'"},
		{Position{1, 30}, tokenEqual, "="},
		{Position{1, 32}, tokenInteger, "0"},
		{Position{1, 34}, tokenRightCurlyBrace, "}"},
		{Position{1, 35}, tokenEOF, ""},
	})
}
896
// TestLexInlineTableDottedKey3 checks a dotted key of basic-quoted parts
// containing non-ASCII runes; positions count runes, not bytes.
func TestLexInlineTableDottedKey3(t *testing.T) {
	testFlow(t, `foo = { a = 0, "123"."45ʎǝʞ" = 0 }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "a"},
		{Position{1, 11}, tokenEqual, "="},
		{Position{1, 13}, tokenInteger, "0"},
		{Position{1, 14}, tokenComma, ","},
		{Position{1, 16}, tokenKey, `"123"."45ʎǝʞ"`},
		{Position{1, 30}, tokenEqual, "="},
		{Position{1, 32}, tokenInteger, "0"},
		{Position{1, 34}, tokenRightCurlyBrace, "}"},
		{Position{1, 35}, tokenEOF, ""},
	})
}
913
// TestLexInlineTableBareKeyWithComma lexes an inline table with two
// comma-separated dash-prefixed bare keys.
func TestLexInlineTableBareKeyWithComma(t *testing.T) {
	testFlow(t, `foo = { -bar1 = "baz", -bar_ = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "-bar1"},
		{Position{1, 15}, tokenEqual, "="},
		{Position{1, 18}, tokenString, "baz"},
		{Position{1, 22}, tokenComma, ","},
		{Position{1, 24}, tokenKey, "-bar_"},
		{Position{1, 30}, tokenEqual, "="},
		{Position{1, 33}, tokenString, "baz"},
		{Position{1, 38}, tokenRightCurlyBrace, "}"},
		{Position{1, 39}, tokenEOF, ""},
	})
}
930
// TestLexInlineTableBareKeyUnderscore checks that a bare key may start
// with an underscore inside an inline table.
func TestLexInlineTableBareKeyUnderscore(t *testing.T) {
	testFlow(t, `foo = { _bar = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "_bar"},
		{Position{1, 14}, tokenEqual, "="},
		{Position{1, 17}, tokenString, "baz"},
		{Position{1, 22}, tokenRightCurlyBrace, "}"},
		{Position{1, 23}, tokenEOF, ""},
	})
}
943
// TestLexInlineTableQuotedKey checks that a basic-quoted key inside an
// inline table keeps its quotes in the key token.
func TestLexInlineTableQuotedKey(t *testing.T) {
	testFlow(t, `foo = { "bar" = "baz" }`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftCurlyBrace, "{"},
		{Position{1, 9}, tokenKey, "\"bar\""},
		{Position{1, 15}, tokenEqual, "="},
		{Position{1, 18}, tokenString, "baz"},
		{Position{1, 23}, tokenRightCurlyBrace, "}"},
		{Position{1, 24}, tokenEOF, ""},
	})
}
956
// BenchmarkLexer measures lexToml throughput on a realistic TOML document
// (a Hugo site configuration with tables, an array-of-tables, strings,
// booleans, and a negative integer).
func BenchmarkLexer(b *testing.B) {
	sample := `title = "Hugo: A Fast and Flexible Website Generator"
baseurl = "http://gohugo.io/"
MetaDataFormat = "yaml"
pluralizeListTitles = false

[params]
  description = "Documentation of Hugo, a fast and flexible static site generator built with love by spf13, bep and friends in Go"
  author = "Steve Francia (spf13) and friends"
  release = "0.22-DEV"

[[menu.main]]
	name = "Download Hugo"
	pre = "<i class='fa fa-download'></i>"
	url = "https://github.com/spf13/hugo/releases"
	weight = -200
`
	// Exclude sample construction from the timed region.
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		lexToml([]byte(sample))
	}
}
979