package chroma

import (
	"testing"

	"github.com/alecthomas/assert"
)

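// TestInclude verifies that the Include mutator expands the "other" state's
// rules into "root" in place, leaving the "other" state itself untouched.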
func TestInclude(t *testing.T) {
	include := Include("other")
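	// Include returns a Rule whose Mutator implements LexerMutator; it
	// rewrites the compiled rules rather than matching input itself.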
	actual := CompiledRules{
		"root": {{Rule: include}},
		"other": {
			{Rule: Rule{Pattern: "//.+", Type: Comment}},
			{Rule: Rule{Pattern: `"[^"]*"`, Type: String}},
		},
	}
	lexer := &RegexLexer{rules: actual}
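	// Mutate the compiled rules in place, expanding the include at index 0
	// of the "root" state.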
	err := include.Mutator.(LexerMutator).MutateLexer(lexer.rules, "root", 0)
	assert.NoError(t, err)
	expected := CompiledRules{
		"root": {
			{Rule: Rule{
				Pattern: "//.+",
				Type:    Comment,
			}},
			{Rule: Rule{
				Pattern: `"[^"]*"`,
				Type:    String,
			}},
		},
		"other": {
			{Rule: Rule{
				Pattern: "//.+",
				Type:    Comment,
			}},
			{Rule: Rule{
				Pattern: `"[^"]*"`,
				Type:    String,
			}},
		},
	}
	assert.Equal(t, expected, actual)
}

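// TestCombine verifies that the Combined mutator switches the lexer into a
// synthetic state merging the rules of "world", "bye", and "space" once
// "hello" has matched.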
func TestCombine(t *testing.T) {
	l := MustNewLexer(nil, Rules{ // nolint: forbidigo
		"root":  {{`hello`, String, Combined("world", "bye", "space")}},
		"world": {{`world`, Name, nil}},
		"bye":   {{`bye`, Name, nil}},
		"space": {{`\s+`, Whitespace, nil}},
	})
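	// After "hello" matches, the combined state should tokenise the
	// remaining " world" as Whitespace followed by Name.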
	it, err := l.Tokenise(nil, "hello world")
	assert.NoError(t, err)
	expected := []Token{{String, `hello`}, {Whitespace, ` `}, {Name, `world`}}
	assert.Equal(t, expected, it.Tokens())
}