# -*- coding: utf-8 -*-
"""
    Pygments regex lexer tests
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pytest

from pygments.token import Text
from pygments.lexer import RegexLexer, default


@pytest.fixture(scope='module')
def lexer():
    # MyLexer is defined below; this body only runs once a test requests
    # the fixture.
    yield MyLexer()


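# State-transition actions exercised by MyLexer:
#   - a state name pushes that state onto the stack
#   - '#push' re-pushes the state currently on top
#   - '#pop' pops one state; popping the last remaining state is a no-op
#   - a tuple applies each of its entries in order
#   - default(...) matches the empty string, emits no token and applies
#     its transition when no other rule in the state matches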
class MyLexer(RegexLexer):
    """Test tuple state transitions including #pop."""
    tokens = {
        'root': [
            ('a', Text.Root, 'rag'),             # push 'rag'
            ('e', Text.Root),                    # no state change
            ('#', Text.Root, '#pop'),            # pop one state
            ('@', Text.Root, ('#pop', '#pop')),  # pop two states
            default(('beer', 'beer')),           # push 'beer' twice
        ],
        'beer': [
            ('d', Text.Beer, ('#pop', '#pop')),  # pop two states
        ],
        'rag': [
            ('b', Text.Rag, '#push'),            # re-push 'rag'
            ('c', Text.Rag, ('#pop', 'beer')),   # pop 'rag', push 'beer'
        ],
    }


def test_tuple(lexer):
    # 'a' pushes 'rag', 'b' re-pushes it, 'c' swaps it for 'beer', and
    # 'd' pops both 'beer' and 'rag', so 'e' is matched back in 'root'.
    toks = list(lexer.get_tokens_unprocessed('abcde'))
    assert toks == [
        (0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),
        (3, Text.Beer, 'd'), (4, Text.Root, 'e')]


def test_multiline(lexer):
    # The newline matches no rule in 'rag'; the lexer emits it as plain
    # Text and resets the state stack to 'root', where 'e' then matches.
    toks = list(lexer.get_tokens_unprocessed('a\ne'))
    assert toks == [
        (0, Text.Root, 'a'), (1, Text, '\n'), (2, Text.Root, 'e')]


def test_default(lexer):
    # 'd' matches nothing in 'root', so the default() rule pushes 'beer'
    # (twice) without emitting a token; 'd' is then matched there.
    toks = list(lexer.get_tokens_unprocessed('d'))
    assert toks == [(0, Text.Beer, 'd')]


def test_pop_empty_regular(lexer):
    # A plain '#pop' with only 'root' on the stack must not empty it.
    toks = list(lexer.get_tokens_unprocessed('#e'))
    assert toks == [(0, Text.Root, '#'), (1, Text.Root, 'e')]


def test_pop_empty_tuple(lexer):
    # '#pop' inside a tuple transition must not empty the stack either.
    toks = list(lexer.get_tokens_unprocessed('@e'))
    assert toks == [(0, Text.Root, '@'), (1, Text.Root, 'e')]