1# -*- coding: utf-8 -*-
2"""
3    Praat lexer tests
4    ~~~~~~~~~~~~~~~~~
5
6    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
7    :license: BSD, see LICENSE for details.
8"""
9
10import pytest
11
12from pygments.token import Token
13from pygments.lexers import PraatLexer
14
15
@pytest.fixture(scope='module')
def lexer():
    """Module-scoped fixture: one shared PraatLexer instance for all tests."""
    return PraatLexer()
19
20
def test_numeric_assignment(lexer):
    """A negative scientific-notation literal splits into '-' plus the digits."""
    expected = [
        (Token.Text, 'var'),
        (Token.Text, ' '),
        (Token.Operator, '='),
        (Token.Text, ' '),
        (Token.Operator, '-'),
        (Token.Literal.Number, '15e4'),
        (Token.Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens('var = -15e4\n'))
33
34
def test_string_assignment(lexer):
    """Assigning a quoted literal to a string variable (``$`` suffix).

    Renamed from camelCase ``testStringAssignment`` to snake_case for PEP 8
    and for consistency with every other test in this module.
    """
    fragment = 'var$ = "foo"\n'
    tokens = [
        (Token.Text, 'var$'),
        (Token.Text, ' '),
        (Token.Operator, '='),
        (Token.Text, ' '),
        (Token.Literal.String, '"'),
        (Token.Literal.String, 'foo'),
        (Token.Literal.String, '"'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
48
49
def test_string_escaped_quotes(lexer):
    """Doubled quotes inside a string come out as individual String tokens."""
    pieces = ['"', 'it said ', '"', '"', 'foo', '"', '"', '"']
    expected = [(Token.Literal.String, piece) for piece in pieces]
    expected.append((Token.Text, '\n'))
    assert list(lexer.get_tokens('"it said ""foo"""\n')) == expected
64
65
def test_function_call(lexer):
    """Built-in call: name is Name.Function, its parens/comma are Punctuation,
    while parens inside the argument expression stay plain Text."""
    observed = list(lexer.get_tokens('selected("Sound", i+(a*b))\n'))
    assert observed == [
        (Token.Name.Function, 'selected'), (Token.Punctuation, '('),
        (Token.Literal.String, '"'), (Token.Literal.String, 'Sound'),
        (Token.Literal.String, '"'), (Token.Punctuation, ','),
        (Token.Text, ' '), (Token.Text, 'i'),
        (Token.Operator, '+'), (Token.Text, '('),
        (Token.Text, 'a'), (Token.Operator, '*'),
        (Token.Text, 'b'), (Token.Text, ')'),
        (Token.Punctuation, ')'), (Token.Text, '\n'),
    ]
87
88
def test_broken_unquoted_string(lexer):
    """A '...' line continuation keeps lexing the unquoted string, including
    an 'interpolated' variable reference inside it."""
    types = [Token.Keyword, Token.Text, Token.Literal.String, Token.Text,
             Token.Punctuation, Token.Text, Token.Literal.String.Interpol,
             Token.Text, Token.Literal.String, Token.Text]
    values = ['printline', ' ', 'string', '\n', '...', ' ',
              "'interpolated'", ' ', 'string', '\n']
    fragment = 'printline string\n... \'interpolated\' string\n'
    assert list(lexer.get_tokens(fragment)) == list(zip(types, values))
104
105
def test_inline_if(lexer):
    """Inline 'if ... then ... else ... fi': keywords tokenize as Keyword and
    the unary minus before a number is a separate Operator token."""
    observed = list(lexer.get_tokens('var = if true == 1 then -1 else 0 fi'))
    assert observed == [
        (Token.Text, 'var'), (Token.Text, ' '),
        (Token.Operator, '='), (Token.Text, ' '),
        (Token.Keyword, 'if'), (Token.Text, ' '),
        (Token.Text, 'true'), (Token.Text, ' '),
        (Token.Operator, '=='), (Token.Text, ' '),
        (Token.Literal.Number, '1'), (Token.Text, ' '),
        (Token.Keyword, 'then'), (Token.Text, ' '),
        (Token.Operator, '-'), (Token.Literal.Number, '1'),
        (Token.Text, ' '), (Token.Keyword, 'else'),
        (Token.Text, ' '), (Token.Literal.Number, '0'),
        (Token.Text, ' '), (Token.Keyword, 'fi'),
        (Token.Text, '\n'),
    ]
134
135
def test_interpolation_boundary(lexer):
    """A lone single quote inside a double-quoted string is plain String —
    it must not open an interpolation that swallows the following tokens."""
    quoted_tick = [(Token.Literal.String, ch) for ch in ('"', "'", '"')]
    middle = [(Token.Text, ' '), (Token.Operator, '+'), (Token.Text, ' ')]
    expected = quoted_tick + middle + quoted_tick + [(Token.Text, '\n')]
    assert list(lexer.get_tokens('"\'" + "\'"')) == expected
151
152
def test_interpolated_numeric_indexed(lexer):
    """An indexed numeric variable interpolation lexes as one Interpol token."""
    assert list(lexer.get_tokens("'a[3]'")) == [
        (Token.Literal.String.Interpol, "'a[3]'"),
        (Token.Text, '\n'),
    ]
160
161
def test_interpolated_numeric_hash(lexer):
    """A hash-indexed numeric variable interpolation is a single Interpol token."""
    assert list(lexer.get_tokens("'a[\"b\"]'")) == [
        (Token.Literal.String.Interpol, "'a[\"b\"]'"),
        (Token.Text, '\n'),
    ]
169
170
def test_interpolated_string_indexed(lexer):
    """An indexed string variable ($) interpolation is a single Interpol token."""
    assert list(lexer.get_tokens("'a$[3]'")) == [
        (Token.Literal.String.Interpol, "'a$[3]'"),
        (Token.Text, '\n'),
    ]
178
179
def test_interpolated_string_hash(lexer):
    """A hash-indexed string variable ($) interpolation is one Interpol token."""
    assert list(lexer.get_tokens("'a$[\"b\"]'")) == [
        (Token.Literal.String.Interpol, "'a$[\"b\"]'"),
        (Token.Text, '\n'),
    ]
187
188
def test_interpolated_numeric_with_precision(lexer):
    """An interpolation with a ':precision' suffix stays one Interpol token."""
    assert list(lexer.get_tokens("'a:3'")) == [
        (Token.Literal.String.Interpol, "'a:3'"),
        (Token.Text, '\n'),
    ]
196
197
def test_interpolated_indexed_numeric_with_precision(lexer):
    """Index plus ':precision' suffix still lexes as a single Interpol token."""
    assert list(lexer.get_tokens("'a[3]:3'")) == [
        (Token.Literal.String.Interpol, "'a[3]:3'"),
        (Token.Text, '\n'),
    ]
205
206
def test_interpolated_local_numeric_with_precision(lexer):
    """A dotted (procedure-local) variable with precision is one Interpol token."""
    assert list(lexer.get_tokens("'a.a:3'")) == [
        (Token.Literal.String.Interpol, "'a.a:3'"),
        (Token.Text, '\n'),
    ]
214