# -*- coding: utf-8 -*-
"""
    Basic CrystalLexer Test
    ~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pytest

from pygments.token import Text, Operator, Keyword, Name, String, Number, \
    Punctuation, Error
from pygments.lexers import CrystalLexer


@pytest.fixture(scope='module')
def lexer():
    yield CrystalLexer()


def test_range_syntax1(lexer):
    fragment = '1...3\n'
    tokens = [
        (Number.Integer, '1'),
        (Operator, '...'),
        (Number.Integer, '3'),
        (Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_range_syntax2(lexer):
    fragment = '1 .. 3\n'
    tokens = [
        (Number.Integer, '1'),
        (Text, ' '),
        (Operator, '..'),
        (Text, ' '),
        (Number.Integer, '3'),
        (Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


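# Braces of the inner block are tracked by the lexer, so the block's closing
# "}" is emitted as String.Interpol and does not end the "#{ ... }" interpolation early.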
def test_interpolation_nested_curly(lexer):
    fragment = (
        '"A#{ (3..5).group_by { |x| x/2}.map '
        'do |k,v| "#{k}" end.join }" + "Z"\n')
    tokens = [
        (String.Double, '"'),
        (String.Double, 'A'),
        (String.Interpol, '#{'),
        (Text, ' '),
        (Punctuation, '('),
        (Number.Integer, '3'),
        (Operator, '..'),
        (Number.Integer, '5'),
        (Punctuation, ')'),
        (Operator, '.'),
        (Name, 'group_by'),
        (Text, ' '),
        (String.Interpol, '{'),
        (Text, ' '),
        (Operator, '|'),
        (Name, 'x'),
        (Operator, '|'),
        (Text, ' '),
        (Name, 'x'),
        (Operator, '/'),
        (Number.Integer, '2'),
        (String.Interpol, '}'),
        (Operator, '.'),
        (Name, 'map'),
        (Text, ' '),
        (Keyword, 'do'),
        (Text, ' '),
        (Operator, '|'),
        (Name, 'k'),
        (Punctuation, ','),
        (Name, 'v'),
        (Operator, '|'),
        (Text, ' '),
        (String.Double, '"'),
        (String.Interpol, '#{'),
        (Name, 'k'),
        (String.Interpol, '}'),
        (String.Double, '"'),
        (Text, ' '),
        (Keyword, 'end'),
        (Operator, '.'),
        (Name, 'join'),
        (Text, ' '),
        (String.Interpol, '}'),
        (String.Double, '"'),
        (Text, ' '),
        (Operator, '+'),
        (Text, ' '),
        (String.Double, '"'),
        (String.Double, 'Z'),
        (String.Double, '"'),
        (Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


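# An operator method called explicitly, such as `[]?`, is lexed as Name.Operator.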
def test_operator_methods(lexer):
    fragment = '([] of Int32).[]?(5)\n'
    tokens = [
        (Punctuation, '('),
        (Operator, '['),
        (Operator, ']'),
        (Text, ' '),
        (Keyword, 'of'),
        (Text, ' '),
        (Name.Builtin, 'Int32'),
        (Punctuation, ')'),
        (Operator, '.'),
        (Name.Operator, '[]?'),
        (Punctuation, '('),
        (Number.Integer, '5'),
        (Punctuation, ')'),
        (Text, '\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_array_access(lexer):
    fragment = '[5][5]?\n'
    tokens = [
        (Operator, '['),
        (Number.Integer, '5'),
        (Operator, ']'),
        (Operator, '['),
        (Number.Integer, '5'),
        (Operator, ']?'),
        (Text, '\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


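# Each literal in a testset must lex as a single token of the given number
# type; the malformed literals below must start with an Error token.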
def test_numbers(lexer):
    for kind, testset in [
        (Number.Integer, '0  1  1_000_000  1u8  11231231231121312i64'),
        (Number.Float, '0.0  1.0_f32  1_f32  0f64  1e+4  1e111  1_234.567_890'),
        (Number.Bin, '0b1001_0110  0b0u8'),
        (Number.Oct, '0o17  0o7_i32'),
        (Number.Hex, '0xdeadBEEF'),
    ]:
        for fragment in testset.split():
            assert list(lexer.get_tokens(fragment + '\n')) == \
                [(kind, fragment), (Text, '\n')]

    for fragment in '01  0b2  0x129g2  0o12358'.split():
        assert next(lexer.get_tokens(fragment + '\n'))[0] == Error


def test_chars(lexer):
    for fragment in ["'a'", "'я'", "'\\u{1234}'", "'\n'"]:
        assert list(lexer.get_tokens(fragment + '\n')) == \
            [(String.Char, fragment), (Text, '\n')]
    assert next(lexer.get_tokens("'abc'"))[0] == Error


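# Macro expressions use {% ... %} and {{ ... }} delimiters, lexed as
# String.Interpol even without surrounding whitespace.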
def test_macro(lexer):
    fragment = (
        'def<=>(other : self) : Int\n'
        '{%for field in %w(first_name middle_name last_name)%}\n'
        'cmp={{field.id}}<=>other.{{field.id}}\n'
        'return cmp if cmp!=0\n'
        '{%end%}\n'
        '0\n'
        'end\n')
    tokens = [
        (Keyword, 'def'),
        (Name.Function, '<=>'),
        (Punctuation, '('),
        (Name, 'other'),
        (Text, ' '),
        (Punctuation, ':'),
        (Text, ' '),
        (Keyword.Pseudo, 'self'),
        (Punctuation, ')'),
        (Text, ' '),
        (Punctuation, ':'),
        (Text, ' '),
        (Name.Builtin, 'Int'),
        (Text, '\n'),
        (String.Interpol, '{%'),
        (Keyword, 'for'),
        (Text, ' '),
        (Name, 'field'),
        (Text, ' '),
        (Keyword, 'in'),
        (Text, ' '),
        (String.Other, '%w('),
        (String.Other, 'first_name middle_name last_name'),
        (String.Other, ')'),
        (String.Interpol, '%}'),
        (Text, '\n'),
        (Name, 'cmp'),
        (Operator, '='),
        (String.Interpol, '{{'),
        (Name, 'field'),
        (Operator, '.'),
        (Name, 'id'),
        (String.Interpol, '}}'),
        (Operator, '<=>'),
        (Name, 'other'),
        (Operator, '.'),
        (String.Interpol, '{{'),
        (Name, 'field'),
        (Operator, '.'),
        (Name, 'id'),
        (String.Interpol, '}}'),
        (Text, '\n'),
        (Keyword, 'return'),
        (Text, ' '),
        (Name, 'cmp'),
        (Text, ' '),
        (Keyword, 'if'),
        (Text, ' '),
        (Name, 'cmp'),
        (Operator, '!='),
        (Number.Integer, '0'),
        (Text, '\n'),
        (String.Interpol, '{%'),
        (Keyword, 'end'),
        (String.Interpol, '%}'),
        (Text, '\n'),
        (Number.Integer, '0'),
        (Text, '\n'),
        (Keyword, 'end'),
        (Text, '\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


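# `lib` declarations: @[...] annotations, the lib name as Name.Namespace, and a
# `fun` binding whose C name is given after `=`.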
def test_lib(lexer):
    fragment = (
        '@[Link("some")]\nlib LibSome\n'
        '@[CallConvention("X86_StdCall")]\nfun foo="some.foo"(thing : Void*) : LibC::Int\n'
        'end\n')
    tokens = [
        (Operator, '@['),
        (Name.Decorator, 'Link'),
        (Punctuation, '('),
        (String.Double, '"'),
        (String.Double, 'some'),
        (String.Double, '"'),
        (Punctuation, ')'),
        (Operator, ']'),
        (Text, '\n'),
        (Keyword, 'lib'),
        (Text, ' '),
        (Name.Namespace, 'LibSome'),
        (Text, '\n'),
        (Operator, '@['),
        (Name.Decorator, 'CallConvention'),
        (Punctuation, '('),
        (String.Double, '"'),
        (String.Double, 'X86_StdCall'),
        (String.Double, '"'),
        (Punctuation, ')'),
        (Operator, ']'),
        (Text, '\n'),
        (Keyword, 'fun'),
        (Text, ' '),
        (Name.Function, 'foo'),
        (Operator, '='),
        (String.Double, '"'),
        (String.Double, 'some.foo'),
        (String.Double, '"'),
        (Punctuation, '('),
        (Name, 'thing'),
        (Text, ' '),
        (Punctuation, ':'),
        (Text, ' '),
        (Name.Builtin, 'Void'),
        (Operator, '*'),
        (Punctuation, ')'),
        (Text, ' '),
        (Punctuation, ':'),
        (Text, ' '),
        (Name, 'LibC'),
        (Operator, '::'),
        (Name.Builtin, 'Int'),
        (Text, '\n'),
        (Keyword, 'end'),
        (Text, '\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


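# Escaped backslashes inside a %r{...} literal must not terminate the
# brace-delimited regex early.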
def test_escaped_bracestring(lexer):
    fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
    tokens = [
        (Name, 'str'),
        (Operator, '.'),
        (Name, 'gsub'),
        (Punctuation, '('),
        (String.Regex, '%r{'),
        (String.Regex, '\\\\'),
        (String.Regex, '\\\\'),
        (String.Regex, '}'),
        (Punctuation, ','),
        (Text, ' '),
        (String.Double, '"'),
        (String.Double, '/'),
        (String.Double, '"'),
        (Punctuation, ')'),
        (Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens