# -*- coding: utf-8 -*-
"""
    R Tests
    ~~~~~~~

    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pytest

from pygments.lexers import SLexer
from pygments.token import Token, Name, Punctuation


@pytest.fixture(scope='module')
def lexer():
    yield SLexer()
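

# All tests below share one pattern: lex a fragment and compare the full
# token stream. A helper like this (a sketch, not part of the original
# suite; the name is hypothetical) could factor that out:
def assert_tokens_equal(lexer, fragment, expected):
    # get_tokens() ensures the input ends with a newline, so every expected
    # list includes a trailing (Token.Text, '\n') token.
    assert list(lexer.get_tokens(fragment)) == expected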


def test_call(lexer):
    fragment = 'f(1, a)\n'
    tokens = [
        (Name.Function, 'f'),
        (Punctuation, '('),
        (Token.Literal.Number, '1'),
        (Punctuation, ','),
        (Token.Text, ' '),
        (Token.Name, 'a'),
        (Punctuation, ')'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_name1(lexer):
    fragment = '._a_2.c'
    tokens = [
        (Name, '._a_2.c'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_name2(lexer):
    # Otherwise-invalid names become valid when quoted in backticks
    fragment = '`.1 blah`'
    tokens = [
        (Name, '`.1 blah`'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_name3(lexer):
    # Backticks inside a quoted name can be escaped with a backslash
    fragment = '`.1 \\` blah`'
    tokens = [
        (Name, '`.1 \\` blah`'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
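    # For reference: in R source, backtick quoting lets you bind a
    # non-syntactic name, e.g. `.1 blah` <- 42.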


def test_custom_operator(lexer):
    fragment = '7 % and % 8'
    tokens = [
        (Token.Literal.Number, '7'),
        (Token.Text, ' '),
        (Token.Operator, '% and %'),
        (Token.Text, ' '),
        (Token.Literal.Number, '8'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
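    # Note: R treats any text wrapped in a pair of percent signs as a single
    # user-defined infix operator (like the built-in %in% or %*%), which is
    # why '% and %' above lexes as one Operator token.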


def test_indexing(lexer):
    fragment = 'a[1]'
    tokens = [
        (Token.Name, 'a'),
        (Token.Punctuation, '['),
        (Token.Literal.Number, '1'),
        (Token.Punctuation, ']'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_dot_name(lexer):
    fragment = '. <- 1'
    tokens = [
        (Token.Name, '.'),
        (Token.Text, ' '),
        (Token.Operator, '<-'),
        (Token.Text, ' '),
        (Token.Literal.Number, '1'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
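    # A bare dot is a legal identifier in R (commonly used as a placeholder,
    # e.g. by magrittr pipes), so the lexer classifies it as a Name rather
    # than punctuation.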


def test_dot_indexing(lexer):
    fragment = '.[1]'
    tokens = [
        (Token.Name, '.'),
        (Token.Punctuation, '['),
        (Token.Literal.Number, '1'),
        (Token.Punctuation, ']'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens