# -*- coding: utf-8 -*-
"""
    Basic Tests for textfmts
    ~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pytest

from pygments.token import Token
from pygments.lexers.textfmts import HttpLexer


@pytest.fixture(scope='module')
def lexer():
    yield HttpLexer()


def test_http_status_line(lexer):
    fragment = 'HTTP/1.1 200 OK\n'
    tokens = [
        (Token.Keyword.Reserved, 'HTTP'),
        (Token.Operator, '/'),
        (Token.Number, '1.1'),
        (Token.Text, ' '),
        (Token.Number, '200'),
        (Token.Text, ' '),
        (Token.Name.Exception, 'OK'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_http_status_line_without_reason_phrase(lexer):
    # The reason phrase is omitted entirely: no trailing space after
    # the status code.
    fragment = 'HTTP/1.1 200\n'
    tokens = [
        (Token.Keyword.Reserved, 'HTTP'),
        (Token.Operator, '/'),
        (Token.Number, '1.1'),
        (Token.Text, ' '),
        (Token.Number, '200'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens


def test_http_status_line_without_reason_phrase_rfc_7230(lexer):
    # RFC 7230 allows an empty reason phrase, so the status line may
    # end with a space after the status code.
    fragment = 'HTTP/1.1 200 \n'
    tokens = [
        (Token.Keyword.Reserved, 'HTTP'),
        (Token.Operator, '/'),
        (Token.Number, '1.1'),
        (Token.Text, ' '),
        (Token.Number, '200'),
        (Token.Text, ' '),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
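

def test_http_request_line(lexer):
    # Not part of the original suite: a minimal sketch assuming the
    # lexer tags the request method as Name.Function. It checks
    # membership only, rather than pinning down the full token stream.
    fragment = 'GET /path HTTP/1.0\n'
    tokens = list(lexer.get_tokens(fragment))
    assert (Token.Name.Function, 'GET') in tokens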


def test_application_xml(lexer):
    # The message body is re-lexed with an XML lexer selected from the
    # Content-Type header; only the trailing body tokens are checked.
    fragment = 'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
    tokens = [
        (Token.Name.Tag, '<foo'),
        (Token.Name.Tag, '>'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens


def test_application_calendar_xml(lexer):
    # A content type with a +xml suffix should fall back to the XML
    # lexer when no lexer matches the full type.
    fragment = 'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
    tokens = [
        (Token.Name.Tag, '<foo'),
        (Token.Name.Tag, '>'),
        (Token.Text, '\n'),
    ]
    assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens
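

def test_http_header_name(lexer):
    # Also a sketch beyond the original tests: it assumes header names
    # are tagged Name.Attribute, and checks membership so surrounding
    # whitespace tokens do not matter.
    fragment = 'GET / HTTP/1.0\nContent-Type: text/plain\n\n'
    tokens = list(lexer.get_tokens(fragment))
    assert (Token.Name.Attribute, 'Content-Type') in tokens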