from typing import Any, Optional, Tuple

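# Precompiled regular expressions used by the lexer to scan template source.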
whitespace_re: Any
string_re: Any
integer_re: Any
name_re: Any
float_re: Any
newline_re: Any
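# Token type constants. At runtime these are plain (interned) strings such as
# 'add' or 'block_begin'.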
TOKEN_ADD: Any
TOKEN_ASSIGN: Any
TOKEN_COLON: Any
TOKEN_COMMA: Any
TOKEN_DIV: Any
TOKEN_DOT: Any
TOKEN_EQ: Any
TOKEN_FLOORDIV: Any
TOKEN_GT: Any
TOKEN_GTEQ: Any
TOKEN_LBRACE: Any
TOKEN_LBRACKET: Any
TOKEN_LPAREN: Any
TOKEN_LT: Any
TOKEN_LTEQ: Any
TOKEN_MOD: Any
TOKEN_MUL: Any
TOKEN_NE: Any
TOKEN_PIPE: Any
TOKEN_POW: Any
TOKEN_RBRACE: Any
TOKEN_RBRACKET: Any
TOKEN_RPAREN: Any
TOKEN_SEMICOLON: Any
TOKEN_SUB: Any
TOKEN_TILDE: Any
TOKEN_WHITESPACE: Any
TOKEN_FLOAT: Any
TOKEN_INTEGER: Any
TOKEN_NAME: Any
TOKEN_STRING: Any
TOKEN_OPERATOR: Any
TOKEN_BLOCK_BEGIN: Any
TOKEN_BLOCK_END: Any
TOKEN_VARIABLE_BEGIN: Any
TOKEN_VARIABLE_END: Any
TOKEN_RAW_BEGIN: Any
TOKEN_RAW_END: Any
TOKEN_COMMENT_BEGIN: Any
TOKEN_COMMENT_END: Any
TOKEN_COMMENT: Any
TOKEN_LINESTATEMENT_BEGIN: Any
TOKEN_LINESTATEMENT_END: Any
TOKEN_LINECOMMENT_BEGIN: Any
TOKEN_LINECOMMENT_END: Any
TOKEN_LINECOMMENT: Any
TOKEN_DATA: Any
TOKEN_INITIAL: Any
TOKEN_EOF: Any
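# Operator lookup tables: `operators` maps operator source text to token types,
# `reverse_operators` inverts that mapping, and `operator_re` matches any operator.
# `ignored_tokens` and `ignore_if_empty` list token types the wrapped stream drops.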
operators: Any
reverse_operators: Any
operator_re: Any
ignored_tokens: Any
ignore_if_empty: Any

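# Helpers for error reporting (describe_token, describe_token_expr), newline
# counting, and turning an environment's delimiter configuration into lexer
# rules (compile_rules).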
def describe_token(token): ...
def describe_token_expr(expr): ...
def count_newlines(value): ...
def compile_rules(environment): ...

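# A Failure is a deferred error used as a lexer rule action: calling the instance
# raises its stored error class (a TemplateSyntaxError by default) with the given
# line number and filename.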
class Failure:
    message: Any
    error_class: Any
    def __init__(self, message, cls: Any = ...) -> None: ...
    def __call__(self, lineno, filename): ...

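# A lexed token behaves as a (lineno, type, value) triple. test() matches either a
# bare token type ('name') or a 'type:value' expression; test_any() checks several
# such expressions.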
class Token(Tuple[int, Any, Any]):
    lineno: Any
    type: Any
    value: Any
    def __new__(cls, lineno, type, value): ...
    def test(self, expr): ...
    def test_any(self, *iterable): ...

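# Thin iterator wrapper around a TokenStream; iteration stops once EOF is reached.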
class TokenStreamIterator:
    stream: Any
    def __init__(self, stream) -> None: ...
    def __iter__(self): ...
    def __next__(self): ...

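# Wraps the raw token generator with one-token lookahead (`current`), push-back
# (`push`), peeking (`look`), skipping, and expect(), which raises on an
# unexpected token.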
class TokenStream:
    name: Any
    filename: Any
    closed: bool
    current: Any
    def __init__(self, generator, name, filename) -> None: ...
    def __iter__(self): ...
    def __bool__(self): ...
    __nonzero__: Any
    eos: Any
    def push(self, token): ...
    def look(self): ...
    def skip(self, n: int = ...): ...
    def next_if(self, expr): ...
    def skip_if(self, expr): ...
    def __next__(self): ...
    def close(self): ...
    def expect(self, expr): ...

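# Returns the lexer for an environment; lexers are cached and shared between
# environments whose lexing-relevant settings match.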
def get_lexer(environment): ...

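# The lexer itself, configured from an environment's delimiters and whitespace
# options. tokenize() runs tokeniter() over the source and wrap()s the raw stream
# into a TokenStream of Token objects.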
class Lexer:
    newline_sequence: Any
    keep_trailing_newline: Any
    rules: Any
    def __init__(self, environment) -> None: ...
    def tokenize(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., state: Optional[Any] = ...): ...
    def wrap(self, stream, name: Optional[Any] = ..., filename: Optional[Any] = ...): ...
    def tokeniter(self, source, name, filename: Optional[Any] = ..., state: Optional[Any] = ...): ...
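
# A minimal usage sketch (illustrative only, kept as a comment so the stub stays
# declaration-only), assuming the runtime jinja2 API in which Environment exposes
# a `lexer` attribute and tokenize() returns a TokenStream of Token objects:
#
#     from jinja2 import Environment
#
#     env = Environment()
#     stream = env.lexer.tokenize("Hello {{ name }}!", name="example")
#     for token in stream:
#         print(token.lineno, token.type, token.value)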