import sys
from _typeshed import StrOrBytesPath
from builtins import open as _builtin_open
from token import *  # noqa: F403
from typing import (
    Any,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    NamedTuple,
    Optional,
    Pattern,
    Sequence,
    Set,
    TextIO,
    Tuple,
    Union,
)

if sys.version_info < (3, 7):
    COMMENT: int
    NL: int
    ENCODING: int

cookie_re: Pattern[str]
blank_re: Pattern[bytes]

_Position = Tuple[int, int]

class _TokenInfo(NamedTuple):
    type: int
    string: str
    start: _Position
    end: _Position
    line: str

class TokenInfo(_TokenInfo):
    @property
    def exact_type(self) -> int: ...
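# Illustration (a hedged note, not part of the stub): for OP tokens,
# exact_type refines the generic OP type into the specific operator token;
# for every other token it equals .type, e.g.:
#     tok = TokenInfo(OP, "(", (1, 0), (1, 1), "(\n")
#     tok.type == OP and tok.exact_type == LPAR  # True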

# For backwards compatibility, tokens may also be shorter sequences,
# such as bare (type, string) pairs (see the sketch below).
_Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]
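# A usage sketch (not part of the stub): untokenize() accepts such bare
# (type, string) pairs and routes them through Untokenizer.compat(), which
# re-spaces tokens rather than honouring positions:
#     untokenize([(NAME, "x"), (OP, "="), (NAME, "y"), (NEWLINE, "\n")])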

class TokenError(Exception): ...
class StopTokenizing(Exception): ...  # undocumented

class Untokenizer:
    tokens: List[str]
    prev_row: int
    prev_col: int
    encoding: Optional[str]
    def __init__(self) -> None: ...
    def add_whitespace(self, start: _Position) -> None: ...
    def untokenize(self, iterable: Iterable[_Token]) -> str: ...
    def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ...

# The docstring says this returns bytes, but that is incorrect:
# when the ENCODING token is missing, the encode step is skipped and a str is returned.
def untokenize(iterable: Iterable[_Token]) -> Any: ...
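# Example (informational, not part of the stub) of why the return type is Any:
#     from io import BytesIO
#     toks = list(tokenize(BytesIO(b"1 + 2\n").readline))
#     untokenize(toks)      # bytes -- the first token is ENCODING
#     untokenize(toks[1:])  # str -- no ENCODING token, so no encode step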
def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...  # undocumented
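# Usage sketch (not part of the stub): tokenize() reads bytes lines, while
# generate_tokens() reads str lines:
#     import io
#     for tok in tokenize(io.BytesIO(b"x = 1\n").readline): ...
#     for tok in generate_tokens(io.StringIO("x = 1\n").readline): ...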
def open(filename: Union[StrOrBytesPath, int]) -> TextIO: ...
def group(*choices: str) -> str: ...  # undocumented
def any(*choices: str) -> str: ...  # undocumented
def maybe(*choices: str) -> str: ...  # undocumented
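# Illustration (mirrors CPython's tokenize.py, not part of the stub): these
# helpers assemble the regex fragments declared below:
#     group("a", "b")  # -> "(a|b)"
#     any("a", "b")    # -> "(a|b)*"
#     maybe("a", "b")  # -> "(a|b)?"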

Whitespace: str  # undocumented
Comment: str  # undocumented
Ignore: str  # undocumented
Name: str  # undocumented

Hexnumber: str  # undocumented
Binnumber: str  # undocumented
Octnumber: str  # undocumented
Decnumber: str  # undocumented
Intnumber: str  # undocumented
Exponent: str  # undocumented
Pointfloat: str  # undocumented
Expfloat: str  # undocumented
Floatnumber: str  # undocumented
Imagnumber: str  # undocumented
Number: str  # undocumented

def _all_string_prefixes() -> Set[str]: ...  # undocumented

StringPrefix: str  # undocumented

Single: str  # undocumented
Double: str  # undocumented
Single3: str  # undocumented
Double3: str  # undocumented
Triple: str  # undocumented
String: str  # undocumented

if sys.version_info < (3, 7):
    Operator: str  # undocumented
    Bracket: str  # undocumented

Special: str  # undocumented
Funny: str  # undocumented

PlainToken: str  # undocumented
Token: str  # undocumented

ContStr: str  # undocumented
PseudoExtras: str  # undocumented
PseudoToken: str  # undocumented

endpats: Dict[str, str]  # undocumented
single_quoted: Set[str]  # undocumented
triple_quoted: Set[str]  # undocumented

tabsize: int  # undocumented