Home
last modified time | relevance | path

Searched refs: generate_tokens (results 1–25 of 552), sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10 >> … 23

/dports/lang/python-legacy/Python-2.7.18/Lib/test/
H A Dtest_tokenize.py2 from tokenize import (untokenize, generate_tokens, NUMBER, NAME, OP,
21 for type, token, start, end, line in generate_tokens(f.readline):
63 for tok in generate_tokens(StringIO(indent_error_file).readline):
520 g = generate_tokens(StringIO(s).readline) # tokenize the string
600 token_list = list(generate_tokens(f.readline))
605 tokens2 = [tok[:2] for tok in generate_tokens(readline)]
703 tokens = generate_tokens(StringIO(code).readline)
/dports/lang/python27/Python-2.7.18/Lib/test/
H A Dtest_tokenize.py2 from tokenize import (untokenize, generate_tokens, NUMBER, NAME, OP,
21 for type, token, start, end, line in generate_tokens(f.readline):
63 for tok in generate_tokens(StringIO(indent_error_file).readline):
520 g = generate_tokens(StringIO(s).readline) # tokenize the string
600 token_list = list(generate_tokens(f.readline))
605 tokens2 = [tok[:2] for tok in generate_tokens(readline)]
703 tokens = generate_tokens(StringIO(code).readline)
/dports/sysutils/uefi-edk2-bhyve-csm/uefi-edk2-aa8d718/AppPkg/Applications/Python/Python-2.7.2/Lib/test/
H A Dtest_tokenize.py532 from tokenize import (untokenize, generate_tokens, NUMBER, NAME, OP,
543 for type, token, start, end, line in generate_tokens(f.readline):
574 g = generate_tokens(StringIO(s).readline) # tokenize the string
/dports/devel/pycharm-pro/pycharm-2020.2.3/plugins/python/helpers/coveragepy/coverage/
H A Dphystokens.py96 tokgen = generate_tokens(source)
143 def generate_tokens(self, text): member in CachedTokenizer
148 self.last_tokens = list(tokenize.generate_tokens(readline))
152 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/mail/thunderbird/thunderbird-91.8.0/third_party/python/coverage/coverage/
H A Dphystokens.py95 tokgen = generate_tokens(source)
142 def generate_tokens(self, text): member in CachedTokenizer
147 self.last_tokens = list(tokenize.generate_tokens(readline))
151 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/www/chromium-legacy/chromium-88.0.4324.182/third_party/catapult/third_party/coverage/coverage/
H A Dphystokens.py97 tokgen = generate_tokens(source)
144 def generate_tokens(self, text): member in CachedTokenizer
149 self.last_tokens = list(tokenize.generate_tokens(readline))
153 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/devel/eric6/eric6-21.11/eric/eric6/DebugClients/Python/coverage/
H A Dphystokens.py95 tokgen = generate_tokens(source)
142 def generate_tokens(self, text): member in CachedTokenizer
147 self.last_tokens = list(tokenize.generate_tokens(readline))
151 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/www/firefox-esr/firefox-91.8.0/third_party/python/coverage/coverage/
H A Dphystokens.py95 tokgen = generate_tokens(source)
142 def generate_tokens(self, text): member in CachedTokenizer
147 self.last_tokens = list(tokenize.generate_tokens(readline))
151 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/lang/spidermonkey78/firefox-78.9.0/third_party/python/coverage/coverage/
H A Dphystokens.py95 tokgen = generate_tokens(source)
142 def generate_tokens(self, text): member in CachedTokenizer
147 self.last_tokens = list(tokenize.generate_tokens(readline))
151 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/devel/py-coverage/coverage-4.5.4/coverage/
H A Dphystokens.py96 tokgen = generate_tokens(source)
143 def generate_tokens(self, text): member in CachedTokenizer
148 self.last_tokens = list(tokenize.generate_tokens(readline))
152 generate_tokens = CachedTokenizer().generate_tokens variable
/dports/devel/py-flake8-quotes/flake8-quotes-3.3.1/test/
H A Dtest_docstring_detection.py11 tokens = [Token(t) for t in tokenize.generate_tokens(f.readline)]
29 tokens = [Token(t) for t in tokenize.generate_tokens(f.readline)]
39 tokens = [Token(t) for t in tokenize.generate_tokens(f.readline)]
/dports/devel/ipython/ipython-7.29.0/IPython/utils/
H A Dtokenutil.py15 def generate_tokens(readline): function
18 for token in tokenize.generate_tokens(readline):
80 for tup in generate_tokens(StringIO(cell).readline):
H A DPyColorize.py44 generate_tokens = tokenize.generate_tokens variable
265 for atoken in generate_tokens(text.readline):
/dports/devel/ipython5/ipython-5.10.0/IPython/utils/
H A Dtokenutil.py17 def generate_tokens(readline): function
20 for token in tokenize2.generate_tokens(readline):
78 for tup in generate_tokens(StringIO(cell).readline):
H A DPyColorize.py48 generate_tokens = tokenize.generate_tokens
52 generate_tokens = tokenize._tokenize variable
262 for atoken in generate_tokens(text.readline):
/dports/graphics/py-urbansim/urbansim-3.2/urbansim/models/
H A Dutil.py12 from tokenize import generate_tokens, NAME
282 for toknum, tokval, _, _, _ in generate_tokens(StringIO(filters).readline):
342 for toknum, tokval, _, _, _ in generate_tokens(
/dports/lang/python-legacy/Python-2.7.18/Doc/library/
H A Dtokenize.rst22 :func:`tokenize.generate_tokens` for the character sequence that identifies a
27 .. function:: generate_tokens(readline)
29 The :func:`generate_tokens` generator requires one argument, *readline*,
64 generated by :func:`generate_tokens`.
137 g = generate_tokens(StringIO(s).readline) # tokenize the string
/dports/lang/python27/Python-2.7.18/Doc/library/
H A Dtokenize.rst22 :func:`tokenize.generate_tokens` for the character sequence that identifies a
27 .. function:: generate_tokens(readline)
29 The :func:`generate_tokens` generator requires one argument, *readline*,
64 generated by :func:`generate_tokens`.
137 g = generate_tokens(StringIO(s).readline) # tokenize the string
/dports/www/qt5-webengine/qtwebengine-everywhere-src-5.15.2/src/3rdparty/chromium/third_party/pycoverage/coverage/
H A Dparser.py111 tokgen = generate_tokens(self.text)
690 def generate_tokens(self, text): member in CachedTokenizer
695 tokenize.generate_tokens(StringIO(text).readline)
700 generate_tokens = CachedTokenizer().generate_tokens variable
H A Dphystokens.py5 from coverage.parser import generate_tokens
83 tokgen = generate_tokens(source)
/dports/www/chromium-legacy/chromium-88.0.4324.182/third_party/pycoverage/coverage/
H A Dparser.py111 tokgen = generate_tokens(self.text)
690 def generate_tokens(self, text): member in CachedTokenizer
695 tokenize.generate_tokens(StringIO(text).readline)
700 generate_tokens = CachedTokenizer().generate_tokens variable
H A Dphystokens.py5 from coverage.parser import generate_tokens
83 tokgen = generate_tokens(source)
/dports/devel/eric6/eric6-21.11/eric/eric6/Plugins/CheckerPlugins/CodeStyleChecker/DocStyle/
H A DDocStyleChecker.py443 for kind, value, (line, _char), _, _ in tokenize.generate_tokens(
468 tokenGenerator = tokenize.generate_tokens(
489 tokenGenerator = tokenize.generate_tokens(self.__readline)
560 tokenGenerator = tokenize.generate_tokens(
883 tokenize.generate_tokens(StringIO(context.ssource()).readline))
1058 tokenize.generate_tokens(StringIO(context.ssource()).readline))
1084 tokenize.generate_tokens(StringIO(context.ssource()).readline))
1167 tokenize.generate_tokens(StringIO(context.ssource()).readline))
1236 tokenize.generate_tokens(StringIO(context.ssource()).readline))
/dports/devel/py-hypothesmith/hypothesmith-0.2.0/tests/
H A Dtest_syntactic.py44 tokens = list(tokenize.generate_tokens(io.StringIO(source_code).readline))
46 output = tokenize.generate_tokens(io.StringIO(outstring).readline)
/dports/devel/py-traits/traits-6.3.2/traits/util/tests/
H A Dtest_trait_documenter.py106 tokens = tokenize.generate_tokens(string_io.readline)
117 tokens = tokenize.generate_tokens(string_io.readline)

Pages: 1 2 3 4 5 6 7 8 9 10 >> … 23