
Searched refs:tokenize (Results 1 – 25 of 4751) sorted by relevance


/dports/lang/elixir-devel/elixir-1.13.1/lib/elixir/test/erlang/
tokenizer_test.erl
4 tokenize(String) -> function
5 tokenize(String, []).
7 tokenize(String, Opts) -> function
18 {int, {1, 6, 3}, "3"}] = tokenize("1 :: 3"),
21 {int, {1, 7, 3}, "3"}] = tokenize("true::3"),
56 [{atom, {1, 1, nil}, '+'}] = tokenize(":+"),
150 {')', {1, 4, nil}}] = tokenize("f.()").
222 {int,{1,5,1},"1"}] = tokenize("& +/1"),
226 {int,{1,5,1},"1"}] = tokenize("& &/1"),
243 {int,{1,3,1},"1"}] = tokenize("&/1"),
[all …]
/dports/lang/elixir/elixir-1.12.3/lib/elixir/test/erlang/
tokenizer_test.erl
4 tokenize(String) -> function
5 tokenize(String, []).
7 tokenize(String, Opts) -> function
18 {int, {1, 6, 3}, "3"}] = tokenize("1 :: 3"),
21 {int, {1, 7, 3}, "3"}] = tokenize("true::3"),
56 [{atom, {1, 1, nil}, '+'}] = tokenize(":+"),
57 [{atom, {1, 1, nil}, '-'}] = tokenize(":-"),
58 [{atom, {1, 1, nil}, '*'}] = tokenize(":*"),
59 [{atom, {1, 1, nil}, '/'}] = tokenize(":/"),
150 {')', {1, 4, nil}}] = tokenize("f.()").
[all …]
/dports/devel/cppcheck/cppcheck-2.6.3/test/
testvarid.cpp
846 tokenize(code); in varid36()
870 tokenize(code)); in varid38()
890 tokenize(code)); in varid40()
914 tokenize(code)); in varid42()
926 tokenize(code)); in varid44()
932 tokenize(code)); in varid45()
964 tokenize(code)); in varid48()
1215 tokenize(code, "test.c"); in varid_cpp_keywords_in_c_code2()
1533 tokenize(code)); in varid_in_class4()
1543 tokenize(code)); in varid_in_class5()
[all …]
/dports/devel/cppcheck-gui/cppcheck-2.6.3/test/
testvarid.cpp
846 tokenize(code); in varid36()
870 tokenize(code)); in varid38()
890 tokenize(code)); in varid40()
914 tokenize(code)); in varid42()
926 tokenize(code)); in varid44()
932 tokenize(code)); in varid45()
964 tokenize(code)); in varid48()
1215 tokenize(code, "test.c"); in varid_cpp_keywords_in_c_code2()
1533 tokenize(code)); in varid_in_class4()
1543 tokenize(code)); in varid_in_class5()
[all …]
/dports/games/abstreet/abstreet-0.2.9-49-g74aca40c0/cargo-crates/simplecss-0.2.0/tests/
declaration_tokenizer.rs
3 macro_rules! tokenize { macro
25 tokenize!(tokenize_01, "", );
27 tokenize!(tokenize_02, " ", );
29 tokenize!(tokenize_03, "/**/", );
31 tokenize!(tokenize_04, "color:red",
35 tokenize!(tokenize_05, "color:red;",
39 tokenize!(tokenize_06, "color:red ",
110 tokenize!(tokenize_25, "color:", );
123 tokenize!(tokenize_29, "color:0.5",
127 tokenize!(tokenize_30, "color:.5",
[all …]
selector_tokenizer.rs
3 macro_rules! tokenize { macro
17 tokenize!(tokenize_01, "*",
21 tokenize!(tokenize_02, "div",
25 tokenize!(tokenize_03, "#div",
29 tokenize!(tokenize_04, ".div",
33 tokenize!(tokenize_05, "[id]",
74 tokenize!(tokenize_15, "div p",
88 tokenize!(tokenize_17, "div>p",
118 tokenize!(tokenize_22, "div *",
170 tokenize!(tokenize_31, "div,",
[all …]
/dports/textproc/cast2gif/cast2gif-0.1.0/cargo-crates/simplecss-0.2.0/tests/
declaration_tokenizer.rs
3 macro_rules! tokenize { macro
25 tokenize!(tokenize_01, "", );
27 tokenize!(tokenize_02, " ", );
29 tokenize!(tokenize_03, "/**/", );
31 tokenize!(tokenize_04, "color:red",
35 tokenize!(tokenize_05, "color:red;",
39 tokenize!(tokenize_06, "color:red ",
110 tokenize!(tokenize_25, "color:", );
123 tokenize!(tokenize_29, "color:0.5",
127 tokenize!(tokenize_30, "color:.5",
[all …]
selector_tokenizer.rs
3 macro_rules! tokenize { macro
17 tokenize!(tokenize_01, "*",
21 tokenize!(tokenize_02, "div",
25 tokenize!(tokenize_03, "#div",
29 tokenize!(tokenize_04, ".div",
33 tokenize!(tokenize_05, "[id]",
74 tokenize!(tokenize_15, "div p",
88 tokenize!(tokenize_17, "div>p",
118 tokenize!(tokenize_22, "div *",
170 tokenize!(tokenize_31, "div,",
[all …]
/dports/astro/p5-Astro-App-Satpass2/Astro-App-Satpass2-0.049/t/
tokenize.t
24 tokenize( 'foo', [ [ 'foo' ], {} ] )
34 tokenize( undef, [ [ qw{ bar } ] ], 'tokenize remainder of source' );
131 tokenize( q{x~}, [ [ 'x~' ], {} ] )
284 tokenize( q<${#plural[*]}>, [ )
288 tokenize( q<${#plural[0]}>, [ )
292 tokenize( q<${#plural[1]}>, [ )
296 tokenize( q<${#plural[2]}>, [ )
300 tokenize( q<${#plural[3]}>, [ )
385 tokenize( q<${plural[*]:1}>, [ )
423 tokenize( '$0', [ [ $0 ], {} ] )
[all …]
/dports/cad/padring/padring-b2a64ab/src/lef/
lefreader.cpp
258 m_curtok = tokenize(name); in parseMacro()
268 m_curtok = tokenize(m_tokstr); in parseMacro()
350 m_curtok = tokenize(name); in parsePin()
459 m_curtok = tokenize(xnum); in parseOrigin()
466 m_curtok = tokenize(ynum); in parseOrigin()
534 m_curtok = tokenize(xnum); in parseSize()
548 m_curtok = tokenize(ynum); in parseSize()
616 m_curtok = tokenize(xnum); in parseForeign()
623 m_curtok = tokenize(ynum); in parseForeign()
693 m_curtok = tokenize(use); in parseUse()
[all …]
/dports/lang/ruby26/ruby-2.6.9/test/psych/
test_scalar_scanner.rb
38 assert_equal x, @ss.tokenize(x)
41 assert_equal x, @ss.tokenize(x)
44 assert_equal x, @ss.tokenize(x)
54 assert_equal x, @ss.tokenize(x)
59 token = @ss.tokenize date
74 assert ss.tokenize('.nan').nan?
82 assert_nil ss.tokenize('null')
83 assert_nil ss.tokenize('~')
84 assert_nil ss.tokenize('')
88 assert_equal :foo, ss.tokenize(':foo')
[all …]
/dports/math/py-patsy/patsy-0.5.2/patsy/
tokens.py
13 import tokenize
32 if pytype == tokenize.ENDMARKER:
35 assert pytype != tokenize.NL
36 if pytype == tokenize.NEWLINE:
39 if pytype == tokenize.ERRORTOKEN:
43 if pytype == tokenize.COMMENT:
48 except tokenize.TokenError as e:
103 assert token_type not in (tokenize.INDENT, tokenize.DEDENT,
105 if token_type == tokenize.NEWLINE:
109 if token_type in (tokenize.NAME, tokenize.NUMBER, tokenize.STRING):
[all …]
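
patsy's tokens.py walks Python's stdlib tokenize module, switching on the token-type constants visible above. A minimal sketch of that loop, using only the documented generate_tokens API:

    import io
    import tokenize

    # Each token carries a type constant (tokenize.NAME, tokenize.NUMBER, ...)
    # and its source text, which is what tokens.py dispatches on above.
    for tok in tokenize.generate_tokens(io.StringIO("x + 1\n").readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
    # NAME 'x', OP '+', NUMBER '1', NEWLINE '\n', ENDMARKER ''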
/dports/textproc/py-tinycss/tinycss-0.4/tinycss/tests/
test_tokenizer.py
34 (tokenize,) + test_data
123 if tokenize is None: # pragma: no cover
142 def test_positions(tokenize): argument
144 if tokenize is None: # pragma: no cover
159 (tokenize,) + test_data
218 if tokenize is None: # pragma: no cover
238 (tokenize,) + test_data
260 if tokenize is None: # pragma: no cover
269 (tokenize, test_data)
289 (tokenize, test_data)
[all …]
/dports/lang/tcl86/tcl8.6.12/pkgs/tdbc1.1.3/tests/
tokenize.test
1 # tokenize.test --
10 test tokenize-1.0 {wrong args} \
12 ::tdbc::tokenize
17 test tokenize-1.1 {wrong args} \
19 ::tdbc::tokenize foo bar
25 ::tdbc::tokenize {SELECT :a, ':b' FROM y}
29 ::tdbc::tokenize {SELECT :foo::VARCHAR}
48 test tokenize-4.0 {unterminated comment} {
52 test tokenize-4.1 {unterminated quote} {
56 test tokenize-4.2 {unterminated quote} {
[all …]
/dports/databases/tdbc/tdbc1.1.3/tests/
tokenize.test
1 # tokenize.test --
10 test tokenize-1.0 {wrong args} \
12 ::tdbc::tokenize
17 test tokenize-1.1 {wrong args} \
19 ::tdbc::tokenize foo bar
25 ::tdbc::tokenize {SELECT :a, ':b' FROM y}
29 ::tdbc::tokenize {SELECT :foo::VARCHAR}
48 test tokenize-4.0 {unterminated comment} {
52 test tokenize-4.1 {unterminated quote} {
56 test tokenize-4.2 {unterminated quote} {
[all …]
/dports/benchmarks/hyperfine/hyperfine-1.12.0/src/
tokenize.rs
1 pub fn tokenize(values: &str) -> Vec<String> { in tokenize() function
35 assert_eq!(tokenize(r""), vec![""]); in test_tokenize_single_value()
36 assert_eq!(tokenize(r"foo"), vec!["foo"]); in test_tokenize_single_value()
37 assert_eq!(tokenize(r" "), vec![" "]); in test_tokenize_single_value()
39 assert_eq!(tokenize(r"\,"), vec![","]); in test_tokenize_single_value()
40 assert_eq!(tokenize(r"\,\,\,"), vec![",,,"]); in test_tokenize_single_value()
41 assert_eq!(tokenize(r"\n"), vec![r"\n"]); in test_tokenize_single_value()
42 assert_eq!(tokenize(r"\\"), vec![r"\"]); in test_tokenize_single_value()
43 assert_eq!(tokenize(r"\\\,"), vec![r"\,"]); in test_tokenize_single_value()
57 assert_eq!(tokenize(r",bar"), vec!["", "bar"]); in test_tokenize_empty_values()
[all …]
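
A minimal Python sketch of the splitting rules these Rust tests assert (a hypothetical port, not hyperfine's code): values split on unescaped commas, the escapes \, and \\ yield a literal comma and backslash, and any other backslash sequence passes through verbatim.

    def tokenize(values: str) -> list[str]:
        # Hypothetical re-implementation of the behavior the tests pin down.
        tokens, i = [""], 0
        while i < len(values):
            c = values[i]
            if c == "\\" and i + 1 < len(values) and values[i + 1] in (",", "\\"):
                tokens[-1] += values[i + 1]   # \, -> ,  and  \\ -> \
                i += 2
            elif c == ",":
                tokens.append("")             # unescaped comma starts a new value
                i += 1
            else:
                tokens[-1] += c               # anything else (incl. \n) verbatim
                i += 1
        return tokens

    assert tokenize(r"\,\,\,") == [",,,"]
    assert tokenize(r"\n") == [r"\n"]
    assert tokenize(r",bar") == ["", "bar"]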
/dports/sysutils/py-mitogen/mitogen-0.2.9/mitogen/
minify.py
41 from mitogen.compat import tokenize
43 import tokenize
62 return tokenize.untokenize(tokens)
75 if typ in (tokenize.NL, tokenize.NEWLINE):
76 if prev_typ in (tokenize.NL, tokenize.NEWLINE):
100 if typ in (tokenize.NL, tokenize.COMMENT):
102 elif typ in (tokenize.DEDENT, tokenize.INDENT, tokenize.STRING):
110 if t[0] in (tokenize.DEDENT, tokenize.INDENT):
119 if typ == tokenize.NEWLINE:
132 if typ == tokenize.INDENT:
[all …]
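
mitogen's minify.py filters a stdlib tokenize stream and rebuilds source with untokenize; a minimal sketch of the comment-stripping part alone (the excerpt shows the real module also handles blank lines, INDENT/DEDENT, and docstrings):

    import io
    import tokenize

    def strip_comments(source: str) -> str:
        # Drop COMMENT tokens; untokenize pads from the surviving tokens'
        # positions, so the comment's columns come back as spaces.
        tokens = [t for t in tokenize.generate_tokens(io.StringIO(source).readline)
                  if t.type != tokenize.COMMENT]
        return tokenize.untokenize(tokens)

    print(strip_comments("x = 1  # set x\n"))  # 'x = 1' plus padding spaces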
/dports/multimedia/py-mido/mido-1.2.10/tests/
test_tokenizer.py
3 def tokenize(midi_bytes): function
8 assert tokenize([0x90, 1, 2]) == [[0x90, 1, 2]]
12 assert tokenize([0xf0, 1, 2, 3, 0xf7]) == [[0xf0, 1, 2, 3, 0xf7]]
16 assert tokenize([0xf0, 0xf7]) == [[0xf0, 0xf7]]
20 assert tokenize([0xf8]) == [[0xf8]]
24 assert tokenize([0xf8, 0xf8]) == [[0xf8], [0xf8]]
29 assert tokenize([0x90, 1, 0xf8, 2]) == [[0xf8]]
39 assert tokenize([0xf0, 1, 0xf8, 2, 0xf7]) == [[0xf8], [0xf0, 1, 2, 0xf7]]
40 assert tokenize([0xf0, 0xf8, 0xf7]) == [[0xf8], [0xf0, 0xf7]]
45 assert tokenize([0xf0, 0x90, 1, 2, 0xf7]) == [[0x90, 1, 2]]
[all …]
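
A minimal Python sketch of the rules the excerpted assertions pin down (a hypothetical helper, not mido's implementation): system realtime bytes (0xf8-0xff) become standalone one-byte messages immediately, interleave inside sysex without disturbing it, but abort a partial channel message; a status byte inside sysex abandons the sysex.

    def tokenize(midi_bytes):
        # Hypothetical re-implementation matching the assertions above.
        messages, current = [], []
        for byte in midi_bytes:
            if 0xf8 <= byte <= 0xff:            # realtime: emit immediately
                messages.append([byte])
                if current and current[0] != 0xf0:
                    current = []                # aborts a partial channel message
            elif byte == 0xf0:                  # sysex start
                current = [byte]
            elif byte == 0xf7:                  # sysex end
                if current and current[0] == 0xf0:
                    messages.append(current + [byte])
                current = []
            elif byte >= 0x80:                  # channel status byte
                current = [byte]                # (abandons any open sysex)
            elif current:                       # data byte
                current.append(byte)
                if current[0] < 0xf0 and len(current) == 3:
                    messages.append(current)    # assume 2-data-byte messages
                    current = []
        return messages

    assert tokenize([0x90, 1, 2]) == [[0x90, 1, 2]]
    assert tokenize([0xf0, 1, 0xf8, 2, 0xf7]) == [[0xf8], [0xf0, 1, 2, 0xf7]]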
/dports/devel/p5-Data-SExpression/Data-SExpression-0.41/t/
01-lexer.t
15 sub tokenize {
56 is_deeply(tokenize(q{a . b}),
61 is_deeply(tokenize(q{""}),
64 is_deeply(tokenize(q{("")}),
69 is_deeply(tokenize(q{("") ("")}),
77 is_deeply(tokenize(q{("") (" ")}),
85 is_deeply(tokenize(q{("a") ("b")}),
94 is_deeply(tokenize(qq{"\n"}),
97 is_deeply(tokenize(qq{"aa\n"}),
100 is_deeply(tokenize(qq{"\nbb"}),
[all …]
/dports/devel/py-dask/dask-2021.11.2/dask/tests/
test_base.py
133 assert tokenize(a) == tokenize(a)
146 assert tokenize(x) != tokenize(x)
262 assert tokenize(a) == tokenize(b)
264 assert tokenize(a) != tokenize(b)
269 assert tokenize(a) != tokenize(b)
271 assert tokenize(a) == tokenize(b)
363 assert tokenize(a) == tokenize(a)
364 assert tokenize(a) != tokenize(b)
404 assert tokenize(a) == tokenize(b)
405 assert tokenize(a) != tokenize(c)
[all …]
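
dask.base.tokenize, exercised above, deterministically hashes arbitrary Python objects into hex digests used as task keys; equal inputs hash equal, and the container type participates in the hash:

    from dask.base import tokenize

    a = tokenize([1, 2, 3])
    assert a == tokenize([1, 2, 3])   # deterministic for equal inputs
    assert a != tokenize((1, 2, 3))   # list vs. tuple hash differently
    print(a)                          # a hex digest string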
/dports/devel/elixir-gettext/gettext-0.15.0/test/gettext/po/
tokenizer_test.exs
9 assert tokenize(str) ==
18 assert tokenize(str) ==
28 assert tokenize(str) ==
81 assert tokenize(str) ==
110 assert tokenize(str) ==
123 assert tokenize(str) ==
131 assert tokenize(str) ==
139 assert tokenize(str) ==
147 assert tokenize(str) ==
161 assert tokenize(str) ==
[all …]
/dports/lang/python39/Python-3.9.9/Parser/pgen/
metaparser.py
4 import tokenize # from stdlib
13 tokenize.NAME: "NAME",
14 tokenize.STRING: "STRING",
15 tokenize.NEWLINE: "NEWLINE",
16 tokenize.NL: "NL",
17 tokenize.OP: "OP",
18 tokenize.ENDMARKER: "ENDMARKER",
19 tokenize.COMMENT: "COMMENT",
67 while self.type in (tokenize.NAME, tokenize.STRING) or self.value in ("(", "["):
111 elif self.type in (tokenize.NAME, tokenize.STRING):
[all …]
/dports/textproc/py-nltk/nltk-3.4.1/nltk/tokenize/
__init__.py
67 from nltk.tokenize.mwe import MWETokenizer
68 from nltk.tokenize.punkt import PunktSentenceTokenizer
69 from nltk.tokenize.regexp import (
78 from nltk.tokenize.repp import ReppTokenizer
79 from nltk.tokenize.sexpr import SExprTokenizer, sexpr_tokenize
80 from nltk.tokenize.simple import (
86 from nltk.tokenize.texttiling import TextTilingTokenizer
87 from nltk.tokenize.toktok import ToktokTokenizer
88 from nltk.tokenize.treebank import TreebankWordTokenizer
90 from nltk.tokenize.stanford_segmenter import StanfordSegmenter
[all …]
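
nltk.tokenize re-exports the tokenizer classes from the submodules listed above; the usual entry point is word_tokenize, which layers the Treebank word rules over Punkt sentence splitting (the punkt model must be downloaded first):

    from nltk.tokenize import word_tokenize  # needs nltk.download('punkt') once

    print(word_tokenize("Good muffins cost $3.88 in New York."))
    # ['Good', 'muffins', 'cost', '$', '3.88', 'in', 'New', 'York', '.']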
/dports/textproc/google-translate-cli/translate-shell-0.9.6.12/test/
TestParser.awk
9 tokenize(tokens, "0")
15 tokenize(tokens, "3.14")
21 tokenize(tokens, "Atom")
33 tokenize(tokens, "\"\\\"\"")
39 tokenize(tokens, "(QUOTE A)")
48 tokenize(tokens, "1 + 2 * 3")
77 tokenize(tokens, "0")
83 tokenize(tokens, "null")
89 tokenize(tokens, "[42]")
124 tokenize(tokens, "{}")
[all …]
/dports/databases/sqlite3/sqlite-src-3350500/test/
tokenize.test
20 do_test tokenize-1.1 {
23 do_test tokenize-1.2 {
26 do_test tokenize-1.3 {
29 do_test tokenize-1.4 {
32 do_test tokenize-1.5 {
35 do_test tokenize-1.6 {
38 do_test tokenize-1.7 {
41 do_test tokenize-1.8 {
44 do_test tokenize-1.9 {
57 do_test tokenize-2.1 {
[all …]
