Lines Matching refs:Tokenizer

24 TEST(Tokenizer, HTTPResponse)
26 Tokenizer::Token t;
30 Tokenizer p(
39 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_INTEGER, t));
40 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_INTEGER);
43 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_INTEGER, t));
44 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_INTEGER);
48 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_INTEGER, t));
49 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_INTEGER);
54 while (p.Next(t) && t.Type() != Tokenizer::TOKEN_EOL)
64 p.Claim(h, Tokenizer::INCLUDE_LAST);
70 while (p.Next(t) && t.Type() != Tokenizer::TOKEN_EOL)
79 p.Claim(h, Tokenizer::INCLUDE_LAST);
84 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_INTEGER, t));
91 while (p.Next(t) && t.Type() != Tokenizer::TOKEN_EOF)
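
The HTTPResponse test above walks an HTTP status line and its headers with Check(), Next() and Claim(). A minimal sketch of the status-line part, built from the calls visible in the listing; ParseStatusLine is a hypothetical helper and Token::AsInteger() is an assumed accessor (it does not appear in the matched lines):

    #include <cstdint>
    #include "mozilla/Tokenizer.h"

    // Hypothetical helper: parse "HTTP/<major>.<minor> <status> ..." from a
    // response line. A sketch, not the test's exact code.
    static bool ParseStatusLine(const nsACString& aLine, uint32_t* aStatus) {
      mozilla::Tokenizer p(aLine);
      mozilla::Tokenizer::Token t;

      if (!p.Check(mozilla::Tokenizer::Token::Word("HTTP"_ns)) ||
          !p.Check(mozilla::Tokenizer::Token::Char('/')) ||
          !p.Check(mozilla::Tokenizer::TOKEN_INTEGER, t) ||  // major version
          !p.Check(mozilla::Tokenizer::Token::Char('.')) ||
          !p.Check(mozilla::Tokenizer::TOKEN_INTEGER, t)) {  // minor version
        return false;
      }
      p.SkipWhites();
      if (!p.Check(mozilla::Tokenizer::TOKEN_INTEGER, t)) {  // status code
        return false;
      }
      *aStatus = static_cast<uint32_t>(t.AsInteger());  // assumed accessor
      return true;
    }
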
98 TEST(Tokenizer, Main)
100 Tokenizer::Token t;
104 Tokenizer p("test123 ,15 \t*\r\n%xx,-15\r\r"_ns);
107 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_WORD);
110 Tokenizer::Token u;
115 EXPECT_FALSE(p.Check(Tokenizer::Token::Number(123)));
121 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(15)));
124 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(15)));
128 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_INTEGER);
144 p.Record(Tokenizer::EXCLUDE_LAST);
151 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_CHAR);
155 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_EOL);
158 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_CHAR);
162 p.Claim(claim, Tokenizer::EXCLUDE_LAST);
164 p.Claim(claim, Tokenizer::INCLUDE_LAST);
170 p.Record(Tokenizer::INCLUDE_LAST);
176 p.Claim(claim, Tokenizer::INCLUDE_LAST);
180 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_CHAR);
186 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_INTEGER);
190 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_EOL);
193 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_EOL);
196 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_EOF);
202 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_EOF);
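
Main drives the tokenizer through mixed input ("test123 ,15 \t*\r\n%xx,-15\r\r") token by token. The basic consumption loop it relies on looks like this; a sketch using only Next() and the token types asserted above:

    #include "mozilla/Tokenizer.h"

    static void DumpTokens(const nsACString& aInput) {
      mozilla::Tokenizer p(aInput);
      mozilla::Tokenizer::Token t;
      // Next() returns false once the input is exhausted; the test above
      // additionally stops on an explicit TOKEN_EOF.
      while (p.Next(t) && t.Type() != mozilla::Tokenizer::TOKEN_EOF) {
        switch (t.Type()) {
          case mozilla::Tokenizer::TOKEN_WORD:     // e.g. "test123"
          case mozilla::Tokenizer::TOKEN_INTEGER:  // e.g. 15
          case mozilla::Tokenizer::TOKEN_CHAR:     // e.g. ','
          default:
            break;
        }
      }
    }
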
212 TEST(Tokenizer, Main16)

326 TEST(Tokenizer, SingleWord)
330 Tokenizer p("test123"_ns);

336 TEST(Tokenizer, EndingAfterNumber)
340 Tokenizer p("123"_ns);
342 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(123)));

346 TEST(Tokenizer, BadInteger)
348 Tokenizer::Token t;
352 Tokenizer p("189234891274981758617846178651647620587135"_ns);
355 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_ERROR);
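
BadInteger feeds a 42-digit number; the value cannot be represented, so the tokenizer reports TOKEN_ERROR rather than a silently truncated TOKEN_INTEGER. A sketch of that check:

    #include "mozilla/Tokenizer.h"

    static bool OverflowIsAnError() {
      mozilla::Tokenizer p("189234891274981758617846178651647620587135"_ns);
      mozilla::Tokenizer::Token t;
      p.Next(t);
      // The oversized literal comes back as an error token.
      return t.Type() == mozilla::Tokenizer::TOKEN_ERROR;
    }
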
359 TEST(Tokenizer, CheckExpectedTokenValue)
361 Tokenizer::Token t;
365 Tokenizer p("blue velvet"_ns);
367 EXPECT_FALSE(p.Check(Tokenizer::TOKEN_INTEGER, t));
369 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_WORD, t));
372 EXPECT_FALSE(p.Check(Tokenizer::TOKEN_WORD, t));
376 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_WORD, t));
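
CheckExpectedTokenValue hinges on a failed Check() not consuming anything, so the same token can be re-tested against another expectation. A sketch:

    #include "mozilla/Tokenizer.h"

    static bool CheckDoesNotConsumeOnFailure() {
      mozilla::Tokenizer p("blue velvet"_ns);
      mozilla::Tokenizer::Token t;
      // "blue" is a word, so asking for an integer fails without advancing...
      bool intFailed = !p.Check(mozilla::Tokenizer::TOKEN_INTEGER, t);
      // ...and the very same word then satisfies a matching Check().
      return intFailed && p.Check(mozilla::Tokenizer::TOKEN_WORD, t);
    }
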
384 TEST(Tokenizer, HasFailed)
386 Tokenizer::Token t;
390 Tokenizer p1("a b"_ns);
392 while (p1.Next(t) && t.Type() != Tokenizer::TOKEN_CHAR)
396 Tokenizer p2("a b ?!c"_ns);
412 EXPECT_TRUE(p2.Check(Tokenizer::TOKEN_CHAR, t));
416 t = Tokenizer::Token::Char('!');
420 while (p2.Next(t) && t.Type() != Tokenizer::TOKEN_CHAR)
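
HasFailed() apparently reports whether the most recent check or read failed, which is what the p1/p2 assertions probe. A sketch under that assumption:

    #include "mozilla/Tokenizer.h"

    static void ProbeHasFailed() {
      mozilla::Tokenizer p("a b"_ns);
      mozilla::Tokenizer::Token t;

      p.Check(mozilla::Tokenizer::Token::Char('?'));  // no '?' here: fails
      bool failed = p.HasFailed();                    // assumed: true

      p.Next(t);                                      // reads the word "a"
      bool cleared = !p.HasFailed();                  // assumed: true again

      (void)failed;
      (void)cleared;
    }
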
425 TEST(Tokenizer, Construction)
429 Tokenizer p1(a);
436 Tokenizer p1(a);
444 Tokenizer p1(a);
452 Tokenizer p1(a);
458 Tokenizer p1(nsDependentCString("test"));
464 Tokenizer p1("test"_ns);
470 Tokenizer p1("test");
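
The Construction test (lines 425-470) instantiates the tokenizer from each accepted source type; collected into one sketch:

    #include "mozilla/Tokenizer.h"
    #include "nsString.h"

    static void ConstructionVariants() {
      nsAutoCString a("test");
      mozilla::Tokenizer fromAuto(a);                          // any nsACString
      mozilla::Tokenizer fromDep(nsDependentCString("test"));  // wrapped C string
      mozilla::Tokenizer fromLiteral("test"_ns);               // literal suffix
      mozilla::Tokenizer fromChars("test");                    // plain const char*
    }
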
476 TEST(Tokenizer, Customization)
478 Tokenizer p1("test-custom*words and\tdefault-whites"_ns, nullptr, "-*");
485 Tokenizer p2("test, custom,whites"_ns, ", ");
493 Tokenizer p3("test, custom, whites-and#word-chars"_ns, ",", "-#");
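
Customization exercises the two optional constructor arguments; from the inputs above, the second argument replaces the whitespace set and the third adds extra word characters. A sketch under that reading:

    #include "mozilla/Tokenizer.h"

    static void CustomizedTokenizers() {
      mozilla::Tokenizer::Token t;

      // Default whitespace, but '-' and '*' count as word characters, so
      // "test-custom*words" is a single TOKEN_WORD.
      mozilla::Tokenizer p1("test-custom*words and\tdefault-whites"_ns,
                            nullptr, "-*");
      p1.Check(mozilla::Tokenizer::TOKEN_WORD, t);

      // ',' and ' ' are the only whitespace characters here.
      mozilla::Tokenizer p2("test, custom,whites"_ns, ", ");
      p2.Check(mozilla::Tokenizer::TOKEN_WORD, t);  // "test"
    }
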
505 TEST(Tokenizer, ShortcutChecks)
507 Tokenizer p("test1 test2,123");

528 TEST(Tokenizer, ReadCharClassified)
530 Tokenizer p("abc");

544 TEST(Tokenizer, ClaimSubstring)
546 Tokenizer p(" abc ");
553 p.Claim(v, Tokenizer::INCLUDE_LAST);
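
Record() and Claim() capture the raw input between two read positions; ClaimSubstring claims the word without its surrounding spaces. A sketch:

    #include "mozilla/Tokenizer.h"

    static void ClaimTheWord() {
      mozilla::Tokenizer p(" abc "_ns);
      mozilla::Tokenizer::Token t;

      p.SkipWhites();
      p.Record(mozilla::Tokenizer::EXCLUDE_LAST);  // start recording here
      p.Check(mozilla::Tokenizer::TOKEN_WORD, t);  // consume "abc"

      nsAutoCString claimed;
      p.Claim(claimed, mozilla::Tokenizer::INCLUDE_LAST);  // claimed == "abc"
    }
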
559 TEST(Tokenizer, Fragment)
562 Tokenizer p(str);
565 Tokenizer::Token t1, t2;
568 EXPECT_TRUE(t1.Type() == Tokenizer::TOKEN_WORD);
573 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_WORD, t2));
578 EXPECT_TRUE(t1.Type() == Tokenizer::TOKEN_CHAR);
583 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_CHAR, t2));
587 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_WORD, t2));
593 EXPECT_TRUE(t1.Type() == Tokenizer::TOKEN_WORD);
597 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_CHAR, t2));
603 EXPECT_TRUE(t1.Type() == Tokenizer::TOKEN_CHAR);
608 EXPECT_TRUE(t1.Type() == Tokenizer::TOKEN_INTEGER);
612 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_WS, t2));
616 EXPECT_TRUE(p.Check(Tokenizer::TOKEN_EOF, t1));
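
The Fragment test compares tokens against pieces of the original buffer. The Token::Fragment() accessor it presumably uses (an assumption; it does not appear in the matched lines) would expose the exact source span each token was parsed from:

    #include "mozilla/Tokenizer.h"

    static void InspectFragments() {
      const char str[] = "word 123";
      mozilla::Tokenizer p(str);
      mozilla::Tokenizer::Token t;

      p.Next(t);  // TOKEN_WORD
      // Assumed accessor: the fragment points back into |str|, spanning "word".
      auto fragment = t.Fragment();
      (void)fragment;
    }
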
621 TEST(Tokenizer, SkipWhites)
623 Tokenizer p("Text1 \nText2 \nText3\n Text4\n ");
630 p.SkipWhites(Tokenizer::INCLUDE_NEW_LINE);
638 p.SkipWhites(Tokenizer::INCLUDE_NEW_LINE);
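
SkipWhites() stops at a line break unless INCLUDE_NEW_LINE is passed; that difference is what the assertions above probe. A sketch:

    #include "mozilla/Tokenizer.h"

    static void SkipWhitespaceVariants() {
      mozilla::Tokenizer::Token t;

      mozilla::Tokenizer p("Text1 \nText2"_ns);
      p.Check(mozilla::Tokenizer::TOKEN_WORD, t);  // "Text1"
      p.SkipWhites();                              // eats ' ', stops at '\n'
      p.Check(mozilla::Tokenizer::TOKEN_EOL, t);   // the newline is still a token

      mozilla::Tokenizer q("Text1 \nText2"_ns);
      q.Check(mozilla::Tokenizer::TOKEN_WORD, t);
      q.SkipWhites(mozilla::Tokenizer::INCLUDE_NEW_LINE);  // eats ' ' and '\n'
      q.Check(mozilla::Tokenizer::TOKEN_WORD, t);          // "Text2"
    }
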
642 TEST(Tokenizer, SkipCustomWhites)
644 Tokenizer p("Text1 \n\r\t.Text2 \n\r\t.", " \n\r\t.");

657 TEST(Tokenizer, IntegerReading)
666 Tokenizer p(MOZ_STRINGIFY(INT_6_BITS));
706 Tokenizer p(MOZ_STRINGIFY(INT_30_BITS));
737 Tokenizer p(MOZ_STRINGIFY(INT_32_BITS));
748 Tokenizer p(MOZ_STRINGIFY(INT_50_BITS));
763 Tokenizer p(STR_INT_MORE_THAN_64_BITS);
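
IntegerReading feeds stringified constants of various widths (6, 30, 32, 50 and more than 64 bits) into what is presumably the templated, range-checked ReadInteger (an assumption based on the test's shape; only the constructors are matched above). The point is that a value that does not fit the target type is rejected:

    #include <cstdint>
    #include "mozilla/Tokenizer.h"

    static void ReadCheckedIntegers() {
      mozilla::Tokenizer p("300");
      uint8_t narrow = 0;
      // Assumed behavior: 300 does not fit 8 bits, so ReadInteger() fails
      // without advancing, and a wider re-read can still succeed.
      if (!p.ReadInteger(&narrow)) {
        uint16_t wide = 0;
        p.ReadInteger(&wide);  // 300 fits 16 bits
      }
    }
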
772 TEST(Tokenizer, ReadUntil)
774 Tokenizer p("Hello;test 4,");
776 EXPECT_TRUE(p.ReadUntil(Tokenizer::Token::Char(';'), f));
781 p.ReadUntil(Tokenizer::Token::Char(';'), f, Tokenizer::INCLUDE_LAST));
785 EXPECT_FALSE(p.ReadUntil(Tokenizer::Token::Char('!'), f));
789 EXPECT_TRUE(p.ReadUntil(Tokenizer::Token::Word("test"_ns), f));
793 EXPECT_TRUE(p.ReadUntil(Tokenizer::Token::Word("test"_ns), f,
794 Tokenizer::INCLUDE_LAST));
796 EXPECT_TRUE(p.ReadUntil(Tokenizer::Token::Char(','), f));
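
ReadUntil() collects everything up to a delimiting token, which can be a character or a whole word. A sketch of the paths asserted above:

    #include "mozilla/Tokenizer.h"

    static void ReadUpToDelimiter() {
      mozilla::Tokenizer p("Hello;test 4,");
      nsAutoCString f;

      // Stops at the ';' (the EXCLUDE_LAST default); f == "Hello".
      bool found = p.ReadUntil(mozilla::Tokenizer::Token::Char(';'), f);

      // With Tokenizer::INCLUDE_LAST the delimiter is kept in |f| as well,
      // and the delimiter may be a word token such as Token::Word("test"_ns).
      // When the delimiter never occurs, ReadUntil() returns false (line 785).
      (void)found;
    }
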
800 TEST(Tokenizer, SkipUntil)
803 Tokenizer p("test1,test2,,,test3");
805 p.SkipUntil(Tokenizer::Token::Char(','));
809 p.SkipUntil(Tokenizer::Token::Char(',')); // must not move
815 Tokenizer::Token::Char(',')); // must not move, we are on the ',' char
822 p.SkipUntil(Tokenizer::Token::Char(','));
827 Tokenizer p("test0,test1,test2");
829 p.SkipUntil(Tokenizer::Token::Char(','));
832 p.SkipUntil(Tokenizer::Token::Char(','));
838 p.SkipUntil(Tokenizer::Token::Char(','));
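
SkipUntil() advances to a delimiting token but does not consume it; as the inline comments above note, a repeated SkipUntil for the same token "must not move". A sketch:

    #include "mozilla/Tokenizer.h"

    static void SkipToDelimiter() {
      mozilla::Tokenizer p("test1,test2");
      mozilla::Tokenizer::Token t;

      p.SkipUntil(mozilla::Tokenizer::Token::Char(','));  // now sitting on ','
      p.SkipUntil(mozilla::Tokenizer::Token::Char(','));  // must not move

      // Consume the delimiter explicitly to make progress.
      p.Check(mozilla::Tokenizer::Token::Char(','));
      p.Check(mozilla::Tokenizer::TOKEN_WORD, t);         // "test2"
    }
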
846 TEST(Tokenizer, Custom)
848 Tokenizer p(
851 Tokenizer::Token c1 =
852 p.AddCustomToken("custom-1", Tokenizer::CASE_INSENSITIVE);
853 Tokenizer::Token c2 = p.AddCustomToken("custom-2", Tokenizer::CASE_SENSITIVE);
859 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(1)));
872 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(1)));
875 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(0)));
882 EXPECT_TRUE(p.Check(Tokenizer::Token::Number(2)));

887 TEST(Tokenizer, CustomRaw)
889 Tokenizer p(
892 Tokenizer::Token c1 =
893 p.AddCustomToken("custom-1", Tokenizer::CASE_INSENSITIVE);
894 Tokenizer::Token c2 = p.AddCustomToken("custom-2", Tokenizer::CASE_SENSITIVE);
898 p.SetTokenizingMode(Tokenizer::Mode::CUSTOM_ONLY);
900 Tokenizer::Token t;
903 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_RAW);
909 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_RAW);
915 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_RAW);
921 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_RAW);
927 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_RAW);
933 EXPECT_TRUE(t.Type() == Tokenizer::TOKEN_RAW);
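
Custom and CustomRaw register application-defined tokens and then restrict tokenization to them. A sketch combining both, assuming custom tokens are matched with the ordinary Check()/Next() calls (the matched lines only show registration and mode switching):

    #include "mozilla/Tokenizer.h"

    static void CustomTokens() {
      mozilla::Tokenizer p("custom-1 some text custom-2");

      mozilla::Tokenizer::Token c1 =
          p.AddCustomToken("custom-1", mozilla::Tokenizer::CASE_INSENSITIVE);
      mozilla::Tokenizer::Token c2 =
          p.AddCustomToken("custom-2", mozilla::Tokenizer::CASE_SENSITIVE);

      bool sawC1 = p.Check(c1);  // assumed: matched like any other token

      // In CUSTOM_ONLY mode everything between custom tokens is delivered
      // as opaque TOKEN_RAW chunks instead of words/numbers/chars.
      p.SetTokenizingMode(mozilla::Tokenizer::Mode::CUSTOM_ONLY);
      mozilla::Tokenizer::Token t;
      while (p.Next(t) && t.Type() != mozilla::Tokenizer::TOKEN_EOF) {
        if (t.Type() == mozilla::Tokenizer::TOKEN_RAW) {
          // raw span between the registered tokens
        }
        if (t.Equals(c2)) {
          // hit the case-sensitive custom token (assumed comparison helper)
        }
      }
      (void)sawC1;
    }
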
939 TEST(Tokenizer, Incremental)

988 TEST(Tokenizer, IncrementalRollback)

1041 TEST(Tokenizer, IncrementalNeedMoreInput)

1100 TEST(Tokenizer, IncrementalCustom)
1124 custom = i.AddCustomToken("some-test", Tokenizer::CASE_SENSITIVE);

1135 TEST(Tokenizer, IncrementalCustomRaw)
1153 i.SetTokenizingMode(Tokenizer::Mode::FULL);
1157 i.SetTokenizingMode(Tokenizer::Mode::CUSTOM_ONLY);
1176 custom = i.AddCustomToken("test2", Tokenizer::CASE_SENSITIVE);
1177 i.SetTokenizingMode(Tokenizer::Mode::CUSTOM_ONLY);

1191 TEST(Tokenizer, IncrementalCustomRemove)
1215 custom = i.AddCustomToken("custom1", Tokenizer::CASE_SENSITIVE);

1224 TEST(Tokenizer, IncrementalBuffering1)
1260 custom = i.AddCustomToken("aaa", Tokenizer::CASE_SENSITIVE);
1263 Unused << i.AddCustomToken("bb", Tokenizer::CASE_SENSITIVE);
1264 i.SetTokenizingMode(Tokenizer::Mode::CUSTOM_ONLY);

1284 TEST(Tokenizer, IncrementalBuffering2)
1316 custom = i.AddCustomToken("aaa", Tokenizer::CASE_SENSITIVE);
1319 Unused << i.AddCustomToken("bbbbb", Tokenizer::CASE_SENSITIVE);
1320 i.SetTokenizingMode(Tokenizer::Mode::CUSTOM_ONLY);
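
The Incremental* tests above drive mozilla::IncrementalTokenizer, which delivers tokens to a consumer callback as input arrives in pieces; partial matches are buffered until enough input accumulates (the IncrementalBuffering tests) and the consumer can roll back or ask for more input. A hedged sketch of the basic flow, assuming the callback-style constructor and the FeedInput()/FinishInput() entry points from mozilla/IncrementalTokenizer.h:

    #include "mozilla/IncrementalTokenizer.h"

    static void IncrementalFlow() {
      using mozilla::IncrementalTokenizer;
      using Token = mozilla::Tokenizer::Token;

      // Assumed consumer signature: invoked once per recognized token.
      IncrementalTokenizer i(
          [](Token const& aToken, IncrementalTokenizer& aTokenizer) -> nsresult {
            // Inspect aToken.Type() here; TOKEN_EOF arrives at FinishInput().
            return NS_OK;
          });

      i.FeedInput("some-te"_ns);  // "te" may be an unfinished word: buffered
      i.FeedInput("st data"_ns);  // now "test" can be delivered
      i.FinishInput();            // flushes the tail and signals the end
    }
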
1334 TEST(Tokenizer, RecordAndReadUntil)
1336 Tokenizer t("aaaa,bbbb");
1340 EXPECT_TRUE(t.ReadUntil(mozilla::Tokenizer::Token::Char(','), subject));
1345 EXPECT_FALSE(t.ReadUntil(mozilla::Tokenizer::Token::Char(','), subject));
1349 EXPECT_FALSE(t.ReadUntil(mozilla::Tokenizer::Token::Char(','), subject));
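
RecordAndReadUntil also covers the failure path: once the only ',' is consumed, further ReadUntil calls for it fail. A sketch:

    #include "mozilla/Tokenizer.h"

    static void ReadUntilFailurePath() {
      mozilla::Tokenizer t("aaaa,bbbb");
      nsAutoCString subject;

      bool found = t.ReadUntil(mozilla::Tokenizer::Token::Char(','), subject);
      // found == true, subject == "aaaa"

      found = t.ReadUntil(mozilla::Tokenizer::Token::Char(','), subject);
      // found == false: there is no second ',' in the input
      (void)found;
    }
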
1355 TEST(Tokenizer, ReadIntegers)
1359 Tokenizer t("100,-100,200,-200,4294967295,-4294967295,-2147483647", nullptr,
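
ReadIntegers parses a comma-separated list that includes negative and 32-bit-boundary values, presumably through a signed, range-checked read helper such as ReadSignedInteger (an assumption; the matched lines only show the input string). A sketch under that assumption:

    #include <cstdint>
    #include "mozilla/Tokenizer.h"

    static void ReadSignedValues() {
      mozilla::Tokenizer t("100,-100");
      int32_t value = 0;

      t.ReadSignedInteger(&value);                    // assumed helper: 100
      t.Check(mozilla::Tokenizer::Token::Char(','));
      t.ReadSignedInteger(&value);                    // -100
    }
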
1417 TEST(Tokenizer, CheckPhrase)
1419 Tokenizer t("foo bar baz");
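
CheckPhrase verifies a multi-token sequence in "foo bar baz". Even without a dedicated helper, the same effect falls out of chained Check() calls over words and the whitespace between them; a sketch using only calls shown elsewhere in this listing:

    #include "mozilla/Tokenizer.h"

    static bool HasFooBarPrefix() {
      mozilla::Tokenizer t("foo bar baz");
      mozilla::Tokenizer::Token ws;
      return t.Check(mozilla::Tokenizer::Token::Word("foo"_ns)) &&
             t.Check(mozilla::Tokenizer::TOKEN_WS, ws) &&
             t.Check(mozilla::Tokenizer::Token::Word("bar"_ns));
    }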