1 //! This code has been generated by running the `build_pycompat_tokenizer.py` script
2 //! in the repository root. Please do not edit it, as your edits will be destroyed
3 //! upon re-running code generation.
4
5 use tokenize::Tokenizer;
6
tokenize_assert(test_str: &str, comparison: Vec<&str>)7 fn tokenize_assert(test_str: &str, comparison: Vec<&str>) {
8 let tokens: Vec<String> = Tokenizer::new(test_str).collect();
9 assert_eq!(tokens, comparison, "Tokenizing mismatch for `{}`", test_str);
10 }
11
#[test]
fn test_tokenize0() {
    tokenize_assert(
        "Thu Sep 25 10:36:28",
        vec!["Thu", " ", "Sep", " ", "25", " ", "10", ":", "36", ":", "28"],
    );
}
19
#[test]
fn test_tokenize1() {
    tokenize_assert("Sep 10:36:28", vec!["Sep", " ", "10", ":", "36", ":", "28"]);
}
25
#[test]
fn test_tokenize2() {
    tokenize_assert("10:36:28", vec!["10", ":", "36", ":", "28"]);
}
31
#[test]
fn test_tokenize3() {
    tokenize_assert("10:36", vec!["10", ":", "36"]);
}
37
#[test]
fn test_tokenize4() {
    tokenize_assert("Sep 2003", vec!["Sep", " ", "2003"]);
}
43
#[test]
fn test_tokenize5() {
    tokenize_assert("Sep", vec!["Sep"]);
}
49
#[test]
fn test_tokenize6() {
    tokenize_assert("2003", vec!["2003"]);
}
55
#[test]
fn test_tokenize7() {
    tokenize_assert("10h36m28.5s", vec!["10", "h", "36", "m", "28.5", "s"]);
}
61
#[test]
fn test_tokenize8() {
    tokenize_assert("10h36m28s", vec!["10", "h", "36", "m", "28", "s"]);
}
67
#[test]
fn test_tokenize9() {
    tokenize_assert("10h36m", vec!["10", "h", "36", "m"]);
}
73
#[test]
fn test_tokenize10() {
    tokenize_assert("10h", vec!["10", "h"]);
}
79
#[test]
fn test_tokenize11() {
    tokenize_assert("10 h 36", vec!["10", " ", "h", " ", "36"]);
}
85
#[test]
fn test_tokenize12() {
    tokenize_assert("10 h 36.5", vec!["10", " ", "h", " ", "36.5"]);
}
91
#[test]
fn test_tokenize13() {
    tokenize_assert("36 m 5", vec!["36", " ", "m", " ", "5"]);
}
97
#[test]
fn test_tokenize14() {
    tokenize_assert("36 m 5 s", vec!["36", " ", "m", " ", "5", " ", "s"]);
}
103
#[test]
fn test_tokenize15() {
    tokenize_assert("36 m 05", vec!["36", " ", "m", " ", "05"]);
}
109
#[test]
fn test_tokenize16() {
    tokenize_assert("36 m 05 s", vec!["36", " ", "m", " ", "05", " ", "s"]);
}
115
#[test]
fn test_tokenize17() {
    tokenize_assert("10h am", vec!["10", "h", " ", "am"]);
}
121
#[test]
fn test_tokenize18() {
    tokenize_assert("10h pm", vec!["10", "h", " ", "pm"]);
}
127
#[test]
fn test_tokenize19() {
    tokenize_assert("10am", vec!["10", "am"]);
}
133
#[test]
fn test_tokenize20() {
    tokenize_assert("10pm", vec!["10", "pm"]);
}
139
#[test]
fn test_tokenize21() {
    tokenize_assert("10:00 am", vec!["10", ":", "00", " ", "am"]);
}
145
#[test]
fn test_tokenize22() {
    tokenize_assert("10:00 pm", vec!["10", ":", "00", " ", "pm"]);
}
151
#[test]
fn test_tokenize23() {
    tokenize_assert("10:00am", vec!["10", ":", "00", "am"]);
}
157
#[test]
fn test_tokenize24() {
    tokenize_assert("10:00pm", vec!["10", ":", "00", "pm"]);
}
163
#[test]
fn test_tokenize25() {
    tokenize_assert("10:00a.m", vec!["10", ":", "00", "a", ".", "m"]);
}
169
#[test]
fn test_tokenize26() {
    tokenize_assert("10:00p.m", vec!["10", ":", "00", "p", ".", "m"]);
}
175
#[test]
fn test_tokenize27() {
    tokenize_assert("10:00a.m.", vec!["10", ":", "00", "a", ".", "m", "."]);
}
181
#[test]
fn test_tokenize28() {
    tokenize_assert("10:00p.m.", vec!["10", ":", "00", "p", ".", "m", "."]);
}
187
#[test]
fn test_tokenize29() {
    tokenize_assert("October", vec!["October"]);
}
193
#[test]
fn test_tokenize30() {
    tokenize_assert("31-Dec-00", vec!["31", "-", "Dec", "-", "00"]);
}
199
#[test]
fn test_tokenize31() {
    tokenize_assert("0:01:02", vec!["0", ":", "01", ":", "02"]);
}
205
#[test]
fn test_tokenize32() {
    tokenize_assert(
        "12h 01m02s am",
        vec!["12", "h", " ", "01", "m", "02", "s", " ", "am"],
    );
}
211
#[test]
fn test_tokenize33() {
    tokenize_assert("12:08 PM", vec!["12", ":", "08", " ", "PM"]);
}
217
#[test]
fn test_tokenize34() {
    tokenize_assert("01h02m03", vec!["01", "h", "02", "m", "03"]);
}
223
#[test]
fn test_tokenize35() {
    tokenize_assert("01h02", vec!["01", "h", "02"]);
}
229
#[test]
fn test_tokenize36() {
    tokenize_assert("01h02s", vec!["01", "h", "02", "s"]);
}
235
#[test]
fn test_tokenize37() {
    tokenize_assert("01m02", vec!["01", "m", "02"]);
}
241
#[test]
fn test_tokenize38() {
    tokenize_assert("01m02h", vec!["01", "m", "02", "h"]);
}
247
#[test]
fn test_tokenize39() {
    tokenize_assert(
        "2004 10 Apr 11h30m",
        vec!["2004", " ", "10", " ", "Apr", " ", "11", "h", "30", "m"],
    );
}
253
#[test]
fn test_tokenize40() {
    tokenize_assert("Sep 03", vec!["Sep", " ", "03"]);
}
259
#[test]
fn test_tokenize41() {
    tokenize_assert("Sep of 03", vec!["Sep", " ", "of", " ", "03"]);
}
265
#[test]
fn test_tokenize42() {
    tokenize_assert("02:17NOV2017", vec!["02", ":", "17", "NOV", "2017"]);
}
271
#[test]
fn test_tokenize43() {
    tokenize_assert(
        "Thu Sep 10:36:28",
        vec!["Thu", " ", "Sep", " ", "10", ":", "36", ":", "28"],
    );
}
277
#[test]
fn test_tokenize44() {
    tokenize_assert("Thu 10:36:28", vec!["Thu", " ", "10", ":", "36", ":", "28"]);
}
283
#[test]
fn test_tokenize45() {
    tokenize_assert("Wed", vec!["Wed"]);
}
289
#[test]
fn test_tokenize46() {
    tokenize_assert("Wednesday", vec!["Wednesday"]);
}
295
#[test]
fn test_tokenize47() {
    tokenize_assert(
        "Thu Sep 25 10:36:28 2003",
        vec!["Thu", " ", "Sep", " ", "25", " ", "10", ":", "36", ":", "28", " ", "2003"],
    );
}
303
#[test]
fn test_tokenize48() {
    tokenize_assert(
        "Thu Sep 25 2003",
        vec!["Thu", " ", "Sep", " ", "25", " ", "2003"],
    );
}
309
#[test]
fn test_tokenize49() {
    tokenize_assert(
        "2003-09-25T10:49:41",
        vec!["2003", "-", "09", "-", "25", "T", "10", ":", "49", ":", "41"],
    );
}
317
#[test]
fn test_tokenize50() {
    tokenize_assert(
        "2003-09-25T10:49",
        vec!["2003", "-", "09", "-", "25", "T", "10", ":", "49"],
    );
}
323
#[test]
fn test_tokenize51() {
    tokenize_assert("2003-09-25T10", vec!["2003", "-", "09", "-", "25", "T", "10"]);
}
329
#[test]
fn test_tokenize52() {
    tokenize_assert("2003-09-25", vec!["2003", "-", "09", "-", "25"]);
}
335
#[test]
fn test_tokenize53() {
    tokenize_assert("20030925T104941", vec!["20030925", "T", "104941"]);
}
341
#[test]
fn test_tokenize54() {
    tokenize_assert("20030925T1049", vec!["20030925", "T", "1049"]);
}
347
#[test]
fn test_tokenize55() {
    tokenize_assert("20030925T10", vec!["20030925", "T", "10"]);
}
353
#[test]
fn test_tokenize56() {
    tokenize_assert("20030925", vec!["20030925"]);
}
359
#[test]
fn test_tokenize57() {
    // Note: the comma decimal separator in the input is normalized to `.` by the tokenizer.
    tokenize_assert(
        "2003-09-25 10:49:41,502",
        vec!["2003", "-", "09", "-", "25", " ", "10", ":", "49", ":", "41.502"],
    );
}
367
#[test]
fn test_tokenize58() {
    tokenize_assert("199709020908", vec!["199709020908"]);
}
373
#[test]
fn test_tokenize59() {
    tokenize_assert("19970902090807", vec!["19970902090807"]);
}
379
#[test]
fn test_tokenize60() {
    tokenize_assert("2003-09-25", vec!["2003", "-", "09", "-", "25"]);
}
385
#[test]
fn test_tokenize61() {
    tokenize_assert("09-25-2003", vec!["09", "-", "25", "-", "2003"]);
}
391
#[test]
fn test_tokenize62() {
    tokenize_assert("25-09-2003", vec!["25", "-", "09", "-", "2003"]);
}
397
#[test]
fn test_tokenize63() {
    tokenize_assert("10-09-2003", vec!["10", "-", "09", "-", "2003"]);
}
403
#[test]
fn test_tokenize64() {
    tokenize_assert("10-09-03", vec!["10", "-", "09", "-", "03"]);
}
409
#[test]
fn test_tokenize65() {
    tokenize_assert("2003.09.25", vec!["2003", ".", "09", ".", "25"]);
}
415
#[test]
fn test_tokenize66() {
    tokenize_assert("09.25.2003", vec!["09", ".", "25", ".", "2003"]);
}
421
#[test]
fn test_tokenize67() {
    tokenize_assert("25.09.2003", vec!["25", ".", "09", ".", "2003"]);
}
427
#[test]
fn test_tokenize68() {
    tokenize_assert("10.09.2003", vec!["10", ".", "09", ".", "2003"]);
}
433
#[test]
fn test_tokenize69() {
    tokenize_assert("10.09.03", vec!["10", ".", "09", ".", "03"]);
}
439
#[test]
fn test_tokenize70() {
    tokenize_assert("2003/09/25", vec!["2003", "/", "09", "/", "25"]);
}
445
#[test]
fn test_tokenize71() {
    tokenize_assert("09/25/2003", vec!["09", "/", "25", "/", "2003"]);
}
451
#[test]
fn test_tokenize72() {
    tokenize_assert("25/09/2003", vec!["25", "/", "09", "/", "2003"]);
}
457
#[test]
fn test_tokenize73() {
    tokenize_assert("10/09/2003", vec!["10", "/", "09", "/", "2003"]);
}
463
#[test]
fn test_tokenize74() {
    tokenize_assert("10/09/03", vec!["10", "/", "09", "/", "03"]);
}
469
#[test]
fn test_tokenize75() {
    tokenize_assert("2003 09 25", vec!["2003", " ", "09", " ", "25"]);
}
475
#[test]
fn test_tokenize76() {
    tokenize_assert("09 25 2003", vec!["09", " ", "25", " ", "2003"]);
}
481
#[test]
fn test_tokenize77() {
    tokenize_assert("25 09 2003", vec!["25", " ", "09", " ", "2003"]);
}
487
#[test]
fn test_tokenize78() {
    tokenize_assert("10 09 2003", vec!["10", " ", "09", " ", "2003"]);
}
493
#[test]
fn test_tokenize79() {
    tokenize_assert("10 09 03", vec!["10", " ", "09", " ", "03"]);
}
499
#[test]
fn test_tokenize80() {
    tokenize_assert("25 09 03", vec!["25", " ", "09", " ", "03"]);
}
505
#[test]
fn test_tokenize81() {
    tokenize_assert("03 25 Sep", vec!["03", " ", "25", " ", "Sep"]);
}
511
#[test]
fn test_tokenize82() {
    tokenize_assert("25 03 Sep", vec!["25", " ", "03", " ", "Sep"]);
}
517
#[test]
fn test_tokenize83() {
    // Runs of whitespace are emitted as individual single-space tokens.
    tokenize_assert(
        "  July   4 ,  1976   12:01:02   am  ",
        vec![
            " ", " ", "July", " ", " ", " ", "4", " ", ",", " ", " ", "1976", " ", " ", " ",
            "12", ":", "01", ":", "02", " ", " ", " ", "am", " ", " ",
        ],
    );
}
526
#[test]
fn test_tokenize84() {
    tokenize_assert(
        "Wed, July 10, '96",
        vec!["Wed", ",", " ", "July", " ", "10", ",", " ", "'", "96"],
    );
}
532
#[test]
fn test_tokenize85() {
    tokenize_assert(
        "1996.July.10 AD 12:08 PM",
        vec!["1996", ".", "July", ".", "10", " ", "AD", " ", "12", ":", "08", " ", "PM"],
    );
}
540
#[test]
fn test_tokenize86() {
    tokenize_assert("July 4, 1976", vec!["July", " ", "4", ",", " ", "1976"]);
}
546
#[test]
fn test_tokenize87() {
    tokenize_assert("7 4 1976", vec!["7", " ", "4", " ", "1976"]);
}
552
#[test]
fn test_tokenize88() {
    tokenize_assert("4 jul 1976", vec!["4", " ", "jul", " ", "1976"]);
}
558
#[test]
fn test_tokenize89() {
    tokenize_assert("7-4-76", vec!["7", "-", "4", "-", "76"]);
}
564
#[test]
fn test_tokenize90() {
    tokenize_assert("19760704", vec!["19760704"]);
}
570
#[test]
fn test_tokenize91() {
    tokenize_assert(
        "0:01:02 on July 4, 1976",
        vec!["0", ":", "01", ":", "02", " ", "on", " ", "July", " ", "4", ",", " ", "1976"],
    );
}
578
#[test]
fn test_tokenize92() {
    tokenize_assert(
        "0:01:02 on July 4, 1976",
        vec!["0", ":", "01", ":", "02", " ", "on", " ", "July", " ", "4", ",", " ", "1976"],
    );
}
586
#[test]
fn test_tokenize93() {
    tokenize_assert(
        "July 4, 1976 12:01:02 am",
        vec!["July", " ", "4", ",", " ", "1976", " ", "12", ":", "01", ":", "02", " ", "am"],
    );
}
594
#[test]
fn test_tokenize94() {
    // ctime-style string: the double space before the day yields two space tokens.
    tokenize_assert(
        "Mon Jan  2 04:24:27 1995",
        vec!["Mon", " ", "Jan", " ", " ", "2", " ", "04", ":", "24", ":", "27", " ", "1995"],
    );
}
602
#[test]
fn test_tokenize95() {
    tokenize_assert(
        "04.04.95 00:22",
        vec!["04", ".", "04", ".", "95", " ", "00", ":", "22"],
    );
}
608
#[test]
fn test_tokenize96() {
    tokenize_assert(
        "Jan 1 1999 11:23:34.578",
        vec!["Jan", " ", "1", " ", "1999", " ", "11", ":", "23", ":", "34.578"],
    );
}
616
#[test]
fn test_tokenize97() {
    tokenize_assert("950404 122212", vec!["950404", " ", "122212"]);
}
622
#[test]
fn test_tokenize98() {
    tokenize_assert(
        "3rd of May 2001",
        vec!["3", "rd", " ", "of", " ", "May", " ", "2001"],
    );
}
628
#[test]
fn test_tokenize99() {
    tokenize_assert(
        "5th of March 2001",
        vec!["5", "th", " ", "of", " ", "March", " ", "2001"],
    );
}
634
#[test]
fn test_tokenize100() {
    tokenize_assert(
        "1st of May 2003",
        vec!["1", "st", " ", "of", " ", "May", " ", "2003"],
    );
}
640
#[test]
fn test_tokenize101() {
    tokenize_assert(
        "0099-01-01T00:00:00",
        vec!["0099", "-", "01", "-", "01", "T", "00", ":", "00", ":", "00"],
    );
}
648
#[test]
fn test_tokenize102() {
    tokenize_assert(
        "0031-01-01T00:00:00",
        vec!["0031", "-", "01", "-", "01", "T", "00", ":", "00", ":", "00"],
    );
}
656
#[test]
fn test_tokenize103() {
    tokenize_assert(
        "20080227T21:26:01.123456789",
        vec!["20080227", "T", "21", ":", "26", ":", "01.123456789"],
    );
}
662
#[test]
fn test_tokenize104() {
    tokenize_assert("13NOV2017", vec!["13", "NOV", "2017"]);
}
668
#[test]
fn test_tokenize105() {
    tokenize_assert("0003-03-04", vec!["0003", "-", "03", "-", "04"]);
}
674
#[test]
fn test_tokenize106() {
    tokenize_assert("December.0031.30", vec!["December", ".", "0031", ".", "30"]);
}
680
#[test]
fn test_tokenize107() {
    tokenize_assert("090107", vec!["090107"]);
}
686
#[test]
fn test_tokenize108() {
    tokenize_assert("2015-15-May", vec!["2015", "-", "15", "-", "May"]);
}
692
#[test]
fn test_tokenize109() {
    tokenize_assert(
        "Thu Sep 25 10:36:28 BRST 2003",
        vec![
            "Thu", " ", "Sep", " ", "25", " ", "10", ":", "36", ":", "28", " ", "BRST", " ",
            "2003",
        ],
    );
}
700
#[test]
fn test_tokenize110() {
    tokenize_assert(
        "2003 10:36:28 BRST 25 Sep Thu",
        vec![
            "2003", " ", "10", ":", "36", ":", "28", " ", "BRST", " ", "25", " ", "Sep", " ",
            "Thu",
        ],
    );
}
708
#[test]
fn test_tokenize111() {
    tokenize_assert(
        "Thu, 25 Sep 2003 10:49:41 -0300",
        vec![
            "Thu", ",", " ", "25", " ", "Sep", " ", "2003", " ", "10", ":", "49", ":", "41",
            " ", "-", "0300",
        ],
    );
}
717
#[test]
fn test_tokenize112() {
    tokenize_assert(
        "2003-09-25T10:49:41.5-03:00",
        vec![
            "2003", "-", "09", "-", "25", "T", "10", ":", "49", ":", "41.5", "-", "03", ":",
            "00",
        ],
    );
}
725
#[test]
fn test_tokenize113() {
    tokenize_assert(
        "2003-09-25T10:49:41-03:00",
        vec![
            "2003", "-", "09", "-", "25", "T", "10", ":", "49", ":", "41", "-", "03", ":",
            "00",
        ],
    );
}
733
#[test]
fn test_tokenize114() {
    tokenize_assert(
        "20030925T104941.5-0300",
        vec!["20030925", "T", "104941.5", "-", "0300"],
    );
}
739
#[test]
fn test_tokenize115() {
    tokenize_assert(
        "20030925T104941-0300",
        vec!["20030925", "T", "104941", "-", "0300"],
    );
}
745
#[test]
fn test_tokenize116() {
    tokenize_assert(
        "2018-08-10 10:00:00 UTC+3",
        vec![
            "2018", "-", "08", "-", "10", " ", "10", ":", "00", ":", "00", " ", "UTC", "+",
            "3",
        ],
    );
}
753
#[test]
fn test_tokenize117() {
    tokenize_assert(
        "2018-08-10 03:36:47 PM GMT-4",
        vec![
            "2018", "-", "08", "-", "10", " ", "03", ":", "36", ":", "47", " ", "PM", " ",
            "GMT", "-", "4",
        ],
    );
}
762
#[test]
fn test_tokenize118() {
    tokenize_assert(
        "2018-08-10 04:15:00 AM Z-02:00",
        vec![
            "2018", "-", "08", "-", "10", " ", "04", ":", "15", ":", "00", " ", "AM", " ",
            "Z", "-", "02", ":", "00",
        ],
    );
}
771
#[test]
fn test_tokenize119() {
    tokenize_assert("10-09-2003", vec!["10", "-", "09", "-", "2003"]);
}
777
#[test]
fn test_tokenize120() {
    tokenize_assert("10.09.2003", vec!["10", ".", "09", ".", "2003"]);
}
783
#[test]
fn test_tokenize121() {
    tokenize_assert("10/09/2003", vec!["10", "/", "09", "/", "2003"]);
}
789
#[test]
fn test_tokenize122() {
    tokenize_assert("10 09 2003", vec!["10", " ", "09", " ", "2003"]);
}
795
#[test]
fn test_tokenize123() {
    tokenize_assert("090107", vec!["090107"]);
}
801
#[test]
fn test_tokenize124() {
    tokenize_assert("2015 09 25", vec!["2015", " ", "09", " ", "25"]);
}
807
#[test]
fn test_tokenize125() {
    tokenize_assert("10-09-03", vec!["10", "-", "09", "-", "03"]);
}
813
#[test]
fn test_tokenize126() {
    tokenize_assert("10.09.03", vec!["10", ".", "09", ".", "03"]);
}
819
#[test]
fn test_tokenize127() {
    tokenize_assert("10/09/03", vec!["10", "/", "09", "/", "03"]);
}
825
#[test]
fn test_tokenize128() {
    tokenize_assert("10 09 03", vec!["10", " ", "09", " ", "03"]);
}
831
#[test]
fn test_tokenize129() {
    tokenize_assert("090107", vec!["090107"]);
}
837
#[test]
fn test_tokenize130() {
    tokenize_assert("2015 09 25", vec!["2015", " ", "09", " ", "25"]);
}
843
#[test]
fn test_tokenize131() {
    tokenize_assert("090107", vec!["090107"]);
}
849
#[test]
fn test_tokenize132() {
    tokenize_assert("2015 09 25", vec!["2015", " ", "09", " ", "25"]);
}
855
#[test]
fn test_tokenize133() {
    tokenize_assert("April 2009", vec!["April", " ", "2009"]);
}
861
#[test]
fn test_tokenize134() {
    tokenize_assert("Feb 2007", vec!["Feb", " ", "2007"]);
}
867
#[test]
fn test_tokenize135() {
    tokenize_assert("Feb 2008", vec!["Feb", " ", "2008"]);
}
873
#[test]
fn test_tokenize136() {
    tokenize_assert(
        "Thu Sep 25 10:36:28 BRST 2003",
        vec![
            "Thu", " ", "Sep", " ", "25", " ", "10", ":", "36", ":", "28", " ", "BRST", " ",
            "2003",
        ],
    );
}
881
#[test]
fn test_tokenize137() {
    tokenize_assert(
        "1996.07.10 AD at 15:08:56 PDT",
        vec![
            "1996", ".", "07", ".", "10", " ", "AD", " ", "at", " ", "15", ":", "08", ":",
            "56", " ", "PDT",
        ],
    );
}
890
#[test]
fn test_tokenize138() {
    tokenize_assert(
        "Tuesday, April 12, 1952 AD 3:30:42pm PST",
        vec![
            "Tuesday", ",", " ", "April", " ", "12", ",", " ", "1952", " ", "AD", " ", "3",
            ":", "30", ":", "42", "pm", " ", "PST",
        ],
    );
}
899
#[test]
fn test_tokenize139() {
    tokenize_assert(
        "November 5, 1994, 8:15:30 am EST",
        vec![
            "November", " ", "5", ",", " ", "1994", ",", " ", "8", ":", "15", ":", "30",
            " ", "am", " ", "EST",
        ],
    );
}
908
#[test]
fn test_tokenize140() {
    tokenize_assert(
        "1994-11-05T08:15:30-05:00",
        vec![
            "1994", "-", "11", "-", "05", "T", "08", ":", "15", ":", "30", "-", "05", ":",
            "00",
        ],
    );
}
916
#[test]
fn test_tokenize141() {
    tokenize_assert(
        "1994-11-05T08:15:30Z",
        vec!["1994", "-", "11", "-", "05", "T", "08", ":", "15", ":", "30", "Z"],
    );
}
924
#[test]
fn test_tokenize142() {
    tokenize_assert(
        "1976-07-04T00:01:02Z",
        vec!["1976", "-", "07", "-", "04", "T", "00", ":", "01", ":", "02", "Z"],
    );
}
932
#[test]
fn test_tokenize143() {
    tokenize_assert(
        "Tue Apr 4 00:22:12 PDT 1995",
        vec![
            "Tue", " ", "Apr", " ", "4", " ", "00", ":", "22", ":", "12", " ", "PDT", " ",
            "1995",
        ],
    );
}
940
#[test]
fn test_tokenize144() {
    tokenize_assert(
        "Today is 25 of September of 2003, exactly at 10:49:41 with timezone -03:00.",
        vec![
            "Today", " ", "is", " ", "25", " ", "of", " ", "September", " ", "of", " ",
            "2003", ",", " ", "exactly", " ", "at", " ", "10", ":", "49", ":", "41", " ",
            "with", " ", "timezone", " ", "-", "03", ":", "00", ".",
        ],
    );
}
984
#[test]
fn test_tokenize145() {
    tokenize_assert(
        "Today is 25 of September of 2003, exactly at 10:49:41 with timezone -03:00.",
        vec![
            "Today", " ", "is", " ", "25", " ", "of", " ", "September", " ", "of", " ",
            "2003", ",", " ", "exactly", " ", "at", " ", "10", ":", "49", ":", "41", " ",
            "with", " ", "timezone", " ", "-", "03", ":", "00", ".",
        ],
    );
}
1028
#[test]
fn test_tokenize146() {
    tokenize_assert(
        "I have a meeting on March 1, 1974",
        vec![
            "I", " ", "have", " ", "a", " ", "meeting", " ", "on", " ", "March", " ", "1",
            ",", " ", "1974",
        ],
    );
}
1037
#[test]
fn test_tokenize147() {
    tokenize_assert(
        "On June 8th, 2020, I am going to be the first man on Mars",
        vec![
            "On", " ", "June", " ", "8", "th", ",", " ", "2020", ",", " ", "I", " ", "am",
            " ", "going", " ", "to", " ", "be", " ", "the", " ", "first", " ", "man", " ",
            "on", " ", "Mars",
        ],
    );
}
1050
#[test]
fn test_tokenize148() {
    tokenize_assert(
        "Meet me at the AM/PM on Sunset at 3:00 AM on December 3rd, 2003",
        vec![
            "Meet", " ", "me", " ", "at", " ", "the", " ", "AM", "/", "PM", " ", "on", " ",
            "Sunset", " ", "at", " ", "3", ":", "00", " ", "AM", " ", "on", " ",
            "December", " ", "3", "rd", ",", " ", "2003",
        ],
    );
}
1063
#[test]
fn test_tokenize149() {
    tokenize_assert(
        "Meet me at 3:00 AM on December 3rd, 2003 at the AM/PM on Sunset",
        vec![
            "Meet", " ", "me", " ", "at", " ", "3", ":", "00", " ", "AM", " ", "on", " ",
            "December", " ", "3", "rd", ",", " ", "2003", " ", "at", " ", "the", " ",
            "AM", "/", "PM", " ", "on", " ", "Sunset",
        ],
    );
}
1076
#[test]
fn test_tokenize150() {
    tokenize_assert(
        "Jan 29, 1945 14:45 AM I going to see you there?",
        vec![
            "Jan", " ", "29", ",", " ", "1945", " ", "14", ":", "45", " ", "AM", " ", "I",
            " ", "going", " ", "to", " ", "see", " ", "you", " ", "there", "?",
        ],
    );
}
1085
#[test]
fn test_tokenize151() {
    // Trailing separator with no following digits is kept as its own token.
    tokenize_assert(
        "2017-07-17 06:15:",
        vec!["2017", "-", "07", "-", "17", " ", "06", ":", "15", ":"],
    );
}
1091