1 use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
2 use std::str::{self, FromStr};
3 
/// `Ident::new` accepts ordinary identifiers, keywords, and the lone
/// underscore, and `to_string` reproduces the exact input text.
#[test]
fn idents() {
    for &name in &["String", "fn", "_"] {
        assert_eq!(Ident::new(name, Span::call_site()).to_string(), name);
    }
}
13 
/// `Ident::new_raw` prepends the `r#` raw-identifier prefix to the
/// displayed form, regardless of whether the name is a keyword.
#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    for &name in &["String", "fn", "_"] {
        let raw = Ident::new_raw(name, Span::call_site());
        assert_eq!(raw.to_string(), format!("r#{}", name));
    }
}
24 
/// An empty string is rejected by `Ident::new` with a message pointing
/// the caller at `Option<Ident>` instead.
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    let _ = Ident::new("", Span::call_site());
}
30 
/// A purely numeric string is rejected by `Ident::new`; numbers belong
/// in `Literal`.
#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    let _ = Ident::new("255", Span::call_site());
}
36 
/// A string containing a character that is not legal in an identifier
/// is rejected, and the message quotes the offending input.
#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    let _ = Ident::new("a#", Span::call_site());
}
42 
/// The raw prefix `r#` with nothing after it is not a valid identifier.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    Ident::new("r#", Span::call_site());
}
48 
/// A raw prefix followed by digits is still not a valid identifier.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    Ident::new("r#255", Span::call_site());
}
54 
/// Invalid characters after the raw prefix are rejected, and the
/// message quotes the full offending input including `r#`.
#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    let _ = Ident::new("r#a#", Span::call_site());
}
60 
/// A bare apostrophe (lifetime sigil with no name) is not a valid Ident.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    let _ = Ident::new("'", Span::call_site());
}
66 
/// A lifetime sigil followed by digits is not a valid Ident.
#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    let _ = Ident::new("'255", Span::call_site());
}
72 
/// A lifetime-like string containing an invalid character is rejected,
/// with the input quoted (apostrophe escaped) in the panic message.
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    let _ = Ident::new("'a#", Span::call_site());
}
78 
/// String literals render wrapped in double quotes, escaping embedded
/// double quotes but leaving apostrophes untouched.
#[test]
fn literal_string() {
    let cases = [
        ("foo", "\"foo\""),
        ("\"", "\"\\\"\""),
        ("didn't", "\"didn't\""),
    ];
    for &(input, rendered) in &cases {
        assert_eq!(Literal::string(input).to_string(), rendered);
    }
}
85 
/// Character literals render wrapped in single quotes, escaping an
/// embedded single quote but not a double quote.
#[test]
fn literal_character() {
    assert_eq!(Literal::character('x').to_string(), "'x'");
    assert_eq!(Literal::character('\'').to_string(), "'\\''");
    assert_eq!(Literal::character('"').to_string(), "'\"'");
}
92 
/// An unsuffixed float literal keeps a trailing `.0` so it still reads
/// as a float after printing.
#[test]
fn literal_float() {
    let lit = Literal::f32_unsuffixed(10.0);
    assert_eq!(lit.to_string(), "10.0");
}
97 
/// Valid literal suffixes stay fused with their literal (one token);
/// invalid suffix placements split into multiple tokens.
#[test]
fn literal_suffix() {
    // (input source, expected number of top-level tokens after parsing)
    let cases: &[(&str, usize)] = &[
        ("999u256", 1),
        ("999r#u256", 3),
        ("1.", 1),
        ("1.f32", 3),
        ("1.0_0", 1),
        ("1._0", 3),
        ("1._m", 3),
        ("\"\"s", 1),
        ("r\"\"r", 1),
        ("b\"\"b", 1),
        ("br\"\"br", 1),
        ("r#\"\"#r", 1),
        ("'c'c", 1),
        ("b'b'b", 1),
    ];
    for &(input, expected) in cases {
        let count = input.parse::<TokenStream>().unwrap().into_iter().count();
        assert_eq!(count, expected, "wrong token count for {:?}", input);
    }
}
119 
/// A suffixed negative literal tokenizes as a standalone `-` punct
/// (Alone spacing) followed by the unsigned suffixed literal.
#[test]
fn literal_iter_negative() {
    let negative_literal = Literal::i32_suffixed(-3);
    let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
    let mut iter = tokens.into_iter();
    match iter.next().unwrap() {
        TokenTree::Punct(punct) => {
            assert_eq!(punct.as_char(), '-');
            assert_eq!(punct.spacing(), Spacing::Alone);
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    match iter.next().unwrap() {
        TokenTree::Literal(literal) => {
            assert_eq!(literal.to_string(), "3i32");
        }
        unexpected => panic!("unexpected token {:?}", unexpected),
    }
    assert!(iter.next().is_none());
}
140 
/// Print/parse round trip: parsing a source string, printing the token
/// stream, and re-parsing the printed form must yield the same text.
#[test]
fn roundtrip() {
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        // `s` is already a String; parse it directly rather than via
        // a redundant `to_string()` copy.
        let s2 = s.parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
        1x
        1u80
        1f320
    ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}
179 
/// Inputs that must not parse as a `TokenStream`: a detached lifetime
/// sigil and raw-identifier prefixes on non-identifiers.
#[test]
fn fail() {
    for &input in &["' static", "r#1", "r#_"] {
        if let Ok(stream) = input.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", input, stream);
        }
    }
}
191 
/// Checks the line/column spans assigned to each token of a small
/// source snippet, including the five tokens a doc comment expands to.
#[cfg(span_locations)]
#[test]
fn span_test() {
    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}
216 
/// With no real source behind it, the call-site span reports line 1,
/// column 0 at both ends and an unreal "<unspecified>" source file.
#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    let span = Span::call_site();
    assert_eq!(span.start().line, 1);
    assert_eq!(span.start().column, 0);
    assert_eq!(span.end().line, 1);
    assert_eq!(span.end().column, 0);
    let file = span.source_file();
    assert_eq!(file.path().to_string_lossy(), "<unspecified>");
    assert!(!file.is_real());
}
231 
/// `Span::join` succeeds only for spans from the same source file, and
/// the joined span covers both constituent tokens.
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    // Parse a snippet and collect its top-level tokens.
    fn parse_tokens(src: &str) -> Vec<TokenTree> {
        src.parse::<TokenStream>().unwrap().into_iter().collect()
    }
    let source1 = parse_tokens("aaa\nbbb");
    let source2 = parse_tokens("ccc\nddd");

    // Separate parses produce distinct source files...
    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    // ...while tokens of one parse share a single file.
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let same_file = source1[0].span().join(source1[1].span());
    let cross_file = source1[0].span().join(source2[0].span());
    assert!(same_file.is_some());
    assert!(cross_file.is_none());

    // The joined span starts at the first token and ends at the second.
    let joined = same_file.unwrap();
    assert_eq!(joined.start().line, 1);
    assert_eq!(joined.start().column, 0);
    assert_eq!(joined.end().line, 2);
    assert_eq!(joined.end().column, 3);

    assert_eq!(joined.source_file(), source1[0].span().source_file());
}
269 
/// A malformed byte-character literal followed by control bytes must
/// return a parse error rather than panic the lexer.
#[test]
fn no_panic() {
    let bytes = b"b\'\xc2\x86  \x00\x00\x00^\"";
    let input = str::from_utf8(bytes).unwrap();
    assert!(input.parse::<TokenStream>().is_err());
}
275 
/// A punct immediately followed by a line comment gets `Alone` spacing,
/// since the comment separates it from any following token.
#[test]
fn op_before_comment() {
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}
287 
/// Parsing `r#dyn` yields a single Ident token whose display form keeps
/// the raw prefix.
#[test]
fn raw_identifier() {
    let mut tokens = TokenStream::from_str("r#dyn").unwrap().into_iter();
    let first = tokens.next().unwrap();
    match first {
        TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "r#dyn"),
        other => panic!("wrong token {:?}", other),
    }
    assert!(tokens.next().is_none());
}
297 
/// The `Debug` rendering of an `Ident` differs depending on whether
/// span locations are compiled in.
#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());
    let actual = format!("{:?}", ident);

    #[cfg(not(span_locations))]
    assert_eq!("Ident(proc_macro)", actual);

    #[cfg(span_locations)]
    assert_eq!("Ident { sym: proc_macro }", actual);
}
310 
/// The pretty `Debug` rendering of a `TokenStream` is checked against
/// golden strings. Two variants exist per cfg: one with trailing commas
/// (newer std formatting) and one without, selected at runtime by
/// inspecting how the actual output ends.
#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(span_locations))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                op: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
    ";

    #[cfg(not(span_locations))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    #[cfg(span_locations)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
    ";

    #[cfg(span_locations)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}
414 
/// A default-constructed `TokenStream` contains no tokens.
#[test]
fn default_tokenstream_is_empty() {
    assert!(TokenStream::default().is_empty());
}
421 
/// `tuple.0.0` currently lexes as ident, dot, then the float-looking
/// literal `0.0` — not two separate index operations.
#[test]
fn tuple_indexing() {
    // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
    let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
    for &expected in &["tuple", ".", "0.0"] {
        assert_eq!(expected, tokens.next().unwrap().to_string());
    }
    assert!(tokens.next().is_none());
}
431 
/// Span columns are counted in characters, so each ASCII snippet and
/// its non-ASCII twin (with multi-byte characters) must report
/// identical line/column ranges.
#[cfg(span_locations)]
#[test]
fn non_ascii_tokens() {
    check_spans("// abc", &[]);
    check_spans("// ábc", &[]);
    check_spans("// abc x", &[]);
    check_spans("// ábc x", &[]);
    check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
    check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
    check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
    check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
    check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
    check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
    check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
    check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
    check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
    check_spans("'a'", &[(1, 0, 1, 3)]);
    check_spans("'á'", &[(1, 0, 1, 3)]);
    check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
    check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
    check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
    check_spans("abc", &[(1, 0, 1, 3)]);
    check_spans("ábc", &[(1, 0, 1, 3)]);
    check_spans("ábć", &[(1, 0, 1, 3)]);
    check_spans("abc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábc// foo", &[(1, 0, 1, 3)]);
    check_spans("ábć// foo", &[(1, 0, 1, 3)]);
    check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
    check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
}
470 
/// Parses `p` and asserts that each produced token's span matches the
/// next expected `(start_line, start_col, end_line, end_col)` tuple,
/// consuming the expectations as it goes. Panics if any are left over.
#[cfg(span_locations)]
fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
    let ts = p.parse::<TokenStream>().unwrap();
    check_spans_internal(ts, &mut lines);
    assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
}
477 
/// Recursive worker for `check_spans`: pops one expected range per
/// token and compares it against the token's span, descending into the
/// contents of groups. Extra tokens beyond the expectations are ignored
/// (the caller catches leftover expectations instead).
#[cfg(span_locations)]
fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
    for i in ts {
        if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
            *lines = rest;

            let start = i.span().start();
            assert_eq!(start.line, sline, "sline did not match for {}", i);
            assert_eq!(start.column, scol, "scol did not match for {}", i);

            let end = i.span().end();
            assert_eq!(end.line, eline, "eline did not match for {}", i);
            assert_eq!(end.column, ecol, "ecol did not match for {}", i);

            if let TokenTree::Group(g) = i {
                // `stream()` already returns an owned TokenStream;
                // the former `.clone()` was a redundant copy.
                check_spans_internal(g.stream(), lines);
            }
        }
    }
}
498