extern crate proc_macro2;

use std::str::{self, FromStr};

use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};

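// `Ident::new` should accept ordinary names, keywords, and `_`, and print them back verbatim.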
#[test]
fn idents() {
    assert_eq!(
        Ident::new("String", Span::call_site()).to_string(),
        "String"
    );
    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
}

#[test]
#[cfg(procmacro2_semver_exempt)]
fn raw_idents() {
    assert_eq!(
        Ident::new_raw("String", Span::call_site()).to_string(),
        "r#String"
    );
    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
}

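// Inputs that cannot be identifiers (empty strings, bare numbers, stray punctuation,
// malformed raw identifiers, and malformed lifetimes) must make `Ident::new` panic
// with a descriptive message.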
#[test]
#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
fn ident_empty() {
    Ident::new("", Span::call_site());
}

#[test]
#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
fn ident_number() {
    Ident::new("255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"a#\" is not a valid Ident")]
fn ident_invalid() {
    Ident::new("a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_empty() {
    Ident::new("r#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn raw_ident_number() {
    Ident::new("r#255", Span::call_site());
}

#[test]
#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
fn raw_ident_invalid() {
    Ident::new("r#a#", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_empty() {
    Ident::new("'", Span::call_site());
}

#[test]
#[should_panic(expected = "not a valid Ident")]
fn lifetime_number() {
    Ident::new("'255", Span::call_site());
}

#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
    Ident::new("'a#", Span::call_site());
}

#[test]
fn literal_string() {
    assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
    assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
    assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
}

#[test]
fn literal_character() {
    assert_eq!(Literal::character('x').to_string(), "'x'");
    assert_eq!(Literal::character('\'').to_string(), "'\\''");
    assert_eq!(Literal::character('"').to_string(), "'\"'");
}

#[test]
fn literal_float() {
    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
}

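// Parsing a string into a TokenStream, printing it, and re-parsing the printed form
// should be stable: the second and later round trips must produce identical output.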
#[test]
fn roundtrip() {
    fn roundtrip(p: &str) {
        println!("parse: {}", p);
        let s = p.parse::<TokenStream>().unwrap().to_string();
        println!("first: {}", s);
        let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
        assert_eq!(s, s2);
    }
    roundtrip("a");
    roundtrip("<<");
    roundtrip("<<=");
    roundtrip(
        "
        1
        1.0
        1f32
        2f64
        1usize
        4isize
        4e10
        1_000
        1_0i32
        8u8
        9
        0
        0xffffffffffffffffffffffffffffffff
    ",
    );
    roundtrip("'a");
    roundtrip("'_");
    roundtrip("'static");
    roundtrip("'\\u{10__FFFF}'");
    roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
}

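// Inputs that are not valid token streams must fail to parse instead of being accepted.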
#[test]
fn fail() {
    fn fail(p: &str) {
        if let Ok(s) = p.parse::<TokenStream>() {
            panic!("should have failed to parse: {}\n{:#?}", p, s);
        }
    }
    fail("1x");
    fail("1u80");
    fail("1f320");
    fail("' static");
    fail("r#1");
    fail("r#_");
}

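// With the span_locations feature enabled, every token's start/end line and column
// should match the expected table, recursing into groups.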
#[cfg(span_locations)]
#[test]
fn span_test() {
    use proc_macro2::TokenTree;

    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span().start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span().end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                match i {
                    TokenTree::Group(ref g) => {
                        check_spans_internal(g.stream().clone(), lines);
                    }
                    _ => {}
                }
            }
        }
    }

    check_spans(
        "\
/// This is a document comment
testing 123
{
  testing 234
}",
        &[
            (1, 0, 1, 30),  // #
            (1, 0, 1, 30),  // [ ... ]
            (1, 0, 1, 30),  // doc
            (1, 0, 1, 30),  // =
            (1, 0, 1, 30),  // "This is..."
            (2, 0, 2, 7),   // testing
            (2, 8, 2, 11),  // 123
            (3, 0, 5, 1),   // { ... }
            (4, 2, 4, 9),   // testing
            (4, 10, 4, 13), // 234
        ],
    );
}

#[cfg(procmacro2_semver_exempt)]
#[cfg(not(nightly))]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string_lossy(), "<unspecified>");
    assert!(!source_file.is_real());
}

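// Joining spans should succeed within a single source file and return None across
// files; the joined span must cover both tokens and keep the original source file.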
#[cfg(procmacro2_semver_exempt)]
#[test]
fn span_join() {
    let source1 = "aaa\nbbb"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();
    let source2 = "ccc\nddd"
        .parse::<TokenStream>()
        .unwrap()
        .into_iter()
        .collect::<Vec<_>>();

    assert!(source1[0].span().source_file() != source2[0].span().source_file());
    assert_eq!(
        source1[0].span().source_file(),
        source1[1].span().source_file()
    );

    let joined1 = source1[0].span().join(source1[1].span());
    let joined2 = source1[0].span().join(source2[0].span());
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(
        joined1.unwrap().source_file(),
        source1[0].span().source_file()
    );
}

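// A garbled input must produce a parse error rather than panicking inside the lexer.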
#[test]
fn no_panic() {
    let s = str::from_utf8(b"b\'\xc2\x86  \x00\x00\x00^\"").unwrap();
    assert!(s.parse::<proc_macro2::TokenStream>().is_err());
}

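// Doc comments are lowered to `#[doc = "..."]` attribute tokens: an empty block
// comment yields no tokens, an outer doc comment yields `#` plus a bracketed group,
// and an inner doc comment is expected to additionally yield a `!`.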
#[test]
fn tricky_doc_comment() {
    let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.is_empty(), "not empty -- {:?}", tokens);

    let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
    match tokens[0] {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
        _ => panic!("wrong token {:?}", tokens[0]),
    }
    let mut tokens = match tokens[1] {
        proc_macro2::TokenTree::Group(ref tt) => {
            assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
            tt.stream().into_iter()
        }
        _ => panic!("wrong token {:?}", tokens[1]),
    };

    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
        t => panic!("wrong token {:?}", t),
    }
    match tokens.next().unwrap() {
        proc_macro2::TokenTree::Literal(ref tt) => {
            assert_eq!(tt.to_string(), "\" doc\"");
        }
        t => panic!("wrong token {:?}", t),
    }
    assert!(tokens.next().is_none());

    let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
    let tokens = stream.into_iter().collect::<Vec<_>>();
    assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}

#[test]
fn op_before_comment() {
    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Punct(tt) => {
            assert_eq!(tt.as_char(), '~');
            assert_eq!(tt.spacing(), Spacing::Alone);
        }
        wrong => panic!("wrong token {:?}", wrong),
    }
}

#[test]
fn raw_identifier() {
    let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
    match tts.next().unwrap() {
        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
        wrong => panic!("wrong token {:?}", wrong),
    }
    assert!(tts.next().is_none());
}

#[test]
fn test_debug_ident() {
    let ident = Ident::new("proc_macro", Span::call_site());

    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "Ident(proc_macro)";

    #[cfg(procmacro2_semver_exempt)]
    let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";

    assert_eq!(expected, format!("{:?}", ident));
}

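// The pretty {:#?} output of some compiler versions includes trailing commas, so
// this test accepts both the trailing-comma format and the older one without them.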
#[test]
fn test_debug_tokenstream() {
    let tts = TokenStream::from_str("[a + 1]").unwrap();

    #[cfg(not(procmacro2_semver_exempt))]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
            },
            Punct {
                op: '+',
                spacing: Alone,
            },
            Literal {
                lit: 1,
            },
        ],
    },
]\
    ";

    #[cfg(not(procmacro2_semver_exempt))]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a
            },
            Punct {
                op: '+',
                spacing: Alone
            },
            Literal {
                lit: 1
            }
        ]
    }
]\
    ";

    #[cfg(procmacro2_semver_exempt)]
    let expected = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3),
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5),
            },
            Literal {
                lit: 1,
                span: bytes(6..7),
            },
        ],
        span: bytes(1..8),
    },
]\
    ";

    #[cfg(procmacro2_semver_exempt)]
    let expected_before_trailing_commas = "\
TokenStream [
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: a,
                span: bytes(2..3)
            },
            Punct {
                op: '+',
                spacing: Alone,
                span: bytes(4..5)
            },
            Literal {
                lit: 1,
                span: bytes(6..7)
            }
        ],
        span: bytes(1..8)
    }
]\
    ";

    let actual = format!("{:#?}", tts);
    if actual.ends_with(",\n]") {
        assert_eq!(expected, actual);
    } else {
        assert_eq!(expected_before_trailing_commas, actual);
    }
}

#[test]
fn default_tokenstream_is_empty() {
    let default_token_stream: TokenStream = Default::default();

    assert!(default_token_stream.is_empty());
}