//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

use parser::{ParseError, TreeSink};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
    ast::{self, make::tokens::doc_comment},
    AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
    SyntaxKind::*,
    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
use tt::buffer::{Cursor, TokenBuffer};

use crate::{
    to_parser_tokens::to_parser_tokens, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
};
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
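///
/// A usage sketch (illustrative, not a doctest; assumes a `SyntaxNode`
/// parsed with the `syntax` crate):
///
/// ```ignore
/// let (subtree, token_map) = syntax_node_to_token_tree(node);
/// // `subtree` is what a macro sees; `token_map` maps each `tt::TokenId`
/// // back to a text range within `node`.
/// ```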
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
    syntax_node_to_token_tree_censored(node, &Default::default())
}

/// Convert the syntax node to a `TokenTree` (what macro will consume),
/// with the subtrees of the censored nodes excluded.
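///
/// A sketch (illustrative, not a doctest; `censor` holds nodes to skip,
/// e.g. items disabled by `cfg`; `disabled_item` is hypothetical):
///
/// ```ignore
/// let mut censor = FxHashSet::default();
/// censor.insert(disabled_item.syntax().clone());
/// let (subtree, token_map) = syntax_node_to_token_tree_censored(node, &censor);
/// ```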
pub fn syntax_node_to_token_tree_censored(
    node: &SyntaxNode,
    censor: &FxHashSet<SyntaxNode>,
) -> (tt::Subtree, TokenMap) {
    let global_offset = node.text_range().start();
    let mut c = Convertor::new(node, global_offset, censor);
    let subtree = convert_tokens(&mut c);
    c.id_alloc.map.shrink_to_fit();
    (subtree, c.id_alloc.map)
}

// The following items are what a `rustc` macro can be parsed into:
// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
// * Expr(P<ast::Expr>) -> token_tree_to_expr
// * Pat(P<ast::Pat>) -> token_tree_to_pat
// * Ty(P<ast::Ty>) -> token_tree_to_ty
// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts
// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
//
// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>)
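
/// Parse a token tree back into a syntax tree, using `entry_point` to select
/// the grammar production to parse it as.
///
/// A round-trip sketch (illustrative, not a doctest; assumes `expr` is some
/// parsed `ast::Expr`):
///
/// ```ignore
/// let (tt, _map) = syntax_node_to_token_tree(expr.syntax());
/// let (parse, _map) = token_tree_to_syntax_node(&tt, ParserEntryPoint::Expr)?;
/// ```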
pub fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    entry_point: ParserEntryPoint,
) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
    let buffer = match tt {
        tt::Subtree { delimiter: None, token_trees } => {
            TokenBuffer::from_tokens(token_trees.as_slice())
        }
        _ => TokenBuffer::from_subtree(tt),
    };
    let parser_tokens = to_parser_tokens(&buffer);
    let mut tree_sink = TtTreeSink::new(buffer.begin());
    parser::parse(&parser_tokens, &mut tree_sink, entry_point);
    if tree_sink.roots.len() != 1 {
        return Err(ExpandError::ConversionError);
    }
    // FIXME: would be cool to report errors
    let (parse, range_map) = tree_sink.finish();
    Ok((parse, range_map))
}

/// Convert a string to a `TokenTree`.
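///
/// A usage sketch (illustrative, not a doctest):
///
/// ```ignore
/// let (subtree, _map) = parse_to_token_tree("1 + 1").unwrap();
/// assert_eq!(subtree.token_trees.len(), 3); // `1`, `+`, `1` (whitespace is dropped)
/// ```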
pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
    let lexed = parser::LexedStr::new(text);
    if lexed.errors().next().is_some() {
        return None;
    }

    let mut conv = RawConvertor {
        lexed,
        pos: 0,
        id_alloc: TokenIdAlloc {
            map: Default::default(),
            global_offset: TextSize::default(),
            next_id: 0,
        },
    };

    let subtree = convert_tokens(&mut conv);
    Some((subtree, conv.id_alloc.map))
}

/// Split a token tree into a list of expressions separated by `sep`,
/// i.e. parse it as `$($e:expr)SEP*`.
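///
/// A sketch (illustrative, not a doctest; `tt` is the token tree of `1, 2, 3`):
///
/// ```ignore
/// let exprs = parse_exprs_with_sep(&tt, ',');
/// assert_eq!(exprs.len(), 3); // one subtree per expression
/// ```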
pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
    if tt.token_trees.is_empty() {
        return Vec::new();
    }

    let mut iter = TtIter::new(tt);
    let mut res = Vec::new();

    while iter.peek_n(0).is_some() {
        let expanded = iter.expect_fragment(ParserEntryPoint::Expr);

        res.push(match expanded.value {
            None => break,
            Some(tt @ tt::TokenTree::Leaf(_)) => {
                tt::Subtree { delimiter: None, token_trees: vec![tt] }
            }
            Some(tt::TokenTree::Subtree(tt)) => tt,
        });

        let mut fork = iter.clone();
        if fork.expect_char(sep).is_err() {
            break;
        }
        iter = fork;
    }

    if iter.peek_n(0).is_some() {
        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
    }

    res
}

fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
    struct StackEntry {
        subtree: tt::Subtree,
        idx: usize,
        open_range: TextRange,
    }

    let entry = StackEntry {
        subtree: tt::Subtree { delimiter: None, ..Default::default() },
        // never used (delimiter is `None`)
        idx: !0,
        open_range: TextRange::empty(TextSize::of('.')),
    };
    let mut stack = vec![entry];
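
    // Walk the source tokens: push a new stack entry at every open delimiter
    // and pop back to the parent entry at the matching close delimiter.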
    loop {
        let entry = stack.last_mut().unwrap();
        let result = &mut entry.subtree.token_trees;
        let (token, range) = match conv.bump() {
            None => break,
            Some(it) => it,
        };

        let k: SyntaxKind = token.kind(&conv);
        if k == COMMENT {
            if let Some(tokens) = conv.convert_doc_comment(&token) {
                // FIXME: There has to be a better way to do this.
                // Add the comment's token id to the converted doc string.
                let id = conv.id_alloc().alloc(range);
                result.extend(tokens.into_iter().map(|mut tt| {
                    if let tt::TokenTree::Subtree(sub) = &mut tt {
                        if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = &mut sub.token_trees[2]
                        {
                            lit.id = id
                        }
                    }
                    tt
                }));
            }
            continue;
        }

        result.push(if k.is_punct() && k != UNDERSCORE {
            assert_eq!(range.len(), TextSize::of('.'));

            if let Some(delim) = entry.subtree.delimiter {
                let expected = match delim.kind {
                    tt::DelimiterKind::Parenthesis => T![')'],
                    tt::DelimiterKind::Brace => T!['}'],
                    tt::DelimiterKind::Bracket => T![']'],
                };

                if k == expected {
                    let entry = stack.pop().unwrap();
                    conv.id_alloc().close_delim(entry.idx, Some(range));
                    stack.last_mut().unwrap().subtree.token_trees.push(entry.subtree.into());
                    continue;
                }
            }

            let delim = match k {
                T!['('] => Some(tt::DelimiterKind::Parenthesis),
                T!['{'] => Some(tt::DelimiterKind::Brace),
                T!['['] => Some(tt::DelimiterKind::Bracket),
                _ => None,
            };

            if let Some(kind) = delim {
                let mut subtree = tt::Subtree::default();
                let (id, idx) = conv.id_alloc().open_delim(range);
                subtree.delimiter = Some(tt::Delimiter { id, kind });
                stack.push(StackEntry { subtree, idx, open_range: range });
                continue;
            } else {
                let spacing = match conv.peek() {
                    Some(next)
                        if next.kind(&conv).is_trivia()
                            || next.kind(&conv) == T!['[']
                            || next.kind(&conv) == T!['{']
                            || next.kind(&conv) == T!['('] =>
                    {
                        tt::Spacing::Alone
                    }
                    Some(next) if next.kind(&conv).is_punct() && next.kind(&conv) != UNDERSCORE => {
                        tt::Spacing::Joint
                    }
                    _ => tt::Spacing::Alone,
                };
                let char = match token.to_char(&conv) {
                    Some(c) => c,
                    None => {
                        panic!("Token from lexer must be single char: token = {:#?}", token);
                    }
                };
                tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
            }
        } else {
            macro_rules! make_leaf {
                ($i:ident) => {
                    tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
                };
            }
            let leaf: tt::Leaf = match k {
                T![true] | T![false] => make_leaf!(Ident),
                IDENT => make_leaf!(Ident),
                UNDERSCORE => make_leaf!(Ident),
                k if k.is_keyword() => make_leaf!(Ident),
                k if k.is_literal() => make_leaf!(Literal),
                LIFETIME_IDENT => {
                    let char_unit = TextSize::of('\'');
                    let r = TextRange::at(range.start(), char_unit);
                    let apostrophe = tt::Leaf::from(tt::Punct {
                        char: '\'',
                        spacing: tt::Spacing::Joint,
                        id: conv.id_alloc().alloc(r),
                    });
                    result.push(apostrophe.into());

                    let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text(conv)[1..]),
                        id: conv.id_alloc().alloc(r),
                    });
                    result.push(ident.into());
                    continue;
                }
                _ => continue,
            };

            leaf.into()
        });
    }

    // If we get here, we've consumed all input tokens.
    // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
    // Merge them so we're left with one.
    while stack.len() > 1 {
        let entry = stack.pop().unwrap();
        let parent = stack.last_mut().unwrap();

        conv.id_alloc().close_delim(entry.idx, None);
        let leaf: tt::Leaf = tt::Punct {
            id: conv.id_alloc().alloc(entry.open_range),
            char: match entry.subtree.delimiter.unwrap().kind {
                tt::DelimiterKind::Parenthesis => '(',
                tt::DelimiterKind::Brace => '{',
                tt::DelimiterKind::Bracket => '[',
            },
            spacing: tt::Spacing::Alone,
        }
        .into();
        parent.subtree.token_trees.push(leaf.into());
        parent.subtree.token_trees.extend(entry.subtree.token_trees);
    }

    let subtree = stack.pop().unwrap().subtree;
    if subtree.token_trees.len() == 1 {
        if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
            return first.clone();
        }
    }
    subtree
}

/// Returns the textual content of a doc comment block as a quoted string.
/// That is, strips the leading `///` (or `/**`, etc.) and, for block
/// comments, the trailing `*/`, and then quotes the result, since
/// `tt::Literal` expects an escaped string.
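///
/// For example (illustrative): `/// Example` becomes `" Example"` (with the
/// quotes included in the text), and `/** Example */` becomes `" Example "`.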
fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
    let prefix_len = comment.prefix().len();
    let mut text = &comment.text()[prefix_len..];

    // Remove the trailing "*/"
    if comment.kind().shape == ast::CommentShape::Block {
        text = &text[0..text.len() - 2];
    }

    // Quote the string.
    // Note that `tt::Literal` expects an escaped string.
    let text = format!("\"{}\"", text.escape_debug());
    text.into()
}
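
/// Convert a doc comment token into the equivalent `#[doc = ...]` attribute
/// token trees. Sketch of the output shape (illustrative):
///
///     `/// Example`  =>  `# [doc = " Example"]`
///     `//! Example`  =>  `# ! [doc = " Example"]`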
fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
    cov_mark::hit!(test_meta_doc_comments);
    let comment = ast::Comment::cast(token.clone())?;
    let doc = comment.kind().doc?;

    // Make `doc = " Comments"` (the comment text as a quoted literal)
    let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];

    // Make `#![]`
    let mut token_trees = vec![mk_punct('#')];
    if let ast::CommentPlacement::Inner = doc {
        token_trees.push(mk_punct('!'));
    }
    token_trees.push(tt::TokenTree::from(tt::Subtree {
        delimiter: Some(tt::Delimiter {
            kind: tt::DelimiterKind::Bracket,
            id: tt::TokenId::unspecified(),
        }),
        token_trees: meta_tkns,
    }));

    return Some(token_trees);

    // Helper functions
    fn mk_ident(s: &str) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Ident {
            text: s.into(),
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_punct(c: char) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Punct {
            char: c,
            spacing: tt::Spacing::Alone,
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
        let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };

        tt::TokenTree::from(tt::Leaf::from(lit))
    }
}

struct TokenIdAlloc {
    map: TokenMap,
    global_offset: TextSize,
    next_id: u32,
}

impl TokenIdAlloc {
    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
        let relative_range = absolute_range - self.global_offset;
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert(token_id, relative_range);
        token_id
    }

    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
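        // Insert the open range as a placeholder close range too; it is
        // patched by `close_delim` once the matching close token is seen.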
        let idx = self.map.insert_delim(
            token_id,
            open_abs_range - self.global_offset,
            open_abs_range - self.global_offset,
        );
        (token_id, idx)
    }

    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
        match close_abs_range {
            None => {
                self.map.remove_delim(idx);
            }
            Some(close) => {
                self.map.update_close_delim(idx, close - self.global_offset);
            }
        }
    }
}

/// A raw token (straight from the lexer) convertor.
struct RawConvertor<'a> {
    lexed: parser::LexedStr<'a>,
    pos: usize,
    id_alloc: TokenIdAlloc,
}

trait SrcToken<Ctx>: std::fmt::Debug {
    fn kind(&self, ctx: &Ctx) -> SyntaxKind;

    fn to_char(&self, ctx: &Ctx) -> Option<char>;

    fn to_text(&self, ctx: &Ctx) -> SmolStr;
}

trait TokenConvertor: Sized {
    type Token: SrcToken<Self>;

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;

    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;

    fn peek(&self) -> Option<Self::Token>;

    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
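
// For the raw convertor, a token is simply an index into the `LexedStr`, so
// `SrcToken` is implemented directly on `usize`.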
impl<'a> SrcToken<RawConvertor<'a>> for usize {
    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
        ctx.lexed.kind(*self)
    }

    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
        ctx.lexed.text(*self).chars().next()
    }

    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
        ctx.lexed.text(*self).into()
    }
}

impl<'a> TokenConvertor for RawConvertor<'a> {
    type Token = usize;

    fn convert_doc_comment(&self, token: &usize) -> Option<Vec<tt::TokenTree>> {
        let text = self.lexed.text(*token);
        convert_doc_comment(&doc_comment(text))
    }

    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        if self.pos == self.lexed.len() {
            return None;
        }
        let token = self.pos;
        self.pos += 1;
        let range = self.lexed.text_range(token);
        let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());

        Some((token, range))
    }

    fn peek(&self) -> Option<Self::Token> {
        if self.pos == self.lexed.len() {
            return None;
        }
        Some(self.pos)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}

struct Convertor<'c> {
    id_alloc: TokenIdAlloc,
    current: Option<SyntaxToken>,
    preorder: PreorderWithTokens,
    censor: &'c FxHashSet<SyntaxNode>,
    range: TextRange,
    punct_offset: Option<(SyntaxToken, TextSize)>,
}

impl<'c> Convertor<'c> {
    fn new(
        node: &SyntaxNode,
        global_offset: TextSize,
        censor: &'c FxHashSet<SyntaxNode>,
    ) -> Convertor<'c> {
        let range = node.text_range();
        let mut preorder = node.preorder_with_tokens();
        let first = Self::next_token(&mut preorder, censor);
        Convertor {
            id_alloc: TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 },
            current: first,
            preorder,
            range,
            censor,
            punct_offset: None,
        }
    }

    fn next_token(
        preorder: &mut PreorderWithTokens,
        censor: &FxHashSet<SyntaxNode>,
    ) -> Option<SyntaxToken> {
        while let Some(ev) = preorder.next() {
            let ele = match ev {
                WalkEvent::Enter(ele) => ele,
                _ => continue,
            };
            match ele {
                SyntaxElement::Token(t) => return Some(t),
                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
                SyntaxElement::Node(_) => (),
            }
        }
        None
    }
}

#[derive(Debug)]
enum SynToken {
    Ordinary(SyntaxToken),
    Punch(SyntaxToken, TextSize),
}

impl SynToken {
    fn token(&self) -> &SyntaxToken {
        match self {
            SynToken::Ordinary(it) => it,
            SynToken::Punch(it, _) => it,
        }
    }
}

impl<'a> SrcToken<Convertor<'a>> for SynToken {
    fn kind(&self, _ctx: &Convertor<'a>) -> SyntaxKind {
        self.token().kind()
    }
    fn to_char(&self, _ctx: &Convertor<'a>) -> Option<char> {
        match self {
            SynToken::Ordinary(_) => None,
            SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
        }
    }
    fn to_text(&self, _ctx: &Convertor<'a>) -> SmolStr {
        self.token().text().into()
    }
}

impl TokenConvertor for Convertor<'_> {
    type Token = SynToken;
    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
        convert_doc_comment(token.token())
    }

    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
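        // If we are in the middle of a multi-character punct token (e.g.
        // `->`), emit its next character as a stand-alone punct.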
        if let Some((punct, offset)) = self.punct_offset.clone() {
            if usize::from(offset) + 1 < punct.text().len() {
                let offset = offset + TextSize::of('.');
                let range = punct.text_range();
                self.punct_offset = Some((punct.clone(), offset));
                let range = TextRange::at(range.start() + offset, TextSize::of('.'));
                return Some((SynToken::Punch(punct, offset), range));
            }
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }
        self.current = Self::next_token(&mut self.preorder, self.censor);
        let token = if curr.kind().is_punct() {
            let range = curr.text_range();
            let range = TextRange::at(range.start(), TextSize::of('.'));
            self.punct_offset = Some((curr.clone(), 0.into()));
            (SynToken::Punch(curr, 0.into()), range)
        } else {
            self.punct_offset = None;
            let range = curr.text_range();
            (SynToken::Ordinary(curr), range)
        };

        Some(token)
    }

    fn peek(&self) -> Option<Self::Token> {
        if let Some((punct, mut offset)) = self.punct_offset.clone() {
            offset += TextSize::of('.');
            if usize::from(offset) < punct.text().len() {
                return Some(SynToken::Punch(punct, offset));
            }
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }

        let token = if curr.kind().is_punct() {
            SynToken::Punch(curr, 0.into())
        } else {
            SynToken::Ordinary(curr)
        };
        Some(token)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}

struct TtTreeSink<'a> {
    buf: String,
    cursor: Cursor<'a>,
    open_delims: FxHashMap<tt::TokenId, TextSize>,
    text_pos: TextSize,
    inner: SyntaxTreeBuilder,
    token_map: TokenMap,

    // Number of roots.
    // Used to detect an ill-formed tree that does not have a single root.
    roots: smallvec::SmallVec<[usize; 1]>,
}

impl<'a> TtTreeSink<'a> {
    fn new(cursor: Cursor<'a>) -> Self {
        TtTreeSink {
            buf: String::new(),
            cursor,
            open_delims: FxHashMap::default(),
            text_pos: 0.into(),
            inner: SyntaxTreeBuilder::default(),
            roots: smallvec::SmallVec::new(),
            token_map: TokenMap::default(),
        }
    }

    fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
        self.token_map.shrink_to_fit();
        (self.inner.finish(), self.token_map)
    }
}
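
/// Render one side of a delimiter pair as text. For example (illustrative),
/// `delim_to_str(tt::DelimiterKind::Brace, false)` is `"{"`, and with
/// `closing == true` it is `"}"`.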
fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
    let texts = match d {
        tt::DelimiterKind::Parenthesis => "()",
        tt::DelimiterKind::Brace => "{}",
        tt::DelimiterKind::Bracket => "[]",
    };

    let idx = closing as usize;
    &texts[idx..texts.len() - (1 - idx)]
}

impl<'a> TreeSink for TtTreeSink<'a> {
    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
        if kind == LIFETIME_IDENT {
            n_tokens = 2;
        }
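        // (`convert_tokens` splits a lifetime into an apostrophe punct plus
        // an ident, so one LIFETIME_IDENT token covers two token trees.)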

        let mut last = self.cursor;
        for _ in 0..n_tokens {
            let tmp_str: SmolStr;
            if self.cursor.eof() {
                break;
            }
            last = self.cursor;
            let text: &str = loop {
                break match self.cursor.token_tree() {
                    Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                        // Mark the range if needed
                        let (text, id) = match leaf {
                            tt::Leaf::Ident(ident) => (&ident.text, ident.id),
                            tt::Leaf::Punct(punct) => {
                                assert!(punct.char.is_ascii());
                                let char = &(punct.char as u8);
                                tmp_str = SmolStr::new_inline(
                                    std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
                                );
                                (&tmp_str, punct.id)
                            }
                            tt::Leaf::Literal(lit) => (&lit.text, lit.id),
                        };
                        let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
                        self.token_map.insert(id, range);
                        self.cursor = self.cursor.bump();
                        text
                    }
                    Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                        self.cursor = self.cursor.subtree().unwrap();
                        match subtree.delimiter {
                            Some(d) => {
                                self.open_delims.insert(d.id, self.text_pos);
                                delim_to_str(d.kind, false)
                            }
                            None => continue,
                        }
                    }
                    None => {
                        let parent = self.cursor.end().unwrap();
                        self.cursor = self.cursor.bump();
                        match parent.delimiter {
                            Some(d) => {
                                if let Some(open_delim) = self.open_delims.get(&d.id) {
                                    let open_range = TextRange::at(*open_delim, TextSize::of('('));
                                    let close_range =
                                        TextRange::at(self.text_pos, TextSize::of('('));
                                    self.token_map.insert_delim(d.id, open_range, close_range);
                                }
                                delim_to_str(d.kind, true)
                            }
                            None => continue,
                        }
                    }
                };
            };
            self.buf += text;
            self.text_pos += TextSize::of(text);
        }

        self.inner.token(kind, self.buf.as_str());
        self.buf.clear();
        // Add whitespace between adjacent puncts.
        let next = last.bump();
        if let (
            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
        ) = (last.token_tree(), next.token_tree())
        {
            // Note: other parts of rust-analyzer assume a semicolon is the
            // last token of a statement, so we don't add whitespace after it.
            if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
                self.inner.token(WHITESPACE, " ");
                self.text_pos += TextSize::of(' ');
            }
        }
    }

    fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(kind);

        match self.roots.last_mut() {
            None | Some(0) => self.roots.push(1),
            Some(ref mut n) => **n += 1,
        };
    }

    fn finish_node(&mut self) {
        self.inner.finish_node();
        *self.roots.last_mut().unwrap() -= 1;
    }

    fn error(&mut self, error: ParseError) {
        self.inner.error(error, self.text_pos)
    }
}