//! This module contains tests for macro expansion. Effectively, it covers `tt`,
//! `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like the
//! wrong architecture at first glance, but it is intentional.
//!
//! Physically, the macro expansion process is intertwined with name resolution.
//! You cannot expand *just* the syntax. So, to be able to write integration
//! tests of the "expand this code please" form, we have to do it after name
//! resolution. That is, in this crate. We *could* fake some dependencies and
//! write unit-tests (in fact, we used to do that), but that makes tests brittle
//! and harder to understand.
11 
12 mod mbe;
13 mod builtin_fn_macro;
14 mod builtin_derive_macro;
15 mod proc_macros;
16 
17 use std::{iter, ops::Range};
18 
19 use ::mbe::TokenMap;
20 use base_db::{fixture::WithFixture, SourceDatabase};
21 use expect_test::Expect;
22 use hir_expand::{
23     db::{AstDatabase, TokenExpander},
24     AstId, InFile, MacroDefId, MacroDefKind, MacroFile,
25 };
26 use stdx::format_to;
27 use syntax::{
28     ast::{self, edit::IndentLevel},
29     AstNode, SyntaxElement,
30     SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
31     SyntaxNode, TextRange, T,
32 };
33 use tt::{Subtree, TokenId};
34 
35 use crate::{
36     db::DefDatabase, nameres::ModuleSource, resolver::HasResolver, src::HasSource, test_db::TestDB,
37     AdtId, AsMacroCall, Lookup, ModuleDefId,
38 };
39 
/// Expands the macros in `ra_fixture` and compares the resulting source text
/// against `expect`.
///
/// Behavior is driven by magic comments attached to macro definitions/calls:
/// * `// +tree`     — also dump the expansion's syntax tree (as `// `-comments).
/// * `// +errors`   — the expansion *must* contain parse errors (they are rendered).
/// * `// +tokenids` — annotate tokens with their `TokenId`s and keep the original
///   call (commented out) next to its expansion.
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
    let db = TestDB::with_files(ra_fixture);
    // Fixtures used by these tests contain a single crate.
    let krate = db.crate_graph().iter().next().unwrap();
    let def_map = db.crate_def_map(krate);
    let local_id = def_map.root();
    let module = def_map.module_id(local_id);
    let resolver = module.resolver(&db);
    let source = def_map[local_id].definition_source(&db);
    // The root module of the fixture must be a plain source file, not an
    // inline `mod` or a block expression.
    let source_file = match source.value {
        ModuleSource::SourceFile(it) => it,
        ModuleSource::Module(_) | ModuleSource::BlockExpr(_) => panic!(),
    };

    // What we want to do is to replace all macros (fn-like, derive, attr) with
    // their expansions. Turns out, we don't actually store enough information
    // to do this precisely though! Specifically, if a macro expands to nothing,
    // it leaves zero traces in def-map, so we can't get its expansion after the
    // fact.
    //
    // This is the usual
    // <https://github.com/rust-analyzer/rust-analyzer/issues/3407>
    // resolve/record tension!
    //
    // So here we try to do a resolve, which is necessary a heuristic. For macro
    // calls, we use `as_call_id_with_errors`. For derives, we look at the impls
    // in the module and assume that, if impls's source is a different
    // `HirFileId`, than it came from macro expansion.

    // Edits are (byte-range, replacement-text) pairs against the fixture text;
    // they are applied back-to-front at the end so offsets stay valid.
    let mut text_edits = Vec::new();
    let mut expansions = Vec::new();

    // First pass: for every `macro_rules!` *definition* marked `// +tokenids`,
    // annotate each def-site token with its `TokenId` and record the call-id
    // shift of the compiled macro.
    for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
        let mut show_token_ids = false;
        for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
            show_token_ids |= comment.to_string().contains("+tokenids");
        }
        if !show_token_ids {
            continue;
        }

        let call_offset = macro_.syntax().text_range().start().into();
        let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
        let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
        let kind = MacroDefKind::Declarative(ast_id);

        let macro_def = db.macro_def(MacroDefId { krate, kind, local_inner: false }).unwrap();
        if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
            // Only `macro_rules!` definitions are supported here.
            let tt = match &macro_ {
                ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
                ast::Macro::MacroDef(_) => unimplemented!(""),
            };

            let tt_start = tt.syntax().text_range().start();
            // Insert `#<id>` after every token that has an entry in the
            // def-site token map (ranges are relative to the token tree).
            tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
                |token| {
                    let range = token.text_range().checked_sub(tt_start).unwrap();
                    if let Some(id) = def_site_token_map.token_by_range(range) {
                        let offset = (range.end() + tt_start).into();
                        text_edits.push((offset..offset, format!("#{}", id.0)));
                    }
                },
            );
            text_edits.push((
                call_offset..call_offset,
                format!("// call ids will be shifted by {:?}\n", mac.shift()),
            ));
        }
    }

    // Second pass: resolve and expand every macro *call*, capturing any
    // expansion error alongside the parse result and the macro's input arg.
    for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
        let macro_call = InFile::new(source.file_id, &macro_call);
        let mut error = None;
        let macro_call_id = macro_call
            .as_call_id_with_errors(
                &db,
                krate,
                |path| resolver.resolve_path_as_macro(&db, &path),
                &mut |err| error = Some(err),
            )
            .unwrap()
            .unwrap();
        let macro_file = MacroFile { macro_call_id };
        let mut expansion_result = db.parse_macro_expansion(macro_file);
        // Prefer the parse-stage error; fall back to the resolution error.
        expansion_result.err = expansion_result.err.or(error);
        expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
    }

    // Render each expansion into a text edit. Iterating in reverse processes
    // inner (later) calls before the outer ones that contain them.
    for (call, exp, arg) in expansions.into_iter().rev() {
        let mut tree = false;
        let mut expect_errors = false;
        let mut show_token_ids = false;
        for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
            tree |= comment.to_string().contains("+tree");
            expect_errors |= comment.to_string().contains("+errors");
            show_token_ids |= comment.to_string().contains("+tokenids");
        }

        let mut expn_text = String::new();
        if let Some(err) = exp.err {
            format_to!(expn_text, "/* error: {} */", err);
        }
        if let Some((parse, token_map)) = exp.value {
            // Parse errors in the expansion are a test failure unless the test
            // explicitly opts in with `// +errors`.
            if expect_errors {
                assert!(!parse.errors().is_empty(), "no parse errors in expansion");
                for e in parse.errors() {
                    format_to!(expn_text, "/* parse error: {} */\n", e);
                }
            } else {
                assert!(
                    parse.errors().is_empty(),
                    "parse errors in expansion: \n{:#?}",
                    parse.errors()
                );
            }
            let pp = pretty_print_macro_expansion(
                parse.syntax_node(),
                show_token_ids.then(|| &*token_map),
            );
            // Align the expansion with the indentation of the original call.
            let indent = IndentLevel::from_node(call.syntax());
            let pp = reindent(indent, pp);
            format_to!(expn_text, "{}", pp);

            if tree {
                // Dump the syntax tree as line comments below the expansion.
                let tree = format!("{:#?}", parse.syntax_node())
                    .split_inclusive("\n")
                    .map(|line| format!("// {}", line))
                    .collect::<String>();
                format_to!(expn_text, "\n{}", tree)
            }
        }
        let range = call.syntax().text_range();
        let range: Range<usize> = range.into();

        if show_token_ids {
            // Annotate the call-site tokens with their ids as well.
            if let Some((tree, map)) = arg.as_deref() {
                let tt_range = call.token_tree().unwrap().syntax().text_range();
                let mut ranges = Vec::new();
                extract_id_ranges(&mut ranges, &map, &tree);
                for (range, id) in ranges {
                    let idx = (tt_range.start() + range.end()).into();
                    text_edits.push((idx..idx, format!("#{}", id.0)));
                }
            }
            // Keep the original call, commented out line-by-line, and append
            // the expansion after it.
            text_edits.push((range.start..range.start, "// ".into()));
            call.to_string().match_indices('\n').for_each(|(offset, _)| {
                let offset = offset + 1 + range.start;
                text_edits.push((offset..offset, "// ".into()));
            });
            text_edits.push((range.end..range.end, "\n".into()));
            text_edits.push((range.end..range.end, expn_text));
        } else {
            // Default: replace the call with its expansion.
            text_edits.push((range, expn_text));
        }
    }

    // Apply edits back-to-front so earlier byte offsets remain valid.
    text_edits.sort_by_key(|(range, _)| range.start);
    text_edits.reverse();
    let mut expanded_text = source_file.to_string();
    for (range, text) in text_edits {
        expanded_text.replace_range(range, &text);
    }

    // Structs produced by attribute macros / custom derives don't replace any
    // text in the file; append their pretty-printed source instead.
    for decl_id in def_map[local_id].scope.declarations() {
        if let ModuleDefId::AdtId(AdtId::StructId(struct_id)) = decl_id {
            let src = struct_id.lookup(&db).source(&db);
            if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
                let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
                format_to!(expanded_text, "\n{}", pp)
            }
        }
    }

    // Likewise for `impl`s generated by built-in derives.
    for impl_id in def_map[local_id].scope.impls() {
        let src = impl_id.lookup(&db).source(&db);
        if src.file_id.is_builtin_derive(&db).is_some() {
            let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
            format_to!(expanded_text, "\n{}", pp)
        }
    }

    expect.indent(false);
    expect.assert_eq(&expanded_text);
}
224 
extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree)225 fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
226     tree.token_trees.iter().for_each(|tree| match tree {
227         tt::TokenTree::Leaf(leaf) => {
228             let id = match leaf {
229                 tt::Leaf::Literal(it) => it.id,
230                 tt::Leaf::Punct(it) => it.id,
231                 tt::Leaf::Ident(it) => it.id,
232             };
233             ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
234         }
235         tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
236     });
237 }
238 
reindent(indent: IndentLevel, pp: String) -> String239 fn reindent(indent: IndentLevel, pp: String) -> String {
240     if !pp.contains('\n') {
241         return pp;
242     }
243     let mut lines = pp.split_inclusive('\n');
244     let mut res = lines.next().unwrap().to_string();
245     for line in lines {
246         if line.trim().is_empty() {
247             res.push_str(&line)
248         } else {
249             format_to!(res, "{}{}", indent, line)
250         }
251     }
252     res
253 }
254 
/// Renders a macro-expansion syntax tree as human-readable Rust-ish text.
///
/// Expanded token streams carry no whitespace, so this walks the tokens in
/// order and re-inserts spaces and newlines based on the (previous, current)
/// token-kind pair, maintaining a brace-based indentation level. The arm order
/// of the `match` is significant: earlier rules take precedence.
///
/// If `map` is provided, every token that has an entry in it is suffixed with
/// `#<token-id>` (used by the `+tokenids` test mode).
fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
    let mut res = String::new();
    let mut prev_kind = EOF;
    let mut indent_level = 0;
    for token in iter::successors(expn.first_token(), |t| t.next_token()) {
        let curr_kind = token.kind();
        // Separator to emit *before* the current token. "" = glue tokens
        // together, " " = single space, "\n" = newline + re-indent below.
        let space = match (prev_kind, curr_kind) {
            _ if prev_kind.is_trivia() || curr_kind.is_trivia() => "",
            (T!['{'], T!['}']) => "",
            (T![=], _) | (_, T![=]) => " ",
            (_, T!['{']) => " ",
            (T![;] | T!['{'] | T!['}'], _) => "\n",
            (_, T!['}']) => "\n",
            (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
            _ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ",
            (IDENT, _) if curr_kind.is_keyword() => " ",
            (_, IDENT) if prev_kind.is_keyword() => " ",
            (T![>], IDENT) => " ",
            (T![>], _) if curr_kind.is_keyword() => " ",
            (T![->], _) | (_, T![->]) => " ",
            (T![&&], _) | (_, T![&&]) => " ",
            (T![,], _) => " ",
            (T![:], IDENT | T!['(']) => " ",
            (T![:], _) if curr_kind.is_keyword() => " ",
            (T![fn], T!['(']) => "",
            (T![']'], _) if curr_kind.is_keyword() => " ",
            (T![']'], T![#]) => "\n",
            _ if prev_kind.is_keyword() => " ",
            _ => "",
        };

        // Update nesting depth when *leaving* the previous token, so an
        // opening brace itself is still printed at the outer level.
        match prev_kind {
            T!['{'] => indent_level += 1,
            T!['}'] => indent_level -= 1,
            _ => (),
        }

        res.push_str(space);
        if space == "\n" {
            // A closing brace is printed one level shallower than the body
            // it closes.
            let level = if curr_kind == T!['}'] { indent_level - 1 } else { indent_level };
            res.push_str(&"    ".repeat(level));
        }
        prev_kind = curr_kind;
        format_to!(res, "{}", token);
        // Optionally annotate the token with its id from the token map.
        if let Some(map) = map {
            if let Some(id) = map.token_by_range(token.text_range()) {
                format_to!(res, "#{}", id.0);
            }
        }
    }
    res
}
307