1 use super::ty::AllowPlus;
2 use super::TokenType;
3 use super::{BlockMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenExpectType};
4
5 use rustc_ast as ast;
6 use rustc_ast::ptr::P;
7 use rustc_ast::token::{self, Lit, LitKind, TokenKind};
8 use rustc_ast::util::parser::AssocOp;
9 use rustc_ast::{AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec};
10 use rustc_ast::{BinOpKind, BindingMode, Block, BlockCheckMode, Expr, ExprKind, GenericArg, Item};
11 use rustc_ast::{ItemKind, Mutability, Param, Pat, PatKind, Path, PathSegment, QSelf, Ty, TyKind};
12 use rustc_ast_pretty::pprust;
13 use rustc_data_structures::fx::FxHashSet;
14 use rustc_errors::{pluralize, struct_span_err};
15 use rustc_errors::{Applicability, DiagnosticBuilder, Handler, PResult};
16 use rustc_span::source_map::Spanned;
17 use rustc_span::symbol::{kw, Ident};
18 use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
19
20 use tracing::{debug, trace};
21
/// Shared help text for diagnostics that suspect a mistyped turbofish
/// (`foo<Bar>()` written instead of `foo::<Bar>()`).
const TURBOFISH_SUGGESTION_STR: &str =
    "use `::<...>` instead of `<...>` to specify type or const arguments";
24
25 /// Creates a placeholder argument.
dummy_arg(ident: Ident) -> Param26 pub(super) fn dummy_arg(ident: Ident) -> Param {
27 let pat = P(Pat {
28 id: ast::DUMMY_NODE_ID,
29 kind: PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None),
30 span: ident.span,
31 tokens: None,
32 });
33 let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
34 Param {
35 attrs: AttrVec::default(),
36 id: ast::DUMMY_NODE_ID,
37 pat,
38 span: ident.span,
39 ty: P(ty),
40 is_placeholder: false,
41 }
42 }
43
/// Structured parser errors that are rendered lazily into a diagnostic
/// via `Error::span_err`.
pub enum Error {
    /// A documentation comment appeared in a position where it cannot
    /// document anything (rendered as E0585).
    UselessDocComment,
}
47
impl Error {
    /// Renders this error into a `DiagnosticBuilder` pointing at `sp`.
    /// The diagnostic is built, not emitted — the caller decides when
    /// (or whether) to emit it.
    fn span_err(self, sp: impl Into<MultiSpan>, handler: &Handler) -> DiagnosticBuilder<'_> {
        match self {
            Error::UselessDocComment => {
                let mut err = struct_span_err!(
                    handler,
                    sp,
                    E0585,
                    "found a documentation comment that doesn't document anything",
                );
                err.help(
                    "doc comments must come before what they document, maybe a comment was \
                     intended with `//`?",
                );
                err
            }
        }
    }
}
67
/// AST nodes (types, patterns, expressions) that can be rebuilt as a
/// qualified path (`<Ty>::AssocItem`) during error recovery.
pub(super) trait RecoverQPath: Sized + 'static {
    /// Path style to use when reparsing the path portion of the node.
    const PATH_STYLE: PathStyle = PathStyle::Expr;
    /// Attempts to view this node as a type, if such a conversion exists.
    fn to_ty(&self) -> Option<P<Ty>>;
    /// Constructs the recovered node from an optional qualified-self and a path.
    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
}
73
74 impl RecoverQPath for Ty {
75 const PATH_STYLE: PathStyle = PathStyle::Type;
to_ty(&self) -> Option<P<Ty>>76 fn to_ty(&self) -> Option<P<Ty>> {
77 Some(P(self.clone()))
78 }
recovered(qself: Option<QSelf>, path: ast::Path) -> Self79 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
80 Self {
81 span: path.span,
82 kind: TyKind::Path(qself, path),
83 id: ast::DUMMY_NODE_ID,
84 tokens: None,
85 }
86 }
87 }
88
89 impl RecoverQPath for Pat {
to_ty(&self) -> Option<P<Ty>>90 fn to_ty(&self) -> Option<P<Ty>> {
91 self.to_ty()
92 }
recovered(qself: Option<QSelf>, path: ast::Path) -> Self93 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
94 Self {
95 span: path.span,
96 kind: PatKind::Path(qself, path),
97 id: ast::DUMMY_NODE_ID,
98 tokens: None,
99 }
100 }
101 }
102
103 impl RecoverQPath for Expr {
to_ty(&self) -> Option<P<Ty>>104 fn to_ty(&self) -> Option<P<Ty>> {
105 self.to_ty()
106 }
recovered(qself: Option<QSelf>, path: ast::Path) -> Self107 fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
108 Self {
109 span: path.span,
110 kind: ExprKind::Path(qself, path),
111 attrs: AttrVec::new(),
112 id: ast::DUMMY_NODE_ID,
113 tokens: None,
114 }
115 }
116 }
117
/// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`.
crate enum ConsumeClosingDelim {
    /// Consume the closing delimiter as part of the block.
    Yes,
    /// Stop before the closing delimiter, leaving it for the caller.
    No,
}
123
/// Whether statement-level error recovery should be attempted after a
/// failed parse (passed to block-tail parsing; see `parse_block_tail`
/// call sites in this module).
#[derive(Clone, Copy)]
pub enum AttemptLocalParseRecovery {
    Yes,
    No,
}

impl AttemptLocalParseRecovery {
    /// Returns `true` if local recovery should be attempted.
    pub fn yes(&self) -> bool {
        // `matches!` is the idiom used elsewhere in this file for
        // single-variant checks; clearer than a two-arm `match`.
        matches!(self, AttemptLocalParseRecovery::Yes)
    }

    /// Returns `true` if local recovery should *not* be attempted.
    pub fn no(&self) -> bool {
        matches!(self, AttemptLocalParseRecovery::No)
    }
}
145
146 impl<'a> Parser<'a> {
span_fatal_err<S: Into<MultiSpan>>( &self, sp: S, err: Error, ) -> DiagnosticBuilder<'a>147 pub(super) fn span_fatal_err<S: Into<MultiSpan>>(
148 &self,
149 sp: S,
150 err: Error,
151 ) -> DiagnosticBuilder<'a> {
152 err.span_err(sp, self.diagnostic())
153 }
154
struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a>155 pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> {
156 self.sess.span_diagnostic.struct_span_err(sp, m)
157 }
158
span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> !159 pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! {
160 self.sess.span_diagnostic.span_bug(sp, m)
161 }
162
diagnostic(&self) -> &'a Handler163 pub(super) fn diagnostic(&self) -> &'a Handler {
164 &self.sess.span_diagnostic
165 }
166
span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError>167 pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
168 self.sess.source_map().span_to_snippet(span)
169 }
170
expected_ident_found(&self) -> DiagnosticBuilder<'a>171 pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> {
172 let mut err = self.struct_span_err(
173 self.token.span,
174 &format!("expected identifier, found {}", super::token_descr(&self.token)),
175 );
176 let valid_follow = &[
177 TokenKind::Eq,
178 TokenKind::Colon,
179 TokenKind::Comma,
180 TokenKind::Semi,
181 TokenKind::ModSep,
182 TokenKind::OpenDelim(token::DelimToken::Brace),
183 TokenKind::OpenDelim(token::DelimToken::Paren),
184 TokenKind::CloseDelim(token::DelimToken::Brace),
185 TokenKind::CloseDelim(token::DelimToken::Paren),
186 ];
187 match self.token.ident() {
188 Some((ident, false))
189 if ident.is_raw_guess()
190 && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
191 {
192 err.span_suggestion(
193 ident.span,
194 "you can escape reserved keywords to use them as identifiers",
195 format!("r#{}", ident.name),
196 Applicability::MaybeIncorrect,
197 );
198 }
199 _ => {}
200 }
201 if let Some(token_descr) = super::token_descr_opt(&self.token) {
202 err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
203 } else {
204 err.span_label(self.token.span, "expected identifier");
205 if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
206 err.span_suggestion(
207 self.token.span,
208 "remove this comma",
209 String::new(),
210 Applicability::MachineApplicable,
211 );
212 }
213 }
214 err
215 }
216
expected_one_of_not_found( &mut self, edible: &[TokenKind], inedible: &[TokenKind], ) -> PResult<'a, bool >217 pub(super) fn expected_one_of_not_found(
218 &mut self,
219 edible: &[TokenKind],
220 inedible: &[TokenKind],
221 ) -> PResult<'a, bool /* recovered */> {
222 debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
223 fn tokens_to_string(tokens: &[TokenType]) -> String {
224 let mut i = tokens.iter();
225 // This might be a sign we need a connect method on `Iterator`.
226 let b = i.next().map_or_else(String::new, |t| t.to_string());
227 i.enumerate().fold(b, |mut b, (i, a)| {
228 if tokens.len() > 2 && i == tokens.len() - 2 {
229 b.push_str(", or ");
230 } else if tokens.len() == 2 && i == tokens.len() - 2 {
231 b.push_str(" or ");
232 } else {
233 b.push_str(", ");
234 }
235 b.push_str(&a.to_string());
236 b
237 })
238 }
239
240 let mut expected = edible
241 .iter()
242 .map(|x| TokenType::Token(x.clone()))
243 .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
244 .chain(self.expected_tokens.iter().cloned())
245 .collect::<Vec<_>>();
246 expected.sort_by_cached_key(|x| x.to_string());
247 expected.dedup();
248
249 let expect = tokens_to_string(&expected[..]);
250 let actual = super::token_descr(&self.token);
251 let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
252 let short_expect = if expected.len() > 6 {
253 format!("{} possible tokens", expected.len())
254 } else {
255 expect.clone()
256 };
257 (
258 format!("expected one of {}, found {}", expect, actual),
259 (self.prev_token.span.shrink_to_hi(), format!("expected one of {}", short_expect)),
260 )
261 } else if expected.is_empty() {
262 (
263 format!("unexpected token: {}", actual),
264 (self.prev_token.span, "unexpected token after this".to_string()),
265 )
266 } else {
267 (
268 format!("expected {}, found {}", expect, actual),
269 (self.prev_token.span.shrink_to_hi(), format!("expected {}", expect)),
270 )
271 };
272 self.last_unexpected_token_span = Some(self.token.span);
273 let mut err = self.struct_span_err(self.token.span, &msg_exp);
274
275 // Add suggestion for a missing closing angle bracket if '>' is included in expected_tokens
276 // there are unclosed angle brackets
277 if self.unmatched_angle_bracket_count > 0
278 && self.token.kind == TokenKind::Eq
279 && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Gt)))
280 {
281 err.span_label(self.prev_token.span, "maybe try to close unmatched angle bracket");
282 }
283
284 let sp = if self.token == token::Eof {
285 // This is EOF; don't want to point at the following char, but rather the last token.
286 self.prev_token.span
287 } else {
288 label_sp
289 };
290 match self.recover_closing_delimiter(
291 &expected
292 .iter()
293 .filter_map(|tt| match tt {
294 TokenType::Token(t) => Some(t.clone()),
295 _ => None,
296 })
297 .collect::<Vec<_>>(),
298 err,
299 ) {
300 Err(e) => err = e,
301 Ok(recovered) => {
302 return Ok(recovered);
303 }
304 }
305
306 if self.check_too_many_raw_str_terminators(&mut err) {
307 return Err(err);
308 }
309
310 let sm = self.sess.source_map();
311 if self.prev_token.span == DUMMY_SP {
312 // Account for macro context where the previous span might not be
313 // available to avoid incorrect output (#54841).
314 err.span_label(self.token.span, label_exp);
315 } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) {
316 // When the spans are in the same line, it means that the only content between
317 // them is whitespace, point at the found token in that case:
318 //
319 // X | () => { syntax error };
320 // | ^^^^^ expected one of 8 possible tokens here
321 //
322 // instead of having:
323 //
324 // X | () => { syntax error };
325 // | -^^^^^ unexpected token
326 // | |
327 // | expected one of 8 possible tokens here
328 err.span_label(self.token.span, label_exp);
329 } else {
330 err.span_label(sp, label_exp);
331 err.span_label(self.token.span, "unexpected token");
332 }
333 self.maybe_annotate_with_ascription(&mut err, false);
334 Err(err)
335 }
336
check_too_many_raw_str_terminators(&mut self, err: &mut DiagnosticBuilder<'_>) -> bool337 fn check_too_many_raw_str_terminators(&mut self, err: &mut DiagnosticBuilder<'_>) -> bool {
338 match (&self.prev_token.kind, &self.token.kind) {
339 (
340 TokenKind::Literal(Lit {
341 kind: LitKind::StrRaw(n_hashes) | LitKind::ByteStrRaw(n_hashes),
342 ..
343 }),
344 TokenKind::Pound,
345 ) => {
346 err.set_primary_message("too many `#` when terminating raw string");
347 err.span_suggestion(
348 self.token.span,
349 "remove the extra `#`",
350 String::new(),
351 Applicability::MachineApplicable,
352 );
353 err.note(&format!("the raw string started with {} `#`s", n_hashes));
354 true
355 }
356 _ => false,
357 }
358 }
359
maybe_suggest_struct_literal( &mut self, lo: Span, s: BlockCheckMode, ) -> Option<PResult<'a, P<Block>>>360 pub fn maybe_suggest_struct_literal(
361 &mut self,
362 lo: Span,
363 s: BlockCheckMode,
364 ) -> Option<PResult<'a, P<Block>>> {
365 if self.token.is_ident() && self.look_ahead(1, |t| t == &token::Colon) {
366 // We might be having a struct literal where people forgot to include the path:
367 // fn foo() -> Foo {
368 // field: value,
369 // }
370 let mut snapshot = self.clone();
371 let path =
372 Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
373 let struct_expr = snapshot.parse_struct_expr(path, AttrVec::new(), false);
374 let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
375 return Some(match (struct_expr, block_tail) {
376 (Ok(expr), Err(mut err)) => {
377 // We have encountered the following:
378 // fn foo() -> Foo {
379 // field: value,
380 // }
381 // Suggest:
382 // fn foo() -> Foo { Path {
383 // field: value,
384 // } }
385 err.delay_as_bug();
386 self.struct_span_err(expr.span, "struct literal body without path")
387 .multipart_suggestion(
388 "you might have forgotten to add the struct literal inside the block",
389 vec![
390 (expr.span.shrink_to_lo(), "{ SomeStruct ".to_string()),
391 (expr.span.shrink_to_hi(), " }".to_string()),
392 ],
393 Applicability::MaybeIncorrect,
394 )
395 .emit();
396 *self = snapshot;
397 Ok(self.mk_block(
398 vec![self.mk_stmt_err(expr.span)],
399 s,
400 lo.to(self.prev_token.span),
401 ))
402 }
403 (Err(mut err), Ok(tail)) => {
404 // We have a block tail that contains a somehow valid type ascription expr.
405 err.cancel();
406 Ok(tail)
407 }
408 (Err(mut snapshot_err), Err(err)) => {
409 // We don't know what went wrong, emit the normal error.
410 snapshot_err.cancel();
411 self.consume_block(token::Brace, ConsumeClosingDelim::Yes);
412 Err(err)
413 }
414 (Ok(_), Ok(tail)) => Ok(tail),
415 });
416 }
417 None
418 }
419
maybe_annotate_with_ascription( &mut self, err: &mut DiagnosticBuilder<'_>, maybe_expected_semicolon: bool, )420 pub fn maybe_annotate_with_ascription(
421 &mut self,
422 err: &mut DiagnosticBuilder<'_>,
423 maybe_expected_semicolon: bool,
424 ) {
425 if let Some((sp, likely_path)) = self.last_type_ascription.take() {
426 let sm = self.sess.source_map();
427 let next_pos = sm.lookup_char_pos(self.token.span.lo());
428 let op_pos = sm.lookup_char_pos(sp.hi());
429
430 let allow_unstable = self.sess.unstable_features.is_nightly_build();
431
432 if likely_path {
433 err.span_suggestion(
434 sp,
435 "maybe write a path separator here",
436 "::".to_string(),
437 if allow_unstable {
438 Applicability::MaybeIncorrect
439 } else {
440 Applicability::MachineApplicable
441 },
442 );
443 self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
444 } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
445 err.span_suggestion(
446 sp,
447 "try using a semicolon",
448 ";".to_string(),
449 Applicability::MaybeIncorrect,
450 );
451 } else if allow_unstable {
452 err.span_label(sp, "tried to parse a type due to this type ascription");
453 } else {
454 err.span_label(sp, "tried to parse a type due to this");
455 }
456 if allow_unstable {
457 // Give extra information about type ascription only if it's a nightly compiler.
458 err.note(
459 "`#![feature(type_ascription)]` lets you annotate an expression with a type: \
460 `<expr>: <type>`",
461 );
462 if !likely_path {
463 // Avoid giving too much info when it was likely an unrelated typo.
464 err.note(
465 "see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
466 for more information",
467 );
468 }
469 }
470 }
471 }
472
    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
    /// passes through any errors encountered. Used for error recovery.
    pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
        if let Err(ref mut err) =
            self.parse_seq_to_before_tokens(kets, SeqSep::none(), TokenExpectType::Expect, |p| {
                Ok(p.parse_token_tree())
            })
        {
            // Errors raised while skipping are noise; drop them silently.
            err.cancel();
        }
    }
484
    /// This function checks if there are trailing angle brackets and produces
    /// a diagnostic to suggest removing them.
    ///
    /// ```ignore (diagnostic)
    /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
    ///                                                        ^^ help: remove extra angle brackets
    /// ```
    ///
    /// If `true` is returned, then trailing brackets were recovered, tokens were consumed
    /// up until one of the tokens in 'end' was encountered, and an error was emitted.
    pub(super) fn check_trailing_angle_brackets(
        &mut self,
        segment: &PathSegment,
        end: &[&TokenKind],
    ) -> bool {
        // This function is intended to be invoked after parsing a path segment where there are two
        // cases:
        //
        // 1. A specific token is expected after the path segment.
        //    eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call),
        //        `Foo::`, or `Foo::<Bar>::` (mod sep - continued path).
        // 2. No specific token is expected after the path segment.
        //    eg. `x.foo` (field access)
        //
        // This function is called after parsing `.foo` and before parsing the token `end` (if
        // present). This includes any angle bracket arguments, such as `.foo::<u32>` or
        // `Foo::<Bar>`.

        // We only care about trailing angle brackets if we previously parsed angle bracket
        // arguments. This helps stop us incorrectly suggesting that extra angle brackets be
        // removed in this case:
        //
        // `x.foo >> (3)` (where `x.foo` is a `u32` for example)
        //
        // This case is particularly tricky as we won't notice it just looking at the tokens -
        // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will
        // have already been parsed):
        //
        // `x.foo::<u32>>>(3)`
        let parsed_angle_bracket_args =
            segment.args.as_ref().map_or(false, |args| args.is_angle_bracketed());

        debug!(
            "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
            parsed_angle_bracket_args,
        );
        if !parsed_angle_bracket_args {
            return false;
        }

        // Keep the span at the start so we can highlight the sequence of `>` characters to be
        // removed.
        let lo = self.token.span;

        // We need to look-ahead to see if we have `>` characters without moving the cursor forward
        // (since we might have the field access case and the characters we're eating are
        // actual operators and not trailing characters - ie `x.foo >> 3`).
        let mut position = 0;

        // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how
        // many of each (so we can correctly pluralize our error messages) and continue to
        // advance.
        let mut number_of_shr = 0;
        let mut number_of_gt = 0;
        while self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            if *t == token::BinOp(token::BinOpToken::Shr) {
                number_of_shr += 1;
                true
            } else if *t == token::Gt {
                number_of_gt += 1;
                true
            } else {
                false
            }
        }) {
            position += 1;
        }

        // If we didn't find any trailing `>` characters, then we have nothing to error about.
        debug!(
            "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}",
            number_of_gt, number_of_shr,
        );
        if number_of_gt < 1 && number_of_shr < 1 {
            return false;
        }

        // Finally, double check that we have our end token as otherwise this is the
        // second case.
        if self.look_ahead(position, |t| {
            trace!("check_trailing_angle_brackets: t={:?}", t);
            end.contains(&&t.kind)
        }) {
            // Eat from where we started until the end token so that parsing can continue
            // as if we didn't have those extra angle brackets.
            self.eat_to_tokens(end);
            let span = lo.until(self.token.span);

            // Each `>>` token contributes two closing brackets to the count.
            let total_num_of_gt = number_of_gt + number_of_shr * 2;
            self.struct_span_err(
                span,
                &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
            )
            .span_suggestion(
                span,
                &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
                String::new(),
                Applicability::MachineApplicable,
            )
            .emit();
            return true;
        }
        false
    }
600
    /// Check if a method call with an intended turbofish has been written without surrounding
    /// angle brackets.
    ///
    /// On seeing `foo.collect::Vec<_>()`, speculatively parses the angle-
    /// bracketed arguments after the `::` and, if a call follows, attaches
    /// them to `segment` and suggests adding `<...>`. Any other shape rolls
    /// the parser back to the pre-speculation snapshot.
    pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
        if token::ModSep == self.token.kind && segment.args.is_none() {
            // Snapshot so we can rewind if this isn't actually a bad turbofish.
            let snapshot = self.clone();
            self.bump();
            let lo = self.token.span;
            match self.parse_angle_args() {
                Ok(args) => {
                    let span = lo.to(self.prev_token.span);
                    // Detect trailing `>` like in `x.collect::Vec<_>>()`.
                    let mut trailing_span = self.prev_token.span.shrink_to_hi();
                    while self.token.kind == token::BinOp(token::Shr)
                        || self.token.kind == token::Gt
                    {
                        trailing_span = trailing_span.to(self.token.span);
                        self.bump();
                    }
                    if self.token.kind == token::OpenDelim(token::Paren) {
                        // Recover from bad turbofish: `foo.collect::Vec<_>()`.
                        let args = AngleBracketedArgs { args, span }.into();
                        segment.args = args;

                        self.struct_span_err(
                            span,
                            "generic parameters without surrounding angle brackets",
                        )
                        .multipart_suggestion(
                            "surround the type parameters with angle brackets",
                            vec![
                                (span.shrink_to_lo(), "<".to_string()),
                                (trailing_span, ">".to_string()),
                            ],
                            Applicability::MachineApplicable,
                        )
                        .emit();
                    } else {
                        // This doesn't look like an invalid turbofish, can't recover parse state.
                        *self = snapshot;
                    }
                }
                Err(mut err) => {
                    // We couldn't parse generic parameters, unlikely to be a turbofish. Rely on
                    // generic parse error instead.
                    err.cancel();
                    *self = snapshot;
                }
            }
        }
    }
651
    /// When writing a turbofish with multiple type parameters missing the leading `::`, we will
    /// encounter a parse error when encountering the first `,`.
    ///
    /// If `expr` is a `<` comparison and the rest of a generic-argument list
    /// followed by `>` and a valid expression can be parsed from here, emits
    /// the turbofish suggestion (via `e`), replaces `expr` with an error
    /// placeholder, and returns `Ok(())`. Otherwise returns `Err(e)` unchanged.
    pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
        &mut self,
        mut e: DiagnosticBuilder<'a>,
        expr: &mut P<Expr>,
    ) -> PResult<'a, ()> {
        if let ExprKind::Binary(binop, _, _) = &expr.kind {
            if let ast::BinOpKind::Lt = binop.node {
                if self.eat(&token::Comma) {
                    let x = self.parse_seq_to_before_end(
                        &token::Gt,
                        SeqSep::trailing_allowed(token::Comma),
                        |p| p.parse_generic_arg(),
                    );
                    match x {
                        Ok((_, _, false)) => {
                            if self.eat(&token::Gt) {
                                match self.parse_expr() {
                                    Ok(_) => {
                                        e.span_suggestion_verbose(
                                            binop.span.shrink_to_lo(),
                                            TURBOFISH_SUGGESTION_STR,
                                            "::".to_string(),
                                            Applicability::MaybeIncorrect,
                                        );
                                        e.emit();
                                        *expr =
                                            self.mk_expr_err(expr.span.to(self.prev_token.span));
                                        return Ok(());
                                    }
                                    Err(mut err) => {
                                        err.cancel();
                                    }
                                }
                            }
                        }
                        Err(mut err) => {
                            err.cancel();
                        }
                        _ => {}
                    }
                }
            }
        }
        Err(e)
    }
699
    /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
    /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
    /// parenthesising the leftmost comparison.
    ///
    /// Returns `true` iff recovery consumed the outer comparison's right-hand
    /// side (i.e. the cursor advanced), so the caller knows whether to emit
    /// and continue or to bubble the error.
    fn attempt_chained_comparison_suggestion(
        &mut self,
        err: &mut DiagnosticBuilder<'_>,
        inner_op: &Expr,
        outer_op: &Spanned<AssocOp>,
    ) -> bool /* advanced the cursor */ {
        if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
            if let ExprKind::Field(_, ident) = l1.kind {
                if ident.as_str().parse::<i32>().is_err() && !matches!(r1.kind, ExprKind::Lit(_)) {
                    // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
                    // suggestion being the only one to apply is high.
                    return false;
                }
            }
            // Wraps `left .. right` in parentheses as a suggestion on `err`.
            let mut enclose = |left: Span, right: Span| {
                err.multipart_suggestion(
                    "parenthesize the comparison",
                    vec![
                        (left.shrink_to_lo(), "(".to_string()),
                        (right.shrink_to_hi(), ")".to_string()),
                    ],
                    Applicability::MaybeIncorrect,
                );
            };
            return match (op.node, &outer_op.node) {
                // `x == y == z`
                (BinOpKind::Eq, AssocOp::Equal) |
                // `x < y < z` and friends.
                (BinOpKind::Lt, AssocOp::Less | AssocOp::LessEqual) |
                (BinOpKind::Le, AssocOp::LessEqual | AssocOp::Less) |
                // `x > y > z` and friends.
                (BinOpKind::Gt, AssocOp::Greater | AssocOp::GreaterEqual) |
                (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
                    let expr_to_str = |e: &Expr| {
                        self.span_to_snippet(e.span)
                            .unwrap_or_else(|_| pprust::expr_to_string(&e))
                    };
                    err.span_suggestion_verbose(
                        inner_op.span.shrink_to_hi(),
                        "split the comparison into two",
                        format!(" && {}", expr_to_str(&r1)),
                        Applicability::MaybeIncorrect,
                    );
                    false // Keep the current parse behavior, where the AST is `(x < y) < z`.
                }
                // `x == y < z`
                (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
                    // Consume `z`/outer-op-rhs.
                    let snapshot = self.clone();
                    match self.parse_expr() {
                        Ok(r2) => {
                            // We are sure that outer-op-rhs could be consumed, the suggestion is
                            // likely correct.
                            enclose(r1.span, r2.span);
                            true
                        }
                        Err(mut expr_err) => {
                            expr_err.cancel();
                            *self = snapshot;
                            false
                        }
                    }
                }
                // `x > y == z`
                (BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, AssocOp::Equal) => {
                    let snapshot = self.clone();
                    // At this point it is always valid to enclose the lhs in parentheses, no
                    // further checks are necessary.
                    match self.parse_expr() {
                        Ok(_) => {
                            enclose(l1.span, r1.span);
                            true
                        }
                        Err(mut expr_err) => {
                            expr_err.cancel();
                            *self = snapshot;
                            false
                        }
                    }
                }
                _ => false,
            };
        }
        false
    }
788
    /// Produces an error if comparison operators are chained (RFC #558).
    /// We only need to check the LHS, not the RHS, because all comparison ops have same
    /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`).
    ///
    /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used
    /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the
    /// case.
    ///
    /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left
    /// associative we can infer that we have:
    ///
    /// ```text
    ///           outer_op
    ///           /   \
    ///     inner_op   r2
    ///        /  \
    ///      l1    r1
    /// ```
    ///
    /// Returns `Ok(Some(_))` with an error-placeholder expression when
    /// recovery succeeded and parsing can continue, `Ok(None)` when there was
    /// nothing to recover, and `Err(_)` with the diagnostic otherwise.
    pub(super) fn check_no_chained_comparison(
        &mut self,
        inner_op: &Expr,
        outer_op: &Spanned<AssocOp>,
    ) -> PResult<'a, Option<P<Expr>>> {
        debug_assert!(
            outer_op.node.is_comparison(),
            "check_no_chained_comparison: {:?} is not comparison",
            outer_op.node,
        );

        let mk_err_expr =
            |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new())));

        match inner_op.kind {
            ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
                let mut err = self.struct_span_err(
                    vec![op.span, self.prev_token.span],
                    "comparison operators cannot be chained",
                );

                let suggest = |err: &mut DiagnosticBuilder<'_>| {
                    err.span_suggestion_verbose(
                        op.span.shrink_to_lo(),
                        TURBOFISH_SUGGESTION_STR,
                        "::".to_string(),
                        Applicability::MaybeIncorrect,
                    );
                };

                // Include `<` to provide this recommendation even in a case like
                // `Foo<Bar<Baz<Qux, ()>>>`
                if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less
                    || outer_op.node == AssocOp::Greater
                {
                    if outer_op.node == AssocOp::Less {
                        // Snapshot so we can rewind if the angle-bracket
                        // speculation below doesn't pan out.
                        let snapshot = self.clone();
                        self.bump();
                        // So far we have parsed `foo<bar<`, consume the rest of the type args.
                        let modifiers =
                            [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
                        self.consume_tts(1, &modifiers[..]);

                        if !&[token::OpenDelim(token::Paren), token::ModSep]
                            .contains(&self.token.kind)
                        {
                            // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
                            // parser and bail out.
                            *self = snapshot.clone();
                        }
                    }
                    return if token::ModSep == self.token.kind {
                        // We have some certainty that this was a bad turbofish at this point.
                        // `foo< bar >::`
                        suggest(&mut err);

                        let snapshot = self.clone();
                        self.bump(); // `::`

                        // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`.
                        match self.parse_expr() {
                            Ok(_) => {
                                // 99% certain that the suggestion is correct, continue parsing.
                                err.emit();
                                // FIXME: actually check that the two expressions in the binop are
                                // paths and resynthesize new fn call expression instead of using
                                // `ExprKind::Err` placeholder.
                                mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                            }
                            Err(mut expr_err) => {
                                expr_err.cancel();
                                // Not entirely sure now, but we bubble the error up with the
                                // suggestion.
                                *self = snapshot;
                                Err(err)
                            }
                        }
                    } else if token::OpenDelim(token::Paren) == self.token.kind {
                        // We have high certainty that this was a bad turbofish at this point.
                        // `foo< bar >(`
                        suggest(&mut err);
                        // Consume the fn call arguments.
                        match self.consume_fn_args() {
                            Err(()) => Err(err),
                            Ok(()) => {
                                err.emit();
                                // FIXME: actually check that the two expressions in the binop are
                                // paths and resynthesize new fn call expression instead of using
                                // `ExprKind::Err` placeholder.
                                mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                            }
                        }
                    } else {
                        if !matches!(l1.kind, ExprKind::Lit(_))
                            && !matches!(r1.kind, ExprKind::Lit(_))
                        {
                            // All we know is that this is `foo < bar >` and *nothing* else. Try to
                            // be helpful, but don't attempt to recover.
                            err.help(TURBOFISH_SUGGESTION_STR);
                            err.help("or use `(...)` if you meant to specify fn arguments");
                        }

                        // If it looks like a genuine attempt to chain operators (as opposed to a
                        // misformatted turbofish, for instance), suggest a correct form.
                        if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
                        {
                            err.emit();
                            mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                        } else {
                            // These cases cause too many knock-down errors, bail out (#61329).
                            Err(err)
                        }
                    };
                }
                let recover =
                    self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
                err.emit();
                if recover {
                    return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
                }
            }
            _ => {}
        }
        Ok(None)
    }
932
consume_fn_args(&mut self) -> Result<(), ()>933 fn consume_fn_args(&mut self) -> Result<(), ()> {
934 let snapshot = self.clone();
935 self.bump(); // `(`
936
937 // Consume the fn call arguments.
938 let modifiers =
939 [(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)];
940 self.consume_tts(1, &modifiers[..]);
941
942 if self.token.kind == token::Eof {
943 // Not entirely sure that what we consumed were fn arguments, rollback.
944 *self = snapshot;
945 Err(())
946 } else {
947 // 99% certain that the suggestion is correct, continue parsing.
948 Ok(())
949 }
950 }
951
maybe_report_ambiguous_plus( &mut self, allow_plus: AllowPlus, impl_dyn_multi: bool, ty: &Ty, )952 pub(super) fn maybe_report_ambiguous_plus(
953 &mut self,
954 allow_plus: AllowPlus,
955 impl_dyn_multi: bool,
956 ty: &Ty,
957 ) {
958 if matches!(allow_plus, AllowPlus::No) && impl_dyn_multi {
959 let sum_with_parens = format!("({})", pprust::ty_to_string(&ty));
960 self.struct_span_err(ty.span, "ambiguous `+` in a type")
961 .span_suggestion(
962 ty.span,
963 "use parentheses to disambiguate",
964 sum_with_parens,
965 Applicability::MachineApplicable,
966 )
967 .emit();
968 }
969 }
970
    /// Recovers from `Ty + Bound` where a *path* was expected on the left-hand
    /// side of `+` (e.g. `&ty + Bound` instead of `&(ty + Bound)`), emitting
    /// E0178 and, for reference types, a machine-applicable parenthesization
    /// suggestion.
    pub(super) fn maybe_recover_from_bad_type_plus(
        &mut self,
        allow_plus: AllowPlus,
        ty: &Ty,
    ) -> PResult<'a, ()> {
        // Do not add `+` to expected tokens.
        if matches!(allow_plus, AllowPlus::No) || !self.token.is_like_plus() {
            return Ok(());
        }

        self.bump(); // `+`
        // Parse the trailing bounds so the error span covers the whole sum.
        let bounds = self.parse_generic_bounds(None)?;
        let sum_span = ty.span.to(self.prev_token.span);

        let mut err = struct_span_err!(
            self.sess.span_diagnostic,
            sum_span,
            E0178,
            "expected a path on the left-hand side of `+`, not `{}`",
            pprust::ty_to_string(ty)
        );

        match ty.kind {
            // `&Ty + Bound` can be pretty-printed as the fixed `&(Ty + Bound)`.
            TyKind::Rptr(ref lifetime, ref mut_ty) => {
                let sum_with_parens = pprust::to_string(|s| {
                    s.s.word("&");
                    s.print_opt_lifetime(lifetime);
                    s.print_mutability(mut_ty.mutbl, false);
                    s.popen();
                    s.print_type(&mut_ty.ty);
                    s.print_type_bounds(" +", &bounds);
                    s.pclose()
                });
                err.span_suggestion(
                    sum_span,
                    "try adding parentheses",
                    sum_with_parens,
                    Applicability::MachineApplicable,
                );
            }
            // Pointer and fn-pointer types likely need parentheses too, but no
            // automatic rewrite is attempted for them here.
            TyKind::Ptr(..) | TyKind::BareFn(..) => {
                err.span_label(sum_span, "perhaps you forgot parentheses?");
            }
            _ => {
                err.span_label(sum_span, "expected a path");
            }
        }
        err.emit();
        Ok(())
    }
1021
1022 /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`.
1023 /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem`
1024 /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type.
maybe_recover_from_bad_qpath<T: RecoverQPath>( &mut self, base: P<T>, allow_recovery: bool, ) -> PResult<'a, P<T>>1025 pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>(
1026 &mut self,
1027 base: P<T>,
1028 allow_recovery: bool,
1029 ) -> PResult<'a, P<T>> {
1030 // Do not add `::` to expected tokens.
1031 if allow_recovery && self.token == token::ModSep {
1032 if let Some(ty) = base.to_ty() {
1033 return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
1034 }
1035 }
1036 Ok(base)
1037 }
1038
    /// Given an already parsed `Ty`, parses the `::AssocItem` tail and
    /// combines them into a `<Ty>::AssocItem` expression/pattern/type,
    /// emitting a "missing angle brackets" error with a best-effort suggestion.
    pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>(
        &mut self,
        ty_span: Span,
        ty: P<Ty>,
    ) -> PResult<'a, P<T>> {
        self.expect(&token::ModSep)?;

        // Parse the `AssocItem` tail into a fresh path; its span is widened to
        // start at the type so the diagnostic covers the whole bad path.
        let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
        self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
        path.span = ty_span.to(self.prev_token.span);

        // Prefer the user-written snippet; fall back to pretty-printing the type.
        let ty_str = self.span_to_snippet(ty_span).unwrap_or_else(|_| pprust::ty_to_string(&ty));
        self.struct_span_err(path.span, "missing angle brackets in associated item path")
            .span_suggestion(
                // This is a best-effort recovery.
                path.span,
                "try",
                format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
                Applicability::MaybeIncorrect,
            )
            .emit();

        let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
        Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
    }
1066
maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool1067 pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
1068 if self.eat(&token::Semi) {
1069 let mut err = self.struct_span_err(self.prev_token.span, "expected item, found `;`");
1070 err.span_suggestion_short(
1071 self.prev_token.span,
1072 "remove this semicolon",
1073 String::new(),
1074 Applicability::MachineApplicable,
1075 );
1076 if !items.is_empty() {
1077 let previous_item = &items[items.len() - 1];
1078 let previous_item_kind_name = match previous_item.kind {
1079 // Say "braced struct" because tuple-structs and
1080 // braceless-empty-struct declarations do take a semicolon.
1081 ItemKind::Struct(..) => Some("braced struct"),
1082 ItemKind::Enum(..) => Some("enum"),
1083 ItemKind::Trait(..) => Some("trait"),
1084 ItemKind::Union(..) => Some("union"),
1085 _ => None,
1086 };
1087 if let Some(name) = previous_item_kind_name {
1088 err.help(&format!("{} declarations are not followed by a semicolon", name));
1089 }
1090 }
1091 err.emit();
1092 true
1093 } else {
1094 false
1095 }
1096 }
1097
    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
    /// closing delimiter. Returns `Ok(recovered)` when an unclosed-delimiter
    /// diagnostic was emitted instead; otherwise returns the built error.
    pub(super) fn unexpected_try_recover(
        &mut self,
        t: &TokenKind,
    ) -> PResult<'a, bool /* recovered */> {
        let token_str = pprust::token_kind_to_string(t);
        let this_token_str = super::token_descr(&self.token);
        // Pick the spans for the "expected" label (`prev_sp`) and the error itself (`sp`).
        let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
            // Point at the end of the macro call when reaching end of macro arguments.
            (token::Eof, Some(_)) => {
                let sp = self.sess.source_map().next_point(self.prev_token.span);
                (sp, sp)
            }
            // We don't want to point at the following span after DUMMY_SP.
            // This happens when the parser finds an empty TokenStream.
            _ if self.prev_token.span == DUMMY_SP => (self.token.span, self.token.span),
            // EOF, don't want to point at the following char, but rather the last token.
            (token::Eof, None) => (self.prev_token.span, self.token.span),
            _ => (self.prev_token.span.shrink_to_hi(), self.token.span),
        };
        let msg = format!(
            "expected `{}`, found {}",
            token_str,
            match (&self.token.kind, self.subparser_name) {
                (token::Eof, Some(origin)) => format!("end of {}", origin),
                _ => this_token_str,
            },
        );
        let mut err = self.struct_span_err(sp, &msg);
        let label_exp = format!("expected `{}`", token_str);
        // If an unclosed delimiter explains the unexpected token, prefer that
        // diagnostic and report whether recovery happened; on failure we get
        // our builder back and keep going.
        match self.recover_closing_delimiter(&[t.clone()], err) {
            Err(e) => err = e,
            Ok(recovered) => {
                return Ok(recovered);
            }
        }
        let sm = self.sess.source_map();
        if !sm.is_multiline(prev_sp.until(sp)) {
            // When the spans are in the same line, it means that the only content
            // between them is whitespace, point only at the found token.
            err.span_label(sp, label_exp);
        } else {
            err.span_label(prev_sp, label_exp);
            err.span_label(sp, "unexpected token");
        }
        Err(err)
    }
1146
    /// Expects a `;`, and on failure tries to recover from common mistakes
    /// (`,`/`:` typed instead of `;`, or a `;` simply missing at the end of a
    /// line), emitting a targeted suggestion when the layout makes the intent
    /// clear. Falls through to a plain `expect` error otherwise.
    pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
        if self.eat(&token::Semi) {
            return Ok(());
        }
        let sm = self.sess.source_map();
        let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
        let appl = Applicability::MachineApplicable;
        if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
            // Likely inside a macro, can't provide meaningful suggestions.
            return self.expect(&token::Semi).map(drop);
        } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
            // The current token is in the same line as the prior token, not recoverable.
        } else if [token::Comma, token::Colon].contains(&self.token.kind)
            && self.prev_token.kind == token::CloseDelim(token::Paren)
        {
            // Likely typo: The current token is on a new line and is expected to be
            // `.`, `;`, `?`, or an operator after a close delimiter token.
            //
            // let a = std::process::Command::new("echo")
            //         .arg("1")
            //         ,arg("2")
            //         ^
            // https://github.com/rust-lang/rust/issues/72253
            self.expect(&token::Semi)?;
            return Ok(());
        } else if self.look_ahead(1, |t| {
            t == &token::CloseDelim(token::Brace) || t.can_begin_expr() && t.kind != token::Colon
        }) && [token::Comma, token::Colon].contains(&self.token.kind)
        {
            // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
            // either `,` or `:`, and the next token could either start a new statement or is a
            // block close. For example:
            //
            //   let x = 32:
            //   let y = 42;
            self.bump();
            let sp = self.prev_token.span;
            self.struct_span_err(sp, &msg)
                .span_suggestion_short(sp, "change this to `;`", ";".to_string(), appl)
                .emit();
            return Ok(());
        } else if self.look_ahead(0, |t| {
            t == &token::CloseDelim(token::Brace)
                || (
                    t.can_begin_expr() && t != &token::Semi && t != &token::Pound
                    // Avoid triggering with too many trailing `#` in raw string.
                )
        }) {
            // Missing semicolon typo. This is triggered if the next token could either start a
            // new statement or is a block close. For example:
            //
            //   let x = 32
            //   let y = 42;
            let sp = self.prev_token.span.shrink_to_hi();
            self.struct_span_err(sp, &msg)
                .span_label(self.token.span, "unexpected token")
                .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl)
                .emit();
            return Ok(());
        }
        self.expect(&token::Semi).map(drop) // Error unconditionally
    }
1209
    /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
    /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`, emitting an
    /// error and rebuilding the result as a proper postfix `<expr>.await`.
    pub(super) fn recover_incorrect_await_syntax(
        &mut self,
        lo: Span,
        await_sp: Span,
        attrs: AttrVec,
    ) -> PResult<'a, P<Expr>> {
        let (hi, expr, is_question) = if self.token == token::Not {
            // Handle `await!(<expr>)`.
            self.recover_await_macro()?
        } else {
            self.recover_await_prefix(await_sp)?
        };
        let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
        let kind = match expr.kind {
            // Avoid knock-down errors as we don't know whether to interpret this as `foo().await?`
            // or `foo()?.await` (the very reason we went with postfix syntax).
            ExprKind::Try(_) => ExprKind::Err,
            _ => ExprKind::Await(expr),
        };
        let expr = self.mk_expr(lo.to(sp), kind, attrs);
        // The recovered expression may itself be followed by `::AssocItem`.
        self.maybe_recover_from_bad_qpath(expr, true)
    }
1234
recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)>1235 fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
1236 self.expect(&token::Not)?;
1237 self.expect(&token::OpenDelim(token::Paren))?;
1238 let expr = self.parse_expr()?;
1239 self.expect(&token::CloseDelim(token::Paren))?;
1240 Ok((self.prev_token.span, expr, false))
1241 }
1242
recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)>1243 fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
1244 let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
1245 let expr = if self.token == token::OpenDelim(token::Brace) {
1246 // Handle `await { <expr> }`.
1247 // This needs to be handled separately from the next arm to avoid
1248 // interpreting `await { <expr> }?` as `<expr>?.await`.
1249 self.parse_block_expr(None, self.token.span, BlockCheckMode::Default, AttrVec::new())
1250 } else {
1251 self.parse_expr()
1252 }
1253 .map_err(|mut err| {
1254 err.span_label(await_sp, "while parsing this incorrect await expression");
1255 err
1256 })?;
1257 Ok((expr.span, expr, is_question))
1258 }
1259
error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span1260 fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
1261 let expr_str =
1262 self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr));
1263 let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" });
1264 let sp = lo.to(hi);
1265 let app = match expr.kind {
1266 ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
1267 _ => Applicability::MachineApplicable,
1268 };
1269 self.struct_span_err(sp, "incorrect use of `await`")
1270 .span_suggestion(sp, "`await` is a postfix operation", suggestion, app)
1271 .emit();
1272 sp
1273 }
1274
1275 /// If encountering `future.await()`, consumes and emits an error.
recover_from_await_method_call(&mut self)1276 pub(super) fn recover_from_await_method_call(&mut self) {
1277 if self.token == token::OpenDelim(token::Paren)
1278 && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
1279 {
1280 // future.await()
1281 let lo = self.token.span;
1282 self.bump(); // (
1283 let sp = lo.to(self.token.span);
1284 self.bump(); // )
1285 self.struct_span_err(sp, "incorrect use of `await`")
1286 .span_suggestion(
1287 sp,
1288 "`await` is not a method call, remove the parentheses",
1289 String::new(),
1290 Applicability::MachineApplicable,
1291 )
1292 .emit();
1293 }
1294 }
1295
    /// Recovers from a 2015-edition `try!(...)` macro invocation (in the 2018
    /// edition `try` is a reserved keyword), consuming the whole call and
    /// suggesting the `?` operator and/or the raw-identifier form `r#try!`.
    pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
        let is_try = self.token.is_keyword(kw::Try);
        let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
        let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for (

        if is_try && is_questionmark && is_open {
            let lo = self.token.span;
            self.bump(); //remove try
            self.bump(); //remove !
            let try_span = lo.to(self.token.span); //we take the try!( span
            self.bump(); //remove (
            let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty
            self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block
            let hi = self.token.span;
            self.bump(); //remove )
            let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
            err.note("in the 2018 edition `try` is a reserved keyword, and the `try!()` macro is deprecated");
            let prefix = if is_empty { "" } else { "alternatively, " };
            if !is_empty {
                // Only suggest `?` when there is an expression to attach it to.
                err.multipart_suggestion(
                    "you can use the `?` operator instead",
                    vec![(try_span, "".to_owned()), (hi, "?".to_owned())],
                    Applicability::MachineApplicable,
                );
            }
            err.span_suggestion(lo.shrink_to_lo(), &format!("{}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax", prefix), "r#".to_string(), Applicability::MachineApplicable);
            err.emit();
            Ok(self.mk_expr_err(lo.to(hi)))
        } else {
            Err(self.expected_expression_found()) // The user isn't trying to invoke the try! macro
        }
    }
1328
    /// Recovers a situation like `for ( $pat in $expr )`
    /// and suggest writing `for $pat in $expr` instead.
    ///
    /// This should be called before parsing the `$block`.
    pub(super) fn recover_parens_around_for_head(
        &mut self,
        pat: P<Pat>,
        expr: &Expr,
        begin_paren: Option<Span>,
    ) -> P<Pat> {
        // Only fire when the head was opened with `(` and we are now at the `)`.
        match (&self.token.kind, begin_paren) {
            (token::CloseDelim(token::Paren), Some(begin_par_sp)) => {
                self.bump();

                let pat_str = self
                    // Remove the `(` from the span of the pattern:
                    .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap())
                    .unwrap_or_else(|_| pprust::pat_to_string(&pat));

                self.struct_span_err(self.prev_token.span, "unexpected closing `)`")
                    .span_label(begin_par_sp, "opening `(`")
                    .span_suggestion(
                        begin_par_sp.to(self.prev_token.span),
                        "remove parenthesis in `for` loop",
                        format!("{} in {}", pat_str, pprust::expr_to_string(&expr)),
                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
                        // with `x) in y)` which is syntactically invalid.
                        // However, this is prevented before we get here.
                        Applicability::MachineApplicable,
                    )
                    .emit();

                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
                pat.and_then(|pat| match pat.kind {
                    PatKind::Paren(pat) => pat,
                    _ => P(pat),
                })
            }
            _ => pat,
        }
    }
1370
    /// Heuristic: does a parsed type ascription `foo: bar...` look like it was
    /// actually meant as a path (`foo:<bar`, `foo:bar(..)`, `foo:bar {`,
    /// `foo:bar::<baz`, `foo:bar:baz`, `foo:bar::baz`)? Used to tailor the
    /// recovery suggestion. Relies on `&&` binding tighter than `||`.
    pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
        (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
            self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
            || self.token.is_ident() &&
            matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
            !self.token.is_reserved_ident() && // v `foo:bar(baz)`
            self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren))
            || self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {`
            || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz`
            self.look_ahead(2, |t| t == &token::Lt) &&
            self.look_ahead(3, |t| t.is_ident())
            || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
            self.look_ahead(2, |t| t.is_ident())
            || self.look_ahead(1, |t| t == &token::ModSep)
                && (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz`
                self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
    }
1388
recover_seq_parse_error( &mut self, delim: token::DelimToken, lo: Span, result: PResult<'a, P<Expr>>, ) -> P<Expr>1389 pub(super) fn recover_seq_parse_error(
1390 &mut self,
1391 delim: token::DelimToken,
1392 lo: Span,
1393 result: PResult<'a, P<Expr>>,
1394 ) -> P<Expr> {
1395 match result {
1396 Ok(x) => x,
1397 Err(mut err) => {
1398 err.emit();
1399 // Recover from parse error, callers expect the closing delim to be consumed.
1400 self.consume_block(delim, ConsumeClosingDelim::Yes);
1401 self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err, AttrVec::new())
1402 }
1403 }
1404 }
1405
    /// If one of the expected `tokens` is the closing delimiter of a delimiter
    /// recorded earlier as unclosed, reports that unmatched delimiter
    /// (suggesting where the closing one may belong) and — unless EOF was
    /// reached while lexing — recovers as if it were present, returning
    /// `Ok(true)`. Otherwise gives the error builder back via `Err`.
    pub(super) fn recover_closing_delimiter(
        &mut self,
        tokens: &[TokenKind],
        mut err: DiagnosticBuilder<'a>,
    ) -> PResult<'a, bool> {
        let mut pos = None;
        // We want to use the last closing delim that would apply.
        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
                && Some(self.token.span) > unmatched.unclosed_span
            {
                pos = Some(i);
            }
        }
        match pos {
            Some(pos) => {
                // Recover and assume that the detected unclosed delimiter was meant for
                // this location. Emit the diagnostic and act as if the delimiter was
                // present for the parser's sake.

                // Don't attempt to recover from this unclosed delimiter more than once.
                let unmatched = self.unclosed_delims.remove(pos);
                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
                if unmatched.found_delim.is_none() {
                    // We encountered `Eof`, set this fact here to avoid complaining about missing
                    // `fn main()` when we found place to suggest the closing brace.
                    *self.sess.reached_eof.borrow_mut() = true;
                }

                // We want to suggest the inclusion of the closing delimiter where it makes
                // the most sense, which is immediately after the last token:
                //
                //  {foo(bar {}}
                //      -      ^
                //      |      |
                //      |      help: `)` may belong here
                //      |
                //      unclosed delimiter
                if let Some(sp) = unmatched.unclosed_span {
                    err.span_label(sp, "unclosed delimiter");
                }
                // Backticks should be removed to apply suggestions.
                let mut delim = delim.to_string();
                delim.retain(|c| c != '`');
                err.span_suggestion_short(
                    self.prev_token.span.shrink_to_hi(),
                    &format!("`{}` may belong here", delim),
                    delim,
                    Applicability::MaybeIncorrect,
                );
                if unmatched.found_delim.is_none() {
                    // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
                    // errors which would be emitted elsewhere in the parser and let other error
                    // recovery consume the rest of the file.
                    Err(err)
                } else {
                    err.emit();
                    self.expected_tokens.clear(); // Reduce the number of errors.
                    Ok(true)
                }
            }
            _ => Err(err),
        }
    }
1470
    /// Eats tokens until we can be relatively sure we reached the end of the
    /// statement. This is something of a best-effort heuristic.
    ///
    /// We terminate when we find an unmatched `}` (without consuming it).
    pub(super) fn recover_stmt(&mut self) {
        // Ignore both semicolons and blocks: skip everything up to the unmatched `}`.
        self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
    }
1478
1479 /// If `break_on_semi` is `Break`, then we will stop consuming tokens after
1480 /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
1481 /// approximate -- it can mean we break too early due to macros, but that
1482 /// should only lead to sub-optimal recovery, not inaccurate parsing).
1483 ///
1484 /// If `break_on_block` is `Break`, then we will stop consuming tokens
1485 /// after finding (and consuming) a brace-delimited block.
recover_stmt_( &mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode, )1486 pub(super) fn recover_stmt_(
1487 &mut self,
1488 break_on_semi: SemiColonMode,
1489 break_on_block: BlockMode,
1490 ) {
1491 let mut brace_depth = 0;
1492 let mut bracket_depth = 0;
1493 let mut in_block = false;
1494 debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", break_on_semi, break_on_block);
1495 loop {
1496 debug!("recover_stmt_ loop {:?}", self.token);
1497 match self.token.kind {
1498 token::OpenDelim(token::DelimToken::Brace) => {
1499 brace_depth += 1;
1500 self.bump();
1501 if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
1502 {
1503 in_block = true;
1504 }
1505 }
1506 token::OpenDelim(token::DelimToken::Bracket) => {
1507 bracket_depth += 1;
1508 self.bump();
1509 }
1510 token::CloseDelim(token::DelimToken::Brace) => {
1511 if brace_depth == 0 {
1512 debug!("recover_stmt_ return - close delim {:?}", self.token);
1513 break;
1514 }
1515 brace_depth -= 1;
1516 self.bump();
1517 if in_block && bracket_depth == 0 && brace_depth == 0 {
1518 debug!("recover_stmt_ return - block end {:?}", self.token);
1519 break;
1520 }
1521 }
1522 token::CloseDelim(token::DelimToken::Bracket) => {
1523 bracket_depth -= 1;
1524 if bracket_depth < 0 {
1525 bracket_depth = 0;
1526 }
1527 self.bump();
1528 }
1529 token::Eof => {
1530 debug!("recover_stmt_ return - Eof");
1531 break;
1532 }
1533 token::Semi => {
1534 self.bump();
1535 if break_on_semi == SemiColonMode::Break
1536 && brace_depth == 0
1537 && bracket_depth == 0
1538 {
1539 debug!("recover_stmt_ return - Semi");
1540 break;
1541 }
1542 }
1543 token::Comma
1544 if break_on_semi == SemiColonMode::Comma
1545 && brace_depth == 0
1546 && bracket_depth == 0 =>
1547 {
1548 debug!("recover_stmt_ return - Semi");
1549 break;
1550 }
1551 _ => self.bump(),
1552 }
1553 }
1554 }
1555
check_for_for_in_in_typo(&mut self, in_span: Span)1556 pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
1557 if self.eat_keyword(kw::In) {
1558 // a common typo: `for _ in in bar {}`
1559 self.struct_span_err(self.prev_token.span, "expected iterable, found keyword `in`")
1560 .span_suggestion_short(
1561 in_span.until(self.prev_token.span),
1562 "remove the duplicated `in`",
1563 String::new(),
1564 Applicability::MachineApplicable,
1565 )
1566 .emit();
1567 }
1568 }
1569
    /// Consumes a misplaced doc comment or attribute sitting on a function
    /// parameter's *type*, emitting an error for it.
    pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
        if let token::DocComment(..) = self.token.kind {
            self.struct_span_err(
                self.token.span,
                "documentation comments cannot be applied to a function parameter's type",
            )
            .span_label(self.token.span, "doc comments are not allowed here")
            .emit();
            self.bump();
        } else if self.token == token::Pound
            && self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket))
        {
            let lo = self.token.span;
            // Skip every token until next possible arg.
            // NOTE(review): this loop assumes the closing `]` is present before
            // the end of input — confirm EOF cannot be reached here.
            while self.token != token::CloseDelim(token::Bracket) {
                self.bump();
            }
            let sp = lo.to(self.token.span);
            self.bump();
            self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
                .span_label(sp, "attributes are not allowed here")
                .emit();
        }
    }
1594
parameter_without_type( &mut self, err: &mut DiagnosticBuilder<'_>, pat: P<ast::Pat>, require_name: bool, first_param: bool, ) -> Option<Ident>1595 pub(super) fn parameter_without_type(
1596 &mut self,
1597 err: &mut DiagnosticBuilder<'_>,
1598 pat: P<ast::Pat>,
1599 require_name: bool,
1600 first_param: bool,
1601 ) -> Option<Ident> {
1602 // If we find a pattern followed by an identifier, it could be an (incorrect)
1603 // C-style parameter declaration.
1604 if self.check_ident()
1605 && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren))
1606 {
1607 // `fn foo(String s) {}`
1608 let ident = self.parse_ident().unwrap();
1609 let span = pat.span.with_hi(ident.span.hi());
1610
1611 err.span_suggestion(
1612 span,
1613 "declare the type after the parameter binding",
1614 String::from("<identifier>: <type>"),
1615 Applicability::HasPlaceholders,
1616 );
1617 return Some(ident);
1618 } else if require_name
1619 && (self.token == token::Comma
1620 || self.token == token::Lt
1621 || self.token == token::CloseDelim(token::Paren))
1622 {
1623 let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)";
1624
1625 let (ident, self_sugg, param_sugg, type_sugg) = match pat.kind {
1626 PatKind::Ident(_, ident, _) => (
1627 ident,
1628 format!("self: {}", ident),
1629 format!("{}: TypeName", ident),
1630 format!("_: {}", ident),
1631 ),
1632 // Also catches `fn foo(&a)`.
1633 PatKind::Ref(ref pat, mutab)
1634 if matches!(pat.clone().into_inner().kind, PatKind::Ident(..)) =>
1635 {
1636 match pat.clone().into_inner().kind {
1637 PatKind::Ident(_, ident, _) => {
1638 let mutab = mutab.prefix_str();
1639 (
1640 ident,
1641 format!("self: &{}{}", mutab, ident),
1642 format!("{}: &{}TypeName", ident, mutab),
1643 format!("_: &{}{}", mutab, ident),
1644 )
1645 }
1646 _ => unreachable!(),
1647 }
1648 }
1649 _ => {
1650 // Otherwise, try to get a type and emit a suggestion.
1651 if let Some(ty) = pat.to_ty() {
1652 err.span_suggestion_verbose(
1653 pat.span,
1654 "explicitly ignore the parameter name",
1655 format!("_: {}", pprust::ty_to_string(&ty)),
1656 Applicability::MachineApplicable,
1657 );
1658 err.note(rfc_note);
1659 }
1660
1661 return None;
1662 }
1663 };
1664
1665 // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}`
1666 if first_param {
1667 err.span_suggestion(
1668 pat.span,
1669 "if this is a `self` type, give it a parameter name",
1670 self_sugg,
1671 Applicability::MaybeIncorrect,
1672 );
1673 }
1674 // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to
1675 // `fn foo(HashMap: TypeName<u32>)`.
1676 if self.token != token::Lt {
1677 err.span_suggestion(
1678 pat.span,
1679 "if this is a parameter name, give it a type",
1680 param_sugg,
1681 Applicability::HasPlaceholders,
1682 );
1683 }
1684 err.span_suggestion(
1685 pat.span,
1686 "if this is a type, explicitly ignore the parameter name",
1687 type_sugg,
1688 Applicability::MachineApplicable,
1689 );
1690 err.note(rfc_note);
1691
1692 // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name.
1693 return if self.token == token::Lt { None } else { Some(ident) };
1694 }
1695 None
1696 }
1697
recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)>1698 pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
1699 let pat = self.parse_pat_no_top_alt(Some("argument name"))?;
1700 self.expect(&token::Colon)?;
1701 let ty = self.parse_ty()?;
1702
1703 struct_span_err!(
1704 self.diagnostic(),
1705 pat.span,
1706 E0642,
1707 "patterns aren't allowed in methods without bodies",
1708 )
1709 .span_suggestion_short(
1710 pat.span,
1711 "give this argument a name or use an underscore to ignore it",
1712 "_".to_owned(),
1713 Applicability::MachineApplicable,
1714 )
1715 .emit();
1716
1717 // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
1718 let pat =
1719 P(Pat { kind: PatKind::Wild, span: pat.span, id: ast::DUMMY_NODE_ID, tokens: None });
1720 Ok((pat, ty))
1721 }
1722
recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param>1723 pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
1724 let sp = param.pat.span;
1725 param.ty.kind = TyKind::Err;
1726 self.struct_span_err(sp, "unexpected `self` parameter in function")
1727 .span_label(sp, "must be the first parameter of an associated function")
1728 .emit();
1729 Ok(param)
1730 }
1731
    /// Skips over a `delim`-delimited token tree, tracking nesting depth.
    /// With `ConsumeClosingDelim::No` the outermost closing delimiter is left
    /// in place for the caller to consume.
    pub(super) fn consume_block(
        &mut self,
        delim: token::DelimToken,
        consume_close: ConsumeClosingDelim,
    ) {
        let mut brace_depth = 0;
        loop {
            if self.eat(&token::OpenDelim(delim)) {
                brace_depth += 1;
            } else if self.check(&token::CloseDelim(delim)) {
                if brace_depth == 0 {
                    if let ConsumeClosingDelim::Yes = consume_close {
                        // Some of the callers of this method expect to be able to parse the
                        // closing delimiter themselves, so we leave it alone. Otherwise we advance
                        // the parser.
                        self.bump();
                    }
                    return;
                } else {
                    self.bump();
                    brace_depth -= 1;
                    continue;
                }
            } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
                // Ran out of tokens (or hit an invisible delimiter); give up.
                return;
            } else {
                self.bump();
            }
        }
    }
1762
expected_expression_found(&self) -> DiagnosticBuilder<'a>1763 pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
1764 let (span, msg) = match (&self.token.kind, self.subparser_name) {
1765 (&token::Eof, Some(origin)) => {
1766 let sp = self.sess.source_map().next_point(self.prev_token.span);
1767 (sp, format!("expected expression, found end of {}", origin))
1768 }
1769 _ => (
1770 self.token.span,
1771 format!("expected expression, found {}", super::token_descr(&self.token),),
1772 ),
1773 };
1774 let mut err = self.struct_span_err(span, &msg);
1775 let sp = self.sess.source_map().start_point(self.token.span);
1776 if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
1777 self.sess.expr_parentheses_needed(&mut err, *sp, None);
1778 }
1779 err.span_label(span, "expected expression");
1780 err
1781 }
1782
consume_tts( &mut self, mut acc: i64, modifier: &[(token::TokenKind, i64)], )1783 fn consume_tts(
1784 &mut self,
1785 mut acc: i64, // `i64` because malformed code can have more closing delims than opening.
1786 // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`.
1787 modifier: &[(token::TokenKind, i64)],
1788 ) {
1789 while acc > 0 {
1790 if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) {
1791 acc += *val;
1792 }
1793 if self.token.kind == token::Eof {
1794 break;
1795 }
1796 self.bump();
1797 }
1798 }
1799
1800 /// Replace duplicated recovered parameters with `_` pattern to avoid unnecessary errors.
1801 ///
1802 /// This is necessary because at this point we don't know whether we parsed a function with
1803 /// anonymous parameters or a function with names but no types. In order to minimize
1804 /// unnecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where
1805 /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
1806 /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
1807 /// we deduplicate them to not complain about duplicated parameter names.
deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>)1808 pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
1809 let mut seen_inputs = FxHashSet::default();
1810 for input in fn_inputs.iter_mut() {
1811 let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
1812 (&input.pat.kind, &input.ty.kind)
1813 {
1814 Some(*ident)
1815 } else {
1816 None
1817 };
1818 if let Some(ident) = opt_ident {
1819 if seen_inputs.contains(&ident) {
1820 input.pat.kind = PatKind::Wild;
1821 }
1822 seen_inputs.insert(ident);
1823 }
1824 }
1825 }
1826
1827 /// Handle encountering a symbol in a generic argument list that is not a `,` or `>`. In this
1828 /// case, we emit an error and try to suggest enclosing a const argument in braces if it looks
1829 /// like the user has forgotten them.
handle_ambiguous_unbraced_const_arg( &mut self, args: &mut Vec<AngleBracketedArg>, ) -> PResult<'a, bool>1830 pub fn handle_ambiguous_unbraced_const_arg(
1831 &mut self,
1832 args: &mut Vec<AngleBracketedArg>,
1833 ) -> PResult<'a, bool> {
1834 // If we haven't encountered a closing `>`, then the argument is malformed.
1835 // It's likely that the user has written a const expression without enclosing it
1836 // in braces, so we try to recover here.
1837 let arg = args.pop().unwrap();
1838 // FIXME: for some reason using `unexpected` or `expected_one_of_not_found` has
1839 // adverse side-effects to subsequent errors and seems to advance the parser.
1840 // We are causing this error here exclusively in case that a `const` expression
1841 // could be recovered from the current parser state, even if followed by more
1842 // arguments after a comma.
1843 let mut err = self.struct_span_err(
1844 self.token.span,
1845 &format!("expected one of `,` or `>`, found {}", super::token_descr(&self.token)),
1846 );
1847 err.span_label(self.token.span, "expected one of `,` or `>`");
1848 match self.recover_const_arg(arg.span(), err) {
1849 Ok(arg) => {
1850 args.push(AngleBracketedArg::Arg(arg));
1851 if self.eat(&token::Comma) {
1852 return Ok(true); // Continue
1853 }
1854 }
1855 Err(mut err) => {
1856 args.push(arg);
1857 // We will emit a more generic error later.
1858 err.delay_as_bug();
1859 }
1860 }
1861 return Ok(false); // Don't continue.
1862 }
1863
1864 /// Attempt to parse a generic const argument that has not been enclosed in braces.
1865 /// There are a limited number of expressions that are permitted without being encoded
1866 /// in braces:
1867 /// - Literals.
1868 /// - Single-segment paths (i.e. standalone generic const parameters).
1869 /// All other expressions that can be parsed will emit an error suggesting the expression be
1870 /// wrapped in braces.
handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>>1871 pub fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P<Expr>> {
1872 let start = self.token.span;
1873 let expr = self.parse_expr_res(Restrictions::CONST_EXPR, None).map_err(|mut err| {
1874 err.span_label(
1875 start.shrink_to_lo(),
1876 "while parsing a const generic argument starting here",
1877 );
1878 err
1879 })?;
1880 if !self.expr_is_valid_const_arg(&expr) {
1881 self.struct_span_err(
1882 expr.span,
1883 "expressions must be enclosed in braces to be used as const generic \
1884 arguments",
1885 )
1886 .multipart_suggestion(
1887 "enclose the `const` expression in braces",
1888 vec![
1889 (expr.span.shrink_to_lo(), "{ ".to_string()),
1890 (expr.span.shrink_to_hi(), " }".to_string()),
1891 ],
1892 Applicability::MachineApplicable,
1893 )
1894 .emit();
1895 }
1896 Ok(expr)
1897 }
1898
1899 /// Try to recover from possible generic const argument without `{` and `}`.
1900 ///
1901 /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
1902 /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
1903 /// if we think that that the resulting expression would be well formed.
recover_const_arg( &mut self, start: Span, mut err: DiagnosticBuilder<'a>, ) -> PResult<'a, GenericArg>1904 pub fn recover_const_arg(
1905 &mut self,
1906 start: Span,
1907 mut err: DiagnosticBuilder<'a>,
1908 ) -> PResult<'a, GenericArg> {
1909 let is_op = AssocOp::from_token(&self.token)
1910 .and_then(|op| {
1911 if let AssocOp::Greater
1912 | AssocOp::Less
1913 | AssocOp::ShiftRight
1914 | AssocOp::GreaterEqual
1915 // Don't recover from `foo::<bar = baz>`, because this could be an attempt to
1916 // assign a value to a defaulted generic parameter.
1917 | AssocOp::Assign
1918 | AssocOp::AssignOp(_) = op
1919 {
1920 None
1921 } else {
1922 Some(op)
1923 }
1924 })
1925 .is_some();
1926 // This will be true when a trait object type `Foo +` or a path which was a `const fn` with
1927 // type params has been parsed.
1928 let was_op =
1929 matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt);
1930 if !is_op && !was_op {
1931 // We perform these checks and early return to avoid taking a snapshot unnecessarily.
1932 return Err(err);
1933 }
1934 let snapshot = self.clone();
1935 if is_op {
1936 self.bump();
1937 }
1938 match self.parse_expr_res(Restrictions::CONST_EXPR, None) {
1939 Ok(expr) => {
1940 if token::Comma == self.token.kind || self.token.kind.should_end_const_arg() {
1941 // Avoid the following output by checking that we consumed a full const arg:
1942 // help: expressions must be enclosed in braces to be used as const generic
1943 // arguments
1944 // |
1945 // LL | let sr: Vec<{ (u32, _, _) = vec![] };
1946 // | ^ ^
1947 err.multipart_suggestion(
1948 "expressions must be enclosed in braces to be used as const generic \
1949 arguments",
1950 vec![
1951 (start.shrink_to_lo(), "{ ".to_string()),
1952 (expr.span.shrink_to_hi(), " }".to_string()),
1953 ],
1954 Applicability::MaybeIncorrect,
1955 );
1956 let value = self.mk_expr_err(start.to(expr.span));
1957 err.emit();
1958 return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
1959 }
1960 }
1961 Err(mut err) => {
1962 err.cancel();
1963 }
1964 }
1965 *self = snapshot;
1966 Err(err)
1967 }
1968
1969 /// Get the diagnostics for the cases where `move async` is found.
1970 ///
1971 /// `move_async_span` starts at the 'm' of the move keyword and ends with the 'c' of the async keyword
incorrect_move_async_order_found( &self, move_async_span: Span, ) -> DiagnosticBuilder<'a>1972 pub(super) fn incorrect_move_async_order_found(
1973 &self,
1974 move_async_span: Span,
1975 ) -> DiagnosticBuilder<'a> {
1976 let mut err =
1977 self.struct_span_err(move_async_span, "the order of `move` and `async` is incorrect");
1978 err.span_suggestion_verbose(
1979 move_async_span,
1980 "try switching the order",
1981 "async move".to_owned(),
1982 Applicability::MaybeIncorrect,
1983 );
1984 err
1985 }
1986 }
1987