Diffstat (limited to 'compiler/rustc_parse/src/parser/diagnostics.rs')
 -rw-r--r--   compiler/rustc_parse/src/parser/diagnostics.rs | 369
 1 file changed, 190 insertions(+), 179 deletions(-)
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 7a24b819b5f..9677eea0604 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -21,21 +21,22 @@ use crate::errors::{
 use crate::fluent_generated as fluent;
 use crate::parser;
 use crate::parser::attr::InnerAttrPolicy;
+use ast::token::IdentIsRaw;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
 use rustc_ast::tokenstream::AttrTokenTree;
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
-    AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingAnnotation, Block,
+    AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
     BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
-    PatKind, Path, PathSegment, QSelf, Ty, TyKind,
+    PatKind, Path, PathSegment, QSelf, Recovered, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
-    pluralize, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
-    ErrorGuaranteed, FatalError, PErr, PResult,
+    pluralize, Applicability, Diag, DiagCtxt, ErrorGuaranteed, FatalError, PErr, PResult,
+    Subdiagnostic,
 };
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::Spanned;
@@ -44,16 +45,17 @@ use rustc_span::{BytePos, Span, SpanSnippetError, Symbol, DUMMY_SP};
 use std::mem::take;
 use std::ops::{Deref, DerefMut};
 use thin_vec::{thin_vec, ThinVec};
+use tracing::{debug, trace};
 
 /// Creates a placeholder argument.
-pub(super) fn dummy_arg(ident: Ident) -> Param {
+pub(super) fn dummy_arg(ident: Ident, guar: ErrorGuaranteed) -> Param {
     let pat = P(Pat {
         id: ast::DUMMY_NODE_ID,
-        kind: PatKind::Ident(BindingAnnotation::NONE, ident, None),
+        kind: PatKind::Ident(BindingMode::NONE, ident, None),
         span: ident.span,
         tokens: None,
     });
-    let ty = Ty { kind: TyKind::Err, span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
+    let ty = Ty { kind: TyKind::Err(guar), span: ident.span, id: ast::DUMMY_NODE_ID, tokens: None };
     Param {
         attrs: AttrVec::default(),
         id: ast::DUMMY_NODE_ID,
@@ -208,11 +210,11 @@ struct MultiSugg {
 }
 
 impl MultiSugg {
-    fn emit(self, err: &mut Diagnostic) {
+    fn emit(self, err: &mut Diag<'_>) {
         err.multipart_suggestion(self.msg, self.patches, self.applicability);
     }
 
-    fn emit_verbose(self, err: &mut Diagnostic) {
+    fn emit_verbose(self, err: &mut Diag<'_>) {
         err.multipart_suggestion_verbose(self.msg, self.patches, self.applicability);
     }
 }
@@ -240,7 +242,7 @@ impl<'a> DerefMut for SnapshotParser<'a> {
 
 impl<'a> Parser<'a> {
     pub fn dcx(&self) -> &'a DiagCtxt {
-        &self.sess.dcx
+        &self.psess.dcx
     }
 
     /// Replace `self` with `snapshot.parser`.
@@ -255,7 +257,7 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
-        self.sess.source_map().span_to_snippet(span)
+        self.psess.source_map().span_to_snippet(span)
     }
 
     /// Emits an error with suggestions if an identifier was expected but not found.
@@ -264,7 +266,7 @@ impl<'a> Parser<'a> {
     pub(super) fn expected_ident_found(
         &mut self,
         recover: bool,
-    ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+    ) -> PResult<'a, (Ident, IdentIsRaw)> {
         if let TokenKind::DocComment(..) = self.prev_token.kind {
             return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
                 span: self.prev_token.span,
@@ -277,7 +279,7 @@ impl<'a> Parser<'a> {
             TokenKind::Colon,
             TokenKind::Comma,
             TokenKind::Semi,
-            TokenKind::ModSep,
+            TokenKind::PathSep,
            TokenKind::OpenDelim(Delimiter::Brace),
            TokenKind::OpenDelim(Delimiter::Parenthesis),
            TokenKind::CloseDelim(Delimiter::Brace),
@@ -290,13 +292,13 @@ impl<'a> Parser<'a> {
         let bad_token = self.token.clone();
 
         // suggest prepending a keyword in identifier position with `r#`
-        let suggest_raw = if let Some((ident, false)) = self.token.ident()
+        let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.is_raw_guess()
            && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
        {
-            recovered_ident = Some((ident, true));
+            recovered_ident = Some((ident, IdentIsRaw::Yes));
 
-            // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
+            // `Symbol::to_string()` is different from `Symbol::into_diag_arg()`,
             // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
             let ident_name = ident.name.to_string();
 
@@ -320,7 +322,7 @@ impl<'a> Parser<'a> {
         let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
             let (invalid, valid) = self.token.span.split_at(len as u32);
 
-            recovered_ident = Some((Ident::new(valid_portion, valid), false));
+            recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
 
             HelpIdentifierStartsWithNumber { num_span: invalid }
         });
@@ -362,7 +364,7 @@ impl<'a> Parser<'a> {
                 if !self.look_ahead(1, |t| *t == token::Lt)
                     && let Ok(snippet) =
-                        self.sess.source_map().span_to_snippet(generic.span)
+                        self.psess.source_map().span_to_snippet(generic.span)
                 {
                     err.multipart_suggestion_verbose(
                         format!("place the generic parameter name after the {ident_name} name"),
@@ -399,7 +401,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn expected_ident_found_err(&mut self) -> DiagnosticBuilder<'a> {
+    pub(super) fn expected_ident_found_err(&mut self) -> Diag<'a> {
         self.expected_ident_found(false).unwrap_err()
     }
 
@@ -429,7 +431,7 @@ impl<'a> Parser<'a> {
         &mut self,
         edible: &[TokenKind],
         inedible: &[TokenKind],
-    ) -> PResult<'a, bool /* recovered */> {
+    ) -> PResult<'a, Recovered> {
         debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -452,7 +454,6 @@ impl<'a> Parser<'a> {
         let mut expected = self
             .expected_tokens
             .iter()
-            .cloned()
             .filter(|token| {
                 // Filter out suggestions that suggest the same token which was found and deemed incorrect.
                 fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool {
@@ -464,7 +465,7 @@ impl<'a> Parser<'a> {
                     false
                 }
 
-                if *token != parser::TokenType::Token(self.token.kind.clone()) {
+                if **token != parser::TokenType::Token(self.token.kind.clone()) {
                    let eq = is_ident_eq_keyword(&self.token.kind, &token);
                    // If the suggestion is a keyword and the found token is an ident,
                    // the content of which are equal to the suggestion's content,
@@ -483,11 +484,12 @@ impl<'a> Parser<'a> {
                 }
                 false
             })
+            .cloned()
             .collect::<Vec<_>>();
         expected.sort_by_cached_key(|x| x.to_string());
         expected.dedup();
 
-        let sm = self.sess.source_map();
+        let sm = self.psess.source_map();
 
         // Special-case "expected `;`" errors.
         if expected.contains(&TokenType::Token(token::Semi)) {
@@ -525,14 +527,14 @@ impl<'a> Parser<'a> {
                 //
                 //   let x = 32:
                 //   let y = 42;
-                self.dcx().emit_err(ExpectedSemi {
+                let guar = self.dcx().emit_err(ExpectedSemi {
                     span: self.token.span,
                     token: self.token.clone(),
                     unexpected_token_label: None,
                     sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
                 });
                 self.bump();
-                return Ok(true);
+                return Ok(Recovered::Yes(guar));
             } else if self.look_ahead(0, |t| {
                 t == &token::CloseDelim(Delimiter::Brace)
                     || ((t.can_begin_expr() || t.can_begin_item())
@@ -550,13 +552,13 @@ impl<'a> Parser<'a> {
                 //   let x = 32
                 //   let y = 42;
                 let span = self.prev_token.span.shrink_to_hi();
-                self.dcx().emit_err(ExpectedSemi {
+                let guar = self.dcx().emit_err(ExpectedSemi {
                     span,
                     token: self.token.clone(),
                     unexpected_token_label: Some(self.token.span),
                     sugg: ExpectedSemiSugg::AddSemi(span),
                 });
-                return Ok(true);
+                return Ok(Recovered::Yes(guar));
             }
         }
@@ -653,9 +655,9 @@ impl<'a> Parser<'a> {
         // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
         // that in the parser requires unbounded lookahead, so we only add a hint to the existing
         // error rather than replacing it entirely.
-        if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
+        if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
             && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
-            || (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
+            || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
                 && matches!(
                     &self.token.kind,
                     TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@@ -665,7 +667,7 @@ impl<'a> Parser<'a> {
         {
             err.note("you may be trying to write a c-string literal");
             err.note("c-string literals require Rust 2021 or later");
-            HelpUseLatestEdition::new().add_to_diagnostic(&mut err);
+            err.subdiagnostic(self.dcx(), HelpUseLatestEdition::new());
         }
 
         // `pub` may be used for an item or `pub(crate)`
@@ -710,8 +712,8 @@ impl<'a> Parser<'a> {
 
         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
-                err.emit();
-                return Ok(true);
+                let guar = err.emit();
+                return Ok(Recovered::Yes(guar));
             } else {
                 return Err(err);
             }
@@ -742,7 +744,8 @@ impl<'a> Parser<'a> {
         Err(err)
     }
 
-    pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) {
+    /// The user has written `#[attr] expr` which is unsupported. (#106020)
+    pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) -> ErrorGuaranteed {
         // Missing semicolon typo error.
         let span = self.prev_token.span.shrink_to_hi();
         let mut err = self.dcx().create_err(ExpectedSemi {
@@ -785,6 +788,8 @@ impl<'a> Parser<'a> {
             ],
             Applicability::MachineApplicable,
         );
+
+        // Special handling for `#[cfg(...)]` chains
         let mut snapshot = self.create_snapshot_for_diagnostic();
         if let [attr] = &expr.attrs[..]
             && let ast::AttrKind::Normal(attr_kind) = &attr.kind
@@ -795,9 +800,8 @@ impl<'a> Parser<'a> {
             {
                 Ok(next_attr) => next_attr,
                 Err(inner_err) => {
-                    err.cancel();
                     inner_err.cancel();
-                    return;
+                    return err.emit();
                 }
             }
             && let ast::AttrKind::Normal(next_attr_kind) = next_attr.kind
@@ -808,9 +812,8 @@ impl<'a> Parser<'a> {
             let next_expr = match snapshot.parse_expr() {
                 Ok(next_expr) => next_expr,
                 Err(inner_err) => {
-                    err.cancel();
                     inner_err.cancel();
-                    return;
+                    return err.emit();
                 }
             };
             // We have for sure
@@ -819,7 +822,7 @@ impl<'a> Parser<'a> {
            // #[cfg(..)]
            // other_expr
            // So we suggest using `if cfg!(..) { expr } else if cfg!(..) { other_expr }`.
-            let margin = self.sess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
+            let margin = self.psess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
             let sugg = vec![
                 (attr.span.with_hi(segment.span().hi()), "if cfg!".to_string()),
                 (args_span.shrink_to_hi().with_hi(attr.span.hi()), " {".to_string()),
@@ -843,11 +846,11 @@ impl<'a> Parser<'a> {
                 );
             }
         }
-        err.emit();
+        err.emit()
     }
 
-    fn check_too_many_raw_str_terminators(&mut self, err: &mut Diagnostic) -> bool {
-        let sm = self.sess.source_map();
+    fn check_too_many_raw_str_terminators(&mut self, err: &mut Diag<'_>) -> bool {
+        let sm = self.psess.source_map();
         match (&self.prev_token.kind, &self.token.kind) {
             (
                 TokenKind::Literal(Lit {
@@ -900,7 +903,7 @@ impl<'a> Parser<'a> {
         //     fn foo() -> Foo {
         //         field: value,
         //     }
-        info!(?maybe_struct_name, ?self.token);
+        debug!(?maybe_struct_name, ?self.token);
         let mut snapshot = self.create_snapshot_for_diagnostic();
         let path = Path {
             segments: ThinVec::new(),
@@ -919,10 +922,10 @@ impl<'a> Parser<'a> {
                 //     fn foo() -> Foo { Path {
                 //         field: value,
                 //     } }
-                err.delay_as_bug();
+                let guar = err.delay_as_bug();
                 self.restore_snapshot(snapshot);
                 let mut tail = self.mk_block(
-                    thin_vec![self.mk_stmt_err(expr.span)],
+                    thin_vec![self.mk_stmt_err(expr.span, guar)],
                     s,
                     lo.to(self.prev_token.span),
                 );
@@ -932,7 +935,7 @@ impl<'a> Parser<'a> {
                 // expand `before` so that we take care of module path such as:
                 // `foo::Bar { ... } `
                 // we expect to suggest `(foo::Bar { ... })` instead of `foo::(Bar { ... })`
-                let sm = self.sess.source_map();
+                let sm = self.psess.source_map();
                 let before = maybe_struct_name.span.shrink_to_lo();
                 if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| {
                     t.is_alphanumeric() || t == ':' || t == '_'
@@ -980,7 +983,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_closure_body(
         &mut self,
-        mut err: DiagnosticBuilder<'a>,
+        mut err: Diag<'a>,
         before: token::Token,
         prev: token::Token,
         token: token::Token,
@@ -988,7 +991,7 @@ impl<'a> Parser<'a> {
         decl_hi: Span,
     ) -> PResult<'a, P<Expr>> {
         err.span_label(lo.to(decl_hi), "while parsing the body of this closure");
-        match before.kind {
+        let guar = match before.kind {
             token::OpenDelim(Delimiter::Brace)
                 if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
             {
@@ -1002,8 +1005,9 @@ impl<'a> Parser<'a> {
                     ],
                     Applicability::MaybeIncorrect,
                 );
-                err.emit();
+                let guar = err.emit();
                 self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+                guar
             }
             token::OpenDelim(Delimiter::Parenthesis)
                 if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
             {
@@ -1020,7 +1024,7 @@ impl<'a> Parser<'a> {
                     ],
                     Applicability::MaybeIncorrect,
                 );
-                err.emit();
+                err.emit()
             }
             _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => {
                 // We don't have a heuristic to correctly identify where the block
@@ -1033,8 +1037,8 @@ impl<'a> Parser<'a> {
                 return Err(err);
             }
             _ => return Err(err),
-        }
-        Ok(self.mk_expr_err(lo.to(self.token.span)))
+        };
+        Ok(self.mk_expr_err(lo.to(self.token.span), guar))
     }
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
@@ -1165,7 +1169,7 @@ impl<'a> Parser<'a> {
             return;
         }
 
-        if token::ModSep == self.token.kind && segment.args.is_none() {
+        if token::PathSep == self.token.kind && segment.args.is_none() {
             let snapshot = self.create_snapshot_for_diagnostic();
             self.bump();
             let lo = self.token.span;
@@ -1210,9 +1214,9 @@ impl<'a> Parser<'a> {
     /// encounter a parse error when encountering the first `,`.
     pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
         &mut self,
-        mut e: DiagnosticBuilder<'a>,
+        mut e: Diag<'a>,
         expr: &mut P<Expr>,
-    ) -> PResult<'a, ()> {
+    ) -> PResult<'a, ErrorGuaranteed> {
         if let ExprKind::Binary(binop, _, _) = &expr.kind
             && let ast::BinOpKind::Lt = binop.node
             && self.eat(&token::Comma)
         {
             let x = self.parse_seq_to_before_end(
                 &token::Gt,
                 SeqSep::trailing_allowed(token::Comma),
-                |p| p.parse_generic_arg(None),
+                |p| match p.parse_generic_arg(None)? {
+                    Some(arg) => Ok(arg),
+                    // If we didn't eat a generic arg, then we should error.
+                    None => p.unexpected_any(),
+                },
             );
             match x {
-                Ok((_, _, false)) => {
+                Ok((_, _, Recovered::No)) => {
                     if self.eat(&token::Gt) {
                         // We made sense of it. Improve the error message.
                         e.span_suggestion_verbose(
@@ -1237,9 +1245,9 @@ impl<'a> Parser<'a> {
                                 // The subsequent expression is valid. Mark
                                 // `expr` as erroneous and emit `e` now, but
                                 // return `Ok` so parsing can continue.
-                                e.emit();
-                                *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
-                                return Ok(());
+                                let guar = e.emit();
+                                *expr = self.mk_expr_err(expr.span.to(self.prev_token.span), guar);
+                                return Ok(guar);
                             }
                             Err(err) => {
                                 err.cancel();
@@ -1247,7 +1255,7 @@ impl<'a> Parser<'a> {
                         }
                     }
                 }
-                Ok((_, _, true)) => {}
+                Ok((_, _, Recovered::Yes(_))) => {}
                 Err(err) => {
                     err.cancel();
                 }
@@ -1258,7 +1266,7 @@ impl<'a> Parser<'a> {
 
     /// Suggest add the missing `let` before the identifier in stmt
     /// `a: Ty = 1` -> `let a: Ty = 1`
-    pub(super) fn suggest_add_missing_let_for_stmt(&mut self, err: &mut DiagnosticBuilder<'a>) {
+    pub(super) fn suggest_add_missing_let_for_stmt(&mut self, err: &mut Diag<'a>) {
         if self.token == token::Colon {
             let prev_span = self.prev_token.span.shrink_to_lo();
             let snapshot = self.create_snapshot_for_diagnostic();
@@ -1267,7 +1275,7 @@ impl<'a> Parser<'a> {
                 Ok(_) => {
                     if self.token == token::Eq {
                         let sugg = SuggAddMissingLetStmt { span: prev_span };
-                        sugg.add_to_diagnostic(err);
+                        sugg.add_to_diag(err);
                     }
                 }
                 Err(e) => {
@@ -1280,13 +1288,13 @@ impl<'a> Parser<'a> {
 
     /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
     /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
-    /// parenthesising the leftmost comparison.
+    /// parenthesising the leftmost comparison. The return value indicates if recovery happened.
     fn attempt_chained_comparison_suggestion(
         &mut self,
         err: &mut ComparisonOperatorsCannotBeChained,
         inner_op: &Expr,
         outer_op: &Spanned<AssocOp>,
-    ) -> bool /* advanced the cursor */ {
+    ) -> bool {
         if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
             if let ExprKind::Field(_, ident) = l1.kind
                 && ident.as_str().parse::<i32>().is_err()
@@ -1332,7 +1340,7 @@ impl<'a> Parser<'a> {
                     Err(expr_err) => {
                         expr_err.cancel();
                         self.restore_snapshot(snapshot);
-                        false
+                        true
                     }
                 }
             }
@@ -1356,7 +1364,7 @@ impl<'a> Parser<'a> {
                     }
                 }
             }
-            _ => false,
+            _ => false
         };
     }
     false
@@ -1391,7 +1399,8 @@ impl<'a> Parser<'a> {
             outer_op.node,
         );
 
-        let mk_err_expr = |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err)));
+        let mk_err_expr =
+            |this: &Self, span, guar| Ok(Some(this.mk_expr(span, ExprKind::Err(guar))));
 
         match &inner_op.kind {
             ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
@@ -1415,7 +1424,7 @@ impl<'a> Parser<'a> {
                     [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
                 self.consume_tts(1, &modifiers);
 
-                if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep]
+                if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
                     .contains(&self.token.kind)
                 {
                    // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
@@ -1423,7 +1432,7 @@ impl<'a> Parser<'a> {
                    self.restore_snapshot(snapshot);
                    }
                }
-                return if token::ModSep == self.token.kind {
+                return if token::PathSep == self.token.kind {
                    // We have some certainty that this was a bad turbofish at this point.
                    // `foo< bar >::`
                    if let ExprKind::Binary(o, ..) = inner_op.kind
@@ -1441,11 +1450,11 @@ impl<'a> Parser<'a> {
                     match self.parse_expr() {
                         Ok(_) => {
                             // 99% certain that the suggestion is correct, continue parsing.
-                            self.dcx().emit_err(err);
+                            let guar = self.dcx().emit_err(err);
                             // FIXME: actually check that the two expressions in the binop are
                             // paths and resynthesize new fn call expression instead of using
                             // `ExprKind::Err` placeholder.
-                            mk_err_expr(self, inner_op.span.to(self.prev_token.span))
+                            mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
@@ -1469,11 +1478,11 @@ impl<'a> Parser<'a> {
                     match self.consume_fn_args() {
                         Err(()) => Err(self.dcx().create_err(err)),
                         Ok(()) => {
-                            self.dcx().emit_err(err);
+                            let guar = self.dcx().emit_err(err);
                             // FIXME: actually check that the two expressions in the binop are
                             // paths and resynthesize new fn call expression instead of using
                             // `ExprKind::Err` placeholder.
-                            mk_err_expr(self, inner_op.span.to(self.prev_token.span))
+                            mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                         }
                     }
                 } else {
@@ -1487,21 +1496,22 @@ impl<'a> Parser<'a> {
                     // If it looks like a genuine attempt to chain operators (as opposed to a
                    // misformatted turbofish, for instance), suggest a correct form.
-                    if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
-                    {
-                        self.dcx().emit_err(err);
-                        mk_err_expr(self, inner_op.span.to(self.prev_token.span))
+                    let recovered = self
+                        .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
+                    if recovered {
+                        let guar = self.dcx().emit_err(err);
+                        mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar)
                     } else {
                         // These cases cause too many knock-down errors, bail out (#61329).
                         Err(self.dcx().create_err(err))
                     }
                 };
             }
-            let recover =
+            let recovered =
                 self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
-            self.dcx().emit_err(err);
-            if recover {
-                return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
+            let guar = self.dcx().emit_err(err);
+            if recovered {
+                return mk_err_expr(self, inner_op.span.to(self.prev_token.span), guar);
             }
         }
         _ => {}
@@ -1540,14 +1550,14 @@ impl<'a> Parser<'a> {
     pub(super) fn maybe_recover_from_question_mark(&mut self, ty: P<Ty>) -> P<Ty> {
         if self.token == token::Question {
             self.bump();
-            self.dcx().emit_err(QuestionMarkInType {
+            let guar = self.dcx().emit_err(QuestionMarkInType {
                 span: self.prev_token.span,
                 sugg: QuestionMarkInTypeSugg {
                     left: ty.span.shrink_to_lo(),
                     right: self.prev_token.span,
                 },
             });
-            self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err)
+            self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err(guar))
         } else {
             ty
         }
@@ -1677,7 +1687,7 @@ impl<'a> Parser<'a> {
         );
         err.span_label(op_span, format!("not a valid {} operator", kind.fixity));
 
-        let help_base_case = |mut err: DiagnosticBuilder<'_, _>, base| {
+        let help_base_case = |mut err: Diag<'_, _>, base| {
             err.help(format!("use `{}= 1` instead", kind.op.chr()));
             err.emit();
             Ok(base)
@@ -1778,7 +1788,7 @@ impl<'a> Parser<'a> {
         }
 
         // Do not add `::` to expected tokens.
-        if self.token == token::ModSep {
+        if self.token == token::PathSep {
             if let Some(ty) = base.to_ty() {
                 return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty);
             }
@@ -1793,7 +1803,7 @@ impl<'a> Parser<'a> {
         ty_span: Span,
         ty: P<Ty>,
     ) -> PResult<'a, P<T>> {
-        self.expect(&token::ModSep)?;
+        self.expect(&token::PathSep)?;
 
         let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
@@ -1808,42 +1818,36 @@ impl<'a> Parser<'a> {
         Ok(P(T::recovered(Some(P(QSelf { ty, path_span, position: 0 })), path)))
     }
 
-    pub fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
-        if self.token.kind == TokenKind::Semi {
-            self.bump();
-
-            let mut err =
-                IncorrectSemicolon { span: self.prev_token.span, opt_help: None, name: "" };
+    /// This function gets called in places where a semicolon is NOT expected and if there's a
+    /// semicolon it emits the appropriate error and returns true.
+    pub fn maybe_consume_incorrect_semicolon(&mut self, previous_item: Option<&Item>) -> bool {
+        if self.token.kind != TokenKind::Semi {
+            return false;
+        }
 
-            if !items.is_empty() {
-                let previous_item = &items[items.len() - 1];
-                let previous_item_kind_name = match previous_item.kind {
+        // Check previous item to add it to the diagnostic, for example to say
+        // `enum declarations are not followed by a semicolon`
+        let err = match previous_item {
+            Some(previous_item) => {
+                let name = match previous_item.kind {
                     // Say "braced struct" because tuple-structs and
                     // braceless-empty-struct declarations do take a semicolon.
-                    ItemKind::Struct(..) => Some("braced struct"),
-                    ItemKind::Enum(..) => Some("enum"),
-                    ItemKind::Trait(..) => Some("trait"),
-                    ItemKind::Union(..) => Some("union"),
-                    _ => None,
+                    ItemKind::Struct(..) => "braced struct",
+                    _ => previous_item.kind.descr(),
                 };
-                if let Some(name) = previous_item_kind_name {
                    err.opt_help = Some(());
                    err.name = name;
                    }
+                IncorrectSemicolon { span: self.token.span, name, show_help: true }
             }
-            self.dcx().emit_err(err);
-            true
-        } else {
-            false
-        }
+            None => IncorrectSemicolon { span: self.token.span, name: "", show_help: false },
        };
        self.dcx().emit_err(err);
+
+        self.bump();
+        true
     }
 
-    /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
+    /// Creates a `Diag` for an unexpected token `t` and tries to recover if it is a
     /// closing delimiter.
-    pub(super) fn unexpected_try_recover(
-        &mut self,
-        t: &TokenKind,
-    ) -> PResult<'a, bool /* recovered */> {
+    pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
         let token_str = pprust::token_kind_to_string(t);
         let this_token_str = super::token_descr(&self.token);
         let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
@@ -1869,7 +1873,7 @@ impl<'a> Parser<'a> {
         );
         let mut err = self.dcx().struct_span_err(sp, msg);
         let label_exp = format!("expected `{token_str}`");
-        let sm = self.sess.source_map();
+        let sm = self.psess.source_map();
         if !sm.is_multiline(prev_sp.until(sp)) {
             // When the spans are in the same line, it means that the only content
             // between them is whitespace, point only at the found token.
@@ -1890,7 +1894,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_colon_as_semi(&mut self) -> bool {
         let line_idx = |span: Span| {
-            self.sess
+            self.psess
                 .source_map()
                 .span_to_lines(span)
                 .ok()
@@ -1903,7 +1907,7 @@ impl<'a> Parser<'a> {
         {
             self.dcx().emit_err(ColonAsSemi {
                 span: self.token.span,
-                type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()),
+                type_ascription: self.psess.unstable_features.is_nightly_build().then_some(()),
             });
             self.bump();
             return true;
@@ -1925,8 +1929,8 @@ impl<'a> Parser<'a> {
         } else {
             self.recover_await_prefix(await_sp)?
         };
-        let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
-        let expr = self.mk_expr(lo.to(sp), ExprKind::Err);
+        let (sp, guar) = self.error_on_incorrect_await(lo, hi, &expr, is_question);
+        let expr = self.mk_expr_err(lo.to(sp), guar);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
@@ -1955,21 +1959,27 @@ impl<'a> Parser<'a> {
         Ok((expr.span, expr, is_question))
     }
 
-    fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span {
+    fn error_on_incorrect_await(
+        &self,
+        lo: Span,
+        hi: Span,
+        expr: &Expr,
+        is_question: bool,
+    ) -> (Span, ErrorGuaranteed) {
         let span = lo.to(hi);
         let applicability = match expr.kind {
             ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?`
             _ => Applicability::MachineApplicable,
         };
-        self.dcx().emit_err(IncorrectAwait {
+        let guar = self.dcx().emit_err(IncorrectAwait {
             span,
             sugg_span: (span, applicability),
             expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(expr)),
             question_mark: if is_question { "?" } else { "" },
         });
-        span
+        (span, guar)
     }
 
     /// If encountering `future.await()`, consumes and emits an error.
@@ -2013,8 +2023,8 @@ impl<'a> Parser<'a> {
                 );
             }
             err.span_suggestion(lo.shrink_to_lo(), format!("{prefix}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax"), "r#", Applicability::MachineApplicable);
-            err.emit();
-            Ok(self.mk_expr_err(lo.to(hi)))
+            let guar = err.emit();
+            Ok(self.mk_expr_err(lo.to(hi), guar))
         } else {
             Err(self.expected_expression_found()) // The user isn't trying to invoke the try! macro
         }
@@ -2059,10 +2069,10 @@ impl<'a> Parser<'a> {
         lo: Span,
         err: PErr<'a>,
     ) -> P<Expr> {
-        err.emit();
+        let guar = err.emit();
         // Recover from parse error, callers expect the closing delim to be consumed.
         self.consume_block(delim, ConsumeClosingDelim::Yes);
-        self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err)
+        self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err(guar))
     }
 
     /// Eats tokens until we can be relatively sure we reached the end of the
@@ -2179,7 +2189,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn parameter_without_type(
         &mut self,
-        err: &mut Diagnostic,
+        err: &mut Diag<'_>,
         pat: P<ast::Pat>,
         require_name: bool,
         first_param: bool,
@@ -2304,8 +2314,8 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
         let span = param.pat.span;
-        param.ty.kind = TyKind::Err;
-        self.dcx().emit_err(SelfParamNotFirst { span });
+        let guar = self.dcx().emit_err(SelfParamNotFirst { span });
+        param.ty.kind = TyKind::Err(guar);
         Ok(param)
     }
 
@@ -2336,7 +2346,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
+    pub(super) fn expected_expression_found(&self) -> Diag<'a> {
         let (span, msg) = match (&self.token.kind, self.subparser_name) {
             (&token::Eof, Some(origin)) => {
                 let sp = self.prev_token.span.shrink_to_hi();
@@ -2348,9 +2358,9 @@ impl<'a> Parser<'a> {
             ),
         };
         let mut err = self.dcx().struct_span_err(span, msg);
-        let sp = self.sess.source_map().start_point(self.token.span);
-        if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+        let sp = self.psess.source_map().start_point(self.token.span);
+        if let Some(sp) = self.psess.ambiguous_block_expr_parse.borrow().get(&sp) {
+            err.subdiagnostic(self.dcx(), ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
 
@@ -2360,9 +2370,9 @@ impl<'a> Parser<'a> {
         // in a subsequent macro invocation (#71039).
         let mut tok = self.token.clone();
         let mut labels = vec![];
-        while let TokenKind::Interpolated(node) = &tok.kind {
-            let tokens = node.0.tokens();
-            labels.push(node.clone());
+        while let TokenKind::Interpolated(nt) = &tok.kind {
+            let tokens = nt.tokens();
+            labels.push(nt.clone());
             if let Some(tokens) = tokens
                 && let tokens = tokens.to_attr_token_stream()
                 && let tokens = tokens.0.deref()
@@ -2375,27 +2385,20 @@ impl<'a> Parser<'a> {
         }
         let mut iter = labels.into_iter().peekable();
         let mut show_link = false;
-        while let Some(node) = iter.next() {
-            let descr = node.0.descr();
+        while let Some(nt) = iter.next() {
+            let descr = nt.descr();
             if let Some(next) = iter.peek() {
-                let next_descr = next.0.descr();
+                let next_descr = next.descr();
                 if next_descr != descr {
-                    err.span_label(next.1, format!("this macro fragment matcher is {next_descr}"));
-                    err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
-                    err.span_label(
-                        next.0.use_span(),
-                        format!("this is expected to be {next_descr}"),
-                    );
+                    err.span_label(next.use_span(), format!("this is expected to be {next_descr}"));
                     err.span_label(
-                        node.0.use_span(),
+                        nt.use_span(),
                         format!(
                             "this is interpreted as {}, but it is expected to be {}",
                             next_descr, descr,
                         ),
                     );
                     show_link = true;
-                } else {
-                    err.span_label(node.1, "");
                 }
             }
         }
@@ -2437,7 +2440,7 @@ impl<'a> Parser<'a> {
     pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut ThinVec<Param>) {
         let mut seen_inputs = FxHashSet::default();
         for input in fn_inputs.iter_mut() {
-            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
+            let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err(_)) =
                 (&input.pat.kind, &input.ty.kind)
             {
                 Some(*ident)
@@ -2530,7 +2533,7 @@ impl<'a> Parser<'a> {
         };
 
         let ident = param.ident.to_string();
-        let sugg = match (ty_generics, self.sess.source_map().span_to_snippet(param.span())) {
+        let sugg = match (ty_generics, self.psess.source_map().span_to_snippet(param.span())) {
             (Some(Generics { params, span: impl_generics, .. }), Ok(snippet)) => {
                 Some(match &params[..] {
                     [] => UnexpectedConstParamDeclarationSugg::AddParam {
@@ -2549,9 +2552,10 @@ impl<'a> Parser<'a> {
             }
             _ => None,
         };
-        self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
+        let guar =
+            self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
 
-        let value = self.mk_expr_err(param.span());
+        let value = self.mk_expr_err(param.span(), guar);
         Some(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }))
     }
 
@@ -2585,11 +2589,7 @@ impl<'a> Parser<'a> {
     /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
     /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
     /// if we think that the resulting expression would be well formed.
-    pub fn recover_const_arg(
-        &mut self,
-        start: Span,
-        mut err: DiagnosticBuilder<'a>,
-    ) -> PResult<'a, GenericArg> {
+    pub fn recover_const_arg(&mut self, start: Span, mut err: Diag<'a>) -> PResult<'a, GenericArg> {
         let is_op_or_dot = AssocOp::from_token(&self.token)
             .and_then(|op| {
                 if let AssocOp::Greater
@@ -2630,8 +2630,8 @@ impl<'a> Parser<'a> {
                     "=",
                     Applicability::MaybeIncorrect,
                 );
-                let value = self.mk_expr_err(start.to(expr.span));
-                err.emit();
+                let guar = err.emit();
+                let value = self.mk_expr_err(start.to(expr.span), guar);
                 return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }));
             } else if token::Colon == snapshot.token.kind
                 && expr.span.lo() == snapshot.token.span.hi()
@@ -2644,8 +2644,10 @@ impl<'a> Parser<'a> {
                     "::",
                     Applicability::MaybeIncorrect,
                 );
-                err.emit();
-                return Ok(GenericArg::Type(self.mk_ty(start.to(expr.span), TyKind::Err)));
+                let guar = err.emit();
+                return Ok(GenericArg::Type(
+                    self.mk_ty(start.to(expr.span), TyKind::Err(guar)),
+                ));
             } else if token::Comma == self.token.kind || self.token.kind.should_end_const_arg() {
                 // Avoid the following output by checking that we consumed a full const arg:
@@ -2688,19 +2690,15 @@ impl<'a> Parser<'a> {
     }
 
     /// Creates a dummy const argument, and reports that the expression must be enclosed in braces
-    pub fn dummy_const_arg_needs_braces(
-        &self,
-        mut err: DiagnosticBuilder<'a>,
-        span: Span,
-    ) -> GenericArg {
+    pub fn dummy_const_arg_needs_braces(&self, mut err: Diag<'a>, span: Span) -> GenericArg {
         err.multipart_suggestion(
             "expressions must be enclosed in braces to be used as const generic \
             arguments",
             vec![(span.shrink_to_lo(), "{ ".to_string()), (span.shrink_to_hi(), " }".to_string())],
             Applicability::MaybeIncorrect,
         );
-        let value = self.mk_expr_err(span);
-        err.emit();
+        let guar = err.emit();
+        let value = self.mk_expr_err(span, guar);
         GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })
     }
 
@@ -2783,7 +2781,7 @@ impl<'a> Parser<'a> {
                 }
                 _ => {}
             },
-            PatKind::Ident(BindingAnnotation::NONE, ident, None) => {
+            PatKind::Ident(BindingMode::NONE, ident, None) => {
                 match &first_pat.kind {
                     PatKind::Ident(_, old_ident, _) => {
                         let path = PatKind::Path(
@@ -2957,13 +2955,23 @@ impl<'a> Parser<'a> {
         err
     }
 
-    pub fn is_diff_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> bool {
+    /// This checks if this is a conflict marker, depending of the parameter passed.
+    ///
+    /// * `>>>>>`
+    /// * `=====`
+    /// * `<<<<<`
+    ///
+    pub fn is_vcs_conflict_marker(
+        &mut self,
+        long_kind: &TokenKind,
+        short_kind: &TokenKind,
+    ) -> bool {
         (0..3).all(|i| self.look_ahead(i, |tok| tok == long_kind))
             && self.look_ahead(3, |tok| tok == short_kind)
     }
 
-    fn diff_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> Option<Span> {
-        if self.is_diff_marker(long_kind, short_kind) {
+    fn conflict_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> Option<Span> {
+        if self.is_vcs_conflict_marker(long_kind, short_kind) {
             let lo = self.token.span;
             for _ in 0..4 {
                 self.bump();
@@ -2973,15 +2981,16 @@ impl<'a> Parser<'a> {
         None
     }
 
-    pub fn recover_diff_marker(&mut self) {
-        if let Err(err) = self.err_diff_marker() {
+    pub fn recover_vcs_conflict_marker(&mut self) {
+        if let Err(err) = self.err_vcs_conflict_marker() {
             err.emit();
             FatalError.raise();
         }
     }
 
-    pub fn err_diff_marker(&mut self) -> PResult<'a, ()> {
-        let Some(start) = self.diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else {
+    pub fn err_vcs_conflict_marker(&mut self) -> PResult<'a, ()> {
+        let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt)
+        else {
             return Ok(());
         };
         let mut spans = Vec::with_capacity(3);
@@ -2993,13 +3002,15 @@ impl<'a> Parser<'a> {
             if self.token.kind == TokenKind::Eof {
                 break;
             }
-            if let Some(span) = self.diff_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or)) {
+            if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))
+            {
                 middlediff3 = Some(span);
             }
-            if let Some(span) = self.diff_marker(&TokenKind::EqEq, &TokenKind::Eq) {
+            if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) {
                 middle = Some(span);
             }
-            if let Some(span) = self.diff_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt) {
+            if let Some(span) = self.conflict_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt)
+            {
                 spans.push(span);
                 end = Some(span);
                 break;
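
A note on the recurring pattern in the hunks above: recovery paths no longer return a bare bool or build `ExprKind::Err`/`TyKind::Err` nodes out of thin air; they return `Recovered` or construct error nodes that carry the `ErrorGuaranteed` value produced by `emit()`, so claiming recovery requires proof that a diagnostic was actually emitted. Below is a minimal, self-contained sketch of that idea using stand-in types, not rustc's own (the real `ErrorGuaranteed`, `Recovered`, and `DiagCtxt` live in `rustc_errors`/`rustc_ast` and cannot be constructed freely like this):

// Stand-ins for rustc_errors::ErrorGuaranteed, Recovered, and DiagCtxt; illustrative only.
#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed(());

#[derive(Debug)]
enum Recovered {
    No,
    // Recovery may only be reported together with proof that an error was emitted.
    Yes(ErrorGuaranteed),
}

struct DiagCtxt;

impl DiagCtxt {
    // Emitting a diagnostic is the only way to obtain an ErrorGuaranteed here,
    // mirroring how emit() returns the guarantee in the hunks above.
    fn emit_err(&self, msg: &str) -> ErrorGuaranteed {
        eprintln!("error: {msg}");
        ErrorGuaranteed(())
    }
}

// A recovery helper in the style of the changed signatures: instead of
// `-> bool /* recovered */`, it returns Recovered, and the Yes arm must
// carry the guarantee produced by emitting the error.
fn recover_missing_semi(dcx: &DiagCtxt, found_semi: bool) -> Recovered {
    if found_semi {
        Recovered::No
    } else {
        let guar = dcx.emit_err("expected `;`");
        Recovered::Yes(guar)
    }
}

fn main() {
    let dcx = DiagCtxt;
    println!("{:?}", recover_missing_semi(&dcx, false));
    println!("{:?}", recover_missing_semi(&dcx, true));
}

In the real compiler the zero-sized private field is what stops callers from fabricating the guarantee; here it is only a stand-in to show the shape of the API change.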

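Similarly, the renamed `is_vcs_conflict_marker` helper (formerly `is_diff_marker`) keeps the same lookahead: a seven-character marker such as `<<<<<<<` reaches the parser as three `<<` tokens followed by a single `<`, so the check is three tokens of the "long" kind plus one of the "short" kind. A hypothetical, self-contained illustration of that check with a stand-in token type (not rustc's `TokenKind`):

// Stand-in token kind; the lexer produces `<<` and `<` tokens, never a single 7-char marker token.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok {
    Shl,   // `<<`
    Lt,    // `<`
    Other,
}

// Mirrors the lookahead in is_vcs_conflict_marker: three tokens of the
// "long" kind followed by one token of the "short" kind.
fn is_vcs_conflict_marker(toks: &[Tok], long: Tok, short: Tok) -> bool {
    let look_ahead = |i: usize| toks.get(i).copied();
    (0..3).all(|i| look_ahead(i) == Some(long)) && look_ahead(3) == Some(short)
}

fn main() {
    // `<<<<<<< ours` starts with the conflict marker.
    let start = [Tok::Shl, Tok::Shl, Tok::Shl, Tok::Lt, Tok::Other];
    // `a << b < c` does not.
    let shifts = [Tok::Other, Tok::Shl, Tok::Other, Tok::Lt];
    assert!(is_vcs_conflict_marker(&start, Tok::Shl, Tok::Lt));
    assert!(!is_vcs_conflict_marker(&shifts, Tok::Shl, Tok::Lt));
    println!("conflict-marker checks passed");
}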