Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--   compiler/rustc_parse/messages.ftl                |  7
-rw-r--r--   compiler/rustc_parse/src/errors.rs               | 28
-rw-r--r--   compiler/rustc_parse/src/lexer/tokentrees.rs     | 16
-rw-r--r--   compiler/rustc_parse/src/parser/attr_wrapper.rs  |  4
-rw-r--r--   compiler/rustc_parse/src/parser/expr.rs          | 26
-rw-r--r--   compiler/rustc_parse/src/parser/mod.rs           | 53
-rw-r--r--   compiler/rustc_parse/src/parser/nonterminal.rs   | 50
7 files changed, 142 insertions, 42 deletions
diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl
index ef259703f0c..cafd4b6dca2 100644
--- a/compiler/rustc_parse/messages.ftl
+++ b/compiler/rustc_parse/messages.ftl
@@ -216,6 +216,9 @@ parse_expected_identifier_found_doc_comment = expected identifier, found doc com
 parse_expected_identifier_found_doc_comment_str = expected identifier, found doc comment `{$token}`
 parse_expected_identifier_found_keyword = expected identifier, found keyword
 parse_expected_identifier_found_keyword_str = expected identifier, found keyword `{$token}`
+parse_expected_identifier_found_metavar = expected identifier, found metavariable
+# This one deliberately doesn't print a token.
+parse_expected_identifier_found_metavar_str = expected identifier, found metavariable
 parse_expected_identifier_found_reserved_identifier = expected identifier, found reserved identifier
 parse_expected_identifier_found_reserved_identifier_str = expected identifier, found reserved identifier `{$token}`
 parse_expected_identifier_found_reserved_keyword = expected identifier, found reserved keyword
@@ -227,6 +230,8 @@ parse_expected_mut_or_const_in_raw_pointer_type = expected `mut` or `const` keyw
 parse_expected_semi_found_doc_comment_str = expected `;`, found doc comment `{$token}`
 parse_expected_semi_found_keyword_str = expected `;`, found keyword `{$token}`
+# This one deliberately doesn't print a token.
+parse_expected_semi_found_metavar_str = expected `;`, found metavariable
 parse_expected_semi_found_reserved_identifier_str = expected `;`, found reserved identifier `{$token}`
 parse_expected_semi_found_reserved_keyword_str = expected `;`, found reserved keyword `{$token}`
 parse_expected_semi_found_str = expected `;`, found `{$token}`
@@ -864,6 +869,8 @@ parse_unexpected_token_after_not_logical = use `!` to perform logical negation
 parse_unexpected_token_after_struct_name = expected `where`, `{"{"}`, `(`, or `;` after struct name
 parse_unexpected_token_after_struct_name_found_doc_comment = expected `where`, `{"{"}`, `(`, or `;` after struct name, found doc comment `{$token}`
 parse_unexpected_token_after_struct_name_found_keyword = expected `where`, `{"{"}`, `(`, or `;` after struct name, found keyword `{$token}`
+# This one deliberately doesn't print a token.
+parse_unexpected_token_after_struct_name_found_metavar = expected `where`, `{"{"}`, `(`, or `;` after struct name, found metavar
 parse_unexpected_token_after_struct_name_found_other = expected `where`, `{"{"}`, `(`, or `;` after struct name, found `{$token}`
 parse_unexpected_token_after_struct_name_found_reserved_identifier = expected `where`, `{"{"}`, `(`, or `;` after struct name, found reserved identifier `{$token}`
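The new messages cover parser errors whose offending token is an expanded macro metavariable. A hypothetical repro (not part of this patch, and only reachable once expanded fragments are represented with the invisible delimiters this series works toward): an `expr` fragment used where the grammar needs an identifier. The intent is that such code is rejected with "expected identifier, found metavariable" instead of quoting an empty token. This snippet is expected to fail to compile; that failure is the point.

    // Hypothetical repro of the "expected identifier" path.
    macro_rules! make_struct {
        ($name:expr) => {
            struct $name; // `struct` needs an identifier, not an `expr` fragment
        };
    }

    make_struct!(1 + 1);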
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 37eb463cba6..9bdb99dc000 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1086,6 +1086,8 @@ pub(crate) enum ExpectedIdentifierFound {
     ReservedKeyword(#[primary_span] Span),
     #[label(parse_expected_identifier_found_doc_comment)]
     DocComment(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_metavar)]
+    MetaVar(#[primary_span] Span),
     #[label(parse_expected_identifier)]
     Other(#[primary_span] Span),
 }
@@ -1099,6 +1101,7 @@ impl ExpectedIdentifierFound {
             Some(TokenDescription::Keyword) => ExpectedIdentifierFound::Keyword,
             Some(TokenDescription::ReservedKeyword) => ExpectedIdentifierFound::ReservedKeyword,
             Some(TokenDescription::DocComment) => ExpectedIdentifierFound::DocComment,
+            Some(TokenDescription::MetaVar(_)) => ExpectedIdentifierFound::MetaVar,
             None => ExpectedIdentifierFound::Other,
         })(span)
     }
@@ -1117,6 +1120,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedIdentifier {
     fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
         let token_descr = TokenDescription::from_token(&self.token);
 
+        let mut add_token = true;
         let mut diag = Diag::new(dcx, level, match token_descr {
             Some(TokenDescription::ReservedIdentifier) => {
                 fluent::parse_expected_identifier_found_reserved_identifier_str
@@ -1128,10 +1132,16 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedIdentifier {
             Some(TokenDescription::DocComment) => {
                 fluent::parse_expected_identifier_found_doc_comment_str
             }
+            Some(TokenDescription::MetaVar(_)) => {
+                add_token = false;
+                fluent::parse_expected_identifier_found_metavar_str
+            }
             None => fluent::parse_expected_identifier_found_str,
         });
         diag.span(self.span);
-        diag.arg("token", self.token);
+        if add_token {
+            diag.arg("token", self.token);
+        }
 
         if let Some(sugg) = self.suggest_raw {
             sugg.add_to_diag(&mut diag);
@@ -1171,6 +1181,7 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedSemi {
     fn into_diag(self, dcx: DiagCtxtHandle<'a>, level: Level) -> Diag<'a, G> {
         let token_descr = TokenDescription::from_token(&self.token);
 
+        let mut add_token = true;
         let mut diag = Diag::new(dcx, level, match token_descr {
             Some(TokenDescription::ReservedIdentifier) => {
                 fluent::parse_expected_semi_found_reserved_identifier_str
@@ -1180,10 +1191,16 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedSemi {
                 fluent::parse_expected_semi_found_reserved_keyword_str
             }
             Some(TokenDescription::DocComment) => fluent::parse_expected_semi_found_doc_comment_str,
+            Some(TokenDescription::MetaVar(_)) => {
+                add_token = false;
+                fluent::parse_expected_semi_found_metavar_str
+            }
             None => fluent::parse_expected_semi_found_str,
         });
         diag.span(self.span);
-        diag.arg("token", self.token);
+        if add_token {
+            diag.arg("token", self.token);
+        }
 
         if let Some(unexpected_token_label) = self.unexpected_token_label {
             diag.span_label(unexpected_token_label, fluent::parse_label_unexpected_token);
@@ -1925,6 +1942,12 @@ pub(crate) enum UnexpectedTokenAfterStructName {
         span: Span,
         token: Token,
     },
+    #[diag(parse_unexpected_token_after_struct_name_found_metavar)]
+    MetaVar {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+    },
     #[diag(parse_unexpected_token_after_struct_name_found_other)]
     Other {
         #[primary_span]
@@ -1941,6 +1964,7 @@ impl UnexpectedTokenAfterStructName {
             Some(TokenDescription::Keyword) => Self::Keyword { span, token },
             Some(TokenDescription::ReservedKeyword) => Self::ReservedKeyword { span, token },
             Some(TokenDescription::DocComment) => Self::DocComment { span, token },
+            Some(TokenDescription::MetaVar(_)) => Self::MetaVar { span },
             None => Self::Other { span, token },
         }
     }
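The `MetaVar` variants above deliberately carry no `token` field, and `into_diag` skips `diag.arg("token", ...)` for them, because pretty-printing an invisible-delimiter token yields an empty string. A minimal, self-contained illustration of the difference using plain `format!` (not the real Fluent machinery):

    fn main() {
        // What pretty-printing gives for an invisible-delimiter token.
        let token_text = "";
        let with_token = format!("expected `;`, found `{token_text}`");
        let without_token = "expected `;`, found metavariable";
        assert_eq!(with_token, "expected `;`, found ``"); // the confusing old form
        assert_ne!(with_token, without_token); // the new messages avoid the empty backticks
    }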
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index d35c9c7bae7..7b21ffacc84 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -43,11 +43,19 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
-                token::OpenDelim(delim) => buf.push(match self.lex_token_tree_open_delim(delim) {
-                    Ok(val) => val,
-                    Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
-                }),
+                token::OpenDelim(delim) => {
+                    // Invisible delimiters cannot occur here because `TokenTreesReader` parses
+                    // code directly from strings, with no macro expansion involved.
+                    debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
+                    buf.push(match self.lex_token_tree_open_delim(delim) {
+                        Ok(val) => val,
+                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
+                    })
+                }
                 token::CloseDelim(delim) => {
+                    // Invisible delimiters cannot occur here because `TokenTreesReader` parses
+                    // code directly from strings, with no macro expansion involved.
+                    debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
                     return (
                         open_spacing,
                         TokenStream::new(buf),
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index c85d0bd05cb..434f71beac2 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -510,8 +510,8 @@ fn make_attr_token_stream(
             FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => {
                 let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
                 let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
-                assert_eq!(
-                    open_delim, delim,
+                assert!(
+                    open_delim.eq_ignoring_invisible_origin(&delim),
                     "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                 );
                 let dspan = DelimSpan::from_pair(open_sp, span);
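The `attr_wrapper.rs` change switches from exact delimiter equality to `eq_ignoring_invisible_origin`, so an open and a close delimiter still pair up when both are invisible but record different origins. A self-contained sketch of the assumed comparison semantics, using toy types rather than the real `rustc_ast` definitions:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Origin {
        MetaVar, // stand-in for an InvisibleOrigin::MetaVar(kind) payload
        Other,
    }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Delim {
        Parenthesis,
        Brace,
        Invisible(Origin),
    }

    impl Delim {
        // Assumed behaviour: two invisible delimiters match regardless of origin;
        // everything else falls back to plain equality.
        fn eq_ignoring_invisible_origin(self, other: &Delim) -> bool {
            match (self, *other) {
                (Delim::Invisible(_), Delim::Invisible(_)) => true,
                (a, b) => a == b,
            }
        }
    }

    fn main() {
        let open = Delim::Invisible(Origin::MetaVar);
        let close = Delim::Invisible(Origin::Other);
        assert!(open.eq_ignoring_invisible_origin(&close));
        assert!(!Delim::Parenthesis.eq_ignoring_invisible_origin(&Delim::Brace));
    }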
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 0012db471ef..aa5e9586daf 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -49,7 +49,7 @@ pub(super) enum DestructuredFloat {
     /// 1.2 | 1.2e3
     MiddleDot(Symbol, Span, Span, Symbol, Span),
     /// Invalid
-    Error(ErrorGuaranteed),
+    Error,
 }
 
 impl<'a> Parser<'a> {
@@ -1005,7 +1005,7 @@ impl<'a> Parser<'a> {
                         self.mk_expr_tuple_field_access(lo, ident1_span, base, sym1, None);
                     self.mk_expr_tuple_field_access(lo, ident2_span, base1, sym2, suffix)
                 }
-                DestructuredFloat::Error(_) => base,
+                DestructuredFloat::Error => base,
             })
         }
         _ => {
@@ -1015,7 +1015,7 @@
         }
     }
 
-    fn error_unexpected_after_dot(&self) -> ErrorGuaranteed {
+    fn error_unexpected_after_dot(&self) {
        let actual = pprust::token_to_string(&self.token);
        let span = self.token.span;
        let sm = self.psess.source_map();
@@ -1025,7 +1025,7 @@
            }
            _ => (span, actual),
        };
-        self.dcx().emit_err(errors::UnexpectedTokenAfterDot { span, actual })
+        self.dcx().emit_err(errors::UnexpectedTokenAfterDot { span, actual });
    }
 
    /// We need an identifier or integer, but the next token is a float.
@@ -1111,8 +1111,8 @@
            // 1.2e+3 | 1.2e-3
            [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
                // See the FIXME about `TokenCursor` above.
-                let guar = self.error_unexpected_after_dot();
-                DestructuredFloat::Error(guar)
+                self.error_unexpected_after_dot();
+                DestructuredFloat::Error
            }
            _ => panic!("unexpected components in a float token: {components:?}"),
        }
@@ -1178,7 +1178,7 @@
                    fields.insert(start_idx, Ident::new(symbol2, span2));
                    fields.insert(start_idx, Ident::new(symbol1, span1));
                }
-                DestructuredFloat::Error(_) => {
+                DestructuredFloat::Error => {
                    trailing_dot = None;
                    fields.insert(start_idx, Ident::new(symbol, self.prev_token.span));
                }
@@ -3591,11 +3591,19 @@
                && !self.token.is_reserved_ident()
                && self.look_ahead(1, |t| {
                    AssocOp::from_token(t).is_some()
-                        || matches!(t.kind, token::OpenDelim(_))
+                        || matches!(
+                            t.kind,
+                            token::OpenDelim(
+                                Delimiter::Parenthesis
+                                    | Delimiter::Bracket
+                                    | Delimiter::Brace
+                            )
+                        )
                        || *t == token::Dot
                })
            {
-                // Looks like they tried to write a shorthand, complex expression.
+                // Looks like they tried to write a shorthand, complex expression,
+                // E.g.: `n + m`, `f(a)`, `a[i]`, `S { x: 3 }`, or `x.y`.
                e.span_suggestion_verbose(
                    self.token.span.shrink_to_lo(),
                    "try naming a field",
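The look-ahead in the last hunk now treats only the visible `(`, `[`, `{` delimiters (alongside operators and `.`) as evidence of a "shorthand, complex expression" during struct-literal recovery, so invisible metavariable delimiters no longer trigger the suggestion. A hypothetical repro of that recovery path; it is expected to fail to compile with a "try naming a field" suggestion:

    struct S {
        x: i32,
    }

    fn f(n: i32, m: i32) -> S {
        // Not valid Rust: a complex expression used like a shorthand field.
        // The parser's recovery is expected to suggest naming the field,
        // e.g. `x: n + m`.
        S { n + m }
    }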
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 042ee96bbe8..0ed8d152d2d 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -21,7 +21,9 @@ pub(crate) use item::FnParseMode;
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 use path::PathStyle;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{
+    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
+};
 use rustc_ast::tokenstream::{
     AttrsTarget, DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree, TokenTreeCursor,
 };
@@ -317,7 +319,7 @@ impl TokenCursor {
                     spacing,
                     delim,
                 ));
-                if delim != Delimiter::Invisible {
+                if !delim.skip() {
                     return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                 }
                 // No open delimiter to return; continue on to the next iteration.
@@ -326,7 +328,7 @@
             } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
                 // We have exhausted this token stream. Move back to its parent token stream.
                 self.tree_cursor = tree_cursor;
-                if delim != Delimiter::Invisible {
+                if !delim.skip() {
                     return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
                 // No close delimiter to return; continue on to the next iteration.
@@ -410,6 +412,12 @@ pub(super) enum TokenDescription {
     Keyword,
     ReservedKeyword,
     DocComment,
+
+    // Expanded metavariables are wrapped in invisible delimiters which aren't
+    // pretty-printed. In error messages we must handle these specially
+    // otherwise we get confusing things in messages like "expected `(`, found
+    // ``". It's better to say e.g. "expected `(`, found type metavariable".
+    MetaVar(MetaVarKind),
 }
 
 impl TokenDescription {
@@ -419,26 +427,29 @@
             _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
             _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
             token::DocComment(..) => Some(TokenDescription::DocComment),
+            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
+                Some(TokenDescription::MetaVar(kind))
+            }
             _ => None,
         }
     }
 }
 
 pub fn token_descr(token: &Token) -> String {
-    let name = pprust::token_to_string(token).to_string();
-
-    let kind = match (TokenDescription::from_token(token), &token.kind) {
-        (Some(TokenDescription::ReservedIdentifier), _) => Some("reserved identifier"),
-        (Some(TokenDescription::Keyword), _) => Some("keyword"),
-        (Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
-        (Some(TokenDescription::DocComment), _) => Some("doc comment"),
-        (None, TokenKind::NtIdent(..)) => Some("identifier"),
-        (None, TokenKind::NtLifetime(..)) => Some("lifetime"),
-        (None, TokenKind::Interpolated(node)) => Some(node.descr()),
-        (None, _) => None,
-    };
-
-    if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
+    let s = pprust::token_to_string(token).to_string();
+
+    match (TokenDescription::from_token(token), &token.kind) {
+        (Some(TokenDescription::ReservedIdentifier), _) => format!("reserved identifier `{s}`"),
+        (Some(TokenDescription::Keyword), _) => format!("keyword `{s}`"),
+        (Some(TokenDescription::ReservedKeyword), _) => format!("reserved keyword `{s}`"),
+        (Some(TokenDescription::DocComment), _) => format!("doc comment `{s}`"),
+        // Deliberately doesn't print `s`, which is empty.
+        (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
+        (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
+        (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
+        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
+        (None, _) => format!("`{s}`"),
+    }
 }
 
 impl<'a> Parser<'a> {
@@ -1163,7 +1174,7 @@
         }
         debug_assert!(!matches!(
             next.0.kind,
-            token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
+            token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
         ));
         self.inlined_bump_with(next)
     }
@@ -1187,7 +1198,7 @@
                 match tree {
                     TokenTree::Token(token, _) => return looker(token),
                     &TokenTree::Delimited(dspan, _, delim, _) => {
-                        if delim != Delimiter::Invisible {
+                        if !delim.skip() {
                             return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                         }
                     }
@@ -1197,7 +1208,7 @@
         // The tree cursor lookahead went (one) past the end of the
         // current token tree. Try to return a close delimiter.
         if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
-            && delim != Delimiter::Invisible
+            && !delim.skip()
         {
             // We are not in the outermost token stream, so we have
             // delimiters. Also, those delimiters are not skipped.
@@ -1216,7 +1227,7 @@
             token = cursor.next().0;
             if matches!(
                 token.kind,
-                token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
+                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
             ) {
                 continue;
            }
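The net effect of the `token_descr` rewrite is that a token whose pretty-printed text is empty (an invisible-delimiter metavariable) is described by its fragment kind rather than by empty backticks. A toy illustration of the chosen format, using stand-in types rather than the real `rustc_ast` ones (the real `MetaVarKind` is assumed to display as its fragment name, e.g. `ty`):

    use std::fmt;

    // Stand-in for rustc_ast's MetaVarKind, displaying as the fragment name.
    enum MetaVarKind {
        Ty,
    }

    impl fmt::Display for MetaVarKind {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.write_str(match self {
                MetaVarKind::Ty => "ty",
            })
        }
    }

    fn main() {
        // An expanded `$t:ty` fragment pretty-prints as "", so the description
        // names the kind: "`ty` metavariable" instead of quoting empty text.
        let kind = MetaVarKind::Ty;
        assert_eq!(format!("`{kind}` metavariable"), "`ty` metavariable");
    }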
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 43c3de90d9d..8fb6f85d0dd 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -3,7 +3,9 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::Nonterminal::*;
 use rustc_ast::token::NtExprKind::*;
 use rustc_ast::token::NtPatKind::*;
-use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
+use rustc_ast::token::{
+    self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token,
+};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::PResult;
@@ -22,7 +24,28 @@ impl<'a> Parser<'a> {
     #[inline]
     pub fn nonterminal_may_begin_with(kind: NonterminalKind, token: &Token) -> bool {
         /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
-        fn may_be_ident(nt: &token::Nonterminal) -> bool {
+        fn may_be_ident(kind: MetaVarKind) -> bool {
+            match kind {
+                MetaVarKind::Stmt
+                | MetaVarKind::Pat(_)
+                | MetaVarKind::Expr { .. }
+                | MetaVarKind::Ty
+                | MetaVarKind::Literal // `true`, `false`
+                | MetaVarKind::Meta
+                | MetaVarKind::Path => true,
+
+                MetaVarKind::Item
+                | MetaVarKind::Block
+                | MetaVarKind::Vis => false,
+
+                MetaVarKind::Ident
+                | MetaVarKind::Lifetime
+                | MetaVarKind::TT => unreachable!(),
+            }
+        }
+
+        /// Old variant of `may_be_ident`. Being phased out.
+        fn nt_may_be_ident(nt: &Nonterminal) -> bool {
             match nt {
                 NtStmt(_)
                 | NtPat(_)
@@ -69,7 +92,8 @@ impl<'a> Parser<'a> {
                 | token::Ident(..)
                 | token::NtIdent(..)
                 | token::NtLifetime(..)
-                | token::Interpolated(_) => true,
+                | token::Interpolated(_)
+                | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true,
                 _ => token.can_begin_type(),
             },
             NonterminalKind::Block => match &token.kind {
@@ -79,11 +103,29 @@ impl<'a> Parser<'a> {
                     NtBlock(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
                     NtItem(_) | NtPat(_) | NtTy(_) | NtMeta(_) | NtPath(_) | NtVis(_) => false,
                 },
+                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
+                    MetaVarKind::Block
+                    | MetaVarKind::Stmt
+                    | MetaVarKind::Expr { .. }
+                    | MetaVarKind::Literal => true,
+                    MetaVarKind::Item
+                    | MetaVarKind::Pat(_)
+                    | MetaVarKind::Ty
+                    | MetaVarKind::Meta
+                    | MetaVarKind::Path
+                    | MetaVarKind::Vis => false,
+                    MetaVarKind::Lifetime | MetaVarKind::Ident | MetaVarKind::TT => {
+                        unreachable!()
+                    }
+                },
                 _ => false,
             },
             NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::PathSep | token::Ident(..) | token::NtIdent(..) => true,
-                token::Interpolated(nt) => may_be_ident(nt),
+                token::Interpolated(nt) => nt_may_be_ident(nt),
+                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
+                    may_be_ident(*kind)
+                }
                 _ => false,
             },
             NonterminalKind::Pat(pat_kind) => token.can_begin_pattern(pat_kind),
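`nonterminal_may_begin_with` (with its `may_be_ident` helper) decides whether an already-matched fragment may start a match for another fragment kind when one macro forwards its captures to another. A small, compilable illustration of the kind of forwarding this gates, here a `literal` fragment flowing into an `expr` matcher:

    macro_rules! inner {
        ($e:expr) => {
            $e
        };
    }

    macro_rules! outer {
        // Forward a matched `literal` fragment into an `expr` position; the
        // parser's "may begin with" check must accept it for this to match.
        ($l:literal) => {
            inner!($l)
        };
    }

    fn main() {
        assert_eq!(outer!(2), 2);
    }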
