Diffstat (limited to 'compiler/rustc_parse')
 compiler/rustc_parse/Cargo.toml                  |  2
 compiler/rustc_parse/messages.ftl                |  3
 compiler/rustc_parse/src/errors.rs               | 84
 compiler/rustc_parse/src/lexer/unicode_chars.rs  |  2
 compiler/rustc_parse/src/lib.rs                  |  6
 compiler/rustc_parse/src/parser/attr_wrapper.rs  |  8
 compiler/rustc_parse/src/parser/diagnostics.rs   | 78
 compiler/rustc_parse/src/parser/expr.rs          | 39
 compiler/rustc_parse/src/parser/generics.rs      | 14
 compiler/rustc_parse/src/parser/item.rs          | 11
 compiler/rustc_parse/src/parser/mod.rs           | 90
 compiler/rustc_parse/src/parser/nonterminal.rs   | 27
 compiler/rustc_parse/src/parser/pat.rs           | 22
 compiler/rustc_parse/src/parser/path.rs          | 55
 compiler/rustc_parse/src/parser/stmt.rs          | 22
 compiler/rustc_parse/src/parser/tests.rs         | 13
 compiler/rustc_parse/src/parser/ty.rs            | 15
17 files changed, 287 insertions, 204 deletions
diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml
index 2360914a0ab..c9dcab0c871 100644
--- a/compiler/rustc_parse/Cargo.toml
+++ b/compiler/rustc_parse/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "rustc_parse"
 version = "0.0.0"
-edition = "2021"
+edition = "2024"

 [dependencies]
 # tidy-alphabetical-start
diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl
index d50bd18a1d7..563081c7240 100644
--- a/compiler/rustc_parse/messages.ftl
+++ b/compiler/rustc_parse/messages.ftl
@@ -743,9 +743,6 @@ parse_single_colon_import_path = expected `::`, found `:`
     .suggestion = use double colon
     .note = import paths are delimited using `::`

-parse_single_colon_struct_type = found single colon in a struct field type path
-    .suggestion = write a path separator here
-
 parse_static_with_generics = static items may not have generic parameters

 parse_struct_literal_body_without_path =
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index 2373ab67d42..dc03d6f9521 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1121,23 +1121,29 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedIdentifier {
         let token_descr = TokenDescription::from_token(&self.token);

         let mut add_token = true;
-        let mut diag = Diag::new(dcx, level, match token_descr {
-            Some(TokenDescription::ReservedIdentifier) => {
-                fluent::parse_expected_identifier_found_reserved_identifier_str
-            }
-            Some(TokenDescription::Keyword) => fluent::parse_expected_identifier_found_keyword_str,
-            Some(TokenDescription::ReservedKeyword) => {
-                fluent::parse_expected_identifier_found_reserved_keyword_str
-            }
-            Some(TokenDescription::DocComment) => {
-                fluent::parse_expected_identifier_found_doc_comment_str
-            }
-            Some(TokenDescription::MetaVar(_)) => {
-                add_token = false;
-                fluent::parse_expected_identifier_found_metavar_str
-            }
-            None => fluent::parse_expected_identifier_found_str,
-        });
+        let mut diag = Diag::new(
+            dcx,
+            level,
+            match token_descr {
+                Some(TokenDescription::ReservedIdentifier) => {
+                    fluent::parse_expected_identifier_found_reserved_identifier_str
+                }
+                Some(TokenDescription::Keyword) => {
+                    fluent::parse_expected_identifier_found_keyword_str
+                }
+                Some(TokenDescription::ReservedKeyword) => {
+                    fluent::parse_expected_identifier_found_reserved_keyword_str
+                }
+                Some(TokenDescription::DocComment) => {
+                    fluent::parse_expected_identifier_found_doc_comment_str
+                }
+                Some(TokenDescription::MetaVar(_)) => {
+                    add_token = false;
+                    fluent::parse_expected_identifier_found_metavar_str
+                }
+                None => fluent::parse_expected_identifier_found_str,
+            },
+        );
         diag.span(self.span);
         if add_token {
             diag.arg("token", self.token);
@@ -1182,21 +1188,27 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for ExpectedSemi {
         let token_descr = TokenDescription::from_token(&self.token);

         let mut add_token = true;
-        let mut diag = Diag::new(dcx, level, match token_descr {
-            Some(TokenDescription::ReservedIdentifier) => {
-                fluent::parse_expected_semi_found_reserved_identifier_str
-            }
-            Some(TokenDescription::Keyword) => fluent::parse_expected_semi_found_keyword_str,
-            Some(TokenDescription::ReservedKeyword) => {
-                fluent::parse_expected_semi_found_reserved_keyword_str
-            }
-            Some(TokenDescription::DocComment) => fluent::parse_expected_semi_found_doc_comment_str,
-            Some(TokenDescription::MetaVar(_)) => {
-                add_token = false;
-                fluent::parse_expected_semi_found_metavar_str
-            }
-            None => fluent::parse_expected_semi_found_str,
-        });
+        let mut diag = Diag::new(
+            dcx,
+            level,
+            match token_descr {
+                Some(TokenDescription::ReservedIdentifier) => {
+                    fluent::parse_expected_semi_found_reserved_identifier_str
+                }
+                Some(TokenDescription::Keyword) => fluent::parse_expected_semi_found_keyword_str,
+                Some(TokenDescription::ReservedKeyword) => {
+                    fluent::parse_expected_semi_found_reserved_keyword_str
+                }
+                Some(TokenDescription::DocComment) => {
+                    fluent::parse_expected_semi_found_doc_comment_str
+                }
+                Some(TokenDescription::MetaVar(_)) => {
+                    add_token = false;
+                    fluent::parse_expected_semi_found_metavar_str
+                }
+                None => fluent::parse_expected_semi_found_str,
+            },
+        );
         diag.span(self.span);
         if add_token {
             diag.arg("token", self.token);
@@ -3060,14 +3072,6 @@ pub(crate) struct BadItemKind {
 }

 #[derive(Diagnostic)]
-#[diag(parse_single_colon_struct_type)]
-pub(crate) struct SingleColonStructType {
-    #[primary_span]
-    #[suggestion(code = "::", applicability = "maybe-incorrect", style = "verbose")]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
 #[diag(parse_macro_rules_missing_bang)]
 pub(crate) struct MacroRulesMissingBang {
     #[primary_span]
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index ef8d0f96b61..648a352efd9 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -8,7 +8,7 @@ use crate::errors::TokenSubstitution;
 use crate::token::{self, Delimiter};

 #[rustfmt::skip] // for line breaks
-pub(super) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
+pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
     (' ', "Line Separator", " "),
     (' ', "Paragraph Separator", " "),
     (' ', "Ogham Space mark", " "),
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index e681987ff07..1a104ff5e33 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -17,12 +17,12 @@
 use std::path::{Path, PathBuf};
 use std::str::Utf8Error;
+use std::sync::Arc;

 use rustc_ast as ast;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::SourceMap;
@@ -147,7 +147,7 @@ pub fn utf8_error<E: EmissionGuarantee>(
 /// the initial token stream.
 fn new_parser_from_source_file(
     psess: &ParseSess,
-    source_file: Lrc<SourceFile>,
+    source_file: Arc<SourceFile>,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let end_pos = source_file.end_position();
     let stream = source_file_to_stream(psess, source_file, None)?;
@@ -172,7 +172,7 @@ pub fn source_str_to_stream(
 /// parsing the token stream.
 fn source_file_to_stream<'psess>(
     psess: &'psess ParseSess,
-    source_file: Lrc<SourceFile>,
+    source_file: Arc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 434f71beac2..cff998fa137 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -502,10 +502,10 @@ fn make_attr_token_stream(
     for flat_token in iter {
         match flat_token {
             FlatToken::Token((Token { kind: TokenKind::OpenDelim(delim), span }, spacing)) => {
-                stack_rest.push(mem::replace(&mut stack_top, FrameData {
-                    open_delim_sp: Some((delim, span, spacing)),
-                    inner: vec![],
-                }));
+                stack_rest.push(mem::replace(
+                    &mut stack_top,
+                    FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
+                ));
             }
             FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => {
                 let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 9ee6c2fae1a..67abc2d5394 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -1,5 +1,6 @@
 use std::mem::take;
 use std::ops::{Deref, DerefMut};
+use std::sync::Arc;

 use ast::token::IdentIsRaw;
 use rustc_ast as ast;
@@ -14,7 +15,6 @@ use rustc_ast::{
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{
     Applicability, Diag, DiagCtxtHandle, ErrorGuaranteed, PResult, Subdiagnostic, Suggestions,
     pluralize,
@@ -301,13 +301,6 @@ impl<'a> Parser<'a> {
         &mut self,
         recover: bool,
     ) -> PResult<'a, (Ident, IdentIsRaw)> {
-        if let TokenKind::DocComment(..) = self.prev_token.kind {
-            return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
-                span: self.prev_token.span,
-                missing_comma: None,
-            }));
-        }
-
         let valid_follow = &[
             TokenKind::Eq,
             TokenKind::Colon,
@@ -319,6 +312,15 @@
             TokenKind::CloseDelim(Delimiter::Brace),
             TokenKind::CloseDelim(Delimiter::Parenthesis),
         ];
+        if let TokenKind::DocComment(..) = self.prev_token.kind
+            && valid_follow.contains(&self.token.kind)
+        {
+            let err = self.dcx().create_err(DocCommentDoesNotDocumentAnything {
+                span: self.prev_token.span,
+                missing_comma: None,
+            });
+            return Err(err);
+        }

         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
@@ -684,12 +686,15 @@ impl<'a> Parser<'a> {
                     let span = self.token.span.with_lo(pos).with_hi(pos);
                     err.span_suggestion_verbose(
                         span,
-                        format!("add a space before {} to write a regular comment", match (kind, style) {
-                            (token::CommentKind::Line, ast::AttrStyle::Inner) => "`!`",
-                            (token::CommentKind::Block, ast::AttrStyle::Inner) => "`!`",
-                            (token::CommentKind::Line, ast::AttrStyle::Outer) => "the last `/`",
-                            (token::CommentKind::Block, ast::AttrStyle::Outer) => "the last `*`",
-                        },),
+                        format!(
+                            "add a space before {} to write a regular comment",
+                            match (kind, style) {
+                                (token::CommentKind::Line, ast::AttrStyle::Inner) => "`!`",
+                                (token::CommentKind::Block, ast::AttrStyle::Inner) => "`!`",
+                                (token::CommentKind::Line, ast::AttrStyle::Outer) => "the last `/`",
+                                (token::CommentKind::Block, ast::AttrStyle::Outer) => "the last `*`",
+                            },
+                        ),
                         " ".to_string(),
                         Applicability::MachineApplicable,
                     );
@@ -1894,13 +1899,14 @@ impl<'a> Parser<'a> {
             (token::Eof, None) => (self.prev_token.span, self.token.span),
             _ => (self.prev_token.span.shrink_to_hi(), self.token.span),
         };
-        let msg = format!("expected `{}`, found {}", token_str, match (
-            &self.token.kind,
-            self.subparser_name
-        ) {
-            (token::Eof, Some(origin)) => format!("end of {origin}"),
-            _ => this_token_str,
-        },);
+        let msg = format!(
+            "expected `{}`, found {}",
+            token_str,
+            match (&self.token.kind, self.subparser_name) {
+                (token::Eof, Some(origin)) => format!("end of {origin}"),
+                _ => this_token_str,
+            },
+        );
         let mut err = self.dcx().struct_span_err(sp, msg);
         let label_exp = format!("expected `{token_str}`");
         let sm = self.psess.source_map();
@@ -2403,7 +2409,7 @@ impl<'a> Parser<'a> {
         let mut labels = vec![];
         while let TokenKind::Interpolated(nt) = &tok.kind {
             let tokens = nt.tokens();
-            labels.push(Lrc::clone(nt));
+            labels.push(Arc::clone(nt));
             if let Some(tokens) = tokens
                 && let tokens = tokens.to_attr_token_stream()
                 && let tokens = tokens.0.deref()
@@ -2826,25 +2832,27 @@ impl<'a> Parser<'a> {
                 PatKind::Ident(BindingMode::NONE, ident, None) => {
                     match &first_pat.kind {
                         PatKind::Ident(_, old_ident, _) => {
-                            let path = PatKind::Path(None, Path {
-                                span: new_span,
-                                segments: thin_vec![
-                                    PathSegment::from_ident(*old_ident),
-                                    PathSegment::from_ident(*ident),
-                                ],
-                                tokens: None,
-                            });
+                            let path = PatKind::Path(
+                                None,
+                                Path {
+                                    span: new_span,
+                                    segments: thin_vec![
+                                        PathSegment::from_ident(*old_ident),
+                                        PathSegment::from_ident(*ident),
+                                    ],
+                                    tokens: None,
+                                },
+                            );
                             first_pat = self.mk_pat(new_span, path);
                             show_sugg = true;
                         }
                         PatKind::Path(old_qself, old_path) => {
                             let mut segments = old_path.segments.clone();
                             segments.push(PathSegment::from_ident(*ident));
-                            let path = PatKind::Path(old_qself.clone(), Path {
-                                span: new_span,
-                                segments,
-                                tokens: None,
-                            });
+                            let path = PatKind::Path(
+                                old_qself.clone(),
+                                Path { span: new_span, segments, tokens: None },
+                            );
                             first_pat = self.mk_pat(new_span, path);
                             show_sugg = true;
                         }
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index ffd46f20767..b2e58c94280 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -807,17 +807,20 @@ impl<'a> Parser<'a> {
         // Check if an illegal postfix operator has been added after the cast.
         // If the resulting expression is not a cast, it is an illegal postfix operator.
         if !matches!(with_postfix.kind, ExprKind::Cast(_, _)) {
-            let msg = format!("cast cannot be followed by {}", match with_postfix.kind {
-                ExprKind::Index(..) => "indexing",
-                ExprKind::Try(_) => "`?`",
-                ExprKind::Field(_, _) => "a field access",
-                ExprKind::MethodCall(_) => "a method call",
-                ExprKind::Call(_, _) => "a function call",
-                ExprKind::Await(_, _) => "`.await`",
-                ExprKind::Match(_, _, MatchKind::Postfix) => "a postfix match",
-                ExprKind::Err(_) => return Ok(with_postfix),
-                _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
-            });
+            let msg = format!(
+                "cast cannot be followed by {}",
+                match with_postfix.kind {
+                    ExprKind::Index(..) => "indexing",
+                    ExprKind::Try(_) => "`?`",
+                    ExprKind::Field(_, _) => "a field access",
+                    ExprKind::MethodCall(_) => "a method call",
+                    ExprKind::Call(_, _) => "a function call",
+                    ExprKind::Await(_, _) => "`.await`",
+                    ExprKind::Match(_, _, MatchKind::Postfix) => "a postfix match",
+                    ExprKind::Err(_) => return Ok(with_postfix),
+                    _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
+                }
+            );
             let mut err = self.dcx().struct_span_err(span, msg);

             let suggest_parens = |err: &mut Diag<'_>| {
@@ -2862,13 +2865,10 @@ impl<'a> Parser<'a> {
                 .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
             let err_expr = self.mk_expr(expr.span, ExprKind::Err(guar));
             let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
-            return Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::ForLoop {
-                pat,
-                iter: err_expr,
-                body: block,
-                label: opt_label,
-                kind,
-            }));
+            return Ok(self.mk_expr(
+                lo.to(self.prev_token.span),
+                ExprKind::ForLoop { pat, iter: err_expr, body: block, label: opt_label, kind },
+            ));
         }

         let (attrs, loop_block) = self.parse_inner_attrs_and_block()?;
@@ -3125,10 +3125,11 @@ impl<'a> Parser<'a> {
         let mut result = if armless {
             // A pattern without a body, allowed for never patterns.
             arm_body = None;
+            let span = lo.to(this.prev_token.span);
             this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map(|x| {
                 // Don't gate twice
                 if !pat.contains_never_pattern() {
-                    this.psess.gated_spans.gate(sym::never_patterns, pat.span);
+                    this.psess.gated_spans.gate(sym::never_patterns, span);
                 }
                 x
             })
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 86816819be2..11f0e579de5 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -302,26 +302,16 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_contract(
         &mut self,
     ) -> PResult<'a, Option<rustc_ast::ptr::P<ast::FnContract>>> {
-        let gate = |span| {
-            if self.psess.contract_attribute_spans.contains(span) {
-                // span was generated via a builtin contracts attribute, so gate as end-user visible
-                self.psess.gated_spans.gate(sym::contracts, span);
-            } else {
-                // span was not generated via a builtin contracts attribute, so gate as internal machinery
-                self.psess.gated_spans.gate(sym::contracts_internals, span);
-            }
-        };
-
         let requires = if self.eat_keyword_noexpect(exp!(ContractRequires).kw) {
+            self.psess.gated_spans.gate(sym::contracts_internals, self.prev_token.span);
             let precond = self.parse_expr()?;
-            gate(precond.span);
             Some(precond)
         } else {
             None
         };
         let ensures = if self.eat_keyword_noexpect(exp!(ContractEnsures).kw) {
+            self.psess.gated_spans.gate(sym::contracts_internals, self.prev_token.span);
             let postcond = self.parse_expr()?;
-            gate(postcond.span);
             Some(postcond)
         } else {
             None
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index dbdc31f06a2..c923717ecaf 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1954,10 +1954,10 @@ impl<'a> Parser<'a> {
         // Try to recover extra trailing angle brackets
         if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind {
             if let Some(last_segment) = segments.last() {
-                let guar = self.check_trailing_angle_brackets(last_segment, &[
-                    exp!(Comma),
-                    exp!(CloseBrace),
-                ]);
+                let guar = self.check_trailing_angle_brackets(
+                    last_segment,
+                    &[exp!(Comma), exp!(CloseBrace)],
+                );
                 if let Some(_guar) = guar {
                     // Handle a case like `Vec<u8>>,` where we can continue parsing fields
                     // after the comma
@@ -2043,9 +2043,6 @@ impl<'a> Parser<'a> {
         }
         self.expect_field_ty_separator()?;
         let ty = self.parse_ty()?;
-        if self.token == token::Colon && self.look_ahead(1, |t| *t != token::Colon) {
-            self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
-        }
         let default = if self.token == token::Eq {
             self.bump();
             let const_expr = self.parse_expr_anon_const()?;
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index faebb5a40bb..80a33a76005 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -13,6 +13,7 @@ mod ty;

 use std::assert_matches::debug_assert_matches;
 use std::ops::Range;
+use std::sync::Arc;
 use std::{fmt, mem, slice};

 use attr_wrapper::{AttrWrapper, UsePreAttrPos};
@@ -34,7 +35,6 @@ use rustc_ast::{
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult};
 use rustc_index::interval::IntervalSet;
 use rustc_session::parse::ParseSess;
@@ -117,12 +117,15 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery
             && $self.may_recover()
-            && $self.look_ahead(1, |t| t == &token::PathSep)
-            && let token::Interpolated(nt) = &$self.token.kind
-            && let token::NtTy(ty) = &**nt
+            && let Some(mv_kind) = $self.token.is_metavar_seq()
+            && let token::MetaVarKind::Ty { .. } = mv_kind
+            && $self.check_noexpect_past_close_delim(&token::PathSep)
         {
-            let ty = ty.clone();
-            $self.bump();
+            // Reparse the type, then move to recovery.
+            let ty = $self
+                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
+                .expect("metavar seq ty");
+
             return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
         }
     };
@@ -614,6 +617,24 @@ impl<'a> Parser<'a> {
         self.token == *tok
     }

+    // Check the first token after the delimiter that closes the current
+    // delimited sequence. (Panics if used in the outermost token stream, which
+    // has no delimiters.) It uses a clone of the relevant tree cursor to skip
+    // past the entire `TokenTree::Delimited` in a single step, avoiding the
+    // need for unbounded token lookahead.
+    //
+    // Primarily used when `self.token` matches
+    // `OpenDelim(Delimiter::Invisible(_))`, to look ahead through the current
+    // metavar expansion.
+    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
+        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
+        tree_cursor.bump();
+        matches!(
+            tree_cursor.curr(),
+            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
+        )
+    }
+
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     ///
     /// the main purpose of this function is to reduce the cluttering of the suggestions list
@@ -721,6 +742,43 @@ impl<'a> Parser<'a> {
         if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
     }

+    /// Consume a sequence produced by a metavar expansion, if present.
+    fn eat_metavar_seq<T>(
+        &mut self,
+        mv_kind: MetaVarKind,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> Option<T> {
+        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
+    }
+
+    /// A slightly more general form of `eat_metavar_seq`, for use with the
+    /// `MetaVarKind` variants that have parameters, where an exact match isn't
+    /// desired.
+    fn eat_metavar_seq_with_matcher<T>(
+        &mut self,
+        match_mv_kind: impl Fn(MetaVarKind) -> bool,
+        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> Option<T> {
+        if let token::OpenDelim(delim) = self.token.kind
+            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+            && match_mv_kind(mv_kind)
+        {
+            self.bump();
+            let res = f(self).expect("failed to reparse {mv_kind:?}");
+            if let token::CloseDelim(delim) = self.token.kind
+                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+                && match_mv_kind(mv_kind)
+            {
+                self.bump();
+                Some(res)
+            } else {
+                panic!("no close delim when reparsing {mv_kind:?}");
+            }
+        } else {
+            None
+        }
+    }
+
     /// Is the given keyword `kw` followed by a non-reserved identifier?
     fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
         self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
@@ -754,9 +812,9 @@ impl<'a> Parser<'a> {
         self.is_keyword_ahead(0, &[kw::Const])
             && self.look_ahead(1, |t| match &t.kind {
                 // async closures do not work with const closures, so we do not parse that here.
-                token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
-                    true
-                }
+                token::Ident(kw::Move | kw::Static, IdentIsRaw::No)
+                | token::OrOr
+                | token::BinOp(token::Or) => true,
                 _ => false,
             })
     }
@@ -1455,7 +1513,11 @@ impl<'a> Parser<'a> {
     /// so emit a proper diagnostic.
     // Public for rustfmt usage.
     pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
-        maybe_whole!(self, NtVis, |vis| vis.into_inner());
+        if let Some(vis) = self
+            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
+        {
+            return Ok(vis);
+        }

         if !self.eat_keyword(exp!(Pub)) {
             // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
@@ -1595,7 +1657,7 @@ impl<'a> Parser<'a> {
     // Debug view of the parser's token stream, up to `{lookahead}` tokens.
     // Only used when debugging.
     #[allow(unused)]
-    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ {
+    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
         fmt::from_fn(move |f| {
             let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
@@ -1683,7 +1745,9 @@ pub enum ParseNtResult {
     Tt(TokenTree),
     Ident(Ident, IdentIsRaw),
     Lifetime(Ident, IdentIsRaw),
+    Ty(P<ast::Ty>),
+    Vis(P<ast::Visibility>),

-    /// This case will eventually be removed, along with `Token::Interpolate`.
-    Nt(Lrc<Nonterminal>),
+    /// This variant will eventually be removed, along with `Token::Interpolate`.
+    Nt(Arc<Nonterminal>),
 }
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 67cabb757e9..f202f85752e 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use rustc_ast::HasTokens;
 use rustc_ast::ptr::P;
 use rustc_ast::token::Nonterminal::*;
@@ -7,7 +9,6 @@ use rustc_ast::token::{
     self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token,
 };
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::PResult;
 use rustc_span::{Ident, kw};

@@ -29,7 +30,7 @@ impl<'a> Parser<'a> {
                 MetaVarKind::Stmt
                 | MetaVarKind::Pat(_)
                 | MetaVarKind::Expr { .. }
-                | MetaVarKind::Ty
+                | MetaVarKind::Ty { .. }
                 | MetaVarKind::Literal // `true`, `false`
                 | MetaVarKind::Meta
                 | MetaVarKind::Path => true,
@@ -50,14 +51,11 @@ impl<'a> Parser<'a> {
             NtStmt(_)
             | NtPat(_)
             | NtExpr(_)
-            | NtTy(_)
             | NtLiteral(_) // `true`, `false`
             | NtMeta(_)
             | NtPath(_) => true,

-            NtItem(_)
-            | NtBlock(_)
-            | NtVis(_) => false,
+            NtItem(_) | NtBlock(_) => false,
         }
     }

@@ -87,7 +85,7 @@ impl<'a> Parser<'a> {
             NonterminalKind::Ident => get_macro_ident(token).is_some(),
             NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
             NonterminalKind::Vis => match token.kind {
-                // The follow-set of :vis + "priv" keyword + interpolated
+                // The follow-set of :vis + "priv" keyword + interpolated/metavar-expansion.
                 token::Comma
                 | token::Ident(..)
                 | token::NtIdent(..)
@@ -101,7 +99,7 @@ impl<'a> Parser<'a> {
                 token::NtLifetime(..) => true,
                 token::Interpolated(nt) => match &**nt {
                     NtBlock(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
-                    NtItem(_) | NtPat(_) | NtTy(_) | NtMeta(_) | NtPath(_) | NtVis(_) => false,
+                    NtItem(_) | NtPat(_) | NtMeta(_) | NtPath(_) => false,
                 },
                 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
                     MetaVarKind::Block
@@ -110,7 +108,7 @@ impl<'a> Parser<'a> {
                     | MetaVarKind::Literal => true,
                     MetaVarKind::Item
                     | MetaVarKind::Pat(_)
-                    | MetaVarKind::Ty
+                    | MetaVarKind::Ty { .. }
                     | MetaVarKind::Meta
                     | MetaVarKind::Path
                     | MetaVarKind::Vis => false,
@@ -188,7 +186,9 @@ impl<'a> Parser<'a> {
                 NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?)
             }
             NonterminalKind::Ty => {
-                NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?)
+                return Ok(ParseNtResult::Ty(
+                    self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
+                ));
             }
             // this could be handled like a token, since it is one
             NonterminalKind::Ident => {
@@ -207,8 +207,9 @@ impl<'a> Parser<'a> {
             }
             NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(ForceCollect::Yes)?)),
             NonterminalKind::Vis => {
-                NtVis(P(self
-                    .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))
+                return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| {
+                    this.parse_visibility(FollowedByType::Yes)
+                })?)));
             }
             NonterminalKind::Lifetime => {
                 // We want to keep `'keyword` parsing, just like `keyword` is still
@@ -235,7 +236,7 @@ impl<'a> Parser<'a> {
             );
         }

-        Ok(ParseNtResult::Nt(Lrc::new(nt)))
+        Ok(ParseNtResult::Nt(Arc::new(nt)))
     }
 }
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 64bcb1a5a36..8ce749ec814 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1472,17 +1472,6 @@ impl<'a> Parser<'a> {
         let mut last_non_comma_dotdot_span = None;

         while self.token != token::CloseDelim(Delimiter::Brace) {
-            let attrs = match self.parse_outer_attributes() {
-                Ok(attrs) => attrs,
-                Err(err) => {
-                    if let Some(delayed) = delayed_err {
-                        delayed.emit();
-                    }
-                    return Err(err);
-                }
-            };
-            let lo = self.token.span;
-
             // check that a comma comes after every field
             if !ate_comma {
                 let err = if self.token == token::At {
@@ -1585,6 +1574,17 @@ impl<'a> Parser<'a> {
                 }
             }

+            let attrs = match self.parse_outer_attributes() {
+                Ok(attrs) => attrs,
+                Err(err) => {
+                    if let Some(delayed) = delayed_err {
+                        delayed.emit();
+                    }
+                    return Err(err);
+                }
+            };
+            let lo = self.token.span;
+
             let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
                 let field = match this.parse_pat_field(lo, attrs) {
                     Ok(field) => Ok(field),
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 39737b9e137..c24305ea9a8 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -2,7 +2,7 @@ use std::mem;

 use ast::token::IdentIsRaw;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, MetaVarKind, Token, TokenKind};
 use rustc_ast::{
     self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocItemConstraint,
     AssocItemConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
@@ -106,11 +106,10 @@ impl<'a> Parser<'a> {
             self.parse_path_segments(&mut path.segments, style, None)?;
         }

-        Ok((qself, Path {
-            segments: path.segments,
-            span: lo.to(self.prev_token.span),
-            tokens: None,
-        }))
+        Ok((
+            qself,
+            Path { segments: path.segments, span: lo.to(self.prev_token.span), tokens: None },
+        ))
     }

     /// Recover from an invalid single colon, when the user likely meant a qualified path.
@@ -197,14 +196,12 @@ impl<'a> Parser<'a> {
         maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner()));

-        if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtTy(ty) = &**nt {
-                if let ast::TyKind::Path(None, path) = &ty.kind {
-                    let path = path.clone();
-                    self.bump();
-                    return Ok(reject_generics_if_mod_style(self, path));
-                }
-            }
+        // If we have a `ty` metavar in the form of a path, reparse it directly as a path, instead
+        // of reparsing it as a `ty` and then extracting the path.
+        if let Some(path) = self.eat_metavar_seq(MetaVarKind::Ty { is_path: true }, |this| {
+            this.parse_path(PathStyle::Type)
+        }) {
+            return Ok(reject_generics_if_mod_style(self, path));
         }

         let lo = self.token.span;
@@ -247,8 +244,19 @@ impl<'a> Parser<'a> {
             segments.push(segment);

             if self.is_import_coupler() || !self.eat_path_sep() {
-                if style == PathStyle::Expr
-                    && self.may_recover()
+                let ok_for_recovery = self.may_recover()
+                    && match style {
+                        PathStyle::Expr => true,
+                        PathStyle::Type if let Some((ident, _)) = self.prev_token.ident() => {
+                            self.token == token::Colon
+                                && ident.as_str().chars().all(|c| c.is_lowercase())
+                                && self.token.span.lo() == self.prev_token.span.hi()
+                                && self
+                                    .look_ahead(1, |token| self.token.span.hi() == token.span.lo())
+                        }
+                        _ => false,
+                    };
+                if ok_for_recovery
                     && self.token == token::Colon
                     && self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
                 {
@@ -485,13 +493,16 @@ impl<'a> Parser<'a> {
                 error.span_suggestion_verbose(
                     prev_token_before_parsing.span,
-                    format!("consider removing the `::` here to {}", match style {
-                        PathStyle::Expr => "call the expression",
-                        PathStyle::Pat => "turn this into a tuple struct pattern",
-                        _ => {
-                            return;
-                        }
-                    }),
+                    format!(
+                        "consider removing the `::` here to {}",
+                        match style {
+                            PathStyle::Expr => "call the expression",
+                            PathStyle::Pat => "turn this into a tuple struct pattern",
+                            _ => {
+                                return;
+                            }
+                        }
+                    ),
                     "",
                     Applicability::MaybeIncorrect,
                 );
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 1ddb5fc0a11..a2699b077fc 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -417,14 +417,20 @@ impl<'a> Parser<'a> {
     fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
         if let Some(trailing) = classify::expr_trailing_brace(init) {
             let (span, sugg) = match trailing {
-                TrailingBrace::MacCall(mac) => (mac.span(), errors::WrapInParentheses::MacroArgs {
-                    left: mac.args.dspan.open,
-                    right: mac.args.dspan.close,
-                }),
-                TrailingBrace::Expr(expr) => (expr.span, errors::WrapInParentheses::Expression {
-                    left: expr.span.shrink_to_lo(),
-                    right: expr.span.shrink_to_hi(),
-                }),
+                TrailingBrace::MacCall(mac) => (
+                    mac.span(),
+                    errors::WrapInParentheses::MacroArgs {
+                        left: mac.args.dspan.open,
+                        right: mac.args.dspan.close,
+                    },
+                ),
+                TrailingBrace::Expr(expr) => (
+                    expr.span,
+                    errors::WrapInParentheses::Expression {
+                        left: expr.span.shrink_to_lo(),
+                        right: expr.span.shrink_to_hi(),
+                    },
+                ),
             };
             self.dcx().emit_err(errors::InvalidCurlyInLetElse {
                 span: span.with_lo(span.hi() - BytePos(1)),
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 655ab822359..8b8c81a77a0 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -13,7 +13,6 @@ use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, PatKind, visit};
 use rustc_ast_pretty::pprust::item_to_string;
-use rustc_data_structures::sync::Lrc;
 use rustc_errors::emitter::{HumanEmitter, OutputTheme};
 use rustc_errors::{DiagCtxt, MultiSpan, PResult};
 use rustc_session::parse::ParseSess;
@@ -27,7 +26,7 @@ use crate::parser::{ForceCollect, Parser};
 use crate::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};

 fn psess() -> ParseSess {
-    ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE])
+    ParseSess::new(vec![crate::DEFAULT_LOCALE_RESOURCE])
 }

 /// Map string to parser (via tts).
@@ -39,13 +38,11 @@ fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> {
     ))
 }

-fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Lrc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
+fn create_test_handler(theme: OutputTheme) -> (DiagCtxt, Arc<SourceMap>, Arc<Mutex<Vec<u8>>>) {
     let output = Arc::new(Mutex::new(Vec::new()));
-    let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-    let fallback_bundle = rustc_errors::fallback_fluent_bundle(
-        vec![crate::DEFAULT_LOCALE_RESOURCE, crate::DEFAULT_LOCALE_RESOURCE],
-        false,
-    );
+    let source_map = Arc::new(SourceMap::new(FilePathMapping::empty()));
+    let fallback_bundle =
+        rustc_errors::fallback_fluent_bundle(vec![crate::DEFAULT_LOCALE_RESOURCE], false);
     let mut emitter = HumanEmitter::new(Box::new(Shared { data: output.clone() }), fallback_bundle)
         .sm(Some(source_map.clone()))
         .diagnostic_width(Some(140));
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index dc5919b3630..18af0a1c79a 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, Token, TokenKind};
+use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,
@@ -18,7 +18,7 @@ use crate::errors::{
     HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
     NestedCVariadicType, ReturnTypesUseThinArrow,
 };
-use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath};

 /// Signals whether parsing a type should allow `+`.
 ///
@@ -183,7 +183,8 @@ impl<'a> Parser<'a> {
         )
     }

-    /// Parse a type without recovering `:` as `->` to avoid breaking code such as `where fn() : for<'a>`
+    /// Parse a type without recovering `:` as `->` to avoid breaking code such
+    /// as `where fn() : for<'a>`.
     pub(super) fn parse_ty_for_where_clause(&mut self) -> PResult<'a, P<Ty>> {
         self.parse_ty_common(
             AllowPlus::Yes,
@@ -247,7 +248,13 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Ty>> {
         let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
         maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
-        maybe_whole!(self, NtTy, |ty| ty);
+
+        if let Some(ty) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Ty { .. }),
+            |this| this.parse_ty_no_question_mark_recover(),
+        ) {
+            return Ok(ty);
+        }

         let lo = self.token.span;
         let mut impl_dyn_multi = false;
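
The change that recurs throughout the hunks above is the retirement of the `NtTy`/`NtVis` interpolated nonterminals: `parse_ty`, `parse_visibility`, and `parse_path` now reparse a metavariable from the invisible-delimited token sequence it expands to, via the new `eat_metavar_seq`/`eat_metavar_seq_with_matcher` helpers. The sketch below is a simplified, self-contained model of that open-delimiter / reparse / close-delimiter handshake, not the compiler's API: `MetaVarKind`, `Token`, and `Parser` here are invented stand-ins for illustration only.

// Simplified model of the `eat_metavar_seq` pattern: a metavariable expansion
// appears as `OpenInvisible(kind) ... CloseInvisible(kind)`, and the parser
// reparses the tokens in between with a supplied callback. All types are
// hypothetical stand-ins, not rustc_parse internals.
#[derive(Clone, Copy, PartialEq, Debug)]
#[allow(dead_code)]
enum MetaVarKind {
    Ty,
    Vis,
}

#[derive(Clone, Debug)]
enum Token {
    OpenInvisible(MetaVarKind),
    CloseInvisible(MetaVarKind),
    Ident(&'static str),
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    fn token(&self) -> Option<&Token> {
        self.tokens.get(self.pos)
    }

    fn bump(&mut self) {
        self.pos += 1;
    }

    /// Consume `OpenInvisible(kind) ... CloseInvisible(kind)` if present,
    /// running `f` to reparse whatever the metavariable expanded to.
    fn eat_metavar_seq<T>(
        &mut self,
        kind: MetaVarKind,
        f: impl FnOnce(&mut Parser) -> T,
    ) -> Option<T> {
        let opens = matches!(self.token(), Some(Token::OpenInvisible(k)) if *k == kind);
        if !opens {
            return None;
        }
        self.bump();
        let res = f(self);
        let closes = matches!(self.token(), Some(Token::CloseInvisible(k)) if *k == kind);
        assert!(closes, "no close delimiter when reparsing {kind:?}");
        self.bump();
        Some(res)
    }
}

fn main() {
    // A `$t:ty` expansion of `Foo`, surrounded by invisible delimiters.
    let mut p = Parser {
        tokens: vec![
            Token::OpenInvisible(MetaVarKind::Ty),
            Token::Ident("Foo"),
            Token::CloseInvisible(MetaVarKind::Ty),
        ],
        pos: 0,
    };
    let ty = p.eat_metavar_seq(MetaVarKind::Ty, |p| {
        // Stand-in for `parse_ty_no_question_mark_recover`: read one identifier.
        let name = match p.token() {
            Some(Token::Ident(name)) => *name,
            other => panic!("expected an identifier, found {other:?}"),
        };
        p.bump();
        name.to_string()
    });
    assert_eq!(ty.as_deref(), Some("Foo"));
    println!("reparsed ty metavar: {ty:?}");
}

In the compiler itself the delimiters are `Delimiter::Invisible(InvisibleOrigin::MetaVar(..))` tokens and the callback is one of the real parsing entry points (`parse_ty_no_question_mark_recover`, `parse_visibility`, `parse_path`), as the hunks above show.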
