| author | bors <bors@rust-lang.org> | 2025-04-14 03:56:55 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2025-04-14 03:56:55 +0000 |
| commit | f836ae4e663b6e8938096b8559e094d18361be55 | |
| tree | 210d28f13b7c2f09b8cc78ef0c4c146c0cc28322 /compiler/rustc_parse | |
| parent | 15f58c46da79399961a09db0c650a2f90f442e6b | |
| parent | 1830245a224c523f86ad3c62be76be3f336a9fb0 | |
Auto merge of #124141 - nnethercote:rm-Nonterminal-and-TokenKind-Interpolated, r=petrochenkov
Remove `Nonterminal` and `TokenKind::Interpolated`

A third attempt at this; the first attempt was #96724 and the second was #114647.

r? `@ghost`
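The recurring pattern in the diff below is that a captured metavariable fragment is no longer smuggled through the token stream as a single `Interpolated(Nonterminal)` token; the fragment keeps its own tokens, wrapped in invisible delimiters tagged with a `MetaVarKind`, and helpers such as `eat_metavar_seq` re-parse the delimited tokens on demand. Below is a minimal, self-contained sketch of that idea; `Token`, `Parser`, `OpenInvisible`, `CloseInvisible`, and this `eat_metavar_seq` are toy stand-ins invented for illustration, not rustc's real definitions.

```rust
// Minimal, self-contained model of the new token representation.
// All types here are toy stand-ins invented for illustration.

#[derive(Clone, Copy, Debug, PartialEq)]
enum MetaVarKind {
    Block,
}

#[derive(Clone, Debug, PartialEq)]
enum Token {
    Ident(&'static str),
    OpenBrace,
    CloseBrace,
    // Old style (what the PR removes in the real compiler): a single
    // `Interpolated(AstFragment)` token carrying a pre-parsed AST node.
    // New style: invisible delimiters around the fragment's own tokens,
    // tagged with the metavariable kind they came from.
    OpenInvisible(MetaVarKind),
    CloseInvisible(MetaVarKind),
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    /// Rough analogue of `eat_metavar_seq`: if the next token opens an
    /// invisible delimiter of `kind`, run `f` on the delimited tokens and
    /// consume the closing delimiter; otherwise leave the parser untouched.
    fn eat_metavar_seq<T>(
        &mut self,
        kind: MetaVarKind,
        f: impl FnOnce(&mut Self) -> T,
    ) -> Option<T> {
        if self.tokens.get(self.pos) == Some(&Token::OpenInvisible(kind)) {
            self.pos += 1;
            let res = f(self);
            // A real parser would report an error; the sketch just asserts.
            assert_eq!(self.tokens.get(self.pos), Some(&Token::CloseInvisible(kind)));
            self.pos += 1;
            Some(res)
        } else {
            None
        }
    }
}

fn main() {
    // A `$b:block` capture of `{ x }` is now the block's own tokens inside
    // invisible `Block` delimiters, rather than one opaque token.
    let mut p = Parser {
        tokens: vec![
            Token::OpenInvisible(MetaVarKind::Block),
            Token::OpenBrace,
            Token::Ident("x"),
            Token::CloseBrace,
            Token::CloseInvisible(MetaVarKind::Block),
        ],
        pos: 0,
    };
    let block = p.eat_metavar_seq(MetaVarKind::Block, |p| {
        // Stand-in for `parse_block`: collect the delimited tokens.
        let start = p.pos;
        while p.tokens[p.pos] != Token::CloseInvisible(MetaVarKind::Block) {
            p.pos += 1;
        }
        p.tokens[start..p.pos].to_vec()
    });
    println!("captured block tokens: {block:?}");
}
```

The real `eat_metavar_seq` used throughout the diff operates on rustc's token cursor and spans; this sketch only models the delimiter-matching shape that replaces the old `Interpolated` check.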
Diffstat (limited to 'compiler/rustc_parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/lexer/unicode_chars.rs | 2 |
| -rw-r--r-- | compiler/rustc_parse/src/lib.rs | 1 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/attr_wrapper.rs | 4 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/diagnostics.rs | 10 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 58 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/item.rs | 7 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 41 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 124 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/pat.rs | 6 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/path.rs | 4 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/stmt.rs | 12 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/tests.rs | 2 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/ty.rs | 4 |
13 files changed, 103 insertions, 172 deletions
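On the parse side (see the `parser/nonterminal.rs` and `parser/mod.rs` hunks below), the `NtBlock` arm of the old `Nonterminal` enum disappears and the nonterminal-parsing code hands the parsed block back directly through a new `ParseNtResult::Block` variant instead of wrapping it in an `Interpolated` token. A hedged, self-contained sketch of that shape, using toy stand-in types rather than rustc's actual definitions:

```rust
// Toy sketch of the parse-side result; not rustc's real types.

#[derive(Debug)]
struct Block(Vec<String>); // stand-in for `ast::Block`

// Old shape (removed): the parsed block was wrapped in a `Nonterminal`
// and carried by `TokenKind::Interpolated`:
//     enum Nonterminal { NtBlock(Block), /* ... */ }
//     TokenKind::Interpolated(Arc<Nonterminal>)

// New shape: the parse result names the fragment kind directly.
#[derive(Debug)]
enum ParseNtResult {
    Block(Box<Block>),
    // Ident(..), Item(..), Stmt(..), Pat(..), Expr(..), ... in the real enum
}

fn parse_block_metavar() -> ParseNtResult {
    // In the diff this step is `self.collect_tokens_no_attrs(|this| this.parse_block())`.
    ParseNtResult::Block(Box::new(Block(vec!["x".to_string()])))
}

fn main() {
    match parse_block_metavar() {
        ParseNtResult::Block(block) => println!("parsed block: {block:?}"),
    }
}
```

In the real enum, `Block(P<ast::Block>)` sits alongside the other fragment kinds (`Item`, `Stmt`, `Pat`, `Expr`, ...) shown in the `ParseNtResult` hunk of `parser/mod.rs` below.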
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index ff03b42484b..2bfa1ea4e05 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -376,7 +376,7 @@ pub(super) fn check_for_substitution( ascii_name, }) }; - (token.clone(), sugg) + (*token, sugg) } /// Extract string if found at current position with given delimiters diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 79939aab7fc..2edc8c83017 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -4,7 +4,6 @@ #![allow(internal_features)] #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] -#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141 #![feature(array_windows)] #![feature(assert_matches)] #![feature(box_patterns)] diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index cff998fa137..f1bd6a22730 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { // produce an empty `TokenStream` if no calls were made, and omit the // final token otherwise. let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = iter::once(FlatToken::Token(self.start_token.clone())) + let tokens = iter::once(FlatToken::Token(self.start_token)) .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) .take(self.num_calls as usize); @@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { impl<'a> Parser<'a> { pub(super) fn collect_pos(&self) -> CollectPos { CollectPos { - start_token: (self.token.clone(), self.token_spacing), + start_token: (self.token, self.token_spacing), cursor_snapshot: self.token_cursor.clone(), start_pos: self.num_bump_calls, } diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index ef044fe9d63..bb63e91aecf 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -322,7 +322,7 @@ impl<'a> Parser<'a> { let mut recovered_ident = None; // we take this here so that the correct original token is retained in // the diagnostic, regardless of eager recovery. 
- let bad_token = self.token.clone(); + let bad_token = self.token; // suggest prepending a keyword in identifier position with `r#` let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() @@ -382,7 +382,7 @@ impl<'a> Parser<'a> { // if the previous token is a valid keyword // that might use a generic, then suggest a correct // generic placement (later on) - let maybe_keyword = self.prev_token.clone(); + let maybe_keyword = self.prev_token; if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) { // if we have a valid keyword, attempt to parse generics // also obtain the keywords symbol @@ -530,7 +530,7 @@ impl<'a> Parser<'a> { // let y = 42; let guar = self.dcx().emit_err(ExpectedSemi { span: self.token.span, - token: self.token.clone(), + token: self.token, unexpected_token_label: None, sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); @@ -555,7 +555,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let guar = self.dcx().emit_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -801,7 +801,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let mut err = self.dcx().create_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index d52e36fcfac..14bc98f66da 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -344,7 +344,7 @@ impl<'a> Parser<'a> { fn error_found_expr_would_be_stmt(&self, lhs: &Expr) { self.dcx().emit_err(errors::FoundExprWouldBeStmt { span: self.token.span, - token: self.token.clone(), + token: self.token, suggestion: ExprParenthesesNeeded::surrounding(lhs.span), }); } @@ -417,7 +417,7 @@ impl<'a> Parser<'a> { cur_op_span: Span, ) -> PResult<'a, P<Expr>> { let rhs = if self.is_at_start_of_range_notation_rhs() { - let maybe_lt = self.token.clone(); + let maybe_lt = self.token; let attrs = self.parse_outer_attributes()?; Some( self.parse_expr_assoc_with(Bound::Excluded(prec), attrs) @@ -611,7 +611,7 @@ impl<'a> Parser<'a> { /// Recover on `not expr` in favor of `!expr`. fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { - let negated_token = self.look_ahead(1, |t| t.clone()); + let negated_token = self.look_ahead(1, |t| *t); let sub_diag = if negated_token.is_numeric_lit() { errors::NotAsNegationOperatorSub::SuggestNotBitwise @@ -637,9 +637,7 @@ impl<'a> Parser<'a> { /// Returns the span of expr if it was not interpolated, or the span of the interpolated token. fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { - TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => { - self.prev_token.span - } + TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) 
=> self.prev_token.span, TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { // `expr.span` is the interpolated span, because invisible open // and close delims both get marked with the same span, one @@ -1386,15 +1384,7 @@ impl<'a> Parser<'a> { maybe_recover_from_interpolated_ty_qpath!(self, true); let span = self.token.span; - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - token::NtBlock(block) => { - let block = block.clone(); - self.bump(); - return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None))); - } - }; - } else if let Some(expr) = self.eat_metavar_seq_with_matcher( + if let Some(expr) = self.eat_metavar_seq_with_matcher( |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), |this| { // Force collection (as opposed to just `parse_expr`) is required to avoid the @@ -1415,9 +1405,13 @@ impl<'a> Parser<'a> { self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) { return Ok(lit); - } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| { - this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type)) - }) { + } else if let Some(block) = + self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) + { + return Ok(self.mk_expr(span, ExprKind::Block(block, None))); + } else if let Some(path) = + self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type)) + { return Ok(self.mk_expr(span, ExprKind::Path(None, path))); } @@ -1612,7 +1606,7 @@ impl<'a> Parser<'a> { } fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> { - let maybe_eq_tok = self.prev_token.clone(); + let maybe_eq_tok = self.prev_token; let (qself, path) = if self.eat_lt() { let lt_span = self.prev_token.span; let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| { @@ -1671,7 +1665,7 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(exp!(Loop)) { self.parse_expr_loop(label, lo) } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) - || self.token.is_whole_block() + || self.token.is_metavar_block() { self.parse_expr_block(label, lo, BlockCheckMode::Default) } else if !ate_colon @@ -2073,7 +2067,7 @@ impl<'a> Parser<'a> { &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { - let token = self.token.clone(); + let token = self.token; let err = |self_: &Self| { let msg = format!("unexpected token: {}", super::token_descr(&token)); self_.dcx().struct_span_err(token.span, msg) @@ -2354,7 +2348,7 @@ impl<'a> Parser<'a> { } } - if self.token.is_whole_block() { + if self.token.is_metavar_block() { self.dcx().emit_err(errors::InvalidBlockMacroSegment { span: self.token.span, context: lo.to(self.token.span), @@ -2379,7 +2373,7 @@ impl<'a> Parser<'a> { fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> { let lo = self.token.span; - let before = self.prev_token.clone(); + let before = self.prev_token; let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; @@ -2411,8 +2405,8 @@ impl<'a> Parser<'a> { FnRetTy::Default(_) => { let restrictions = self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; - let prev = self.prev_token.clone(); - let token = self.token.clone(); + let prev = self.prev_token; + let token = self.token; let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { Ok((expr, _)) => expr, @@ -2477,7 +2471,7 @@ impl<'a> Parser<'a> { if self.may_recover() && 
self.token.can_begin_expr() && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace)) - && !self.token.is_whole_block() + && !self.token.is_metavar_block() { let snapshot = self.create_snapshot_for_diagnostic(); let restrictions = @@ -2659,7 +2653,7 @@ impl<'a> Parser<'a> { } } else { let attrs = self.parse_outer_attributes()?; // For recovery. - let maybe_fatarrow = self.token.clone(); + let maybe_fatarrow = self.token; let block = if self.check(exp!(OpenBrace)) { self.parse_block()? } else if let Some(block) = recover_block_from_condition(self) { @@ -3524,7 +3518,7 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw::Do) && self.is_keyword_ahead(1, &[kw::Catch]) && self - .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()) && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) } @@ -3535,7 +3529,7 @@ impl<'a> Parser<'a> { fn is_try_block(&self) -> bool { self.token.is_keyword(kw::Try) && self - .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()) && self.token_uninterpolated_span().at_least_rust_2018() } @@ -3569,12 +3563,12 @@ impl<'a> Parser<'a> { // `async move {` self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use]) && self.look_ahead(lookahead + 2, |t| { - *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block() + *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block() }) ) || ( // `async {` self.look_ahead(lookahead + 1, |t| { - *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block() + *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block() }) )) } @@ -3867,7 +3861,7 @@ impl<'a> Parser<'a> { return Err(this.dcx().create_err(errors::ExpectedStructField { span: this.look_ahead(1, |t| t.span), ident_span: this.token.span, - token: this.look_ahead(1, |t| t.clone()), + token: this.look_ahead(1, |t| *t), })); } let (ident, expr) = if is_shorthand { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 06501816340..39a0291cb1e 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1732,8 +1732,7 @@ impl<'a> Parser<'a> { self.expect_semi()?; body } else { - let err = - errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone()); + let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token); return Err(self.dcx().create_err(err)); }; @@ -2303,7 +2302,7 @@ impl<'a> Parser<'a> { || self.token.is_keyword(kw::Union)) && self.look_ahead(1, |t| t.is_ident()) { - let kw_token = self.token.clone(); + let kw_token = self.token; let kw_str = pprust::token_to_string(&kw_token); let item = self.parse_item(ForceCollect::No)?; let mut item = item.unwrap().span; @@ -2536,7 +2535,7 @@ impl<'a> Parser<'a> { self.expect_semi()?; *sig_hi = self.prev_token.span; (AttrVec::new(), None) - } else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() { + } else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() { self.parse_block_common(self.token.span, BlockCheckMode::Default, None) .map(|(attrs, body)| (attrs, Some(body)))? 
} else if self.token == token::Eq { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index fafd1b1ae00..2221a261b4c 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -13,7 +13,6 @@ mod ty; use std::assert_matches::debug_assert_matches; use std::ops::Range; -use std::sync::Arc; use std::{fmt, mem, slice}; use attr_wrapper::{AttrWrapper, UsePreAttrPos}; @@ -24,8 +23,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{ - self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtExprKind, NtPatKind, - Token, TokenKind, + self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, + TokenKind, }; use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree}; use rustc_ast::util::case::Case; @@ -98,21 +97,6 @@ pub enum ForceCollect { No, } -#[macro_export] -macro_rules! maybe_whole { - ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - #[allow(irrefutable_let_patterns)] // FIXME: temporary - if let token::Interpolated(nt) = &$p.token.kind - && let token::$constructor(x) = &**nt - { - #[allow(unused_mut)] - let mut $x = x.clone(); - $p.bump(); - return Ok($e); - } - }; -} - /// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`. #[macro_export] macro_rules! maybe_recover_from_interpolated_ty_qpath { @@ -342,12 +326,12 @@ impl TokenCursor { // below can be removed. if let Some(tree) = self.curr.curr() { match tree { - &TokenTree::Token(ref token, spacing) => { + &TokenTree::Token(token, spacing) => { debug_assert!(!matches!( token.kind, token::OpenDelim(_) | token::CloseDelim(_) )); - let res = (token.clone(), spacing); + let res = (token, spacing); self.curr.bump(); return res; } @@ -459,7 +443,6 @@ pub fn token_descr(token: &Token) -> String { (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"), (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"), (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"), - (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()), (None, _) => format!("`{s}`"), } } @@ -855,8 +838,10 @@ impl<'a> Parser<'a> { fn check_inline_const(&self, dist: usize) -> bool { self.is_keyword_ahead(dist, &[kw::Const]) && self.look_ahead(dist + 1, |t| match &t.kind { - token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)), token::OpenDelim(Delimiter::Brace) => true, + token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + MetaVarKind::Block, + ))) => true, _ => false, }) } @@ -1402,7 +1387,7 @@ impl<'a> Parser<'a> { // Avoid const blocks and const closures to be parsed as const items if (self.check_const_closure() == is_closure) && !self - .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()) && self.eat_keyword_case(exp!(Const), case) { Const::Yes(self.prev_token_uninterpolated_span()) @@ -1532,7 +1517,7 @@ impl<'a> Parser<'a> { _ => { let prev_spacing = self.token_spacing; self.bump(); - TokenTree::Token(self.prev_token.clone(), prev_spacing) + TokenTree::Token(self.prev_token, prev_spacing) } } } @@ -1718,7 +1703,7 @@ impl<'a> Parser<'a> { dbg_fmt.field("prev_token", &self.prev_token); let mut tokens = vec![]; for i in 0..lookahead { - let tok = self.look_ahead(i, |tok| tok.kind.clone()); + let tok = self.look_ahead(i, |tok| 
tok.kind); let is_eof = tok == TokenKind::Eof; tokens.push(tok); if is_eof { @@ -1759,7 +1744,6 @@ impl<'a> Parser<'a> { pub fn token_uninterpolated_span(&self) -> Span { match &self.token.kind { token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, - token::Interpolated(nt) => nt.use_span(), token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { self.look_ahead(1, |t| t.span) } @@ -1771,7 +1755,6 @@ impl<'a> Parser<'a> { pub fn prev_token_uninterpolated_span(&self) -> Span { match &self.prev_token.kind { token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, - token::Interpolated(nt) => nt.use_span(), token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { self.look_ahead(0, |t| t.span) } @@ -1828,6 +1811,7 @@ pub enum ParseNtResult { Ident(Ident, IdentIsRaw), Lifetime(Ident, IdentIsRaw), Item(P<ast::Item>), + Block(P<ast::Block>), Stmt(P<ast::Stmt>), Pat(P<ast::Pat>, NtPatKind), Expr(P<ast::Expr>, NtExprKind), @@ -1836,7 +1820,4 @@ pub enum ParseNtResult { Meta(P<ast::AttrItem>), Path(P<ast::Path>), Vis(P<ast::Visibility>), - - /// This variant will eventually be removed, along with `Token::Interpolate`. - Nt(Arc<Nonterminal>), } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index b4e540d670d..b6e89cd7fa4 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -1,14 +1,7 @@ -use std::sync::Arc; - -use rustc_ast::HasTokens; use rustc_ast::ptr::P; -use rustc_ast::token::Nonterminal::*; use rustc_ast::token::NtExprKind::*; use rustc_ast::token::NtPatKind::*; -use rustc_ast::token::{ - self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token, -}; -use rustc_ast_pretty::pprust; +use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, NonterminalKind, Token}; use rustc_errors::PResult; use rustc_span::{Ident, kw}; @@ -45,13 +38,6 @@ impl<'a> Parser<'a> { } } - /// Old variant of `may_be_ident`. Being phased out. - fn nt_may_be_ident(nt: &Nonterminal) -> bool { - match nt { - NtBlock(_) => false, - } - } - match kind { // `expr_2021` and earlier NonterminalKind::Expr(Expr2021 { .. }) => { @@ -83,16 +69,12 @@ impl<'a> Parser<'a> { | token::Ident(..) | token::NtIdent(..) | token::NtLifetime(..) - | token::Interpolated(_) | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true, _ => token.can_begin_type(), }, NonterminalKind::Block => match &token.kind { token::OpenDelim(Delimiter::Brace) => true, token::NtLifetime(..) => true, - token::Interpolated(nt) => match &**nt { - NtBlock(_) => true, - }, token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k { MetaVarKind::Block | MetaVarKind::Stmt @@ -112,7 +94,6 @@ impl<'a> Parser<'a> { }, NonterminalKind::Path | NonterminalKind::Meta => match &token.kind { token::PathSep | token::Ident(..) | token::NtIdent(..) => true, - token::Interpolated(nt) => nt_may_be_ident(nt), token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => { may_be_ident(*kind) } @@ -136,110 +117,85 @@ impl<'a> Parser<'a> { // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro, // which requires having captured tokens available. Since we cannot determine // in advance whether or not a proc-macro will be (transitively) invoked, - // we always capture tokens for any `Nonterminal` which needs them. 
- let mut nt = match kind { + // we always capture tokens for any nonterminal that needs them. + match kind { // Note that TT is treated differently to all the others. - NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())), + NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())), NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? { - Some(item) => return Ok(ParseNtResult::Item(item)), - None => { - return Err(self - .dcx() - .create_err(UnexpectedNonterminal::Item(self.token.span))); - } + Some(item) => Ok(ParseNtResult::Item(item)), + None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))), }, NonterminalKind::Block => { // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`), // the ':block' matcher does not support them - NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?) + Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?)) } NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? { - Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))), + Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))), None => { - return Err(self - .dcx() - .create_err(UnexpectedNonterminal::Statement(self.token.span))); + Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span))) } }, - NonterminalKind::Pat(pat_kind) => { - return Ok(ParseNtResult::Pat( - self.collect_tokens_no_attrs(|this| match pat_kind { - PatParam { .. } => this.parse_pat_no_top_alt(None, None), - PatWithOr => this.parse_pat_no_top_guard( - None, - RecoverComma::No, - RecoverColon::No, - CommaRecoveryMode::EitherTupleOrPipe, - ), - })?, - pat_kind, - )); - } + NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat( + self.collect_tokens_no_attrs(|this| match pat_kind { + PatParam { .. } => this.parse_pat_no_top_alt(None, None), + PatWithOr => this.parse_pat_no_top_guard( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + ), + })?, + pat_kind, + )), NonterminalKind::Expr(expr_kind) => { - return Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind)); + Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind)) } NonterminalKind::Literal => { // The `:literal` matcher does not support attributes. - return Ok(ParseNtResult::Literal( + Ok(ParseNtResult::Literal( self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?, - )); - } - NonterminalKind::Ty => { - return Ok(ParseNtResult::Ty( - self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, - )); + )) } - // this could be handled like a token, since it is one + NonterminalKind::Ty => Ok(ParseNtResult::Ty( + self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, + )), + // This could be handled like a token, since it is one. NonterminalKind::Ident => { - return if let Some((ident, is_raw)) = get_macro_ident(&self.token) { + if let Some((ident, is_raw)) = get_macro_ident(&self.token) { self.bump(); Ok(ParseNtResult::Ident(ident, is_raw)) } else { Err(self.dcx().create_err(UnexpectedNonterminal::Ident { span: self.token.span, - token: self.token.clone(), + token: self.token, })) - }; - } - NonterminalKind::Path => { - return Ok(ParseNtResult::Path(P( - self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? - ))); + } } + NonterminalKind::Path => Ok(ParseNtResult::Path(P( + self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? 
+ ))), NonterminalKind::Meta => { - return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))); + Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))) } NonterminalKind::Vis => { - return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| { - this.parse_visibility(FollowedByType::Yes) - })?))); + Ok(ParseNtResult::Vis(P(self + .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))) } NonterminalKind::Lifetime => { // We want to keep `'keyword` parsing, just like `keyword` is still // an ident for nonterminal purposes. - return if let Some((ident, is_raw)) = self.token.lifetime() { + if let Some((ident, is_raw)) = self.token.lifetime() { self.bump(); Ok(ParseNtResult::Lifetime(ident, is_raw)) } else { Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime { span: self.token.span, - token: self.token.clone(), + token: self.token, })) - }; + } } - }; - - // If tokens are supported at all, they should be collected. - if matches!(nt.tokens_mut(), Some(None)) { - panic!( - "Missing tokens for nt {:?} at {:?}: {:?}", - nt, - nt.use_span(), - pprust::nonterminal_to_string(&nt) - ); } - - Ok(ParseNtResult::Nt(Arc::new(nt))) } } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 9612f71b2af..d5f469f9aa9 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -363,7 +363,7 @@ impl<'a> Parser<'a> { self.dcx().emit_err(TrailingVertNotAllowed { span: self.token.span, start: lo, - token: self.token.clone(), + token: self.token, note_double_vert: matches!(self.token.kind, token::OrOr), }); self.bump(); @@ -1519,8 +1519,8 @@ impl<'a> Parser<'a> { etc = PatFieldsRest::Rest; let mut etc_sp = self.token.span; if first_etc_and_maybe_comma_span.is_none() { - if let Some(comma_tok) = self - .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None }) + if let Some(comma_tok) = + self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None }) { let nw_span = self .psess diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 9c6830c3672..30fb96c6ea9 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -393,8 +393,8 @@ impl<'a> Parser<'a> { } else { // `(T, U) -> R` - let prev_token_before_parsing = self.prev_token.clone(); - let token_before_parsing = self.token.clone(); + let prev_token_before_parsing = self.prev_token; + let token_before_parsing = self.token; let mut snapshot = None; if self.may_recover() && prev_token_before_parsing == token::PathSep diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index e00fd40ecee..a50a9e224cf 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -23,8 +23,8 @@ use super::{ AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode, Trailing, UsePreAttrPos, }; -use crate::errors::MalformedLoopLabel; -use crate::{errors, exp, maybe_whole}; +use crate::errors::{self, MalformedLoopLabel}; +use crate::exp; impl<'a> Parser<'a> { /// Parses a statement. This stops just before trailing semicolons on everything but items. 
@@ -696,9 +696,11 @@ impl<'a> Parser<'a> { blk_mode: BlockCheckMode, loop_header: Option<Span>, ) -> PResult<'a, (AttrVec, P<Block>)> { - maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block)); + if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) { + return Ok((AttrVec::new(), block)); + } - let maybe_ident = self.prev_token.clone(); + let maybe_ident = self.prev_token; self.maybe_recover_unexpected_block_label(loop_header); if !self.eat(exp!(OpenBrace)) { return self.error_block_no_opening_brace(); @@ -911,7 +913,7 @@ impl<'a> Parser<'a> { { if self.token == token::Colon && self.look_ahead(1, |token| { - token.is_whole_block() + token.is_metavar_block() || matches!( token.kind, token::Ident( diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 49ae6cb9b72..2f958f4d492 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -2554,7 +2554,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) { // Do the `assert_eq` outside the closure so that `track_caller` works. // (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure // doesn't give the line number in the test below if the assertion fails.) - let tok = p.look_ahead(dist, |tok| tok.clone()); + let tok = p.look_ahead(dist, |tok| *tok); assert_eq!(kind, tok.kind); } diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 93705da22c4..d0cff42a209 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -547,7 +547,7 @@ impl<'a> Parser<'a> { // Recovery mutbl = Mutability::Mut; - let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing); + let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing); self.bump(); self.bump_with((dyn_tok, dyn_tok_sp)); } @@ -886,7 +886,7 @@ impl<'a> Parser<'a> { /// ``` fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> { let lo = self.token.span; - let leading_token = self.prev_token.clone(); + let leading_token = self.prev_token; let has_parens = self.eat(exp!(OpenParen)); let bound = if self.token.is_lifetime() { |
