| field | value |
|---|---|
| author | Tshepang Mbambo <tshepang@gmail.com>, 2025-04-19 16:06:07 +0200 |
| committer | GitHub <noreply@github.com>, 2025-04-19 16:06:07 +0200 |
| commit | b886a8e654ab660f2f2cd02c68bf170dd4918fd8 (patch) |
| tree | 86e6a9ab8518740ca6ce8deae8f409ee9c309f20 /compiler/rustc_parse/src/parser |
| parent | 52616fa902a9a551ba4e574bee41dbf6162746f4 (diff) |
| parent | 07b7f00a9e961fdf66ece2ecf70ae1dba505f10e (diff) |
| download | rust-b886a8e654ab660f2f2cd02c68bf170dd4918fd8.tar.gz, rust-b886a8e654ab660f2f2cd02c68bf170dd4918fd8.zip |
Merge pull request #2347 from rust-lang/rustc-pull
Rustc pull update
Diffstat (limited to 'compiler/rustc_parse/src/parser')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/parser/attr_wrapper.rs | 4 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/diagnostics.rs | 46 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 83 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/item.rs | 32 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 61 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 124 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/pat.rs | 6 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/path.rs | 6 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/stmt.rs | 36 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/tests.rs | 2 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/ty.rs | 67 |
11 files changed, 244 insertions, 223 deletions
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index cff998fa137..f1bd6a22730 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { // produce an empty `TokenStream` if no calls were made, and omit the // final token otherwise. let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = iter::once(FlatToken::Token(self.start_token.clone())) + let tokens = iter::once(FlatToken::Token(self.start_token)) .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) .take(self.num_calls as usize); @@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { impl<'a> Parser<'a> { pub(super) fn collect_pos(&self) -> CollectPos { CollectPos { - start_token: (self.token.clone(), self.token_spacing), + start_token: (self.token, self.token_spacing), cursor_snapshot: self.token_cursor.clone(), start_pos: self.num_bump_calls, } diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index ef044fe9d63..7c8e0146c3d 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -322,7 +322,7 @@ impl<'a> Parser<'a> { let mut recovered_ident = None; // we take this here so that the correct original token is retained in // the diagnostic, regardless of eager recovery. - let bad_token = self.token.clone(); + let bad_token = self.token; // suggest prepending a keyword in identifier position with `r#` let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() @@ -382,7 +382,7 @@ impl<'a> Parser<'a> { // if the previous token is a valid keyword // that might use a generic, then suggest a correct // generic placement (later on) - let maybe_keyword = self.prev_token.clone(); + let maybe_keyword = self.prev_token; if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) { // if we have a valid keyword, attempt to parse generics // also obtain the keywords symbol @@ -530,7 +530,7 @@ impl<'a> Parser<'a> { // let y = 42; let guar = self.dcx().emit_err(ExpectedSemi { span: self.token.span, - token: self.token.clone(), + token: self.token, unexpected_token_label: None, sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); @@ -555,7 +555,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let guar = self.dcx().emit_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -609,6 +609,8 @@ impl<'a> Parser<'a> { // FIXME: translation requires list formatting (for `expect`) let mut err = self.dcx().struct_span_err(self.token.span, msg_exp); + self.label_expected_raw_ref(&mut err); + // Look for usages of '=>' where '>=' was probably intended if self.token == token::FatArrow && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le)) @@ -750,6 +752,25 @@ impl<'a> Parser<'a> { Err(err) } + /// Adds a label when `&raw EXPR` was written instead of `&raw const EXPR`/`&raw mut EXPR`. + /// + /// Given that not all parser diagnostics flow through `expected_one_of_not_found`, this + /// label may need added to other diagnostics emission paths as needed. 
+ pub(super) fn label_expected_raw_ref(&mut self, err: &mut Diag<'_>) { + if self.prev_token.is_keyword(kw::Raw) + && self.expected_token_types.contains(TokenType::KwMut) + && self.expected_token_types.contains(TokenType::KwConst) + && self.token.can_begin_expr() + { + err.span_suggestions( + self.prev_token.span.shrink_to_hi(), + "`&raw` must be followed by `const` or `mut` to be a raw reference expression", + [" const".to_string(), " mut".to_string()], + Applicability::MaybeIncorrect, + ); + } + } + /// Checks if the current token or the previous token are misspelled keywords /// and adds a helpful suggestion. fn check_for_misspelled_kw(&self, err: &mut Diag<'_>, expected: &[TokenType]) { @@ -801,7 +822,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let mut err = self.dcx().create_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -1636,19 +1657,19 @@ impl<'a> Parser<'a> { self.bump(); // `+` let _bounds = self.parse_generic_bounds()?; - let sum_span = ty.span.to(self.prev_token.span); - let sub = match &ty.kind { TyKind::Ref(_lifetime, mut_ty) => { let lo = mut_ty.ty.span.shrink_to_lo(); let hi = self.prev_token.span.shrink_to_hi(); BadTypePlusSub::AddParen { suggestion: AddParen { lo, hi } } } - TyKind::Ptr(..) | TyKind::BareFn(..) => BadTypePlusSub::ForgotParen { span: sum_span }, - _ => BadTypePlusSub::ExpectPath { span: sum_span }, + TyKind::Ptr(..) | TyKind::BareFn(..) => { + BadTypePlusSub::ForgotParen { span: ty.span.to(self.prev_token.span) } + } + _ => BadTypePlusSub::ExpectPath { span: ty.span }, }; - self.dcx().emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub }); + self.dcx().emit_err(BadTypePlus { span: ty.span, sub }); Ok(()) } @@ -1922,10 +1943,7 @@ impl<'a> Parser<'a> { && self.token == token::Colon && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span)) { - self.dcx().emit_err(ColonAsSemi { - span: self.token.span, - type_ascription: self.psess.unstable_features.is_nightly_build(), - }); + self.dcx().emit_err(ColonAsSemi { span: self.token.span }); self.bump(); return true; } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 9c457f150a3..df44b3cc23c 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -344,7 +344,7 @@ impl<'a> Parser<'a> { fn error_found_expr_would_be_stmt(&self, lhs: &Expr) { self.dcx().emit_err(errors::FoundExprWouldBeStmt { span: self.token.span, - token: self.token.clone(), + token: self.token, suggestion: ExprParenthesesNeeded::surrounding(lhs.span), }); } @@ -417,7 +417,7 @@ impl<'a> Parser<'a> { cur_op_span: Span, ) -> PResult<'a, P<Expr>> { let rhs = if self.is_at_start_of_range_notation_rhs() { - let maybe_lt = self.token.clone(); + let maybe_lt = self.token; let attrs = self.parse_outer_attributes()?; Some( self.parse_expr_assoc_with(Bound::Excluded(prec), attrs) @@ -611,7 +611,7 @@ impl<'a> Parser<'a> { /// Recover on `not expr` in favor of `!expr`. 
fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { - let negated_token = self.look_ahead(1, |t| t.clone()); + let negated_token = self.look_ahead(1, |t| *t); let sub_diag = if negated_token.is_numeric_lit() { errors::NotAsNegationOperatorSub::SuggestNotBitwise @@ -637,9 +637,7 @@ impl<'a> Parser<'a> { /// Returns the span of expr if it was not interpolated, or the span of the interpolated token. fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { - TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => { - self.prev_token.span - } + TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) => self.prev_token.span, TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { // `expr.span` is the interpolated span, because invisible open // and close delims both get marked with the same span, one @@ -829,6 +827,18 @@ impl<'a> Parser<'a> { if let Some(lt) = lifetime { self.error_remove_borrow_lifetime(span, lt.ident.span.until(expr.span)); } + + // Add expected tokens if we parsed `&raw` as an expression. + // This will make sure we see "expected `const`, `mut`", and + // guides recovery in case we write `&raw expr`. + if borrow_kind == ast::BorrowKind::Ref + && mutbl == ast::Mutability::Not + && matches!(&expr.kind, ExprKind::Path(None, p) if p.is_ident(kw::Raw)) + { + self.expected_token_types.insert(TokenType::KwMut); + self.expected_token_types.insert(TokenType::KwConst); + } + Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) } @@ -1386,15 +1396,7 @@ impl<'a> Parser<'a> { maybe_recover_from_interpolated_ty_qpath!(self, true); let span = self.token.span; - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - token::NtBlock(block) => { - let block = block.clone(); - self.bump(); - return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None))); - } - }; - } else if let Some(expr) = self.eat_metavar_seq_with_matcher( + if let Some(expr) = self.eat_metavar_seq_with_matcher( |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. 
}), |this| { // Force collection (as opposed to just `parse_expr`) is required to avoid the @@ -1415,9 +1417,13 @@ impl<'a> Parser<'a> { self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) { return Ok(lit); - } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| { - this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type)) - }) { + } else if let Some(block) = + self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) + { + return Ok(self.mk_expr(span, ExprKind::Block(block, None))); + } else if let Some(path) = + self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type)) + { return Ok(self.mk_expr(span, ExprKind::Path(None, path))); } @@ -1612,7 +1618,7 @@ impl<'a> Parser<'a> { } fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> { - let maybe_eq_tok = self.prev_token.clone(); + let maybe_eq_tok = self.prev_token; let (qself, path) = if self.eat_lt() { let lt_span = self.prev_token.span; let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| { @@ -1671,7 +1677,7 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(exp!(Loop)) { self.parse_expr_loop(label, lo) } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) - || self.token.is_whole_block() + || self.token.is_metavar_block() { self.parse_expr_block(label, lo, BlockCheckMode::Default) } else if !ate_colon @@ -2073,7 +2079,7 @@ impl<'a> Parser<'a> { &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { - let token = self.token.clone(); + let token = self.token; let err = |self_: &Self| { let msg = format!("unexpected token: {}", super::token_descr(&token)); self_.dcx().struct_span_err(token.span, msg) @@ -2166,10 +2172,15 @@ impl<'a> Parser<'a> { let expr = self .eat_metavar_seq(mv_kind, |this| this.parse_expr()) .expect("metavar seq expr"); - let ast::ExprKind::Lit(token_lit) = expr.kind else { - panic!("didn't reparse an expr"); - }; - Some(token_lit) + if let ast::ExprKind::Lit(token_lit) = expr.kind { + Some(token_lit) + } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind + && let ast::Expr { kind: ast::ExprKind::Lit(_), .. 
} = **inner + { + None + } else { + panic!("unexpected reparsed expr: {:?}", expr.kind); + } } _ => None, } @@ -2349,7 +2360,7 @@ impl<'a> Parser<'a> { } } - if self.token.is_whole_block() { + if self.token.is_metavar_block() { self.dcx().emit_err(errors::InvalidBlockMacroSegment { span: self.token.span, context: lo.to(self.token.span), @@ -2374,7 +2385,7 @@ impl<'a> Parser<'a> { fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> { let lo = self.token.span; - let before = self.prev_token.clone(); + let before = self.prev_token; let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; @@ -2406,8 +2417,8 @@ impl<'a> Parser<'a> { FnRetTy::Default(_) => { let restrictions = self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; - let prev = self.prev_token.clone(); - let token = self.token.clone(); + let prev = self.prev_token; + let token = self.token; let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { Ok((expr, _)) => expr, @@ -2472,7 +2483,7 @@ impl<'a> Parser<'a> { if self.may_recover() && self.token.can_begin_expr() && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace)) - && !self.token.is_whole_block() + && !self.token.is_metavar_block() { let snapshot = self.create_snapshot_for_diagnostic(); let restrictions = @@ -2654,7 +2665,7 @@ impl<'a> Parser<'a> { } } else { let attrs = self.parse_outer_attributes()?; // For recovery. - let maybe_fatarrow = self.token.clone(); + let maybe_fatarrow = self.token; let block = if self.check(exp!(OpenBrace)) { self.parse_block()? } else if let Some(block) = recover_block_from_condition(self) { @@ -3519,7 +3530,7 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw::Do) && self.is_keyword_ahead(1, &[kw::Catch]) && self - .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()) && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) } @@ -3530,7 +3541,7 @@ impl<'a> Parser<'a> { fn is_try_block(&self) -> bool { self.token.is_keyword(kw::Try) && self - .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()) && self.token_uninterpolated_span().at_least_rust_2018() } @@ -3564,12 +3575,12 @@ impl<'a> Parser<'a> { // `async move {` self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use]) && self.look_ahead(lookahead + 2, |t| { - *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block() + *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block() }) ) || ( // `async {` self.look_ahead(lookahead + 1, |t| { - *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block() + *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block() }) )) } @@ -3862,7 +3873,7 @@ impl<'a> Parser<'a> { return Err(this.dcx().create_err(errors::ExpectedStructField { span: this.look_ahead(1, |t| t.span), ident_span: this.token.span, - token: this.look_ahead(1, |t| t.clone()), + token: this.look_ahead(1, |t| *t), })); } let (ident, expr) = if is_shorthand { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 9405b58ab3b..39a0291cb1e 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -602,21 +602,13 @@ impl<'a> Parser<'a> { let polarity = self.parse_polarity(); // Parse both types and 
traits as a type, then reinterpret if necessary. - let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span)); let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt) { let span = self.prev_token.span.between(self.token.span); - self.dcx().emit_err(errors::MissingTraitInTraitImpl { + return Err(self.dcx().create_err(errors::MissingTraitInTraitImpl { span, for_span: span.to(self.token.span), - }); - - P(Ty { - kind: TyKind::Path(None, err_path(span)), - span, - id: DUMMY_NODE_ID, - tokens: None, - }) + })); } else { self.parse_ty_with_generics_recovery(&generics)? }; @@ -657,6 +649,7 @@ impl<'a> Parser<'a> { other => { if let TyKind::ImplTrait(_, bounds) = other && let [bound] = bounds.as_slice() + && let GenericBound::Trait(poly_trait_ref) = bound { // Suggest removing extra `impl` keyword: // `impl<T: Default> impl Default for Wrapper<T>` @@ -666,12 +659,12 @@ impl<'a> Parser<'a> { extra_impl_kw, impl_trait_span: ty_first.span, }); + poly_trait_ref.trait_ref.path.clone() } else { - self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType { - span: ty_first.span, - }); + return Err(self.dcx().create_err( + errors::ExpectedTraitInTraitImplFoundType { span: ty_first.span }, + )); } - err_path(ty_first.span) } }; let trait_ref = TraitRef { path, ref_id: ty_first.id }; @@ -1739,8 +1732,7 @@ impl<'a> Parser<'a> { self.expect_semi()?; body } else { - let err = - errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone()); + let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token); return Err(self.dcx().create_err(err)); }; @@ -2310,7 +2302,7 @@ impl<'a> Parser<'a> { || self.token.is_keyword(kw::Union)) && self.look_ahead(1, |t| t.is_ident()) { - let kw_token = self.token.clone(); + let kw_token = self.token; let kw_str = pprust::token_to_string(&kw_token); let item = self.parse_item(ForceCollect::No)?; let mut item = item.unwrap().span; @@ -2543,7 +2535,7 @@ impl<'a> Parser<'a> { self.expect_semi()?; *sig_hi = self.prev_token.span; (AttrVec::new(), None) - } else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() { + } else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() { self.parse_block_common(self.token.span, BlockCheckMode::Default, None) .map(|(attrs, body)| (attrs, Some(body)))? } else if self.token == token::Eq { @@ -2987,9 +2979,7 @@ impl<'a> Parser<'a> { } match ty { Ok(ty) => { - let ident = Ident::new(kw::Empty, this.prev_token.span); - let bm = BindingMode::NONE; - let pat = this.mk_pat_ident(ty.span, bm, ident); + let pat = this.mk_pat(ty.span, PatKind::Missing); (pat, ty) } // If this is a C-variadic argument and we hit an error, return the error. 
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 3b0861a9942..2221a261b4c 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -13,7 +13,6 @@ mod ty; use std::assert_matches::debug_assert_matches; use std::ops::Range; -use std::sync::Arc; use std::{fmt, mem, slice}; use attr_wrapper::{AttrWrapper, UsePreAttrPos}; @@ -24,8 +23,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{ - self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtExprKind, NtPatKind, - Token, TokenKind, + self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, + TokenKind, }; use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree}; use rustc_ast::util::case::Case; @@ -98,21 +97,6 @@ pub enum ForceCollect { No, } -#[macro_export] -macro_rules! maybe_whole { - ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - #[allow(irrefutable_let_patterns)] // FIXME: temporary - if let token::Interpolated(nt) = &$p.token.kind - && let token::$constructor(x) = &**nt - { - #[allow(unused_mut)] - let mut $x = x.clone(); - $p.bump(); - return Ok($e); - } - }; -} - /// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`. #[macro_export] macro_rules! maybe_recover_from_interpolated_ty_qpath { @@ -342,12 +326,12 @@ impl TokenCursor { // below can be removed. if let Some(tree) = self.curr.curr() { match tree { - &TokenTree::Token(ref token, spacing) => { + &TokenTree::Token(token, spacing) => { debug_assert!(!matches!( token.kind, token::OpenDelim(_) | token::CloseDelim(_) )); - let res = (token.clone(), spacing); + let res = (token, spacing); self.curr.bump(); return res; } @@ -459,7 +443,6 @@ pub fn token_descr(token: &Token) -> String { (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"), (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"), (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"), - (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()), (None, _) => format!("`{s}`"), } } @@ -782,9 +765,16 @@ impl<'a> Parser<'a> { // Recovery is disabled when parsing macro arguments, so it must // also be disabled when reparsing pasted macro arguments, // otherwise we get inconsistent results (e.g. #137874). - let res = self.with_recovery(Recovery::Forbidden, |this| { - f(this).expect("failed to reparse {mv_kind:?}") - }); + let res = self.with_recovery(Recovery::Forbidden, |this| f(this)); + + let res = match res { + Ok(res) => res, + Err(err) => { + // This can occur in unusual error cases, e.g. #139445. + err.delay_as_bug(); + return None; + } + }; if let token::CloseDelim(delim) = self.token.kind && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim @@ -793,7 +783,12 @@ impl<'a> Parser<'a> { self.bump(); Some(res) } else { - panic!("no close delim when reparsing {mv_kind:?}"); + // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248, + // where the reparse attempt of an invalid expr consumed the trailing invisible + // delimiter. 
+ self.dcx() + .span_delayed_bug(self.token.span, "no close delim with reparsing {mv_kind:?}"); + None } } else { None @@ -843,8 +838,10 @@ impl<'a> Parser<'a> { fn check_inline_const(&self, dist: usize) -> bool { self.is_keyword_ahead(dist, &[kw::Const]) && self.look_ahead(dist + 1, |t| match &t.kind { - token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)), token::OpenDelim(Delimiter::Brace) => true, + token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + MetaVarKind::Block, + ))) => true, _ => false, }) } @@ -1390,7 +1387,7 @@ impl<'a> Parser<'a> { // Avoid const blocks and const closures to be parsed as const items if (self.check_const_closure() == is_closure) && !self - .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()) && self.eat_keyword_case(exp!(Const), case) { Const::Yes(self.prev_token_uninterpolated_span()) @@ -1520,7 +1517,7 @@ impl<'a> Parser<'a> { _ => { let prev_spacing = self.token_spacing; self.bump(); - TokenTree::Token(self.prev_token.clone(), prev_spacing) + TokenTree::Token(self.prev_token, prev_spacing) } } } @@ -1706,7 +1703,7 @@ impl<'a> Parser<'a> { dbg_fmt.field("prev_token", &self.prev_token); let mut tokens = vec![]; for i in 0..lookahead { - let tok = self.look_ahead(i, |tok| tok.kind.clone()); + let tok = self.look_ahead(i, |tok| tok.kind); let is_eof = tok == TokenKind::Eof; tokens.push(tok); if is_eof { @@ -1747,7 +1744,6 @@ impl<'a> Parser<'a> { pub fn token_uninterpolated_span(&self) -> Span { match &self.token.kind { token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, - token::Interpolated(nt) => nt.use_span(), token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { self.look_ahead(1, |t| t.span) } @@ -1759,7 +1755,6 @@ impl<'a> Parser<'a> { pub fn prev_token_uninterpolated_span(&self) -> Span { match &self.prev_token.kind { token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, - token::Interpolated(nt) => nt.use_span(), token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { self.look_ahead(0, |t| t.span) } @@ -1816,6 +1811,7 @@ pub enum ParseNtResult { Ident(Ident, IdentIsRaw), Lifetime(Ident, IdentIsRaw), Item(P<ast::Item>), + Block(P<ast::Block>), Stmt(P<ast::Stmt>), Pat(P<ast::Pat>, NtPatKind), Expr(P<ast::Expr>, NtExprKind), @@ -1824,7 +1820,4 @@ pub enum ParseNtResult { Meta(P<ast::AttrItem>), Path(P<ast::Path>), Vis(P<ast::Visibility>), - - /// This variant will eventually be removed, along with `Token::Interpolate`. - Nt(Arc<Nonterminal>), } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index b4e540d670d..b6e89cd7fa4 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -1,14 +1,7 @@ -use std::sync::Arc; - -use rustc_ast::HasTokens; use rustc_ast::ptr::P; -use rustc_ast::token::Nonterminal::*; use rustc_ast::token::NtExprKind::*; use rustc_ast::token::NtPatKind::*; -use rustc_ast::token::{ - self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token, -}; -use rustc_ast_pretty::pprust; +use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, NonterminalKind, Token}; use rustc_errors::PResult; use rustc_span::{Ident, kw}; @@ -45,13 +38,6 @@ impl<'a> Parser<'a> { } } - /// Old variant of `may_be_ident`. Being phased out. 
- fn nt_may_be_ident(nt: &Nonterminal) -> bool { - match nt { - NtBlock(_) => false, - } - } - match kind { // `expr_2021` and earlier NonterminalKind::Expr(Expr2021 { .. }) => { @@ -83,16 +69,12 @@ impl<'a> Parser<'a> { | token::Ident(..) | token::NtIdent(..) | token::NtLifetime(..) - | token::Interpolated(_) | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true, _ => token.can_begin_type(), }, NonterminalKind::Block => match &token.kind { token::OpenDelim(Delimiter::Brace) => true, token::NtLifetime(..) => true, - token::Interpolated(nt) => match &**nt { - NtBlock(_) => true, - }, token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k { MetaVarKind::Block | MetaVarKind::Stmt @@ -112,7 +94,6 @@ impl<'a> Parser<'a> { }, NonterminalKind::Path | NonterminalKind::Meta => match &token.kind { token::PathSep | token::Ident(..) | token::NtIdent(..) => true, - token::Interpolated(nt) => nt_may_be_ident(nt), token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => { may_be_ident(*kind) } @@ -136,110 +117,85 @@ impl<'a> Parser<'a> { // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro, // which requires having captured tokens available. Since we cannot determine // in advance whether or not a proc-macro will be (transitively) invoked, - // we always capture tokens for any `Nonterminal` which needs them. - let mut nt = match kind { + // we always capture tokens for any nonterminal that needs them. + match kind { // Note that TT is treated differently to all the others. - NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())), + NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())), NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? { - Some(item) => return Ok(ParseNtResult::Item(item)), - None => { - return Err(self - .dcx() - .create_err(UnexpectedNonterminal::Item(self.token.span))); - } + Some(item) => Ok(ParseNtResult::Item(item)), + None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))), }, NonterminalKind::Block => { // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`), // the ':block' matcher does not support them - NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?) + Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?)) } NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? { - Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))), + Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))), None => { - return Err(self - .dcx() - .create_err(UnexpectedNonterminal::Statement(self.token.span))); + Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span))) } }, - NonterminalKind::Pat(pat_kind) => { - return Ok(ParseNtResult::Pat( - self.collect_tokens_no_attrs(|this| match pat_kind { - PatParam { .. } => this.parse_pat_no_top_alt(None, None), - PatWithOr => this.parse_pat_no_top_guard( - None, - RecoverComma::No, - RecoverColon::No, - CommaRecoveryMode::EitherTupleOrPipe, - ), - })?, - pat_kind, - )); - } + NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat( + self.collect_tokens_no_attrs(|this| match pat_kind { + PatParam { .. 
} => this.parse_pat_no_top_alt(None, None), + PatWithOr => this.parse_pat_no_top_guard( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + ), + })?, + pat_kind, + )), NonterminalKind::Expr(expr_kind) => { - return Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind)); + Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind)) } NonterminalKind::Literal => { // The `:literal` matcher does not support attributes. - return Ok(ParseNtResult::Literal( + Ok(ParseNtResult::Literal( self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?, - )); - } - NonterminalKind::Ty => { - return Ok(ParseNtResult::Ty( - self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, - )); + )) } - // this could be handled like a token, since it is one + NonterminalKind::Ty => Ok(ParseNtResult::Ty( + self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, + )), + // This could be handled like a token, since it is one. NonterminalKind::Ident => { - return if let Some((ident, is_raw)) = get_macro_ident(&self.token) { + if let Some((ident, is_raw)) = get_macro_ident(&self.token) { self.bump(); Ok(ParseNtResult::Ident(ident, is_raw)) } else { Err(self.dcx().create_err(UnexpectedNonterminal::Ident { span: self.token.span, - token: self.token.clone(), + token: self.token, })) - }; - } - NonterminalKind::Path => { - return Ok(ParseNtResult::Path(P( - self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? - ))); + } } + NonterminalKind::Path => Ok(ParseNtResult::Path(P( + self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? + ))), NonterminalKind::Meta => { - return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))); + Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))) } NonterminalKind::Vis => { - return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| { - this.parse_visibility(FollowedByType::Yes) - })?))); + Ok(ParseNtResult::Vis(P(self + .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))) } NonterminalKind::Lifetime => { // We want to keep `'keyword` parsing, just like `keyword` is still // an ident for nonterminal purposes. - return if let Some((ident, is_raw)) = self.token.lifetime() { + if let Some((ident, is_raw)) = self.token.lifetime() { self.bump(); Ok(ParseNtResult::Lifetime(ident, is_raw)) } else { Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime { span: self.token.span, - token: self.token.clone(), + token: self.token, })) - }; + } } - }; - - // If tokens are supported at all, they should be collected. 
- if matches!(nt.tokens_mut(), Some(None)) { - panic!( - "Missing tokens for nt {:?} at {:?}: {:?}", - nt, - nt.use_span(), - pprust::nonterminal_to_string(&nt) - ); } - - Ok(ParseNtResult::Nt(Arc::new(nt))) } } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 9612f71b2af..d5f469f9aa9 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -363,7 +363,7 @@ impl<'a> Parser<'a> { self.dcx().emit_err(TrailingVertNotAllowed { span: self.token.span, start: lo, - token: self.token.clone(), + token: self.token, note_double_vert: matches!(self.token.kind, token::OrOr), }); self.bump(); @@ -1519,8 +1519,8 @@ impl<'a> Parser<'a> { etc = PatFieldsRest::Rest; let mut etc_sp = self.token.span; if first_etc_and_maybe_comma_span.is_none() { - if let Some(comma_tok) = self - .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None }) + if let Some(comma_tok) = + self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None }) { let nw_span = self .psess diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 9c6830c3672..02883655662 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -273,7 +273,6 @@ impl<'a> Parser<'a> { self.dcx().emit_err(PathSingleColon { span: self.prev_token.span, suggestion: self.prev_token.span.shrink_to_hi(), - type_ascription: self.psess.unstable_features.is_nightly_build(), }); } continue; @@ -348,7 +347,6 @@ impl<'a> Parser<'a> { err = self.dcx().create_err(PathSingleColon { span: self.token.span, suggestion: self.prev_token.span.shrink_to_hi(), - type_ascription: self.psess.unstable_features.is_nightly_build(), }); } // Attempt to find places where a missing `>` might belong. @@ -393,8 +391,8 @@ impl<'a> Parser<'a> { } else { // `(T, U) -> R` - let prev_token_before_parsing = self.prev_token.clone(); - let token_before_parsing = self.token.clone(); + let prev_token_before_parsing = self.prev_token; + let token_before_parsing = self.token; let mut snapshot = None; if self.may_recover() && prev_token_before_parsing == token::PathSep diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 2cd09aa8959..0cc8b605018 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -23,8 +23,8 @@ use super::{ AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode, Trailing, UsePreAttrPos, }; -use crate::errors::MalformedLoopLabel; -use crate::{errors, exp, maybe_whole}; +use crate::errors::{self, MalformedLoopLabel}; +use crate::exp; impl<'a> Parser<'a> { /// Parses a statement. This stops just before trailing semicolons on everything but items. 
@@ -75,10 +75,11 @@ impl<'a> Parser<'a> { let stmt = if self.token.is_keyword(kw::Super) && self.is_keyword_ahead(1, &[kw::Let]) { self.collect_tokens(None, attrs, force_collect, |this, attrs| { + let super_span = this.token.span; this.expect_keyword(exp!(Super))?; - this.psess.gated_spans.gate(sym::super_let, this.prev_token.span); this.expect_keyword(exp!(Let))?; - let local = this.parse_local(attrs)?; // FIXME(mara): implement super let + this.psess.gated_spans.gate(sym::super_let, super_span); + let local = this.parse_local(Some(super_span), attrs)?; let trailing = Trailing::from(capture_semi && this.token == token::Semi); Ok(( this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), @@ -89,7 +90,7 @@ impl<'a> Parser<'a> { } else if self.token.is_keyword(kw::Let) { self.collect_tokens(None, attrs, force_collect, |this, attrs| { this.expect_keyword(exp!(Let))?; - let local = this.parse_local(attrs)?; + let local = this.parse_local(None, attrs)?; let trailing = Trailing::from(capture_semi && this.token == token::Semi); Ok(( this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), @@ -294,7 +295,7 @@ impl<'a> Parser<'a> { force_collect: ForceCollect, ) -> PResult<'a, Stmt> { let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| { - let local = this.parse_local(attrs)?; + let local = this.parse_local(None, attrs)?; // FIXME - maybe capture semicolon in recovery? Ok(( this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), @@ -308,8 +309,8 @@ impl<'a> Parser<'a> { } /// Parses a local variable declaration. - fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> { - let lo = self.prev_token.span; + fn parse_local(&mut self, super_: Option<Span>, attrs: AttrVec) -> PResult<'a, P<Local>> { + let lo = super_.unwrap_or(self.prev_token.span); if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) { self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) }); @@ -411,6 +412,7 @@ impl<'a> Parser<'a> { }; let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span }; Ok(P(ast::Local { + super_, ty, pat, kind, @@ -516,7 +518,11 @@ impl<'a> Parser<'a> { let prev = self.prev_token.span; let sp = self.token.span; let mut e = self.dcx().struct_span_err(sp, msg); - let do_not_suggest_help = self.token.is_keyword(kw::In) || self.token == token::Colon; + self.label_expected_raw_ref(&mut e); + + let do_not_suggest_help = self.token.is_keyword(kw::In) + || self.token == token::Colon + || self.prev_token.is_keyword(kw::Raw); // Check to see if the user has written something like // @@ -694,9 +700,11 @@ impl<'a> Parser<'a> { blk_mode: BlockCheckMode, loop_header: Option<Span>, ) -> PResult<'a, (AttrVec, P<Block>)> { - maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block)); + if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) { + return Ok((AttrVec::new(), block)); + } - let maybe_ident = self.prev_token.clone(); + let maybe_ident = self.prev_token; self.maybe_recover_unexpected_block_label(loop_header); if !self.eat(exp!(OpenBrace)) { return self.error_block_no_opening_brace(); @@ -763,10 +771,6 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - if self.psess.unstable_features.is_nightly_build() { - // FIXME(Nilstrieb): Remove this again after a few months. 
- err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>"); - } } } @@ -909,7 +913,7 @@ impl<'a> Parser<'a> { { if self.token == token::Colon && self.look_ahead(1, |token| { - token.is_whole_block() + token.is_metavar_block() || matches!( token.kind, token::Ident( diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 49ae6cb9b72..2f958f4d492 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -2554,7 +2554,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) { // Do the `assert_eq` outside the closure so that `track_caller` works. // (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure // doesn't give the line number in the test below if the assertion fails.) - let tok = p.look_ahead(dist, |tok| tok.clone()); + let tok = p.look_ahead(dist, |tok| *tok); assert_eq!(kind, tok.kind); } diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 93705da22c4..42ebf26784d 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -7,7 +7,7 @@ use rustc_ast::{ Pinnedness, PolyTraitRef, PreciseCapturingArg, TraitBoundModifiers, TraitObjectSyntax, Ty, TyKind, UnsafeBinderTy, }; -use rustc_errors::{Applicability, PResult}; +use rustc_errors::{Applicability, Diag, PResult}; use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym}; use thin_vec::{ThinVec, thin_vec}; @@ -411,6 +411,9 @@ impl<'a> Parser<'a> { TyKind::Path(None, path) if maybe_bounds => { self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true) } + // For `('a) + …`, we know that `'a` in type position already lead to an error being + // emitted. To reduce output, let's indirectly suppress E0178 (bad `+` in type) and + // other irrelevant consequential errors. TyKind::TraitObject(bounds, TraitObjectSyntax::None) if maybe_bounds && bounds.len() == 1 && !trailing_plus => { @@ -425,12 +428,60 @@ impl<'a> Parser<'a> { } fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> { - let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus()); - let bounds = self.parse_generic_bounds_common(allow_plus)?; - if lt_no_plus { - self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { span: lo }); + // A lifetime only begins a bare trait object type if it is followed by `+`! + if self.token.is_lifetime() && !self.look_ahead(1, |t| t.is_like_plus()) { + // In Rust 2021 and beyond, we assume that the user didn't intend to write a bare trait + // object type with a leading lifetime bound since that seems very unlikely given the + // fact that `dyn`-less trait objects are *semantically* invalid. 
+ if self.psess.edition.at_least_rust_2021() { + let lt = self.expect_lifetime(); + let mut err = self.dcx().struct_span_err(lo, "expected type, found lifetime"); + err.span_label(lo, "expected type"); + return Ok(match self.maybe_recover_ref_ty_no_leading_ampersand(lt, lo, err) { + Ok(ref_ty) => ref_ty, + Err(err) => TyKind::Err(err.emit()), + }); + } + + self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { + span: lo, + suggestion: lo.shrink_to_hi(), + }); + } + Ok(TyKind::TraitObject( + self.parse_generic_bounds_common(allow_plus)?, + TraitObjectSyntax::None, + )) + } + + fn maybe_recover_ref_ty_no_leading_ampersand<'cx>( + &mut self, + lt: Lifetime, + lo: Span, + mut err: Diag<'cx>, + ) -> Result<TyKind, Diag<'cx>> { + if !self.may_recover() { + return Err(err); + } + let snapshot = self.create_snapshot_for_diagnostic(); + let mutbl = self.parse_mutability(); + match self.parse_ty_no_plus() { + Ok(ty) => { + err.span_suggestion_verbose( + lo.shrink_to_lo(), + "you might have meant to write a reference type here", + "&", + Applicability::MaybeIncorrect, + ); + err.emit(); + Ok(TyKind::Ref(Some(lt), MutTy { ty, mutbl })) + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + Err(err) + } } - Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None)) } fn parse_remaining_bounds_path( @@ -547,7 +598,7 @@ impl<'a> Parser<'a> { // Recovery mutbl = Mutability::Mut; - let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing); + let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing); self.bump(); self.bump_with((dyn_tok, dyn_tok_sp)); } @@ -886,7 +937,7 @@ impl<'a> Parser<'a> { /// ``` fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> { let lo = self.token.span; - let leading_token = self.prev_token.clone(); + let leading_token = self.prev_token; let has_parens = self.eat(exp!(OpenParen)); let bound = if self.token.is_lifetime() { |
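The `diagnostics.rs` and `expr.rs` hunks above add a suggestion for `&raw EXPR` written without `const` or `mut`. As a rough illustration (this snippet is not part of the diff; the variable names are invented for the example), the accepted raw-reference forms look like this, while `&raw x` is parsed as an ordinary borrow of a path expression named `raw`, which is the case the new `label_expected_raw_ref` label targets:

```rust
fn main() {
    let mut x = 42;

    // Raw reference expressions must name `const` or `mut` explicitly.
    let p: *const i32 = &raw const x;
    let q: *mut i32 = &raw mut x;

    // `&raw x` (no `const`/`mut`) parses as a shared borrow of a path named
    // `raw`, after which `x` is an unexpected token; the diff records
    // `const`/`mut` as expected tokens there so the error can suggest
    // "`&raw` must be followed by `const` or `mut`".
    // let r = &raw x;

    // SAFETY: both pointers refer to the live local `x`.
    unsafe {
        println!("{} {}", *p, *q);
    }
}
```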

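The `ty.rs` hunk above changes how a lone lifetime in type position is handled on edition 2021 and later: instead of treating it as the start of a bare trait object, the parser reports "expected type, found lifetime" and, when the rest of the input parses as a type, suggests inserting a leading `&`. The sketch below (not part of the diff; the function is invented for illustration) shows roughly the kind of input `maybe_recover_ref_ty_no_leading_ampersand` targets, written in the corrected form the suggestion points to:

```rust
// Writing the parameter as `s: 'a str` (missing `&`) is the case the new
// recovery handles; the suggestion is to turn it into the reference type below.
fn first_word<'a>(s: &'a str) -> &'a str {
    s.split_whitespace().next().unwrap_or("")
}

fn main() {
    println!("{}", first_word("hello world"));
}
```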