Diffstat (limited to 'compiler/rustc_parse/src/parser')
 compiler/rustc_parse/src/parser/attr.rs            |   4
 compiler/rustc_parse/src/parser/attr_wrapper.rs    | 196
 compiler/rustc_parse/src/parser/diagnostics.rs     | 129
 compiler/rustc_parse/src/parser/expr.rs            | 546
 compiler/rustc_parse/src/parser/generics.rs        |   5
 compiler/rustc_parse/src/parser/item.rs            | 408
 compiler/rustc_parse/src/parser/mod.rs             | 473
 compiler/rustc_parse/src/parser/mut_visit/tests.rs |  65
 compiler/rustc_parse/src/parser/nonterminal.rs     | 142
 compiler/rustc_parse/src/parser/pat.rs             |  66
 compiler/rustc_parse/src/parser/path.rs            |  37
 compiler/rustc_parse/src/parser/stmt.rs            |  80
 compiler/rustc_parse/src/parser/tests.rs           |  70
 compiler/rustc_parse/src/parser/token_type.rs      |  28
 compiler/rustc_parse/src/parser/ty.rs              |  85
15 files changed, 1066 insertions(+), 1268 deletions(-)
| diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 53614049f08..41d3889c448 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -1,5 +1,6 @@ use rustc_ast as ast; use rustc_ast::token::{self, MetaVarKind}; +use rustc_ast::tokenstream::ParserRange; use rustc_ast::{Attribute, attr}; use rustc_errors::codes::*; use rustc_errors::{Diag, PResult}; @@ -8,8 +9,7 @@ use thin_vec::ThinVec; use tracing::debug; use super::{ - AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing, - UsePreAttrPos, + AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos, }; use crate::{errors, exp, fluent_generated as fluent}; diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index cff998fa137..44fdf146f9c 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,21 +1,18 @@ use std::borrow::Cow; -use std::{iter, mem}; +use std::mem; -use rustc_ast::token::{Delimiter, Token, TokenKind}; +use rustc_ast::token::Token; use rustc_ast::tokenstream::{ - AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream, - Spacing, ToAttrTokenStream, + AttrsTarget, LazyAttrTokenStream, NodeRange, ParserRange, Spacing, TokenCursor, }; use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::PResult; use rustc_session::parse::ParseSess; -use rustc_span::{DUMMY_SP, Span, sym}; +use rustc_span::{DUMMY_SP, sym}; +use thin_vec::ThinVec; -use super::{ - Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange, - TokenCursor, Trailing, -}; +use super::{Capturing, ForceCollect, Parser, Trailing}; // When collecting tokens, this fully captures the start point. Usually its // just after outer attributes, but occasionally it's before. @@ -94,99 +91,10 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool { }) } -// From a value of this type we can reconstruct the `TokenStream` seen by the -// `f` callback passed to a call to `Parser::collect_tokens`, by -// replaying the getting of the tokens. This saves us producing a `TokenStream` -// if it is never needed, e.g. a captured `macro_rules!` argument that is never -// passed to a proc macro. In practice, token stream creation happens rarely -// compared to calls to `collect_tokens` (see some statistics in #78736) so we -// are doing as little up-front work as possible. -// -// This also makes `Parser` very cheap to clone, since -// there is no intermediate collection buffer to clone. -struct LazyAttrTokenStreamImpl { - start_token: (Token, Spacing), - cursor_snapshot: TokenCursor, - num_calls: u32, - break_last_token: u32, - node_replacements: Box<[NodeReplacement]>, -} - -impl ToAttrTokenStream for LazyAttrTokenStreamImpl { - fn to_attr_token_stream(&self) -> AttrTokenStream { - // The token produced by the final call to `{,inlined_}next` was not - // actually consumed by the callback. The combination of chaining the - // initial token and using `take` produces the desired result - we - // produce an empty `TokenStream` if no calls were made, and omit the - // final token otherwise. 
- let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = iter::once(FlatToken::Token(self.start_token.clone())) - .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) - .take(self.num_calls as usize); - - if self.node_replacements.is_empty() { - make_attr_token_stream(tokens, self.break_last_token) - } else { - let mut tokens: Vec<_> = tokens.collect(); - let mut node_replacements = self.node_replacements.to_vec(); - node_replacements.sort_by_key(|(range, _)| range.0.start); - - #[cfg(debug_assertions)] - for [(node_range, tokens), (next_node_range, next_tokens)] in - node_replacements.array_windows() - { - assert!( - node_range.0.end <= next_node_range.0.start - || node_range.0.end >= next_node_range.0.end, - "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})", - node_range, - tokens, - next_node_range, - next_tokens, - ); - } - - // Process the replace ranges, starting from the highest start - // position and working our way back. If have tokens like: - // - // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }` - // - // Then we will generate replace ranges for both - // the `#[cfg(FALSE)] field: bool` and the entire - // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }` - // - // By starting processing from the replace range with the greatest - // start position, we ensure that any (outer) replace range which - // encloses another (inner) replace range will fully overwrite the - // inner range's replacement. - for (node_range, target) in node_replacements.into_iter().rev() { - assert!( - !node_range.0.is_empty(), - "Cannot replace an empty node range: {:?}", - node_range.0 - ); - - // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus - // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the - // total length of `tokens` constant throughout the replacement process, allowing - // us to do all replacements without adjusting indices. - let target_len = target.is_some() as usize; - tokens.splice( - (node_range.0.start as usize)..(node_range.0.end as usize), - target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain( - iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len), - ), - ); - } - make_attr_token_stream(tokens.into_iter(), self.break_last_token) - } - } -} - impl<'a> Parser<'a> { pub(super) fn collect_pos(&self) -> CollectPos { CollectPos { - start_token: (self.token.clone(), self.token_spacing), + start_token: (self.token, self.token_spacing), cursor_snapshot: self.token_cursor.clone(), start_pos: self.num_bump_calls, } @@ -387,10 +295,10 @@ impl<'a> Parser<'a> { // This is hot enough for `deep-vector` that checking the conditions for an empty iterator // is measurably faster than actually executing the iterator. - let node_replacements: Box<[_]> = if parser_replacements_start == parser_replacements_end + let node_replacements = if parser_replacements_start == parser_replacements_end && inner_attr_parser_replacements.is_empty() { - Box::new([]) + ThinVec::new() } else { // Grab any replace ranges that occur *inside* the current AST node. Convert them // from `ParserRange` form to `NodeRange` form. We will perform the actual @@ -429,13 +337,13 @@ impl<'a> Parser<'a> { // - `attrs`: includes the outer and the inner attr. // - `tokens`: lazy tokens for `g` (with its inner attr deleted). 
- let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl { - start_token: collect_pos.start_token, - cursor_snapshot: collect_pos.cursor_snapshot, + let tokens = LazyAttrTokenStream::new_pending( + collect_pos.start_token, + collect_pos.cursor_snapshot, num_calls, - break_last_token: self.break_last_token, + self.break_last_token, node_replacements, - }); + ); let mut tokens_used = false; // If in "definite capture mode" we need to register a replace range @@ -483,71 +391,6 @@ impl<'a> Parser<'a> { } } -/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an -/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and -/// close delims. -fn make_attr_token_stream( - iter: impl Iterator<Item = FlatToken>, - break_last_token: u32, -) -> AttrTokenStream { - #[derive(Debug)] - struct FrameData { - // This is `None` for the first frame, `Some` for all others. - open_delim_sp: Option<(Delimiter, Span, Spacing)>, - inner: Vec<AttrTokenTree>, - } - // The stack always has at least one element. Storing it separately makes for shorter code. - let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] }; - let mut stack_rest = vec![]; - for flat_token in iter { - match flat_token { - FlatToken::Token((Token { kind: TokenKind::OpenDelim(delim), span }, spacing)) => { - stack_rest.push(mem::replace( - &mut stack_top, - FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] }, - )); - } - FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => { - let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap()); - let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap(); - assert!( - open_delim.eq_ignoring_invisible_origin(&delim), - "Mismatched open/close delims: open={open_delim:?} close={span:?}" - ); - let dspan = DelimSpan::from_pair(open_sp, span); - let dspacing = DelimSpacing::new(open_spacing, spacing); - let stream = AttrTokenStream::new(frame_data.inner); - let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream); - stack_top.inner.push(delimited); - } - FlatToken::Token((token, spacing)) => { - stack_top.inner.push(AttrTokenTree::Token(token, spacing)) - } - FlatToken::AttrsTarget(target) => { - stack_top.inner.push(AttrTokenTree::AttrsTarget(target)) - } - FlatToken::Empty => {} - } - } - - if break_last_token > 0 { - let last_token = stack_top.inner.pop().unwrap(); - if let AttrTokenTree::Token(last_token, spacing) = last_token { - let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap(); - - // Tokens are always ASCII chars, so we can use byte arithmetic here. - let mut first_span = last_token.span.shrink_to_lo(); - first_span = - first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token)); - - stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing)); - } else { - panic!("Unexpected last token {last_token:?}") - } - } - AttrTokenStream::new(stack_top.inner) -} - /// Tokens are needed if: /// - any non-single-segment attributes (other than doc comments) are present, /// e.g. `rustfmt::skip`; or @@ -562,14 +405,3 @@ fn needs_tokens(attrs: &[ast::Attribute]) -> bool { } }) } - -// Some types are used a lot. Make sure they don't unintentionally get bigger. 
-#[cfg(target_pointer_width = "64")] -mod size_asserts { - use rustc_data_structures::static_assert_size; - - use super::*; - // tidy-alphabetical-start - static_assert_size!(LazyAttrTokenStreamImpl, 96); - // tidy-alphabetical-end -} diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index ef044fe9d63..23c8db7bca7 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -4,7 +4,7 @@ use std::ops::{Deref, DerefMut}; use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; +use rustc_ast::token::{self, Lit, LitKind, Token, TokenKind}; use rustc_ast::util::parser::AssocOp; use rustc_ast::{ AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block, @@ -304,10 +304,10 @@ impl<'a> Parser<'a> { TokenKind::Comma, TokenKind::Semi, TokenKind::PathSep, - TokenKind::OpenDelim(Delimiter::Brace), - TokenKind::OpenDelim(Delimiter::Parenthesis), - TokenKind::CloseDelim(Delimiter::Brace), - TokenKind::CloseDelim(Delimiter::Parenthesis), + TokenKind::OpenBrace, + TokenKind::OpenParen, + TokenKind::CloseBrace, + TokenKind::CloseParen, ]; if let TokenKind::DocComment(..) = self.prev_token.kind && valid_follow.contains(&self.token.kind) @@ -322,7 +322,7 @@ impl<'a> Parser<'a> { let mut recovered_ident = None; // we take this here so that the correct original token is retained in // the diagnostic, regardless of eager recovery. - let bad_token = self.token.clone(); + let bad_token = self.token; // suggest prepending a keyword in identifier position with `r#` let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() @@ -382,7 +382,7 @@ impl<'a> Parser<'a> { // if the previous token is a valid keyword // that might use a generic, then suggest a correct // generic placement (later on) - let maybe_keyword = self.prev_token.clone(); + let maybe_keyword = self.prev_token; if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) { // if we have a valid keyword, attempt to parse generics // also obtain the keywords symbol @@ -507,7 +507,7 @@ impl<'a> Parser<'a> { } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) { // The current token is in the same line as the prior token, not recoverable. } else if [token::Comma, token::Colon].contains(&self.token.kind) - && self.prev_token == token::CloseDelim(Delimiter::Parenthesis) + && self.prev_token == token::CloseParen { // Likely typo: The current token is on a new line and is expected to be // `.`, `;`, `?`, or an operator after a close delimiter token. @@ -518,8 +518,7 @@ impl<'a> Parser<'a> { // ^ // https://github.com/rust-lang/rust/issues/72253 } else if self.look_ahead(1, |t| { - t == &token::CloseDelim(Delimiter::Brace) - || t.can_begin_expr() && *t != token::Colon + t == &token::CloseBrace || t.can_begin_expr() && *t != token::Colon }) && [token::Comma, token::Colon].contains(&self.token.kind) { // Likely typo: `,` → `;` or `:` → `;`. 
This is triggered if the current token is @@ -530,14 +529,14 @@ impl<'a> Parser<'a> { // let y = 42; let guar = self.dcx().emit_err(ExpectedSemi { span: self.token.span, - token: self.token.clone(), + token: self.token, unexpected_token_label: None, sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); self.bump(); return Ok(guar); } else if self.look_ahead(0, |t| { - t == &token::CloseDelim(Delimiter::Brace) + t == &token::CloseBrace || ((t.can_begin_expr() || t.can_begin_item()) && t != &token::Semi && t != &token::Pound) @@ -555,7 +554,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let guar = self.dcx().emit_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -609,6 +608,8 @@ impl<'a> Parser<'a> { // FIXME: translation requires list formatting (for `expect`) let mut err = self.dcx().struct_span_err(self.token.span, msg_exp); + self.label_expected_raw_ref(&mut err); + // Look for usages of '=>' where '>=' was probably intended if self.token == token::FatArrow && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le)) @@ -673,8 +674,7 @@ impl<'a> Parser<'a> { // `pub` may be used for an item or `pub(crate)` if self.prev_token.is_ident_named(sym::public) - && (self.token.can_begin_item() - || self.token == TokenKind::OpenDelim(Delimiter::Parenthesis)) + && (self.token.can_begin_item() || self.token == TokenKind::OpenParen) { err.span_suggestion_short( self.prev_token.span, @@ -750,6 +750,25 @@ impl<'a> Parser<'a> { Err(err) } + /// Adds a label when `&raw EXPR` was written instead of `&raw const EXPR`/`&raw mut EXPR`. + /// + /// Given that not all parser diagnostics flow through `expected_one_of_not_found`, this + /// label may need added to other diagnostics emission paths as needed. + pub(super) fn label_expected_raw_ref(&mut self, err: &mut Diag<'_>) { + if self.prev_token.is_keyword(kw::Raw) + && self.expected_token_types.contains(TokenType::KwMut) + && self.expected_token_types.contains(TokenType::KwConst) + && self.token.can_begin_expr() + { + err.span_suggestions( + self.prev_token.span.shrink_to_hi(), + "`&raw` must be followed by `const` or `mut` to be a raw reference expression", + [" const".to_string(), " mut".to_string()], + Applicability::MaybeIncorrect, + ); + } + } + /// Checks if the current token or the previous token are misspelled keywords /// and adds a helpful suggestion. 
fn check_for_misspelled_kw(&self, err: &mut Diag<'_>, expected: &[TokenType]) { @@ -801,7 +820,7 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let mut err = self.dcx().create_err(ExpectedSemi { span, - token: self.token.clone(), + token: self.token, unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); @@ -822,9 +841,7 @@ impl<'a> Parser<'a> { if expr.attrs.len() == 1 { "this attribute" } else { "these attributes" }, ), ); - if self.token == token::Pound - && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket)) - { + if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) { // We have // #[attr] // expr @@ -1016,9 +1033,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, P<Expr>> { err.span_label(lo.to(decl_hi), "while parsing the body of this closure"); let guar = match before.kind { - token::OpenDelim(Delimiter::Brace) - if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => - { + token::OpenBrace if token.kind != token::OpenBrace => { // `{ || () }` should have been `|| { () }` err.multipart_suggestion( "you might have meant to open the body of the closure, instead of enclosing \ @@ -1033,9 +1048,7 @@ impl<'a> Parser<'a> { self.eat_to_tokens(&[exp!(CloseBrace)]); guar } - token::OpenDelim(Delimiter::Parenthesis) - if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => - { + token::OpenParen if token.kind != token::OpenBrace => { // We are within a function call or tuple, we can emit the error // and recover. self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]); @@ -1050,7 +1063,7 @@ impl<'a> Parser<'a> { ); err.emit() } - _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => { + _ if token.kind != token::OpenBrace => { // We don't have a heuristic to correctly identify where the block // should be closed. err.multipart_suggestion_verbose( @@ -1204,7 +1217,7 @@ impl<'a> Parser<'a> { trailing_span = trailing_span.to(self.token.span); self.bump(); } - if self.token == token::OpenDelim(Delimiter::Parenthesis) { + if self.token == token::OpenParen { // Recover from bad turbofish: `foo.collect::Vec<_>()`. segment.args = Some(AngleBracketedArgs { args, span }.into()); @@ -1449,9 +1462,7 @@ impl<'a> Parser<'a> { let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)]; self.consume_tts(1, &modifiers); - if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep] - .contains(&self.token.kind) - { + if !matches!(self.token.kind, token::OpenParen | token::PathSep) { // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the // parser and bail out. self.restore_snapshot(snapshot); @@ -1489,7 +1500,7 @@ impl<'a> Parser<'a> { Err(self.dcx().create_err(err)) } } - } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { + } else if self.token == token::OpenParen { // We have high certainty that this was a bad turbofish at this point. // `foo< bar >(` if let ExprKind::Binary(o, ..) = inner_op.kind @@ -1549,10 +1560,7 @@ impl<'a> Parser<'a> { self.bump(); // `(` // Consume the fn call arguments. 
- let modifiers = [ - (token::OpenDelim(Delimiter::Parenthesis), 1), - (token::CloseDelim(Delimiter::Parenthesis), -1), - ]; + let modifiers = [(token::OpenParen, 1), (token::CloseParen, -1)]; self.consume_tts(1, &modifiers); if self.token == token::Eof { @@ -1636,19 +1644,19 @@ impl<'a> Parser<'a> { self.bump(); // `+` let _bounds = self.parse_generic_bounds()?; - let sum_span = ty.span.to(self.prev_token.span); - let sub = match &ty.kind { TyKind::Ref(_lifetime, mut_ty) => { let lo = mut_ty.ty.span.shrink_to_lo(); let hi = self.prev_token.span.shrink_to_hi(); BadTypePlusSub::AddParen { suggestion: AddParen { lo, hi } } } - TyKind::Ptr(..) | TyKind::BareFn(..) => BadTypePlusSub::ForgotParen { span: sum_span }, - _ => BadTypePlusSub::ExpectPath { span: sum_span }, + TyKind::Ptr(..) | TyKind::BareFn(..) => { + BadTypePlusSub::ForgotParen { span: ty.span.to(self.prev_token.span) } + } + _ => BadTypePlusSub::ExpectPath { span: ty.span }, }; - self.dcx().emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub }); + self.dcx().emit_err(BadTypePlus { span: ty.span, sub }); Ok(()) } @@ -1922,10 +1930,7 @@ impl<'a> Parser<'a> { && self.token == token::Colon && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span)) { - self.dcx().emit_err(ColonAsSemi { - span: self.token.span, - type_ascription: self.psess.unstable_features.is_nightly_build(), - }); + self.dcx().emit_err(ColonAsSemi { span: self.token.span }); self.bump(); return true; } @@ -1960,7 +1965,7 @@ impl<'a> Parser<'a> { fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> { let is_question = self.eat(exp!(Question)); // Handle `await? <expr>`. - let expr = if self.token == token::OpenDelim(Delimiter::Brace) { + let expr = if self.token == token::OpenBrace { // Handle `await { <expr> }`. // This needs to be handled separately from the next arm to avoid // interpreting `await { <expr> }?` as `<expr>?.await`. @@ -1996,9 +2001,7 @@ impl<'a> Parser<'a> { /// If encountering `future.await()`, consumes and emits an error. pub(super) fn recover_from_await_method_call(&mut self) { - if self.token == token::OpenDelim(Delimiter::Parenthesis) - && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) - { + if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) { // future.await() let lo = self.token.span; self.bump(); // ( @@ -2011,9 +2014,7 @@ impl<'a> Parser<'a> { /// /// If encountering `x.use()`, consumes and emits an error. pub(super) fn recover_from_use(&mut self) { - if self.token == token::OpenDelim(Delimiter::Parenthesis) - && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) - { + if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) { // var.use() let lo = self.token.span; self.bump(); // ( @@ -2027,7 +2028,7 @@ impl<'a> Parser<'a> { pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> { let is_try = self.token.is_keyword(kw::Try); let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for ! - let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for ( + let is_open = self.look_ahead(2, |t| t == &token::OpenParen); //check for ( if is_try && is_questionmark && is_open { let lo = self.token.span; @@ -2035,7 +2036,7 @@ impl<'a> Parser<'a> { self.bump(); //remove ! 
let try_span = lo.to(self.token.span); //we take the try!( span self.bump(); //remove ( - let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty + let is_empty = self.token == token::CloseParen; //check if the block is empty self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); //eat the block let hi = self.token.span; self.bump(); //remove ) @@ -2130,7 +2131,7 @@ impl<'a> Parser<'a> { loop { debug!("recover_stmt_ loop {:?}", self.token); match self.token.kind { - token::OpenDelim(Delimiter::Brace) => { + token::OpenBrace => { brace_depth += 1; self.bump(); if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0 @@ -2138,11 +2139,11 @@ impl<'a> Parser<'a> { in_block = true; } } - token::OpenDelim(Delimiter::Bracket) => { + token::OpenBracket => { bracket_depth += 1; self.bump(); } - token::CloseDelim(Delimiter::Brace) => { + token::CloseBrace => { if brace_depth == 0 { debug!("recover_stmt_ return - close delim {:?}", self.token); break; @@ -2154,7 +2155,7 @@ impl<'a> Parser<'a> { break; } } - token::CloseDelim(Delimiter::Bracket) => { + token::CloseBracket => { bracket_depth -= 1; if bracket_depth < 0 { bracket_depth = 0; @@ -2201,12 +2202,10 @@ impl<'a> Parser<'a> { if let token::DocComment(..) = self.token.kind { self.dcx().emit_err(DocCommentOnParamType { span: self.token.span }); self.bump(); - } else if self.token == token::Pound - && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket)) - { + } else if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) { let lo = self.token.span; // Skip every token until next possible arg. - while self.token != token::CloseDelim(Delimiter::Bracket) { + while self.token != token::CloseBracket { self.bump(); } let sp = lo.to(self.token.span); @@ -2225,9 +2224,7 @@ impl<'a> Parser<'a> { // If we find a pattern followed by an identifier, it could be an (incorrect) // C-style parameter declaration. 
if self.check_ident() - && self.look_ahead(1, |t| { - *t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis) - }) + && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseParen) { // `fn foo(String s) {}` let ident = self.parse_ident().unwrap(); @@ -2243,7 +2240,7 @@ impl<'a> Parser<'a> { } else if require_name && (self.token == token::Comma || self.token == token::Lt - || self.token == token::CloseDelim(Delimiter::Parenthesis)) + || self.token == token::CloseParen) { let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)"; @@ -2854,7 +2851,7 @@ impl<'a> Parser<'a> { // Check for `'a : {` if !(self.check_lifetime() && self.look_ahead(1, |t| *t == token::Colon) - && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))) + && self.look_ahead(2, |t| *t == token::OpenBrace)) { return false; } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 92e83577f1b..f3b53971b29 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -4,29 +4,29 @@ use core::mem; use core::ops::{Bound, ControlFlow}; use ast::mut_visit::{self, MutVisitor}; -use ast::token::{IdentIsRaw, MetaVarKind}; +use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, Token, TokenKind}; use rustc_ast::tokenstream::TokenTree; use rustc_ast::util::case::Case; use rustc_ast::util::classify; use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par}; use rustc_ast::visit::{Visitor, walk_expr}; use rustc_ast::{ - self as ast, AnonConst, Arm, AttrStyle, AttrVec, BinOp, BinOpKind, BlockCheckMode, CaptureBy, - ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl, FnRetTy, Label, MacCall, - MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, UnOp, UnsafeBinderCastKind, - YieldKind, + self as ast, AnonConst, Arm, AssignOp, AssignOpKind, AttrStyle, AttrVec, BinOp, BinOpKind, + BlockCheckMode, CaptureBy, ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl, + FnRetTy, Label, MacCall, MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, + UnOp, UnsafeBinderCastKind, YieldKind, }; -use rustc_ast_pretty::pprust; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::{Applicability, Diag, PResult, StashKey, Subdiagnostic}; -use rustc_lexer::unescape::unescape_char; +use rustc_literal_escaper::unescape_char; use rustc_macros::Subdiagnostic; use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error}; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP; +use rustc_span::edition::Edition; use rustc_span::source_map::{self, Spanned}; use rustc_span::{BytePos, ErrorGuaranteed, Ident, Pos, Span, Symbol, kw, sym}; use thin_vec::{ThinVec, thin_vec}; @@ -345,7 +345,7 @@ impl<'a> Parser<'a> { fn error_found_expr_would_be_stmt(&self, lhs: &Expr) { self.dcx().emit_err(errors::FoundExprWouldBeStmt { span: self.token.span, - token: self.token.clone(), + token: self.token, suggestion: ExprParenthesesNeeded::surrounding(lhs.span), }); } @@ -360,7 +360,7 @@ impl<'a> Parser<'a> { ( Some( AssocOp::Binary(BinOpKind::Shr | BinOpKind::Gt | BinOpKind::Ge) - | AssocOp::AssignOp(BinOpKind::Shr), + | 
AssocOp::AssignOp(AssignOpKind::ShrAssign), ), _, ) if self.restrictions.contains(Restrictions::CONST_EXPR) => { @@ -418,7 +418,7 @@ impl<'a> Parser<'a> { cur_op_span: Span, ) -> PResult<'a, P<Expr>> { let rhs = if self.is_at_start_of_range_notation_rhs() { - let maybe_lt = self.token.clone(); + let maybe_lt = self.token; let attrs = self.parse_outer_attributes()?; Some( self.parse_expr_assoc_with(Bound::Excluded(prec), attrs) @@ -437,7 +437,7 @@ impl<'a> Parser<'a> { fn is_at_start_of_range_notation_rhs(&self) -> bool { if self.token.can_begin_expr() { // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. - if self.token == token::OpenDelim(Delimiter::Brace) { + if self.token == token::OpenBrace { return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); } true @@ -543,8 +543,8 @@ impl<'a> Parser<'a> { } // Recover from `++x`: token::Plus if this.look_ahead(1, |t| *t == token::Plus) => { - let starts_stmt = this.prev_token == token::Semi - || this.prev_token == token::CloseDelim(Delimiter::Brace); + let starts_stmt = + this.prev_token == token::Semi || this.prev_token == token::CloseBrace; let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span)); // Eat both `+`s. this.bump(); @@ -605,14 +605,14 @@ impl<'a> Parser<'a> { // can't continue an expression after an ident token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) | token::Pound => true, - _ => t.is_whole_expr(), + _ => t.is_metavar_expr(), }; self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr) } /// Recover on `not expr` in favor of `!expr`. fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { - let negated_token = self.look_ahead(1, |t| t.clone()); + let negated_token = self.look_ahead(1, |t| *t); let sub_diag = if negated_token.is_numeric_lit() { errors::NotAsNegationOperatorSub::SuggestNotBitwise @@ -638,7 +638,12 @@ impl<'a> Parser<'a> { /// Returns the span of expr if it was not interpolated, or the span of the interpolated token. fn interpolated_or_expr_span(&self, expr: &Expr) -> Span { match self.prev_token.kind { - TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => { + token::NtIdent(..) | token::NtLifetime(..) => self.prev_token.span, + token::CloseInvisible(InvisibleOrigin::MetaVar(_)) => { + // `expr.span` is the interpolated span, because invisible open + // and close delims both get marked with the same span, one + // that covers the entire thing between them. (See + // `rustc_expand::mbe::transcribe::transcribe`.) self.prev_token.span } _ => expr.span, @@ -823,6 +828,18 @@ impl<'a> Parser<'a> { if let Some(lt) = lifetime { self.error_remove_borrow_lifetime(span, lt.ident.span.until(expr.span)); } + + // Add expected tokens if we parsed `&raw` as an expression. + // This will make sure we see "expected `const`, `mut`", and + // guides recovery in case we write `&raw expr`. 
+ if borrow_kind == ast::BorrowKind::Ref + && mutbl == ast::Mutability::Not + && matches!(&expr.kind, ExprKind::Path(None, p) if p.is_ident(kw::Raw)) + { + self.expected_token_types.insert(TokenType::KwMut); + self.expected_token_types.insert(TokenType::KwConst); + } + Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) } @@ -896,8 +913,8 @@ impl<'a> Parser<'a> { return Ok(e); } e = match self.token.kind { - token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e), - token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?, + token::OpenParen => self.parse_expr_fn_call(lo, e), + token::OpenBracket => self.parse_expr_index(lo, e)?, _ => return Ok(e), } } @@ -979,12 +996,30 @@ impl<'a> Parser<'a> { } fn error_unexpected_after_dot(&self) { - let actual = pprust::token_to_string(&self.token); + let actual = super::token_descr(&self.token); let span = self.token.span; let sm = self.psess.source_map(); let (span, actual) = match (&self.token.kind, self.subparser_name) { - (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => { - (span.shrink_to_hi(), actual.into()) + (token::Eof, Some(_)) if let Ok(snippet) = sm.span_to_snippet(sm.next_point(span)) => { + (span.shrink_to_hi(), format!("`{}`", snippet)) + } + (token::CloseInvisible(InvisibleOrigin::MetaVar(_)), _) => { + // No need to report an error. This case will only occur when parsing a pasted + // metavariable, and we should have emitted an error when parsing the macro call in + // the first place. E.g. in this code: + // ``` + // macro_rules! m { ($e:expr) => { $e }; } + // + // fn main() { + // let f = 1; + // m!(f.); + // } + // ``` + // we'll get an error "unexpected token: `)` when parsing the `m!(f.)`, so we don't + // want to issue a second error when parsing the expansion `«f.»` (where `«`/`»` + // represent the invisible delimiters). + self.dcx().span_delayed_bug(span, "bad dot expr in metavariable"); + return; } _ => (span, actual), }; @@ -1168,7 +1203,7 @@ impl<'a> Parser<'a> { } } - if matches!(self.token.kind, token::CloseDelim(..) | token::Comma) { + if self.token.kind.close_delim().is_some() || self.token.kind == token::Comma { break; } else if trailing_dot.is_none() { // This loop should only repeat if there is a trailing dot. @@ -1198,7 +1233,7 @@ impl<'a> Parser<'a> { /// Parse a function call expression, `expr(...)`. fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> { - let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) { + let snapshot = if self.token == token::OpenParen { Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) } else { None @@ -1294,7 +1329,7 @@ impl<'a> Parser<'a> { /// Assuming we have just parsed `.`, continue parsing into an expression. 
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { + if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { return Ok(self.mk_await_expr(self_arg, lo)); } @@ -1362,22 +1397,34 @@ impl<'a> Parser<'a> { maybe_recover_from_interpolated_ty_qpath!(self, true); let span = self.token.span; - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - self.bump(); - return Ok(e); - } - token::NtBlock(block) => { - let block = block.clone(); - self.bump(); - return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None))); + if let Some(expr) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), + |this| { + // Force collection (as opposed to just `parse_expr`) is required to avoid the + // attribute duplication seen in #138478. + let expr = this.parse_expr_force_collect(); + // FIXME(nnethercote) Sometimes with expressions we get a trailing comma, possibly + // related to the FIXME in `collect_tokens_for_expr`. Examples are the multi-line + // `assert_eq!` calls involving arguments annotated with `#[rustfmt::skip]` in + // `compiler/rustc_index/src/bit_set/tests.rs`. + if this.token.kind == token::Comma { + this.bump(); } - }; - } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| { - this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type)) - }) { + expr + }, + ) { + return Ok(expr); + } else if let Some(lit) = + self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + { + return Ok(lit); + } else if let Some(block) = + self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) + { + return Ok(self.mk_expr(span, ExprKind::Block(block, None))); + } else if let Some(path) = + self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type)) + { return Ok(self.mk_expr(span, ExprKind::Path(None, path))); } @@ -1471,9 +1518,9 @@ impl<'a> Parser<'a> { this.parse_expr_let(restrictions) } else if this.eat_keyword(exp!(Underscore)) { Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore)) - } else if this.token.uninterpolated_span().at_least_rust_2018() { + } else if this.token_uninterpolated_span().at_least_rust_2018() { // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. 
- if this.token.uninterpolated_span().at_least_rust_2024() + if this.token_uninterpolated_span().at_least_rust_2024() // check for `gen {}` and `gen move {}` // or `async gen {}` and `async gen move {}` && (this.is_gen_block(kw::Gen, 0) @@ -1572,7 +1619,7 @@ impl<'a> Parser<'a> { } fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> { - let maybe_eq_tok = self.prev_token.clone(); + let maybe_eq_tok = self.prev_token; let (qself, path) = if self.eat_lt() { let lt_span = self.prev_token.span; let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| { @@ -1630,14 +1677,11 @@ impl<'a> Parser<'a> { self.parse_expr_for(label, lo) } else if self.eat_keyword(exp!(Loop)) { self.parse_expr_loop(label, lo) - } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) - || self.token.is_whole_block() - { + } else if self.check_noexpect(&token::OpenBrace) || self.token.is_metavar_block() { self.parse_expr_block(label, lo, BlockCheckMode::Default) } else if !ate_colon && self.may_recover() - && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma) - || self.token.is_punct()) + && (self.token.kind.close_delim().is_some() || self.token.is_punct()) && could_be_unclosed_char_literal(label_.ident) { let (lit, _) = @@ -1832,19 +1876,21 @@ impl<'a> Parser<'a> { }, }); Some(lexpr) - } else if self.token != token::OpenDelim(Delimiter::Brace) + } else if self.token != token::OpenBrace || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) { let mut expr = self.parse_expr_opt()?; if let Some(expr) = &mut expr { if label.is_some() - && matches!( - expr.kind, + && match &expr.kind { ExprKind::While(_, _, None) - | ExprKind::ForLoop { label: None, .. } - | ExprKind::Loop(_, None, _) - | ExprKind::Block(_, None) - ) + | ExprKind::ForLoop { label: None, .. } + | ExprKind::Loop(_, None, _) => true, + ExprKind::Block(block, None) => { + matches!(block.rules, BlockCheckMode::Default) + } + _ => false, + } { self.psess.buffer_lint( BREAK_WITH_LABEL_AND_LOOP, @@ -1968,7 +2014,7 @@ impl<'a> Parser<'a> { // Eat tokens until the macro call ends. if self.may_recover() { - while !matches!(self.token.kind, token::CloseDelim(..) | token::Eof) { + while !self.token.kind.is_close_delim_or_eof() { self.bump(); } } @@ -2033,7 +2079,7 @@ impl<'a> Parser<'a> { &mut self, mk_lit_char: impl FnOnce(Symbol, Span) -> L, ) -> PResult<'a, L> { - let token = self.token.clone(); + let token = self.token; let err = |self_: &Self| { let msg = format!("unexpected token: {}", super::token_descr(&token)); self_.dcx().struct_span_err(token.span, msg) @@ -2062,87 +2108,110 @@ impl<'a> Parser<'a> { .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char)) } - fn recover_after_dot(&mut self) -> Option<Token> { - let mut recovered = None; + fn recover_after_dot(&mut self) { if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where // dot would follow an optional literal, so we do this unconditionally. - recovered = self.look_ahead(1, |next_token| { + let recovered = self.look_ahead(1, |next_token| { + // If it's an integer that looks like a float, then recover as such. + // + // We will never encounter the exponent part of a floating + // point literal here, since there's no use of the exponent + // syntax that also constitutes a valid integer, so we need + // not check for that. 
if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = next_token.kind + && suffix.is_none_or(|s| s == sym::f32 || s == sym::f64) + && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_') + && self.token.span.hi() == next_token.span.lo() { - // If this integer looks like a float, then recover as such. - // - // We will never encounter the exponent part of a floating - // point literal here, since there's no use of the exponent - // syntax that also constitutes a valid integer, so we need - // not check for that. - if suffix.is_none_or(|s| s == sym::f32 || s == sym::f64) - && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_') - && self.token.span.hi() == next_token.span.lo() - { - let s = String::from("0.") + symbol.as_str(); - let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.token.span.to(next_token.span))); - } + let s = String::from("0.") + symbol.as_str(); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + Some(Token::new(kind, self.token.span.to(next_token.span))) + } else { + None } - None }); - if let Some(token) = &recovered { - self.bump(); + if let Some(recovered) = recovered { self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart { - span: token.span, - suggestion: token.span.shrink_to_lo(), + span: recovered.span, + suggestion: recovered.span.shrink_to_lo(), }); + self.bump(); + self.token = recovered; } } + } - recovered + /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and + /// `Lit::from_token` (excluding unary negation). + fn eat_token_lit(&mut self) -> Option<token::Lit> { + let check_expr = |expr: P<Expr>| { + if let ast::ExprKind::Lit(token_lit) = expr.kind { + Some(token_lit) + } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind + && let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner + { + None + } else { + panic!("unexpected reparsed expr/literal: {:?}", expr.kind); + } + }; + match self.token.uninterpolate().kind { + token::Ident(name, IdentIsRaw::No) if name.is_bool_lit() => { + self.bump(); + Some(token::Lit::new(token::Bool, name, None)) + } + token::Literal(token_lit) => { + self.bump(); + Some(token_lit) + } + token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Literal)) => { + let lit = self + .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + .expect("metavar seq literal"); + check_expr(lit) + } + token::OpenInvisible(InvisibleOrigin::MetaVar( + mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. }, + )) => { + let expr = self + .eat_metavar_seq(mv_kind, |this| this.parse_expr()) + .expect("metavar seq expr"); + check_expr(expr) + } + _ => None, + } } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. - pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> { - let recovered = self.recover_after_dot(); - let token = recovered.as_ref().unwrap_or(&self.token); - let span = token.span; - - token::Lit::from_token(token).map(|token_lit| { - self.bump(); - (token_lit, span) - }) + fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> { + self.recover_after_dot(); + let span = self.token.span; + self.eat_token_lit().map(|token_lit| (token_lit, span)) } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. 
- pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> { - let recovered = self.recover_after_dot(); - let token = recovered.as_ref().unwrap_or(&self.token); - match token::Lit::from_token(token) { - Some(lit) => { - match MetaItemLit::from_token_lit(lit, token.span) { - Ok(lit) => { - self.bump(); - Some(lit) - } - Err(err) => { - let span = token.uninterpolated_span(); - self.bump(); - let guar = report_lit_error(self.psess, err, lit, span); - // Pack possible quotes and prefixes from the original literal into - // the error literal's symbol so they can be pretty-printed faithfully. - let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); - let symbol = Symbol::intern(&suffixless_lit.to_string()); - let lit = token::Lit::new(token::Err(guar), symbol, lit.suffix); - Some( - MetaItemLit::from_token_lit(lit, span) - .unwrap_or_else(|_| unreachable!()), - ) - } + fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> { + self.recover_after_dot(); + let span = self.token.span; + let uninterpolated_span = self.token_uninterpolated_span(); + self.eat_token_lit().map(|token_lit| { + match MetaItemLit::from_token_lit(token_lit, span) { + Ok(lit) => lit, + Err(err) => { + let guar = report_lit_error(&self.psess, err, token_lit, uninterpolated_span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. + let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix); + MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap() } } - None => None, - } + }) } pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) { @@ -2166,9 +2235,10 @@ impl<'a> Parser<'a> { /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). /// Keep this in sync with `Token::can_begin_literal_maybe_minus`. pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - // FIXME(nnethercote) The `NtExpr` case should only match if + if let Some(expr) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), + |this| { + // FIXME(nnethercote) The `expr` case should only match if // `e` is an `ExprKind::Lit` or an `ExprKind::Unary` containing // an `UnOp::Neg` and an `ExprKind::Lit`, like how // `can_begin_literal_maybe_minus` works. But this method has @@ -2178,13 +2248,14 @@ impl<'a> Parser<'a> { // `ExprKind::Path` must be accepted when parsing range // patterns. That requires some care. So for now, we continue // being less strict here than we should be. - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - self.bump(); - return Ok(e); - } - _ => {} - }; + this.parse_expr() + }, + ) { + return Ok(expr); + } else if let Some(lit) = + self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + { + return Ok(lit); } let lo = self.token.span; @@ -2200,7 +2271,9 @@ impl<'a> Parser<'a> { } fn is_array_like_block(&mut self) -> bool { - self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_))) + self.token.kind == TokenKind::OpenBrace + && self + .look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) 
| TokenKind::Literal(_))) && self.look_ahead(2, |t| t == &token::Comma) && self.look_ahead(3, |t| t.can_begin_expr()) } @@ -2212,9 +2285,9 @@ impl<'a> Parser<'a> { let mut snapshot = self.create_snapshot_for_diagnostic(); match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) { Ok(arr) => { - let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces { + let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfBraces { span: arr.span, - sub: errors::ArrayBracketsInsteadOfSpacesSugg { + sub: errors::ArrayBracketsInsteadOfBracesSugg { left: lo, right: snapshot.prev_token.span, }, @@ -2251,8 +2324,8 @@ impl<'a> Parser<'a> { |p| p.parse_expr(), ) { Ok(_) - // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`, - // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`. + // When the close delim is `)`, `token.kind` is expected to be `token::CloseParen`, + // but the actual `token.kind` is `token::CloseBracket`. // This is because the `token.kind` of the close delim is treated as the same as // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different. // Therefore, `token.kind` should not be compared here. @@ -2285,7 +2358,7 @@ impl<'a> Parser<'a> { } } - if self.token.is_whole_block() { + if self.token.is_metavar_block() { self.dcx().emit_err(errors::InvalidBlockMacroSegment { span: self.token.span, context: lo.to(self.token.span), @@ -2310,7 +2383,7 @@ impl<'a> Parser<'a> { fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> { let lo = self.token.span; - let before = self.prev_token.clone(); + let before = self.prev_token; let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; @@ -2328,7 +2401,7 @@ impl<'a> Parser<'a> { let movability = if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; - let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() { + let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) } else { None @@ -2337,23 +2410,21 @@ impl<'a> Parser<'a> { let capture_clause = self.parse_capture_clause()?; let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?; let decl_hi = self.prev_token.span; - let mut body = match fn_decl.output { + let mut body = match &fn_decl.output { + // No return type. FnRetTy::Default(_) => { let restrictions = self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; - let prev = self.prev_token.clone(); - let token = self.token.clone(); + let prev = self.prev_token; + let token = self.token; let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { Ok((expr, _)) => expr, Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?, } } - _ => { - // If an explicit return type is given, require a block to appear (RFC 968). - let body_lo = self.token.span; - self.parse_expr_block(None, body_lo, BlockCheckMode::Default)? - } + // Explicit return type (`->`) needs block `-> T { }`. + FnRetTy::Ty(ty) => self.parse_closure_block_body(ty.span)?, }; match coroutine_kind { @@ -2405,6 +2476,49 @@ impl<'a> Parser<'a> { Ok(closure) } + /// If an explicit return type is given, require a block to appear (RFC 968). 
+ fn parse_closure_block_body(&mut self, ret_span: Span) -> PResult<'a, P<Expr>> { + if self.may_recover() + && self.token.can_begin_expr() + && self.token.kind != TokenKind::OpenBrace + && !self.token.is_metavar_block() + { + let snapshot = self.create_snapshot_for_diagnostic(); + let restrictions = + self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; + let tok = self.token.clone(); + match self.parse_expr_res(restrictions, AttrWrapper::empty()) { + Ok((expr, _)) => { + let descr = super::token_descr(&tok); + let mut diag = self + .dcx() + .struct_span_err(tok.span, format!("expected `{{`, found {descr}")); + diag.span_label( + ret_span, + "explicit return type requires closure body to be enclosed in braces", + ); + diag.multipart_suggestion_verbose( + "wrap the expression in curly braces", + vec![ + (expr.span.shrink_to_lo(), "{ ".to_string()), + (expr.span.shrink_to_hi(), " }".to_string()), + ], + Applicability::MachineApplicable, + ); + diag.emit(); + return Ok(expr); + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + } + } + } + + let body_lo = self.token.span; + self.parse_expr_block(None, body_lo, BlockCheckMode::Default) + } + /// Parses an optional `move` or `use` prefix to a closure-like construct. fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> { if self.eat_keyword(exp!(Move)) { @@ -2489,7 +2603,10 @@ impl<'a> Parser<'a> { /// Parses an `if` expression (`if` token already eaten). fn parse_expr_if(&mut self) -> PResult<'a, P<Expr>> { let lo = self.prev_token.span; - let cond = self.parse_expr_cond()?; + // Scoping code checks the top level edition of the `if`; let's match it here. + // The `CondChecker` also checks the edition of the `let` itself, just to make sure. + let let_chains_policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() }; + let cond = self.parse_expr_cond(let_chains_policy)?; self.parse_if_after_cond(lo, cond) } @@ -2549,7 +2666,7 @@ impl<'a> Parser<'a> { } } else { let attrs = self.parse_outer_attributes()?; // For recovery. - let maybe_fatarrow = self.token.clone(); + let maybe_fatarrow = self.token; let block = if self.check(exp!(OpenBrace)) { self.parse_block()? } else if let Some(block) = recover_block_from_condition(self) { @@ -2598,18 +2715,17 @@ impl<'a> Parser<'a> { } /// Parses the condition of a `if` or `while` expression. + /// + /// The specified `edition` in `let_chains_policy` should be that of the whole `if` construct, + /// i.e. the same span we use to later decide whether the drop behaviour should be that of + /// edition `..=2021` or that of `2024..`. // Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions. - pub fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> { + pub fn parse_expr_cond(&mut self, let_chains_policy: LetChainsPolicy) -> PResult<'a, P<Expr>> { let attrs = self.parse_outer_attributes()?; let (mut cond, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?; - CondChecker::new(self).visit_expr(&mut cond); - - if let ExprKind::Let(_, _, _, Recovered::No) = cond.kind { - // Remove the last feature gating of a `let` expression since it's stable. 
- self.psess.gated_spans.ungate_last(sym::let_chains, cond.span); - } + CondChecker::new(self, let_chains_policy).visit_expr(&mut cond); Ok(cond) } @@ -2769,7 +2885,7 @@ impl<'a> Parser<'a> { } fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> { - let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) { + let begin_paren = if self.token == token::OpenParen { // Record whether we are about to parse `for (`. // This is used below for recovery in case of `for ( $stuff ) $block` // in which case we will suggest `for $stuff $block`. @@ -2803,7 +2919,7 @@ impl<'a> Parser<'a> { return Err(err); } }; - return if self.token == token::CloseDelim(Delimiter::Parenthesis) { + return if self.token == token::CloseParen { // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the // parser state and emit a targeted suggestion. let span = vec![start_span, self.token.span]; @@ -2836,7 +2952,7 @@ impl<'a> Parser<'a> { /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten). fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> { let is_await = - self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)); + self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)); if is_await { self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span); @@ -2847,7 +2963,7 @@ impl<'a> Parser<'a> { let (pat, expr) = self.parse_for_head()?; // Recover from missing expression in `for` loop if matches!(expr.kind, ExprKind::Block(..)) - && !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace)) + && self.token.kind != token::OpenBrace && self.may_recover() { let guar = self @@ -2904,7 +3020,8 @@ impl<'a> Parser<'a> { /// Parses a `while` or `while let` expression (`while` token already eaten). fn parse_expr_while(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> { - let cond = self.parse_expr_cond().map_err(|mut err| { + let policy = LetChainsPolicy::EditionDependent { current_edition: lo.edition() }; + let cond = self.parse_expr_cond(policy).map_err(|mut err| { err.span_label(lo, "while parsing the condition of this `while` expression"); err })?; @@ -2996,7 +3113,7 @@ impl<'a> Parser<'a> { let attrs = self.parse_inner_attributes()?; let mut arms = ThinVec::new(); - while self.token != token::CloseDelim(Delimiter::Brace) { + while self.token != token::CloseBrace { match self.parse_arm() { Ok(arm) => arms.push(arm), Err(e) => { @@ -3004,7 +3121,7 @@ impl<'a> Parser<'a> { let guar = e.emit(); self.recover_stmt(); let span = lo.to(self.token.span); - if self.token == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseBrace { self.bump(); } // Always push at least one arm to make the match non-empty @@ -3065,7 +3182,7 @@ impl<'a> Parser<'a> { // We might have either a `,` -> `;` typo, or a block without braces. We need // a more subtle parsing strategy. loop { - if self.token == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseBrace { // We have reached the closing brace of the `match` expression. 
return Some(err(self, stmts)); } @@ -3124,7 +3241,7 @@ impl<'a> Parser<'a> { // this avoids the compiler saying that a `,` or `}` was expected even though // the pattern isn't a never pattern (and thus an arm body is required) let armless = (!is_fat_arrow && !is_almost_fat_arrow && pat.could_be_never_pattern()) - || matches!(this.token.kind, token::Comma | token::CloseDelim(Delimiter::Brace)); + || matches!(this.token.kind, token::Comma | token::CloseBrace); let mut result = if armless { // A pattern without a body, allowed for never patterns. @@ -3172,8 +3289,8 @@ impl<'a> Parser<'a> { err })?; - let require_comma = !classify::expr_is_complete(&expr) - && this.token != token::CloseDelim(Delimiter::Brace); + let require_comma = + !classify::expr_is_complete(&expr) && this.token != token::CloseBrace; if !require_comma { arm_body = Some(expr); @@ -3288,17 +3405,17 @@ impl<'a> Parser<'a> { } fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<P<Expr>>> { - // Used to check the `let_chains` and `if_let_guard` features mostly by scanning + // Used to check the `if_let_guard` feature mostly by scanning // `&&` tokens. - fn check_let_expr(expr: &Expr) -> (bool, bool) { + fn has_let_expr(expr: &Expr) -> bool { match &expr.kind { ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => { - let lhs_rslt = check_let_expr(lhs); - let rhs_rslt = check_let_expr(rhs); - (lhs_rslt.0 || rhs_rslt.0, false) + let lhs_rslt = has_let_expr(lhs); + let rhs_rslt = has_let_expr(rhs); + lhs_rslt || rhs_rslt } - ExprKind::Let(..) => (true, true), - _ => (false, true), + ExprKind::Let(..) => true, + _ => false, } } if !self.eat_keyword(exp!(If)) { @@ -3309,14 +3426,9 @@ impl<'a> Parser<'a> { let if_span = self.prev_token.span; let mut cond = self.parse_match_guard_condition()?; - CondChecker::new(self).visit_expr(&mut cond); + CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond); - let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond); - if has_let_expr { - if does_not_have_bin_op { - // Remove the last feature gating of a `let` expression since it's stable. - self.psess.gated_spans.ungate_last(sym::let_chains, cond.span); - } + if has_let_expr(&cond) { let span = if_span.to(cond.span); self.psess.gated_spans.gate(sym::if_let_guard, span); } @@ -3324,7 +3436,7 @@ impl<'a> Parser<'a> { } fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> { - if self.token == token::OpenDelim(Delimiter::Parenthesis) { + if self.token == token::OpenParen { let left = self.token.span; let pat = self.parse_pat_no_top_guard( None, @@ -3343,7 +3455,7 @@ impl<'a> Parser<'a> { unreachable!() }; self.psess.gated_spans.ungate_last(sym::guard_patterns, cond.span); - CondChecker::new(self).visit_expr(&mut cond); + CondChecker::new(self, LetChainsPolicy::AlwaysAllowed).visit_expr(&mut cond); let right = self.prev_token.span; self.dcx().emit_err(errors::ParenthesesInMatchPat { span: vec![left, right], @@ -3370,7 +3482,7 @@ impl<'a> Parser<'a> { match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) { Ok((expr, _)) => Ok(expr), Err(mut err) => { - if self.prev_token == token::OpenDelim(Delimiter::Brace) { + if self.prev_token == token::OpenBrace { let sugg_sp = self.prev_token.span.shrink_to_lo(); // Consume everything within the braces, let's avoid further parse // errors. 
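
With the `let_chains` ungating gone, `has_let_expr` only has to decide whether the whole guard span must be gated as `if_let_guard`. A sketch of such a guard (unstable at the time of this diff, hence the feature attribute):

```rust
#![feature(if_let_guard)]

fn classify(input: Option<&str>) -> u32 {
    match input {
        // `has_let_expr` returns true here, so `if_span.to(cond.span)`
        // is gated as `if_let_guard`.
        Some(s) if let Ok(n) = s.parse::<u32>() => n,
        _ => 0,
    }
}
```
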
@@ -3413,8 +3525,7 @@ impl<'a> Parser<'a> {
     fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(kw::Do)
             && self.is_keyword_ahead(1, &[kw::Catch])
-            && self
-                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+            && self.look_ahead(2, |t| *t == token::OpenBrace || t.is_metavar_block())
             && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -3424,9 +3535,8 @@
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try)
-            && self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
-            && self.token.uninterpolated_span().at_least_rust_2018()
+            && self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
+            && self.token_uninterpolated_span().at_least_rust_2018()
     }
 
     /// Parses an `async move? {...}` or `gen move? {...}` expression.
@@ -3459,13 +3569,11 @@
                 // `async move {`
                 self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use])
                     && self.look_ahead(lookahead + 2, |t| {
-                        *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                        *t == token::OpenBrace || t.is_metavar_block()
                     })
             ) || (
                 // `async {`
-                self.look_ahead(lookahead + 1, |t| {
-                    *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
-                })
+                self.look_ahead(lookahead + 1, |t| *t == token::OpenBrace || t.is_metavar_block())
             ))
     }
 
@@ -3589,11 +3697,7 @@
                     AssocOp::from_token(t).is_some()
                         || matches!(
                             t.kind,
-                            token::OpenDelim(
-                                Delimiter::Parenthesis
-                                    | Delimiter::Bracket
-                                    | Delimiter::Brace
-                            )
+                            token::OpenParen | token::OpenBracket | token::OpenBrace
                         )
                         || *t == token::Dot
                 })
@@ -3750,14 +3854,14 @@
                     t == &token::Colon
                         || t == &token::Eq
                         || t == &token::Comma
-                        || t == &token::CloseDelim(Delimiter::Brace)
-                        || t == &token::CloseDelim(Delimiter::Parenthesis)
+                        || t == &token::CloseBrace
+                        || t == &token::CloseParen
                 });
                 if is_wrong {
                     return Err(this.dcx().create_err(errors::ExpectedStructField {
                         span: this.look_ahead(1, |t| t.span),
                         ident_span: this.token.span,
-                        token: this.look_ahead(1, |t| t.clone()),
+                        token: this.look_ahead(1, |t| *t),
                     }));
                 }
                 let (ident, expr) = if is_shorthand {
@@ -3809,8 +3913,8 @@
         self.dcx().emit_err(errors::LeftArrowOperator { span });
     }
 
-    fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
-        ExprKind::AssignOp(binop, lhs, rhs)
+    fn mk_assign_op(&self, assign_op: AssignOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
+        ExprKind::AssignOp(assign_op, lhs, rhs)
     }
 
     fn mk_range(
@@ -3922,7 +4026,14 @@ pub(crate) enum ForbiddenLetReason {
     NotSupportedParentheses(#[primary_span] Span),
 }
 
-/// Visitor to check for invalid/unstable use of `ExprKind::Let` that can't
+/// Whether let chains are allowed in all editions, or only edition-dependently (allowed only on
+/// 2024 and later). In the edition-dependent case, specify the edition of the whole construct.
+pub enum LetChainsPolicy {
+    AlwaysAllowed,
+    EditionDependent { current_edition: Edition },
+}
+
+/// Visitor to check for invalid use of `ExprKind::Let` that can't
/// easily be caught in parsing.
For example: /// /// ```rust,ignore (example) @@ -3933,35 +4044,57 @@ pub(crate) enum ForbiddenLetReason { /// ``` struct CondChecker<'a> { parser: &'a Parser<'a>, + let_chains_policy: LetChainsPolicy, + depth: u32, forbid_let_reason: Option<ForbiddenLetReason>, missing_let: Option<errors::MaybeMissingLet>, comparison: Option<errors::MaybeComparison>, } impl<'a> CondChecker<'a> { - fn new(parser: &'a Parser<'a>) -> Self { - CondChecker { parser, forbid_let_reason: None, missing_let: None, comparison: None } + fn new(parser: &'a Parser<'a>, let_chains_policy: LetChainsPolicy) -> Self { + CondChecker { + parser, + forbid_let_reason: None, + missing_let: None, + comparison: None, + let_chains_policy, + depth: 0, + } } } impl MutVisitor for CondChecker<'_> { fn visit_expr(&mut self, e: &mut P<Expr>) { + self.depth += 1; use ForbiddenLetReason::*; let span = e.span; match e.kind { ExprKind::Let(_, _, _, ref mut recovered @ Recovered::No) => { if let Some(reason) = self.forbid_let_reason { - *recovered = Recovered::Yes(self.parser.dcx().emit_err( - errors::ExpectedExpressionFoundLet { + let error = match reason { + NotSupportedOr(or_span) => { + self.parser.dcx().emit_err(errors::OrInLetChain { span: or_span }) + } + _ => self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet { span, reason, missing_let: self.missing_let, comparison: self.comparison, - }, - )); - } else { - self.parser.psess.gated_spans.gate(sym::let_chains, span); + }), + }; + *recovered = Recovered::Yes(error); + } else if self.depth > 1 { + // Top level `let` is always allowed; only gate chains + match self.let_chains_policy { + LetChainsPolicy::AlwaysAllowed => (), + LetChainsPolicy::EditionDependent { current_edition } => { + if !current_edition.at_least_rust_2024() || !span.at_least_rust_2024() { + self.parser.psess.gated_spans.gate(sym::let_chains, span); + } + } + } } } ExprKind::Binary(Spanned { node: BinOpKind::And, .. }, _, _) => { @@ -4063,5 +4196,6 @@ impl MutVisitor for CondChecker<'_> { // These would forbid any let expressions they contain already. } } + self.depth -= 1; } } diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs index c3f71dd8b30..c05479feb61 100644 --- a/compiler/rustc_parse/src/parser/generics.rs +++ b/compiler/rustc_parse/src/parser/generics.rs @@ -1,4 +1,3 @@ -use ast::token::Delimiter; use rustc_ast::{ self as ast, AttrVec, DUMMY_NODE_ID, GenericBounds, GenericParam, GenericParamKind, TyKind, WhereClause, token, @@ -437,7 +436,7 @@ impl<'a> Parser<'a> { if let Some(struct_) = struct_ && self.may_recover() - && self.token == token::OpenDelim(Delimiter::Parenthesis) + && self.token == token::OpenParen { snapshot = Some((struct_, self.create_snapshot_for_diagnostic())); }; @@ -548,7 +547,7 @@ impl<'a> Parser<'a> { matches!(t.kind, token::Gt | token::Comma | token::Colon | token::Eq) // Recovery-only branch -- this could be removed, // since it only affects diagnostics currently. - || matches!(t.kind, token::Question) + || t.kind == token::Question }) || self.is_keyword_ahead(start + 1, &[kw::Const])) } diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index aad18578375..4be8a90368d 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -34,10 +34,10 @@ impl<'a> Parser<'a> { } /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. 
- fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> { + fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemKind> { let safety = self.parse_safety(Case::Sensitive); self.expect_keyword(exp!(Mod))?; - let id = self.parse_ident()?; + let ident = self.parse_ident()?; let mod_kind = if self.eat(exp!(Semi)) { ModKind::Unloaded } else { @@ -46,7 +46,7 @@ impl<'a> Parser<'a> { attrs.extend(inner_attrs); ModKind::Loaded(items, Inline::Yes, inner_span, Ok(())) }; - Ok((id, ItemKind::Mod(safety, mod_kind))) + Ok(ItemKind::Mod(safety, ident, mod_kind)) } /// Parses the contents of a module (inner attributes followed by module items). @@ -115,8 +115,6 @@ impl<'a> Parser<'a> { } } -pub(super) type ItemInfo = (Ident, ItemKind); - impl<'a> Parser<'a> { pub fn parse_item(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<P<Item>>> { let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true }; @@ -163,11 +161,11 @@ impl<'a> Parser<'a> { fn_parse_mode, Case::Sensitive, )?; - if let Some((ident, kind)) = kind { + if let Some(kind) = kind { this.error_on_unconsumed_default(def, &kind); let span = lo.to(this.prev_token.span); let id = DUMMY_NODE_ID; - let item = Item { ident, attrs, id, kind, vis, span, tokens: None }; + let item = Item { attrs, id, kind, vis, span, tokens: None }; return Ok((Some(item), Trailing::No, UsePreAttrPos::No)); } @@ -208,7 +206,7 @@ impl<'a> Parser<'a> { def: &mut Defaultness, fn_parse_mode: FnParseMode, case: Case, - ) -> PResult<'a, Option<ItemInfo>> { + ) -> PResult<'a, Option<ItemKind>> { let check_pub = def == &Defaultness::Final; let mut def_ = || mem::replace(def, Defaultness::Final); @@ -218,17 +216,15 @@ impl<'a> Parser<'a> { // FUNCTION ITEM let (ident, sig, generics, contract, body) = self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?; - ( + ItemKind::Fn(Box::new(Fn { + defaultness: def_(), ident, - ItemKind::Fn(Box::new(Fn { - defaultness: def_(), - sig, - generics, - contract, - body, - define_opaque: None, - })), - ) + sig, + generics, + contract, + body, + define_opaque: None, + })) } else if self.eat_keyword(exp!(Extern)) { if self.eat_keyword(exp!(Crate)) { // EXTERN CRATE @@ -247,8 +243,7 @@ impl<'a> Parser<'a> { // STATIC ITEM self.bump(); // `static` let mutability = self.parse_mutability(); - let (ident, item) = self.parse_static_item(safety, mutability)?; - (ident, ItemKind::Static(Box::new(item))) + self.parse_static_item(safety, mutability)? 
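
The pattern running through these `item.rs` changes: `parse_item_*` no longer returns an `(Ident, ItemKind)` pair, because the identifier has moved into the `ItemKind` variants themselves. A hypothetical consumer, only to show the new shape (the function name and signature are illustrative, not part of the diff):

```rust
// Hypothetical: the item name now comes out of the variant, not out of
// a separate `item.ident` field.
fn item_name(kind: &ast::ItemKind) -> Option<Ident> {
    match kind {
        ast::ItemKind::Mod(_safety, ident, _mod_kind) => Some(*ident),
        ast::ItemKind::Enum(ident, _enum_def, _generics) => Some(*ident),
        // Some kinds (e.g. `use` declarations, `extern` blocks) have no name.
        _ => None,
    }
}
```
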
} else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) { // CONST ITEM if self.token.is_keyword(kw::Impl) { @@ -258,16 +253,14 @@ impl<'a> Parser<'a> { self.recover_const_mut(const_span); self.recover_missing_kw_before_item()?; let (ident, generics, ty, expr) = self.parse_const_item()?; - ( + ItemKind::Const(Box::new(ConstItem { + defaultness: def_(), ident, - ItemKind::Const(Box::new(ConstItem { - defaultness: def_(), - generics, - ty, - expr, - define_opaque: None, - })), - ) + generics, + ty, + expr, + define_opaque: None, + })) } } else if self.check_keyword(exp!(Trait)) || self.check_auto_or_unsafe_trait_item() { // TRAIT ITEM @@ -334,14 +327,14 @@ impl<'a> Parser<'a> { self.recover_missing_kw_before_item()?; } // MACRO INVOCATION ITEM - (Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?))) + ItemKind::MacCall(P(self.parse_item_macro(vis)?)) } else { return Ok(None); }; Ok(Some(info)) } - fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemInfo>> { + fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemKind>> { let span = self.token.span; let token_name = super::token_descr(&self.token); let snapshot = self.create_snapshot_for_diagnostic(); @@ -359,7 +352,7 @@ impl<'a> Parser<'a> { } } - fn parse_use_item(&mut self) -> PResult<'a, ItemInfo> { + fn parse_use_item(&mut self) -> PResult<'a, ItemKind> { let tree = self.parse_use_tree()?; if let Err(mut e) = self.expect_semi() { match tree.kind { @@ -373,7 +366,7 @@ impl<'a> Parser<'a> { } return Err(e); } - Ok((Ident::empty(), ItemKind::Use(tree))) + Ok(ItemKind::Use(tree)) } /// When parsing a statement, would the start of a path be an item? @@ -406,14 +399,9 @@ impl<'a> Parser<'a> { let insert_span = ident_span.shrink_to_lo(); let ident = if self.token.is_ident() - && (!is_const || self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis))) + && (!is_const || self.look_ahead(1, |t| *t == token::OpenParen)) && self.look_ahead(1, |t| { - [ - token::Lt, - token::OpenDelim(Delimiter::Brace), - token::OpenDelim(Delimiter::Parenthesis), - ] - .contains(&t.kind) + matches!(t.kind, token::Lt | token::OpenBrace | token::OpenParen) }) { self.parse_ident().unwrap() } else { @@ -429,7 +417,7 @@ impl<'a> Parser<'a> { let err = if self.check(exp!(OpenBrace)) { // possible struct or enum definition where `struct` or `enum` was forgotten - if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Brace)) { + if self.look_ahead(1, |t| *t == token::CloseBrace) { // `S {}` could be unit enum or struct Some(errors::MissingKeywordForItemDefinition::EnumOrStruct { span }) } else if self.look_ahead(2, |t| *t == token::Colon) @@ -483,7 +471,7 @@ impl<'a> Parser<'a> { if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) } } - fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> { + fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemKind>> { // To be expanded Ok(None) } @@ -577,7 +565,7 @@ impl<'a> Parser<'a> { &mut self, attrs: &mut AttrVec, defaultness: Defaultness, - ) -> PResult<'a, ItemInfo> { + ) -> PResult<'a, ItemKind> { let safety = self.parse_safety(Case::Sensitive); self.expect_keyword(exp!(Impl))?; @@ -598,7 +586,7 @@ impl<'a> Parser<'a> { } // Parse stray `impl async Trait` - if (self.token.uninterpolated_span().at_least_rust_2018() + if (self.token_uninterpolated_span().at_least_rust_2018() && self.token.is_keyword(kw::Async)) || self.is_kw_followed_by_ident(kw::Async) { @@ -609,21 +597,13 @@ impl<'a> Parser<'a> { let polarity = 
self.parse_polarity(); // Parse both types and traits as a type, then reinterpret if necessary. - let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span)); let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt) { let span = self.prev_token.span.between(self.token.span); - self.dcx().emit_err(errors::MissingTraitInTraitImpl { + return Err(self.dcx().create_err(errors::MissingTraitInTraitImpl { span, for_span: span.to(self.token.span), - }); - - P(Ty { - kind: TyKind::Path(None, err_path(span)), - span, - id: DUMMY_NODE_ID, - tokens: None, - }) + })); } else { self.parse_ty_with_generics_recovery(&generics)? }; @@ -664,6 +644,7 @@ impl<'a> Parser<'a> { other => { if let TyKind::ImplTrait(_, bounds) = other && let [bound] = bounds.as_slice() + && let GenericBound::Trait(poly_trait_ref) = bound { // Suggest removing extra `impl` keyword: // `impl<T: Default> impl Default for Wrapper<T>` @@ -673,12 +654,12 @@ impl<'a> Parser<'a> { extra_impl_kw, impl_trait_span: ty_first.span, }); + poly_trait_ref.trait_ref.path.clone() } else { - self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType { - span: ty_first.span, - }); + return Err(self.dcx().create_err( + errors::ExpectedTraitInTraitImplFoundType { span: ty_first.span }, + )); } - err_path(ty_first.span) } }; let trait_ref = TraitRef { path, ref_id: ty_first.id }; @@ -687,7 +668,7 @@ impl<'a> Parser<'a> { } None => (None, ty_first), // impl Type }; - let item_kind = ItemKind::Impl(Box::new(Impl { + Ok(ItemKind::Impl(Box::new(Impl { safety, polarity, defaultness, @@ -696,12 +677,10 @@ impl<'a> Parser<'a> { of_trait, self_ty, items: impl_items, - })); - - Ok((Ident::empty(), item_kind)) + }))) } - fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> { + fn parse_item_delegation(&mut self) -> PResult<'a, ItemKind> { let span = self.token.span; self.expect_keyword(exp!(Reuse))?; @@ -724,7 +703,7 @@ impl<'a> Parser<'a> { }) }; - let (ident, item_kind) = if self.eat_path_sep() { + let item_kind = if self.eat_path_sep() { let suffixes = if self.eat(exp!(Star)) { None } else { @@ -732,7 +711,7 @@ impl<'a> Parser<'a> { Some(self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), parse_suffix)?.0) }; let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? }; - (Ident::empty(), ItemKind::DelegationMac(Box::new(deleg))) + ItemKind::DelegationMac(Box::new(deleg)) } else { let rename = rename(self)?; let ident = rename.unwrap_or_else(|| path.segments.last().unwrap().ident); @@ -740,17 +719,18 @@ impl<'a> Parser<'a> { id: DUMMY_NODE_ID, qself, path, + ident, rename, body: body(self)?, from_glob: false, }; - (ident, ItemKind::Delegation(Box::new(deleg))) + ItemKind::Delegation(Box::new(deleg)) }; let span = span.to(self.prev_token.span); self.psess.gated_spans.gate(sym::fn_delegation, span); - Ok((ident, item_kind)) + Ok(item_kind) } fn parse_item_list<T>( @@ -779,11 +759,12 @@ impl<'a> Parser<'a> { match parse_item(self) { Ok(None) => { let mut is_unnecessary_semicolon = !items.is_empty() - // When the close delim is `)` in a case like the following, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`, - // but the actual `token.kind` is `token::CloseDelim(Delimiter::Brace)`. - // This is because the `token.kind` of the close delim is treated as the same as - // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different. - // Therefore, `token.kind` should not be compared here. 
+                        // When the close delim is `)` in a case like the following, `token.kind`
+                        // is expected to be `token::CloseParen`, but the actual `token.kind` is
+                        // `token::CloseBrace`. This is because the `token.kind` of the close delim
+                        // is treated as the same as that of the open delim in
+                        // `TokenTreesReader::parse_token_tree`, even if their delimiters are
+                        // different. Therefore, `token.kind` should not be compared here.
                         //
                         // issue-60075.rs
                         // ```
@@ -802,8 +783,8 @@ impl<'a> Parser<'a> {
                 let mut semicolon_span = self.token.span;
                 if !is_unnecessary_semicolon {
                     // #105369, Detect spurious `;` before assoc fn body
-                    is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
-                        && self.prev_token == token::Semi;
+                    is_unnecessary_semicolon =
+                        self.token == token::OpenBrace && self.prev_token == token::Semi;
                     semicolon_span = self.prev_token.span;
                 }
                 // We have to bail or we'll potentially never make progress.
@@ -855,7 +836,7 @@ impl<'a> Parser<'a> {
     /// Recover on a doc comment before `}`.
     fn recover_doc_comment_before_brace(&mut self) -> bool {
         if let token::DocComment(..) = self.token.kind {
-            if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
+            if self.look_ahead(1, |tok| tok == &token::CloseBrace) {
                 // FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
                 struct_span_code_err!(
                     self.dcx(),
@@ -885,7 +866,7 @@ impl<'a> Parser<'a> {
             && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
         {
             self.bump(); // `default`
-            Defaultness::Default(self.prev_token.uninterpolated_span())
+            Defaultness::Default(self.prev_token_uninterpolated_span())
         } else {
             Defaultness::Final
         }
@@ -900,7 +881,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
-    fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
+    fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemKind> {
         let safety = self.parse_safety(Case::Sensitive);
         // Parse optional `auto` prefix.
         let is_auto = if self.eat_keyword(exp!(Auto)) {
@@ -941,15 +922,12 @@ impl<'a> Parser<'a> {
 
         self.psess.gated_spans.gate(sym::trait_alias, whole_span);
 
-        Ok((ident, ItemKind::TraitAlias(generics, bounds)))
+        Ok(ItemKind::TraitAlias(ident, generics, bounds))
         } else {
             // It's a normal trait.
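
For reference, the two shapes `parse_item_trait` distinguishes; the alias form is what `gated_spans.gate(sym::trait_alias, whole_span)` covers (unstable, hence the feature attribute):

```rust
#![feature(trait_alias)]

// Normal trait: the branch below parses a where-clause and an item list.
trait Greet {
    fn hello(&self) -> String;
}

// Trait alias: bounds after `=`, terminated by `;`; the whole
// `trait ... = ...;` span is gated as `trait_alias`.
trait ThreadSafe = Send + Sync;
```
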
generics.where_clause = self.parse_where_clause()?; let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?; - Ok(( - ident, - ItemKind::Trait(Box::new(Trait { is_auto, safety, generics, bounds, items })), - )) + Ok(ItemKind::Trait(Box::new(Trait { is_auto, safety, ident, generics, bounds, items }))) } } @@ -977,11 +955,12 @@ impl<'a> Parser<'a> { force_collect: ForceCollect, ) -> PResult<'a, Option<Option<P<AssocItem>>>> { Ok(self.parse_item_(fn_parse_mode, force_collect)?.map( - |Item { attrs, id, span, vis, ident, kind, tokens }| { + |Item { attrs, id, span, vis, kind, tokens }| { let kind = match AssocItemKind::try_from(kind) { Ok(kind) => kind, Err(kind) => match kind { ItemKind::Static(box StaticItem { + ident, ty, safety: _, mutability: _, @@ -991,6 +970,7 @@ impl<'a> Parser<'a> { self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span }); AssocItemKind::Const(Box::new(ConstItem { defaultness: Defaultness::Final, + ident, generics: Generics::default(), ty, expr, @@ -1000,7 +980,7 @@ impl<'a> Parser<'a> { _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"), }, }; - Some(P(Item { attrs, id, span, vis, ident, kind, tokens })) + Some(P(Item { attrs, id, span, vis, kind, tokens })) }, )) } @@ -1010,7 +990,7 @@ impl<'a> Parser<'a> { /// TypeAlias = "type" Ident Generics (":" GenericBounds)? WhereClause ("=" Ty)? WhereClause ";" ; /// ``` /// The `"type"` has already been eaten. - fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemInfo> { + fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemKind> { let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -1045,16 +1025,14 @@ impl<'a> Parser<'a> { self.expect_semi()?; - Ok(( + Ok(ItemKind::TyAlias(Box::new(TyAlias { + defaultness, ident, - ItemKind::TyAlias(Box::new(TyAlias { - defaultness, - generics, - where_clauses, - bounds, - ty, - })), - )) + generics, + where_clauses, + bounds, + ty, + }))) } /// Parses a `UseTree`. @@ -1158,16 +1136,16 @@ impl<'a> Parser<'a> { /// extern crate foo; /// extern crate bar as foo; /// ``` - fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemInfo> { + fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemKind> { // Accept `extern crate name-like-this` for better diagnostics - let orig_name = self.parse_crate_name_with_dashes()?; - let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? { - (rename, Some(orig_name.name)) + let orig_ident = self.parse_crate_name_with_dashes()?; + let (orig_name, item_ident) = if let Some(rename) = self.parse_rename()? { + (Some(orig_ident.name), rename) } else { - (orig_name, None) + (None, orig_ident) }; self.expect_semi()?; - Ok((item_name, ItemKind::ExternCrate(orig_name))) + Ok(ItemKind::ExternCrate(orig_name, item_ident)) } fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> { @@ -1218,25 +1196,24 @@ impl<'a> Parser<'a> { &mut self, attrs: &mut AttrVec, mut safety: Safety, - ) -> PResult<'a, ItemInfo> { - let extern_span = self.prev_token.uninterpolated_span(); + ) -> PResult<'a, ItemKind> { + let extern_span = self.prev_token_uninterpolated_span(); let abi = self.parse_abi(); // ABI? // FIXME: This recovery should be tested better. 
if safety == Safety::Default && self.token.is_keyword(kw::Unsafe) - && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) + && self.look_ahead(1, |t| *t == token::OpenBrace) { self.expect(exp!(OpenBrace)).unwrap_err().emit(); safety = Safety::Unsafe(self.token.span); let _ = self.eat_keyword(exp!(Unsafe)); } - let module = ast::ForeignMod { + Ok(ItemKind::ForeignMod(ast::ForeignMod { extern_span, safety, abi, items: self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?, - }; - Ok((Ident::empty(), ItemKind::ForeignMod(module))) + })) } /// Parses a foreign item (one in an `extern { ... }` block). @@ -1246,11 +1223,11 @@ impl<'a> Parser<'a> { ) -> PResult<'a, Option<Option<P<ForeignItem>>>> { let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: false }; Ok(self.parse_item_(fn_parse_mode, force_collect)?.map( - |Item { attrs, id, span, vis, ident, kind, tokens }| { + |Item { attrs, id, span, vis, kind, tokens }| { let kind = match ForeignItemKind::try_from(kind) { Ok(kind) => kind, Err(kind) => match kind { - ItemKind::Const(box ConstItem { ty, expr, .. }) => { + ItemKind::Const(box ConstItem { ident, ty, expr, .. }) => { let const_span = Some(span.with_hi(ident.span.lo())) .filter(|span| span.can_be_used_for_suggestions()); self.dcx().emit_err(errors::ExternItemCannotBeConst { @@ -1258,6 +1235,7 @@ impl<'a> Parser<'a> { const_span, }); ForeignItemKind::Static(Box::new(StaticItem { + ident, ty, mutability: Mutability::Not, expr, @@ -1268,7 +1246,7 @@ impl<'a> Parser<'a> { _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"), }, }; - Some(P(Item { attrs, id, span, vis, ident, kind, tokens })) + Some(P(Item { attrs, id, span, vis, kind, tokens })) }, )) } @@ -1301,12 +1279,24 @@ impl<'a> Parser<'a> { } fn is_unsafe_foreign_mod(&self) -> bool { - self.token.is_keyword(kw::Unsafe) - && self.is_keyword_ahead(1, &[kw::Extern]) - && self.look_ahead( - 2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize), - |t| *t == token::OpenDelim(Delimiter::Brace), - ) + // Look for `unsafe`. + if !self.token.is_keyword(kw::Unsafe) { + return false; + } + // Look for `extern`. + if !self.is_keyword_ahead(1, &[kw::Extern]) { + return false; + } + + // Look for the optional ABI string literal. + let n = if self.look_ahead(2, |t| t.can_begin_string_literal()) { 3 } else { 2 }; + + // Look for the `{`. Use `tree_look_ahead` because the ABI (if present) + // might be a metavariable i.e. an invisible-delimited sequence, and + // `tree_look_ahead` will consider that a single element when looking + // ahead. + self.tree_look_ahead(n, |t| matches!(t, TokenTree::Delimited(_, _, Delimiter::Brace, _))) + == Some(true) } fn is_static_global(&mut self) -> bool { @@ -1343,13 +1333,13 @@ impl<'a> Parser<'a> { const_span: Span, attrs: &mut AttrVec, defaultness: Defaultness, - ) -> PResult<'a, ItemInfo> { + ) -> PResult<'a, ItemKind> { let impl_span = self.token.span; let err = self.expected_ident_found_err(); // Only try to recover if this is implementing a trait for a type - let mut impl_info = match self.parse_item_impl(attrs, defaultness) { - Ok(impl_info) => impl_info, + let mut item_kind = match self.parse_item_impl(attrs, defaultness) { + Ok(item_kind) => item_kind, Err(recovery_error) => { // Recovery failed, raise the "expected identifier" error recovery_error.cancel(); @@ -1357,7 +1347,7 @@ impl<'a> Parser<'a> { } }; - match &mut impl_info.1 { + match &mut item_kind { ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. 
}) => { *constness = Const::Yes(const_span); @@ -1374,10 +1364,11 @@ impl<'a> Parser<'a> { _ => unreachable!(), } - Ok(impl_info) + Ok(item_kind) } - /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in `mutability`. + /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in + /// `mutability`. /// /// ```ebnf /// Static = "static" "mut"? $ident ":" $ty (= $expr)? ";" ; @@ -1386,7 +1377,7 @@ impl<'a> Parser<'a> { &mut self, safety: Safety, mutability: Mutability, - ) -> PResult<'a, (Ident, StaticItem)> { + ) -> PResult<'a, ItemKind> { let ident = self.parse_ident()?; if self.token == TokenKind::Lt && self.may_recover() { @@ -1398,7 +1389,8 @@ impl<'a> Parser<'a> { // FIXME: This could maybe benefit from `.may_recover()`? let ty = match (self.eat(exp!(Colon)), self.check(exp!(Eq)) | self.check(exp!(Semi))) { (true, false) => self.parse_ty()?, - // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type. + // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing + // type. (colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)), }; @@ -1406,7 +1398,8 @@ impl<'a> Parser<'a> { self.expect_semi()?; - Ok((ident, StaticItem { ty, safety, mutability, expr, define_opaque: None })) + let item = StaticItem { ident, ty, safety, mutability, expr, define_opaque: None }; + Ok(ItemKind::Static(Box::new(item))) } /// Parse a constant item with the prefix `"const"` already parsed. @@ -1531,7 +1524,7 @@ impl<'a> Parser<'a> { } /// Parses an enum declaration. - fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { + fn parse_item_enum(&mut self) -> PResult<'a, ItemKind> { if self.token.is_keyword(kw::Struct) { let span = self.prev_token.span.to(self.token.span); let err = errors::EnumStructMutuallyExclusive { span }; @@ -1544,7 +1537,7 @@ impl<'a> Parser<'a> { } let prev_span = self.prev_token.span; - let id = self.parse_ident()?; + let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; generics.where_clause = self.parse_where_clause()?; @@ -1555,10 +1548,10 @@ impl<'a> Parser<'a> { (thin_vec![], Trailing::No) } else { self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| { - p.parse_enum_variant(id.span) + p.parse_enum_variant(ident.span) }) .map_err(|mut err| { - err.span_label(id.span, "while parsing this enum"); + err.span_label(ident.span, "while parsing this enum"); if self.token == token::Colon { let snapshot = self.create_snapshot_for_diagnostic(); self.bump(); @@ -1584,7 +1577,7 @@ impl<'a> Parser<'a> { }; let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() }; - Ok((id, ItemKind::Enum(enum_definition, generics))) + Ok(ItemKind::Enum(ident, enum_definition, generics)) } fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> { @@ -1676,8 +1669,8 @@ impl<'a> Parser<'a> { } /// Parses `struct Foo { ... }`. 
- fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> { - let class_name = self.parse_ident()?; + fn parse_item_struct(&mut self) -> PResult<'a, ItemKind> { + let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -1698,7 +1691,7 @@ impl<'a> Parser<'a> { let vdata = if self.token.is_keyword(kw::Where) { let tuple_struct_body; (generics.where_clause, tuple_struct_body) = - self.parse_struct_where_clause(class_name, generics.span)?; + self.parse_struct_where_clause(ident, generics.span)?; if let Some(body) = tuple_struct_body { // If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);` @@ -1712,7 +1705,7 @@ impl<'a> Parser<'a> { // If we see: `struct Foo<T> where T: Copy { ... }` let (fields, recovered) = self.parse_record_struct_body( "struct", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } @@ -1721,31 +1714,30 @@ impl<'a> Parser<'a> { } else if self.eat(exp!(Semi)) { VariantData::Unit(DUMMY_NODE_ID) // Record-style struct definition - } else if self.token == token::OpenDelim(Delimiter::Brace) { + } else if self.token == token::OpenBrace { let (fields, recovered) = self.parse_record_struct_body( "struct", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } // Tuple-style struct definition with optional where-clause. - } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { + } else if self.token == token::OpenParen { let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); generics.where_clause = self.parse_where_clause()?; self.expect_semi()?; body } else { - let err = - errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone()); + let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token); return Err(self.dcx().create_err(err)); }; - Ok((class_name, ItemKind::Struct(vdata, generics))) + Ok(ItemKind::Struct(ident, vdata, generics)) } /// Parses `union Foo { ... }`. 
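
For reference, the struct shapes the function above distinguishes, including the tuple-with-`where` form in which the where-clause is parsed after the body and before the required `;` (all stable Rust):

```rust
struct Unit;
struct Record<T> { value: T }
struct Tuple<T>(T);
// Tuple struct with a trailing where-clause: body first, then
// `parse_where_clause`, then the mandatory semicolon.
struct Bounded<T>(T) where T: Clone;
```
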
- fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> { - let class_name = self.parse_ident()?; + fn parse_item_union(&mut self) -> PResult<'a, ItemKind> { + let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -1753,14 +1745,14 @@ impl<'a> Parser<'a> { generics.where_clause = self.parse_where_clause()?; let (fields, recovered) = self.parse_record_struct_body( "union", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } - } else if self.token == token::OpenDelim(Delimiter::Brace) { + } else if self.token == token::OpenBrace { let (fields, recovered) = self.parse_record_struct_body( "union", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } @@ -1772,7 +1764,7 @@ impl<'a> Parser<'a> { return Err(err); }; - Ok((class_name, ItemKind::Union(vdata, generics))) + Ok(ItemKind::Union(ident, vdata, generics)) } /// This function parses the fields of record structs: @@ -1788,7 +1780,7 @@ impl<'a> Parser<'a> { let mut fields = ThinVec::new(); let mut recovered = Recovered::No; if self.eat(exp!(OpenBrace)) { - while self.token != token::CloseDelim(Delimiter::Brace) { + while self.token != token::CloseBrace { match self.parse_field_def(adt_ty) { Ok(field) => { fields.push(field); @@ -1945,7 +1937,7 @@ impl<'a> Parser<'a> { token::Comma => { self.bump(); } - token::CloseDelim(Delimiter::Brace) => {} + token::CloseBrace => {} token::DocComment(..) => { let previous_span = self.prev_token.span; let mut err = errors::DocCommentDoesNotDocumentAnything { @@ -1959,7 +1951,7 @@ impl<'a> Parser<'a> { if !seen_comma && comma_after_doc_seen { seen_comma = true; } - if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) { + if comma_after_doc_seen || self.token == token::CloseBrace { self.dcx().emit_err(err); } else { if !seen_comma { @@ -1997,7 +1989,7 @@ impl<'a> Parser<'a> { if self.token.is_ident() || (self.token == TokenKind::Pound - && (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket)))) + && (self.look_ahead(1, |t| t == &token::OpenBracket))) { // This is likely another field, TokenKind::Pound is used for `#[..]` // attribute for next field. Emit the diagnostic and continue parsing. @@ -2066,6 +2058,17 @@ impl<'a> Parser<'a> { } self.expect_field_ty_separator()?; let ty = self.parse_ty()?; + if self.token == token::Colon && self.look_ahead(1, |&t| t != token::Colon) { + self.dcx() + .struct_span_err(self.token.span, "found single colon in a struct field type path") + .with_span_suggestion_verbose( + self.token.span, + "write a path separator here", + "::", + Applicability::MaybeIncorrect, + ) + .emit(); + } let default = if self.token == token::Eq { self.bump(); let const_expr = self.parse_expr_anon_const()?; @@ -2124,15 +2127,17 @@ impl<'a> Parser<'a> { } } else if self.eat_keyword(exp!(Struct)) { match self.parse_item_struct() { - Ok((ident, _)) => self - .dcx() - .struct_span_err( - lo.with_hi(ident.span.hi()), - format!("structs are not allowed in {adt_ty} definitions"), - ) - .with_help( - "consider creating a new `struct` definition instead of nesting", - ), + Ok(item) => { + let ItemKind::Struct(ident, ..) 
= item else { unreachable!() }; + self.dcx() + .struct_span_err( + lo.with_hi(ident.span.hi()), + format!("structs are not allowed in {adt_ty} definitions"), + ) + .with_help( + "consider creating a new `struct` definition instead of nesting", + ) + } Err(err) => { err.cancel(); self.restore_snapshot(snapshot); @@ -2177,7 +2182,7 @@ impl<'a> Parser<'a> { /// MacParams = "(" TOKEN_STREAM ")" ; /// DeclMac = "macro" Ident MacParams? MacBody ; /// ``` - fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> { + fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemKind> { let ident = self.parse_ident()?; let body = if self.check(exp!(OpenBrace)) { self.parse_delim_args()? // `MacBody` @@ -2199,7 +2204,7 @@ impl<'a> Parser<'a> { }; self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span)); - Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false }))) + Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: false })) } /// Is this a possibly malformed start of a `macro_rules! foo` item definition? @@ -2228,7 +2233,7 @@ impl<'a> Parser<'a> { &mut self, vis: &Visibility, has_bang: bool, - ) -> PResult<'a, ItemInfo> { + ) -> PResult<'a, ItemKind> { self.expect_keyword(exp!(MacroRules))?; // `macro_rules` if has_bang { @@ -2246,7 +2251,7 @@ impl<'a> Parser<'a> { self.eat_semi_for_macro_if_needed(&body); self.complain_if_pub_macro(vis, true); - Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: true }))) + Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: true })) } /// Item macro invocations or `macro_rules!` definitions need inherited visibility. @@ -2304,7 +2309,7 @@ impl<'a> Parser<'a> { || self.token.is_keyword(kw::Union)) && self.look_ahead(1, |t| t.is_ident()) { - let kw_token = self.token.clone(); + let kw_token = self.token; let kw_str = pprust::token_to_string(&kw_token); let item = self.parse_item(ForceCollect::No)?; let mut item = item.unwrap().span; @@ -2449,7 +2454,7 @@ impl<'a> Parser<'a> { match self.expected_one_of_not_found(&[], expected) { Ok(error_guaranteed) => Ok(error_guaranteed), Err(mut err) => { - if self.token == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseBrace { // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in // the AST for typechecking. err.span_label(ident_span, "while parsing this `fn`"); @@ -2537,7 +2542,7 @@ impl<'a> Parser<'a> { self.expect_semi()?; *sig_hi = self.prev_token.span; (AttrVec::new(), None) - } else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() { + } else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() { self.parse_block_common(self.token.span, BlockCheckMode::Default, None) .map(|(attrs, body)| (attrs, Some(body)))? } else if self.token == token::Eq { @@ -2610,13 +2615,36 @@ impl<'a> Parser<'a> { }) // `extern ABI fn` || self.check_keyword_case(exp!(Extern), case) + // Use `tree_look_ahead` because `ABI` might be a metavariable, + // i.e. an invisible-delimited sequence, and `tree_look_ahead` + // will consider that a single element when looking ahead. && self.look_ahead(1, |t| t.can_begin_string_literal()) - && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) || + && (self.tree_look_ahead(2, |tt| { + match tt { + TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case), + TokenTree::Delimited(..) => false, + } + }) == Some(true) || // This branch is only for better diagnostics; `pub`, `unsafe`, etc. are not // allowed here. 
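
The `tree_look_ahead` calls above exist because the ABI may arrive as one pasted token tree rather than a plain string-literal token; a sketch of both inputs the front-matter check must classify as the start of an `extern ... fn`:

```rust
// Plain form: `extern`, a string literal, then `fn`.
extern "C" fn add(a: i32, b: i32) -> i32 { a + b }

// Metavariable form: `$abi` expands to an invisible-delimited token
// tree, which `tree_look_ahead` treats as a single element.
macro_rules! define_sub {
    ($abi:literal) => {
        extern $abi fn sub(a: i32, b: i32) -> i32 { a - b }
    };
}
define_sub!("C");
```
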
(self.may_recover() - && self.look_ahead(2, |t| ALL_QUALS.iter().any(|exp| t.is_keyword(exp.kw))) - && self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case)))) + && self.tree_look_ahead(2, |tt| { + match tt { + TokenTree::Token(t, _) => + ALL_QUALS.iter().any(|exp| { + t.is_keyword(exp.kw) + }), + TokenTree::Delimited(..) => false, + } + }) == Some(true) + && self.tree_look_ahead(3, |tt| { + match tt { + TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case), + TokenTree::Delimited(..) => false, + } + }) == Some(true) + ) + ) } /// Parses all the "front matter" (or "qualifiers") for a `fn` declaration, @@ -2752,7 +2780,7 @@ impl<'a> Parser<'a> { .expect("Span extracted directly from keyword should always work"); err.span_suggestion( - self.token.uninterpolated_span(), + self.token_uninterpolated_span(), format!("`{original_kw}` already used earlier, remove this one"), "", Applicability::MachineApplicable, @@ -2763,7 +2791,7 @@ impl<'a> Parser<'a> { else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw { let correct_pos_sp = correct_pos_sp.to(self.prev_token.span); if let Ok(current_qual) = self.span_to_snippet(correct_pos_sp) { - let misplaced_qual_sp = self.token.uninterpolated_span(); + let misplaced_qual_sp = self.token_uninterpolated_span(); let misplaced_qual = self.span_to_snippet(misplaced_qual_sp).unwrap(); err.span_suggestion( @@ -2853,7 +2881,7 @@ impl<'a> Parser<'a> { pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> { let mut first_param = true; // Parse the arguments, starting out with `self` being allowed... - if self.token != TokenKind::OpenDelim(Delimiter::Parenthesis) + if self.token != TokenKind::OpenParen // might be typo'd trait impl, handled elsewhere && !self.token.is_keyword(kw::For) { @@ -2871,7 +2899,7 @@ impl<'a> Parser<'a> { // When parsing a param failed, we should check to make the span of the param // not contain '(' before it. // For example when parsing `*mut Self` in function `fn oof(*mut Self)`. - let lo = if let TokenKind::OpenDelim(Delimiter::Parenthesis) = p.prev_token.kind { + let lo = if let TokenKind::OpenParen = p.prev_token.kind { p.prev_token.span.shrink_to_hi() } else { p.prev_token.span @@ -2931,23 +2959,37 @@ impl<'a> Parser<'a> { let parser_snapshot_before_ty = this.create_snapshot_for_diagnostic(); this.eat_incorrect_doc_comment_for_param_type(); let mut ty = this.parse_ty_for_param(); - if ty.is_ok() - && this.token != token::Comma - && this.token != token::CloseDelim(Delimiter::Parenthesis) - { - // This wasn't actually a type, but a pattern looking like a type, - // so we are going to rollback and re-parse for recovery. - ty = this.unexpected_any(); + + if let Ok(t) = &ty { + // Check for trailing angle brackets + if let TyKind::Path(_, Path { segments, .. }) = &t.kind { + if let Some(segment) = segments.last() { + if let Some(guar) = + this.check_trailing_angle_brackets(segment, &[exp!(CloseParen)]) + { + return Ok(( + dummy_arg(segment.ident, guar), + Trailing::No, + UsePreAttrPos::No, + )); + } + } + } + + if this.token != token::Comma && this.token != token::CloseParen { + // This wasn't actually a type, but a pattern looking like a type, + // so we are going to rollback and re-parse for recovery. 
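
The new `check_trailing_angle_brackets` call above lets a parameter list like the following recover with a `dummy_arg` instead of cascading errors; the rejected line is kept in a comment so the snippet compiles:

```rust
// Rejected, but now recovered after reporting the stray bracket:
//     fn take(v: Vec<u8>>) {}
//                      ^ one closing angle bracket too many

// Intended form:
fn take(v: Vec<u8>) {
    drop(v);
}
```
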
+ ty = this.unexpected_any(); + } } match ty { Ok(ty) => { - let ident = Ident::new(kw::Empty, this.prev_token.span); - let bm = BindingMode::NONE; - let pat = this.mk_pat_ident(ty.span, bm, ident); + let pat = this.mk_pat(ty.span, PatKind::Missing); (pat, ty) } // If this is a C-variadic argument and we hit an error, return the error. Err(err) if this.token == token::DotDotDot => return Err(err), + Err(err) if this.unmatched_angle_bracket_count > 0 => return Err(err), // Recover from attempting to parse the argument as a type without pattern. Err(err) => { err.cancel(); @@ -3116,7 +3158,7 @@ impl<'a> Parser<'a> { fn is_named_param(&self) -> bool { let offset = match &self.token.kind { - token::OpenDelim(Delimiter::Invisible(origin)) => match origin { + token::OpenInvisible(origin) => match origin { InvisibleOrigin::MetaVar(MetaVarKind::Pat(_)) => { return self.check_noexpect_past_close_delim(&token::Colon); } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index a79b4048288..968376678f3 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -12,8 +12,6 @@ pub mod token_type; mod ty; use std::assert_matches::debug_assert_matches; -use std::ops::Range; -use std::sync::Arc; use std::{fmt, mem, slice}; use attr_wrapper::{AttrWrapper, UsePreAttrPos}; @@ -24,10 +22,11 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{ - self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtPatKind, Token, - TokenKind, + self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind, +}; +use rustc_ast::tokenstream::{ + ParserRange, ParserReplacement, Spacing, TokenCursor, TokenStream, TokenTree, TokenTreeCursor, }; -use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree}; use rustc_ast::util::case::Case; use rustc_ast::{ self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID, @@ -39,17 +38,14 @@ use rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, Diag, FatalError, MultiSpan, PResult}; use rustc_index::interval::IntervalSet; use rustc_session::parse::ParseSess; -use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym}; +use rustc_span::{Ident, Span, Symbol, kw, sym}; use thin_vec::ThinVec; use token_type::TokenTypeSet; pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType}; use tracing::debug; -use crate::errors::{ - self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral, -}; +use crate::errors::{self, IncorrectVisibilityRestriction, NonStringAbiLiteral}; use crate::exp; -use crate::lexer::UnmatchedDelim; #[cfg(test)] mod tests; @@ -60,19 +56,64 @@ mod tests; mod tokenstream { mod tests; } -#[cfg(test)] -mod mut_visit { - mod tests; -} bitflags::bitflags! { + /// Restrictions applied while parsing. + /// + /// The parser maintains a bitset of restrictions it will honor while + /// parsing. This is essentially used as a way of tracking state of what + /// is being parsed and to change behavior based on that. #[derive(Clone, Copy, Debug)] struct Restrictions: u8 { + /// Restricts expressions for use in statement position. + /// + /// When expressions are used in various places, like statements or + /// match arms, this is used to stop parsing once certain tokens are + /// reached. 
+        ///
+        /// For example, `if true {} & 1` with `STMT_EXPR` in effect is parsed
+        /// as two separate expression statements (`if` and a reference to 1).
+        /// Otherwise it is parsed as a bitwise AND where `if` is on the left
+        /// and 1 is on the right.
         const STMT_EXPR = 1 << 0;
+        /// Do not allow struct literals.
+        ///
+        /// There are several places in the grammar where we don't want to
+        /// allow struct literals because they can require lookahead, or
+        /// otherwise could be ambiguous or cause confusion. For example, in
+        /// `if Foo {} {}` it is unclear whether `Foo {}` is a struct literal
+        /// serving as the condition, or whether `Foo` alone is the condition,
+        /// followed by an empty consequent block and then a second empty
+        /// block.
+        ///
+        /// See [RFC 92](https://rust-lang.github.io/rfcs/0092-struct-grammar.html).
         const NO_STRUCT_LITERAL = 1 << 1;
+        /// Used to provide better error messages for const generic arguments.
+        ///
+        /// An un-braced const generic argument is limited to a very small
+        /// subset of expressions. This is used to detect the situation where
+        /// an expression outside of that subset is used, and to suggest
+        /// wrapping the expression in braces.
         const CONST_EXPR = 1 << 2;
+        /// Allows `let` expressions.
+        ///
+        /// `let pattern = scrutinee` is parsed as an expression, but it is
+        /// only allowed in let chains (`if` and `while` conditions).
+        /// Otherwise it is not an expression (note that `let` in statement
+        /// positions is treated as a `StmtKind::Let` statement, which has a
+        /// slightly different grammar).
         const ALLOW_LET = 1 << 3;
+        /// Used to detect a missing `=>` in a match guard.
+        ///
+        /// This is used for error handling in a match guard to give a better
+        /// error message if the `=>` is missing. It is set when parsing the
+        /// guard expression.
         const IN_IF_GUARD = 1 << 4;
+        /// Used to detect the incorrect use of expressions in patterns.
+        ///
+        /// This is used for error handling while parsing a pattern. During
+        /// error recovery, this will be set to try to parse the pattern as an
+        /// expression, but halts parsing the expression when reaching certain
+        /// tokens like `=`.
         const IS_PAT = 1 << 5;
     }
 }
@@ -98,20 +139,6 @@ pub enum ForceCollect {
     No,
 }
 
-#[macro_export]
-macro_rules! maybe_whole {
-    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        if let token::Interpolated(nt) = &$p.token.kind
-            && let token::$constructor(x) = &**nt
-        {
-            #[allow(unused_mut)]
-            let mut $x = x.clone();
-            $p.bump();
-            return Ok($e);
-        }
-    };
-}
-
 /// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
 #[macro_export]
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
@@ -206,57 +233,6 @@ struct ClosureSpans {
     body: Span,
 }
 
-/// A token range within a `Parser`'s full token stream.
-#[derive(Clone, Debug)]
-struct ParserRange(Range<u32>);
-
-/// A token range within an individual AST node's (lazy) token stream, i.e.
-/// relative to that node's first token. Distinct from `ParserRange` so the two
-/// kinds of range can't be mixed up.
-#[derive(Clone, Debug)]
-struct NodeRange(Range<u32>);
-
-/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
-/// (replacement) or be replaced by nothing (deletion). This is used in two
-/// places during token collection.
-///
-/// 1. Replacement. During the parsing of an AST node that may have a
-///    `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
-///    or `#[cfg_attr]`, we replace the entire inner AST node with
-///    `FlatToken::AttrsTarget`.
This lets us perform eager cfg-expansion on an -/// `AttrTokenStream`. -/// -/// 2. Deletion. We delete inner attributes from all collected token streams, -/// and instead track them through the `attrs` field on the AST node. This -/// lets us manipulate them similarly to outer attributes. When we create a -/// `TokenStream`, the inner attributes are inserted into the proper place -/// in the token stream. -/// -/// Each replacement starts off in `ParserReplacement` form but is converted to -/// `NodeReplacement` form when it is attached to a single AST node, via -/// `LazyAttrTokenStreamImpl`. -type ParserReplacement = (ParserRange, Option<AttrsTarget>); - -/// See the comment on `ParserReplacement`. -type NodeReplacement = (NodeRange, Option<AttrsTarget>); - -impl NodeRange { - // Converts a range within a parser's tokens to a range within a - // node's tokens beginning at `start_pos`. - // - // For example, imagine a parser with 50 tokens in its token stream, a - // function that spans `ParserRange(20..40)` and an inner attribute within - // that function that spans `ParserRange(30..35)`. We would find the inner - // attribute's range within the function's tokens by subtracting 20, which - // is the position of the function's start token. This gives - // `NodeRange(10..15)`. - fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange { - assert!(!parser_range.is_empty()); - assert!(parser_range.start >= start_pos); - NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos)) - } -} - /// Controls how we capture tokens. Capturing can be expensive, /// so we try to avoid performing capturing in cases where /// we will never need an `AttrTokenStream`. @@ -279,103 +255,6 @@ struct CaptureState { seen_attrs: IntervalSet<AttrId>, } -#[derive(Clone, Debug)] -struct TokenTreeCursor { - stream: TokenStream, - /// Points to the current token tree in the stream. In `TokenCursor::curr`, - /// this can be any token tree. In `TokenCursor::stack`, this is always a - /// `TokenTree::Delimited`. - index: usize, -} - -impl TokenTreeCursor { - #[inline] - fn new(stream: TokenStream) -> Self { - TokenTreeCursor { stream, index: 0 } - } - - #[inline] - fn curr(&self) -> Option<&TokenTree> { - self.stream.get(self.index) - } - - #[inline] - fn bump(&mut self) { - self.index += 1; - } -} - -/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that -/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b) -/// use this type to emit them as a linear sequence. But a linear sequence is -/// what the parser expects, for the most part. -#[derive(Clone, Debug)] -struct TokenCursor { - // Cursor for the current (innermost) token stream. The index within the - // cursor can point to any token tree in the stream (or one past the end). - // The delimiters for this token stream are found in `self.stack.last()`; - // if that is `None` we are in the outermost token stream which never has - // delimiters. - curr: TokenTreeCursor, - - // Token streams surrounding the current one. The index within each cursor - // always points to a `TokenTree::Delimited`. - stack: Vec<TokenTreeCursor>, -} - -impl TokenCursor { - fn next(&mut self) -> (Token, Spacing) { - self.inlined_next() - } - - /// This always-inlined version should only be used on hot code paths. - #[inline(always)] - fn inlined_next(&mut self) -> (Token, Spacing) { - loop { - // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. 
To fix
-            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
-            // below can be removed.
-            if let Some(tree) = self.curr.curr() {
-                match tree {
-                    &TokenTree::Token(ref token, spacing) => {
-                        debug_assert!(!matches!(
-                            token.kind,
-                            token::OpenDelim(_) | token::CloseDelim(_)
-                        ));
-                        let res = (token.clone(), spacing);
-                        self.curr.bump();
-                        return res;
-                    }
-                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
-                        let trees = TokenTreeCursor::new(tts.clone());
-                        self.stack.push(mem::replace(&mut self.curr, trees));
-                        if !delim.skip() {
-                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
-                        }
-                        // No open delimiter to return; continue on to the next iteration.
-                    }
-                };
-            } else if let Some(parent) = self.stack.pop() {
-                // We have exhausted this token stream. Move back to its parent token stream.
-                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
-                    panic!("parent should be Delimited")
-                };
-                self.curr = parent;
-                self.curr.bump(); // move past the `Delimited`
-                if !delim.skip() {
-                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
-                }
-                // No close delimiter to return; continue on to the next iteration.
-            } else {
-                // We have exhausted the outermost token stream. The use of
-                // `Spacing::Alone` is arbitrary and immaterial, because the
-                // `Eof` token's spacing is never used.
-                return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
-            }
-        }
-    }
-}
-
 /// A sequence separator.
 #[derive(Debug)]
 struct SeqSep<'a> {
@@ -434,7 +313,7 @@ impl TokenDescription {
             _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
             _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
             token::DocComment(..) => Some(TokenDescription::DocComment),
-            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
+            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
                 Some(TokenDescription::MetaVar(kind))
             }
             _ => None,
@@ -454,7 +333,6 @@ pub fn token_descr(token: &Token) -> String {
         (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
         (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
         (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
-        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
         (None, _) => format!("`{s}`"),
     }
 }
@@ -507,6 +385,14 @@ impl<'a> Parser<'a> {
         self
     }
 
+    #[inline]
+    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
+        let old = mem::replace(&mut self.recovery, recovery);
+        let res = f(self);
+        self.recovery = old;
+        res
+    }
+
     /// Whether the parser is allowed to recover from broken code.
     ///
     /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
@@ -624,9 +510,8 @@ impl<'a> Parser<'a> {
     // past the entire `TokenTree::Delimited` in a single step, avoiding the
     // need for unbounded token lookahead.
     //
-    // Primarily used when `self.token` matches
-    // `OpenDelim(Delimiter::Invisible(_))`, to look ahead through the current
-    // metavar expansion.
+    // Primarily used when `self.token` matches `OpenInvisible(_)`, to look
+    // ahead through the current metavar expansion.
fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
         let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
         tree_cursor.bump();
@@ -760,20 +645,37 @@ impl<'a> Parser<'a> {
         match_mv_kind: impl Fn(MetaVarKind) -> bool,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> Option<T> {
-        if let token::OpenDelim(delim) = self.token.kind
-            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
             && match_mv_kind(mv_kind)
         {
             self.bump();
-            let res = f(self).expect("failed to reparse {mv_kind:?}");
-            if let token::CloseDelim(delim) = self.token.kind
-                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+
+            // Recovery is disabled when parsing macro arguments, so it must
+            // also be disabled when reparsing pasted macro arguments,
+            // otherwise we get inconsistent results (e.g. #137874).
+            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
+
+            let res = match res {
+                Ok(res) => res,
+                Err(err) => {
+                    // This can occur in unusual error cases, e.g. #139445.
+                    err.delay_as_bug();
+                    return None;
+                }
+            };
+
+            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
                 && match_mv_kind(mv_kind)
             {
                 self.bump();
                 Some(res)
             } else {
-                panic!("no close delim when reparsing {mv_kind:?}");
+                // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
+                // where the reparse attempt of an invalid expr consumed the trailing invisible
+                // delimiter.
+                self.dcx()
+                    .span_delayed_bug(self.token.span, format!("no close delim when reparsing {mv_kind:?}"));
+                None
             }
         } else {
             None
@@ -823,8 +725,8 @@ impl<'a> Parser<'a> {
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match &t.kind {
-                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
-                token::OpenDelim(Delimiter::Brace) => true,
+                token::OpenBrace => true,
+                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                 _ => false,
             })
     }
@@ -943,7 +845,7 @@ impl<'a> Parser<'a> {
         let mut v = ThinVec::new();

         while !self.expect_any_with_type(closes_expected, closes_not_expected) {
-            if let token::CloseDelim(..) | token::Eof = self.token.kind {
+            if self.token.kind.is_close_delim_or_eof() {
                 break;
             }
             if let Some(exp) = sep.sep {
@@ -1227,7 +1129,7 @@ impl<'a> Parser<'a> {
             }
             debug_assert!(!matches!(
                 next.0.kind,
-                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
+                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
             ));
             self.inlined_bump_with(next)
         }
@@ -1252,7 +1154,7 @@ impl<'a> Parser<'a> {
                 TokenTree::Token(token, _) => return looker(token),
                 &TokenTree::Delimited(dspan, _, delim, _) => {
                     if !delim.skip() {
-                        return looker(&Token::new(token::OpenDelim(delim), dspan.open));
+                        return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                     }
                 }
            }
@@ -1266,7 +1168,7 @@ impl<'a> Parser<'a> {
                 {
                     // We are not in the outermost token stream, so we have
                     // delimiters. Also, those delimiters are not skipped.
- return looker(&Token::new(token::CloseDelim(delim), span.close)); + return looker(&Token::new(delim.as_close_token_kind(), span.close)); } } } @@ -1281,7 +1183,7 @@ impl<'a> Parser<'a> { token = cursor.next().0; if matches!( token.kind, - token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip() + token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip() ) { continue; } @@ -1290,6 +1192,17 @@ impl<'a> Parser<'a> { looker(&token) } + /// Like `lookahead`, but skips over token trees rather than tokens. Useful + /// when looking past possible metavariable pasting sites. + pub fn tree_look_ahead<R>( + &self, + dist: usize, + looker: impl FnOnce(&TokenTree) -> R, + ) -> Option<R> { + assert_ne!(dist, 0); + self.token_cursor.curr.look_ahead(dist - 1).map(looker) + } + /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) @@ -1297,14 +1210,14 @@ impl<'a> Parser<'a> { /// Parses asyncness: `async` or nothing. fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> { - let span = self.token.uninterpolated_span(); + let span = self.token_uninterpolated_span(); if self.eat_keyword_case(exp!(Async), case) { // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then // error if edition <= 2024, like we do with async and edition <= 2018? - if self.token.uninterpolated_span().at_least_rust_2024() + if self.token_uninterpolated_span().at_least_rust_2024() && self.eat_keyword_case(exp!(Gen), case) { - let gen_span = self.prev_token.uninterpolated_span(); + let gen_span = self.prev_token_uninterpolated_span(); Some(CoroutineKind::AsyncGen { span: span.to(gen_span), closure_id: DUMMY_NODE_ID, @@ -1317,7 +1230,7 @@ impl<'a> Parser<'a> { return_impl_trait_id: DUMMY_NODE_ID, }) } - } else if self.token.uninterpolated_span().at_least_rust_2024() + } else if self.token_uninterpolated_span().at_least_rust_2024() && self.eat_keyword_case(exp!(Gen), case) { Some(CoroutineKind::Gen { @@ -1333,9 +1246,9 @@ impl<'a> Parser<'a> { /// Parses fn unsafety: `unsafe`, `safe` or nothing. fn parse_safety(&mut self, case: Case) -> Safety { if self.eat_keyword_case(exp!(Unsafe), case) { - Safety::Unsafe(self.prev_token.uninterpolated_span()) + Safety::Unsafe(self.prev_token_uninterpolated_span()) } else if self.eat_keyword_case(exp!(Safe), case) { - Safety::Safe(self.prev_token.uninterpolated_span()) + Safety::Safe(self.prev_token_uninterpolated_span()) } else { Safety::Default } @@ -1358,11 +1271,10 @@ impl<'a> Parser<'a> { fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const { // Avoid const blocks and const closures to be parsed as const items if (self.check_const_closure() == is_closure) - && !self - .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) + && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block()) && self.eat_keyword_case(exp!(Const), case) { - Const::Yes(self.prev_token.uninterpolated_span()) + Const::Yes(self.prev_token_uninterpolated_span()) } else { Const::No } @@ -1370,9 +1282,6 @@ impl<'a> Parser<'a> { /// Parses inline const expressions. 
fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> { - if pat { - self.psess.gated_spans.gate(sym::inline_const_pat, span); - } self.expect_keyword(exp!(Const))?; let (attrs, blk) = self.parse_inner_attrs_and_block(None)?; let anon_const = AnonConst { @@ -1380,7 +1289,17 @@ impl<'a> Parser<'a> { value: self.mk_expr(blk.span, ExprKind::Block(blk, None)), }; let blk_span = anon_const.value.span; - Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs)) + let kind = if pat { + let guar = self + .dcx() + .struct_span_err(blk_span, "`inline_const_pat` has been removed") + .with_help("use a named `const`-item or an `if`-guard instead") + .emit(); + ExprKind::Err(guar) + } else { + ExprKind::ConstBlock(anon_const) + }; + Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs)) } /// Parses mutability (`mut` or nothing). @@ -1451,48 +1370,46 @@ impl<'a> Parser<'a> { /// Parses a single token tree from the input. pub fn parse_token_tree(&mut self) -> TokenTree { - match self.token.kind { - token::OpenDelim(..) => { - // Clone the `TokenTree::Delimited` that we are currently - // within. That's what we are going to return. - let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone(); - debug_assert_matches!(tree, TokenTree::Delimited(..)); - - // Advance the token cursor through the entire delimited - // sequence. After getting the `OpenDelim` we are *within* the - // delimited sequence, i.e. at depth `d`. After getting the - // matching `CloseDelim` we are *after* the delimited sequence, - // i.e. at depth `d - 1`. - let target_depth = self.token_cursor.stack.len() - 1; - loop { - // Advance one token at a time, so `TokenCursor::next()` - // can capture these tokens if necessary. - self.bump(); - if self.token_cursor.stack.len() == target_depth { - debug_assert_matches!(self.token.kind, token::CloseDelim(_)); - break; - } - } - - // Consume close delimiter - self.bump(); - tree - } - token::CloseDelim(_) | token::Eof => unreachable!(), - _ => { - let prev_spacing = self.token_spacing; + if self.token.kind.open_delim().is_some() { + // Clone the `TokenTree::Delimited` that we are currently + // within. That's what we are going to return. + let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone(); + debug_assert_matches!(tree, TokenTree::Delimited(..)); + + // Advance the token cursor through the entire delimited + // sequence. After getting the `OpenDelim` we are *within* the + // delimited sequence, i.e. at depth `d`. After getting the + // matching `CloseDelim` we are *after* the delimited sequence, + // i.e. at depth `d - 1`. + let target_depth = self.token_cursor.stack.len() - 1; + loop { + // Advance one token at a time, so `TokenCursor::next()` + // can capture these tokens if necessary. self.bump(); - TokenTree::Token(self.prev_token.clone(), prev_spacing) + if self.token_cursor.stack.len() == target_depth { + debug_assert!(self.token.kind.close_delim().is_some()); + break; + } } + + // Consume close delimiter + self.bump(); + tree + } else { + assert!(!self.token.kind.is_close_delim_or_eof()); + let prev_spacing = self.token_spacing; + self.bump(); + TokenTree::Token(self.prev_token, prev_spacing) } } pub fn parse_tokens(&mut self) -> TokenStream { let mut result = Vec::new(); loop { - match self.token.kind { - token::Eof | token::CloseDelim(..) 
=> break, - _ => result.push(self.parse_token_tree()), + if self.token.kind.is_close_delim_or_eof() { + break; + } else { + result.push(self.parse_token_tree()); } } TokenStream::new(result) @@ -1555,7 +1472,7 @@ impl<'a> Parser<'a> { kind: vis, tokens: None, }); - } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) + } else if self.look_ahead(2, |t| t == &token::CloseParen) && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower]) { // Parse `pub(crate)`, `pub(self)`, or `pub(super)`. @@ -1652,9 +1569,7 @@ impl<'a> Parser<'a> { /// `::{` or `::*` fn is_import_coupler(&mut self) -> bool { - self.check_path_sep_and_look_ahead(|t| { - matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star) - }) + self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star)) } // Debug view of the parser's token stream, up to `{lookahead}` tokens. @@ -1668,7 +1583,7 @@ impl<'a> Parser<'a> { dbg_fmt.field("prev_token", &self.prev_token); let mut tokens = vec![]; for i in 0..lookahead { - let tok = self.look_ahead(i, |tok| tok.kind.clone()); + let tok = self.look_ahead(i, |tok| tok.kind); let is_eof = tok == TokenKind::Eof; tokens.push(tok); if is_eof { @@ -1699,47 +1614,29 @@ impl<'a> Parser<'a> { pub fn approx_token_stream_pos(&self) -> u32 { self.num_bump_calls } -} -pub(crate) fn make_unclosed_delims_error( - unmatched: UnmatchedDelim, - psess: &ParseSess, -) -> Option<Diag<'_>> { - // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to - // `unmatched_delims` only for error recovery in the `Parser`. - let found_delim = unmatched.found_delim?; - let mut spans = vec![unmatched.found_span]; - if let Some(sp) = unmatched.unclosed_span { - spans.push(sp); - }; - let err = psess.dcx().create_err(MismatchedClosingDelimiter { - spans, - delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(), - unmatched: unmatched.found_span, - opening_candidate: unmatched.candidate_span, - unclosed: unmatched.unclosed_span, - }); - Some(err) -} + /// For interpolated `self.token`, returns a span of the fragment to which + /// the interpolated token refers. For all other tokens this is just a + /// regular span. It is particularly important to use this for identifiers + /// and lifetimes for which spans affect name resolution and edition + /// checks. Note that keywords are also identifiers, so they should use + /// this if they keep spans or perform edition checks. + pub fn token_uninterpolated_span(&self) -> Span { + match &self.token.kind { + token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, + token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span), + _ => self.token.span, + } + } -/// A helper struct used when building an `AttrTokenStream` from -/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens -/// are stored as `FlatToken::Token`. A vector of `FlatToken`s -/// is then 'parsed' to build up an `AttrTokenStream` with nested -/// `AttrTokenTree::Delimited` tokens. -#[derive(Debug, Clone)] -enum FlatToken { - /// A token - this holds both delimiter (e.g. '{' and '}') - /// and non-delimiter tokens - Token((Token, Spacing)), - /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted - /// directly into the constructed `AttrTokenStream` as an - /// `AttrTokenTree::AttrsTarget`. - AttrsTarget(AttrsTarget), - /// A special 'empty' token that is ignored during the conversion - /// to an `AttrTokenStream`. 
This is used to simplify the - /// handling of replace ranges. - Empty, + /// Like `token_uninterpolated_span`, but works on `self.prev_token`. + pub fn prev_token_uninterpolated_span(&self) -> Span { + match &self.prev_token.kind { + token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, + token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span), + _ => self.prev_token.span, + } + } } // Metavar captures of various kinds. @@ -1749,13 +1646,13 @@ pub enum ParseNtResult { Ident(Ident, IdentIsRaw), Lifetime(Ident, IdentIsRaw), Item(P<ast::Item>), + Block(P<ast::Block>), Stmt(P<ast::Stmt>), Pat(P<ast::Pat>, NtPatKind), + Expr(P<ast::Expr>, NtExprKind), + Literal(P<ast::Expr>), Ty(P<ast::Ty>), Meta(P<ast::AttrItem>), Path(P<ast::Path>), Vis(P<ast::Visibility>), - - /// This variant will eventually be removed, along with `Token::Interpolate`. - Nt(Arc<Nonterminal>), } diff --git a/compiler/rustc_parse/src/parser/mut_visit/tests.rs b/compiler/rustc_parse/src/parser/mut_visit/tests.rs deleted file mode 100644 index 46c678c3902..00000000000 --- a/compiler/rustc_parse/src/parser/mut_visit/tests.rs +++ /dev/null @@ -1,65 +0,0 @@ -use rustc_ast as ast; -use rustc_ast::mut_visit::MutVisitor; -use rustc_ast_pretty::pprust; -use rustc_span::{Ident, create_default_session_globals_then}; - -use crate::parser::tests::{matches_codepattern, string_to_crate}; - -// This version doesn't care about getting comments or doc-strings in. -fn print_crate_items(krate: &ast::Crate) -> String { - krate.items.iter().map(|i| pprust::item_to_string(i)).collect::<Vec<_>>().join(" ") -} - -// Change every identifier to "zz". -struct ToZzIdentMutVisitor; - -impl MutVisitor for ToZzIdentMutVisitor { - const VISIT_TOKENS: bool = true; - - fn visit_ident(&mut self, ident: &mut Ident) { - *ident = Ident::from_str("zz"); - } -} - -macro_rules! assert_matches_codepattern { - ($a:expr , $b:expr) => {{ - let a_val = $a; - let b_val = $b; - if !matches_codepattern(&a_val, &b_val) { - panic!("expected args satisfying `matches_codepattern`, got {} and {}", a_val, b_val); - } - }}; -} - -// Make sure idents get transformed everywhere. -#[test] -fn ident_transformation() { - create_default_session_globals_then(|| { - let mut zz_visitor = ToZzIdentMutVisitor; - let mut krate = - string_to_crate("#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); - zz_visitor.visit_crate(&mut krate); - assert_matches_codepattern!( - print_crate_items(&krate), - "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string() - ); - }) -} - -// Make sure idents get transformed even inside macro defs. -#[test] -fn ident_transformation_in_defs() { - create_default_session_globals_then(|| { - let mut zz_visitor = ToZzIdentMutVisitor; - let mut krate = string_to_crate( - "macro_rules! a {(b $c:expr $(d $e:token)f+ => \ - (g $(d $d $e)+))} " - .to_string(), - ); - zz_visitor.visit_crate(&mut krate); - assert_matches_codepattern!( - print_crate_items(&krate), - "macro_rules! 
zz{(zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+))}".to_string() - ); - }) -} diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 1123755ce00..7c83e96c160 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -1,14 +1,7 @@ -use std::sync::Arc; - -use rustc_ast::HasTokens; use rustc_ast::ptr::P; -use rustc_ast::token::Nonterminal::*; use rustc_ast::token::NtExprKind::*; use rustc_ast::token::NtPatKind::*; -use rustc_ast::token::{ - self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token, -}; -use rustc_ast_pretty::pprust; +use rustc_ast::token::{self, InvisibleOrigin, MetaVarKind, NonterminalKind, Token}; use rustc_errors::PResult; use rustc_span::{Ident, kw}; @@ -45,17 +38,6 @@ impl<'a> Parser<'a> { } } - /// Old variant of `may_be_ident`. Being phased out. - fn nt_may_be_ident(nt: &Nonterminal) -> bool { - match nt { - NtExpr(_) - | NtLiteral(_) // `true`, `false` - => true, - - NtBlock(_) => false, - } - } - match kind { // `expr_2021` and earlier NonterminalKind::Expr(Expr2021 { .. }) => { @@ -87,17 +69,13 @@ impl<'a> Parser<'a> { | token::Ident(..) | token::NtIdent(..) | token::NtLifetime(..) - | token::Interpolated(_) - | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true, + | token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => true, _ => token.can_begin_type(), }, NonterminalKind::Block => match &token.kind { - token::OpenDelim(Delimiter::Brace) => true, + token::OpenBrace => true, token::NtLifetime(..) => true, - token::Interpolated(nt) => match &**nt { - NtBlock(_) | NtExpr(_) | NtLiteral(_) => true, - }, - token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k { + token::OpenInvisible(InvisibleOrigin::MetaVar(k)) => match k { MetaVarKind::Block | MetaVarKind::Stmt | MetaVarKind::Expr { .. } @@ -116,10 +94,7 @@ impl<'a> Parser<'a> { }, NonterminalKind::Path | NonterminalKind::Meta => match &token.kind { token::PathSep | token::Ident(..) | token::NtIdent(..) => true, - token::Interpolated(nt) => nt_may_be_ident(nt), - token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => { - may_be_ident(*kind) - } + token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => may_be_ident(*kind), _ => false, }, NonterminalKind::Pat(pat_kind) => token.can_begin_pattern(pat_kind), @@ -128,7 +103,7 @@ impl<'a> Parser<'a> { _ => false, }, NonterminalKind::TT | NonterminalKind::Item | NonterminalKind::Stmt => { - !matches!(token.kind, token::CloseDelim(_)) + token.kind.close_delim().is_none() } } } @@ -140,106 +115,85 @@ impl<'a> Parser<'a> { // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro, // which requires having captured tokens available. Since we cannot determine // in advance whether or not a proc-macro will be (transitively) invoked, - // we always capture tokens for any `Nonterminal` which needs them. - let mut nt = match kind { + // we always capture tokens for any nonterminal that needs them. + match kind { // Note that TT is treated differently to all the others. - NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())), + NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())), NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? 
{ - Some(item) => return Ok(ParseNtResult::Item(item)), - None => { - return Err(self - .dcx() - .create_err(UnexpectedNonterminal::Item(self.token.span))); - } + Some(item) => Ok(ParseNtResult::Item(item)), + None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))), }, NonterminalKind::Block => { // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`), // the ':block' matcher does not support them - NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?) + Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?)) } NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? { - Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))), + Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))), None => { - return Err(self - .dcx() - .create_err(UnexpectedNonterminal::Statement(self.token.span))); + Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span))) } }, - NonterminalKind::Pat(pat_kind) => { - return Ok(ParseNtResult::Pat( - self.collect_tokens_no_attrs(|this| match pat_kind { - PatParam { .. } => this.parse_pat_no_top_alt(None, None), - PatWithOr => this.parse_pat_no_top_guard( - None, - RecoverComma::No, - RecoverColon::No, - CommaRecoveryMode::EitherTupleOrPipe, - ), - })?, - pat_kind, - )); + NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat( + self.collect_tokens_no_attrs(|this| match pat_kind { + PatParam { .. } => this.parse_pat_no_top_alt(None, None), + PatWithOr => this.parse_pat_no_top_guard( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + ), + })?, + pat_kind, + )), + NonterminalKind::Expr(expr_kind) => { + Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind)) } - NonterminalKind::Expr(_) => NtExpr(self.parse_expr_force_collect()?), NonterminalKind::Literal => { - // The `:literal` matcher does not support attributes - NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?) + // The `:literal` matcher does not support attributes. + Ok(ParseNtResult::Literal( + self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?, + )) } - NonterminalKind::Ty => { - return Ok(ParseNtResult::Ty( - self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, - )); - } - // this could be handled like a token, since it is one + NonterminalKind::Ty => Ok(ParseNtResult::Ty( + self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?, + )), + // This could be handled like a token, since it is one. NonterminalKind::Ident => { - return if let Some((ident, is_raw)) = get_macro_ident(&self.token) { + if let Some((ident, is_raw)) = get_macro_ident(&self.token) { self.bump(); Ok(ParseNtResult::Ident(ident, is_raw)) } else { Err(self.dcx().create_err(UnexpectedNonterminal::Ident { span: self.token.span, - token: self.token.clone(), + token: self.token, })) - }; - } - NonterminalKind::Path => { - return Ok(ParseNtResult::Path(P( - self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? - ))); + } } + NonterminalKind::Path => Ok(ParseNtResult::Path(P( + self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? 
+ ))), NonterminalKind::Meta => { - return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))); + Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))) } NonterminalKind::Vis => { - return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| { - this.parse_visibility(FollowedByType::Yes) - })?))); + Ok(ParseNtResult::Vis(P(self + .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))) } NonterminalKind::Lifetime => { // We want to keep `'keyword` parsing, just like `keyword` is still // an ident for nonterminal purposes. - return if let Some((ident, is_raw)) = self.token.lifetime() { + if let Some((ident, is_raw)) = self.token.lifetime() { self.bump(); Ok(ParseNtResult::Lifetime(ident, is_raw)) } else { Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime { span: self.token.span, - token: self.token.clone(), + token: self.token, })) - }; + } } - }; - - // If tokens are supported at all, they should be collected. - if matches!(nt.tokens_mut(), Some(None)) { - panic!( - "Missing tokens for nt {:?} at {:?}: {:?}", - nt, - nt.use_span(), - pprust::nonterminal_to_string(&nt) - ); } - - Ok(ParseNtResult::Nt(Arc::new(nt))) } } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index ec14c5718da..d6ff80b2eb4 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -3,7 +3,7 @@ use std::ops::Bound; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::ptr::P; use rustc_ast::token::NtPatKind::*; -use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token}; +use rustc_ast::token::{self, IdentIsRaw, MetaVarKind, Token}; use rustc_ast::util::parser::ExprPrecedence; use rustc_ast::visit::{self, Visitor}; use rustc_ast::{ @@ -323,7 +323,7 @@ impl<'a> Parser<'a> { fn eat_or_separator(&mut self, lo: Option<Span>) -> EatOrResult { if self.recover_trailing_vert(lo) { EatOrResult::TrailingVert - } else if matches!(self.token.kind, token::OrOr) { + } else if self.token.kind == token::OrOr { // Found `||`; Recover and pretend we parsed `|`. self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo }); self.bump(); @@ -352,9 +352,9 @@ impl<'a> Parser<'a> { | token::Semi // e.g. `let a |;`. | token::Colon // e.g. `let a | :`. | token::Comma // e.g. `let (a |,)`. - | token::CloseDelim(Delimiter::Bracket) // e.g. `let [a | ]`. - | token::CloseDelim(Delimiter::Parenthesis) // e.g. `let (a | )`. - | token::CloseDelim(Delimiter::Brace) // e.g. `let A { f: a | }`. + | token::CloseBracket // e.g. `let [a | ]`. + | token::CloseParen // e.g. `let (a | )`. + | token::CloseBrace // e.g. `let A { f: a | }`. 
) }); match (is_end_ahead, &self.token.kind) { @@ -363,8 +363,8 @@ impl<'a> Parser<'a> { self.dcx().emit_err(TrailingVertNotAllowed { span: self.token.span, start: lo, - token: self.token.clone(), - note_double_vert: matches!(self.token.kind, token::OrOr), + token: self.token, + note_double_vert: self.token.kind == token::OrOr, }); self.bump(); true @@ -438,8 +438,8 @@ impl<'a> Parser<'a> { | token::Caret | token::And | token::Shl | token::Shr // excludes `Or` ) || self.token == token::Question - || (self.token == token::OpenDelim(Delimiter::Bracket) - && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))) // excludes `[]` + || (self.token == token::OpenBracket + && self.look_ahead(1, |t| *t != token::CloseBracket)) // excludes `[]` || self.token.is_keyword(kw::As); if !has_dot_expr && !has_trailing_operator { @@ -481,7 +481,7 @@ impl<'a> Parser<'a> { let is_bound = is_end_bound // is_start_bound: either `..` or `)..` || self.token.is_range_separator() - || self.token == token::CloseDelim(Delimiter::Parenthesis) + || self.token == token::CloseParen && self.look_ahead(1, Token::is_range_separator); let span = expr.span; @@ -631,15 +631,6 @@ impl<'a> Parser<'a> { ident, indentation, }); - - // help: wrap the expr in a `const { expr }` - // FIXME(inline_const_pat): once stabilized, remove this check and remove the `(requires #[feature(inline_const_pat)])` note from the message - if self.parser.psess.unstable_features.is_nightly_build() { - err.subdiagnostic(UnexpectedExpressionInPatternSugg::InlineConst { - start_span: expr_span.shrink_to_lo(), - end_span: expr_span.shrink_to_hi(), - }); - } }, ); } @@ -844,7 +835,7 @@ impl<'a> Parser<'a> { // because we never have `'a: label {}` in a pattern position anyways, but it does // keep us from suggesting something like `let 'a: Ty = ..` => `let 'a': Ty = ..` && could_be_unclosed_char_literal(lt) - && !self.look_ahead(1, |token| matches!(token.kind, token::Colon)) + && !self.look_ahead(1, |token| token.kind == token::Colon) { // Recover a `'a` as a `'a'` literal let lt = self.expect_lifetime(); @@ -1261,11 +1252,11 @@ impl<'a> Parser<'a> { || *t == token::Dot // e.g. `.5` for recovery; || matches!(t.kind, token::Literal(..) | token::Minus) || t.is_bool_lit() - || t.is_whole_expr() + || t.is_metavar_expr() || t.is_lifetime() // recover `'a` instead of `'a'` || (self.may_recover() // recover leading `(` - && *t == token::OpenDelim(Delimiter::Parenthesis) - && self.look_ahead(dist + 1, |t| *t != token::OpenDelim(Delimiter::Parenthesis)) + && *t == token::OpenParen + && self.look_ahead(dist + 1, |t| *t != token::OpenParen) && self.is_pat_range_end_start(dist + 1)) }) } @@ -1273,9 +1264,8 @@ impl<'a> Parser<'a> { /// Parse a range pattern end bound fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> { // recover leading `(` - let open_paren = (self.may_recover() - && self.eat_noexpect(&token::OpenDelim(Delimiter::Parenthesis))) - .then_some(self.prev_token.span); + let open_paren = (self.may_recover() && self.eat_noexpect(&token::OpenParen)) + .then_some(self.prev_token.span); let bound = if self.check_inline_const(0) { self.parse_const_block(self.token.span, true) @@ -1331,8 +1321,8 @@ impl<'a> Parser<'a> { // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`. && !self.token.is_keyword(kw::In) // Try to do something more complex? - && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern. 
- | token::OpenDelim(Delimiter::Brace) // A struct pattern. + && self.look_ahead(1, |t| !matches!(t.kind, token::OpenParen // A tuple struct pattern. + | token::OpenBrace // A struct pattern. | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. | token::PathSep // A tuple / struct variant pattern. | token::Bang)) // A macro expanding to a pattern. @@ -1370,7 +1360,7 @@ impl<'a> Parser<'a> { // This shortly leads to a parse error. Note that if there is no explicit // binding mode then we do not end up here, because the lookahead // will direct us over to `parse_enum_variant()`. - if self.token == token::OpenDelim(Delimiter::Parenthesis) { + if self.token == token::OpenParen { return Err(self .dcx() .create_err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span })); @@ -1438,9 +1428,9 @@ impl<'a> Parser<'a> { token::Comma, token::Semi, token::At, - token::OpenDelim(Delimiter::Brace), - token::CloseDelim(Delimiter::Brace), - token::CloseDelim(Delimiter::Parenthesis), + token::OpenBrace, + token::CloseBrace, + token::CloseParen, ] .contains(&self.token.kind) } @@ -1498,7 +1488,7 @@ impl<'a> Parser<'a> { let mut first_etc_and_maybe_comma_span = None; let mut last_non_comma_dotdot_span = None; - while self.token != token::CloseDelim(Delimiter::Brace) { + while self.token != token::CloseBrace { // check that a comma comes after every field if !ate_comma { let err = if self.token == token::At { @@ -1528,8 +1518,8 @@ impl<'a> Parser<'a> { etc = PatFieldsRest::Rest; let mut etc_sp = self.token.span; if first_etc_and_maybe_comma_span.is_none() { - if let Some(comma_tok) = self - .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None }) + if let Some(comma_tok) = + self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None }) { let nw_span = self .psess @@ -1547,7 +1537,7 @@ impl<'a> Parser<'a> { self.recover_bad_dot_dot(); self.bump(); // `..` || `...` || `_` - if self.token == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseBrace { break; } let token_str = super::token_descr(&self.token); @@ -1570,7 +1560,7 @@ impl<'a> Parser<'a> { ate_comma = true; } - if self.token == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseBrace { // If the struct looks otherwise well formed, recover and continue. if let Some(sp) = comma_sp { err.span_suggestion_short( @@ -1690,7 +1680,7 @@ impl<'a> Parser<'a> { // We found `ref mut? ident:`, try to parse a `name,` or `name }`. 
&& let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span)) && self.look_ahead(2, |t| { - t == &token::Comma || t == &token::CloseDelim(Delimiter::Brace) + t == &token::Comma || t == &token::CloseBrace }) { let span = last.pat.span.with_hi(ident.span.lo()); diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 9c6830c3672..1093e4f4af0 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -2,7 +2,7 @@ use std::mem; use ast::token::IdentIsRaw; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, MetaVarKind, Token, TokenKind}; +use rustc_ast::token::{self, MetaVarKind, Token, TokenKind}; use rustc_ast::{ self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocItemConstraint, AssocItemConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs, @@ -248,19 +248,13 @@ impl<'a> Parser<'a> { segments.push(segment); if self.is_import_coupler() || !self.eat_path_sep() { - let ok_for_recovery = self.may_recover() - && match style { - PathStyle::Expr => true, - PathStyle::Type if let Some((ident, _)) = self.prev_token.ident() => { - self.token == token::Colon - && ident.as_str().chars().all(|c| c.is_lowercase()) - && self.token.span.lo() == self.prev_token.span.hi() - && self - .look_ahead(1, |token| self.token.span.hi() == token.span.lo()) - } - _ => false, - }; - if ok_for_recovery + // IMPORTANT: We can *only ever* treat single colons as typo'ed double colons in + // expression contexts (!) since only there paths cannot possibly be followed by + // a colon and still form a syntactically valid construct. In pattern contexts, + // a path may be followed by a type annotation. E.g., `let pat:ty`. In type + // contexts, a path may be followed by a list of bounds. E.g., `where ty:bound`. + if self.may_recover() + && style == PathStyle::Expr // (!) && self.token == token::Colon && self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident()) { @@ -273,7 +267,6 @@ impl<'a> Parser<'a> { self.dcx().emit_err(PathSingleColon { span: self.prev_token.span, suggestion: self.prev_token.span.shrink_to_hi(), - type_ascription: self.psess.unstable_features.is_nightly_build(), }); } continue; @@ -303,10 +296,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, PathSegment> { let ident = self.parse_path_segment_ident()?; let is_args_start = |token: &Token| { - matches!( - token.kind, - token::Lt | token::Shl | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow - ) + matches!(token.kind, token::Lt | token::Shl | token::OpenParen | token::LArrow) }; let check_args_start = |this: &mut Self| { this.expected_token_types.insert(TokenType::Lt); @@ -348,7 +338,6 @@ impl<'a> Parser<'a> { err = self.dcx().create_err(PathSingleColon { span: self.token.span, suggestion: self.prev_token.span.shrink_to_hi(), - type_ascription: self.psess.unstable_features.is_nightly_build(), }); } // Attempt to find places where a missing `>` might belong. @@ -368,7 +357,7 @@ impl<'a> Parser<'a> { })?; let span = lo.to(self.prev_token.span); AngleBracketedArgs { args, span }.into() - } else if self.token == token::OpenDelim(Delimiter::Parenthesis) + } else if self.token == token::OpenParen // FIXME(return_type_notation): Could also recover `...` here. 
&& self.look_ahead(1, |t| *t == token::DotDot) { @@ -393,8 +382,8 @@ impl<'a> Parser<'a> { } else { // `(T, U) -> R` - let prev_token_before_parsing = self.prev_token.clone(); - let token_before_parsing = self.token.clone(); + let prev_token_before_parsing = self.prev_token; + let token_before_parsing = self.token; let mut snapshot = None; if self.may_recover() && prev_token_before_parsing == token::PathSep @@ -854,7 +843,7 @@ impl<'a> Parser<'a> { /// the caller. pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> { // Parse const argument. - let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind { + let value = if self.token.kind == token::OpenBrace { self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)? } else { self.handle_unambiguous_unbraced_const_arg()? diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 97cd4d2117f..885a65d4de7 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -23,8 +23,8 @@ use super::{ AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode, Trailing, UsePreAttrPos, }; -use crate::errors::MalformedLoopLabel; -use crate::{errors, exp, maybe_whole}; +use crate::errors::{self, MalformedLoopLabel}; +use crate::exp; impl<'a> Parser<'a> { /// Parses a statement. This stops just before trailing semicolons on everything but items. @@ -73,10 +73,24 @@ impl<'a> Parser<'a> { }); } - let stmt = if self.token.is_keyword(kw::Let) { + let stmt = if self.token.is_keyword(kw::Super) && self.is_keyword_ahead(1, &[kw::Let]) { self.collect_tokens(None, attrs, force_collect, |this, attrs| { + let super_span = this.token.span; + this.expect_keyword(exp!(Super))?; this.expect_keyword(exp!(Let))?; - let local = this.parse_local(attrs)?; + this.psess.gated_spans.gate(sym::super_let, super_span); + let local = this.parse_local(Some(super_span), attrs)?; + let trailing = Trailing::from(capture_semi && this.token == token::Semi); + Ok(( + this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), + trailing, + UsePreAttrPos::No, + )) + })? + } else if self.token.is_keyword(kw::Let) { + self.collect_tokens(None, attrs, force_collect, |this, attrs| { + this.expect_keyword(exp!(Let))?; + let local = this.parse_local(None, attrs)?; let trailing = Trailing::from(capture_semi && this.token == token::Semi); Ok(( this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), @@ -148,7 +162,7 @@ impl<'a> Parser<'a> { // Do not attempt to parse an expression if we're done here. self.error_outer_attrs(attrs); self.mk_stmt(lo, StmtKind::Empty) - } else if self.token != token::CloseDelim(Delimiter::Brace) { + } else if self.token != token::CloseBrace { // Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case // above. let restrictions = @@ -240,9 +254,7 @@ impl<'a> Parser<'a> { self.token.kind, token::Semi | token::Eof - | token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( - MetaVarKind::Stmt - ))) + | token::CloseInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Stmt)) ) { StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None })) } else { @@ -281,7 +293,7 @@ impl<'a> Parser<'a> { force_collect: ForceCollect, ) -> PResult<'a, Stmt> { let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| { - let local = this.parse_local(attrs)?; + let local = this.parse_local(None, attrs)?; // FIXME - maybe capture semicolon in recovery? 
Ok(( this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), @@ -295,8 +307,8 @@ impl<'a> Parser<'a> { } /// Parses a local variable declaration. - fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> { - let lo = self.prev_token.span; + fn parse_local(&mut self, super_: Option<Span>, attrs: AttrVec) -> PResult<'a, P<Local>> { + let lo = super_.unwrap_or(self.prev_token.span); if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) { self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) }); @@ -398,6 +410,7 @@ impl<'a> Parser<'a> { }; let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span }; Ok(P(ast::Local { + super_, ty, pat, kind, @@ -503,7 +516,11 @@ impl<'a> Parser<'a> { let prev = self.prev_token.span; let sp = self.token.span; let mut e = self.dcx().struct_span_err(sp, msg); - let do_not_suggest_help = self.token.is_keyword(kw::In) || self.token == token::Colon; + self.label_expected_raw_ref(&mut e); + + let do_not_suggest_help = self.token.is_keyword(kw::In) + || self.token == token::Colon + || self.prev_token.is_keyword(kw::Raw); // Check to see if the user has written something like // @@ -528,7 +545,7 @@ impl<'a> Parser<'a> { // + + Ok(Some(_)) if (!self.token.is_keyword(kw::Else) - && self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace))) + && self.look_ahead(1, |t| t == &token::OpenBrace)) || do_not_suggest_help => {} // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836). Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {} @@ -565,9 +582,7 @@ impl<'a> Parser<'a> { stmt_kind: &StmtKind, ) { match (&self.token.kind, &stmt_kind) { - (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr)) - if let ExprKind::Call(..) = expr.kind => - { + (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Call(..) = expr.kind => { // for _ in x y() {} e.span_suggestion_verbose( between, @@ -576,9 +591,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr)) - if let ExprKind::Field(..) = expr.kind => - { + (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Field(..) = expr.kind => { // for _ in x y.z {} e.span_suggestion_verbose( between, @@ -587,7 +600,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - (token::CloseDelim(Delimiter::Brace), StmtKind::Expr(expr)) + (token::CloseBrace, StmtKind::Expr(expr)) if let ExprKind::Struct(expr) = &expr.kind && let None = expr.qself && expr.path.segments.len() == 1 => @@ -602,7 +615,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr)) + (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Lit(lit) = expr.kind && let None = lit.suffix && let token::LitKind::Integer | token::LitKind::Float = lit.kind => @@ -616,7 +629,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr)) + (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Loop(..) | ExprKind::If(..) | ExprKind::While(..) 
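
The stmt.rs hunks above are the parser side of `super let`: a `Super` keyword lookahead before `let`, a `super_let` feature gate recorded on the `super` span, and a new `super_: Option<Span>` argument threaded through `parse_local` into `ast::Local`. A minimal usage sketch, assuming a nightly toolchain where this gate is available; the lifetime-extension behavior noted in the comment is the feature's proposed semantics, not something shown in this diff:

    // Illustrative only: under the `super_let` proposal, a `super let`
    // binding lives as long as the value of the enclosing block, so a
    // reference to it may escape the block.
    #![feature(super_let)]

    fn main() {
        let r: &String = {
            super let s = String::from("outlives the inner block");
            &s
        };
        println!("{r}");
    }
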
@@ -639,7 +652,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - (token::OpenDelim(Delimiter::Brace), _) => {} + (token::OpenBrace, _) => {} (_, _) => { e.multipart_suggestion( "you might have meant to write this as part of a block", @@ -681,9 +694,11 @@ impl<'a> Parser<'a> { blk_mode: BlockCheckMode, loop_header: Option<Span>, ) -> PResult<'a, (AttrVec, P<Block>)> { - maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block)); + if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) { + return Ok((AttrVec::new(), block)); + } - let maybe_ident = self.prev_token.clone(); + let maybe_ident = self.prev_token; self.maybe_recover_unexpected_block_label(loop_header); if !self.eat(exp!(OpenBrace)) { return self.error_block_no_opening_brace(); @@ -750,10 +765,6 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - if self.psess.unstable_features.is_nightly_build() { - // FIXME(Nilstrieb): Remove this again after a few months. - err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>"); - } } } @@ -792,7 +803,7 @@ impl<'a> Parser<'a> { // Likely `foo bar` } else if self.prev_token.kind == token::Question { // `foo? bar` - } else if self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis) { + } else if self.prev_token.kind == token::CloseParen { // `foo() bar` } else { return; @@ -809,7 +820,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - if self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis)) { + if self.look_ahead(1, |t| t.kind == token::OpenParen) { err.span_suggestion_verbose( self.prev_token.span.between(self.token.span), "you might have meant to write a method call", @@ -853,8 +864,7 @@ impl<'a> Parser<'a> { StmtKind::Expr(expr) if classify::expr_requires_semi_to_be_stmt(expr) && !expr.attrs.is_empty() - && ![token::Eof, token::Semi, token::CloseDelim(Delimiter::Brace)] - .contains(&self.token.kind) => + && !matches!(self.token.kind, token::Eof | token::Semi | token::CloseBrace) => { // The user has written `#[attr] expr` which is unsupported. (#106020) let guar = self.attr_on_non_tail_expr(&expr); @@ -896,13 +906,13 @@ impl<'a> Parser<'a> { { if self.token == token::Colon && self.look_ahead(1, |token| { - token.is_whole_block() + token.is_metavar_block() || matches!( token.kind, token::Ident( kw::For | kw::Loop | kw::While, token::IdentIsRaw::No - ) | token::OpenDelim(Delimiter::Brace) + ) | token::OpenBrace ) }) { diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 471966d086d..2a44c90abc1 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -95,12 +95,6 @@ pub(crate) fn string_to_stream(source_str: String) -> TokenStream { )) } -/// Parses a string, returns a crate. -pub(crate) fn string_to_crate(source_str: String) -> ast::Crate { - let psess = psess(); - with_error_checking_parse(source_str, &psess, |p| p.parse_crate_mod()) -} - /// Does the given string match the pattern? whitespace in the first string /// may be deleted or replaced with other whitespace to match the pattern. /// This function is relatively Unicode-ignorant; fortunately, the careful design @@ -2554,7 +2548,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) { // Do the `assert_eq` outside the closure so that `track_caller` works. 
// (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure // doesn't give the line number in the test below if the assertion fails.) - let tok = p.look_ahead(dist, |tok| tok.clone()); + let tok = p.look_ahead(dist, |tok| *tok); assert_eq!(kind, tok.kind); } @@ -2573,14 +2567,14 @@ fn look_ahead() { // Current position is the `fn`. look(&p, 0, token::Ident(kw::Fn, raw_no)); look(&p, 1, token::Ident(sym_f, raw_no)); - look(&p, 2, token::OpenDelim(Delimiter::Parenthesis)); + look(&p, 2, token::OpenParen); look(&p, 3, token::Ident(sym_x, raw_no)); look(&p, 4, token::Colon); look(&p, 5, token::Ident(sym::u32, raw_no)); - look(&p, 6, token::CloseDelim(Delimiter::Parenthesis)); - look(&p, 7, token::OpenDelim(Delimiter::Brace)); + look(&p, 6, token::CloseParen); + look(&p, 7, token::OpenBrace); look(&p, 8, token::Ident(sym_x, raw_no)); - look(&p, 9, token::CloseDelim(Delimiter::Brace)); + look(&p, 9, token::CloseBrace); look(&p, 10, token::Ident(kw::Struct, raw_no)); look(&p, 11, token::Ident(sym_S, raw_no)); look(&p, 12, token::Semi); @@ -2597,10 +2591,10 @@ fn look_ahead() { look(&p, 0, token::Ident(sym_x, raw_no)); look(&p, 1, token::Colon); look(&p, 2, token::Ident(sym::u32, raw_no)); - look(&p, 3, token::CloseDelim(Delimiter::Parenthesis)); - look(&p, 4, token::OpenDelim(Delimiter::Brace)); + look(&p, 3, token::CloseParen); + look(&p, 4, token::OpenBrace); look(&p, 5, token::Ident(sym_x, raw_no)); - look(&p, 6, token::CloseDelim(Delimiter::Brace)); + look(&p, 6, token::CloseBrace); look(&p, 7, token::Ident(kw::Struct, raw_no)); look(&p, 8, token::Ident(sym_S, raw_no)); look(&p, 9, token::Semi); @@ -2652,18 +2646,18 @@ fn look_ahead_non_outermost_stream() { } look(&p, 0, token::Ident(kw::Fn, raw_no)); look(&p, 1, token::Ident(sym_f, raw_no)); - look(&p, 2, token::OpenDelim(Delimiter::Parenthesis)); + look(&p, 2, token::OpenParen); look(&p, 3, token::Ident(sym_x, raw_no)); look(&p, 4, token::Colon); look(&p, 5, token::Ident(sym::u32, raw_no)); - look(&p, 6, token::CloseDelim(Delimiter::Parenthesis)); - look(&p, 7, token::OpenDelim(Delimiter::Brace)); + look(&p, 6, token::CloseParen); + look(&p, 7, token::OpenBrace); look(&p, 8, token::Ident(sym_x, raw_no)); - look(&p, 9, token::CloseDelim(Delimiter::Brace)); + look(&p, 9, token::CloseBrace); look(&p, 10, token::Ident(kw::Struct, raw_no)); look(&p, 11, token::Ident(sym_S, raw_no)); look(&p, 12, token::Semi); - look(&p, 13, token::CloseDelim(Delimiter::Brace)); + look(&p, 13, token::CloseBrace); // Any lookahead past the end of the token stream returns `Eof`. look(&p, 14, token::Eof); look(&p, 15, token::Eof); @@ -2723,9 +2717,7 @@ fn debug_lookahead() { \"f\", No, ), - OpenDelim( - Parenthesis, - ), + OpenParen, Ident( \"x\", No, @@ -2735,9 +2727,7 @@ fn debug_lookahead() { \"u32\", No, ), - CloseDelim( - Parenthesis, - ), + CloseParen, ], approx_token_stream_pos: 0, .. 
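
The churn in these `debug_lookahead` expectations is mechanical fallout from the PR's central change: delimiter tokens are flattened from the nested `OpenDelim(Delimiter)`/`CloseDelim(Delimiter)` pair into dedicated variants such as `OpenParen`, `CloseBrace`, and `OpenInvisible(..)`, which also collapses the multi-line `Debug` rendering into a single line. A stubbed before/after sketch (abridged; the real enums in `rustc_ast::token` carry many more variants):

    #[allow(dead_code)]
    #[derive(Debug)]
    enum InvisibleOrigin { MetaVar /* carries a MetaVarKind in the real type */ }

    #[allow(dead_code)]
    #[derive(Debug)]
    enum Delimiter { Parenthesis, Brace, Bracket, Invisible(InvisibleOrigin) }

    // Before: two catch-all variants wrapping a `Delimiter`.
    #[allow(dead_code)]
    #[derive(Debug)]
    enum TokenKindOld { OpenDelim(Delimiter), CloseDelim(Delimiter) }

    // After: one flat variant per delimiter kind.
    #[allow(dead_code)]
    #[derive(Debug)]
    enum TokenKindNew {
        OpenParen, CloseParen,
        OpenBrace, CloseBrace,
        OpenBracket, CloseBracket,
        OpenInvisible(InvisibleOrigin), CloseInvisible(InvisibleOrigin),
    }

    fn main() {
        // Prints `OpenDelim(Parenthesis)` vs `OpenParen`, matching the
        // expected-output diffs above.
        println!("{:?}", TokenKindOld::OpenDelim(Delimiter::Parenthesis));
        println!("{:?}", TokenKindNew::OpenParen);
    }
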
@@ -2768,9 +2758,7 @@ fn debug_lookahead() { \"f\", No, ), - OpenDelim( - Parenthesis, - ), + OpenParen, Ident( \"x\", No, @@ -2780,19 +2768,13 @@ fn debug_lookahead() { \"u32\", No, ), - CloseDelim( - Parenthesis, - ), - OpenDelim( - Brace, - ), + CloseParen, + OpenBrace, Ident( \"x\", No, ), - CloseDelim( - Brace, - ), + CloseBrace, Ident( \"struct\", No, @@ -2817,9 +2799,7 @@ fn debug_lookahead() { &format!("{:#?}", p.debug_lookahead(1)), "Parser { prev_token: Token { - kind: OpenDelim( - Brace, - ), + kind: OpenBrace, span: Span { lo: BytePos( 13, @@ -2844,9 +2824,7 @@ fn debug_lookahead() { &format!("{:#?}", p.debug_lookahead(4)), "Parser { prev_token: Token { - kind: OpenDelim( - Brace, - ), + kind: OpenBrace, span: Span { lo: BytePos( 13, @@ -2862,9 +2840,7 @@ fn debug_lookahead() { \"x\", No, ), - CloseDelim( - Brace, - ), + CloseBrace, Ident( \"struct\", No, @@ -2922,7 +2898,7 @@ fn out_of_line_mod() { .unwrap() .unwrap(); - let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() }; + let ast::ItemKind::Mod(_, _, mod_kind) = &item.kind else { panic!() }; assert_matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2); }); } diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs index 886438fd583..b91548196a3 100644 --- a/compiler/rustc_parse/src/parser/token_type.rs +++ b/compiler/rustc_parse/src/parser/token_type.rs @@ -114,6 +114,7 @@ pub enum TokenType { KwSelfUpper, KwStatic, KwStruct, + KwSuper, KwTrait, KwTry, KwType, @@ -250,6 +251,7 @@ impl TokenType { KwSelfUpper, KwStatic, KwStruct, + KwSuper, KwTrait, KwTry, KwType, @@ -324,6 +326,7 @@ impl TokenType { TokenType::KwSelfUpper => Some(kw::SelfUpper), TokenType::KwStatic => Some(kw::Static), TokenType::KwStruct => Some(kw::Struct), + TokenType::KwSuper => Some(kw::Super), TokenType::KwTrait => Some(kw::Trait), TokenType::KwTry => Some(kw::Try), TokenType::KwType => Some(kw::Type), @@ -445,18 +448,6 @@ macro_rules! exp { token_type: $crate::parser::token_type::TokenType::$tok } }; - (@open, $delim:ident, $token_type:ident) => { - $crate::parser::token_type::ExpTokenPair { - tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim), - token_type: $crate::parser::token_type::TokenType::$token_type, - } - }; - (@close, $delim:ident, $token_type:ident) => { - $crate::parser::token_type::ExpTokenPair { - tok: &rustc_ast::token::CloseDelim(rustc_ast::token::Delimiter::$delim), - token_type: $crate::parser::token_type::TokenType::$token_type, - } - }; // `ExpKeywordPair` helper rules. (@kw, $kw:ident, $token_type:ident) => { @@ -501,12 +492,12 @@ macro_rules! exp { (Question) => { exp!(@tok, Question) }; (Eof) => { exp!(@tok, Eof) }; - (OpenParen) => { exp!(@open, Parenthesis, OpenParen) }; - (OpenBrace) => { exp!(@open, Brace, OpenBrace) }; - (OpenBracket) => { exp!(@open, Bracket, OpenBracket) }; - (CloseParen) => { exp!(@close, Parenthesis, CloseParen) }; - (CloseBrace) => { exp!(@close, Brace, CloseBrace) }; - (CloseBracket) => { exp!(@close, Bracket, CloseBracket) }; + (OpenParen) => { exp!(@tok, OpenParen) }; + (OpenBrace) => { exp!(@tok, OpenBrace) }; + (OpenBracket) => { exp!(@tok, OpenBracket) }; + (CloseParen) => { exp!(@tok, CloseParen) }; + (CloseBrace) => { exp!(@tok, CloseBrace) }; + (CloseBracket) => { exp!(@tok, CloseBracket) }; (As) => { exp!(@kw, As, KwAs) }; (Async) => { exp!(@kw, Async, KwAsync) }; @@ -549,6 +540,7 @@ macro_rules! 
exp {
     (SelfUpper) => { exp!(@kw, SelfUpper, KwSelfUpper) };
     (Static) => { exp!(@kw, Static, KwStatic) };
     (Struct) => { exp!(@kw, Struct, KwStruct) };
+    (Super) => { exp!(@kw, Super, KwSuper) };
     (Trait) => { exp!(@kw, Trait, KwTrait) };
     (Try) => { exp!(@kw, Try, KwTry) };
     (Type) => { exp!(@kw, Type, KwType) };
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index b45ebae079c..17481731b11 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
+use rustc_ast::token::{self, IdentIsRaw, MetaVarKind, Token, TokenKind};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
 	self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,
@@ -7,7 +7,7 @@ use rustc_ast::{
 	Pinnedness, PolyTraitRef, PreciseCapturingArg, TraitBoundModifiers, TraitObjectSyntax, Ty,
 	TyKind, UnsafeBinderTy,
 };
-use rustc_errors::{Applicability, PResult};
+use rustc_errors::{Applicability, Diag, PResult};
 use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym};
 use thin_vec::{ThinVec, thin_vec};

@@ -98,7 +98,7 @@ fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
 		|| t.is_lifetime()
 		|| t == &TokenKind::Question
 		|| t.is_keyword(kw::For)
-		|| t == &TokenKind::OpenDelim(Delimiter::Parenthesis)
+		|| t == &TokenKind::OpenParen
 }

 impl<'a> Parser<'a> {
@@ -355,7 +355,7 @@ impl<'a> Parser<'a> {
 			}
 		}
 	} else if self.check_keyword(exp!(Unsafe))
-		&& self.look_ahead(1, |tok| matches!(tok.kind, token::Lt))
+		&& self.look_ahead(1, |tok| tok.kind == token::Lt)
 	{
 		self.parse_unsafe_binder_ty()?
 	} else {
@@ -411,6 +411,9 @@ impl<'a> Parser<'a> {
 			TyKind::Path(None, path) if maybe_bounds => {
 				self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
 			}
+			// For `('a) + …`, we know that `'a` in type position already led to an error being
+			// emitted. To reduce output, let's indirectly suppress E0178 (bad `+` in type) and
+			// other irrelevant consequential errors.
 			TyKind::TraitObject(bounds, TraitObjectSyntax::None)
 				if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
 			{
@@ -425,12 +428,60 @@ impl<'a> Parser<'a> {
 	}

 	fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
-		let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
-		let bounds = self.parse_generic_bounds_common(allow_plus)?;
-		if lt_no_plus {
-			self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
+		// A lifetime only begins a bare trait object type if it is followed by `+`!
+		if self.token.is_lifetime() && !self.look_ahead(1, |t| t.is_like_plus()) {
+			// In Rust 2021 and beyond, we assume that the user didn't intend to write a bare trait
+			// object type with a leading lifetime bound since that seems very unlikely given the
+			// fact that `dyn`-less trait objects are *semantically* invalid.
+ if self.psess.edition.at_least_rust_2021() { + let lt = self.expect_lifetime(); + let mut err = self.dcx().struct_span_err(lo, "expected type, found lifetime"); + err.span_label(lo, "expected type"); + return Ok(match self.maybe_recover_ref_ty_no_leading_ampersand(lt, lo, err) { + Ok(ref_ty) => ref_ty, + Err(err) => TyKind::Err(err.emit()), + }); + } + + self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { + span: lo, + suggestion: lo.shrink_to_hi(), + }); + } + Ok(TyKind::TraitObject( + self.parse_generic_bounds_common(allow_plus)?, + TraitObjectSyntax::None, + )) + } + + fn maybe_recover_ref_ty_no_leading_ampersand<'cx>( + &mut self, + lt: Lifetime, + lo: Span, + mut err: Diag<'cx>, + ) -> Result<TyKind, Diag<'cx>> { + if !self.may_recover() { + return Err(err); + } + let snapshot = self.create_snapshot_for_diagnostic(); + let mutbl = self.parse_mutability(); + match self.parse_ty_no_plus() { + Ok(ty) => { + err.span_suggestion_verbose( + lo.shrink_to_lo(), + "you might have meant to write a reference type here", + "&", + Applicability::MaybeIncorrect, + ); + err.emit(); + Ok(TyKind::Ref(Some(lt), MutTy { ty, mutbl })) + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + Err(err) + } } - Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None)) } fn parse_remaining_bounds_path( @@ -483,7 +534,7 @@ impl<'a> Parser<'a> { let elt_ty = match self.parse_ty() { Ok(ty) => ty, Err(err) - if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Bracket)) + if self.look_ahead(1, |t| *t == token::CloseBracket) | self.look_ahead(1, |t| *t == token::Semi) => { // Recover from `[LIT; EXPR]` and `[LIT]` @@ -547,7 +598,7 @@ impl<'a> Parser<'a> { // Recovery mutbl = Mutability::Mut; - let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing); + let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing); self.bump(); self.bump_with((dyn_tok, dyn_tok_sp)); } @@ -775,7 +826,7 @@ impl<'a> Parser<'a> { /// Is a `dyn B0 + ... + Bn` type allowed here? 
fn is_explicit_dyn_type(&mut self) -> bool { self.check_keyword(exp!(Dyn)) - && (self.token.uninterpolated_span().at_least_rust_2018() + && (self.token_uninterpolated_span().at_least_rust_2018() || self.look_ahead(1, |t| { (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star) && !can_continue_type_after_non_fn_ident(t) @@ -886,7 +937,7 @@ impl<'a> Parser<'a> { /// ``` fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> { let lo = self.token.span; - let leading_token = self.prev_token.clone(); + let leading_token = self.prev_token; let has_parens = self.eat(exp!(OpenParen)); let bound = if self.token.is_lifetime() { @@ -998,13 +1049,13 @@ impl<'a> Parser<'a> { BoundConstness::Never }; - let asyncness = if self.token.uninterpolated_span().at_least_rust_2018() + let asyncness = if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Async)) { self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span); BoundAsyncness::Async(self.prev_token.span) } else if self.may_recover() - && self.token.uninterpolated_span().is_rust_2015() + && self.token_uninterpolated_span().is_rust_2015() && self.is_kw_followed_by_ident(kw::Async) { self.bump(); // eat `async` @@ -1103,7 +1154,7 @@ impl<'a> Parser<'a> { } let mut path = if self.token.is_keyword(kw::Fn) - && self.look_ahead(1, |t| *t == TokenKind::OpenDelim(Delimiter::Parenthesis)) + && self.look_ahead(1, |t| *t == TokenKind::OpenParen) && let Some(path) = self.recover_path_from_fn() { path @@ -1157,7 +1208,7 @@ impl<'a> Parser<'a> { self.parse_path(PathStyle::Type)? }; - if self.may_recover() && self.token == TokenKind::OpenDelim(Delimiter::Parenthesis) { + if self.may_recover() && self.token == TokenKind::OpenParen { self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?; } | 
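
The ty.rs rework above also changes what a lone lifetime in type position produces on edition 2021 and later: instead of `NeedPlusAfterTraitObjectLifetime`, the parser emits "expected type, found lifetime" and, through the new `maybe_recover_ref_ty_no_leading_ampersand`, speculatively parses `mut? Ty` after the lifetime so it can suggest a leading `&`. A sketch of the interaction (diagnostic wording paraphrased from the diff; exact rendering may differ):

    // Input that now takes the recovery path (edition >= 2021):
    //
    //     fn f(x: 'static str) {}
    //             ^^^^^^^ error: expected type, found lifetime
    //             help: you might have meant to write a reference type here: `&`
    //
    // The suggested form, which compiles:
    fn f(x: &'static str) {
        println!("{x}");
    }

    fn main() {
        f("hi");
    }
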
