Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/messages.ftl                |   2
-rw-r--r--  compiler/rustc_parse/src/errors.rs               |   6
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs            |  18
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs  |  12
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs          |  42
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs   | 113
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs          | 152
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs      |  65
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs          |  98
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs           |  14
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs   |  39
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs           |  53
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs          |  20
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs          |  15
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs         |   2
-rw-r--r--  compiler/rustc_parse/src/parser/token_type.rs    |  25
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs            |  26
17 files changed, 350 insertions, 352 deletions
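
Most hunks below are a mechanical migration away from the nested operator-token encoding: `token::BinOp(token::Plus)`, `token::BinOpEq(token::Shr)` and `token::Not` become the flat `token::Plus`, `token::ShrEq` and `token::Bang`, with the corresponding `exp!(Not)` call sites renamed to `exp!(Bang)`. The stand-in sketch below (local enums invented here for illustration, not the real `rustc_ast::token` definitions) shows the before/after shape of a typical call site.

// Stand-in enums that mirror the shape of the change; the real types
// live in rustc_ast::token and carry many more variants.
enum BinOpToken { Plus, Minus, Star }

enum OldTokenKind {
    BinOp(BinOpToken),   // `+`, `-`, `*`, ... as a nested variant
    BinOpEq(BinOpToken), // `+=`, `-=`, `*=`, ...
    Not,                 // `!`
}

enum NewTokenKind {
    Plus, Minus, Star,       // former BinOp(..)
    PlusEq, MinusEq, StarEq, // former BinOpEq(..)
    Bang,                    // former Not
}

// Old call sites matched through the nested variant ...
fn is_plus_old(kind: &OldTokenKind) -> bool {
    matches!(kind, OldTokenKind::BinOp(BinOpToken::Plus))
}

// ... new call sites match the flat variant directly, which is what
// most hunks in this diff do.
fn is_plus_new(kind: &NewTokenKind) -> bool {
    matches!(kind, NewTokenKind::Plus)
}

fn main() {
    assert!(is_plus_old(&OldTokenKind::BinOp(BinOpToken::Plus)));
    assert!(is_plus_new(&NewTokenKind::Plus));
}
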
diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 563081c7240..1d5b5942170 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -424,7 +424,7 @@ parse_invalid_logical_operator = `{$incorrect}` is not a logical operator .use_amp_amp_for_conjunction = use `&&` to perform logical conjunction .use_pipe_pipe_for_disjunction = use `||` to perform logical disjunction -parse_invalid_meta_item = expected unsuffixed literal, found `{$token}` +parse_invalid_meta_item = expected unsuffixed literal, found {$descr} .quote_ident_sugg = surround the identifier with quotation marks to make it into a string literal parse_invalid_offset_of = offset_of expects dot-separated field and variant names diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index dc03d6f9521..8d2fd595942 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -772,7 +772,6 @@ pub(crate) struct LabeledLoopInBreak { } #[derive(Subdiagnostic)] - pub(crate) enum WrapInParentheses { #[multipart_suggestion( parse_sugg_wrap_expression_in_parentheses, @@ -1025,7 +1024,7 @@ pub(crate) struct SuffixedLiteralInAttribute { pub(crate) struct InvalidMetaItem { #[primary_span] pub span: Span, - pub token: Token, + pub descr: String, #[subdiagnostic] pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>, } @@ -2923,8 +2922,9 @@ pub(crate) struct AddBoxNew { #[diag(parse_bad_return_type_notation_output)] pub(crate) struct BadReturnTypeNotationOutput { #[primary_span] - #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")] pub span: Span, + #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")] + pub suggestion: Span, } #[derive(Diagnostic)] diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 792e2cc26ef..1d17290e1c7 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -384,17 +384,17 @@ impl<'psess, 'src> Lexer<'psess, 'src> { rustc_lexer::TokenKind::Colon => token::Colon, rustc_lexer::TokenKind::Dollar => token::Dollar, rustc_lexer::TokenKind::Eq => token::Eq, - rustc_lexer::TokenKind::Bang => token::Not, + rustc_lexer::TokenKind::Bang => token::Bang, rustc_lexer::TokenKind::Lt => token::Lt, rustc_lexer::TokenKind::Gt => token::Gt, - rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus), - rustc_lexer::TokenKind::And => token::BinOp(token::And), - rustc_lexer::TokenKind::Or => token::BinOp(token::Or), - rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus), - rustc_lexer::TokenKind::Star => token::BinOp(token::Star), - rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash), - rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret), - rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent), + rustc_lexer::TokenKind::Minus => token::Minus, + rustc_lexer::TokenKind::And => token::And, + rustc_lexer::TokenKind::Or => token::Or, + rustc_lexer::TokenKind::Plus => token::Plus, + rustc_lexer::TokenKind::Star => token::Star, + rustc_lexer::TokenKind::Slash => token::Slash, + rustc_lexer::TokenKind::Caret => token::Caret, + rustc_lexer::TokenKind::Percent => token::Percent, rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => { // Don't emit diagnostics for sequences of the same invalid token diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index 
648a352efd9..ff03b42484b 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -308,11 +308,11 @@ pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[ (" ", "Space", None), ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))), - ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))), + ("-", "Minus/Hyphen", Some(token::Minus)), (",", "Comma", Some(token::Comma)), (";", "Semicolon", Some(token::Semi)), (":", "Colon", Some(token::Colon)), - ("!", "Exclamation Mark", Some(token::Not)), + ("!", "Exclamation Mark", Some(token::Bang)), ("?", "Question Mark", Some(token::Question)), (".", "Period", Some(token::Dot)), ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))), @@ -321,11 +321,11 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[ ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))), ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))), ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))), - ("*", "Asterisk", Some(token::BinOp(token::Star))), - ("/", "Slash", Some(token::BinOp(token::Slash))), + ("*", "Asterisk", Some(token::Star)), + ("/", "Slash", Some(token::Slash)), ("\\", "Backslash", None), - ("&", "Ampersand", Some(token::BinOp(token::And))), - ("+", "Plus Sign", Some(token::BinOp(token::Plus))), + ("&", "Ampersand", Some(token::And)), + ("+", "Plus Sign", Some(token::Plus)), ("<", "Less-Than Sign", Some(token::Lt)), ("=", "Equals Sign", Some(token::Eq)), ("==", "Double Equals Sign", Some(token::EqEq)), diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 2691e6f56d6..066b570c23f 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -1,4 +1,6 @@ -use rustc_ast::{self as ast, Attribute, attr, token}; +use rustc_ast as ast; +use rustc_ast::token::{self, MetaVarKind}; +use rustc_ast::{Attribute, attr}; use rustc_errors::codes::*; use rustc_errors::{Diag, PResult}; use rustc_span::{BytePos, Span}; @@ -9,7 +11,7 @@ use super::{ AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing, UsePreAttrPos, }; -use crate::{errors, exp, fluent_generated as fluent, maybe_whole}; +use crate::{errors, exp, fluent_generated as fluent}; // Public for rustfmt usage #[derive(Debug)] @@ -128,7 +130,7 @@ impl<'a> Parser<'a> { assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position"); let style = - if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; + if this.eat(exp!(Bang)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; this.expect(exp!(OpenBracket))?; let item = this.parse_attr_item(ForceCollect::No)?; @@ -269,7 +271,12 @@ impl<'a> Parser<'a> { /// PATH `=` UNSUFFIXED_LIT /// The delimiters or `=` are still put into the resulting token stream. pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> { - maybe_whole!(self, NtMeta, |attr| attr.into_inner()); + if let Some(item) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Meta { .. }), + |this| this.parse_attr_item(force_collect), + ) { + return Ok(item); + } // Attr items don't have attributes. 
self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| { @@ -305,7 +312,7 @@ impl<'a> Parser<'a> { loop { let start_pos = self.num_bump_calls; // Only try to parse if it is an inner attribute (has `!`). - let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) { + let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Bang) { Some(self.parse_attribute(InnerAttrPolicy::Permitted)?) } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { if attr_style == ast::AttrStyle::Inner { @@ -396,18 +403,17 @@ impl<'a> Parser<'a> { &mut self, unsafe_allowed: AllowLeadingUnsafe, ) -> PResult<'a, ast::MetaItem> { - // We can't use `maybe_whole` here because it would bump in the `None` - // case, which we don't want. - if let token::Interpolated(nt) = &self.token.kind - && let token::NtMeta(attr_item) = &**nt - { - match attr_item.meta(attr_item.path.span) { - Some(meta) => { - self.bump(); - return Ok(meta); - } - None => self.unexpected()?, - } + if let Some(MetaVarKind::Meta { has_meta_form }) = self.token.is_metavar_seq() { + return if has_meta_form { + let attr_item = self + .eat_metavar_seq(MetaVarKind::Meta { has_meta_form: true }, |this| { + this.parse_attr_item(ForceCollect::No) + }) + .unwrap(); + Ok(attr_item.meta(attr_item.path.span).unwrap()) + } else { + self.unexpected_any() + }; } let lo = self.token.span; @@ -464,7 +470,7 @@ impl<'a> Parser<'a> { let mut err = errors::InvalidMetaItem { span: self.token.span, - token: self.token.clone(), + descr: super::token_descr(&self.token), quote_ident_sugg: None, }; diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 67abc2d5394..94db43bb59f 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -1,17 +1,15 @@ use std::mem::take; use std::ops::{Deref, DerefMut}; -use std::sync::Arc; use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; -use rustc_ast::tokenstream::AttrTokenTree; use rustc_ast::util::parser::AssocOp; use rustc_ast::{ AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block, - BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat, - PatKind, Path, PathSegment, QSelf, Recovered, Ty, TyKind, + BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind, + Path, PathSegment, QSelf, Recovered, Ty, TyKind, }; use rustc_ast_pretty::pprust; use rustc_data_structures::fx::FxHashSet; @@ -1169,7 +1167,7 @@ impl<'a> Parser<'a> { let mut number_of_gt = 0; while self.look_ahead(position, |t| { trace!("check_trailing_angle_brackets: t={:?}", t); - if *t == token::BinOp(token::BinOpToken::Shr) { + if *t == token::Shr { number_of_shr += 1; true } else if *t == token::Gt { @@ -1224,7 +1222,7 @@ impl<'a> Parser<'a> { let span = lo.to(self.prev_token.span); // Detect trailing `>` like in `x.collect::Vec<_>>()`. 
let mut trailing_span = self.prev_token.span.shrink_to_hi(); - while self.token == token::BinOp(token::Shr) || self.token == token::Gt { + while self.token == token::Shr || self.token == token::Gt { trailing_span = trailing_span.to(self.token.span); self.bump(); } @@ -1350,13 +1348,13 @@ impl<'a> Parser<'a> { } return match (op.node, &outer_op.node) { // `x == y == z` - (BinOpKind::Eq, AssocOp::Equal) | + (BinOpKind::Eq, AssocOp::Binary(BinOpKind::Eq)) | // `x < y < z` and friends. - (BinOpKind::Lt, AssocOp::Less | AssocOp::LessEqual) | - (BinOpKind::Le, AssocOp::LessEqual | AssocOp::Less) | + (BinOpKind::Lt, AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le)) | + (BinOpKind::Le, AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le)) | // `x > y > z` and friends. - (BinOpKind::Gt, AssocOp::Greater | AssocOp::GreaterEqual) | - (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => { + (BinOpKind::Gt, AssocOp::Binary(BinOpKind::Gt | BinOpKind::Ge)) | + (BinOpKind::Ge, AssocOp::Binary(BinOpKind::Gt | BinOpKind::Ge)) => { let expr_to_str = |e: &Expr| { self.span_to_snippet(e.span) .unwrap_or_else(|_| pprust::expr_to_string(e)) @@ -1368,7 +1366,10 @@ impl<'a> Parser<'a> { false // Keep the current parse behavior, where the AST is `(x < y) < z`. } // `x == y < z` - (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => { + ( + BinOpKind::Eq, + AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge) + ) => { // Consume `z`/outer-op-rhs. let snapshot = self.create_snapshot_for_diagnostic(); match self.parse_expr() { @@ -1389,7 +1390,10 @@ impl<'a> Parser<'a> { } } // `x > y == z` - (BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, AssocOp::Equal) => { + ( + BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, + AssocOp::Binary(BinOpKind::Eq) + ) => { let snapshot = self.create_snapshot_for_diagnostic(); // At this point it is always valid to enclose the lhs in parentheses, no // further checks are necessary. @@ -1457,15 +1461,14 @@ impl<'a> Parser<'a> { // Include `<` to provide this recommendation even in a case like // `Foo<Bar<Baz<Qux, ()>>>` - if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less - || outer_op.node == AssocOp::Greater + if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Binary(BinOpKind::Lt) + || outer_op.node == AssocOp::Binary(BinOpKind::Gt) { - if outer_op.node == AssocOp::Less { + if outer_op.node == AssocOp::Binary(BinOpKind::Lt) { let snapshot = self.create_snapshot_for_diagnostic(); self.bump(); // So far we have parsed `foo<bar<`, consume the rest of the type args. - let modifiers = - [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)]; + let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)]; self.consume_tts(1, &modifiers); if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep] @@ -1958,7 +1961,7 @@ impl<'a> Parser<'a> { &mut self, await_sp: Span, ) -> PResult<'a, P<Expr>> { - let (hi, expr, is_question) = if self.token == token::Not { + let (hi, expr, is_question) = if self.token == token::Bang { // Handle `await!(<expr>)`. self.recover_await_macro()? 
} else { @@ -1970,7 +1973,7 @@ impl<'a> Parser<'a> { } fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> { - self.expect(exp!(Not))?; + self.expect(exp!(Bang))?; self.expect(exp!(OpenParen))?; let expr = self.parse_expr()?; self.expect(exp!(CloseParen))?; @@ -2030,7 +2033,7 @@ impl<'a> Parser<'a> { pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> { let is_try = self.token.is_keyword(kw::Try); - let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for ! + let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for ! let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for ( if is_try && is_questionmark && is_open { @@ -2400,52 +2403,6 @@ impl<'a> Parser<'a> { err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp)); } err.span_label(span, "expected expression"); - - // Walk the chain of macro expansions for the current token to point at how the original - // code was interpreted. This helps the user realize when a macro argument of one type is - // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat` - // in a subsequent macro invocation (#71039). - let mut tok = self.token.clone(); - let mut labels = vec![]; - while let TokenKind::Interpolated(nt) = &tok.kind { - let tokens = nt.tokens(); - labels.push(Arc::clone(nt)); - if let Some(tokens) = tokens - && let tokens = tokens.to_attr_token_stream() - && let tokens = tokens.0.deref() - && let [AttrTokenTree::Token(token, _)] = &tokens[..] - { - tok = token.clone(); - } else { - break; - } - } - let mut iter = labels.into_iter().peekable(); - let mut show_link = false; - while let Some(nt) = iter.next() { - let descr = nt.descr(); - if let Some(next) = iter.peek() { - let next_descr = next.descr(); - if next_descr != descr { - err.span_label(next.use_span(), format!("this is expected to be {next_descr}")); - err.span_label( - nt.use_span(), - format!( - "this is interpreted as {}, but it is expected to be {}", - next_descr, descr, - ), - ); - show_link = true; - } - } - } - if show_link { - err.note( - "when forwarding a matched fragment to another macro-by-example, matchers in the \ - second macro will see an opaque AST of the fragment type, not the underlying \ - tokens", - ); - } err } @@ -2635,10 +2592,12 @@ impl<'a> Parser<'a> { ) -> PResult<'a, GenericArg> { let is_op_or_dot = AssocOp::from_token(&self.token) .and_then(|op| { - if let AssocOp::Greater - | AssocOp::Less - | AssocOp::ShiftRight - | AssocOp::GreaterEqual + if let AssocOp::Binary( + BinOpKind::Gt + | BinOpKind::Lt + | BinOpKind::Shr + | BinOpKind::Ge + ) // Don't recover from `foo::<bar = baz>`, because this could be an attempt to // assign a value to a defaulted generic parameter. | AssocOp::Assign @@ -2653,8 +2612,7 @@ impl<'a> Parser<'a> { || self.token == TokenKind::Dot; // This will be true when a trait object type `Foo +` or a path which was a `const fn` with // type params has been parsed. - let was_op = - matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt); + let was_op = matches!(self.prev_token.kind, token::Plus | token::Shr | token::Gt); if !is_op_or_dot && !was_op { // We perform these checks and early return to avoid taking a snapshot unnecessarily. 
return Err(err); @@ -3032,8 +2990,7 @@ impl<'a> Parser<'a> { pub(super) fn recover_vcs_conflict_marker(&mut self) { // <<<<<<< - let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) - else { + let Some(start) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt) else { return; }; let mut spans = Vec::with_capacity(3); @@ -3048,15 +3005,13 @@ impl<'a> Parser<'a> { if self.token == TokenKind::Eof { break; } - if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or)) - { + if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or) { middlediff3 = Some(span); } if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) { middle = Some(span); } - if let Some(span) = self.conflict_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt) - { + if let Some(span) = self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt) { spans.push(span); end = Some(span); break; diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index b2e58c94280..0a08c6faeb4 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -4,7 +4,7 @@ use core::mem; use core::ops::{Bound, ControlFlow}; use ast::mut_visit::{self, MutVisitor}; -use ast::token::IdentIsRaw; +use ast::token::{IdentIsRaw, MetaVarKind}; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered}; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; @@ -171,7 +171,7 @@ impl<'a> Parser<'a> { break; } // Check for deprecated `...` syntax - if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq { + if self.token == token::DotDotDot && op.node == AssocOp::Range(RangeLimits::Closed) { self.err_dotdotdot_syntax(self.token.span); } @@ -188,17 +188,12 @@ impl<'a> Parser<'a> { } // Look for JS' `===` and `!==` and recover - if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual) + if let AssocOp::Binary(bop @ BinOpKind::Eq | bop @ BinOpKind::Ne) = op.node && self.token == token::Eq && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); - let sugg = match op.node { - AssocOp::Equal => "==", - AssocOp::NotEqual => "!=", - _ => unreachable!(), - } - .into(); + let sugg = bop.as_str().into(); let invalid = format!("{sugg}="); self.dcx().emit_err(errors::InvalidComparisonOperator { span: sp, @@ -213,7 +208,7 @@ impl<'a> Parser<'a> { } // Look for PHP's `<>` and recover - if op.node == AssocOp::Less + if op.node == AssocOp::Binary(BinOpKind::Lt) && self.token == token::Gt && self.prev_token.span.hi() == self.token.span.lo() { @@ -231,7 +226,7 @@ impl<'a> Parser<'a> { } // Look for C++'s `<=>` and recover - if op.node == AssocOp::LessEqual + if op.node == AssocOp::Binary(BinOpKind::Le) && self.token == token::Gt && self.prev_token.span.hi() == self.token.span.lo() { @@ -244,8 +239,8 @@ impl<'a> Parser<'a> { self.bump(); } - if self.prev_token == token::BinOp(token::Plus) - && self.token == token::BinOp(token::Plus) + if self.prev_token == token::Plus + && self.token == token::Plus && self.prev_token.span.between(self.token.span).is_empty() { let op_span = self.prev_token.span.to(self.token.span); @@ -255,8 +250,8 @@ impl<'a> Parser<'a> { continue; } - if self.prev_token == token::BinOp(token::Minus) - && self.token == token::BinOp(token::Minus) + if self.prev_token == token::Minus + && self.token == token::Minus && self.prev_token.span.between(self.token.span).is_empty() && 
!self.look_ahead(1, |tok| tok.can_begin_expr()) { @@ -269,13 +264,13 @@ impl<'a> Parser<'a> { let op = op.node; // Special cases: - if op == AssocOp::As { + if op == AssocOp::Cast { lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?; continue; - } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq { + } else if let AssocOp::Range(limits) = op { // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to // generalise it to the Fixity::None code. - lhs = self.parse_expr_range(prec, lhs, op, cur_op_span)?; + lhs = self.parse_expr_range(prec, lhs, limits, cur_op_span)?; break; } @@ -290,46 +285,16 @@ impl<'a> Parser<'a> { let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span); lhs = match op { - AssocOp::Add - | AssocOp::Subtract - | AssocOp::Multiply - | AssocOp::Divide - | AssocOp::Modulus - | AssocOp::LAnd - | AssocOp::LOr - | AssocOp::BitXor - | AssocOp::BitAnd - | AssocOp::BitOr - | AssocOp::ShiftLeft - | AssocOp::ShiftRight - | AssocOp::Equal - | AssocOp::Less - | AssocOp::LessEqual - | AssocOp::NotEqual - | AssocOp::Greater - | AssocOp::GreaterEqual => { - let ast_op = op.to_ast_binop().unwrap(); + AssocOp::Binary(ast_op) => { let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); self.mk_expr(span, binary) } AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)), - AssocOp::AssignOp(k) => { - let aop = match k { - token::Plus => BinOpKind::Add, - token::Minus => BinOpKind::Sub, - token::Star => BinOpKind::Mul, - token::Slash => BinOpKind::Div, - token::Percent => BinOpKind::Rem, - token::Caret => BinOpKind::BitXor, - token::And => BinOpKind::BitAnd, - token::Or => BinOpKind::BitOr, - token::Shl => BinOpKind::Shl, - token::Shr => BinOpKind::Shr, - }; + AssocOp::AssignOp(aop) => { let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs); self.mk_expr(span, aopexpr) } - AssocOp::As | AssocOp::DotDot | AssocOp::DotDotEq => { + AssocOp::Cast | AssocOp::Range(_) => { self.dcx().span_bug(span, "AssocOp should have been handled by special case") } }; @@ -347,13 +312,14 @@ impl<'a> Parser<'a> { // An exhaustive check is done in the following block, but these are checked first // because they *are* ambiguous but also reasonable looking incorrect syntax, so we // want to keep their span info to improve diagnostics in these cases in a later stage. - (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3` - (true, Some(AssocOp::Subtract)) | // `{ 42 } -5` - (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus) - (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }` - (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure) - (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42` - => { + (true, Some(AssocOp::Binary( + BinOpKind::Mul | // `{ 42 } *foo = bar;` or `{ 42 } * 3` + BinOpKind::Sub | // `{ 42 } -5` + BinOpKind::Add | // `{ 42 } + 42` (unary plus) + BinOpKind::And | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }` + BinOpKind::Or | // `{ 42 } || 42` ("logical or" or closure) + BinOpKind::BitOr // `{ 42 } | 42` or `{ 42 } |x| 42` + ))) => { // These cases are ambiguous and can't be identified in the parser alone. // // Bitwise AND is left out because guessing intent is hard. We can make @@ -392,23 +358,21 @@ impl<'a> Parser<'a> { // When parsing const expressions, stop parsing when encountering `>`. 
( Some( - AssocOp::ShiftRight - | AssocOp::Greater - | AssocOp::GreaterEqual - | AssocOp::AssignOp(token::BinOpToken::Shr), + AssocOp::Binary(BinOpKind::Shr | BinOpKind::Gt | BinOpKind::Ge) + | AssocOp::AssignOp(BinOpKind::Shr), ), _, ) if self.restrictions.contains(Restrictions::CONST_EXPR) => { return None; } - // When recovering patterns as expressions, stop parsing when encountering an assignment `=`, an alternative `|`, or a range `..`. + // When recovering patterns as expressions, stop parsing when encountering an + // assignment `=`, an alternative `|`, or a range `..`. ( Some( AssocOp::Assign | AssocOp::AssignOp(_) - | AssocOp::BitOr - | AssocOp::DotDot - | AssocOp::DotDotEq, + | AssocOp::Binary(BinOpKind::BitOr) + | AssocOp::Range(_), ), _, ) if self.restrictions.contains(Restrictions::IS_PAT) => { @@ -423,7 +387,7 @@ impl<'a> Parser<'a> { incorrect: "and".into(), sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span), }); - (AssocOp::LAnd, span) + (AssocOp::Binary(BinOpKind::And), span) } (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => { self.dcx().emit_err(errors::InvalidLogicalOperator { @@ -431,7 +395,7 @@ impl<'a> Parser<'a> { incorrect: "or".into(), sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span), }); - (AssocOp::LOr, span) + (AssocOp::Binary(BinOpKind::Or), span) } _ => return None, }; @@ -449,7 +413,7 @@ impl<'a> Parser<'a> { &mut self, prec: ExprPrecedence, lhs: P<Expr>, - op: AssocOp, + limits: RangeLimits, cur_op_span: Span, ) -> PResult<'a, P<Expr>> { let rhs = if self.is_at_start_of_range_notation_rhs() { @@ -465,8 +429,6 @@ impl<'a> Parser<'a> { }; let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span); let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span); - let limits = - if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed }; let range = self.mk_range(Some(lhs), rhs, limits); Ok(self.mk_expr(span, range)) } @@ -543,23 +505,23 @@ impl<'a> Parser<'a> { // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() match this.token.uninterpolate().kind { // `!expr` - token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)), + token::Bang => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)), // `~expr` token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `-expr` - token::BinOp(token::Minus) => { + token::Minus => { make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg)) } // `*expr` - token::BinOp(token::Star) => { + token::Star => { make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref)) } // `&expr` and `&&expr` - token::BinOp(token::And) | token::AndAnd => { + token::And | token::AndAnd => { make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo)) } // `+lit` - token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => { + token::Plus if this.look_ahead(1, |tok| tok.is_numeric_lit()) => { let mut err = errors::LeadingPlusNotSupported { span: lo, remove_plus: None, @@ -579,9 +541,7 @@ impl<'a> Parser<'a> { this.parse_expr_prefix(attrs) } // Recover from `++x`: - token::BinOp(token::Plus) - if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) => - { + token::Plus if this.look_ahead(1, |t| *t == token::Plus) => { let starts_stmt = this.prev_token == token::Semi || this.prev_token == token::CloseDelim(Delimiter::Brace); let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span)); @@ -605,7 +565,11 @@ 
impl<'a> Parser<'a> { fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> { self.bump(); let attrs = self.parse_outer_attributes()?; - let expr = self.parse_expr_prefix(attrs)?; + let expr = if self.token.is_range_separator() { + self.parse_expr_prefix_range(attrs) + } else { + self.parse_expr_prefix(attrs) + }?; let span = self.interpolated_or_expr_span(&expr); Ok((lo.to(span), expr)) } @@ -761,14 +725,12 @@ impl<'a> Parser<'a> { suggestion, }) } - token::BinOp(token::Shl) => { - self.dcx().emit_err(errors::ShiftInterpretedAsGeneric { - shift: self.token.span, - r#type: path, - args: args_span, - suggestion, - }) - } + token::Shl => self.dcx().emit_err(errors::ShiftInterpretedAsGeneric { + shift: self.token.span, + r#type: path, + args: args_span, + suggestion, + }), _ => { // We can end up here even without `<` being the next token, for // example because `parse_ty_no_plus` returns `Err` on keywords, @@ -1382,6 +1344,7 @@ impl<'a> Parser<'a> { fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> { maybe_recover_from_interpolated_ty_qpath!(self, true); + let span = self.token.span; if let token::Interpolated(nt) = &self.token.kind { match &**nt { token::NtExpr(e) | token::NtLiteral(e) => { @@ -1389,11 +1352,6 @@ impl<'a> Parser<'a> { self.bump(); return Ok(e); } - token::NtPath(path) => { - let path = (**path).clone(); - self.bump(); - return Ok(self.mk_expr(self.prev_token.span, ExprKind::Path(None, path))); - } token::NtBlock(block) => { let block = block.clone(); self.bump(); @@ -1401,6 +1359,10 @@ impl<'a> Parser<'a> { } _ => {} }; + } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| { + this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type)) + }) { + return Ok(self.mk_expr(span, ExprKind::Path(None, path))); } // Outer attributes are already parsed and will be @@ -1612,7 +1574,7 @@ impl<'a> Parser<'a> { }; // `!`, as an operator, is prefix, so we know this isn't that. - let (span, kind) = if self.eat(exp!(Not)) { + let (span, kind) = if self.eat(exp!(Bang)) { // MACRO INVOCATION expression if qself.is_some() { self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span)); @@ -2633,7 +2595,7 @@ impl<'a> Parser<'a> { missing_let: None, comparison: None, }; - if self.prev_token == token::BinOp(token::Or) { + if self.prev_token == token::Or { // This was part of a closure, the that part of the parser recover. return Err(self.dcx().create_err(err)); } else { diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs index 11f0e579de5..c3f71dd8b30 100644 --- a/compiler/rustc_parse/src/parser/generics.rs +++ b/compiler/rustc_parse/src/parser/generics.rs @@ -367,34 +367,47 @@ impl<'a> Parser<'a> { loop { let where_sp = where_lo.to(self.prev_token.span); + let attrs = self.parse_outer_attributes()?; let pred_lo = self.token.span; - let kind = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) { - let lifetime = self.expect_lifetime(); - // Bounds starting with a colon are mandatory, but possibly empty. - self.expect(exp!(Colon))?; - let bounds = self.parse_lt_param_bounds(); - ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate { - lifetime, - bounds, - }) - } else if self.check_type() { - match self.parse_ty_where_predicate_kind_or_recover_tuple_struct_body( - struct_, pred_lo, where_sp, - )? 
{ - PredicateKindOrStructBody::PredicateKind(kind) => kind, - PredicateKindOrStructBody::StructBody(body) => { - tuple_struct_body = Some(body); - break; - } + let predicate = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { + for attr in &attrs { + self.psess.gated_spans.gate(sym::where_clause_attrs, attr.span); } - } else { - break; - }; - where_clause.predicates.push(ast::WherePredicate { - kind, - id: DUMMY_NODE_ID, - span: pred_lo.to(self.prev_token.span), - }); + let kind = if this.check_lifetime() && this.look_ahead(1, |t| !t.is_like_plus()) { + let lifetime = this.expect_lifetime(); + // Bounds starting with a colon are mandatory, but possibly empty. + this.expect(exp!(Colon))?; + let bounds = this.parse_lt_param_bounds(); + Some(ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate { + lifetime, + bounds, + })) + } else if this.check_type() { + match this.parse_ty_where_predicate_kind_or_recover_tuple_struct_body( + struct_, pred_lo, where_sp, + )? { + PredicateKindOrStructBody::PredicateKind(kind) => Some(kind), + PredicateKindOrStructBody::StructBody(body) => { + tuple_struct_body = Some(body); + None + } + } + } else { + None + }; + let predicate = kind.map(|kind| ast::WherePredicate { + attrs, + kind, + id: DUMMY_NODE_ID, + span: pred_lo.to(this.prev_token.span), + is_placeholder: false, + }); + Ok((predicate, Trailing::No, UsePreAttrPos::No)) + })?; + match predicate { + Some(predicate) => where_clause.predicates.push(predicate), + None => break, + } let prev_token = self.prev_token.span; let ate_comma = self.eat(exp!(Comma)); diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index c923717ecaf..c3b1956ad2e 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -4,7 +4,7 @@ use std::mem; use ast::token::IdentIsRaw; use rustc_ast::ast::*; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, TokenKind}; +use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::util::case::Case; use rustc_ast::{self as ast}; @@ -382,7 +382,7 @@ impl<'a> Parser<'a> { /// Are we sure this could not possibly be a macro invocation? fn isnt_macro_invocation(&mut self) -> bool { - self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep) + self.check_ident() && self.look_ahead(1, |t| *t != token::Bang && *t != token::PathSep) } /// Recover on encountering a struct, enum, or method definition where the user @@ -480,7 +480,7 @@ impl<'a> Parser<'a> { /// Parses an item macro, e.g., `item!();`. fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> { let path = self.parse_path(PathStyle::Mod)?; // `foo::bar` - self.expect(exp!(Not))?; // `!` + self.expect(exp!(Bang))?; // `!` match self.parse_delim_args() { // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`. Ok(args) => { @@ -540,7 +540,7 @@ impl<'a> Parser<'a> { fn parse_polarity(&mut self) -> ast::ImplPolarity { // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type. 
- if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) { + if self.check(exp!(Bang)) && self.look_ahead(1, |t| t.can_begin_type()) { self.bump(); // `!` ast::ImplPolarity::Negative(self.prev_token.span) } else { @@ -1293,7 +1293,7 @@ impl<'a> Parser<'a> { if token.is_keyword(kw::Move) { return true; } - matches!(token.kind, token::BinOp(token::Or) | token::OrOr) + matches!(token.kind, token::Or | token::OrOr) }) } else { // `$qual static` @@ -1579,7 +1579,7 @@ impl<'a> Parser<'a> { } let ident = this.parse_field_ident("enum", vlo)?; - if this.token == token::Not { + if this.token == token::Bang { if let Err(err) = this.unexpected() { err.with_note(fluent::parse_macro_expands_to_enum_variant).emit(); } @@ -1814,7 +1814,7 @@ impl<'a> Parser<'a> { let attrs = p.parse_outer_attributes()?; p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| { let mut snapshot = None; - if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) { + if p.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) { // Account for `<<<<<<<` diff markers. We can't proactively error here because // that can be a valid type start, so we snapshot and reparse only we've // encountered another parse error. @@ -2034,7 +2034,7 @@ impl<'a> Parser<'a> { attrs: AttrVec, ) -> PResult<'a, FieldDef> { let name = self.parse_field_ident(adt_ty, lo)?; - if self.token == token::Not { + if self.token == token::Bang { if let Err(mut err) = self.unexpected() { // Encounter the macro invocation err.subdiagnostic(MacroExpandsToAdtField { adt_ty }); @@ -2184,7 +2184,7 @@ impl<'a> Parser<'a> { if self.check_keyword(exp!(MacroRules)) { let macro_rules_span = self.token.span; - if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) { + if self.look_ahead(1, |t| *t == token::Bang) && self.look_ahead(2, |t| t.is_ident()) { return IsMacroRulesItem::Yes { has_bang: true }; } else if self.look_ahead(1, |t| (t.is_ident())) { // macro_rules foo @@ -2209,11 +2209,11 @@ impl<'a> Parser<'a> { self.expect_keyword(exp!(MacroRules))?; // `macro_rules` if has_bang { - self.expect(exp!(Not))?; // `!` + self.expect(exp!(Bang))?; // `!` } let ident = self.parse_ident()?; - if self.eat(exp!(Not)) { + if self.eat(exp!(Bang)) { // Handle macro_rules! foo! let span = self.prev_token.span; self.dcx().emit_err(errors::MacroNameRemoveBang { span }); @@ -2954,14 +2954,27 @@ impl<'a> Parser<'a> { } _ => unreachable!(), }; + // is lifetime `n` tokens ahead? + let is_lifetime = |this: &Self, n| this.look_ahead(n, |t| t.is_lifetime()); // Is `self` `n` tokens ahead? let is_isolated_self = |this: &Self, n| { this.is_keyword_ahead(n, &[kw::SelfLower]) && this.look_ahead(n + 1, |t| t != &token::PathSep) }; + // Is `pin const self` `n` tokens ahead? + let is_isolated_pin_const_self = |this: &Self, n| { + this.look_ahead(n, |token| token.is_ident_named(sym::pin)) + && this.is_keyword_ahead(n + 1, &[kw::Const]) + && is_isolated_self(this, n + 2) + }; // Is `mut self` `n` tokens ahead? let is_isolated_mut_self = |this: &Self, n| this.is_keyword_ahead(n, &[kw::Mut]) && is_isolated_self(this, n + 1); + // Is `pin mut self` `n` tokens ahead? + let is_isolated_pin_mut_self = |this: &Self, n| { + this.look_ahead(n, |token| token.is_ident_named(sym::pin)) + && is_isolated_mut_self(this, n + 1) + }; // Parse `self` or `self: TYPE`. We already know the current token is `self`. 
let parse_self_possibly_typed = |this: &mut Self, m| { let eself_ident = expect_self_ident(this); @@ -3011,27 +3024,36 @@ impl<'a> Parser<'a> { // else is parsed as a normal function parameter list, so some lookahead is required. let eself_lo = self.token.span; let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind { - token::BinOp(token::And) => { - let eself = if is_isolated_self(self, 1) { - // `&self` - self.bump(); - SelfKind::Region(None, Mutability::Not) - } else if is_isolated_mut_self(self, 1) { - // `&mut self` - self.bump(); - self.bump(); - SelfKind::Region(None, Mutability::Mut) - } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) { - // `&'lt self` - self.bump(); - let lt = self.expect_lifetime(); - SelfKind::Region(Some(lt), Mutability::Not) - } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) { - // `&'lt mut self` - self.bump(); - let lt = self.expect_lifetime(); - self.bump(); - SelfKind::Region(Some(lt), Mutability::Mut) + token::And => { + let has_lifetime = is_lifetime(self, 1); + let skip_lifetime_count = has_lifetime as usize; + let eself = if is_isolated_self(self, skip_lifetime_count + 1) { + // `&{'lt} self` + self.bump(); // & + let lifetime = has_lifetime.then(|| self.expect_lifetime()); + SelfKind::Region(lifetime, Mutability::Not) + } else if is_isolated_mut_self(self, skip_lifetime_count + 1) { + // `&{'lt} mut self` + self.bump(); // & + let lifetime = has_lifetime.then(|| self.expect_lifetime()); + self.bump(); // mut + SelfKind::Region(lifetime, Mutability::Mut) + } else if is_isolated_pin_const_self(self, skip_lifetime_count + 1) { + // `&{'lt} pin const self` + self.bump(); // & + let lifetime = has_lifetime.then(|| self.expect_lifetime()); + self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span); + self.bump(); // pin + self.bump(); // const + SelfKind::Pinned(lifetime, Mutability::Not) + } else if is_isolated_pin_mut_self(self, skip_lifetime_count + 1) { + // `&{'lt} pin mut self` + self.bump(); // & + let lifetime = has_lifetime.then(|| self.expect_lifetime()); + self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span); + self.bump(); // pin + self.bump(); // mut + SelfKind::Pinned(lifetime, Mutability::Mut) } else { // `¬_self` return Ok(None); @@ -3041,12 +3063,12 @@ impl<'a> Parser<'a> { (eself, self_ident, hi) } // `*self` - token::BinOp(token::Star) if is_isolated_self(self, 1) => { + token::Star if is_isolated_self(self, 1) => { self.bump(); recover_self_ptr(self)? } // `*mut self` and `*const self` - token::BinOp(token::Star) + token::Star if self.look_ahead(1, |t| t.is_mutability()) && is_isolated_self(self, 2) => { self.bump(); @@ -3071,11 +3093,13 @@ impl<'a> Parser<'a> { fn is_named_param(&self) -> bool { let offset = match &self.token.kind { - token::Interpolated(nt) => match &**nt { - token::NtPat(..) 
=> return self.look_ahead(1, |t| t == &token::Colon), + token::OpenDelim(Delimiter::Invisible(origin)) => match origin { + InvisibleOrigin::MetaVar(MetaVarKind::Pat(_)) => { + return self.check_noexpect_past_close_delim(&token::Colon); + } _ => 0, }, - token::BinOp(token::And) | token::AndAnd => 1, + token::And | token::AndAnd => 1, _ if self.token.is_keyword(kw::Mut) => 1, _ => 0, }; diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 80a33a76005..7dabc28c645 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -24,7 +24,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{ - self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind, + self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtPatKind, Token, + TokenKind, }; use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree}; use rustc_ast::util::case::Case; @@ -812,9 +813,9 @@ impl<'a> Parser<'a> { self.is_keyword_ahead(0, &[kw::Const]) && self.look_ahead(1, |t| match &t.kind { // async closures do not work with const closures, so we do not parse that here. - token::Ident(kw::Move | kw::Static, IdentIsRaw::No) - | token::OrOr - | token::BinOp(token::Or) => true, + token::Ident(kw::Move | kw::Static, IdentIsRaw::No) | token::OrOr | token::Or => { + true + } _ => false, }) } @@ -1650,7 +1651,7 @@ impl<'a> Parser<'a> { /// `::{` or `::*` fn is_import_coupler(&mut self) -> bool { self.check_path_sep_and_look_ahead(|t| { - matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::BinOp(token::Star)) + matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star) }) } @@ -1745,7 +1746,10 @@ pub enum ParseNtResult { Tt(TokenTree), Ident(Ident, IdentIsRaw), Lifetime(Ident, IdentIsRaw), + Pat(P<ast::Pat>, NtPatKind), Ty(P<ast::Ty>), + Meta(P<ast::AttrItem>), + Path(P<ast::Path>), Vis(P<ast::Visibility>), /// This variant will eventually be removed, along with `Token::Interpolate`. diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index f202f85752e..d537bc17ce3 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -32,7 +32,7 @@ impl<'a> Parser<'a> { | MetaVarKind::Expr { .. } | MetaVarKind::Ty { .. } | MetaVarKind::Literal // `true`, `false` - | MetaVarKind::Meta + | MetaVarKind::Meta { .. } | MetaVarKind::Path => true, MetaVarKind::Item @@ -49,11 +49,9 @@ impl<'a> Parser<'a> { fn nt_may_be_ident(nt: &Nonterminal) -> bool { match nt { NtStmt(_) - | NtPat(_) | NtExpr(_) | NtLiteral(_) // `true`, `false` - | NtMeta(_) - | NtPath(_) => true, + => true, NtItem(_) | NtBlock(_) => false, } @@ -99,7 +97,7 @@ impl<'a> Parser<'a> { token::NtLifetime(..) => true, token::Interpolated(nt) => match &**nt { NtBlock(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true, - NtItem(_) | NtPat(_) | NtMeta(_) | NtPath(_) => false, + NtItem(_) => false, }, token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k { MetaVarKind::Block @@ -109,7 +107,7 @@ impl<'a> Parser<'a> { MetaVarKind::Item | MetaVarKind::Pat(_) | MetaVarKind::Ty { .. } - | MetaVarKind::Meta + | MetaVarKind::Meta { .. 
} | MetaVarKind::Path | MetaVarKind::Vis => false, MetaVarKind::Lifetime | MetaVarKind::Ident | MetaVarKind::TT => { @@ -170,15 +168,18 @@ impl<'a> Parser<'a> { } }, NonterminalKind::Pat(pat_kind) => { - NtPat(self.collect_tokens_no_attrs(|this| match pat_kind { - PatParam { .. } => this.parse_pat_no_top_alt(None, None), - PatWithOr => this.parse_pat_no_top_guard( - None, - RecoverComma::No, - RecoverColon::No, - CommaRecoveryMode::EitherTupleOrPipe, - ), - })?) + return Ok(ParseNtResult::Pat( + self.collect_tokens_no_attrs(|this| match pat_kind { + PatParam { .. } => this.parse_pat_no_top_alt(None, None), + PatWithOr => this.parse_pat_no_top_guard( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + ), + })?, + pat_kind, + )); } NonterminalKind::Expr(_) => NtExpr(self.parse_expr_force_collect()?), NonterminalKind::Literal => { @@ -203,9 +204,13 @@ impl<'a> Parser<'a> { }; } NonterminalKind::Path => { - NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?)) + return Ok(ParseNtResult::Path(P( + self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))? + ))); + } + NonterminalKind::Meta => { + return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?))); } - NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(ForceCollect::Yes)?)), NonterminalKind::Vis => { return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| { this.parse_visibility(FollowedByType::Yes) diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 8ce749ec814..ec14c5718da 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -2,7 +2,8 @@ use std::ops::Bound; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, Token}; +use rustc_ast::token::NtPatKind::*; +use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token}; use rustc_ast::util::parser::ExprPrecedence; use rustc_ast::visit::{self, Visitor}; use rustc_ast::{ @@ -30,7 +31,7 @@ use crate::errors::{ UnexpectedVertVertInPattern, WrapInParens, }; use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal}; -use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole}; +use crate::{exp, maybe_recover_from_interpolated_ty_qpath}; #[derive(PartialEq, Copy, Clone)] pub enum Expected { @@ -357,7 +358,7 @@ impl<'a> Parser<'a> { ) }); match (is_end_ahead, &self.token.kind) { - (true, token::BinOp(token::Or) | token::OrOr) => { + (true, token::Or | token::OrOr) => { // A `|` or possibly `||` token shouldn't be here. Ban it. self.dcx().emit_err(TrailingVertNotAllowed { span: self.token.span, @@ -431,7 +432,11 @@ impl<'a> Parser<'a> { // `[` is included for indexing operations, // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. 
`tests/ui/parser/pat-lt-bracket-7.rs`), // `as` is included for type casts - let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or) + let has_trailing_operator = matches!( + self.token.kind, + token::Plus | token::Minus | token::Star | token::Slash | token::Percent + | token::Caret | token::And | token::Shl | token::Shr // excludes `Or` + ) || self.token == token::Question || (self.token == token::OpenDelim(Delimiter::Bracket) && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))) // excludes `[]` @@ -689,6 +694,27 @@ impl<'a> Parser<'a> { PatVisitor { parser: self, stmt, arm: None, field: None }.visit_stmt(stmt); } + fn eat_metavar_pat(&mut self) -> Option<P<Pat>> { + // Must try both kinds of pattern nonterminals. + if let Some(pat) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Pat(PatParam { .. })), + |this| this.parse_pat_no_top_alt(None, None), + ) { + Some(pat) + } else if let Some(pat) = self.eat_metavar_seq(MetaVarKind::Pat(PatWithOr), |this| { + this.parse_pat_no_top_guard( + None, + RecoverComma::No, + RecoverColon::No, + CommaRecoveryMode::EitherTupleOrPipe, + ) + }) { + Some(pat) + } else { + None + } + } + /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are /// allowed). fn parse_pat_with_range_pat( @@ -698,7 +724,10 @@ impl<'a> Parser<'a> { syntax_loc: Option<PatternLocation>, ) -> PResult<'a, P<Pat>> { maybe_recover_from_interpolated_ty_qpath!(self, true); - maybe_whole!(self, NtPat, |pat| pat); + + if let Some(pat) = self.eat_metavar_pat() { + return Ok(pat); + } let mut lo = self.token.span; @@ -738,7 +767,7 @@ impl<'a> Parser<'a> { self.recover_dotdotdot_rest_pat(lo) } else if let Some(form) = self.parse_range_end() { self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`. - } else if self.eat(exp!(Not)) { + } else if self.eat(exp!(Bang)) { // Parse `!` self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span); PatKind::Never @@ -794,7 +823,7 @@ impl<'a> Parser<'a> { }; let span = lo.to(self.prev_token.span); - if qself.is_none() && self.check(exp!(Not)) { + if qself.is_none() && self.check(exp!(Bang)) { self.parse_pat_mac_invoc(path)? } else if let Some(form) = self.parse_range_end() { let begin = self.mk_expr(span, ExprKind::Path(qself, path)); @@ -1043,10 +1072,8 @@ impl<'a> Parser<'a> { self.recover_additional_muts(); // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`. - if let token::Interpolated(nt) = &self.token.kind { - if let token::NtPat(..) = &**nt { - self.expected_ident_found_err().emit(); - } + if let Some(MetaVarKind::Pat(_)) = self.token.is_metavar_seq() { + self.expected_ident_found_err().emit(); } // Parse the pattern we hope to be an identifier. @@ -1232,7 +1259,7 @@ impl<'a> Parser<'a> { || self.look_ahead(dist, |t| { t.is_path_start() // e.g. `MY_CONST`; || *t == token::Dot // e.g. `.5` for recovery; - || matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus)) + || matches!(t.kind, token::Literal(..) | token::Minus) || t.is_bool_lit() || t.is_whole_expr() || t.is_lifetime() // recover `'a` instead of `'a'` @@ -1308,7 +1335,7 @@ impl<'a> Parser<'a> { | token::OpenDelim(Delimiter::Brace) // A struct pattern. | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. | token::PathSep // A tuple / struct variant pattern. - | token::Not)) // A macro expanding to a pattern. + | token::Bang)) // A macro expanding to a pattern. } /// Parses `ident` or `ident @ pat`. 
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index c24305ea9a8..9c6830c3672 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -15,9 +15,9 @@ use tracing::debug; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{Parser, Restrictions, TokenType}; -use crate::errors::{PathSingleColon, PathTripleColon}; +use crate::errors::{self, PathSingleColon, PathTripleColon}; +use crate::exp; use crate::parser::{CommaRecoveryMode, RecoverColon, RecoverComma}; -use crate::{errors, exp, maybe_whole}; /// Specifies how to parse a path. #[derive(Copy, Clone, PartialEq)] @@ -194,7 +194,11 @@ impl<'a> Parser<'a> { } }; - maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner())); + if let Some(path) = + self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type)) + { + return Ok(reject_generics_if_mod_style(self, path)); + } // If we have a `ty` metavar in the form of a path, reparse it directly as a path, instead // of reparsing it as a `ty` and then extracting the path. @@ -301,10 +305,7 @@ impl<'a> Parser<'a> { let is_args_start = |token: &Token| { matches!( token.kind, - token::Lt - | token::BinOp(token::Shl) - | token::OpenDelim(Delimiter::Parenthesis) - | token::LArrow + token::Lt | token::Shl | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow ) }; let check_args_start = |this: &mut Self| { @@ -378,11 +379,14 @@ impl<'a> Parser<'a> { self.psess.gated_spans.gate(sym::return_type_notation, span); + let prev_lo = self.prev_token.span.shrink_to_hi(); if self.eat_noexpect(&token::RArrow) { let lo = self.prev_token.span; let ty = self.parse_ty()?; + let span = lo.to(ty.span); + let suggestion = prev_lo.to(ty.span); self.dcx() - .emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) }); + .emit_err(errors::BadReturnTypeNotationOutput { span, suggestion }); } P(ast::GenericArgs::ParenthesizedElided(span)) diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index a2699b077fc..3a9dc5ce798 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -176,7 +176,7 @@ impl<'a> Parser<'a> { let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let path = this.parse_path(PathStyle::Expr)?; - if this.eat(exp!(Not)) { + if this.eat(exp!(Bang)) { let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?; return Ok(( stmt_mac, @@ -442,7 +442,16 @@ impl<'a> Parser<'a> { /// Parses the RHS of a local variable declaration (e.g., `= 14;`). fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> { let eq_consumed = match self.token.kind { - token::BinOpEq(..) => { + token::PlusEq + | token::MinusEq + | token::StarEq + | token::SlashEq + | token::PercentEq + | token::CaretEq + | token::AndEq + | token::OrEq + | token::ShlEq + | token::ShrEq => { // Recover `let x <op>= 1` as `let x = 1` We must not use `+ BytePos(1)` here // because `<op>` can be a multi-byte lookalike that was recovered, e.g. `➖=` (the // `➖` is a U+2796 Heavy Minus Sign Unicode Character) that was recovered as a @@ -688,7 +697,7 @@ impl<'a> Parser<'a> { if self.token == token::Eof { break; } - if self.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) { + if self.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) { // Account for `<<<<<<<` diff markers. 
We can't proactively error here because // that can be a valid path start, so we snapshot and reparse only we've // encountered another parse error. diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 8b8c81a77a0..471966d086d 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -2291,7 +2291,7 @@ fn string_to_tts_macro() { Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. }, _, ), - TokenTree::Token(Token { kind: token::Not, .. }, _), + TokenTree::Token(Token { kind: token::Bang, .. }, _), TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _), TokenTree::Delimited(.., macro_delim, macro_tts), ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => { diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs index 40631d9154d..110546d0ba6 100644 --- a/compiler/rustc_parse/src/parser/token_type.rs +++ b/compiler/rustc_parse/src/parser/token_type.rs @@ -25,7 +25,7 @@ pub enum TokenType { Gt, AndAnd, OrOr, - Not, + Bang, Tilde, // BinOps @@ -172,7 +172,7 @@ impl TokenType { Gt, AndAnd, OrOr, - Not, + Bang, Tilde, Plus, @@ -366,7 +366,7 @@ impl TokenType { TokenType::Gt => "`>`", TokenType::AndAnd => "`&&`", TokenType::OrOr => "`||`", - TokenType::Not => "`!`", + TokenType::Bang => "`!`", TokenType::Tilde => "`~`", TokenType::Plus => "`+`", @@ -445,12 +445,6 @@ macro_rules! exp { token_type: $crate::parser::token_type::TokenType::$tok } }; - (@binop, $op:ident) => { - $crate::parser::token_type::ExpTokenPair { - tok: &rustc_ast::token::BinOp(rustc_ast::token::BinOpToken::$op), - token_type: $crate::parser::token_type::TokenType::$op, - } - }; (@open, $delim:ident, $token_type:ident) => { $crate::parser::token_type::ExpTokenPair { tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim), @@ -485,8 +479,13 @@ macro_rules! exp { (Gt) => { exp!(@tok, Gt) }; (AndAnd) => { exp!(@tok, AndAnd) }; (OrOr) => { exp!(@tok, OrOr) }; - (Not) => { exp!(@tok, Not) }; + (Bang) => { exp!(@tok, Bang) }; (Tilde) => { exp!(@tok, Tilde) }; + (Plus) => { exp!(@tok, Plus) }; + (Minus) => { exp!(@tok, Minus) }; + (Star) => { exp!(@tok, Star) }; + (And) => { exp!(@tok, And) }; + (Or) => { exp!(@tok, Or) }; (At) => { exp!(@tok, At) }; (Dot) => { exp!(@tok, Dot) }; (DotDot) => { exp!(@tok, DotDot) }; @@ -502,12 +501,6 @@ macro_rules! 
exp { (Question) => { exp!(@tok, Question) }; (Eof) => { exp!(@tok, Eof) }; - (Plus) => { exp!(@binop, Plus) }; - (Minus) => { exp!(@binop, Minus) }; - (Star) => { exp!(@binop, Star) }; - (And) => { exp!(@binop, And) }; - (Or) => { exp!(@binop, Or) }; - (OpenParen) => { exp!(@open, Parenthesis, OpenParen) }; (OpenBrace) => { exp!(@open, Brace, OpenBrace) }; (OpenBracket) => { exp!(@open, Bracket, OpenBracket) }; diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 18af0a1c79a..b45ebae079c 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -1,5 +1,5 @@ use rustc_ast::ptr::P; -use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind}; use rustc_ast::util::case::Case; use rustc_ast::{ self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy, @@ -86,7 +86,7 @@ enum AllowCVariadic { /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// that `IDENT` is not the ident of a fn trait. fn can_continue_type_after_non_fn_ident(t: &Token) -> bool { - t == &token::PathSep || t == &token::Lt || t == &token::BinOp(token::Shl) + t == &token::PathSep || t == &token::Lt || t == &token::Shl } fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool { @@ -260,7 +260,7 @@ impl<'a> Parser<'a> { let mut impl_dyn_multi = false; let kind = if self.check(exp!(OpenParen)) { self.parse_ty_tuple_or_parens(lo, allow_plus)? - } else if self.eat(exp!(Not)) { + } else if self.eat(exp!(Bang)) { // Never type `!` TyKind::Never } else if self.eat(exp!(Star)) { @@ -399,7 +399,7 @@ impl<'a> Parser<'a> { let mut trailing_plus = false; let (ts, trailing) = self.parse_paren_comma_seq(|p| { let ty = p.parse_ty()?; - trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus); + trailing_plus = p.prev_token == TokenKind::Plus; Ok(ty) })?; @@ -735,7 +735,7 @@ impl<'a> Parser<'a> { // Always parse bounds greedily for better error recovery. let bounds = self.parse_generic_bounds()?; - *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus); + *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus; Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)) } @@ -747,11 +747,7 @@ impl<'a> Parser<'a> { self.expect_lt()?; let (args, _, _) = self.parse_seq_to_before_tokens( &[exp!(Gt)], - &[ - &TokenKind::Ge, - &TokenKind::BinOp(BinOpToken::Shr), - &TokenKind::BinOpEq(BinOpToken::Shr), - ], + &[&TokenKind::Ge, &TokenKind::Shr, &TokenKind::Shr], SeqSep::trailing_allowed(exp!(Comma)), |self_| { if self_.check_keyword(exp!(SelfUpper)) { @@ -781,7 +777,7 @@ impl<'a> Parser<'a> { self.check_keyword(exp!(Dyn)) && (self.token.uninterpolated_span().at_least_rust_2018() || self.look_ahead(1, |t| { - (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star)) + (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star) && !can_continue_type_after_non_fn_ident(t) })) } @@ -803,7 +799,7 @@ impl<'a> Parser<'a> { // Always parse bounds greedily for better error recovery. 
let bounds = self.parse_generic_bounds()?; - *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus); + *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus; Ok(TyKind::TraitObject(bounds, syntax)) } @@ -821,7 +817,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, TyKind> { // Simple path let path = self.parse_path_inner(PathStyle::Type, ty_generics)?; - if self.eat(exp!(Not)) { + if self.eat(exp!(Bang)) { // Macro invocation in type position Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? }))) } else if allow_plus == AllowPlus::Yes && self.check_plus() { @@ -874,7 +870,7 @@ impl<'a> Parser<'a> { fn can_begin_bound(&mut self) -> bool { self.check_path() || self.check_lifetime() - || self.check(exp!(Not)) + || self.check(exp!(Bang)) || self.check(exp!(Question)) || self.check(exp!(Tilde)) || self.check_keyword(exp!(For)) @@ -1025,7 +1021,7 @@ impl<'a> Parser<'a> { let polarity = if self.eat(exp!(Question)) { BoundPolarity::Maybe(self.prev_token.span) - } else if self.eat(exp!(Not)) { + } else if self.eat(exp!(Bang)) { self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span); BoundPolarity::Negative(self.prev_token.span) } else { |
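
Alongside the token flattening, the diff above also collapses `rustc_ast::util::parser::AssocOp`: the per-operator variants (`AssocOp::Equal`, `AssocOp::Less`, `AssocOp::DotDot`, `AssocOp::As`, ...) are replaced by `AssocOp::Binary(BinOpKind)`, `AssocOp::AssignOp(BinOpKind)`, `AssocOp::Cast` and `AssocOp::Range(RangeLimits)`, which is why the `expr.rs` and `diagnostics.rs` hunks rewrite their match arms. A rough stand-in sketch of the new dispatch shape (simplified local types invented for illustration, not the compiler's actual API):

// Simplified stand-ins for BinOpKind, RangeLimits and the reshaped AssocOp.
enum BinOpKind { Add, Eq, Lt }
enum RangeLimits { HalfOpen, Closed }

enum AssocOp {
    Binary(BinOpKind),   // replaces Add, Subtract, Equal, Less, ...
    AssignOp(BinOpKind), // replaces AssignOp(BinOpToken)
    Assign,              // plain `=`
    Cast,                // was AssocOp::As
    Range(RangeLimits),  // replaces DotDot / DotDotEq
}

// Where the old parser matched each binary-operator variant individually and
// then called op.to_ast_binop().unwrap(), the BinOpKind is now carried directly.
fn describe(op: AssocOp) -> &'static str {
    match op {
        AssocOp::Binary(_) => "ordinary binary operator",
        AssocOp::AssignOp(_) => "compound assignment (`+=`, `>>=`, ...)",
        AssocOp::Assign => "assignment",
        AssocOp::Cast => "`as` cast",
        AssocOp::Range(RangeLimits::HalfOpen) => "`..` range",
        AssocOp::Range(RangeLimits::Closed) => "`..=` range",
    }
}

fn main() {
    println!("{}", describe(AssocOp::Binary(BinOpKind::Add)));
    println!("{}", describe(AssocOp::Range(RangeLimits::Closed)));
}
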
