| author | Nicholas Nethercote <n.nethercote@gmail.com> | 2024-12-04 15:55:06 +1100 |
|---|---|---|
| committer | Nicholas Nethercote <n.nethercote@gmail.com> | 2024-12-19 16:05:41 +1100 |
| commit | b9bf0b4b10148aa914243a527d9010aba9b7b827 | |
| tree | 2478d44acd2d710543d168353b729e0716aefb70 /compiler/rustc_parse/src/parser/expr.rs | |
| parent | d5370d981f58ebadf575f075a6f0d8c35bc704e8 | |
Speed up `Parser::expected_token_types`.
The parser pushes a `TokenType` to `Parser::expected_token_types` on every call to the various `check`/`eat` methods, and clears it on every call to `bump`. Some of those `TokenType` values are full tokens that require cloning and dropping. This is a *lot* of work for something that is only used in error messages, and it accounts for a significant fraction of parsing execution time.

This commit overhauls `TokenType` so that `Parser::expected_token_types` can be implemented as a bitset. This requires changing `TokenType` to a C-style parameterless enum, and adding `TokenTypeSet`, which uses a `u128` for the bits. (The new `TokenType` has 105 variants.)

The new types `ExpTokenPair` and `ExpKeywordPair` are now arguments to the `check`/`eat` methods. This is for maximum speed. The elements in the pairs are always statically known; e.g. a `token::BinOp(token::Star)` is always paired with a `TokenType::Star`. So we now compute `TokenType`s in advance and pass them in to `check`/`eat`, rather than the current approach of constructing them on insertion into `expected_token_types`.

Values of these pair types can be produced by the new `exp!` macro, which is used at every `check`/`eat` call site. The macro is for convenience, allowing any pair to be generated from a single identifier.

The ident/keyword filtering in `expected_one_of_not_found` is no longer necessary. It was there to account for some sloppiness in `TokenKind`/`TokenType` comparisons.

The existing `TokenType` is moved to a new file, `token_type.rs`, and all its new infrastructure is added to that file. There is more boilerplate code than I would like, but I can't see how to make it shorter.
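To make the mechanism concrete, below is a minimal, self-contained sketch of the idea, assuming simplified stand-ins for the types involved (a four-variant `TokenType`, a `TokenKind`, `TokenTypeSet`, `ExpTokenPair`, and an `exp!`-style macro). It is illustrative only, not the code this commit adds to rustc; `ExpKeywordPair` and the keyword-checking methods follow the same pattern.

```rust
// A minimal sketch of the bitset + `exp!` approach (illustrative only: names,
// variants, and signatures are simplified stand-ins, not rustc's real API).

#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq)]
enum TokenKind { Comma, Semi, OpenParen, CloseParen }

// A C-style, parameterless enum: every variant is just a small discriminant,
// so the set of "expected token types" fits in one bit per variant.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum TokenType { Comma = 0, Semi = 1, OpenParen = 2, CloseParen = 3 }

// The real commit uses a `u128`, which comfortably holds 105 variants.
#[derive(Clone, Copy, Default)]
struct TokenTypeSet(u128);

impl TokenTypeSet {
    fn insert(&mut self, tt: TokenType) {
        self.0 |= 1u128 << (tt as u32); // one OR instead of pushing a full token value
    }
    fn clear(&mut self) {
        self.0 = 0; // clearing on `bump` is a single store
    }
    fn contains(&self, tt: TokenType) -> bool {
        (self.0 & (1u128 << (tt as u32))) != 0
    }
}

// Both halves of the pair are statically known at each call site, so no
// `TokenType` has to be constructed when an expectation is recorded.
#[derive(Clone, Copy)]
struct ExpTokenPair {
    tok: TokenKind,
    token_type: TokenType,
}

// An `exp!`-style macro: a single identifier expands to the matching pair.
macro_rules! exp {
    (Comma) => { ExpTokenPair { tok: TokenKind::Comma, token_type: TokenType::Comma } };
    (Semi) => { ExpTokenPair { tok: TokenKind::Semi, token_type: TokenType::Semi } };
    (OpenParen) => { ExpTokenPair { tok: TokenKind::OpenParen, token_type: TokenType::OpenParen } };
    (CloseParen) => { ExpTokenPair { tok: TokenKind::CloseParen, token_type: TokenType::CloseParen } };
}

struct Parser {
    token: TokenKind,
    expected_token_types: TokenTypeSet,
}

impl Parser {
    // `check` reports whether the current token matches and, if not, records
    // what was expected as a single bit for later error messages.
    fn check(&mut self, exp: ExpTokenPair) -> bool {
        let matched = self.token == exp.tok;
        if !matched {
            self.expected_token_types.insert(exp.token_type);
        }
        matched
    }

    fn bump(&mut self, next: TokenKind) {
        self.token = next;
        self.expected_token_types.clear();
    }
}

fn main() {
    let mut p = Parser { token: TokenKind::Comma, expected_token_types: TokenTypeSet::default() };
    assert!(p.check(exp!(Comma)));
    assert!(!p.check(exp!(CloseParen)));
    assert!(p.expected_token_types.contains(TokenType::CloseParen));
    p.bump(TokenKind::Semi);
    assert!(!p.expected_token_types.contains(TokenType::CloseParen));
}
```

In this shape, recording an expectation is a single OR and clearing on `bump` is a single store, instead of pushing (and later cloning and dropping) full `TokenType` values, which is the cost the commit message describes.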
Diffstat (limited to 'compiler/rustc_parse/src/parser/expr.rs')
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 308 |
1 file changed, 151 insertions, 157 deletions
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 1e84b2a0cf8..2f4adf2af9e 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -35,10 +35,10 @@ use super::diagnostics::SnapshotParser; use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, - SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos, + AttrWrapper, BlockMode, ClosureSpans, ExpTokenPair, ForceCollect, Parser, PathStyle, + Restrictions, SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos, }; -use crate::{errors, maybe_recover_from_interpolated_ty_qpath}; +use crate::{errors, exp, maybe_recover_from_interpolated_ty_qpath}; #[derive(Debug)] pub(super) enum DestructuredFloat { @@ -153,7 +153,7 @@ impl<'a> Parser<'a> { return Ok((lhs, parsed_something)); } - self.expected_token_types.push(TokenType::Operator); + self.expected_token_types.insert(TokenType::Operator); while let Some(op) = self.check_assoc_op() { let lhs_span = self.interpolated_or_expr_span(&lhs); let cur_op_span = self.token.span; @@ -873,9 +873,9 @@ impl<'a> Parser<'a> { /// Parse `mut?` or `raw [ const | mut ]`. fn parse_borrow_modifiers(&mut self) -> (ast::BorrowKind, ast::Mutability) { - if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) { + if self.check_keyword(exp!(Raw)) && self.look_ahead(1, Token::is_mutability) { // `raw [ const | mut ]`. - let found_raw = self.eat_keyword(kw::Raw); + let found_raw = self.eat_keyword(exp!(Raw)); assert!(found_raw); let mutability = self.parse_const_or_mut().unwrap(); (ast::BorrowKind::Raw, mutability) @@ -908,7 +908,7 @@ impl<'a> Parser<'a> { // a `return` which could be suggested otherwise. self.eat_noexpect(&token::Question) } else { - self.eat(&token::Question) + self.eat(exp!(Question)) }; if has_question { // `expr?` @@ -926,7 +926,7 @@ impl<'a> Parser<'a> { self.dcx().emit_err(errors::ExprRArrowCall { span }); true } else { - self.eat(&token::Dot) + self.eat(exp!(Dot)) }; if has_dot { // expr.f @@ -1251,7 +1251,7 @@ impl<'a> Parser<'a> { .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args))); match self.maybe_recover_struct_lit_bad_delims(lo, open_paren, seq, snapshot) { Ok(expr) => expr, - Err(err) => self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err), + Err(err) => self.recover_seq_parse_error(exp!(OpenParen), exp!(CloseParen), lo, err), } } @@ -1268,10 +1268,8 @@ impl<'a> Parser<'a> { match (self.may_recover(), seq, snapshot) { (true, Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => { snapshot.bump(); // `(` - match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) { - Ok((fields, ..)) - if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) => - { + match snapshot.parse_struct_fields(path.clone(), false, exp!(CloseParen)) { + Ok((fields, ..)) if snapshot.eat(exp!(CloseParen)) => { // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. 
self.restore_snapshot(snapshot); @@ -1328,7 +1326,7 @@ impl<'a> Parser<'a> { self.bump(); // `[` let index = self.parse_expr()?; self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?; - self.expect(&token::CloseDelim(Delimiter::Bracket))?; + self.expect(exp!(CloseBracket))?; Ok(self.mk_expr( lo.to(self.prev_token.span), self.mk_index(base, index, open_delim_span.to(self.prev_token.span)), @@ -1337,12 +1335,12 @@ impl<'a> Parser<'a> { /// Assuming we have just parsed `.`, continue parsing into an expression. fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await) { + if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { return Ok(self.mk_await_expr(self_arg, lo)); } // Post-fix match - if self.eat_keyword(kw::Match) { + if self.eat_keyword(exp!(Match)) { let match_span = self.prev_token.span; self.psess.gated_spans.gate(sym::postfix_match, match_span); return self.parse_match_block(lo, match_span, self_arg, MatchKind::Postfix); @@ -1350,10 +1348,10 @@ impl<'a> Parser<'a> { let fn_span_lo = self.token.span; let mut seg = self.parse_path_segment(PathStyle::Expr, None)?; - self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]); + self.check_trailing_angle_brackets(&seg, &[exp!(OpenParen)]); self.check_turbofish_missing_angle_brackets(&mut seg); - if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { + if self.check(exp!(OpenParen)) { // Method call `expr.f()` let args = self.parse_expr_paren_seq()?; let fn_span = fn_span_lo.to(self.prev_token.span); @@ -1415,18 +1413,18 @@ impl<'a> Parser<'a> { let restrictions = self.restrictions; self.with_res(restrictions - Restrictions::ALLOW_LET, |this| { - // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. + // Note: adding new syntax here? Don't forget to adjust `TokenKind::can_begin_expr()`. let lo = this.token.span; if let token::Literal(_) = this.token.kind { // This match arm is a special-case of the `_` match arm below and // could be removed without changing functionality, but it's faster // to have it here, especially for programs with large constants. this.parse_expr_lit() - } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { + } else if this.check(exp!(OpenParen)) { this.parse_expr_tuple_parens(restrictions) - } else if this.check(&token::OpenDelim(Delimiter::Brace)) { + } else if this.check(exp!(OpenBrace)) { this.parse_expr_block(None, lo, BlockCheckMode::Default) - } else if this.check(&token::BinOp(token::Or)) || this.check(&token::OrOr) { + } else if this.check(exp!(Or)) || this.check(exp!(OrOr)) { this.parse_expr_closure().map_err(|mut err| { // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }` // then suggest parens around the lhs. 
@@ -1435,41 +1433,41 @@ impl<'a> Parser<'a> { } err }) - } else if this.check(&token::OpenDelim(Delimiter::Bracket)) { - this.parse_expr_array_or_repeat(Delimiter::Bracket) + } else if this.check(exp!(OpenBracket)) { + this.parse_expr_array_or_repeat(exp!(CloseBracket)) } else if this.is_builtin() { this.parse_expr_builtin() } else if this.check_path() { this.parse_expr_path_start() - } else if this.check_keyword(kw::Move) - || this.check_keyword(kw::Static) + } else if this.check_keyword(exp!(Move)) + || this.check_keyword(exp!(Static)) || this.check_const_closure() { this.parse_expr_closure() - } else if this.eat_keyword(kw::If) { + } else if this.eat_keyword(exp!(If)) { this.parse_expr_if() - } else if this.check_keyword(kw::For) { + } else if this.check_keyword(exp!(For)) { if this.choose_generics_over_qpath(1) { this.parse_expr_closure() } else { - assert!(this.eat_keyword(kw::For)); + assert!(this.eat_keyword(exp!(For))); this.parse_expr_for(None, lo) } - } else if this.eat_keyword(kw::While) { + } else if this.eat_keyword(exp!(While)) { this.parse_expr_while(None, lo) } else if let Some(label) = this.eat_label() { this.parse_expr_labeled(label, true) - } else if this.eat_keyword(kw::Loop) { + } else if this.eat_keyword(exp!(Loop)) { this.parse_expr_loop(None, lo).map_err(|mut err| { err.span_label(lo, "while parsing this `loop` expression"); err }) - } else if this.eat_keyword(kw::Match) { + } else if this.eat_keyword(exp!(Match)) { this.parse_expr_match().map_err(|mut err| { err.span_label(lo, "while parsing this `match` expression"); err }) - } else if this.eat_keyword(kw::Unsafe) { + } else if this.eat_keyword(exp!(Unsafe)) { this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err( |mut err| { err.span_label(lo, "while parsing this `unsafe` expression"); @@ -1481,23 +1479,23 @@ impl<'a> Parser<'a> { } else if this.may_recover() && this.is_do_catch_block() { this.recover_do_catch() } else if this.is_try_block() { - this.expect_keyword(kw::Try)?; + this.expect_keyword(exp!(Try))?; this.parse_try_block(lo) - } else if this.eat_keyword(kw::Return) { + } else if this.eat_keyword(exp!(Return)) { this.parse_expr_return() - } else if this.eat_keyword(kw::Continue) { + } else if this.eat_keyword(exp!(Continue)) { this.parse_expr_continue(lo) - } else if this.eat_keyword(kw::Break) { + } else if this.eat_keyword(exp!(Break)) { this.parse_expr_break() - } else if this.eat_keyword(kw::Yield) { + } else if this.eat_keyword(exp!(Yield)) { this.parse_expr_yield() } else if this.is_do_yeet() { this.parse_expr_yeet() - } else if this.eat_keyword(kw::Become) { + } else if this.eat_keyword(exp!(Become)) { this.parse_expr_become() - } else if this.check_keyword(kw::Let) { + } else if this.check_keyword(exp!(Let)) { this.parse_expr_let(restrictions) - } else if this.eat_keyword(kw::Underscore) { + } else if this.eat_keyword(exp!(Underscore)) { Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore)) } else if this.token.uninterpolated_span().at_least_rust_2018() { // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. @@ -1505,11 +1503,11 @@ impl<'a> Parser<'a> { // check for `gen {}` and `gen move {}` // or `async gen {}` and `async gen move {}` && (this.is_gen_block(kw::Gen, 0) - || (this.check_keyword(kw::Async) && this.is_gen_block(kw::Gen, 1))) + || (this.check_keyword(exp!(Async)) && this.is_gen_block(kw::Gen, 1))) { // FIXME: (async) gen closures aren't yet parsed. 
this.parse_gen_block() - } else if this.check_keyword(kw::Async) { + } else if this.check_keyword(exp!(Async)) { // FIXME(gen_blocks): Parse `gen async` and suggest swap if this.is_gen_block(kw::Async, 0) { // Check for `async {` and `async move {`, @@ -1541,15 +1539,20 @@ impl<'a> Parser<'a> { fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> { let lo = self.token.span; - self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; + self.expect(exp!(OpenParen))?; let (es, trailing_comma) = match self.parse_seq_to_end( - &token::CloseDelim(Delimiter::Parenthesis), - SeqSep::trailing_allowed(token::Comma), + exp!(CloseParen), + SeqSep::trailing_allowed(exp!(Comma)), |p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)), ) { Ok(x) => x, Err(err) => { - return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err)); + return Ok(self.recover_seq_parse_error( + exp!(OpenParen), + exp!(CloseParen), + lo, + err, + )); } }; let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) { @@ -1563,25 +1566,24 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr) } - fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> { + fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, P<Expr>> { let lo = self.token.span; self.bump(); // `[` or other open delim - let close = &token::CloseDelim(close_delim); let kind = if self.eat(close) { // Empty vector ExprKind::Array(ThinVec::new()) } else { // Non-empty vector let first_expr = self.parse_expr()?; - if self.eat(&token::Semi) { + if self.eat(exp!(Semi)) { // Repeating array syntax: `[ 0; 512 ]` let count = self.parse_expr_anon_const()?; self.expect(close)?; ExprKind::Repeat(first_expr, count) - } else if self.eat(&token::Comma) { + } else if self.eat(exp!(Comma)) { // Vector with two or more elements. - let sep = SeqSep::trailing_allowed(token::Comma); + let sep = SeqSep::trailing_allowed(exp!(Comma)); let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?; exprs.insert(0, first_expr); ExprKind::Array(exprs) @@ -1615,7 +1617,7 @@ impl<'a> Parser<'a> { }; // `!`, as an operator, is prefix, so we know this isn't that. - let (span, kind) = if self.eat(&token::Not) { + let (span, kind) = if self.eat(exp!(Not)) { // MACRO INVOCATION expression if qself.is_some() { self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span)); @@ -1623,7 +1625,7 @@ impl<'a> Parser<'a> { let lo = path.span; let mac = P(MacCall { path, args: self.parse_delim_args()? 
}); (lo.to(self.prev_token.span), ExprKind::MacCall(mac)) - } else if self.check(&token::OpenDelim(Delimiter::Brace)) + } else if self.check(exp!(OpenBrace)) && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path) { if qself.is_some() { @@ -1646,13 +1648,13 @@ impl<'a> Parser<'a> { ) -> PResult<'a, P<Expr>> { let lo = label_.ident.span; let label = Some(label_); - let ate_colon = self.eat(&token::Colon); + let ate_colon = self.eat(exp!(Colon)); let tok_sp = self.token.span; - let expr = if self.eat_keyword(kw::While) { + let expr = if self.eat_keyword(exp!(While)) { self.parse_expr_while(label, lo) - } else if self.eat_keyword(kw::For) { + } else if self.eat_keyword(exp!(For)) { self.parse_expr_for(label, lo) - } else if self.eat_keyword(kw::Loop) { + } else if self.eat_keyword(exp!(Loop)) { self.parse_expr_loop(label, lo) } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() @@ -1958,7 +1960,7 @@ impl<'a> Parser<'a> { self.psess.gated_spans.gate(sym::builtin_syntax, ident.span); self.bump(); - self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?; + self.expect(exp!(OpenParen))?; let ret = if let Some(res) = parse(self, lo, ident)? { Ok(res) } else { @@ -1968,7 +1970,7 @@ impl<'a> Parser<'a> { }); return Err(err); }; - self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?; + self.expect(exp!(CloseParen))?; ret } @@ -1976,14 +1978,12 @@ impl<'a> Parser<'a> { /// Built-in macro for `offset_of!` expressions. pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> { let container = self.parse_ty()?; - self.expect(&TokenKind::Comma)?; + self.expect(exp!(Comma))?; let fields = self.parse_floating_field_access()?; let trailing_comma = self.eat_noexpect(&TokenKind::Comma); - if let Err(mut e) = - self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)]) - { + if let Err(mut e) = self.expect_one_of(&[], &[exp!(CloseParen)]) { if trailing_comma { e.note("unexpected third argument to offset_of"); } else { @@ -2006,7 +2006,7 @@ impl<'a> Parser<'a> { /// Built-in macro for type ascription expressions. pub(crate) fn parse_expr_type_ascribe(&mut self, lo: Span) -> PResult<'a, P<Expr>> { let expr = self.parse_expr()?; - self.expect(&token::Comma)?; + self.expect(exp!(Comma))?; let ty = self.parse_ty()?; let span = lo.to(self.token.span); Ok(self.mk_expr(span, ExprKind::Type(expr, ty))) @@ -2018,7 +2018,7 @@ impl<'a> Parser<'a> { kind: UnsafeBinderCastKind, ) -> PResult<'a, P<Expr>> { let expr = self.parse_expr()?; - let ty = if self.eat(&TokenKind::Comma) { Some(self.parse_ty()?) } else { None }; + let ty = if self.eat(exp!(Comma)) { Some(self.parse_ty()?) } else { None }; let span = lo.to(self.token.span); Ok(self.mk_expr(span, ExprKind::UnsafeBinderCast(kind, expr, ty))) } @@ -2214,7 +2214,7 @@ impl<'a> Parser<'a> { } let lo = self.token.span; - let minus_present = self.eat(&token::BinOp(token::Minus)); + let minus_present = self.eat(exp!(Minus)); let (token_lit, span) = self.parse_token_lit()?; let expr = self.mk_expr(span, ExprKind::Lit(token_lit)); @@ -2236,7 +2236,7 @@ impl<'a> Parser<'a> { /// expression. 
fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> { let mut snapshot = self.create_snapshot_for_diagnostic(); - match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) { + match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) { Ok(arr) => { let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces { span: arr.span, @@ -2272,8 +2272,8 @@ impl<'a> Parser<'a> { let mut snapshot = self.create_snapshot_for_diagnostic(); snapshot.bump(); match snapshot.parse_seq_to_before_end( - &token::CloseDelim(Delimiter::Bracket), - SeqSep::trailing_allowed(token::Comma), + exp!(CloseBracket), + SeqSep::trailing_allowed(exp!(Comma)), |p| p.parse_expr(), ) { Ok(_) @@ -2337,7 +2337,7 @@ impl<'a> Parser<'a> { let lo = self.token.span; let before = self.prev_token.clone(); - let binder = if self.check_keyword(kw::For) { + let binder = if self.check_keyword(exp!(For)) { let lo = self.token.span; let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?; let span = lo.to(self.prev_token.span); @@ -2352,7 +2352,7 @@ impl<'a> Parser<'a> { let constness = self.parse_closure_constness(); let movability = - if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable }; + if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) @@ -2433,10 +2433,10 @@ impl<'a> Parser<'a> { /// Parses an optional `move` prefix to a closure-like construct. fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> { - if self.eat_keyword(kw::Move) { + if self.eat_keyword(exp!(Move)) { let move_kw_span = self.prev_token.span; // Check for `move async` and recover - if self.check_keyword(kw::Async) { + if self.check_keyword(exp!(Async)) { let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo); Err(self .dcx() @@ -2453,15 +2453,15 @@ impl<'a> Parser<'a> { fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> { let arg_start = self.token.span.lo(); - let inputs = if self.eat(&token::OrOr) { + let inputs = if self.eat(exp!(OrOr)) { ThinVec::new() } else { - self.expect(&token::BinOp(token::Or))?; + self.expect(exp!(Or))?; let args = self .parse_seq_to_before_tokens( - &[&token::BinOp(token::Or)], + &[exp!(Or)], &[&token::OrOr], - SeqSep::trailing_allowed(token::Comma), + SeqSep::trailing_allowed(exp!(Comma)), |p| p.parse_fn_block_param(), )? .0; @@ -2481,7 +2481,7 @@ impl<'a> Parser<'a> { let attrs = self.parse_outer_attributes()?; self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?; - let ty = if this.eat(&token::Colon) { + let ty = if this.eat(exp!(Colon)) { this.parse_ty()? } else { this.mk_ty(pat.span, TyKind::Infer) @@ -2566,7 +2566,7 @@ impl<'a> Parser<'a> { } else { let attrs = self.parse_outer_attributes()?; // For recovery. let maybe_fatarrow = self.token.clone(); - let block = if self.check(&token::OpenDelim(Delimiter::Brace)) { + let block = if self.check(exp!(OpenBrace)) { self.parse_block()? } else if let Some(block) = recover_block_from_condition(self) { block @@ -2609,7 +2609,7 @@ impl<'a> Parser<'a> { self.error_on_if_block_attrs(lo, false, block.span, attrs); block }; - let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None }; + let els = if self.eat_keyword(exp!(Else)) { Some(self.parse_expr_else()?) 
} else { None }; Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els))) } @@ -2662,7 +2662,7 @@ impl<'a> Parser<'a> { }); self.bump(); } else { - self.expect(&token::Eq)?; + self.expect(exp!(Eq))?; } let attrs = self.parse_outer_attributes()?; let (expr, _) = @@ -2675,9 +2675,9 @@ impl<'a> Parser<'a> { fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> { let else_span = self.prev_token.span; // `else` let attrs = self.parse_outer_attributes()?; // For recovery. - let expr = if self.eat_keyword(kw::If) { + let expr = if self.eat_keyword(exp!(If)) { ensure_sufficient_stack(|| self.parse_expr_if())? - } else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) { + } else if self.check(exp!(OpenBrace)) { self.parse_simple_block()? } else { let snapshot = self.create_snapshot_for_diagnostic(); @@ -2719,7 +2719,7 @@ impl<'a> Parser<'a> { // while true {} // } // ^ - if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) + if self.check(exp!(OpenBrace)) && (classify::expr_requires_semi_to_be_stmt(&cond) || matches!(cond.kind, ExprKind::MacCall(..))) => @@ -2805,7 +2805,7 @@ impl<'a> Parser<'a> { begin_paren, ) { (Ok(pat), _) => pat, // Happy path. - (Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => { + (Err(err), Some((start_span, left))) if self.eat_keyword(exp!(In)) => { // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would // happen right before the return of this method. let attrs = self.parse_outer_attributes()?; @@ -2839,7 +2839,7 @@ impl<'a> Parser<'a> { } (Err(err), _) => return Err(err), // Some other error, bubble up. }; - if !self.eat_keyword(kw::In) { + if !self.eat_keyword(exp!(In)) { self.error_missing_in_for_loop(); } self.check_for_for_in_in_typo(self.prev_token.span); @@ -2851,7 +2851,7 @@ impl<'a> Parser<'a> { /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten). fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> { let is_await = - self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await); + self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)); if is_await { self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span); @@ -2981,7 +2981,7 @@ impl<'a> Parser<'a> { scrutinee: P<Expr>, match_kind: MatchKind, ) -> PResult<'a, P<Expr>> { - if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) { + if let Err(mut e) = self.expect(exp!(OpenBrace)) { if self.token == token::Semi { e.span_suggestion_short( match_span, @@ -3121,7 +3121,7 @@ impl<'a> Parser<'a> { let span_before_body = this.prev_token.span; let arm_body; - let is_fat_arrow = this.check(&token::FatArrow); + let is_fat_arrow = this.check(exp!(FatArrow)); let is_almost_fat_arrow = TokenKind::FatArrow .similar_tokens() .is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind)); @@ -3134,17 +3134,15 @@ impl<'a> Parser<'a> { let mut result = if armless { // A pattern without a body, allowed for never patterns. 
arm_body = None; - this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]).map( - |x| { - // Don't gate twice - if !pat.contains_never_pattern() { - this.psess.gated_spans.gate(sym::never_patterns, pat.span); - } - x - }, - ) + this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map(|x| { + // Don't gate twice + if !pat.contains_never_pattern() { + this.psess.gated_spans.gate(sym::never_patterns, pat.span); + } + x + }) } else { - if let Err(mut err) = this.expect(&token::FatArrow) { + if let Err(mut err) = this.expect(exp!(FatArrow)) { // We might have a `=>` -> `=` or `->` typo (issue #89396). if is_almost_fat_arrow { err.span_suggestion( @@ -3184,7 +3182,7 @@ impl<'a> Parser<'a> { if !require_comma { arm_body = Some(expr); // Eat a comma if it exists, though. - let _ = this.eat(&token::Comma); + let _ = this.eat(exp!(Comma)); Ok(Recovered::No) } else if let Some((span, guar)) = this.parse_arm_body_missing_braces(&expr, arrow_span) @@ -3195,42 +3193,40 @@ impl<'a> Parser<'a> { } else { let expr_span = expr.span; arm_body = Some(expr); - this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]) - .map_err(|mut err| { - if this.token == token::FatArrow { - let sm = this.psess.source_map(); - if let Ok(expr_lines) = sm.span_to_lines(expr_span) - && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span) - && arm_start_lines.lines[0].end_col - == expr_lines.lines[0].end_col - && expr_lines.lines.len() == 2 - { - // We check whether there's any trailing code in the parse span, - // if there isn't, we very likely have the following: - // - // X | &Y => "y" - // | -- - missing comma - // | | - // | arrow_span - // X | &X => "x" - // | - ^^ self.token.span - // | | - // | parsed until here as `"y" & X` - err.span_suggestion_short( - arm_start_span.shrink_to_hi(), - "missing a comma here to end this `match` arm", - ",", - Applicability::MachineApplicable, - ); - } - } else { - err.span_label( - arrow_span, - "while parsing the `match` arm starting here", + this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map_err(|mut err| { + if this.token == token::FatArrow { + let sm = this.psess.source_map(); + if let Ok(expr_lines) = sm.span_to_lines(expr_span) + && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span) + && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col + && expr_lines.lines.len() == 2 + { + // We check whether there's any trailing code in the parse span, + // if there isn't, we very likely have the following: + // + // X | &Y => "y" + // | -- - missing comma + // | | + // | arrow_span + // X | &X => "x" + // | - ^^ self.token.span + // | | + // | parsed until here as `"y" & X` + err.span_suggestion_short( + arm_start_span.shrink_to_hi(), + "missing a comma here to end this `match` arm", + ",", + Applicability::MachineApplicable, ); } - err - }) + } else { + err.span_label( + arrow_span, + "while parsing the `match` arm starting here", + ); + } + err + }) } }; @@ -3267,7 +3263,7 @@ impl<'a> Parser<'a> { ) .map_err(|err| err.cancel()) .is_ok(); - if pattern_follows && snapshot.check(&TokenKind::FatArrow) { + if pattern_follows && snapshot.check(exp!(FatArrow)) { err.cancel(); let guar = this.dcx().emit_err(errors::MissingCommaAfterMatchArm { span: arm_span.shrink_to_hi(), @@ -3309,7 +3305,7 @@ impl<'a> Parser<'a> { _ => (false, true), } } - if !self.eat_keyword(kw::If) { + if !self.eat_keyword(exp!(If)) { // No match arm guard present. return Ok(None); } @@ -3384,7 +3380,7 @@ impl<'a> Parser<'a> { // errors. 
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); let msg = "you might have meant to start a match arm after the match guard"; - if self.eat(&token::CloseDelim(Delimiter::Brace)) { + if self.eat(exp!(CloseBrace)) { let applicability = if self.token != token::FatArrow { // We have high confidence that we indeed didn't have a struct // literal in the match guard, but rather we had some operation @@ -3409,7 +3405,7 @@ impl<'a> Parser<'a> { /// Parses a `try {...}` expression (`try` token already eaten). fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> { let (attrs, body) = self.parse_inner_attrs_and_block()?; - if self.eat_keyword(kw::Catch) { + if self.eat_keyword(exp!(Catch)) { Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span })) } else { let span = span_lo.to(body.span); @@ -3440,10 +3436,10 @@ impl<'a> Parser<'a> { /// Parses an `async move? {...}` or `gen move? {...}` expression. fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> { let lo = self.token.span; - let kind = if self.eat_keyword(kw::Async) { - if self.eat_keyword(kw::Gen) { GenBlockKind::AsyncGen } else { GenBlockKind::Async } + let kind = if self.eat_keyword(exp!(Async)) { + if self.eat_keyword(exp!(Gen)) { GenBlockKind::AsyncGen } else { GenBlockKind::Async } } else { - assert!(self.eat_keyword(kw::Gen)); + assert!(self.eat_keyword(exp!(Gen))); GenBlockKind::Gen }; match kind { @@ -3504,7 +3500,7 @@ impl<'a> Parser<'a> { ) -> Option<PResult<'a, P<Expr>>> { let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); if struct_allowed || self.is_certainly_not_a_block() { - if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) { + if let Err(err) = self.expect(exp!(OpenBrace)) { return Some(Err(err)); } let expr = self.parse_expr_struct(qself.clone(), path.clone(), true); @@ -3527,7 +3523,7 @@ impl<'a> Parser<'a> { &mut self, pth: ast::Path, recover: bool, - close_delim: Delimiter, + close: ExpTokenPair<'_>, ) -> PResult< 'a, ( @@ -3546,11 +3542,11 @@ impl<'a> Parser<'a> { errors::HelpUseLatestEdition::new().add_to_diag(e); }; - while self.token != token::CloseDelim(close_delim) { - if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) { + while self.token != *close.tok { + if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) { let exp_span = self.prev_token.span; // We permit `.. }` on the left-hand side of a destructuring assignment. - if self.check(&token::CloseDelim(close_delim)) { + if self.check(close) { base = ast::StructRest::Rest(self.prev_token.span); break; } @@ -3625,7 +3621,7 @@ impl<'a> Parser<'a> { Applicability::MaybeIncorrect, ); } - if in_if_guard && close_delim == Delimiter::Brace { + if in_if_guard && close.token_type == TokenType::CloseBrace { return Err(e); } @@ -3655,9 +3651,9 @@ impl<'a> Parser<'a> { let is_shorthand = parsed_field.as_ref().is_ok_and(|f| f.is_shorthand); // A shorthand field can be turned into a full field with `:`. // We should point this out. 
- self.check_or_expected(!is_shorthand, TokenType::Token(token::Colon)); + self.check_or_expected(!is_shorthand, TokenType::Colon); - match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) { + match self.expect_one_of(&[exp!(Comma)], &[close]) { Ok(_) => { if let Ok(f) = parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar)) { @@ -3689,7 +3685,7 @@ impl<'a> Parser<'a> { fields.push(f); } self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore); - let _ = self.eat(&token::Comma); + let _ = self.eat(exp!(Comma)); } } } @@ -3705,9 +3701,9 @@ impl<'a> Parser<'a> { ) -> PResult<'a, P<Expr>> { let lo = pth.span; let (fields, base, recovered_async) = - self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?; + self.parse_struct_fields(pth.clone(), recover, exp!(CloseBrace))?; let span = lo.to(self.token.span); - self.expect(&token::CloseDelim(Delimiter::Brace))?; + self.expect(exp!(CloseBrace))?; let expr = if let Some(guar) = recovered_async { ExprKind::Err(guar) } else { @@ -3727,10 +3723,8 @@ impl<'a> Parser<'a> { self.recover_stmt(); } - fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool { - if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim)) - && self.eat(&token::DotDotDot) - { + fn recover_struct_field_dots(&mut self, close: &TokenKind) -> bool { + if !self.look_ahead(1, |t| t == close) && self.eat(exp!(DotDotDot)) { // recover from typo of `...`, suggest `..` let span = self.prev_token.span; self.dcx().emit_err(errors::MissingDotDot { token_span: span, sugg_span: span }); |
