| field | value | date |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2024-02-25 16:18:45 +0000 |
| committer | bors <bors@rust-lang.org> | 2024-02-25 16:18:45 +0000 |
| commit | 8c0b1fcd2914caaf1c3a1071028fb74b70c519e9 | |
| tree | 2f87a275d744713395351771d943ebea39482a6a /compiler/rustc_parse/src | |
| parent | 34aab623ddccd54636a9f6e630cb29af443c4680 | |
| parent | a4423884c15acbe9d3027ddd9d008f79d583afeb | |
Auto merge of #121591 - matthiaskrgr:rollup-8wfhh3v, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

- #119590 (Stabilize `cfg_target_abi`)
- #120805 (make non-PartialEq-typed consts as patterns a hard error)
- #121060 (Add newtypes for bool fields/params/return types)
- #121284 (Add test cases for inlining compiler-private items)
- #121324 (pattern_analysis: factor out unspecialization)
- #121409 (Prevent cycle in implied predicates computation)
- #121513 (Fix sgx unit test compilation)
- #121570 (Make most bootstrap step types !Copy)
- #121586 (Don't use `unwrap()` in `ArrayIntoIter` lint when typeck fails)

r? `@ghost`
`@rustbot` modify labels: rollup
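The parser-facing churn in this tree comes almost entirely from #121060, which replaces bare `bool` fields, parameters, and return values with dedicated two-variant enums (`Recovered`, `Trailing`, `IdentIsRaw`). The sketch below is a minimal, self-contained illustration of that pattern: the `Recovered` enum and its `From` impl mirror the ones added to `compiler/rustc_parse/src/parser/mod.rs` in this merge, while `expect_semi` and the driver in `main` are hypothetical stand-ins rather than actual compiler code.

```rust
// Minimal sketch of the bool -> newtype pattern adopted in this merge.
// `Recovered` and its `From<Recovered> for bool` impl mirror the enum added
// to rustc_parse's parser/mod.rs; `expect_semi` and `main` are hypothetical
// stand-ins, not compiler code.

/// Whether a parse routine had to recover from an error.
#[derive(Copy, Clone, Debug)]
pub enum Recovered {
    No,
    Yes,
}

impl From<Recovered> for bool {
    fn from(r: Recovered) -> bool {
        matches!(r, Recovered::Yes)
    }
}

/// Hypothetical parse step. With a bare `bool` return, `Ok(true)` tells the
/// reader nothing at the call site; the enum names the meaning instead.
fn expect_semi(next: char) -> Result<Recovered, String> {
    match next {
        ';' => Ok(Recovered::No),  // the expected token was there
        ',' => Ok(Recovered::Yes), // close enough: report and keep parsing
        other => Err(format!("expected `;`, found `{other}`")),
    }
}

fn main() {
    for tok in [';', ',', 'x'] {
        match expect_semi(tok) {
            Ok(Recovered::No) => println!("`{tok}`: parsed normally"),
            Ok(Recovered::Yes) => println!("`{tok}`: recovered and continued"),
            Err(msg) => println!("`{tok}`: {msg}"),
        }
    }
}
```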
Diffstat (limited to 'compiler/rustc_parse/src')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/lexer/mod.rs | 8 |
| -rw-r--r-- | compiler/rustc_parse/src/lexer/unicode_chars.rs | 2 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/diagnostics.rs | 56 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 50 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/item.rs | 36 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 64 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 2 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/pat.rs | 18 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/path.rs | 3 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/stmt.rs | 8 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/ty.rs | 4 |
11 files changed, 141 insertions, 110 deletions
```diff
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index c768ea93b5f..dc9f5bad765 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -4,7 +4,7 @@ use crate::errors;
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
 use crate::make_unclosed_delims_error;
 use rustc_ast::ast::{self, AttrStyle};
-use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
 use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
@@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                         self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
                     }
                     self.sess.raw_identifier_spans.push(span);
-                    token::Ident(sym, true)
+                    token::Ident(sym, IdentIsRaw::Yes)
                 }
                 rustc_lexer::TokenKind::UnknownPrefix => {
                     self.report_unknown_prefix(start);
@@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
                     let span = self.mk_sp(start, self.pos);
                     self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
                         .push(span);
-                    token::Ident(sym, false)
+                    token::Ident(sym, IdentIsRaw::No)
                 }
                 // split up (raw) c string literals to an ident and a string literal when edition < 2021.
                 rustc_lexer::TokenKind::Literal {
@@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
         let sym = nfc_normalize(self.str_from(start));
         let span = self.mk_sp(start, self.pos);
         self.sess.symbol_gallery.insert(sym, span);
-        token::Ident(sym, false)
+        token::Ident(sym, IdentIsRaw::No)
     }
 
     /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index a136abaa28b..3b4e05332fa 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
 // fancier error recovery to it, as there will be less overall work to do this way.
 const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     (" ", "Space", None),
-    ("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
+    ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
     ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
     (",", "Comma", Some(token::Comma)),
     (";", "Semicolon", Some(token::Semi)),
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 0cc2170714c..517e3d82787 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -21,6 +21,8 @@ use crate::errors::{
 use crate::fluent_generated as fluent;
 use crate::parser;
 use crate::parser::attr::InnerAttrPolicy;
+use ast::token::IdentIsRaw;
+use parser::Recovered;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
@@ -264,7 +266,7 @@ impl<'a> Parser<'a> {
     pub(super) fn expected_ident_found(
         &mut self,
         recover: bool,
-    ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+    ) -> PResult<'a, (Ident, IdentIsRaw)> {
         if let TokenKind::DocComment(..) = self.prev_token.kind {
             return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
                 span: self.prev_token.span,
@@ -290,11 +292,11 @@ impl<'a> Parser<'a> {
         let bad_token = self.token.clone();
 
         // suggest prepending a keyword in identifier position with `r#`
-        let suggest_raw = if let Some((ident, false)) = self.token.ident()
+        let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
             && ident.is_raw_guess()
            && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
         {
-            recovered_ident = Some((ident, true));
+            recovered_ident = Some((ident, IdentIsRaw::Yes));
 
             // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
             // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
@@ -320,7 +322,7 @@ impl<'a> Parser<'a> {
         let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
             let (invalid, valid) = self.token.span.split_at(len as u32);
 
-            recovered_ident = Some((Ident::new(valid_portion, valid), false));
+            recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
 
             HelpIdentifierStartsWithNumber { num_span: invalid }
         });
@@ -429,7 +431,7 @@ impl<'a> Parser<'a> {
         &mut self,
         edible: &[TokenKind],
         inedible: &[TokenKind],
-    ) -> PResult<'a, bool /* recovered */> {
+    ) -> PResult<'a, Recovered> {
         debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
         fn tokens_to_string(tokens: &[TokenType]) -> String {
             let mut i = tokens.iter();
@@ -532,7 +534,7 @@ impl<'a> Parser<'a> {
                 sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
             });
             self.bump();
-            return Ok(true);
+            return Ok(Recovered::Yes);
         } else if self.look_ahead(0, |t| {
             t == &token::CloseDelim(Delimiter::Brace)
                 || ((t.can_begin_expr() || t.can_begin_item())
@@ -556,7 +558,7 @@ impl<'a> Parser<'a> {
                 unexpected_token_label: Some(self.token.span),
                 sugg: ExpectedSemiSugg::AddSemi(span),
             });
-            return Ok(true);
+            return Ok(Recovered::Yes);
         }
     }
@@ -653,9 +655,9 @@ impl<'a> Parser<'a> {
         // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
         // that in the parser requires unbounded lookahead, so we only add a hint to the existing
         // error rather than replacing it entirely.
-        if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
+        if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
             && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
-            || (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
+            || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
                 && matches!(
                     &self.token.kind,
                     TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
@@ -711,7 +713,7 @@ impl<'a> Parser<'a> {
         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
                 err.emit();
-                return Ok(true);
+                return Ok(Recovered::Yes);
             } else {
                 return Err(err);
             }
@@ -1223,7 +1225,7 @@ impl<'a> Parser<'a> {
             |p| p.parse_generic_arg(None),
         );
         match x {
-            Ok((_, _, false)) => {
+            Ok((_, _, Recovered::No)) => {
                 if self.eat(&token::Gt) {
                     // We made sense of it. Improve the error message.
                     e.span_suggestion_verbose(
@@ -1247,7 +1249,7 @@ impl<'a> Parser<'a> {
                     }
                 }
             }
-            Ok((_, _, true)) => {}
+            Ok((_, _, Recovered::Yes)) => {}
             Err(err) => {
                 err.cancel();
             }
@@ -1286,7 +1288,7 @@ impl<'a> Parser<'a> {
         err: &mut ComparisonOperatorsCannotBeChained,
         inner_op: &Expr,
         outer_op: &Spanned<AssocOp>,
-    ) -> bool /* advanced the cursor */ {
+    ) -> Recovered {
         if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
             if let ExprKind::Field(_, ident) = l1.kind
                 && ident.as_str().parse::<i32>().is_err()
@@ -1294,7 +1296,7 @@ impl<'a> Parser<'a> {
             {
                 // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish
                 // suggestion being the only one to apply is high.
-                return false;
+                return Recovered::No;
             }
             return match (op.node, &outer_op.node) {
                 // `x == y == z`
@@ -1313,7 +1315,7 @@ impl<'a> Parser<'a> {
                         span: inner_op.span.shrink_to_hi(),
                         middle_term: expr_to_str(r1),
                     });
-                    false // Keep the current parse behavior, where the AST is `(x < y) < z`.
+                    Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`.
                 }
                 // `x == y < z`
                 (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
@@ -1327,12 +1329,12 @@ impl<'a> Parser<'a> {
                                 left: r1.span.shrink_to_lo(),
                                 right: r2.span.shrink_to_hi(),
                             });
-                            true
+                            Recovered::Yes
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
                             self.restore_snapshot(snapshot);
-                            false
+                            Recovered::Yes
                         }
                     }
                 }
@@ -1347,19 +1349,19 @@ impl<'a> Parser<'a> {
                                 left: l1.span.shrink_to_lo(),
                                 right: r1.span.shrink_to_hi(),
                             });
-                            true
+                            Recovered::Yes
                         }
                         Err(expr_err) => {
                             expr_err.cancel();
                             self.restore_snapshot(snapshot);
-                            false
+                            Recovered::No
                         }
                     }
                 }
-                _ => false,
+                _ => Recovered::No,
             };
         }
-        false
+        Recovered::No
     }
 
     /// Produces an error if comparison operators are chained (RFC #558).
@@ -1487,8 +1489,9 @@ impl<'a> Parser<'a> {
                     // If it looks like a genuine attempt to chain operators (as opposed to a
                     // misformatted turbofish, for instance), suggest a correct form.
-                    if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
-                    {
+                    let recovered = self
+                        .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
+                    if matches!(recovered, Recovered::Yes) {
                         self.dcx().emit_err(err);
                         mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                     } else {
@@ -1500,7 +1503,7 @@ impl<'a> Parser<'a> {
                 let recover =
                     self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
                 self.dcx().emit_err(err);
-                if recover {
+                if matches!(recover, Recovered::Yes) {
                     return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
                 }
             }
@@ -1840,10 +1843,7 @@ impl<'a> Parser<'a> {
 
     /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a
     /// closing delimiter.
-    pub(super) fn unexpected_try_recover(
-        &mut self,
-        t: &TokenKind,
-    ) -> PResult<'a, bool /* recovered */> {
+    pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
         let token_str = pprust::token_kind_to_string(t);
         let this_token_str = super::token_descr(&self.token);
         let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 8826c06bebd..1ad637451b1 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -3,13 +3,14 @@ use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
-    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
-    SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
+    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions,
+    SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken,
 };
 use crate::errors;
 use crate::maybe_recover_from_interpolated_ty_qpath;
 use ast::mut_visit::{noop_visit_expr, MutVisitor};
+use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment};
 use core::mem;
 use rustc_ast::ptr::P;
@@ -126,7 +127,7 @@ impl<'a> Parser<'a> {
         match self.parse_expr_res(restrictions, None) {
             Ok(expr) => Ok(expr),
             Err(err) => match self.token.ident() {
-                Some((Ident { name: kw::Underscore, .. }, false))
+                Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
                     if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
@@ -457,7 +458,9 @@ impl<'a> Parser<'a> {
                 return None;
             }
             (Some(op), _) => (op, self.token.span),
-            (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
+            (None, Some((Ident { name: sym::and, span }, IdentIsRaw::No)))
+                if self.may_recover() =>
+            {
                 self.dcx().emit_err(errors::InvalidLogicalOperator {
                     span: self.token.span,
                     incorrect: "and".into(),
@@ -465,7 +468,7 @@ impl<'a> Parser<'a> {
                 });
                 (AssocOp::LAnd, span)
             }
-            (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
+            (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
                 self.dcx().emit_err(errors::InvalidLogicalOperator {
                     span: self.token.span,
                     incorrect: "or".into(),
@@ -742,7 +745,7 @@ impl<'a> Parser<'a> {
             (
                 // `foo: `
                 ExprKind::Path(None, ast::Path { segments, .. }),
-                token::Ident(kw::For | kw::Loop | kw::While, false),
+                token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No),
             ) if segments.len() == 1 => {
                 let snapshot = self.create_snapshot_for_diagnostic();
                 let label = Label {
@@ -955,19 +958,20 @@ impl<'a> Parser<'a> {
     fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
         loop {
-            let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
-                // we are using noexpect here because we don't expect a `?` directly after a `return`
-                // which could be suggested otherwise
-                self.eat_noexpect(&token::Question)
-            } else {
-                self.eat(&token::Question)
-            };
+            let has_question =
+                if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
+                    // we are using noexpect here because we don't expect a `?` directly after a `return`
+                    // which could be suggested otherwise
+                    self.eat_noexpect(&token::Question)
+                } else {
+                    self.eat(&token::Question)
+                };
             if has_question {
                 // `expr?`
                 e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
                 continue;
             }
-            let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
+            let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
                 // we are using noexpect here because we don't expect a `.` directly after a `return`
                 // which could be suggested otherwise
                 self.eat_noexpect(&token::Dot)
             } else {
@@ -1126,19 +1130,19 @@ impl<'a> Parser<'a> {
             // 1.
             DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
                 assert!(suffix.is_none());
-                self.token = Token::new(token::Ident(sym, false), ident_span);
+                self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
                 let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
                 self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
             }
             // 1.2 | 1.2e3
             DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
-                self.token = Token::new(token::Ident(symbol1, false), ident1_span);
+                self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
                 // This needs to be `Spacing::Alone` to prevent regressions.
                 // See issue #76399 and PR #76285 for more details
                 let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
                 let base1 =
                     self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
-                let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
+                let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
                 self.bump_with((next_token2, self.token_spacing)); // `.`
                 self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
             }
@@ -1555,7 +1559,7 @@ impl<'a> Parser<'a> {
                 return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
             }
         };
-        let kind = if es.len() == 1 && !trailing_comma {
+        let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
             // `(e)` is parenthesized `e`.
             ExprKind::Paren(es.into_iter().next().unwrap())
         } else {
@@ -1946,7 +1950,7 @@ impl<'a> Parser<'a> {
         self.bump(); // `builtin`
         self.bump(); // `#`
 
-        let Some((ident, false)) = self.token.ident() else {
+        let Some((ident, IdentIsRaw::No)) = self.token.ident() else {
             let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
             return Err(err);
         };
@@ -3087,10 +3091,10 @@ impl<'a> Parser<'a> {
                 if !require_comma {
                     arm_body = Some(expr);
                     this.eat(&token::Comma);
-                    Ok(false)
+                    Ok(Recovered::No)
                 } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
                     arm_body = Some(body);
-                    Ok(true)
+                    Ok(Recovered::Yes)
                 } else {
                     let expr_span = expr.span;
                     arm_body = Some(expr);
@@ -3171,7 +3175,7 @@ impl<'a> Parser<'a> {
                     this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
                         span: arm_span.shrink_to_hi(),
                     });
-                    return Ok(true);
+                    return Ok(Recovered::Yes);
                 }
                 Err(err)
             });
@@ -3574,7 +3578,7 @@ impl<'a> Parser<'a> {
     fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
         match self.token.ident() {
             Some((ident, is_raw))
-                if (is_raw || !ident.is_reserved())
+                if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved())
                     && self.look_ahead(1, |t| *t == token::Colon) =>
             {
                 Some(ast::ExprField {
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 77381ef4626..2e049ca908f 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,8 +1,12 @@
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
-use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
+use super::{
+    AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing,
+    TrailingToken,
+};
 use crate::errors::{self, MacroExpandsToAdtField};
 use crate::fluent_generated as fluent;
+use ast::token::IdentIsRaw;
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
@@ -1076,7 +1080,7 @@ impl<'a> Parser<'a> {
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
         match self.token.ident() {
-            Some((ident @ Ident { name: kw::Underscore, .. }, false)) => {
+            Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
                 self.bump();
                 Ok(ident)
             }
@@ -1453,7 +1457,7 @@ impl<'a> Parser<'a> {
         let (variants, _) = if self.token == TokenKind::Semi {
             self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
             self.bump();
-            (thin_vec![], false)
+            (thin_vec![], Trailing::No)
         } else {
             self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
                 .map_err(|mut err| {
@@ -1530,10 +1534,10 @@ impl<'a> Parser<'a> {
                         err.span_label(span, "while parsing this enum");
                         err.help(help);
                         err.emit();
-                        (thin_vec![], true)
+                        (thin_vec![], Recovered::Yes)
                     }
                 };
-                VariantData::Struct { fields, recovered }
+                VariantData::Struct { fields, recovered: recovered.into() }
             } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
                 let body = match this.parse_tuple_struct_body() {
                     Ok(body) => body,
@@ -1618,7 +1622,7 @@ impl<'a> Parser<'a> {
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
             }
         // No `where` so: `struct Foo<T>;`
         } else if self.eat(&token::Semi) {
@@ -1630,7 +1634,7 @@ impl<'a> Parser<'a> {
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
        // Tuple-style struct definition with optional where-clause.
         } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
             let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
@@ -1659,14 +1663,14 @@ impl<'a> Parser<'a> {
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
             let (fields, recovered) = self.parse_record_struct_body(
                 "union",
                 class_name.span,
                 generics.where_clause.has_where_token,
             )?;
-            VariantData::Struct { fields, recovered }
+            VariantData::Struct { fields, recovered: recovered.into() }
         } else {
             let token_str = super::token_descr(&self.token);
             let msg = format!("expected `where` or `{{` after union name, found {token_str}");
@@ -1683,14 +1687,14 @@ impl<'a> Parser<'a> {
         adt_ty: &str,
         ident_span: Span,
         parsed_where: bool,
-    ) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
+    ) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
         let mut fields = ThinVec::new();
-        let mut recovered = false;
+        let mut recovered = Recovered::No;
         if self.eat(&token::OpenDelim(Delimiter::Brace)) {
             while self.token != token::CloseDelim(Delimiter::Brace) {
                 let field = self.parse_field_def(adt_ty).map_err(|e| {
                     self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
-                    recovered = true;
+                    recovered = Recovered::Yes;
                     e
                 });
                 match field {
@@ -1962,7 +1966,7 @@ impl<'a> Parser<'a> {
         let (ident, is_raw) = self.ident_or_err(true)?;
         if ident.name == kw::Underscore {
             self.sess.gated_spans.gate(sym::unnamed_fields, lo);
-        } else if !is_raw && ident.is_reserved() {
+        } else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
             let snapshot = self.create_snapshot_for_diagnostic();
             let err = if self.check_fn_front_matter(false, Case::Sensitive) {
                 let inherited_vis = Visibility {
@@ -2461,8 +2465,8 @@ impl<'a> Parser<'a> {
         // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
         // account for this.
         match self.expect_one_of(&[], &[]) {
-            Ok(true) => {}
-            Ok(false) => unreachable!(),
+            Ok(Recovered::Yes) => {}
+            Ok(Recovered::No) => unreachable!(),
             Err(mut err) => {
                 // Qualifier keywords ordering check
                 enum WrongKw {
@@ -2740,7 +2744,7 @@ impl<'a> Parser<'a> {
     fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
         // Extract an identifier *after* having confirmed that the token is one.
         let expect_self_ident = |this: &mut Self| match this.token.ident() {
-            Some((ident, false)) => {
+            Some((ident, IdentIsRaw::No)) => {
                 this.bump();
                 ident
             }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index dea2b9e6ca7..29dd2eeb56a 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -11,6 +11,7 @@ mod stmt;
 mod ty;
 
 use crate::lexer::UnmatchedDelim;
+use ast::token::IdentIsRaw;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 pub(crate) use expr::ForbiddenLetReason;
@@ -357,6 +358,25 @@ pub enum FollowedByType {
     No,
 }
 
+/// Whether a function performed recovery
+#[derive(Copy, Clone, Debug)]
+pub enum Recovered {
+    No,
+    Yes,
+}
+
+impl From<Recovered> for bool {
+    fn from(r: Recovered) -> bool {
+        matches!(r, Recovered::Yes)
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub enum Trailing {
+    No,
+    Yes,
+}
+
 #[derive(Clone, Copy, PartialEq, Eq)]
 pub enum TokenDescription {
     ReservedIdentifier,
@@ -455,11 +475,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
+    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
         if self.expected_tokens.is_empty() {
             if self.token == *t {
                 self.bump();
-                Ok(false)
+                Ok(Recovered::No)
             } else {
                 self.unexpected_try_recover(t)
             }
@@ -475,13 +495,13 @@ impl<'a> Parser<'a> {
         &mut self,
         edible: &[TokenKind],
         inedible: &[TokenKind],
-    ) -> PResult<'a, bool /* recovered */> {
+    ) -> PResult<'a, Recovered> {
         if edible.contains(&self.token.kind) {
             self.bump();
-            Ok(false)
+            Ok(Recovered::No)
         } else if inedible.contains(&self.token.kind) {
             // leave it in the input
-            Ok(false)
+            Ok(Recovered::No)
         } else if self.token.kind != token::Eof
             && self.last_unexpected_token_span == Some(self.token.span)
         {
@@ -499,7 +519,7 @@ impl<'a> Parser<'a> {
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
         let (ident, is_raw) = self.ident_or_err(recover)?;
-        if !is_raw && ident.is_reserved() {
+        if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
             let err = self.expected_ident_found_err();
             if recover {
                 err.emit();
@@ -511,7 +531,7 @@ impl<'a> Parser<'a> {
         Ok(ident)
     }
 
-    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
         match self.token.ident() {
             Some(ident) => Ok(ident),
             None => self.expected_ident_found(recover),
@@ -568,7 +588,7 @@ impl<'a> Parser<'a> {
         }
 
         if case == Case::Insensitive
-            && let Some((ident, /* is_raw */ false)) = self.token.ident()
+            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
         {
             true
@@ -598,7 +618,7 @@ impl<'a> Parser<'a> {
         }
 
         if case == Case::Insensitive
-            && let Some((ident, /* is_raw */ false)) = self.token.ident()
+            && let Some((ident, IdentIsRaw::No)) = self.token.ident()
            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
         {
            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
@@ -783,10 +803,10 @@ impl<'a> Parser<'a> {
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         let mut first = true;
-        let mut recovered = false;
-        let mut trailing = false;
+        let mut recovered = Recovered::No;
+        let mut trailing = Trailing::No;
         let mut v = ThinVec::new();
 
         while !self.expect_any_with_type(kets, expect) {
@@ -800,12 +820,12 @@ impl<'a> Parser<'a> {
             } else {
                 // check for separator
                 match self.expect(t) {
-                    Ok(false) /* not recovered */ => {
+                    Ok(Recovered::No) => {
                         self.current_closure.take();
                     }
-                    Ok(true) /* recovered */ => {
+                    Ok(Recovered::Yes) => {
                         self.current_closure.take();
-                        recovered = true;
+                        recovered = Recovered::Yes;
                         break;
                     }
                     Err(mut expect_err) => {
@@ -900,7 +920,7 @@ impl<'a> Parser<'a> {
                 }
             }
             if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
-                trailing = true;
+                trailing = Trailing::Yes;
                 break;
             }
 
@@ -978,7 +998,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
     }
 
@@ -990,9 +1010,9 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
-        if !recovered {
+        if matches!(recovered, Recovered::No) {
             self.eat(ket);
         }
         Ok((val, trailing))
@@ -1007,7 +1027,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.expect(bra)?;
         self.parse_seq_to_end(ket, sep, f)
     }
@@ -1019,7 +1039,7 @@ impl<'a> Parser<'a> {
         &mut self,
         delim: Delimiter,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.parse_unspanned_seq(
             &token::OpenDelim(delim),
             &token::CloseDelim(delim),
@@ -1034,7 +1054,7 @@ impl<'a> Parser<'a> {
     fn parse_paren_comma_seq<T>(
         &mut self,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
     }
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 071d6b72f3b..f1572a18a8b 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -201,6 +201,6 @@ impl<'a> Parser<'a> {
 
 /// The token is an identifier, but not `_`.
 /// We prohibit passing `_` to macros expecting `ident` for now.
-fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
+fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> {
     token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
 }
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 75fc013d3e6..2ede19b11e0 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1,4 +1,4 @@
-use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken};
+use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken};
 use crate::errors::{
     self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
     DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
@@ -311,7 +311,7 @@ impl<'a> Parser<'a> {
             matches!(
                 &token.uninterpolate().kind,
                 token::FatArrow // e.g. `a | => 0,`.
-                | token::Ident(kw::If, false) // e.g. `a | if expr`.
+                | token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`.
                 | token::Eq // e.g. `let a | = 0`.
                 | token::Semi // e.g. `let a |;`.
                 | token::Colon // e.g. `let a | :`.
@@ -696,7 +696,9 @@ impl<'a> Parser<'a> {
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
-        Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
+        let paren_pattern =
+            fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest());
+        if paren_pattern {
             let pat = fields.into_iter().next().unwrap();
             let close_paren = self.prev_token.span;
@@ -714,7 +716,7 @@ impl<'a> Parser<'a> {
                         },
                     });
 
-                    self.parse_pat_range_begin_with(begin.clone(), form)?
+                    self.parse_pat_range_begin_with(begin.clone(), form)
                 }
                 // recover ranges with parentheses around the `(start)..`
                 PatKind::Err(_)
@@ -729,15 +731,15 @@ impl<'a> Parser<'a> {
                         },
                     });
 
-                    self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)?
+                    self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)
                 }
                 // (pat) with optional parentheses
-                _ => PatKind::Paren(pat),
+                _ => Ok(PatKind::Paren(pat)),
             }
         } else {
-            PatKind::Tuple(fields)
-        })
+            Ok(PatKind::Tuple(fields))
+        }
     }
 
     /// Parse a mutable binding with the `mut` token already eaten.
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 681039999a6..6e7bbe7e06d 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, Restrictions, TokenType};
 use crate::errors::PathSingleColon;
 use crate::{errors, maybe_whole};
+use ast::token::IdentIsRaw;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::{
@@ -390,7 +391,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
         match self.token.ident() {
-            Some((ident, false)) if ident.is_path_segment_keyword() => {
+            Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => {
                 self.bump();
                 Ok(ident)
             }
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 1bae5b32240..ee02b69c614 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -11,6 +11,7 @@ use crate::errors;
 use crate::maybe_whole;
 
 use crate::errors::MalformedLoopLabel;
+use crate::parser::Recovered;
 use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
@@ -661,7 +662,6 @@ impl<'a> Parser<'a> {
                 if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
             {
                 // Just check for errors and recover; do not eat semicolon yet.
-                // `expect_one_of` returns PResult<'a, bool /* recovered */>
                 let expect_result =
                     self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
 
@@ -669,7 +669,7 @@ impl<'a> Parser<'a> {
                 let replace_with_err = 'break_recover: {
                     match expect_result {
                         // Recover from parser, skip type error to avoid extra errors.
-                        Ok(true) => true,
+                        Ok(Recovered::Yes) => true,
                         Err(e) => {
                             if self.recover_colon_as_semi() {
                                 // recover_colon_as_semi has already emitted a nicer error.
@@ -691,7 +691,7 @@ impl<'a> Parser<'a> {
                                             token.kind,
                                             token::Ident(
                                                 kw::For | kw::Loop | kw::While,
-                                                false
+                                                token::IdentIsRaw::No
                                             ) | token::OpenDelim(Delimiter::Brace)
                                         )
                                     })
@@ -735,7 +735,7 @@ impl<'a> Parser<'a> {
 
                             true
                         }
-                        Ok(false) => false,
+                        Ok(Recovered::No) => false,
                     }
                 };
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 23a92e6dd3d..e3fe535bd5f 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,4 +1,4 @@
-use super::{Parser, PathStyle, TokenType};
+use super::{Parser, PathStyle, TokenType, Trailing};
 
 use crate::errors::{
     self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
@@ -415,7 +415,7 @@ impl<'a> Parser<'a> {
             Ok(ty)
         })?;
 
-        if ts.len() == 1 && !trailing {
+        if ts.len() == 1 && matches!(trailing, Trailing::No) {
             let ty = ts.into_iter().next().unwrap().into_inner();
             let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
             match ty.kind {
```
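The same idea drives the `token::Ident` changes above: the second element of the ident pair is now `IdentIsRaw` instead of a bare `bool`. Below is a standalone sketch of how that reads at a match site; `IdentIsRaw` and `Token` here are local toy definitions for illustration, not the `rustc_ast` types.

```rust
// Standalone sketch of matching on (ident, IdentIsRaw) instead of (ident, bool).
// `IdentIsRaw` and `Token` are local toy definitions, not rustc_ast types.

#[derive(Copy, Clone, Debug)]
enum IdentIsRaw {
    No,
    Yes,
}

enum Token {
    Ident(&'static str, IdentIsRaw),
    Comma,
}

fn describe(tok: &Token) -> String {
    match tok {
        // Reads as "a non-raw identifier", where `Ident(name, false)` would not.
        Token::Ident(name, IdentIsRaw::No) => format!("identifier `{name}`"),
        Token::Ident(name, IdentIsRaw::Yes) => format!("raw identifier `r#{name}`"),
        Token::Comma => "a comma".to_string(),
    }
}

fn main() {
    let toks = [
        Token::Ident("loop", IdentIsRaw::Yes), // e.g. lexed from `r#loop`
        Token::Ident("x", IdentIsRaw::No),
        Token::Comma,
    ];
    for t in &toks {
        println!("{}", describe(t));
    }
}
```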
