Diffstat (limited to 'compiler/rustc_parse/src/parser')
-rw-r--r--   compiler/rustc_parse/src/parser/expr.rs          | 295
-rw-r--r--   compiler/rustc_parse/src/parser/item.rs          | 315
-rw-r--r--   compiler/rustc_parse/src/parser/mod.rs           |  80
-rw-r--r--   compiler/rustc_parse/src/parser/nonterminal.rs   |  16
-rw-r--r--   compiler/rustc_parse/src/parser/pat.rs           |  11
-rw-r--r--   compiler/rustc_parse/src/parser/stmt.rs          |  15
-rw-r--r--   compiler/rustc_parse/src/parser/tests.rs         |   2
-rw-r--r--   compiler/rustc_parse/src/parser/token_type.rs    |   4
-rw-r--r--   compiler/rustc_parse/src/parser/ty.rs            |   6
9 files changed, 479 insertions(+), 265 deletions(-)
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 92e83577f1b..e1e6b93abf3 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -4,10 +4,10 @@ use core::mem; use core::ops::{Bound, ControlFlow}; use ast::mut_visit::{self, MutVisitor}; -use ast::token::{IdentIsRaw, MetaVarKind}; +use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, Token, TokenKind}; use rustc_ast::tokenstream::TokenTree; use rustc_ast::util::case::Case; use rustc_ast::util::classify; @@ -19,7 +19,6 @@ use rustc_ast::{ MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, UnOp, UnsafeBinderCastKind, YieldKind, }; -use rustc_ast_pretty::pprust; use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::{Applicability, Diag, PResult, StashKey, Subdiagnostic}; use rustc_lexer::unescape::unescape_char; @@ -605,7 +604,7 @@ impl<'a> Parser<'a> { // can't continue an expression after an ident token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) | token::Pound => true, - _ => t.is_whole_expr(), + _ => t.is_metavar_expr(), }; self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr) } @@ -641,6 +640,13 @@ impl<'a> Parser<'a> { TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => { self.prev_token.span } + TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { + // `expr.span` is the interpolated span, because invisible open + // and close delims both get marked with the same span, one + // that covers the entire thing between them. (See + // `rustc_expand::mbe::transcribe::transcribe`.) + self.prev_token.span + } _ => expr.span, } } @@ -979,12 +985,30 @@ impl<'a> Parser<'a> { } fn error_unexpected_after_dot(&self) { - let actual = pprust::token_to_string(&self.token); + let actual = super::token_descr(&self.token); let span = self.token.span; let sm = self.psess.source_map(); let (span, actual) = match (&self.token.kind, self.subparser_name) { - (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => { - (span.shrink_to_hi(), actual.into()) + (token::Eof, Some(_)) if let Ok(snippet) = sm.span_to_snippet(sm.next_point(span)) => { + (span.shrink_to_hi(), format!("`{}`", snippet)) + } + (token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))), _) => { + // No need to report an error. This case will only occur when parsing a pasted + // metavariable, and we should have emitted an error when parsing the macro call in + // the first place. E.g. in this code: + // ``` + // macro_rules! m { ($e:expr) => { $e }; } + // + // fn main() { + // let f = 1; + // m!(f.); + // } + // ``` + // we'll get an error "unexpected token: `)` when parsing the `m!(f.)`, so we don't + // want to issue a second error when parsing the expansion `«f.»` (where `«`/`»` + // represent the invisible delimiters). + self.dcx().span_delayed_bug(span, "bad dot expr in metavariable"); + return; } _ => (span, actual), }; @@ -1294,7 +1318,7 @@ impl<'a> Parser<'a> { /// Assuming we have just parsed `.`, continue parsing into an expression. 
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { + if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) { return Ok(self.mk_await_expr(self_arg, lo)); } @@ -1364,17 +1388,33 @@ impl<'a> Parser<'a> { let span = self.token.span; if let token::Interpolated(nt) = &self.token.kind { match &**nt { - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - self.bump(); - return Ok(e); - } token::NtBlock(block) => { let block = block.clone(); self.bump(); return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None))); } }; + } else if let Some(expr) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), + |this| { + // Force collection (as opposed to just `parse_expr`) is required to avoid the + // attribute duplication seen in #138478. + let expr = this.parse_expr_force_collect(); + // FIXME(nnethercote) Sometimes with expressions we get a trailing comma, possibly + // related to the FIXME in `collect_tokens_for_expr`. Examples are the multi-line + // `assert_eq!` calls involving arguments annotated with `#[rustfmt::skip]` in + // `compiler/rustc_index/src/bit_set/tests.rs`. + if this.token.kind == token::Comma { + this.bump(); + } + expr + }, + ) { + return Ok(expr); + } else if let Some(lit) = + self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + { + return Ok(lit); } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| { this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type)) }) { @@ -1471,9 +1511,9 @@ impl<'a> Parser<'a> { this.parse_expr_let(restrictions) } else if this.eat_keyword(exp!(Underscore)) { Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore)) - } else if this.token.uninterpolated_span().at_least_rust_2018() { + } else if this.token_uninterpolated_span().at_least_rust_2018() { // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly. - if this.token.uninterpolated_span().at_least_rust_2024() + if this.token_uninterpolated_span().at_least_rust_2024() // check for `gen {}` and `gen move {}` // or `async gen {}` and `async gen move {}` && (this.is_gen_block(kw::Gen, 0) @@ -2062,87 +2102,107 @@ impl<'a> Parser<'a> { .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char)) } - fn recover_after_dot(&mut self) -> Option<Token> { - let mut recovered = None; + fn recover_after_dot(&mut self) { if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where // dot would follow an optional literal, so we do this unconditionally. - recovered = self.look_ahead(1, |next_token| { + let recovered = self.look_ahead(1, |next_token| { + // If it's an integer that looks like a float, then recover as such. + // + // We will never encounter the exponent part of a floating + // point literal here, since there's no use of the exponent + // syntax that also constitutes a valid integer, so we need + // not check for that. if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = next_token.kind + && suffix.is_none_or(|s| s == sym::f32 || s == sym::f64) + && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_') + && self.token.span.hi() == next_token.span.lo() { - // If this integer looks like a float, then recover as such. 
- // - // We will never encounter the exponent part of a floating - // point literal here, since there's no use of the exponent - // syntax that also constitutes a valid integer, so we need - // not check for that. - if suffix.is_none_or(|s| s == sym::f32 || s == sym::f64) - && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_') - && self.token.span.hi() == next_token.span.lo() - { - let s = String::from("0.") + symbol.as_str(); - let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.token.span.to(next_token.span))); - } + let s = String::from("0.") + symbol.as_str(); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + Some(Token::new(kind, self.token.span.to(next_token.span))) + } else { + None } - None }); - if let Some(token) = &recovered { - self.bump(); + if let Some(recovered) = recovered { self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart { - span: token.span, - suggestion: token.span.shrink_to_lo(), + span: recovered.span, + suggestion: recovered.span.shrink_to_lo(), }); + self.bump(); + self.token = recovered; } } + } - recovered + /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and + /// `Lit::from_token` (excluding unary negation). + fn eat_token_lit(&mut self) -> Option<token::Lit> { + match self.token.uninterpolate().kind { + token::Ident(name, IdentIsRaw::No) if name.is_bool_lit() => { + self.bump(); + Some(token::Lit::new(token::Bool, name, None)) + } + token::Literal(token_lit) => { + self.bump(); + Some(token_lit) + } + token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + MetaVarKind::Literal, + ))) => { + let lit = self + .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + .expect("metavar seq literal"); + let ast::ExprKind::Lit(token_lit) = lit.kind else { + panic!("didn't reparse a literal"); + }; + Some(token_lit) + } + token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar( + mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. }, + ))) => { + let expr = self + .eat_metavar_seq(mv_kind, |this| this.parse_expr()) + .expect("metavar seq expr"); + let ast::ExprKind::Lit(token_lit) = expr.kind else { + panic!("didn't reparse an expr"); + }; + Some(token_lit) + } + _ => None, + } } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. - pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> { - let recovered = self.recover_after_dot(); - let token = recovered.as_ref().unwrap_or(&self.token); - let span = token.span; - - token::Lit::from_token(token).map(|token_lit| { - self.bump(); - (token_lit, span) - }) + fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> { + self.recover_after_dot(); + let span = self.token.span; + self.eat_token_lit().map(|token_lit| (token_lit, span)) } /// Matches `lit = true | false | token_lit`. /// Returns `None` if the next token is not a literal. 
- pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> { - let recovered = self.recover_after_dot(); - let token = recovered.as_ref().unwrap_or(&self.token); - match token::Lit::from_token(token) { - Some(lit) => { - match MetaItemLit::from_token_lit(lit, token.span) { - Ok(lit) => { - self.bump(); - Some(lit) - } - Err(err) => { - let span = token.uninterpolated_span(); - self.bump(); - let guar = report_lit_error(self.psess, err, lit, span); - // Pack possible quotes and prefixes from the original literal into - // the error literal's symbol so they can be pretty-printed faithfully. - let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); - let symbol = Symbol::intern(&suffixless_lit.to_string()); - let lit = token::Lit::new(token::Err(guar), symbol, lit.suffix); - Some( - MetaItemLit::from_token_lit(lit, span) - .unwrap_or_else(|_| unreachable!()), - ) - } + fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> { + self.recover_after_dot(); + let span = self.token.span; + let uninterpolated_span = self.token_uninterpolated_span(); + self.eat_token_lit().map(|token_lit| { + match MetaItemLit::from_token_lit(token_lit, span) { + Ok(lit) => lit, + Err(err) => { + let guar = report_lit_error(&self.psess, err, token_lit, uninterpolated_span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. + let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix); + MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap() } } - None => None, - } + }) } pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) { @@ -2166,9 +2226,10 @@ impl<'a> Parser<'a> { /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). /// Keep this in sync with `Token::can_begin_literal_maybe_minus`. pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { - if let token::Interpolated(nt) = &self.token.kind { - match &**nt { - // FIXME(nnethercote) The `NtExpr` case should only match if + if let Some(expr) = self.eat_metavar_seq_with_matcher( + |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }), + |this| { + // FIXME(nnethercote) The `expr` case should only match if // `e` is an `ExprKind::Lit` or an `ExprKind::Unary` containing // an `UnOp::Neg` and an `ExprKind::Lit`, like how // `can_begin_literal_maybe_minus` works. But this method has @@ -2178,13 +2239,14 @@ impl<'a> Parser<'a> { // `ExprKind::Path` must be accepted when parsing range // patterns. That requires some care. So for now, we continue // being less strict here than we should be. - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - self.bump(); - return Ok(e); - } - _ => {} - }; + this.parse_expr() + }, + ) { + return Ok(expr); + } else if let Some(lit) = + self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus()) + { + return Ok(lit); } let lo = self.token.span; @@ -2200,7 +2262,9 @@ impl<'a> Parser<'a> { } fn is_array_like_block(&mut self) -> bool { - self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_))) + matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace)) + && self + .look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) 
| TokenKind::Literal(_))) && self.look_ahead(2, |t| t == &token::Comma) && self.look_ahead(3, |t| t.can_begin_expr()) } @@ -2212,9 +2276,9 @@ impl<'a> Parser<'a> { let mut snapshot = self.create_snapshot_for_diagnostic(); match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) { Ok(arr) => { - let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces { + let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfBraces { span: arr.span, - sub: errors::ArrayBracketsInsteadOfSpacesSugg { + sub: errors::ArrayBracketsInsteadOfBracesSugg { left: lo, right: snapshot.prev_token.span, }, @@ -2328,7 +2392,7 @@ impl<'a> Parser<'a> { let movability = if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable }; - let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() { + let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() { self.parse_coroutine_kind(Case::Sensitive) } else { None @@ -2337,7 +2401,8 @@ impl<'a> Parser<'a> { let capture_clause = self.parse_capture_clause()?; let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?; let decl_hi = self.prev_token.span; - let mut body = match fn_decl.output { + let mut body = match &fn_decl.output { + // No return type. FnRetTy::Default(_) => { let restrictions = self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; @@ -2349,11 +2414,8 @@ impl<'a> Parser<'a> { Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?, } } - _ => { - // If an explicit return type is given, require a block to appear (RFC 968). - let body_lo = self.token.span; - self.parse_expr_block(None, body_lo, BlockCheckMode::Default)? - } + // Explicit return type (`->`) needs block `-> T { }`. + FnRetTy::Ty(ty) => self.parse_closure_block_body(ty.span)?, }; match coroutine_kind { @@ -2405,6 +2467,49 @@ impl<'a> Parser<'a> { Ok(closure) } + /// If an explicit return type is given, require a block to appear (RFC 968). + fn parse_closure_block_body(&mut self, ret_span: Span) -> PResult<'a, P<Expr>> { + if self.may_recover() + && self.token.can_begin_expr() + && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace)) + && !self.token.is_whole_block() + { + let snapshot = self.create_snapshot_for_diagnostic(); + let restrictions = + self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET; + let tok = self.token.clone(); + match self.parse_expr_res(restrictions, AttrWrapper::empty()) { + Ok((expr, _)) => { + let descr = super::token_descr(&tok); + let mut diag = self + .dcx() + .struct_span_err(tok.span, format!("expected `{{`, found {descr}")); + diag.span_label( + ret_span, + "explicit return type requires closure body to be enclosed in braces", + ); + diag.multipart_suggestion_verbose( + "wrap the expression in curly braces", + vec![ + (expr.span.shrink_to_lo(), "{ ".to_string()), + (expr.span.shrink_to_hi(), " }".to_string()), + ], + Applicability::MachineApplicable, + ); + diag.emit(); + return Ok(expr); + } + Err(diag) => { + diag.cancel(); + self.restore_snapshot(snapshot); + } + } + } + + let body_lo = self.token.span; + self.parse_expr_block(None, body_lo, BlockCheckMode::Default) + } + /// Parses an optional `move` or `use` prefix to a closure-like construct. fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> { if self.eat_keyword(exp!(Move)) { @@ -2836,7 +2941,7 @@ impl<'a> Parser<'a> { /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten). 
fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> { let is_await = - self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)); + self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)); if is_await { self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span); @@ -3426,7 +3531,7 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw::Try) && self .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) - && self.token.uninterpolated_span().at_least_rust_2018() + && self.token_uninterpolated_span().at_least_rust_2018() } /// Parses an `async move? {...}` or `gen move? {...}` expression. diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index aad18578375..9405b58ab3b 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -34,10 +34,10 @@ impl<'a> Parser<'a> { } /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. - fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> { + fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemKind> { let safety = self.parse_safety(Case::Sensitive); self.expect_keyword(exp!(Mod))?; - let id = self.parse_ident()?; + let ident = self.parse_ident()?; let mod_kind = if self.eat(exp!(Semi)) { ModKind::Unloaded } else { @@ -46,7 +46,7 @@ impl<'a> Parser<'a> { attrs.extend(inner_attrs); ModKind::Loaded(items, Inline::Yes, inner_span, Ok(())) }; - Ok((id, ItemKind::Mod(safety, mod_kind))) + Ok(ItemKind::Mod(safety, ident, mod_kind)) } /// Parses the contents of a module (inner attributes followed by module items). @@ -115,8 +115,6 @@ impl<'a> Parser<'a> { } } -pub(super) type ItemInfo = (Ident, ItemKind); - impl<'a> Parser<'a> { pub fn parse_item(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<P<Item>>> { let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true }; @@ -163,11 +161,11 @@ impl<'a> Parser<'a> { fn_parse_mode, Case::Sensitive, )?; - if let Some((ident, kind)) = kind { + if let Some(kind) = kind { this.error_on_unconsumed_default(def, &kind); let span = lo.to(this.prev_token.span); let id = DUMMY_NODE_ID; - let item = Item { ident, attrs, id, kind, vis, span, tokens: None }; + let item = Item { attrs, id, kind, vis, span, tokens: None }; return Ok((Some(item), Trailing::No, UsePreAttrPos::No)); } @@ -208,7 +206,7 @@ impl<'a> Parser<'a> { def: &mut Defaultness, fn_parse_mode: FnParseMode, case: Case, - ) -> PResult<'a, Option<ItemInfo>> { + ) -> PResult<'a, Option<ItemKind>> { let check_pub = def == &Defaultness::Final; let mut def_ = || mem::replace(def, Defaultness::Final); @@ -218,17 +216,15 @@ impl<'a> Parser<'a> { // FUNCTION ITEM let (ident, sig, generics, contract, body) = self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?; - ( + ItemKind::Fn(Box::new(Fn { + defaultness: def_(), ident, - ItemKind::Fn(Box::new(Fn { - defaultness: def_(), - sig, - generics, - contract, - body, - define_opaque: None, - })), - ) + sig, + generics, + contract, + body, + define_opaque: None, + })) } else if self.eat_keyword(exp!(Extern)) { if self.eat_keyword(exp!(Crate)) { // EXTERN CRATE @@ -247,8 +243,7 @@ impl<'a> Parser<'a> { // STATIC ITEM self.bump(); // `static` let mutability = self.parse_mutability(); - let (ident, item) = self.parse_static_item(safety, mutability)?; - (ident, ItemKind::Static(Box::new(item))) + self.parse_static_item(safety, mutability)? 
} else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) { // CONST ITEM if self.token.is_keyword(kw::Impl) { @@ -258,16 +253,14 @@ impl<'a> Parser<'a> { self.recover_const_mut(const_span); self.recover_missing_kw_before_item()?; let (ident, generics, ty, expr) = self.parse_const_item()?; - ( + ItemKind::Const(Box::new(ConstItem { + defaultness: def_(), ident, - ItemKind::Const(Box::new(ConstItem { - defaultness: def_(), - generics, - ty, - expr, - define_opaque: None, - })), - ) + generics, + ty, + expr, + define_opaque: None, + })) } } else if self.check_keyword(exp!(Trait)) || self.check_auto_or_unsafe_trait_item() { // TRAIT ITEM @@ -334,14 +327,14 @@ impl<'a> Parser<'a> { self.recover_missing_kw_before_item()?; } // MACRO INVOCATION ITEM - (Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?))) + ItemKind::MacCall(P(self.parse_item_macro(vis)?)) } else { return Ok(None); }; Ok(Some(info)) } - fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemInfo>> { + fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemKind>> { let span = self.token.span; let token_name = super::token_descr(&self.token); let snapshot = self.create_snapshot_for_diagnostic(); @@ -359,7 +352,7 @@ impl<'a> Parser<'a> { } } - fn parse_use_item(&mut self) -> PResult<'a, ItemInfo> { + fn parse_use_item(&mut self) -> PResult<'a, ItemKind> { let tree = self.parse_use_tree()?; if let Err(mut e) = self.expect_semi() { match tree.kind { @@ -373,7 +366,7 @@ impl<'a> Parser<'a> { } return Err(e); } - Ok((Ident::empty(), ItemKind::Use(tree))) + Ok(ItemKind::Use(tree)) } /// When parsing a statement, would the start of a path be an item? @@ -483,7 +476,7 @@ impl<'a> Parser<'a> { if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) } } - fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> { + fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemKind>> { // To be expanded Ok(None) } @@ -577,7 +570,7 @@ impl<'a> Parser<'a> { &mut self, attrs: &mut AttrVec, defaultness: Defaultness, - ) -> PResult<'a, ItemInfo> { + ) -> PResult<'a, ItemKind> { let safety = self.parse_safety(Case::Sensitive); self.expect_keyword(exp!(Impl))?; @@ -598,7 +591,7 @@ impl<'a> Parser<'a> { } // Parse stray `impl async Trait` - if (self.token.uninterpolated_span().at_least_rust_2018() + if (self.token_uninterpolated_span().at_least_rust_2018() && self.token.is_keyword(kw::Async)) || self.is_kw_followed_by_ident(kw::Async) { @@ -687,7 +680,7 @@ impl<'a> Parser<'a> { } None => (None, ty_first), // impl Type }; - let item_kind = ItemKind::Impl(Box::new(Impl { + Ok(ItemKind::Impl(Box::new(Impl { safety, polarity, defaultness, @@ -696,12 +689,10 @@ impl<'a> Parser<'a> { of_trait, self_ty, items: impl_items, - })); - - Ok((Ident::empty(), item_kind)) + }))) } - fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> { + fn parse_item_delegation(&mut self) -> PResult<'a, ItemKind> { let span = self.token.span; self.expect_keyword(exp!(Reuse))?; @@ -724,7 +715,7 @@ impl<'a> Parser<'a> { }) }; - let (ident, item_kind) = if self.eat_path_sep() { + let item_kind = if self.eat_path_sep() { let suffixes = if self.eat(exp!(Star)) { None } else { @@ -732,7 +723,7 @@ impl<'a> Parser<'a> { Some(self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), parse_suffix)?.0) }; let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? 
}; - (Ident::empty(), ItemKind::DelegationMac(Box::new(deleg))) + ItemKind::DelegationMac(Box::new(deleg)) } else { let rename = rename(self)?; let ident = rename.unwrap_or_else(|| path.segments.last().unwrap().ident); @@ -740,17 +731,18 @@ impl<'a> Parser<'a> { id: DUMMY_NODE_ID, qself, path, + ident, rename, body: body(self)?, from_glob: false, }; - (ident, ItemKind::Delegation(Box::new(deleg))) + ItemKind::Delegation(Box::new(deleg)) }; let span = span.to(self.prev_token.span); self.psess.gated_spans.gate(sym::fn_delegation, span); - Ok((ident, item_kind)) + Ok(item_kind) } fn parse_item_list<T>( @@ -885,7 +877,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As)) { self.bump(); // `default` - Defaultness::Default(self.prev_token.uninterpolated_span()) + Defaultness::Default(self.prev_token_uninterpolated_span()) } else { Defaultness::Final } @@ -900,7 +892,7 @@ impl<'a> Parser<'a> { } /// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`. - fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> { + fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemKind> { let safety = self.parse_safety(Case::Sensitive); // Parse optional `auto` prefix. let is_auto = if self.eat_keyword(exp!(Auto)) { @@ -941,15 +933,12 @@ impl<'a> Parser<'a> { self.psess.gated_spans.gate(sym::trait_alias, whole_span); - Ok((ident, ItemKind::TraitAlias(generics, bounds))) + Ok(ItemKind::TraitAlias(ident, generics, bounds)) } else { // It's a normal trait. generics.where_clause = self.parse_where_clause()?; let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?; - Ok(( - ident, - ItemKind::Trait(Box::new(Trait { is_auto, safety, generics, bounds, items })), - )) + Ok(ItemKind::Trait(Box::new(Trait { is_auto, safety, ident, generics, bounds, items }))) } } @@ -977,11 +966,12 @@ impl<'a> Parser<'a> { force_collect: ForceCollect, ) -> PResult<'a, Option<Option<P<AssocItem>>>> { Ok(self.parse_item_(fn_parse_mode, force_collect)?.map( - |Item { attrs, id, span, vis, ident, kind, tokens }| { + |Item { attrs, id, span, vis, kind, tokens }| { let kind = match AssocItemKind::try_from(kind) { Ok(kind) => kind, Err(kind) => match kind { ItemKind::Static(box StaticItem { + ident, ty, safety: _, mutability: _, @@ -991,6 +981,7 @@ impl<'a> Parser<'a> { self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span }); AssocItemKind::Const(Box::new(ConstItem { defaultness: Defaultness::Final, + ident, generics: Generics::default(), ty, expr, @@ -1000,7 +991,7 @@ impl<'a> Parser<'a> { _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"), }, }; - Some(P(Item { attrs, id, span, vis, ident, kind, tokens })) + Some(P(Item { attrs, id, span, vis, kind, tokens })) }, )) } @@ -1010,7 +1001,7 @@ impl<'a> Parser<'a> { /// TypeAlias = "type" Ident Generics (":" GenericBounds)? WhereClause ("=" Ty)? WhereClause ";" ; /// ``` /// The `"type"` has already been eaten. 
- fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemInfo> { + fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemKind> { let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -1045,16 +1036,14 @@ impl<'a> Parser<'a> { self.expect_semi()?; - Ok(( + Ok(ItemKind::TyAlias(Box::new(TyAlias { + defaultness, ident, - ItemKind::TyAlias(Box::new(TyAlias { - defaultness, - generics, - where_clauses, - bounds, - ty, - })), - )) + generics, + where_clauses, + bounds, + ty, + }))) } /// Parses a `UseTree`. @@ -1158,16 +1147,16 @@ impl<'a> Parser<'a> { /// extern crate foo; /// extern crate bar as foo; /// ``` - fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemInfo> { + fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemKind> { // Accept `extern crate name-like-this` for better diagnostics - let orig_name = self.parse_crate_name_with_dashes()?; - let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? { - (rename, Some(orig_name.name)) + let orig_ident = self.parse_crate_name_with_dashes()?; + let (orig_name, item_ident) = if let Some(rename) = self.parse_rename()? { + (Some(orig_ident.name), rename) } else { - (orig_name, None) + (None, orig_ident) }; self.expect_semi()?; - Ok((item_name, ItemKind::ExternCrate(orig_name))) + Ok(ItemKind::ExternCrate(orig_name, item_ident)) } fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> { @@ -1218,8 +1207,8 @@ impl<'a> Parser<'a> { &mut self, attrs: &mut AttrVec, mut safety: Safety, - ) -> PResult<'a, ItemInfo> { - let extern_span = self.prev_token.uninterpolated_span(); + ) -> PResult<'a, ItemKind> { + let extern_span = self.prev_token_uninterpolated_span(); let abi = self.parse_abi(); // ABI? // FIXME: This recovery should be tested better. if safety == Safety::Default @@ -1230,13 +1219,12 @@ impl<'a> Parser<'a> { safety = Safety::Unsafe(self.token.span); let _ = self.eat_keyword(exp!(Unsafe)); } - let module = ast::ForeignMod { + Ok(ItemKind::ForeignMod(ast::ForeignMod { extern_span, safety, abi, items: self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?, - }; - Ok((Ident::empty(), ItemKind::ForeignMod(module))) + })) } /// Parses a foreign item (one in an `extern { ... }` block). @@ -1246,11 +1234,11 @@ impl<'a> Parser<'a> { ) -> PResult<'a, Option<Option<P<ForeignItem>>>> { let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: false }; Ok(self.parse_item_(fn_parse_mode, force_collect)?.map( - |Item { attrs, id, span, vis, ident, kind, tokens }| { + |Item { attrs, id, span, vis, kind, tokens }| { let kind = match ForeignItemKind::try_from(kind) { Ok(kind) => kind, Err(kind) => match kind { - ItemKind::Const(box ConstItem { ty, expr, .. }) => { + ItemKind::Const(box ConstItem { ident, ty, expr, .. 
}) => { let const_span = Some(span.with_hi(ident.span.lo())) .filter(|span| span.can_be_used_for_suggestions()); self.dcx().emit_err(errors::ExternItemCannotBeConst { @@ -1258,6 +1246,7 @@ impl<'a> Parser<'a> { const_span, }); ForeignItemKind::Static(Box::new(StaticItem { + ident, ty, mutability: Mutability::Not, expr, @@ -1268,7 +1257,7 @@ impl<'a> Parser<'a> { _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"), }, }; - Some(P(Item { attrs, id, span, vis, ident, kind, tokens })) + Some(P(Item { attrs, id, span, vis, kind, tokens })) }, )) } @@ -1301,12 +1290,24 @@ impl<'a> Parser<'a> { } fn is_unsafe_foreign_mod(&self) -> bool { - self.token.is_keyword(kw::Unsafe) - && self.is_keyword_ahead(1, &[kw::Extern]) - && self.look_ahead( - 2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize), - |t| *t == token::OpenDelim(Delimiter::Brace), - ) + // Look for `unsafe`. + if !self.token.is_keyword(kw::Unsafe) { + return false; + } + // Look for `extern`. + if !self.is_keyword_ahead(1, &[kw::Extern]) { + return false; + } + + // Look for the optional ABI string literal. + let n = if self.look_ahead(2, |t| t.can_begin_string_literal()) { 3 } else { 2 }; + + // Look for the `{`. Use `tree_look_ahead` because the ABI (if present) + // might be a metavariable i.e. an invisible-delimited sequence, and + // `tree_look_ahead` will consider that a single element when looking + // ahead. + self.tree_look_ahead(n, |t| matches!(t, TokenTree::Delimited(_, _, Delimiter::Brace, _))) + == Some(true) } fn is_static_global(&mut self) -> bool { @@ -1343,13 +1344,13 @@ impl<'a> Parser<'a> { const_span: Span, attrs: &mut AttrVec, defaultness: Defaultness, - ) -> PResult<'a, ItemInfo> { + ) -> PResult<'a, ItemKind> { let impl_span = self.token.span; let err = self.expected_ident_found_err(); // Only try to recover if this is implementing a trait for a type - let mut impl_info = match self.parse_item_impl(attrs, defaultness) { - Ok(impl_info) => impl_info, + let mut item_kind = match self.parse_item_impl(attrs, defaultness) { + Ok(item_kind) => item_kind, Err(recovery_error) => { // Recovery failed, raise the "expected identifier" error recovery_error.cancel(); @@ -1357,7 +1358,7 @@ impl<'a> Parser<'a> { } }; - match &mut impl_info.1 { + match &mut item_kind { ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => { *constness = Const::Yes(const_span); @@ -1374,10 +1375,11 @@ impl<'a> Parser<'a> { _ => unreachable!(), } - Ok(impl_info) + Ok(item_kind) } - /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in `mutability`. + /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in + /// `mutability`. /// /// ```ebnf /// Static = "static" "mut"? $ident ":" $ty (= $expr)? ";" ; @@ -1386,7 +1388,7 @@ impl<'a> Parser<'a> { &mut self, safety: Safety, mutability: Mutability, - ) -> PResult<'a, (Ident, StaticItem)> { + ) -> PResult<'a, ItemKind> { let ident = self.parse_ident()?; if self.token == TokenKind::Lt && self.may_recover() { @@ -1398,7 +1400,8 @@ impl<'a> Parser<'a> { // FIXME: This could maybe benefit from `.may_recover()`? let ty = match (self.eat(exp!(Colon)), self.check(exp!(Eq)) | self.check(exp!(Semi))) { (true, false) => self.parse_ty()?, - // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type. + // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing + // type. 
(colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)), }; @@ -1406,7 +1409,8 @@ impl<'a> Parser<'a> { self.expect_semi()?; - Ok((ident, StaticItem { ty, safety, mutability, expr, define_opaque: None })) + let item = StaticItem { ident, ty, safety, mutability, expr, define_opaque: None }; + Ok(ItemKind::Static(Box::new(item))) } /// Parse a constant item with the prefix `"const"` already parsed. @@ -1531,7 +1535,7 @@ impl<'a> Parser<'a> { } /// Parses an enum declaration. - fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { + fn parse_item_enum(&mut self) -> PResult<'a, ItemKind> { if self.token.is_keyword(kw::Struct) { let span = self.prev_token.span.to(self.token.span); let err = errors::EnumStructMutuallyExclusive { span }; @@ -1544,7 +1548,7 @@ impl<'a> Parser<'a> { } let prev_span = self.prev_token.span; - let id = self.parse_ident()?; + let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; generics.where_clause = self.parse_where_clause()?; @@ -1555,10 +1559,10 @@ impl<'a> Parser<'a> { (thin_vec![], Trailing::No) } else { self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| { - p.parse_enum_variant(id.span) + p.parse_enum_variant(ident.span) }) .map_err(|mut err| { - err.span_label(id.span, "while parsing this enum"); + err.span_label(ident.span, "while parsing this enum"); if self.token == token::Colon { let snapshot = self.create_snapshot_for_diagnostic(); self.bump(); @@ -1584,7 +1588,7 @@ impl<'a> Parser<'a> { }; let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() }; - Ok((id, ItemKind::Enum(enum_definition, generics))) + Ok(ItemKind::Enum(ident, enum_definition, generics)) } fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> { @@ -1676,8 +1680,8 @@ impl<'a> Parser<'a> { } /// Parses `struct Foo { ... }`. - fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> { - let class_name = self.parse_ident()?; + fn parse_item_struct(&mut self) -> PResult<'a, ItemKind> { + let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -1698,7 +1702,7 @@ impl<'a> Parser<'a> { let vdata = if self.token.is_keyword(kw::Where) { let tuple_struct_body; (generics.where_clause, tuple_struct_body) = - self.parse_struct_where_clause(class_name, generics.span)?; + self.parse_struct_where_clause(ident, generics.span)?; if let Some(body) = tuple_struct_body { // If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);` @@ -1712,7 +1716,7 @@ impl<'a> Parser<'a> { // If we see: `struct Foo<T> where T: Copy { ... }` let (fields, recovered) = self.parse_record_struct_body( "struct", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } @@ -1724,7 +1728,7 @@ impl<'a> Parser<'a> { } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body( "struct", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } @@ -1740,12 +1744,12 @@ impl<'a> Parser<'a> { return Err(self.dcx().create_err(err)); }; - Ok((class_name, ItemKind::Struct(vdata, generics))) + Ok(ItemKind::Struct(ident, vdata, generics)) } /// Parses `union Foo { ... }`. 
- fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> { - let class_name = self.parse_ident()?; + fn parse_item_union(&mut self) -> PResult<'a, ItemKind> { + let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -1753,14 +1757,14 @@ impl<'a> Parser<'a> { generics.where_clause = self.parse_where_clause()?; let (fields, recovered) = self.parse_record_struct_body( "union", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body( "union", - class_name.span, + ident.span, generics.where_clause.has_where_token, )?; VariantData::Struct { fields, recovered } @@ -1772,7 +1776,7 @@ impl<'a> Parser<'a> { return Err(err); }; - Ok((class_name, ItemKind::Union(vdata, generics))) + Ok(ItemKind::Union(ident, vdata, generics)) } /// This function parses the fields of record structs: @@ -2124,15 +2128,17 @@ impl<'a> Parser<'a> { } } else if self.eat_keyword(exp!(Struct)) { match self.parse_item_struct() { - Ok((ident, _)) => self - .dcx() - .struct_span_err( - lo.with_hi(ident.span.hi()), - format!("structs are not allowed in {adt_ty} definitions"), - ) - .with_help( - "consider creating a new `struct` definition instead of nesting", - ), + Ok(item) => { + let ItemKind::Struct(ident, ..) = item else { unreachable!() }; + self.dcx() + .struct_span_err( + lo.with_hi(ident.span.hi()), + format!("structs are not allowed in {adt_ty} definitions"), + ) + .with_help( + "consider creating a new `struct` definition instead of nesting", + ) + } Err(err) => { err.cancel(); self.restore_snapshot(snapshot); @@ -2177,7 +2183,7 @@ impl<'a> Parser<'a> { /// MacParams = "(" TOKEN_STREAM ")" ; /// DeclMac = "macro" Ident MacParams? MacBody ; /// ``` - fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> { + fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemKind> { let ident = self.parse_ident()?; let body = if self.check(exp!(OpenBrace)) { self.parse_delim_args()? // `MacBody` @@ -2199,7 +2205,7 @@ impl<'a> Parser<'a> { }; self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span)); - Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false }))) + Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: false })) } /// Is this a possibly malformed start of a `macro_rules! foo` item definition? @@ -2228,7 +2234,7 @@ impl<'a> Parser<'a> { &mut self, vis: &Visibility, has_bang: bool, - ) -> PResult<'a, ItemInfo> { + ) -> PResult<'a, ItemKind> { self.expect_keyword(exp!(MacroRules))?; // `macro_rules` if has_bang { @@ -2246,7 +2252,7 @@ impl<'a> Parser<'a> { self.eat_semi_for_macro_if_needed(&body); self.complain_if_pub_macro(vis, true); - Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: true }))) + Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: true })) } /// Item macro invocations or `macro_rules!` definitions need inherited visibility. @@ -2610,13 +2616,36 @@ impl<'a> Parser<'a> { }) // `extern ABI fn` || self.check_keyword_case(exp!(Extern), case) + // Use `tree_look_ahead` because `ABI` might be a metavariable, + // i.e. an invisible-delimited sequence, and `tree_look_ahead` + // will consider that a single element when looking ahead. 
&& self.look_ahead(1, |t| t.can_begin_string_literal()) - && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) || + && (self.tree_look_ahead(2, |tt| { + match tt { + TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case), + TokenTree::Delimited(..) => false, + } + }) == Some(true) || // This branch is only for better diagnostics; `pub`, `unsafe`, etc. are not // allowed here. (self.may_recover() - && self.look_ahead(2, |t| ALL_QUALS.iter().any(|exp| t.is_keyword(exp.kw))) - && self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case)))) + && self.tree_look_ahead(2, |tt| { + match tt { + TokenTree::Token(t, _) => + ALL_QUALS.iter().any(|exp| { + t.is_keyword(exp.kw) + }), + TokenTree::Delimited(..) => false, + } + }) == Some(true) + && self.tree_look_ahead(3, |tt| { + match tt { + TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case), + TokenTree::Delimited(..) => false, + } + }) == Some(true) + ) + ) } /// Parses all the "front matter" (or "qualifiers") for a `fn` declaration, @@ -2752,7 +2781,7 @@ impl<'a> Parser<'a> { .expect("Span extracted directly from keyword should always work"); err.span_suggestion( - self.token.uninterpolated_span(), + self.token_uninterpolated_span(), format!("`{original_kw}` already used earlier, remove this one"), "", Applicability::MachineApplicable, @@ -2763,7 +2792,7 @@ impl<'a> Parser<'a> { else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw { let correct_pos_sp = correct_pos_sp.to(self.prev_token.span); if let Ok(current_qual) = self.span_to_snippet(correct_pos_sp) { - let misplaced_qual_sp = self.token.uninterpolated_span(); + let misplaced_qual_sp = self.token_uninterpolated_span(); let misplaced_qual = self.span_to_snippet(misplaced_qual_sp).unwrap(); err.span_suggestion( @@ -2931,13 +2960,30 @@ impl<'a> Parser<'a> { let parser_snapshot_before_ty = this.create_snapshot_for_diagnostic(); this.eat_incorrect_doc_comment_for_param_type(); let mut ty = this.parse_ty_for_param(); - if ty.is_ok() - && this.token != token::Comma - && this.token != token::CloseDelim(Delimiter::Parenthesis) - { - // This wasn't actually a type, but a pattern looking like a type, - // so we are going to rollback and re-parse for recovery. - ty = this.unexpected_any(); + + if let Ok(t) = &ty { + // Check for trailing angle brackets + if let TyKind::Path(_, Path { segments, .. }) = &t.kind { + if let Some(segment) = segments.last() { + if let Some(guar) = + this.check_trailing_angle_brackets(segment, &[exp!(CloseParen)]) + { + return Ok(( + dummy_arg(segment.ident, guar), + Trailing::No, + UsePreAttrPos::No, + )); + } + } + } + + if this.token != token::Comma + && this.token != token::CloseDelim(Delimiter::Parenthesis) + { + // This wasn't actually a type, but a pattern looking like a type, + // so we are going to rollback and re-parse for recovery. + ty = this.unexpected_any(); + } } match ty { Ok(ty) => { @@ -2948,6 +2994,7 @@ impl<'a> Parser<'a> { } // If this is a C-variadic argument and we hit an error, return the error. Err(err) if this.token == token::DotDotDot => return Err(err), + Err(err) if this.unmatched_angle_bracket_count > 0 => return Err(err), // Recover from attempting to parse the argument as a type without pattern. 
Err(err) => { err.cancel(); diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index a79b4048288..392a1c1057a 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -24,8 +24,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{ - self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtPatKind, Token, - TokenKind, + self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtExprKind, NtPatKind, + Token, TokenKind, }; use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree}; use rustc_ast::util::case::Case; @@ -101,6 +101,7 @@ pub enum ForceCollect { #[macro_export] macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { + #[allow(irrefutable_let_patterns)] // FIXME: temporary if let token::Interpolated(nt) = &$p.token.kind && let token::$constructor(x) = &**nt { @@ -299,6 +300,10 @@ impl TokenTreeCursor { self.stream.get(self.index) } + fn look_ahead(&self, n: usize) -> Option<&TokenTree> { + self.stream.get(self.index + n) + } + #[inline] fn bump(&mut self) { self.index += 1; @@ -1290,6 +1295,17 @@ impl<'a> Parser<'a> { looker(&token) } + /// Like `lookahead`, but skips over token trees rather than tokens. Useful + /// when looking past possible metavariable pasting sites. + pub fn tree_look_ahead<R>( + &self, + dist: usize, + looker: impl FnOnce(&TokenTree) -> R, + ) -> Option<R> { + assert_ne!(dist, 0); + self.token_cursor.curr.look_ahead(dist - 1).map(looker) + } + /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) @@ -1297,14 +1313,14 @@ impl<'a> Parser<'a> { /// Parses asyncness: `async` or nothing. fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> { - let span = self.token.uninterpolated_span(); + let span = self.token_uninterpolated_span(); if self.eat_keyword_case(exp!(Async), case) { // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then // error if edition <= 2024, like we do with async and edition <= 2018? - if self.token.uninterpolated_span().at_least_rust_2024() + if self.token_uninterpolated_span().at_least_rust_2024() && self.eat_keyword_case(exp!(Gen), case) { - let gen_span = self.prev_token.uninterpolated_span(); + let gen_span = self.prev_token_uninterpolated_span(); Some(CoroutineKind::AsyncGen { span: span.to(gen_span), closure_id: DUMMY_NODE_ID, @@ -1317,7 +1333,7 @@ impl<'a> Parser<'a> { return_impl_trait_id: DUMMY_NODE_ID, }) } - } else if self.token.uninterpolated_span().at_least_rust_2024() + } else if self.token_uninterpolated_span().at_least_rust_2024() && self.eat_keyword_case(exp!(Gen), case) { Some(CoroutineKind::Gen { @@ -1333,9 +1349,9 @@ impl<'a> Parser<'a> { /// Parses fn unsafety: `unsafe`, `safe` or nothing. 
fn parse_safety(&mut self, case: Case) -> Safety { if self.eat_keyword_case(exp!(Unsafe), case) { - Safety::Unsafe(self.prev_token.uninterpolated_span()) + Safety::Unsafe(self.prev_token_uninterpolated_span()) } else if self.eat_keyword_case(exp!(Safe), case) { - Safety::Safe(self.prev_token.uninterpolated_span()) + Safety::Safe(self.prev_token_uninterpolated_span()) } else { Safety::Default } @@ -1362,7 +1378,7 @@ impl<'a> Parser<'a> { .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()) && self.eat_keyword_case(exp!(Const), case) { - Const::Yes(self.prev_token.uninterpolated_span()) + Const::Yes(self.prev_token_uninterpolated_span()) } else { Const::No } @@ -1370,9 +1386,6 @@ impl<'a> Parser<'a> { /// Parses inline const expressions. fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> { - if pat { - self.psess.gated_spans.gate(sym::inline_const_pat, span); - } self.expect_keyword(exp!(Const))?; let (attrs, blk) = self.parse_inner_attrs_and_block(None)?; let anon_const = AnonConst { @@ -1380,7 +1393,17 @@ impl<'a> Parser<'a> { value: self.mk_expr(blk.span, ExprKind::Block(blk, None)), }; let blk_span = anon_const.value.span; - Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs)) + let kind = if pat { + let guar = self + .dcx() + .struct_span_err(blk_span, "`inline_const_pat` has been removed") + .with_help("use a named `const`-item or an `if`-guard instead") + .emit(); + ExprKind::Err(guar) + } else { + ExprKind::ConstBlock(anon_const) + }; + Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs)) } /// Parses mutability (`mut` or nothing). @@ -1699,6 +1722,35 @@ impl<'a> Parser<'a> { pub fn approx_token_stream_pos(&self) -> u32 { self.num_bump_calls } + + /// For interpolated `self.token`, returns a span of the fragment to which + /// the interpolated token refers. For all other tokens this is just a + /// regular span. It is particularly important to use this for identifiers + /// and lifetimes for which spans affect name resolution and edition + /// checks. Note that keywords are also identifiers, so they should use + /// this if they keep spans or perform edition checks. + pub fn token_uninterpolated_span(&self) -> Span { + match &self.token.kind { + token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, + token::Interpolated(nt) => nt.use_span(), + token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { + self.look_ahead(1, |t| t.span) + } + _ => self.token.span, + } + } + + /// Like `token_uninterpolated_span`, but works on `self.prev_token`. 
+ pub fn prev_token_uninterpolated_span(&self) -> Span { + match &self.prev_token.kind { + token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span, + token::Interpolated(nt) => nt.use_span(), + token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => { + self.look_ahead(0, |t| t.span) + } + _ => self.prev_token.span, + } + } } pub(crate) fn make_unclosed_delims_error( @@ -1751,6 +1803,8 @@ pub enum ParseNtResult { Item(P<ast::Item>), Stmt(P<ast::Stmt>), Pat(P<ast::Pat>, NtPatKind), + Expr(P<ast::Expr>, NtExprKind), + Literal(P<ast::Expr>), Ty(P<ast::Ty>), Meta(P<ast::AttrItem>), Path(P<ast::Path>), diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 1123755ce00..b4e540d670d 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -48,10 +48,6 @@ impl<'a> Parser<'a> { /// Old variant of `may_be_ident`. Being phased out. fn nt_may_be_ident(nt: &Nonterminal) -> bool { match nt { - NtExpr(_) - | NtLiteral(_) // `true`, `false` - => true, - NtBlock(_) => false, } } @@ -95,7 +91,7 @@ impl<'a> Parser<'a> { token::OpenDelim(Delimiter::Brace) => true, token::NtLifetime(..) => true, token::Interpolated(nt) => match &**nt { - NtBlock(_) | NtExpr(_) | NtLiteral(_) => true, + NtBlock(_) => true, }, token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k { MetaVarKind::Block @@ -179,10 +175,14 @@ impl<'a> Parser<'a> { pat_kind, )); } - NonterminalKind::Expr(_) => NtExpr(self.parse_expr_force_collect()?), + NonterminalKind::Expr(expr_kind) => { + return Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind)); + } NonterminalKind::Literal => { - // The `:literal` matcher does not support attributes - NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?) + // The `:literal` matcher does not support attributes. + return Ok(ParseNtResult::Literal( + self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?, + )); } NonterminalKind::Ty => { return Ok(ParseNtResult::Ty( diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index ec14c5718da..9612f71b2af 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -631,15 +631,6 @@ impl<'a> Parser<'a> { ident, indentation, }); - - // help: wrap the expr in a `const { expr }` - // FIXME(inline_const_pat): once stabilized, remove this check and remove the `(requires #[feature(inline_const_pat)])` note from the message - if self.parser.psess.unstable_features.is_nightly_build() { - err.subdiagnostic(UnexpectedExpressionInPatternSugg::InlineConst { - start_span: expr_span.shrink_to_lo(), - end_span: expr_span.shrink_to_hi(), - }); - } }, ); } @@ -1261,7 +1252,7 @@ impl<'a> Parser<'a> { || *t == token::Dot // e.g. `.5` for recovery; || matches!(t.kind, token::Literal(..) 
| token::Minus) || t.is_bool_lit() - || t.is_whole_expr() + || t.is_metavar_expr() || t.is_lifetime() // recover `'a` instead of `'a'` || (self.may_recover() // recover leading `(` && *t == token::OpenDelim(Delimiter::Parenthesis) diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 97cd4d2117f..2cd09aa8959 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -73,7 +73,20 @@ impl<'a> Parser<'a> { }); } - let stmt = if self.token.is_keyword(kw::Let) { + let stmt = if self.token.is_keyword(kw::Super) && self.is_keyword_ahead(1, &[kw::Let]) { + self.collect_tokens(None, attrs, force_collect, |this, attrs| { + this.expect_keyword(exp!(Super))?; + this.psess.gated_spans.gate(sym::super_let, this.prev_token.span); + this.expect_keyword(exp!(Let))?; + let local = this.parse_local(attrs)?; // FIXME(mara): implement super let + let trailing = Trailing::from(capture_semi && this.token == token::Semi); + Ok(( + this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), + trailing, + UsePreAttrPos::No, + )) + })? + } else if self.token.is_keyword(kw::Let) { self.collect_tokens(None, attrs, force_collect, |this, attrs| { this.expect_keyword(exp!(Let))?; let local = this.parse_local(attrs)?; diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 471966d086d..49ae6cb9b72 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -2922,7 +2922,7 @@ fn out_of_line_mod() { .unwrap() .unwrap(); - let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() }; + let ast::ItemKind::Mod(_, _, mod_kind) = &item.kind else { panic!() }; assert_matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2); }); } diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs index 886438fd583..add3c970201 100644 --- a/compiler/rustc_parse/src/parser/token_type.rs +++ b/compiler/rustc_parse/src/parser/token_type.rs @@ -114,6 +114,7 @@ pub enum TokenType { KwSelfUpper, KwStatic, KwStruct, + KwSuper, KwTrait, KwTry, KwType, @@ -250,6 +251,7 @@ impl TokenType { KwSelfUpper, KwStatic, KwStruct, + KwSuper, KwTrait, KwTry, KwType, @@ -324,6 +326,7 @@ impl TokenType { TokenType::KwSelfUpper => Some(kw::SelfUpper), TokenType::KwStatic => Some(kw::Static), TokenType::KwStruct => Some(kw::Struct), + TokenType::KwSuper => Some(kw::Super), TokenType::KwTrait => Some(kw::Trait), TokenType::KwTry => Some(kw::Try), TokenType::KwType => Some(kw::Type), @@ -549,6 +552,7 @@ macro_rules! exp { (SelfUpper) => { exp!(@kw, SelfUpper, KwSelfUpper) }; (Static) => { exp!(@kw, Static, KwStatic) }; (Struct) => { exp!(@kw, Struct, KwStruct) }; + (Super) => { exp!(@kw, Super, KwSuper) }; (Trait) => { exp!(@kw, Trait, KwTrait) }; (Try) => { exp!(@kw, Try, KwTry) }; (Type) => { exp!(@kw, Type, KwType) }; diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index b45ebae079c..93705da22c4 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -775,7 +775,7 @@ impl<'a> Parser<'a> { /// Is a `dyn B0 + ... + Bn` type allowed here? 
fn is_explicit_dyn_type(&mut self) -> bool { self.check_keyword(exp!(Dyn)) - && (self.token.uninterpolated_span().at_least_rust_2018() + && (self.token_uninterpolated_span().at_least_rust_2018() || self.look_ahead(1, |t| { (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star) && !can_continue_type_after_non_fn_ident(t) @@ -998,13 +998,13 @@ impl<'a> Parser<'a> { BoundConstness::Never }; - let asyncness = if self.token.uninterpolated_span().at_least_rust_2018() + let asyncness = if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Async)) { self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span); BoundAsyncness::Async(self.prev_token.span) } else if self.may_recover() - && self.token.uninterpolated_span().is_rust_2015() + && self.token_uninterpolated_span().is_rust_2015() && self.is_kw_followed_by_ident(kw::Async) { self.bump(); // eat `async` |
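
Note (not part of the patch): the new `parse_closure_block_body` recovery path added to expr.rs enforces the RFC 968 rule that a closure with an explicit return type must use a block body, and emits a "wrap the expression in curly braces" suggestion otherwise. A minimal sketch of the rule on current Rust, for reference:

// A closure with an explicit return type must have a block body (RFC 968).
fn main() {
    // let f = |x: i32| -> i32 x + 1;    // rejected: expected `{`, found `x`;
    //                                   // the parser now suggests `{ x + 1 }`
    let f = |x: i32| -> i32 { x + 1 };   // accepted
    assert_eq!(f(2), 3);
}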
