diff options
Diffstat (limited to 'compiler/rustc_parse')
| -rw-r--r-- | compiler/rustc_parse/Cargo.toml | 1 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/lexer/mod.rs | 22 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/lexer/tokentrees.rs | 12 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/lexer/unicode_chars.rs | 18 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/attr.rs | 8 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/attr_wrapper.rs | 64 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/diagnostics.rs | 149 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 118 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/generics.rs | 13 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/item.rs | 95 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 256 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 18 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/pat.rs | 32 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/path.rs | 11 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/stmt.rs | 50 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/ty.rs | 28 |
16 files changed, 467 insertions, 428 deletions
diff --git a/compiler/rustc_parse/Cargo.toml b/compiler/rustc_parse/Cargo.toml index a823607ab0e..c6ca260e983 100644 --- a/compiler/rustc_parse/Cargo.toml +++ b/compiler/rustc_parse/Cargo.toml @@ -13,6 +13,7 @@ rustc_ast_pretty = { path = "../rustc_ast_pretty" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_feature = { path = "../rustc_feature" } rustc_lexer = { path = "../rustc_lexer" } +rustc_macros = { path = "../rustc_macros" } rustc_errors = { path = "../rustc_errors" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index bfa13ce79ba..ee54dd44f71 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -1,6 +1,6 @@ use crate::lexer::unicode_chars::UNICODE_ARRAY; use rustc_ast::ast::{self, AttrStyle}; -use rustc_ast::token::{self, CommentKind, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{Spacing, TokenStream}; use rustc_ast::util::unicode::contains_text_flow_control_chars; use rustc_errors::{error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult}; @@ -24,8 +24,8 @@ use unescape_error_reporting::{emit_unescape_error, escaped_char}; #[derive(Clone, Debug)] pub struct UnmatchedBrace { - pub expected_delim: token::DelimToken, - pub found_delim: Option<token::DelimToken>, + pub expected_delim: Delimiter, + pub found_delim: Option<Delimiter>, pub found_span: Span, pub unclosed_span: Option<Span>, pub candidate_span: Option<Span>, @@ -284,12 +284,12 @@ impl<'a> StringReader<'a> { rustc_lexer::TokenKind::Semi => token::Semi, rustc_lexer::TokenKind::Comma => token::Comma, rustc_lexer::TokenKind::Dot => token::Dot, - rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren), - rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren), - rustc_lexer::TokenKind::OpenBrace 
=> token::OpenDelim(token::Brace), - rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(token::Brace), - rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(token::Bracket), - rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(token::Bracket), + rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis), + rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis), + rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace), + rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace), + rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket), + rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket), rustc_lexer::TokenKind::At => token::At, rustc_lexer::TokenKind::Pound => token::Pound, rustc_lexer::TokenKind::Tilde => token::Tilde, @@ -612,14 +612,14 @@ impl<'a> StringReader<'a> { err.span_suggestion_verbose( prefix_span, "use `br` for a raw byte string", - "br".to_string(), + "br", Applicability::MaybeIncorrect, ); } else if expn_data.is_root() { err.span_suggestion_verbose( prefix_span.shrink_to_hi(), "consider inserting whitespace here", - " ".into(), + " ", Applicability::MaybeIncorrect, ); } diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs index 8318aec8726..ef84f95ec83 100644 --- a/compiler/rustc_parse/src/lexer/tokentrees.rs +++ b/compiler/rustc_parse/src/lexer/tokentrees.rs @@ -1,6 +1,6 @@ use super::{StringReader, UnmatchedBrace}; -use rustc_ast::token::{self, DelimToken, Token}; +use rustc_ast::token::{self, Delimiter, Token}; use rustc_ast::tokenstream::{ DelimSpan, Spacing::{self, *}, @@ -32,15 +32,15 @@ struct TokenTreesReader<'a> { string_reader: StringReader<'a>, token: Token, /// Stack of open delimiters and their spans. Used for error message. 
- open_braces: Vec<(token::DelimToken, Span)>, + open_braces: Vec<(Delimiter, Span)>, unmatched_braces: Vec<UnmatchedBrace>, /// The type and spans for all braces /// /// Used only for error recovery when arriving to EOF with mismatched braces. - matching_delim_spans: Vec<(token::DelimToken, Span, Span)>, + matching_delim_spans: Vec<(Delimiter, Span, Span)>, last_unclosed_found_span: Option<Span>, /// Collect empty block spans that might have been auto-inserted by editors. - last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>, + last_delim_empty_block_spans: FxHashMap<Delimiter, Span>, /// Collect the spans of braces (Open, Close). Used only /// for detecting if blocks are empty and only braces. matching_block_spans: Vec<(Span, Span)>, @@ -88,7 +88,7 @@ impl<'a> TokenTreesReader<'a> { for &(_, sp) in &self.open_braces { err.span_label(sp, "unclosed delimiter"); self.unmatched_braces.push(UnmatchedBrace { - expected_delim: token::DelimToken::Brace, + expected_delim: Delimiter::Brace, found_delim: None, found_span: self.token.span, unclosed_span: Some(sp), @@ -150,7 +150,7 @@ impl<'a> TokenTreesReader<'a> { } //only add braces - if let (DelimToken::Brace, DelimToken::Brace) = (open_brace, delim) { + if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, delim) { self.matching_block_spans.push((open_brace_span, close_brace_span)); } diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index 1d63b79adc5..faa686c3e57 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -2,7 +2,7 @@ // https://www.unicode.org/Public/security/10.0.0/confusables.txt use super::StringReader; -use crate::token; +use crate::token::{self, Delimiter}; use rustc_errors::{Applicability, Diagnostic}; use rustc_span::{symbol::kw, BytePos, Pos, Span}; @@ -312,12 +312,12 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[ ('!', "Exclamation 
Mark", Some(token::Not)), ('?', "Question Mark", Some(token::Question)), ('.', "Period", Some(token::Dot)), - ('(', "Left Parenthesis", Some(token::OpenDelim(token::Paren))), - (')', "Right Parenthesis", Some(token::CloseDelim(token::Paren))), - ('[', "Left Square Bracket", Some(token::OpenDelim(token::Bracket))), - (']', "Right Square Bracket", Some(token::CloseDelim(token::Bracket))), - ('{', "Left Curly Brace", Some(token::OpenDelim(token::Brace))), - ('}', "Right Curly Brace", Some(token::CloseDelim(token::Brace))), + ('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))), + (')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))), + ('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))), + (']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))), + ('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))), + ('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))), ('*', "Asterisk", Some(token::BinOp(token::Star))), ('/', "Slash", Some(token::BinOp(token::Slash))), ('\\', "Backslash", None), @@ -338,9 +338,7 @@ pub(super) fn check_for_substitution<'a>( ch: char, err: &mut Diagnostic, ) -> Option<token::TokenKind> { - let Some(&(_u_char, u_name, ascii_char)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else { - return None; - }; + let &(_u_char, u_name, ascii_char) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?; let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8())); diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 1724bab5caa..358b01df3b9 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -1,7 +1,7 @@ use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle}; use rustc_ast as ast; use rustc_ast::attr; -use rustc_ast::token::{self, Nonterminal}; +use rustc_ast::token::{self, Delimiter, Nonterminal}; use 
rustc_ast_pretty::pprust; use rustc_errors::{error_code, Diagnostic, PResult}; use rustc_span::{sym, BytePos, Span}; @@ -130,9 +130,9 @@ impl<'a> Parser<'a> { ast::AttrStyle::Outer }; - this.expect(&token::OpenDelim(token::Bracket))?; + this.expect(&token::OpenDelim(Delimiter::Bracket))?; let item = this.parse_attr_item(false)?; - this.expect(&token::CloseDelim(token::Bracket))?; + this.expect(&token::CloseDelim(Delimiter::Bracket))?; let attr_sp = lo.to(this.prev_token.span); // Emit error if inner attribute is encountered and forbidden. @@ -403,7 +403,7 @@ impl<'a> Parser<'a> { crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { Ok(if self.eat(&token::Eq) { ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) - } else if self.check(&token::OpenDelim(token::Paren)) { + } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { // Matches `meta_seq = ( COMMASEP(meta_item_inner) )`. let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?; ast::MetaItemKind::List(list) diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index 5ee9c339bb7..a12621564ab 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -1,11 +1,11 @@ use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken}; -use rustc_ast::token::{self, DelimToken, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream}; use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing}; use rustc_ast::{self as ast}; use rustc_ast::{AstLike, AttrVec, Attribute}; use rustc_errors::PResult; -use rustc_span::{sym, Span, DUMMY_SP}; +use rustc_span::{sym, Span}; use std::convert::TryInto; use std::ops::Range; @@ -100,21 +100,16 @@ 
rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144); impl CreateTokenStream for LazyTokenStreamImpl { fn create_token_stream(&self) -> AttrAnnotatedTokenStream { - // The token produced by the final call to `{,inlined_}next` or - // `{,inlined_}next_desugared` was not actually consumed by the - // callback. The combination of chaining the initial token and using - // `take` produces the desired result - we produce an empty - // `TokenStream` if no calls were made, and omit the final token - // otherwise. + // The token produced by the final call to `{,inlined_}next` was not + // actually consumed by the callback. The combination of chaining the + // initial token and using `take` produces the desired result - we + // produce an empty `TokenStream` if no calls were made, and omit the + // final token otherwise. let mut cursor_snapshot = self.cursor_snapshot.clone(); let tokens = std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1)) .chain((0..self.num_calls).map(|_| { - let token = if cursor_snapshot.desugar_doc_comments { - cursor_snapshot.next_desugared() - } else { - cursor_snapshot.next() - }; + let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments); (FlatToken::Token(token.0), token.1) })) .take(self.num_calls); @@ -393,11 +388,11 @@ impl<'a> Parser<'a> { /// Converts a flattened iterator of tokens (including open and close delimiter tokens) /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair /// of open and close delims. -// FIXME(#67062): Currently, we don't parse `None`-delimited groups correctly, -// which can cause us to end up with mismatched `None` delimiters in our +// FIXME(#67062): Currently, we don't parse `Invisible`-delimited groups correctly, +// which can cause us to end up with mismatched `Invisible` delimiters in our // captured tokens. 
This function contains several hacks to work around this - -// essentially, we throw away mismatched `None` delimiters when we encounter them. -// Once we properly parse `None` delimiters, they can be captured just like any +// essentially, we throw away mismatched `Invisible` delimiters when we encounter them. +// Once we properly parse `Invisible` delimiters, they can be captured just like any // other tokens, and these hacks can be removed. fn make_token_stream( mut iter: impl Iterator<Item = (FlatToken, Spacing)>, @@ -405,24 +400,26 @@ fn make_token_stream( ) -> AttrAnnotatedTokenStream { #[derive(Debug)] struct FrameData { - open: Span, - open_delim: DelimToken, + // This is `None` for the first frame, `Some` for all others. + open_delim_sp: Option<(Delimiter, Span)>, inner: Vec<(AttrAnnotatedTokenTree, Spacing)>, } - let mut stack = - vec![FrameData { open: DUMMY_SP, open_delim: DelimToken::NoDelim, inner: vec![] }]; + let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }]; let mut token_and_spacing = iter.next(); while let Some((token, spacing)) = token_and_spacing { match token { FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => { - stack.push(FrameData { open: span, open_delim: delim, inner: vec![] }); + stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] }); } FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => { - // HACK: If we encounter a mismatched `None` delimiter at the top + // HACK: If we encounter a mismatched `Invisible` delimiter at the top // level, just ignore it. 
- if matches!(delim, DelimToken::NoDelim) + if matches!(delim, Delimiter::Invisible) && (stack.len() == 1 - || !matches!(stack.last_mut().unwrap().open_delim, DelimToken::NoDelim)) + || !matches!( + stack.last_mut().unwrap().open_delim_sp.unwrap().0, + Delimiter::Invisible + )) { token_and_spacing = iter.next(); continue; @@ -431,11 +428,11 @@ fn make_token_stream( .pop() .unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token)); - // HACK: If our current frame has a mismatched opening `None` delimiter, + // HACK: If our current frame has a mismatched opening `Invisible` delimiter, // merge our current frame with the one above it. That is, transform // `[ { < first second } third ]` into `[ { first second } third ]` - if !matches!(delim, DelimToken::NoDelim) - && matches!(frame_data.open_delim, DelimToken::NoDelim) + if !matches!(delim, Delimiter::Invisible) + && matches!(frame_data.open_delim_sp.unwrap().0, Delimiter::Invisible) { stack.last_mut().unwrap().inner.extend(frame_data.inner); // Process our closing delimiter again, this time at the previous @@ -444,12 +441,13 @@ fn make_token_stream( continue; } + let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap(); assert_eq!( - frame_data.open_delim, delim, + open_delim, delim, "Mismatched open/close delims: open={:?} close={:?}", - frame_data.open, span + open_delim, span ); - let dspan = DelimSpan::from_pair(frame_data.open, span); + let dspan = DelimSpan::from_pair(open_sp, span); let stream = AttrAnnotatedTokenStream::new(frame_data.inner); let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream); stack @@ -474,10 +472,10 @@ fn make_token_stream( } token_and_spacing = iter.next(); } - // HACK: If we don't have a closing `None` delimiter for our last + // HACK: If we don't have a closing `Invisible` delimiter for our last // frame, merge the frame with the top-level frame. 
That is, // turn `< first second` into `first second` - if stack.len() == 2 && stack[1].open_delim == DelimToken::NoDelim { + if stack.len() == 2 && stack[1].open_delim_sp.unwrap().0 == Delimiter::Invisible { let temp_buf = stack.pop().unwrap(); stack.last_mut().unwrap().inner.extend(temp_buf.inner); } diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index ed264045170..beffbdc5de4 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -8,7 +8,7 @@ use super::{ use crate::lexer::UnmatchedBrace; use rustc_ast as ast; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Lit, LitKind, TokenKind}; +use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind}; use rustc_ast::util::parser::AssocOp; use rustc_ast::{ AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block, @@ -21,6 +21,7 @@ use rustc_errors::{pluralize, struct_span_err, Diagnostic, EmissionGuarantee, Er use rustc_errors::{ Applicability, DiagnosticBuilder, DiagnosticMessage, Handler, MultiSpan, PResult, }; +use rustc_macros::SessionDiagnostic; use rustc_span::source_map::Spanned; use rustc_span::symbol::{kw, Ident}; use rustc_span::{Span, SpanSnippetError, DUMMY_SP}; @@ -241,6 +242,16 @@ impl MultiSugg { err.multipart_suggestions(msg, suggestions.map(|s| s.patches), applicability); } } + +#[derive(SessionDiagnostic)] +#[error(slug = "parser-maybe-report-ambiguous-plus")] +struct AmbiguousPlus { + pub sum_ty: String, + #[primary_span] + #[suggestion(code = "({sum_ty})")] + pub span: Span, +} + // SnapshotParser is used to create a snapshot of the parser // without causing duplicate errors being emitted when the `Parser` // is dropped. 
@@ -326,10 +337,10 @@ impl<'a> Parser<'a> { TokenKind::Comma, TokenKind::Semi, TokenKind::ModSep, - TokenKind::OpenDelim(token::DelimToken::Brace), - TokenKind::OpenDelim(token::DelimToken::Paren), - TokenKind::CloseDelim(token::DelimToken::Brace), - TokenKind::CloseDelim(token::DelimToken::Paren), + TokenKind::OpenDelim(Delimiter::Brace), + TokenKind::OpenDelim(Delimiter::Parenthesis), + TokenKind::CloseDelim(Delimiter::Brace), + TokenKind::CloseDelim(Delimiter::Parenthesis), ]; match self.token.ident() { Some((ident, false)) @@ -402,7 +413,7 @@ impl<'a> Parser<'a> { } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) { // The current token is in the same line as the prior token, not recoverable. } else if [token::Comma, token::Colon].contains(&self.token.kind) - && self.prev_token.kind == token::CloseDelim(token::Paren) + && self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis) { // Likely typo: The current token is on a new line and is expected to be // `.`, `;`, `?`, or an operator after a close delimiter token. @@ -413,7 +424,7 @@ impl<'a> Parser<'a> { // ^ // https://github.com/rust-lang/rust/issues/72253 } else if self.look_ahead(1, |t| { - t == &token::CloseDelim(token::Brace) + t == &token::CloseDelim(Delimiter::Brace) || t.can_begin_expr() && t.kind != token::Colon }) && [token::Comma, token::Colon].contains(&self.token.kind) { @@ -430,11 +441,12 @@ impl<'a> Parser<'a> { .emit(); return Ok(true); } else if self.look_ahead(0, |t| { - t == &token::CloseDelim(token::Brace) - || ( - t.can_begin_expr() && t != &token::Semi && t != &token::Pound - // Avoid triggering with too many trailing `#` in raw string. - ) + t == &token::CloseDelim(Delimiter::Brace) + || (t.can_begin_expr() && t != &token::Semi && t != &token::Pound) + // Avoid triggering with too many trailing `#` in raw string. 
+ || (sm.is_multiline( + self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()) + ) && t == &token::Pound) }) { // Missing semicolon typo. This is triggered if the next token could either start a // new statement or is a block close. For example: @@ -508,7 +520,12 @@ impl<'a> Parser<'a> { } if self.check_too_many_raw_str_terminators(&mut err) { - return Err(err); + if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) { + err.emit(); + return Ok(true); + } else { + return Err(err); + } } if self.prev_token.span == DUMMY_SP { @@ -538,6 +555,7 @@ impl<'a> Parser<'a> { } fn check_too_many_raw_str_terminators(&mut self, err: &mut Diagnostic) -> bool { + let sm = self.sess.source_map(); match (&self.prev_token.kind, &self.token.kind) { ( TokenKind::Literal(Lit { @@ -545,15 +563,33 @@ impl<'a> Parser<'a> { .. }), TokenKind::Pound, - ) => { + ) if !sm.is_multiline( + self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()), + ) => + { + let n_hashes: u8 = *n_hashes; err.set_primary_message("too many `#` when terminating raw string"); + let str_span = self.prev_token.span; + let mut span = self.token.span; + let mut count = 0; + while self.token.kind == TokenKind::Pound + && !sm.is_multiline(span.shrink_to_hi().until(self.token.span.shrink_to_lo())) + { + span = span.with_hi(self.token.span.hi()); + self.bump(); + count += 1; + } + err.set_span(span); err.span_suggestion( - self.token.span, - "remove the extra `#`", + span, + &format!("remove the extra `#`{}", pluralize!(count)), String::new(), Applicability::MachineApplicable, ); - err.note(&format!("the raw string started with {n_hashes} `#`s")); + err.span_label( + str_span, + &format!("this raw string started with {n_hashes} `#`{}", pluralize!(n_hashes)), + ); true } _ => false, @@ -619,7 +655,7 @@ impl<'a> Parser<'a> { (Err(snapshot_err), Err(err)) => { // We don't know what went wrong, emit the normal error. 
snapshot_err.cancel(); - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); + self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); Err(err) } (Ok(_), Ok(mut tail)) => { @@ -830,7 +866,7 @@ impl<'a> Parser<'a> { trailing_span = trailing_span.to(self.token.span); self.bump(); } - if self.token.kind == token::OpenDelim(token::Paren) { + if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) { // Recover from bad turbofish: `foo.collect::Vec<_>()`. let args = AngleBracketedArgs { args, span }.into(); segment.args = args; @@ -1062,7 +1098,7 @@ impl<'a> Parser<'a> { [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)]; self.consume_tts(1, &modifiers); - if !&[token::OpenDelim(token::Paren), token::ModSep] + if !&[token::OpenDelim(Delimiter::Parenthesis), token::ModSep] .contains(&self.token.kind) { // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the @@ -1096,7 +1132,7 @@ impl<'a> Parser<'a> { Err(err) } } - } else if token::OpenDelim(token::Paren) == self.token.kind { + } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind { // We have high certainty that this was a bad turbofish at this point. // `foo< bar >(` suggest(&mut err); @@ -1150,8 +1186,10 @@ impl<'a> Parser<'a> { self.bump(); // `(` // Consume the fn call arguments. 
- let modifiers = - [(token::OpenDelim(token::Paren), 1), (token::CloseDelim(token::Paren), -1)]; + let modifiers = [ + (token::OpenDelim(Delimiter::Parenthesis), 1), + (token::CloseDelim(Delimiter::Parenthesis), -1), + ]; self.consume_tts(1, &modifiers); if self.token.kind == token::Eof { @@ -1171,15 +1209,7 @@ impl<'a> Parser<'a> { ty: &Ty, ) { if matches!(allow_plus, AllowPlus::No) && impl_dyn_multi { - let sum_with_parens = format!("({})", pprust::ty_to_string(&ty)); - self.struct_span_err(ty.span, "ambiguous `+` in a type") - .span_suggestion( - ty.span, - "use parentheses to disambiguate", - sum_with_parens, - Applicability::MachineApplicable, - ) - .emit(); + self.sess.emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(&ty), span: ty.span }); } } @@ -1551,15 +1581,15 @@ impl<'a> Parser<'a> { fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> { self.expect(&token::Not)?; - self.expect(&token::OpenDelim(token::Paren))?; + self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let expr = self.parse_expr()?; - self.expect(&token::CloseDelim(token::Paren))?; + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; Ok((self.prev_token.span, expr, false)) } fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> { let is_question = self.eat(&token::Question); // Handle `await? <expr>`. - let expr = if self.token == token::OpenDelim(token::Brace) { + let expr = if self.token == token::OpenDelim(Delimiter::Brace) { // Handle `await { <expr> }`. // This needs to be handled separately from the next arm to avoid // interpreting `await { <expr> }?` as `<expr>?.await`. @@ -1591,8 +1621,8 @@ impl<'a> Parser<'a> { /// If encountering `future.await()`, consumes and emits an error. 
pub(super) fn recover_from_await_method_call(&mut self) { - if self.token == token::OpenDelim(token::Paren) - && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) + if self.token == token::OpenDelim(Delimiter::Parenthesis) + && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) { // future.await() let lo = self.token.span; @@ -1613,7 +1643,7 @@ impl<'a> Parser<'a> { pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> { let is_try = self.token.is_keyword(kw::Try); let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for ! - let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(token::Paren)); //check for ( + let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for ( if is_try && is_questionmark && is_open { let lo = self.token.span; @@ -1621,8 +1651,8 @@ impl<'a> Parser<'a> { self.bump(); //remove ! let try_span = lo.to(self.token.span); //we take the try!( span self.bump(); //remove ( - let is_empty = self.token == token::CloseDelim(token::Paren); //check if the block is empty - self.consume_block(token::Paren, ConsumeClosingDelim::No); //eat the block + let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty + self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block let hi = self.token.span; self.bump(); //remove ) let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro"); @@ -1653,7 +1683,7 @@ impl<'a> Parser<'a> { begin_paren: Option<Span>, ) -> P<Pat> { match (&self.token.kind, begin_paren) { - (token::CloseDelim(token::Paren), Some(begin_par_sp)) => { + (token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => { self.bump(); self.struct_span_err( @@ -1686,8 +1716,8 @@ impl<'a> Parser<'a> { || self.token.is_ident() && matches!(node, ast::ExprKind::Path(..) 
| ast::ExprKind::Field(..)) && !self.token.is_reserved_ident() && // v `foo:bar(baz)` - self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) - || self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) // `foo:bar {` + self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis)) + || self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {` || self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar::<baz` self.look_ahead(2, |t| t == &token::Lt) && self.look_ahead(3, |t| t.is_ident()) @@ -1700,7 +1730,7 @@ impl<'a> Parser<'a> { pub(super) fn recover_seq_parse_error( &mut self, - delim: token::DelimToken, + delim: Delimiter, lo: Span, result: PResult<'a, P<Expr>>, ) -> P<Expr> { @@ -1817,7 +1847,7 @@ impl<'a> Parser<'a> { loop { debug!("recover_stmt_ loop {:?}", self.token); match self.token.kind { - token::OpenDelim(token::DelimToken::Brace) => { + token::OpenDelim(Delimiter::Brace) => { brace_depth += 1; self.bump(); if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0 @@ -1825,11 +1855,11 @@ impl<'a> Parser<'a> { in_block = true; } } - token::OpenDelim(token::DelimToken::Bracket) => { + token::OpenDelim(Delimiter::Bracket) => { bracket_depth += 1; self.bump(); } - token::CloseDelim(token::DelimToken::Brace) => { + token::CloseDelim(Delimiter::Brace) => { if brace_depth == 0 { debug!("recover_stmt_ return - close delim {:?}", self.token); break; @@ -1841,7 +1871,7 @@ impl<'a> Parser<'a> { break; } } - token::CloseDelim(token::DelimToken::Bracket) => { + token::CloseDelim(Delimiter::Bracket) => { bracket_depth -= 1; if bracket_depth < 0 { bracket_depth = 0; @@ -1899,11 +1929,11 @@ impl<'a> Parser<'a> { .emit(); self.bump(); } else if self.token == token::Pound - && self.look_ahead(1, |t| *t == token::OpenDelim(token::Bracket)) + && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket)) { let lo = self.token.span; // Skip every token until next possible arg. 
- while self.token != token::CloseDelim(token::Bracket) { + while self.token != token::CloseDelim(Delimiter::Bracket) { self.bump(); } let sp = lo.to(self.token.span); @@ -1924,7 +1954,9 @@ impl<'a> Parser<'a> { // If we find a pattern followed by an identifier, it could be an (incorrect) // C-style parameter declaration. if self.check_ident() - && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseDelim(token::Paren)) + && self.look_ahead(1, |t| { + *t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis) + }) { // `fn foo(String s) {}` let ident = self.parse_ident().unwrap(); @@ -1940,7 +1972,7 @@ impl<'a> Parser<'a> { } else if require_name && (self.token == token::Comma || self.token == token::Lt - || self.token == token::CloseDelim(token::Paren)) + || self.token == token::CloseDelim(Delimiter::Parenthesis)) { let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)"; @@ -2058,11 +2090,7 @@ impl<'a> Parser<'a> { Ok(param) } - pub(super) fn consume_block( - &mut self, - delim: token::DelimToken, - consume_close: ConsumeClosingDelim, - ) { + pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) { let mut brace_depth = 0; loop { if self.eat(&token::OpenDelim(delim)) { @@ -2081,7 +2109,8 @@ impl<'a> Parser<'a> { brace_depth -= 1; continue; } - } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) { + } else if self.token == token::Eof || self.eat(&token::CloseDelim(Delimiter::Invisible)) + { return; } else { self.bump(); @@ -2527,7 +2556,7 @@ impl<'a> Parser<'a> { crate fn maybe_recover_unexpected_block_label(&mut self) -> bool { let Some(label) = self.eat_label().filter(|_| { - self.eat(&token::Colon) && self.token.kind == token::OpenDelim(token::Brace) + self.eat(&token::Colon) && self.token.kind == token::OpenDelim(Delimiter::Brace) }) else { return false; }; @@ -2624,7 +2653,7 @@ impl<'a> Parser<'a> { /// Parse and throw away a parenthesized 
comma separated /// sequence of patterns until `)` is reached. fn skip_pat_list(&mut self) -> PResult<'a, ()> { - while !self.check(&token::CloseDelim(token::Paren)) { + while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) { self.parse_pat_no_top_alt(None)?; if !self.eat(&token::Comma) { return Ok(()); diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 7efc0ca2da2..6114e7aaa7b 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -7,9 +7,8 @@ use super::{ }; use crate::maybe_recover_from_interpolated_ty_qpath; -use ast::token::DelimToken; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::Spacing; use rustc_ast::util::classify; use rustc_ast::util::literal::LitError; @@ -495,7 +494,7 @@ impl<'a> Parser<'a> { fn is_at_start_of_range_notation_rhs(&self) -> bool { if self.token.can_begin_expr() { // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. - if self.token == token::OpenDelim(token::Brace) { + if self.token == token::OpenDelim(Delimiter::Brace) { return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); } true @@ -992,8 +991,8 @@ impl<'a> Parser<'a> { return Ok(e); } e = match self.token.kind { - token::OpenDelim(token::Paren) => self.parse_fn_call_expr(lo, e), - token::OpenDelim(token::Bracket) => self.parse_index_expr(lo, e)?, + token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e), + token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?, _ => return Ok(e), } } @@ -1156,7 +1155,7 @@ impl<'a> Parser<'a> { /// Parse a function call expression, `expr(...)`. 
fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> { - let snapshot = if self.token.kind == token::OpenDelim(token::Paren) + let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) && self.look_ahead_type_ascription_as_field() { Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) @@ -1173,7 +1172,7 @@ impl<'a> Parser<'a> { { return expr; } - self.recover_seq_parse_error(token::Paren, lo, seq) + self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq) } /// If we encounter a parser state that looks like the user has written a `struct` literal with @@ -1190,8 +1189,10 @@ impl<'a> Parser<'a> { (Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => { let name = pprust::path_to_string(&path); snapshot.bump(); // `(` - match snapshot.parse_struct_fields(path, false, token::Paren) { - Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => { + match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) { + Ok((fields, ..)) + if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) => + { // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. 
self.restore_snapshot(snapshot); @@ -1241,7 +1242,7 @@ impl<'a> Parser<'a> { fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> { self.bump(); // `[` let index = self.parse_expr()?; - self.expect(&token::CloseDelim(token::Bracket))?; + self.expect(&token::CloseDelim(Delimiter::Bracket))?; Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new())) } @@ -1253,10 +1254,10 @@ impl<'a> Parser<'a> { let fn_span_lo = self.token.span; let mut segment = self.parse_path_segment(PathStyle::Expr, None)?; - self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]); + self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]); self.check_turbofish_missing_angle_brackets(&mut segment); - if self.check(&token::OpenDelim(token::Paren)) { + if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { // Method call `expr.f()` let mut args = self.parse_paren_expr_seq()?; args.insert(0, self_arg); @@ -1302,9 +1303,9 @@ impl<'a> Parser<'a> { // could be removed without changing functionality, but it's faster // to have it here, especially for programs with large constants. 
self.parse_lit_expr(attrs) - } else if self.check(&token::OpenDelim(token::Paren)) { + } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { self.parse_tuple_parens_expr(attrs) - } else if self.check(&token::OpenDelim(token::Brace)) { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) { self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs) } else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) { self.parse_closure_expr(attrs).map_err(|mut err| { @@ -1315,8 +1316,8 @@ impl<'a> Parser<'a> { } err }) - } else if self.check(&token::OpenDelim(token::Bracket)) { - self.parse_array_or_repeat_expr(attrs, token::Bracket) + } else if self.check(&token::OpenDelim(Delimiter::Bracket)) { + self.parse_array_or_repeat_expr(attrs, Delimiter::Bracket) } else if self.check_path() { self.parse_path_start_expr(attrs) } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) { @@ -1373,6 +1374,8 @@ impl<'a> Parser<'a> { self.parse_break_expr(attrs) } else if self.eat_keyword(kw::Yield) { self.parse_yield_expr(attrs) + } else if self.is_do_yeet() { + self.parse_yeet_expr(attrs) } else if self.eat_keyword(kw::Let) { self.parse_let_expr(attrs) } else if self.eat_keyword(kw::Underscore) { @@ -1422,14 +1425,16 @@ impl<'a> Parser<'a> { fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> { let lo = self.token.span; - self.expect(&token::OpenDelim(token::Paren))?; + self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let (es, trailing_comma) = match self.parse_seq_to_end( - &token::CloseDelim(token::Paren), + &token::CloseDelim(Delimiter::Parenthesis), SeqSep::trailing_allowed(token::Comma), |p| p.parse_expr_catch_underscore(), ) { Ok(x) => x, - Err(err) => return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err))), + Err(err) => { + return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err))); + } }; let kind = if es.len() == 1 && !trailing_comma { // `(e)` is 
parenthesized `e`. @@ -1445,7 +1450,7 @@ impl<'a> Parser<'a> { fn parse_array_or_repeat_expr( &mut self, attrs: AttrVec, - close_delim: token::DelimToken, + close_delim: Delimiter, ) -> PResult<'a, P<Expr>> { let lo = self.token.span; self.bump(); // `[` or other open delim @@ -1500,7 +1505,7 @@ impl<'a> Parser<'a> { prior_type_ascription: self.last_type_ascription, }; (self.prev_token.span, ExprKind::MacCall(mac)) - } else if self.check(&token::OpenDelim(token::Brace)) { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) { if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) { if qself.is_some() { self.sess.gated_spans.gate(sym::more_qualified_paths, path.span); @@ -1533,7 +1538,7 @@ impl<'a> Parser<'a> { self.parse_for_expr(label, lo, attrs) } else if self.eat_keyword(kw::Loop) { self.parse_loop_expr(label, lo, attrs) - } else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() { self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs) } else if !ate_colon && (self.check(&TokenKind::Comma) || self.check(&TokenKind::Gt)) { // We're probably inside of a `Path<'a>` that needs a turbofish @@ -1602,6 +1607,21 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr, true) } + /// Parse `"do" "yeet" expr?`. + fn parse_yeet_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> { + let lo = self.token.span; + + self.bump(); // `do` + self.bump(); // `yeet` + + let kind = ExprKind::Yeet(self.parse_expr_opt()?); + + let span = lo.to(self.prev_token.span); + self.sess.gated_spans.gate(sym::yeet_expr, span); + let expr = self.mk_expr(span, kind, attrs); + self.maybe_recover_from_bad_qpath(expr, true) + } + /// Parse `"break" (('label (:? expr)?) | expr?)` with `"break"` token already eaten. 
/// If the label is followed immediately by a `:` token, the label and `:` are /// parsed as part of the expression (i.e. a labeled loop). The language team has @@ -1631,7 +1651,7 @@ impl<'a> Parser<'a> { ) .emit(); Some(lexpr) - } else if self.token != token::OpenDelim(token::Brace) + } else if self.token != token::OpenDelim(Delimiter::Brace) || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) { let expr = self.parse_expr_opt()?; @@ -1768,7 +1788,7 @@ impl<'a> Parser<'a> { .span_suggestion( token.span, "must have an integer part", - pprust::token_to_string(token).into(), + pprust::token_to_string(token), Applicability::MachineApplicable, ) .emit(); @@ -1940,7 +1960,7 @@ impl<'a> Parser<'a> { attrs: AttrVec, ) -> Option<P<Expr>> { let mut snapshot = self.create_snapshot_for_diagnostic(); - match snapshot.parse_array_or_repeat_expr(attrs, token::Brace) { + match snapshot.parse_array_or_repeat_expr(attrs, Delimiter::Brace) { Ok(arr) => { let hi = snapshot.prev_token.span; self.struct_span_err(arr.span, "this is a block expression, not an array") @@ -2043,7 +2063,8 @@ impl<'a> Parser<'a> { self.sess.gated_spans.gate(sym::async_closure, span); } - if self.token.kind == TokenKind::Semi && self.token_cursor.frame.delim == DelimToken::Paren + if self.token.kind == TokenKind::Semi + && matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _))) { // It is likely that the closure body is a block but where the // braces have been removed. We will recover and eat the next @@ -2157,7 +2178,7 @@ impl<'a> Parser<'a> { } } else { let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery. 
- let not_block = self.token != token::OpenDelim(token::Brace); + let not_block = self.token != token::OpenDelim(Delimiter::Brace); let block = self.parse_block().map_err(|err| { if not_block { self.error_missing_if_then_block(lo, Some(err), missing_then_block_binop_span()) @@ -2282,7 +2303,7 @@ impl<'a> Parser<'a> { // This is used below for recovery in case of `for ( $stuff ) $block` // in which case we will suggest `for $stuff $block`. let begin_paren = match self.token.kind { - token::OpenDelim(token::Paren) => Some(self.token.span), + token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span), _ => None, }; @@ -2320,7 +2341,7 @@ impl<'a> Parser<'a> { .span_suggestion_short( span, msg, - sugg.into(), + sugg, // Has been misleading, at least in the past (closed Issue #48492). Applicability::MaybeIncorrect, ) @@ -2371,7 +2392,7 @@ impl<'a> Parser<'a> { let match_span = self.prev_token.span; let lo = self.prev_token.span; let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; - if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { + if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) { if self.token == token::Semi { e.span_suggestion_short( match_span, @@ -2390,7 +2411,7 @@ impl<'a> Parser<'a> { attrs.extend(self.parse_inner_attributes()?); let mut arms: Vec<Arm> = Vec::new(); - while self.token != token::CloseDelim(token::Brace) { + while self.token != token::CloseDelim(Delimiter::Brace) { match self.parse_arm() { Ok(arm) => arms.push(arm), Err(mut e) => { @@ -2398,7 +2419,7 @@ impl<'a> Parser<'a> { e.emit(); self.recover_stmt(); let span = lo.to(self.token.span); - if self.token == token::CloseDelim(token::Brace) { + if self.token == token::CloseDelim(Delimiter::Brace) { self.bump(); } return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs)); @@ -2462,7 +2483,7 @@ impl<'a> Parser<'a> { // We might have either a `,` -> `;` typo, or a block without braces. 
We need // a more subtle parsing strategy. loop { - if self.token.kind == token::CloseDelim(token::Brace) { + if self.token.kind == token::CloseDelim(Delimiter::Brace) { // We have reached the closing brace of the `match` expression. return Some(err(self, stmts)); } @@ -2570,7 +2591,7 @@ impl<'a> Parser<'a> { })?; let require_comma = classify::expr_requires_semi_to_be_stmt(&expr) - && this.token != token::CloseDelim(token::Brace); + && this.token != token::CloseDelim(Delimiter::Brace); let hi = this.prev_token.span; @@ -2591,8 +2612,8 @@ impl<'a> Parser<'a> { TrailingToken::None, )); } - this.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err( - |mut err| { + this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]) + .map_err(|mut err| { match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) { (Ok(ref expr_lines), Ok(ref arm_start_lines)) if arm_start_lines.lines[0].end_col @@ -2626,8 +2647,7 @@ impl<'a> Parser<'a> { } } err - }, - )?; + })?; } else { this.eat(&token::Comma); } @@ -2669,13 +2689,17 @@ impl<'a> Parser<'a> { fn is_do_catch_block(&self) -> bool { self.token.is_keyword(kw::Do) && self.is_keyword_ahead(1, &[kw::Catch]) - && self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) + && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace)) && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) } + fn is_do_yeet(&self) -> bool { + self.token.is_keyword(kw::Do) && self.is_keyword_ahead(1, &[kw::Yeet]) + } + fn is_try_block(&self) -> bool { self.token.is_keyword(kw::Try) - && self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) + && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) && self.token.uninterpolated_span().rust_2018() } @@ -2695,10 +2719,10 @@ impl<'a> Parser<'a> { && (( // `async move {` self.is_keyword_ahead(1, &[kw::Move]) - && self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) + && self.look_ahead(2, |t| *t == 
token::OpenDelim(Delimiter::Brace)) ) || ( // `async {` - self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) + self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) )) } @@ -2725,7 +2749,7 @@ impl<'a> Parser<'a> { ) -> Option<PResult<'a, P<Expr>>> { let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); if struct_allowed || self.is_certainly_not_a_block() { - if let Err(err) = self.expect(&token::OpenDelim(token::Brace)) { + if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) { return Some(Err(err)); } let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true); @@ -2752,7 +2776,7 @@ impl<'a> Parser<'a> { &mut self, pth: ast::Path, recover: bool, - close_delim: token::DelimToken, + close_delim: Delimiter, ) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> { let mut fields = Vec::new(); let mut base = ast::StructRest::None; @@ -2825,7 +2849,7 @@ impl<'a> Parser<'a> { e.span_suggestion( self.prev_token.span.shrink_to_hi(), "try adding a comma", - ",".into(), + ",", Applicability::MachineApplicable, ); } @@ -2852,9 +2876,9 @@ impl<'a> Parser<'a> { ) -> PResult<'a, P<Expr>> { let lo = pth.span; let (fields, base, recover_async) = - self.parse_struct_fields(pth.clone(), recover, token::Brace)?; + self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?; let span = lo.to(self.token.span); - self.expect(&token::CloseDelim(token::Brace))?; + self.expect(&token::CloseDelim(Delimiter::Brace))?; let expr = if recover_async { ExprKind::Err } else { diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs index 29fe2b76101..8081bac7cfd 100644 --- a/compiler/rustc_parse/src/parser/generics.rs +++ b/compiler/rustc_parse/src/parser/generics.rs @@ -30,8 +30,10 @@ impl<'a> Parser<'a> { let ident = self.parse_ident()?; // Parse optional colon and param bounds. 
+ let mut colon_span = None; let bounds = if self.eat(&token::Colon) { - self.parse_generic_bounds(Some(self.prev_token.span))? + colon_span = Some(self.prev_token.span); + self.parse_generic_bounds(colon_span)? } else { Vec::new() }; @@ -45,6 +47,7 @@ impl<'a> Parser<'a> { bounds, kind: GenericParamKind::Type { default }, is_placeholder: false, + colon_span, }) } @@ -69,6 +72,7 @@ impl<'a> Parser<'a> { bounds: Vec::new(), kind: GenericParamKind::Const { ty, kw_span: const_span, default }, is_placeholder: false, + colon_span: None, }) } @@ -97,10 +101,10 @@ impl<'a> Parser<'a> { let param = if this.check_lifetime() { let lifetime = this.expect_lifetime(); // Parse lifetime parameter. - let bounds = if this.eat(&token::Colon) { - this.parse_lt_param_bounds() + let (colon_span, bounds) = if this.eat(&token::Colon) { + (Some(this.prev_token.span), this.parse_lt_param_bounds()) } else { - Vec::new() + (None, Vec::new()) }; Some(ast::GenericParam { ident: lifetime.ident, @@ -109,6 +113,7 @@ impl<'a> Parser<'a> { bounds, kind: ast::GenericParamKind::Lifetime, is_placeholder: false, + colon_span, }) } else if this.check_keyword(kw::Const) { // Parse const parameter. 
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index ca81921faed..10f1daf1129 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -4,7 +4,7 @@ use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Traili use rustc_ast::ast::*; use rustc_ast::ptr::P; -use rustc_ast::token::{self, TokenKind}; +use rustc_ast::token::{self, Delimiter, TokenKind}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID}; use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind}; @@ -39,9 +39,9 @@ impl<'a> Parser<'a> { let mod_kind = if self.eat(&token::Semi) { ModKind::Unloaded } else { - self.expect(&token::OpenDelim(token::Brace))?; + self.expect(&token::OpenDelim(Delimiter::Brace))?; let (mut inner_attrs, items, inner_span) = - self.parse_mod(&token::CloseDelim(token::Brace))?; + self.parse_mod(&token::CloseDelim(Delimiter::Brace))?; attrs.append(&mut inner_attrs); ModKind::Loaded(items, Inline::Yes, inner_span) }; @@ -324,7 +324,7 @@ impl<'a> Parser<'a> { let sp = self.prev_token.span.between(self.token.span); let full_sp = self.prev_token.span.to(self.token.span); let ident_sp = self.token.span; - if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) { + if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) { // possible public struct definition where `struct` was forgotten let ident = self.parse_ident().unwrap(); let msg = format!("add `struct` here to parse `{ident}` as a public struct"); @@ -332,20 +332,20 @@ impl<'a> Parser<'a> { err.span_suggestion_short( sp, &msg, - " struct ".into(), + " struct ", Applicability::MaybeIncorrect, // speculative ); Err(err) - } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) { + } else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) { let ident = 
self.parse_ident().unwrap(); self.bump(); // `(` let kw_name = self.recover_first_param(); - self.consume_block(token::Paren, ConsumeClosingDelim::Yes); + self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes); let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) { - self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]); + self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]); self.bump(); // `{` ("fn", kw_name, false) - } else if self.check(&token::OpenDelim(token::Brace)) { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) { self.bump(); // `{` ("fn", kw_name, false) } else if self.check(&token::Colon) { @@ -358,7 +358,7 @@ impl<'a> Parser<'a> { let msg = format!("missing `{kw}` for {kw_name} definition"); let mut err = self.struct_span_err(sp, &msg); if !ambiguous { - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); + self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); let suggestion = format!("add `{kw}` here to parse `{ident}` as a public {kw_name}"); err.span_suggestion_short( @@ -386,9 +386,9 @@ impl<'a> Parser<'a> { let ident = self.parse_ident().unwrap(); self.eat_to_tokens(&[&token::Gt]); self.bump(); // `>` - let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) { + let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) { ("fn", self.recover_first_param(), false) - } else if self.check(&token::OpenDelim(token::Brace)) { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) { ("struct", "struct", false) } else { ("fn` or `struct", "function or struct", true) @@ -532,13 +532,13 @@ impl<'a> Parser<'a> { .span_suggestion( span, "add a trait here", - " Trait ".into(), + " Trait ", Applicability::HasPlaceholders, ) .span_suggestion( span.to(self.token.span), "for an inherent impl, drop this `for`", - "".into(), + "", Applicability::MaybeIncorrect, ) .emit(); @@ -630,11 +630,11 @@ impl<'a> Parser<'a> { mut parse_item: impl FnMut(&mut 
Parser<'a>) -> PResult<'a, Option<Option<T>>>, ) -> PResult<'a, Vec<T>> { let open_brace_span = self.token.span; - self.expect(&token::OpenDelim(token::Brace))?; + self.expect(&token::OpenDelim(Delimiter::Brace))?; attrs.append(&mut self.parse_inner_attributes()?); let mut items = Vec::new(); - while !self.eat(&token::CloseDelim(token::Brace)) { + while !self.eat(&token::CloseDelim(Delimiter::Brace)) { if self.recover_doc_comment_before_brace() { continue; } @@ -642,7 +642,7 @@ impl<'a> Parser<'a> { Ok(None) => { // We have to bail or we'll potentially never make progress. let non_item_span = self.token.span; - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); + self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); self.struct_span_err(non_item_span, "non-item in item list") .span_label(open_brace_span, "item list starts here") .span_label(non_item_span, "non-item starts here") @@ -652,7 +652,7 @@ impl<'a> Parser<'a> { } Ok(Some(item)) => items.extend(item), Err(mut err) => { - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); + self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); err.span_label(open_brace_span, "while parsing this item list starting here") .span_label(self.prev_token.span, "the item list ends here") .emit(); @@ -666,7 +666,7 @@ impl<'a> Parser<'a> { /// Recover on a doc comment before `}`. fn recover_doc_comment_before_brace(&mut self) -> bool { if let token::DocComment(..) 
= self.token.kind { - if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) { + if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) { struct_span_err!( self.diagnostic(), self.token.span, @@ -866,7 +866,7 @@ impl<'a> Parser<'a> { let lo = self.token.span; let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None }; - let kind = if self.check(&token::OpenDelim(token::Brace)) + let kind = if self.check(&token::OpenDelim(Delimiter::Brace)) || self.check(&token::BinOp(token::Star)) || self.is_import_coupler() { @@ -908,7 +908,7 @@ impl<'a> Parser<'a> { /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`] /// ``` fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> { - self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID))) + self.parse_delim_comma_seq(Delimiter::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID))) .map(|(r, _)| r) } @@ -1077,7 +1077,7 @@ impl<'a> Parser<'a> { && self.is_keyword_ahead(1, &[kw::Extern]) && self.look_ahead( 2 + self.look_ahead(2, |t| t.can_begin_literal_maybe_minus() as usize), - |t| t.kind == token::OpenDelim(token::Brace), + |t| t.kind == token::OpenDelim(Delimiter::Brace), ) } @@ -1204,8 +1204,9 @@ impl<'a> Parser<'a> { let mut generics = self.parse_generics()?; generics.where_clause = self.parse_where_clause()?; - let (variants, _) = - self.parse_delim_comma_seq(token::Brace, |p| p.parse_enum_variant()).map_err(|e| { + let (variants, _) = self + .parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant()) + .map_err(|e| { self.recover_stmt(); e })?; @@ -1228,11 +1229,11 @@ impl<'a> Parser<'a> { } let ident = this.parse_field_ident("enum", vlo)?; - let struct_def = if this.check(&token::OpenDelim(token::Brace)) { + let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) { // Parse a struct variant. 
let (fields, recovered) = this.parse_record_struct_body("struct", false)?; VariantData::Struct(fields, recovered) - } else if this.check(&token::OpenDelim(token::Paren)) { + } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID) } else { VariantData::Unit(DUMMY_NODE_ID) @@ -1292,12 +1293,12 @@ impl<'a> Parser<'a> { } else if self.eat(&token::Semi) { VariantData::Unit(DUMMY_NODE_ID) // Record-style struct definition - } else if self.token == token::OpenDelim(token::Brace) { + } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?; VariantData::Struct(fields, recovered) // Tuple-style struct definition with optional where-clause. - } else if self.token == token::OpenDelim(token::Paren) { + } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); generics.where_clause = self.parse_where_clause()?; self.expect_semi()?; @@ -1326,7 +1327,7 @@ impl<'a> Parser<'a> { let (fields, recovered) = self.parse_record_struct_body("union", generics.where_clause.has_where_token)?; VariantData::Struct(fields, recovered) - } else if self.token == token::OpenDelim(token::Brace) { + } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body("union", generics.where_clause.has_where_token)?; VariantData::Struct(fields, recovered) @@ -1348,10 +1349,10 @@ impl<'a> Parser<'a> { ) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> { let mut fields = Vec::new(); let mut recovered = false; - if self.eat(&token::OpenDelim(token::Brace)) { - while self.token != token::CloseDelim(token::Brace) { + if self.eat(&token::OpenDelim(Delimiter::Brace)) { + while self.token != token::CloseDelim(Delimiter::Brace) { let field = self.parse_field_def(adt_ty).map_err(|e| { - 
self.consume_block(token::Brace, ConsumeClosingDelim::No); + self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No); recovered = true; e }); @@ -1363,7 +1364,7 @@ impl<'a> Parser<'a> { } } } - self.eat(&token::CloseDelim(token::Brace)); + self.eat(&token::CloseDelim(Delimiter::Brace)); } else { let token_str = super::token_descr(&self.token); let msg = &format!( @@ -1439,7 +1440,7 @@ impl<'a> Parser<'a> { token::Comma => { self.bump(); } - token::CloseDelim(token::Brace) => {} + token::CloseDelim(Delimiter::Brace) => {} token::DocComment(..) => { let previous_span = self.prev_token.span; let mut err = self.span_err(self.token.span, Error::UselessDocComment); @@ -1450,7 +1451,7 @@ impl<'a> Parser<'a> { if !seen_comma && comma_after_doc_seen { seen_comma = true; } - if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) { + if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) { err.emit(); } else { if !seen_comma { @@ -1458,7 +1459,7 @@ impl<'a> Parser<'a> { err.span_suggestion( sp, "missing comma here", - ",".into(), + ",", Applicability::MachineApplicable, ); } @@ -1478,7 +1479,7 @@ impl<'a> Parser<'a> { if let Some(last_segment) = segments.last() { recovered = self.check_trailing_angle_brackets( last_segment, - &[&token::Comma, &token::CloseDelim(token::Brace)], + &[&token::Comma, &token::CloseDelim(Delimiter::Brace)], ); if recovered { // Handle a case like `Vec<u8>>,` where we can continue parsing fields @@ -1497,7 +1498,7 @@ impl<'a> Parser<'a> { err.span_suggestion( sp, "try adding a comma", - ",".into(), + ",", Applicability::MachineApplicable, ); err.emit(); @@ -1636,12 +1637,12 @@ impl<'a> Parser<'a> { /// ``` fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> { let ident = self.parse_ident()?; - let body = if self.check(&token::OpenDelim(token::Brace)) { + let body = if self.check(&token::OpenDelim(Delimiter::Brace)) { self.parse_mac_args()? 
// `MacBody` - } else if self.check(&token::OpenDelim(token::Paren)) { + } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { let params = self.parse_token_tree(); // `MacParams` let pspan = params.span(); - if !self.check(&token::OpenDelim(token::Brace)) { + if !self.check(&token::OpenDelim(Delimiter::Brace)) { return self.unexpected(); } let body = self.parse_token_tree(); // `MacBody` @@ -1924,7 +1925,7 @@ impl<'a> Parser<'a> { self.expect_semi()?; *sig_hi = self.prev_token.span; (Vec::new(), None) - } else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() { self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))? } else if self.token.kind == token::Eq { // Recover `fn foo() = $expr;`. @@ -1943,12 +1944,12 @@ impl<'a> Parser<'a> { (Vec::new(), Some(self.mk_block_err(span))) } else { let expected = if req_body { - &[token::OpenDelim(token::Brace)][..] + &[token::OpenDelim(Delimiter::Brace)][..] } else { - &[token::Semi, token::OpenDelim(token::Brace)] + &[token::Semi, token::OpenDelim(Delimiter::Brace)] }; if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) { - if self.token.kind == token::CloseDelim(token::Brace) { + if self.token.kind == token::CloseDelim(Delimiter::Brace) { // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in // the AST for typechecking. err.span_label(ident.span, "while parsing this `fn`"); @@ -2164,7 +2165,7 @@ impl<'a> Parser<'a> { e.emit(); let lo = p.prev_token.span; // Skip every token until next possible arg or end. - p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); + p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]); // Create a placeholder argument for proper arg count (issue #34264). 
Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)))) }); @@ -2220,7 +2221,7 @@ impl<'a> Parser<'a> { let mut ty = this.parse_ty_for_param(); if ty.is_ok() && this.token != token::Comma - && this.token != token::CloseDelim(token::Paren) + && this.token != token::CloseDelim(Delimiter::Parenthesis) { // This wasn't actually a type, but a pattern looking like a type, // so we are going to rollback and re-parse for recovery. diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index cb6be8f412c..cd61584a876 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -19,7 +19,7 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; pub use path::PathStyle; use rustc_ast::ptr::P; -use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind}; use rustc_ast::tokenstream::AttributesData; use rustc_ast::tokenstream::{self, DelimSpan, Spacing}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; @@ -123,8 +123,8 @@ pub struct Parser<'a> { pub capture_cfg: bool, restrictions: Restrictions, expected_tokens: Vec<TokenType>, - // Important: This must only be advanced from `next_tok` - // to ensure that `token_cursor.num_next_calls` is updated properly + // Important: This must only be advanced from `bump` to ensure that + // `token_cursor.num_next_calls` is updated properly. token_cursor: TokenCursor, desugar_doc_comments: bool, /// This field is used to keep track of how many left angle brackets we have seen. This is @@ -150,6 +150,11 @@ pub struct Parser<'a> { pub current_closure: Option<ClosureSpans>, } +// This type is used a lot, e.g. it's cloned when matching many declarative macro rules. Make sure +// it doesn't unintentionally get bigger. 
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] +rustc_data_structures::static_assert_size!(Parser<'_>, 328); + /// Stores span information about a closure. #[derive(Clone)] pub struct ClosureSpans { @@ -203,12 +208,15 @@ impl<'a> Drop for Parser<'a> { #[derive(Clone)] struct TokenCursor { + // The current (innermost) frame. `frame` and `stack` could be combined, + // but it's faster to have them separately to access `frame` directly + // rather than via something like `stack.last().unwrap()` or + // `stack[stack.len() - 1]`. frame: TokenCursorFrame, + // Additional frames that enclose `frame`. stack: Vec<TokenCursorFrame>, desugar_doc_comments: bool, - // Counts the number of calls to `{,inlined_}next` or - // `{,inlined_}next_desugared`, depending on whether - // `desugar_doc_comments` is set. + // Counts the number of calls to `{,inlined_}next`. num_next_calls: usize, // During parsing, we may sometimes need to 'unglue' a // glued token into two component tokens @@ -236,75 +244,60 @@ struct TokenCursor { #[derive(Clone)] struct TokenCursorFrame { - delim: token::DelimToken, - span: DelimSpan, - open_delim: bool, + delim_sp: Option<(Delimiter, DelimSpan)>, tree_cursor: tokenstream::Cursor, - close_delim: bool, } impl TokenCursorFrame { - fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self { - TokenCursorFrame { - delim, - span, - open_delim: false, - tree_cursor: tts.into_trees(), - close_delim: false, - } + fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self { + TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() } } } impl TokenCursor { - fn next(&mut self) -> (Token, Spacing) { - self.inlined_next() + fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) { + self.inlined_next(desugar_doc_comments) } /// This always-inlined version should only be used on hot code paths. 
#[inline(always)] - fn inlined_next(&mut self) -> (Token, Spacing) { + fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) { loop { - let (tree, spacing) = if !self.frame.open_delim { - self.frame.open_delim = true; - TokenTree::open_tt(self.frame.span, self.frame.delim).into() - } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() { - tree - } else if !self.frame.close_delim { - self.frame.close_delim = true; - TokenTree::close_tt(self.frame.span, self.frame.delim).into() + // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will + // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be + // removed. + if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() { + match tree { + &TokenTree::Token(ref token) => match (desugar_doc_comments, token) { + (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => { + return self.desugar(attr_style, data, span); + } + _ => return (token.clone(), *spacing), + }, + &TokenTree::Delimited(sp, delim, ref tts) => { + // Set `open_delim` to true here because we deal with it immediately. + let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone()); + self.stack.push(mem::replace(&mut self.frame, frame)); + if delim != Delimiter::Invisible { + return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone); + } + // No open delimeter to return; continue on to the next iteration. + } + }; } else if let Some(frame) = self.stack.pop() { + if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible { + self.frame = frame; + return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone); + } self.frame = frame; - continue; + // No close delimiter to return; continue on to the next iteration. 
} else { - (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone) - }; - - match tree { - TokenTree::Token(token) => { - return (token, spacing); - } - TokenTree::Delimited(sp, delim, tts) => { - let frame = TokenCursorFrame::new(sp, delim, tts); - self.stack.push(mem::replace(&mut self.frame, frame)); - } + return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone); } } } - fn next_desugared(&mut self) -> (Token, Spacing) { - self.inlined_next_desugared() - } - - /// This always-inlined version should only be used on hot code paths. - #[inline(always)] - fn inlined_next_desugared(&mut self) -> (Token, Spacing) { - let (data, attr_style, sp) = match self.inlined_next() { - (Token { kind: token::DocComment(_, attr_style, data), span }, _) => { - (data, attr_style, span) - } - tok => return tok, - }; - + fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token, Spacing) { // Searches for the occurrences of `"#*` and returns the minimum number of `#`s // required to wrap the text. 
let mut num_of_hashes = 0; @@ -318,14 +311,14 @@ impl TokenCursor { num_of_hashes = cmp::max(num_of_hashes, count); } - let delim_span = DelimSpan::from_single(sp); + let delim_span = DelimSpan::from_single(span); let body = TokenTree::Delimited( delim_span, - token::Bracket, + Delimiter::Bracket, [ - TokenTree::token(token::Ident(sym::doc, false), sp), - TokenTree::token(token::Eq, sp), - TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp), + TokenTree::token(token::Ident(sym::doc, false), span), + TokenTree::token(token::Eq, span), + TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), span), ] .iter() .cloned() @@ -335,15 +328,14 @@ impl TokenCursor { self.stack.push(mem::replace( &mut self.frame, TokenCursorFrame::new( - delim_span, - token::NoDelim, + None, if attr_style == AttrStyle::Inner { - [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] + [TokenTree::token(token::Pound, span), TokenTree::token(token::Not, span), body] .iter() .cloned() .collect::<TokenStream>() } else { - [TokenTree::token(token::Pound, sp), body] + [TokenTree::token(token::Pound, span), body] .iter() .cloned() .collect::<TokenStream>() @@ -351,7 +343,7 @@ impl TokenCursor { ), )); - self.next() + self.next(/* desugar_doc_comments */ false) } } @@ -436,10 +428,6 @@ impl<'a> Parser<'a> { desugar_doc_comments: bool, subparser_name: Option<&'static str>, ) -> Self { - let mut start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens); - start_frame.open_delim = true; - start_frame.close_delim = true; - let mut parser = Parser { sess, token: Token::dummy(), @@ -449,7 +437,7 @@ impl<'a> Parser<'a> { restrictions: Restrictions::empty(), expected_tokens: Vec::new(), token_cursor: TokenCursor { - frame: start_frame, + frame: TokenCursorFrame::new(None, tokens), stack: Vec::new(), num_next_calls: 0, desugar_doc_comments, @@ -476,33 +464,6 @@ impl<'a> Parser<'a> { parser } - #[inline] - fn 
next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) { - loop { - let (mut next, spacing) = if self.desugar_doc_comments { - self.token_cursor.inlined_next_desugared() - } else { - self.token_cursor.inlined_next() - }; - self.token_cursor.num_next_calls += 1; - // We've retrieved an token from the underlying - // cursor, so we no longer need to worry about - // an unglued token. See `break_and_eat` for more details - self.token_cursor.break_last_token = false; - if next.span.is_dummy() { - // Tweak the location for better diagnostics, but keep syntactic context intact. - next.span = fallback_span.with_ctxt(next.span.ctxt()); - } - if matches!( - next.kind, - token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) - ) { - continue; - } - return (next, spacing); - } - } - pub fn unexpected<T>(&mut self) -> PResult<'a, T> { match self.expect_one_of(&[], &[]) { Err(e) => Err(e), @@ -665,7 +626,7 @@ impl<'a> Parser<'a> { self.is_keyword_ahead(dist, &[kw::Const]) && self.look_ahead(dist + 1, |t| match t.kind { token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)), - token::OpenDelim(DelimToken::Brace) => true, + token::OpenDelim(Delimiter::Brace) => true, _ => false, }) } @@ -697,7 +658,7 @@ impl<'a> Parser<'a> { // // If we consume any additional tokens, then this token // is not needed (we'll capture the entire 'glued' token), - // and `next_tok` will set this field to `None` + // and `bump` will set this field to `None` self.token_cursor.break_last_token = true; // Use the spacing of the glued token as the spacing // of the unglued second token. 
@@ -841,7 +802,7 @@ impl<'a> Parser<'a> { .span_suggestion_verbose( self.prev_token.span.shrink_to_hi().until(self.token.span), &msg, - " @ ".to_string(), + " @ ", Applicability::MaybeIncorrect, ) .emit(); @@ -857,7 +818,7 @@ impl<'a> Parser<'a> { .span_suggestion_short( sp, &format!("missing `{}`", token_str), - token_str.into(), + token_str, Applicability::MaybeIncorrect, ) .emit(); @@ -993,7 +954,7 @@ impl<'a> Parser<'a> { fn parse_delim_comma_seq<T>( &mut self, - delim: DelimToken, + delim: Delimiter, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (Vec<T>, bool)> { self.parse_unspanned_seq( @@ -1008,7 +969,7 @@ impl<'a> Parser<'a> { &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (Vec<T>, bool)> { - self.parse_delim_comma_seq(token::Paren, f) + self.parse_delim_comma_seq(Delimiter::Parenthesis, f) } /// Advance the parser by one token using provided token as the next one. @@ -1019,12 +980,6 @@ impl<'a> Parser<'a> { /// This always-inlined version should only be used on hot code paths. #[inline(always)] fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) { - // Bumping after EOF is a bad sign, usually an infinite loop. - if self.prev_token.kind == TokenKind::Eof { - let msg = "attempted to bump the parser past EOF (may be stuck in a loop)"; - self.span_bug(self.token.span, msg); - } - // Update the current and previous tokens. self.prev_token = mem::replace(&mut self.token, next_token); self.token_spacing = next_spacing; @@ -1035,8 +990,24 @@ impl<'a> Parser<'a> { /// Advance the parser by one token. pub fn bump(&mut self) { - let next_token = self.next_tok(self.token.span); - self.inlined_bump_with(next_token); + // Note: destructuring here would give nicer code, but it was found in #96210 to be slower + // than `.0`/`.1` access. 
+ let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments); + self.token_cursor.num_next_calls += 1; + // We've retrieved a token from the underlying + // cursor, so we no longer need to worry about + // an unglued token. See `break_and_eat` for more details + self.token_cursor.break_last_token = false; + if next.0.span.is_dummy() { + // Tweak the location for better diagnostics, but keep syntactic context intact. + let fallback_span = self.token.span; + next.0.span = fallback_span.with_ctxt(next.0.span.ctxt()); + } + debug_assert!(!matches!( + next.0.kind, + token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible) + )); + self.inlined_bump_with(next) } /// Look-ahead `dist` tokens of `self.token` and get access to that token there. @@ -1047,10 +1018,10 @@ impl<'a> Parser<'a> { } let frame = &self.token_cursor.frame; - if frame.delim != DelimToken::NoDelim { + if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible { let all_normal = (0..dist).all(|i| { let token = frame.tree_cursor.look_ahead(i); - !matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _))) + !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _))) }); if all_normal { return match frame.tree_cursor.look_ahead(dist - 1) { @@ -1060,7 +1031,7 @@ looker(&Token::new(token::OpenDelim(*delim), dspan.open)) } }, - None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)), + None => looker(&Token::new(token::CloseDelim(delim), span.close)), }; } } @@ -1069,10 +1040,10 @@ let mut i = 0; let mut token = Token::dummy(); while i < dist { - token = cursor.next().0; + token = cursor.next(/* desugar_doc_comments */ false).0; if matches!( token.kind, - token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) + token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible) ) { continue; } @@ -1108,7 +1079,7 @@ impl<'a> Parser<'a> { 
/// Parses constness: `const` or nothing. fn parse_constness(&mut self) -> Const { // Avoid const blocks to be parsed as const items - if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace)) + if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace)) && self.eat_keyword(kw::Const) { Const::Yes(self.prev_token.uninterpolated_span()) @@ -1171,9 +1142,9 @@ impl<'a> Parser<'a> { fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> { Ok( - if self.check(&token::OpenDelim(DelimToken::Paren)) - || self.check(&token::OpenDelim(DelimToken::Bracket)) - || self.check(&token::OpenDelim(DelimToken::Brace)) + if self.check(&token::OpenDelim(Delimiter::Parenthesis)) + || self.check(&token::OpenDelim(Delimiter::Bracket)) + || self.check(&token::OpenDelim(Delimiter::Brace)) { match self.parse_token_tree() { TokenTree::Delimited(dspan, delim, tokens) => @@ -1217,24 +1188,27 @@ impl<'a> Parser<'a> { pub(crate) fn parse_token_tree(&mut self) -> TokenTree { match self.token.kind { token::OpenDelim(..) => { - let depth = self.token_cursor.stack.len(); - - // We keep advancing the token cursor until we hit - // the matching `CloseDelim` token. - while !(depth == self.token_cursor.stack.len() - && matches!(self.token.kind, token::CloseDelim(_))) - { + // Grab the tokens from this frame. + let frame = &self.token_cursor.frame; + let stream = frame.tree_cursor.stream.clone(); + let (delim, span) = frame.delim_sp.unwrap(); + + // Advance the token cursor through the entire delimited + // sequence. After getting the `OpenDelim` we are *within* the + // delimited sequence, i.e. at depth `d`. After getting the + // matching `CloseDelim` we are *after* the delimited sequence, + // i.e. at depth `d - 1`. + let target_depth = self.token_cursor.stack.len() - 1; + loop { // Advance one token at a time, so `TokenCursor::next()` // can capture these tokens if necessary. 
self.bump(); + if self.token_cursor.stack.len() == target_depth { + debug_assert!(matches!(self.token.kind, token::CloseDelim(_))); + break; + } } - // We are still inside the frame corresponding - // to the delimited stream we captured, so grab - // the tokens from this frame. - let frame = &self.token_cursor.frame; - let stream = frame.tree_cursor.stream.clone(); - let span = frame.span; - let delim = frame.delim; + // Consume close delimiter self.bump(); TokenTree::Delimited(span, delim, stream) @@ -1314,7 +1288,7 @@ impl<'a> Parser<'a> { } let lo = self.prev_token.span; - if self.check(&token::OpenDelim(token::Paren)) { + if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { // We don't `self.bump()` the `(` yet because this might be a struct definition where // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so @@ -1325,7 +1299,7 @@ impl<'a> Parser<'a> { // Parse `pub(crate)`. self.bump(); // `(` self.bump(); // `crate` - self.expect(&token::CloseDelim(token::Paren))?; // `)` + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)` let vis = VisibilityKind::Crate(CrateSugar::PubCrate); return Ok(Visibility { span: lo.to(self.prev_token.span), @@ -1337,20 +1311,20 @@ impl<'a> Parser<'a> { self.bump(); // `(` self.bump(); // `in` let path = self.parse_path(PathStyle::Mod)?; // `path` - self.expect(&token::CloseDelim(token::Paren))?; // `)` + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)` let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; return Ok(Visibility { span: lo.to(self.prev_token.span), kind: vis, tokens: None, }); - } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) + } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis)) && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower]) { // Parse `pub(self)` or `pub(super)`. 
self.bump(); // `(` let path = self.parse_path(PathStyle::Mod)?; // `super`/`self` - self.expect(&token::CloseDelim(token::Paren))?; // `)` + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)` let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; return Ok(Visibility { span: lo.to(self.prev_token.span), @@ -1372,7 +1346,7 @@ impl<'a> Parser<'a> { fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { self.bump(); // `(` let path = self.parse_path(PathStyle::Mod)?; - self.expect(&token::CloseDelim(token::Paren))?; // `)` + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)` let msg = "incorrect visibility restriction"; let suggestion = r##"some possible visibility restrictions are: @@ -1439,7 +1413,7 @@ impl<'a> Parser<'a> { fn is_import_coupler(&mut self) -> bool { self.check(&token::ModSep) && self.look_ahead(1, |t| { - *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star) + *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star) }) } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index b45bca3d2e0..6974f318f94 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -1,5 +1,5 @@ use rustc_ast::ptr::P; -use rustc_ast::token::{self, NonterminalKind, Token}; +use rustc_ast::token::{self, Delimiter, NonterminalKind, Token}; use rustc_ast::AstLike; use rustc_ast_pretty::pprust; use rustc_errors::PResult; @@ -11,8 +11,10 @@ use crate::parser::{FollowedByType, ForceCollect, NtOrTt, Parser, PathStyle}; impl<'a> Parser<'a> { /// Checks whether a non-terminal may begin with a particular token. /// - /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that - /// token. Be conservative (return true) if not sure. 
+ /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with + /// that token. Be conservative (return true) if not sure. Inlined because it has a single call + /// site. + #[inline] pub fn nonterminal_may_begin_with(kind: NonterminalKind, token: &Token) -> bool { /// Checks whether the non-terminal may contain a single (non-keyword) identifier. fn may_be_ident(nt: &token::Nonterminal) -> bool { @@ -41,7 +43,7 @@ impl<'a> Parser<'a> { _ => token.can_begin_type(), }, NonterminalKind::Block => match token.kind { - token::OpenDelim(token::Brace) => true, + token::OpenDelim(Delimiter::Brace) => true, token::Interpolated(ref nt) => !matches!( **nt, token::NtItem(_) @@ -65,8 +67,8 @@ impl<'a> Parser<'a> { NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => { match token.kind { token::Ident(..) | // box, ref, mut, and other identifiers (can stricten) - token::OpenDelim(token::Paren) | // tuple pattern - token::OpenDelim(token::Bracket) | // slice pattern + token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern + token::OpenDelim(Delimiter::Bracket) | // slice pattern token::BinOp(token::And) | // reference token::BinOp(token::Minus) | // negative literal token::AndAnd | // double reference @@ -95,7 +97,9 @@ impl<'a> Parser<'a> { } } - /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). + /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call + /// site. + #[inline] pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, NtOrTt> { // Any `Nonterminal` which stores its tokens (currently `NtItem` and `NtExpr`) // needs to have them force-captured here. 
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 67bbbf24936..8019c5fb67c 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -2,7 +2,7 @@ use super::{ForceCollect, Parser, PathStyle, TrailingToken}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor}; use rustc_ast::ptr::P; -use rustc_ast::token; +use rustc_ast::token::{self, Delimiter}; use rustc_ast::{ self as ast, AttrVec, Attribute, BindingMode, Expr, ExprKind, MacCall, Mutability, Pat, PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax, @@ -260,9 +260,9 @@ impl<'a> Parser<'a> { | token::Semi // e.g. `let a |;`. | token::Colon // e.g. `let a | :`. | token::Comma // e.g. `let (a |,)`. - | token::CloseDelim(token::Bracket) // e.g. `let [a | ]`. - | token::CloseDelim(token::Paren) // e.g. `let (a | )`. - | token::CloseDelim(token::Brace) // e.g. `let A { f: a | }`. + | token::CloseDelim(Delimiter::Bracket) // e.g. `let [a | ]`. + | token::CloseDelim(Delimiter::Parenthesis) // e.g. `let (a | )`. + | token::CloseDelim(Delimiter::Brace) // e.g. `let A { f: a | }`. ) }); match (is_end_ahead, &self.token.kind) { @@ -323,11 +323,11 @@ impl<'a> Parser<'a> { let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd { self.parse_pat_deref(expected)? - } else if self.check(&token::OpenDelim(token::Paren)) { + } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { self.parse_pat_tuple_or_parens()? - } else if self.check(&token::OpenDelim(token::Bracket)) { + } else if self.check(&token::OpenDelim(Delimiter::Bracket)) { // Parse `[pat, pat,...]` as a slice pattern. 
- let (pats, _) = self.parse_delim_comma_seq(token::Bracket, |p| { + let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| { p.parse_pat_allow_top_alt( None, RecoverComma::No, @@ -389,9 +389,9 @@ impl<'a> Parser<'a> { } else if let Some(form) = self.parse_range_end() { let begin = self.mk_expr(span, ExprKind::Path(qself, path), AttrVec::new()); self.parse_pat_range_begin_with(begin, form)? - } else if self.check(&token::OpenDelim(token::Brace)) { + } else if self.check(&token::OpenDelim(Delimiter::Brace)) { self.parse_pat_struct(qself, path)? - } else if self.check(&token::OpenDelim(token::Paren)) { + } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { self.parse_pat_tuple_struct(qself, path)? } else { PatKind::Path(qself, path) @@ -606,7 +606,7 @@ impl<'a> Parser<'a> { .span_suggestion( mutref_span, "try switching the order", - "ref mut".into(), + "ref mut", Applicability::MachineApplicable, ) .emit(); @@ -845,8 +845,8 @@ impl<'a> Parser<'a> { // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`. && !self.token.is_keyword(kw::In) // Try to do something more complex? - && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(token::Paren) // A tuple struct pattern. - | token::OpenDelim(token::Brace) // A struct pattern. + && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern. + | token::OpenDelim(Delimiter::Brace) // A struct pattern. | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. | token::ModSep // A tuple / struct variant pattern. | token::Not)) // A macro expanding to a pattern. @@ -868,7 +868,7 @@ impl<'a> Parser<'a> { // This shortly leads to a parse error. Note that if there is no explicit // binding mode then we do not end up here, because the lookahead // will direct us over to `parse_enum_variant()`. 
- if self.token == token::OpenDelim(token::Paren) { + if self.token == token::OpenDelim(Delimiter::Parenthesis) { return Err(self .struct_span_err(self.prev_token.span, "expected identifier, found enum pattern")); } @@ -917,7 +917,7 @@ impl<'a> Parser<'a> { let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None; let mut etc_span = None; - while self.token != token::CloseDelim(token::Brace) { + while self.token != token::CloseDelim(Delimiter::Brace) { let attrs = match self.parse_outer_attributes() { Ok(attrs) => attrs, Err(err) => { @@ -946,7 +946,7 @@ impl<'a> Parser<'a> { self.recover_one_fewer_dotdot(); self.bump(); // `..` || `...` - if self.token == token::CloseDelim(token::Brace) { + if self.token == token::CloseDelim(Delimiter::Brace) { etc_span = Some(etc_sp); break; } @@ -970,7 +970,7 @@ impl<'a> Parser<'a> { } etc_span = Some(etc_sp.until(self.token.span)); - if self.token == token::CloseDelim(token::Brace) { + if self.token == token::CloseDelim(Delimiter::Brace) { // If the struct looks otherwise well formed, recover and continue. 
if let Some(sp) = comma_sp { err.span_suggestion_short( diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index b9e3adaac03..5c6fb376cd4 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -2,7 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{Parser, Restrictions, TokenType}; use crate::maybe_whole; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Token}; +use rustc_ast::token::{self, Delimiter, Token}; use rustc_ast::{ self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocConstraint, AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs, @@ -236,14 +236,14 @@ impl<'a> Parser<'a> { token.kind, token::Lt | token::BinOp(token::Shl) - | token::OpenDelim(token::Paren) + | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow ) }; let check_args_start = |this: &mut Self| { this.expected_tokens.extend_from_slice(&[ TokenType::Token(token::Lt), - TokenType::Token(token::OpenDelim(token::Paren)), + TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)), ]); is_args_start(&this.token) }; @@ -639,7 +639,7 @@ impl<'a> Parser<'a> { /// the caller. pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> { // Parse const argument. - let value = if let token::OpenDelim(token::Brace) = self.token.kind { + let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind { self.parse_block_expr( None, self.token.span, @@ -667,7 +667,8 @@ impl<'a> Parser<'a> { GenericArg::Const(self.parse_const_arg()?) } else if self.check_type() { // Parse type argument. 
- let is_const_fn = self.look_ahead(1, |t| t.kind == token::OpenDelim(token::Paren)); + let is_const_fn = + self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis)); let mut snapshot = self.create_snapshot_for_diagnostic(); match self.parse_ty() { Ok(ty) => GenericArg::Type(ty), diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 5b7ae5f7a7b..ac693597662 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -11,7 +11,7 @@ use crate::maybe_whole; use rustc_ast as ast; use rustc_ast::ptr::P; -use rustc_ast::token::{self, TokenKind}; +use rustc_ast::token::{self, Delimiter, TokenKind}; use rustc_ast::util::classify; use rustc_ast::{ AstLike, AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle, @@ -92,7 +92,7 @@ impl<'a> Parser<'a> { // Do not attempt to parse an expression if we're done here. self.error_outer_attrs(&attrs.take_for_recovery()); self.mk_stmt(lo, StmtKind::Empty) - } else if self.token != token::CloseDelim(token::Brace) { + } else if self.token != token::CloseDelim(Delimiter::Brace) { // Remainder are line-expr stmts. let e = if force_collect == ForceCollect::Yes { self.collect_tokens_no_attrs(|this| { @@ -131,7 +131,7 @@ impl<'a> Parser<'a> { } } - let expr = if this.eat(&token::OpenDelim(token::Brace)) { + let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) { this.parse_struct_expr(None, path, AttrVec::new(), true)? 
} else { let hi = this.prev_token.span; @@ -164,25 +164,29 @@ impl<'a> Parser<'a> { let delim = args.delim(); let hi = self.prev_token.span; - let style = - if delim == token::Brace { MacStmtStyle::Braces } else { MacStmtStyle::NoBraces }; + let style = match delim { + Some(Delimiter::Brace) => MacStmtStyle::Braces, + Some(_) => MacStmtStyle::NoBraces, + None => unreachable!(), + }; let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription }; - let kind = - if (delim == token::Brace && self.token != token::Dot && self.token != token::Question) - || self.token == token::Semi - || self.token == token::Eof - { - StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None })) - } else { - // Since none of the above applied, this is an expression statement macro. - let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new()); - let e = self.maybe_recover_from_bad_qpath(e, true)?; - let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?; - let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; - StmtKind::Expr(e) - }; + let kind = if (style == MacStmtStyle::Braces + && self.token != token::Dot + && self.token != token::Question) + || self.token == token::Semi + || self.token == token::Eof + { + StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None })) + } else { + // Since none of the above applied, this is an expression statement macro. + let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new()); + let e = self.maybe_recover_from_bad_qpath(e, true)?; + let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?; + let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; + StmtKind::Expr(e) + }; Ok(self.mk_stmt(lo.to(hi), kind)) } @@ -430,7 +434,7 @@ impl<'a> Parser<'a> { // If the next token is an open brace (e.g., `if a b {`), the place- // inside-a-block suggestion would be more likely wrong than right. 
Ok(Some(_)) - if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) + if self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) || do_not_suggest_help => {} // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836). Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {} @@ -484,7 +488,7 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); self.maybe_recover_unexpected_block_label(); - if !self.eat(&token::OpenDelim(token::Brace)) { + if !self.eat(&token::OpenDelim(Delimiter::Brace)) { return self.error_block_no_opening_brace(); } @@ -505,7 +509,7 @@ impl<'a> Parser<'a> { recover: AttemptLocalParseRecovery, ) -> PResult<'a, P<Block>> { let mut stmts = vec![]; - while !self.eat(&token::CloseDelim(token::Brace)) { + while !self.eat(&token::CloseDelim(Delimiter::Brace)) { if self.token == token::Eof { break; } @@ -549,7 +553,7 @@ impl<'a> Parser<'a> { { // Just check for errors and recover; do not eat semicolon yet. if let Err(mut e) = - self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)]) + self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]) { if let TokenKind::DocComment(..) 
= self.token.kind { if let Ok(snippet) = self.span_to_snippet(self.token.span) { diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index bb387064e27..9e771a8af1a 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -3,7 +3,7 @@ use super::{Parser, PathStyle, TokenType}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::{ self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime, MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind, @@ -249,14 +249,14 @@ impl<'a> Parser<'a> { let lo = self.token.span; let mut impl_dyn_multi = false; - let kind = if self.check(&token::OpenDelim(token::Paren)) { + let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { self.parse_ty_tuple_or_parens(lo, allow_plus)? } else if self.eat(&token::Not) { // Never type `!` TyKind::Never } else if self.eat(&token::BinOp(token::Star)) { self.parse_ty_ptr()? - } else if self.eat(&token::OpenDelim(token::Bracket)) { + } else if self.eat(&token::OpenDelim(Delimiter::Bracket)) { self.parse_array_or_slice_ty()? 
} else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) { // Reference @@ -409,7 +409,7 @@ impl<'a> Parser<'a> { let elt_ty = match self.parse_ty() { Ok(ty) => ty, Err(mut err) - if self.look_ahead(1, |t| t.kind == token::CloseDelim(token::Bracket)) + if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket)) | self.look_ahead(1, |t| t.kind == token::Semi) => { // Recover from `[LIT; EXPR]` and `[LIT]` @@ -422,14 +422,14 @@ impl<'a> Parser<'a> { let ty = if self.eat(&token::Semi) { let mut length = self.parse_anon_const_expr()?; - if let Err(e) = self.expect(&token::CloseDelim(token::Bracket)) { + if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) { // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?; - self.expect(&token::CloseDelim(token::Bracket))?; + self.expect(&token::CloseDelim(Delimiter::Bracket))?; } TyKind::Array(elt_ty, length) } else { - self.expect(&token::CloseDelim(token::Bracket))?; + self.expect(&token::CloseDelim(Delimiter::Bracket))?; TyKind::Slice(elt_ty) }; @@ -492,9 +492,9 @@ impl<'a> Parser<'a> { // Parses the `typeof(EXPR)`. // To avoid ambiguity, the type is surrounded by parentheses. 
fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> { - self.expect(&token::OpenDelim(token::Paren))?; + self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let expr = self.parse_anon_const_expr()?; - self.expect(&token::CloseDelim(token::Paren))?; + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; Ok(TyKind::Typeof(expr)) } @@ -672,7 +672,7 @@ impl<'a> Parser<'a> { || self.check(&token::Question) || self.check(&token::Tilde) || self.check_keyword(kw::For) - || self.check(&token::OpenDelim(token::Paren)) + || self.check(&token::OpenDelim(Delimiter::Parenthesis)) } fn error_negative_bounds( @@ -713,7 +713,7 @@ impl<'a> Parser<'a> { fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> { let anchor_lo = self.prev_token.span; let lo = self.token.span; - let has_parens = self.eat(&token::OpenDelim(token::Paren)); + let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis)); let inner_lo = self.token.span; let is_negative = self.eat(&token::Not); @@ -766,7 +766,7 @@ impl<'a> Parser<'a> { /// Recover on `('lifetime)` with `(` already eaten. fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> { let inner_span = inner_lo.to(self.prev_token.span); - self.expect(&token::CloseDelim(token::Paren))?; + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; let mut err = self.struct_span_err( lo.to(self.prev_token.span), "parenthesized lifetime bounds are not supported", @@ -829,7 +829,7 @@ impl<'a> Parser<'a> { // suggestion is given. 
let bounds = vec![]; self.parse_remaining_bounds(bounds, true)?; - self.expect(&token::CloseDelim(token::Paren))?; + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; let sp = vec![lo, self.prev_token.span]; let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect(); self.struct_span_err(sp, "incorrect braces around trait bounds") @@ -840,7 +840,7 @@ impl<'a> Parser<'a> { ) .emit(); } else { - self.expect(&token::CloseDelim(token::Paren))?; + self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; } } |
