| author | Corey Farwell <coreyf@rwell.org> | 2020-12-31 23:27:33 -0500 |
|---|---|---|
| committer | Corey Farwell <coreyf@rwell.org> | 2020-12-31 23:27:33 -0500 |
| commit | d482de30ea70d537dced8ec04a3903e3264cf106 (patch) | |
| tree | 67d6cd380ef7a66e785a54993bb0ca93b07b43ec /compiler/rustc_parse | |
| parent | 26cc060756d0456b17fdc53ac5d34e7f7bdc873d (diff) | |
| parent | 99ad5a1a2824fea1ecf60068fd3636beae7ea2da (diff) | |
Merge remote-tracking branch 'origin/master' into frewsxcv-san
Diffstat (limited to 'compiler/rustc_parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/lib.rs | 390 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/diagnostics.rs | 18 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 31 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/generics.rs | 6 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/item.rs | 54 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 73 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 18 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/pat.rs | 10 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/path.rs | 12 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/stmt.rs | 4 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/ty.rs | 105 |
11 files changed, 302 insertions, 419 deletions
```diff
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 44999c9b63a..9abffbacfc3 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -7,22 +7,18 @@
 use rustc_ast as ast;
 use rustc_ast::attr::HasAttrs;
-use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree};
+use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diagnostic, FatalError, Level, PResult};
 use rustc_session::parse::ParseSess;
-use rustc_span::{symbol::kw, FileName, SourceFile, Span, DUMMY_SP};
+use rustc_span::{FileName, SourceFile, Span};
 
-use smallvec::SmallVec;
-use std::cell::RefCell;
-use std::mem;
 use std::path::Path;
 use std::str;
-use tracing::{debug, info};
+use tracing::debug;
 
 pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
@@ -237,7 +233,12 @@ pub fn parse_in<'a, T>(
 
 // NOTE(Centril): The following probably shouldn't be here but it acknowledges the
 // fact that architecturally, we are using parsing (read on below to understand why).
-pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
+pub fn nt_to_tokenstream(
+    nt: &Nonterminal,
+    sess: &ParseSess,
+    span: Span,
+    synthesize_tokens: CanSynthesizeMissingTokens,
+) -> TokenStream {
     // A `Nonterminal` is often a parsed AST item. At this point we now
     // need to convert the parsed AST to an actual token stream, e.g.
     // un-parse it basically.
@@ -255,9 +256,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
         |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
+        Nonterminal::NtItem(ref item) => {
+            prepend_attrs(sess, &item.attrs, nt, span, item.tokens.as_ref())
+        }
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
-        Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()),
+        Nonterminal::NtStmt(ref stmt) => prepend_attrs(sess, stmt.attrs(), nt, span, stmt.tokens()),
         Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
         Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
         Nonterminal::NtIdent(ident, is_raw) => {
@@ -274,376 +277,45 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
+            prepend_attrs(sess, &expr.attrs, nt, span, expr.tokens.as_ref())
         }
     };
 
-    // Caches the stringification of 'good' `TokenStreams` which passed
-    // `tokenstream_probably_equal_for_proc_macro`. This allows us to avoid
-    // repeatedly stringifying and comparing the same `TokenStream` for deeply
-    // nested nonterminals.
-    //
-    // We cache by the stringification instead of the `TokenStream` to avoid
-    // needing to implement `Hash` for `TokenStream`. Note that it's possible to
-    // have two distinct `TokenStream`s that stringify to the same result
-    // (e.g. if they differ only in hygiene information). However, any
-    // information lost during the stringification process is also intentionally
-    // ignored by `tokenstream_probably_equal_for_proc_macro`, so it's fine
-    // that a single cache entry may 'map' to multiple distinct `TokenStream`s.
-    //
-    // This is a temporary hack to prevent compilation blowup on certain inputs.
-    // The entire pretty-print/retokenize process will be removed soon.
-    thread_local! {
-        static GOOD_TOKEN_CACHE: RefCell<FxHashSet<String>> = Default::default();
-    }
-
-    // FIXME(#43081): Avoid this pretty-print + reparse hack
-    // Pretty-print the AST struct without inserting any parenthesis
-    // beyond those explicitly written by the user (e.g. `ExpnKind::Paren`).
-    // The resulting stream may have incorrect precedence, but it's only
-    // ever used for a comparison against the captured tokenstream.
-    let source = pprust::nonterminal_to_string_no_extra_parens(nt);
-    let filename = FileName::macro_expansion_source_code(&source);
-    let reparsed_tokens = parse_stream_from_source_str(filename, source.clone(), sess, Some(span));
-
-    // During early phases of the compiler the AST could get modified
-    // directly (e.g., attributes added or removed) and the internal cache
-    // of tokens may not be invalidated or updated. Consequently if the
-    // "lossless" token stream disagrees with our actual stringification
-    // (which has historically been much more battle-tested) then we go
-    // with the lossy stream anyway (losing span information).
-    //
-    // Note that the comparison isn't `==` here to avoid comparing spans,
-    // but it *also* is a "probable" equality which is a pretty weird
-    // definition. We mostly want to catch actual changes to the AST
-    // like a `#[cfg]` being processed or some weird `macro_rules!`
-    // expansion.
-    //
-    // What we *don't* want to catch is the fact that a user-defined
-    // literal like `0xf` is stringified as `15`, causing the cached token
-    // stream to not be literal `==` token-wise (ignoring spans) to the
-    // token stream we got from stringification.
-    //
-    // Instead the "probably equal" check here is "does each token
-    // recursively have the same discriminant?" We basically don't look at
-    // the token values here and assume that such fine grained token stream
-    // modifications, including adding/removing typically non-semantic
-    // tokens such as extra braces and commas, don't happen.
     if let Some(tokens) = tokens {
-        if GOOD_TOKEN_CACHE.with(|cache| cache.borrow().contains(&source)) {
-            return tokens;
-        }
-
-        // Compare with a non-relaxed delim match to start.
-        if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess, false) {
-            GOOD_TOKEN_CACHE.with(|cache| cache.borrow_mut().insert(source.clone()));
-            return tokens;
-        }
-
-        // The check failed. This time, we pretty-print the AST struct with parenthesis
-        // inserted to preserve precedence. This may cause `None`-delimiters in the captured
-        // token stream to match up with inserted parenthesis in the reparsed stream.
-        let source_with_parens = pprust::nonterminal_to_string(nt);
-        let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens);
-
-        if GOOD_TOKEN_CACHE.with(|cache| cache.borrow().contains(&source_with_parens)) {
-            return tokens;
-        }
-
-        let reparsed_tokens_with_parens = parse_stream_from_source_str(
-            filename_with_parens,
-            source_with_parens,
-            sess,
-            Some(span),
-        );
-
-        // Compare with a relaxed delim match - we want inserted parenthesis in the
-        // reparsed stream to match `None`-delimiters in the original stream.
-        if tokenstream_probably_equal_for_proc_macro(
-            &tokens,
-            &reparsed_tokens_with_parens,
-            sess,
-            true,
-        ) {
-            GOOD_TOKEN_CACHE.with(|cache| cache.borrow_mut().insert(source.clone()));
-            return tokens;
-        }
-
-        info!(
-            "cached tokens found, but they're not \"probably equal\", \
-            going with stringified version"
-        );
-        info!("cached tokens: {}", pprust::tts_to_string(&tokens));
-        info!("reparsed tokens: {}", pprust::tts_to_string(&reparsed_tokens_with_parens));
-
-        info!("cached tokens debug: {:?}", tokens);
-        info!("reparsed tokens debug: {:?}", reparsed_tokens_with_parens);
-    }
-    reparsed_tokens
-}
-
-// See comments in `Nonterminal::to_tokenstream` for why we care about
-// *probably* equal here rather than actual equality
-//
-// This is otherwise the same as `eq_unspanned`, only recursing with a
-// different method.
-pub fn tokenstream_probably_equal_for_proc_macro(
-    tokens: &TokenStream,
-    reparsed_tokens: &TokenStream,
-    sess: &ParseSess,
-    relaxed_delim_match: bool,
-) -> bool {
-    // When checking for `probably_eq`, we ignore certain tokens that aren't
-    // preserved in the AST. Because they are not preserved, the pretty
-    // printer arbitrarily adds or removes them when printing as token
-    // streams, making a comparison between a token stream generated from an
-    // AST and a token stream which was parsed into an AST more reliable.
-    fn semantic_tree(tree: &TokenTree) -> bool {
-        if let TokenTree::Token(token) = tree {
-            if let
-                // The pretty printer tends to add trailing commas to
-                // everything, and in particular, after struct fields.
-                | token::Comma
-                // The pretty printer collapses many semicolons into one.
-                | token::Semi
-                // We don't preserve leading `|` tokens in patterns, so
-                // we ignore them entirely
-                | token::BinOp(token::BinOpToken::Or)
-                // We don't preserve trailing '+' tokens in trait bounds,
-                // so we ignore them entirely
-                | token::BinOp(token::BinOpToken::Plus)
-                // The pretty printer can turn `$crate` into `::crate_name`
-                | token::ModSep = token.kind {
-                return false;
-            }
-        }
-        true
-    }
-
-    // When comparing two `TokenStream`s, we ignore the `IsJoint` information.
-    //
-    // However, `rustc_parse::lexer::tokentrees::TokenStreamBuilder` will
-    // use `Token.glue` on adjacent tokens with the proper `IsJoint`.
-    // Since we are ignoring `IsJoint`, a 'glued' token (e.g. `BinOp(Shr)`)
-    // and its 'split'/'unglued' components (e.g. `Gt, Gt`) are equivalent
-    // when determining if two `TokenStream`s are 'probably equal'.
-    //
-    // Therefore, we use `break_two_token_op` to convert all tokens
-    // to the 'unglued' form (if it exists). This ensures that two
-    // `TokenStream`s which differ only in how their tokens are glued
-    // will be considered 'probably equal', which allows us to keep spans.
-    //
-    // This is important when the original `TokenStream` contained
-    // extra spaces (e.g. `f :: < Vec < _ > > ( ) ;`). These extra spaces
-    // will be omitted when we pretty-print, which can cause the original
-    // and reparsed `TokenStream`s to differ in the assignment of `IsJoint`,
-    // leading to some tokens being 'glued' together in one stream but not
-    // the other. See #68489 for more details.
-    fn break_tokens(tree: TokenTree) -> impl Iterator<Item = TokenTree> {
-        // In almost all cases, we should have either zero or one levels
-        // of 'unglueing'. However, in some unusual cases, we may need
-        // to iterate breaking tokens multiple times. For example:
-        // '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
-        let mut token_trees: SmallVec<[_; 2]>;
-        if let TokenTree::Token(token) = tree {
-            let mut out = SmallVec::<[_; 2]>::new();
-            out.push(token);
-            // Iterate to fixpoint:
-            // * We start off with 'out' containing our initial token, and `temp` empty
-            // * If we are able to break any tokens in `out`, then `out` will have
-            //   at least one more element than 'temp', so we will try to break tokens
-            //   again.
-            // * If we cannot break any tokens in 'out', we are done
-            loop {
-                let mut temp = SmallVec::<[_; 2]>::new();
-                let mut changed = false;
-
-                for token in out.into_iter() {
-                    if let Some((first, second)) = token.kind.break_two_token_op() {
-                        temp.push(Token::new(first, DUMMY_SP));
-                        temp.push(Token::new(second, DUMMY_SP));
-                        changed = true;
-                    } else {
-                        temp.push(token);
-                    }
-                }
-                out = temp;
-                if !changed {
-                    break;
-                }
-            }
-            token_trees = out.into_iter().map(TokenTree::Token).collect();
-        } else {
-            token_trees = SmallVec::new();
-            token_trees.push(tree);
-        }
-        token_trees.into_iter()
-    }
-
-    fn expand_token(tree: TokenTree, sess: &ParseSess) -> impl Iterator<Item = TokenTree> {
-        // When checking tokenstreams for 'probable equality', we are comparing
-        // a captured (from parsing) `TokenStream` to a reparsed tokenstream.
-        // The reparsed Tokenstream will never have `None`-delimited groups,
-        // since they are only ever inserted as a result of macro expansion.
-        // Therefore, inserting a `None`-delimited group here (when we
-        // convert a nested `Nonterminal` to a tokenstream) would cause
-        // a mismatch with the reparsed tokenstream.
-        //
-        // Note that we currently do not handle the case where the
-        // reparsed stream has a `Parenthesis`-delimited group
-        // inserted. This will cause a spurious mismatch:
-        // issue #75734 tracks resolving this.
-
-        let expanded: SmallVec<[_; 1]> =
-            if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
-                nt_to_tokenstream(nt, sess, *span)
-                    .into_trees()
-                    .flat_map(|t| expand_token(t, sess))
-                    .collect()
-            } else {
-                // Filter before and after breaking tokens,
-                // since we may want to ignore both glued and unglued tokens.
-                std::iter::once(tree)
-                    .filter(semantic_tree)
-                    .flat_map(break_tokens)
-                    .filter(semantic_tree)
-                    .collect()
-            };
-        expanded.into_iter()
-    }
-
-    // Break tokens after we expand any nonterminals, so that we break tokens
-    // that are produced as a result of nonterminal expansion.
-    let tokens = tokens.trees().flat_map(|t| expand_token(t, sess));
-    let reparsed_tokens = reparsed_tokens.trees().flat_map(|t| expand_token(t, sess));
-
-    tokens.eq_by(reparsed_tokens, |t, rt| {
-        tokentree_probably_equal_for_proc_macro(&t, &rt, sess, relaxed_delim_match)
-    })
-}
-
-// See comments in `Nonterminal::to_tokenstream` for why we care about
-// *probably* equal here rather than actual equality
-//
-// This is otherwise the same as `eq_unspanned`, only recursing with a
-// different method.
-pub fn tokentree_probably_equal_for_proc_macro(
-    token: &TokenTree,
-    reparsed_token: &TokenTree,
-    sess: &ParseSess,
-    relaxed_delim_match: bool,
-) -> bool {
-    match (token, reparsed_token) {
-        (TokenTree::Token(token), TokenTree::Token(reparsed_token)) => {
-            token_probably_equal_for_proc_macro(token, reparsed_token)
-        }
-        (
-            TokenTree::Delimited(_, delim, tokens),
-            TokenTree::Delimited(_, reparsed_delim, reparsed_tokens),
-        ) if delim == reparsed_delim => tokenstream_probably_equal_for_proc_macro(
-            tokens,
-            reparsed_tokens,
-            sess,
-            relaxed_delim_match,
-        ),
-        (TokenTree::Delimited(_, DelimToken::NoDelim, tokens), reparsed_token) => {
-            if relaxed_delim_match {
-                if let TokenTree::Delimited(_, DelimToken::Paren, reparsed_tokens) = reparsed_token
-                {
-                    if tokenstream_probably_equal_for_proc_macro(
-                        tokens,
-                        reparsed_tokens,
-                        sess,
-                        relaxed_delim_match,
-                    ) {
-                        return true;
-                    }
-                }
-            }
-            tokens.len() == 1
-                && tokentree_probably_equal_for_proc_macro(
-                    &tokens.trees().next().unwrap(),
-                    reparsed_token,
-                    sess,
-                    relaxed_delim_match,
-                )
-        }
-        _ => false,
+        return tokens;
+    } else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
+        return fake_token_stream(sess, nt, span);
+    } else {
+        let pretty = rustc_ast_pretty::pprust::nonterminal_to_string_no_extra_parens(&nt);
+        panic!("Missing tokens at {:?} for nt {:?}", span, pretty);
     }
 }
 
-// See comments in `Nonterminal::to_tokenstream` for why we care about
-// *probably* equal here rather than actual equality
-fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
-    if mem::discriminant(&first.kind) != mem::discriminant(&other.kind) {
-        return false;
-    }
-    use rustc_ast::token::TokenKind::*;
-    match (&first.kind, &other.kind) {
-        (&Eq, &Eq)
-        | (&Lt, &Lt)
-        | (&Le, &Le)
-        | (&EqEq, &EqEq)
-        | (&Ne, &Ne)
-        | (&Ge, &Ge)
-        | (&Gt, &Gt)
-        | (&AndAnd, &AndAnd)
-        | (&OrOr, &OrOr)
-        | (&Not, &Not)
-        | (&Tilde, &Tilde)
-        | (&At, &At)
-        | (&Dot, &Dot)
-        | (&DotDot, &DotDot)
-        | (&DotDotDot, &DotDotDot)
-        | (&DotDotEq, &DotDotEq)
-        | (&Comma, &Comma)
-        | (&Semi, &Semi)
-        | (&Colon, &Colon)
-        | (&ModSep, &ModSep)
-        | (&RArrow, &RArrow)
-        | (&LArrow, &LArrow)
-        | (&FatArrow, &FatArrow)
-        | (&Pound, &Pound)
-        | (&Dollar, &Dollar)
-        | (&Question, &Question)
-        | (&Eof, &Eof) => true,
-
-        (&BinOp(a), &BinOp(b)) | (&BinOpEq(a), &BinOpEq(b)) => a == b,
-
-        (&OpenDelim(a), &OpenDelim(b)) | (&CloseDelim(a), &CloseDelim(b)) => a == b,
-
-        (&DocComment(a1, a2, a3), &DocComment(b1, b2, b3)) => a1 == b1 && a2 == b2 && a3 == b3,
-
-        (&Literal(a), &Literal(b)) => a == b,
-
-        (&Lifetime(a), &Lifetime(b)) => a == b,
-        (&Ident(a, b), &Ident(c, d)) => {
-            b == d && (a == c || a == kw::DollarCrate || c == kw::DollarCrate)
-        }
-
-        (&Interpolated(..), &Interpolated(..)) => panic!("Unexpanded Interpolated!"),
-
-        _ => panic!("forgot to add a token?"),
-    }
+pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal, span: Span) -> TokenStream {
+    let source = pprust::nonterminal_to_string(nt);
+    let filename = FileName::macro_expansion_source_code(&source);
+    parse_stream_from_source_str(filename, source, sess, Some(span))
 }
 
 fn prepend_attrs(
+    sess: &ParseSess,
     attrs: &[ast::Attribute],
+    nt: &Nonterminal,
+    span: Span,
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
-        return Some(tokens);
+        return Some(tokens?.create_token_stream());
    }
     let mut builder = tokenstream::TokenStreamBuilder::new();
     for attr in attrs {
         // FIXME: Correctly handle tokens for inner attributes.
         // For now, we fall back to reparsing the original AST node
         if attr.style == ast::AttrStyle::Inner {
-            return None;
+            return Some(fake_token_stream(sess, nt, span));
         }
         builder.push(attr.tokens());
     }
-    builder.push(tokens);
+    builder.push(tokens?.create_token_stream());
    Some(builder.build())
 }
```
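Note: the behavioral change in `lib.rs` is easiest to see in isolation. Instead of pretty-printing and reparsing to double-check captured tokens, `nt_to_tokenstream` now trusts captured tokens when they exist, synthesizes a stream (via `fake_token_stream`) only when the caller passes `CanSynthesizeMissingTokens::Yes`, and panics otherwise. Here is a minimal standalone model of that decision; the types are invented stand-ins, not compiler code:

```rust
// Hypothetical stand-ins for rustc's token types; only the control
// flow mirrors the new `nt_to_tokenstream`.
#[derive(Debug, Clone)]
struct TokenStream(String);

#[derive(Clone, Copy)]
enum CanSynthesizeMissingTokens {
    Yes,
    No,
}

fn nt_to_tokenstream_model(
    captured: Option<TokenStream>,
    synthesize: CanSynthesizeMissingTokens,
) -> TokenStream {
    if let Some(tokens) = captured {
        // Captured tokens are now trusted unconditionally.
        tokens
    } else if matches!(synthesize, CanSynthesizeMissingTokens::Yes) {
        // Fallback: pretty-print the AST and re-lex it, as
        // `fake_token_stream` does in the real code.
        TokenStream("<reparsed from pretty-printed source>".to_owned())
    } else {
        // Missing tokens without permission to synthesize is a bug.
        panic!("missing tokens for nonterminal");
    }
}

fn main() {
    let cached = nt_to_tokenstream_model(
        Some(TokenStream("foo + bar".to_owned())),
        CanSynthesizeMissingTokens::No,
    );
    println!("{:?}", cached);
    let synthesized = nt_to_tokenstream_model(None, CanSynthesizeMissingTokens::Yes);
    println!("{:?}", synthesized);
}
```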
```diff
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 350a372a684..98c7b9a63a5 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -1912,4 +1912,22 @@ impl<'a> Parser<'a> {
         *self = snapshot;
         Err(err)
     }
+
+    /// Get the diagnostics for the cases where `move async` is found.
+    ///
+    /// `move_async_span` starts at the 'm' of the move keyword and ends with the 'c' of the async keyword
+    pub(super) fn incorrect_move_async_order_found(
+        &self,
+        move_async_span: Span,
+    ) -> DiagnosticBuilder<'a> {
+        let mut err =
+            self.struct_span_err(move_async_span, "the order of `move` and `async` is incorrect");
+        err.span_suggestion_verbose(
+            move_async_span,
+            "try switching the order",
+            "async move".to_owned(),
+            Applicability::MaybeIncorrect,
+        );
+        err
+    }
 }
```
```diff
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 93be478fc8c..b147f42fada 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,5 +1,5 @@
-use super::pat::{GateOr, PARAM_EXPECTED};
-use super::ty::{AllowPlus, RecoverQPath};
+use super::pat::{GateOr, RecoverComma, PARAM_EXPECTED};
+use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{BlockMode, Parser, PathStyle, Restrictions, TokenType};
 use super::{SemiColonMode, SeqSep, TokenExpectType};
 use crate::maybe_recover_from_interpolated_ty_qpath;
@@ -1603,7 +1603,7 @@ impl<'a> Parser<'a> {
             self.sess.gated_spans.gate(sym::async_closure, span);
         }
 
-        let capture_clause = self.parse_capture_clause();
+        let capture_clause = self.parse_capture_clause()?;
         let decl = self.parse_fn_block_decl()?;
         let decl_hi = self.prev_token.span;
         let body = match decl.output {
@@ -1626,8 +1626,18 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an optional `move` prefix to a closure-like construct.
-    fn parse_capture_clause(&mut self) -> CaptureBy {
-        if self.eat_keyword(kw::Move) { CaptureBy::Value } else { CaptureBy::Ref }
+    fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
+        if self.eat_keyword(kw::Move) {
+            // Check for `move async` and recover
+            if self.check_keyword(kw::Async) {
+                let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
+                Err(self.incorrect_move_async_order_found(move_async_span))
+            } else {
+                Ok(CaptureBy::Value)
+            }
+        } else {
+            Ok(CaptureBy::Ref)
+        }
     }
 
     /// Parses the `|arg, arg|` header of a closure.
@@ -1647,7 +1657,8 @@ impl<'a> Parser<'a> {
             self.expect_or()?;
             args
         };
-        let output = self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes)?;
+        let output =
+            self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
 
         Ok(P(FnDecl { inputs, output }))
     }
@@ -1728,7 +1739,7 @@ impl<'a> Parser<'a> {
     /// The `let` token has already been eaten.
     fn parse_let_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let pat = self.parse_top_pat(GateOr::No)?;
+        let pat = self.parse_top_pat(GateOr::No, RecoverComma::Yes)?;
         self.expect(&token::Eq)?;
         let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
             this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
@@ -1791,7 +1802,7 @@ impl<'a> Parser<'a> {
             _ => None,
         };
 
-        let pat = self.parse_top_pat(GateOr::Yes)?;
+        let pat = self.parse_top_pat(GateOr::Yes, RecoverComma::Yes)?;
         if !self.eat_keyword(kw::In) {
             self.error_missing_in_for_loop();
         }
@@ -1901,7 +1912,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
         let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
-        let pat = self.parse_top_pat(GateOr::No)?;
+        let pat = self.parse_top_pat(GateOr::No, RecoverComma::Yes)?;
         let guard = if self.eat_keyword(kw::If) {
             let if_span = self.prev_token.span;
             let cond = self.parse_expr()?;
@@ -2018,7 +2029,7 @@ impl<'a> Parser<'a> {
     fn parse_async_block(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         self.expect_keyword(kw::Async)?;
-        let capture_clause = self.parse_capture_clause();
+        let capture_clause = self.parse_capture_clause()?;
         let (iattrs, body) = self.parse_inner_attrs_and_block()?;
         attrs.extend(iattrs);
         let kind = ExprKind::Async(capture_clause, DUMMY_NODE_ID, body);
```
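Note: taken together, the `diagnostics.rs` and `expr.rs` changes turn a previously opaque parse error on `move async` into a targeted one. A minimal reproducer sketch (edition 2018 or later; the commented line is the error case):

```rust
// `parse_capture_clause` now returns a `PResult` so it can reject the
// swapped keyword order with "the order of `move` and `async` is
// incorrect" and suggest switching them (Applicability::MaybeIncorrect).
fn main() {
    // let fut = move async { 1 }; // error, with suggestion: `async move`
    let fut = async move { 1 };    // the accepted order
    let _ = fut;
}
```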
```diff
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index dd99a7587dd..860e63020bb 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -5,7 +5,7 @@ use rustc_ast::{
     self as ast, Attribute, GenericBounds, GenericParam, GenericParamKind, WhereClause,
 };
 use rustc_errors::PResult;
-use rustc_span::symbol::{kw, sym};
+use rustc_span::symbol::kw;
 
 impl<'a> Parser<'a> {
     /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
@@ -56,8 +56,6 @@ impl<'a> Parser<'a> {
         self.expect(&token::Colon)?;
         let ty = self.parse_ty()?;
 
-        self.sess.gated_spans.gate(sym::min_const_generics, const_span.to(self.prev_token.span));
-
         Ok(GenericParam {
             ident,
             id: ast::DUMMY_NODE_ID,
@@ -240,7 +238,7 @@ impl<'a> Parser<'a> {
             // Parse type with mandatory colon and (possibly empty) bounds,
             // or with mandatory equality sign and the second type.
-            let ty = self.parse_ty()?;
+            let ty = self.parse_ty_for_where_clause()?;
             if self.eat(&token::Colon) {
                 let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?;
                 Ok(ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
```
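Note: the switch to `parse_ty_for_where_clause` (defined in `ty.rs` below) matters because a colon after a type is legitimate inside a where clause, so only `=>` may be recovered as `->` there. A sketch of the kind of code that must keep parsing, assuming a current toolchain accepts function-pointer bounds like this:

```rust
// In a where clause, `fn(T): Send` is a bound on a function-pointer
// type; recovering the `:` as `->` here would misparse valid code.
fn assert_fn_ptr_send<T>()
where
    fn(T): Send,
{
}

fn main() {
    assert_fn_ptr_send::<u8>();
}
```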
```diff
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 5954b370e6d..c6669f04682 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,5 +1,5 @@
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
-use super::ty::{AllowPlus, RecoverQPath};
+use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{FollowedByType, Parser, PathStyle};
 
 use crate::maybe_whole;
@@ -247,9 +247,14 @@ impl<'a> Parser<'a> {
             (ident, ItemKind::Static(ty, m, expr))
         } else if let Const::Yes(const_span) = self.parse_constness() {
             // CONST ITEM
-            self.recover_const_mut(const_span);
-            let (ident, ty, expr) = self.parse_item_global(None)?;
-            (ident, ItemKind::Const(def(), ty, expr))
+            if self.token.is_keyword(kw::Impl) {
+                // recover from `const impl`, suggest `impl const`
+                self.recover_const_impl(const_span, attrs, def())?
+            } else {
+                self.recover_const_mut(const_span);
+                let (ident, ty, expr) = self.parse_item_global(None)?;
+                (ident, ItemKind::Const(def(), ty, expr))
+            }
         } else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
             // TRAIT ITEM
             self.parse_item_trait(attrs, lo)?
@@ -489,7 +494,7 @@ impl<'a> Parser<'a> {
         let polarity = self.parse_polarity();
 
         // Parse both types and traits as a type, then reinterpret if necessary.
-        let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
+        let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span));
         let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
         {
             let span = self.prev_token.span.between(self.token.span);
@@ -988,6 +993,36 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Recover on `const impl` with `const` already eaten.
+    fn recover_const_impl(
+        &mut self,
+        const_span: Span,
+        attrs: &mut Vec<Attribute>,
+        defaultness: Defaultness,
+    ) -> PResult<'a, ItemInfo> {
+        let impl_span = self.token.span;
+        let mut err = self.expected_ident_found();
+        let mut impl_info = self.parse_item_impl(attrs, defaultness)?;
+        match impl_info.1 {
+            // only try to recover if this is implementing a trait for a type
+            ItemKind::Impl { of_trait: Some(ref trai), ref mut constness, .. } => {
+                *constness = Const::Yes(const_span);
+
+                let before_trait = trai.path.span.shrink_to_lo();
+                let const_up_to_impl = const_span.with_hi(impl_span.lo());
+                err.multipart_suggestion(
+                    "you might have meant to write a const trait impl",
+                    vec![(const_up_to_impl, "".to_owned()), (before_trait, "const ".to_owned())],
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+            }
+            ItemKind::Impl { .. } => return Err(err),
+            _ => unreachable!(),
+        }
+        Ok(impl_info)
+    }
+
     /// Parse `["const" | ("static" "mut"?)] $ident ":" $ty (= $expr)?` with
     /// `["const" | ("static" "mut"?)]` already parsed and stored in `m`.
     ///
@@ -1514,7 +1549,7 @@ impl<'a> Parser<'a> {
         let header = self.parse_fn_front_matter()?; // `const ... fn`
         let ident = self.parse_ident()?; // `foo`
         let mut generics = self.parse_generics()?; // `<'a, T, ...>`
-        let decl = self.parse_fn_decl(req_name, AllowPlus::Yes)?; // `(p: u8, ...)`
+        let decl = self.parse_fn_decl(req_name, AllowPlus::Yes, RecoverReturnSign::Yes)?; // `(p: u8, ...)`
         generics.where_clause = self.parse_where_clause()?; // `where T: Ord`
 
         let mut sig_hi = self.prev_token.span;
@@ -1645,10 +1680,11 @@ impl<'a> Parser<'a> {
         &mut self,
         req_name: ReqName,
         ret_allow_plus: AllowPlus,
+        recover_return_sign: RecoverReturnSign,
     ) -> PResult<'a, P<FnDecl>> {
         Ok(P(FnDecl {
             inputs: self.parse_fn_params(req_name)?,
-            output: self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes)?,
+            output: self.parse_ret_ty(ret_allow_plus, RecoverQPath::Yes, recover_return_sign)?,
         }))
     }
 
@@ -1663,7 +1699,7 @@ impl<'a> Parser<'a> {
                 // Skip every token until next possible arg or end.
                 p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]);
                 // Create a placeholder argument for proper arg count (issue #34264).
-                Ok(dummy_arg(Ident::new(kw::Invalid, lo.to(p.prev_token.span))))
+                Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span))))
             });
             // ...now that we've parsed the first argument, `self` is no longer allowed.
             first_param = false;
@@ -1723,7 +1759,7 @@ impl<'a> Parser<'a> {
         }
         match ty {
             Ok(ty) => {
-                let ident = Ident::new(kw::Invalid, self.prev_token.span);
+                let ident = Ident::new(kw::Empty, self.prev_token.span);
                 let bm = BindingMode::ByValue(Mutability::Not);
                 let pat = self.mk_pat_ident(ty.span, bm, ident);
                 (pat, ty)
```
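Note: the `recover_const_impl` path targets a common ordering mistake with (still unstable) const trait impls. Roughly, it turns the first commented form below into the second; this sketch only shows the surface syntax involved, and compiles because the erroneous lines stay commented:

```rust
trait Zero {
    fn zero() -> Self;
}

// const impl Zero for u8 { ... }
//     ^^^^^ error: expected identifier, plus the suggestion
//           "you might have meant to write a const trait impl":
// impl const Zero for u8 { ... }

impl Zero for u8 {
    fn zero() -> Self {
        0
    }
}

fn main() {
    assert_eq!(u8::zero(), 0);
}
```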
```diff
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index df4695b18e7..073e62c41d3 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -17,7 +17,7 @@ pub use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
 use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
-use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -132,6 +132,28 @@ struct TokenCursor {
     // Counts the number of calls to `next` or `next_desugared`,
     // depending on whether `desugar_doc_comments` is set.
     num_next_calls: usize,
+    // During parsing, we may sometimes need to 'unglue' a
+    // glued token into two component tokens
+    // (e.g. '>>' into '>' and '>'), so that the parser
+    // can consume them one at a time. This process
+    // bypasses the normal capturing mechanism
+    // (e.g. `num_next_calls` will not be incremented),
+    // since the 'unglued' tokens do not exist in
+    // the original `TokenStream`.
+    //
+    // If we end up consuming both unglued tokens,
+    // then this is not an issue - we'll end up
+    // capturing the single 'glued' token.
+    //
+    // However, in certain circumstances, we may
+    // want to capture just the first 'unglued' token.
+    // For example, capturing the `Vec<u8>`
+    // in `Option<Vec<u8>>` requires us to unglue
+    // the trailing `>>` token. The `append_unglued_token`
+    // field is used to track this token - it gets
+    // appended to the captured stream when
+    // we evaluate a `LazyTokenStream`.
+    append_unglued_token: Option<TreeAndSpacing>,
 }
 
 #[derive(Clone)]
@@ -336,6 +358,7 @@ impl<'a> Parser<'a> {
                 stack: Vec::new(),
                 num_next_calls: 0,
                 desugar_doc_comments,
+                append_unglued_token: None,
             },
             desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
@@ -359,6 +382,10 @@ impl<'a> Parser<'a> {
             self.token_cursor.next()
         };
         self.token_cursor.num_next_calls += 1;
+        // We've retrieved a token from the underlying
+        // cursor, so we no longer need to worry about
+        // an unglued token. See `break_and_eat` for more details.
+        self.token_cursor.append_unglued_token = None;
         if next.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
             next.span = fallback_span.with_ctxt(next.span.ctxt());
@@ -555,6 +582,14 @@ impl<'a> Parser<'a> {
                 let first_span = self.sess.source_map().start_point(self.token.span);
                 let second_span = self.token.span.with_lo(first_span.hi());
                 self.token = Token::new(first, first_span);
+                // Keep track of this token - if we end token capturing now,
+                // we'll want to append this token to the captured stream.
+                //
+                // If we consume any additional tokens, then this token
+                // is not needed (we'll capture the entire 'glued' token),
+                // and `next_tok` will set this field to `None`
+                self.token_cursor.append_unglued_token =
+                    Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
                 // Use the spacing of the glued token as the spacing
                 // of the unglued second token.
                 self.bump_with((Token::new(second, second_span), self.token_spacing));
@@ -685,13 +720,9 @@ impl<'a> Parser<'a> {
                             Ok(t) => {
                                 // Parsed successfully, therefore most probably the code only
                                 // misses a separator.
-                                let mut exp_span = self.sess.source_map().next_point(sp);
-                                if self.sess.source_map().is_multiline(exp_span) {
-                                    exp_span = sp;
-                                }
                                 expect_err
                                     .span_suggestion_short(
-                                        exp_span,
+                                        sp,
                                         &format!("missing `{}`", token_str),
                                         token_str,
                                         Applicability::MaybeIncorrect,
@@ -1230,6 +1261,7 @@ impl<'a> Parser<'a> {
             num_calls: usize,
             desugar_doc_comments: bool,
             trailing_semi: bool,
+            append_unglued_token: Option<TreeAndSpacing>,
         }
         impl CreateTokenStream for LazyTokenStreamImpl {
             fn create_token_stream(&self) -> TokenStream {
@@ -1253,12 +1285,18 @@ impl<'a> Parser<'a> {
                     }))
                     .take(num_calls);
 
-                make_token_stream(tokens)
+                make_token_stream(tokens, self.append_unglued_token.clone())
             }
             fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
                 if self.trailing_semi {
                     panic!("Called `add_trailing_semi` twice!");
                 }
+                if self.append_unglued_token.is_some() {
+                    panic!(
+                        "Cannot call `add_trailing_semi` when we have an unglued token {:?}",
+                        self.append_unglued_token
+                    );
+                }
                 let mut new = self.clone();
                 new.trailing_semi = true;
                 Box::new(new)
@@ -1271,6 +1309,7 @@ impl<'a> Parser<'a> {
             cursor_snapshot,
             desugar_doc_comments: self.desugar_doc_comments,
             trailing_semi: false,
+            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
         Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
@@ -1325,7 +1364,10 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
 /// Converts a flattened iterator of tokens (including open and close delimiter tokens)
 /// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
 /// of open and close delims.
-fn make_token_stream(tokens: impl Iterator<Item = (Token, Spacing)>) -> TokenStream {
+fn make_token_stream(
+    tokens: impl Iterator<Item = (Token, Spacing)>,
+    append_unglued_token: Option<TreeAndSpacing>,
+) -> TokenStream {
     #[derive(Debug)]
     struct FrameData {
         open: Span,
@@ -1348,14 +1390,17 @@ fn make_token_stream(tokens: impl Iterator<Item = (Token, Spacing)>) -> TokenStr
                     .inner
                     .push((delimited, Spacing::Alone));
             }
-            token => stack
-                .last_mut()
-                .expect("Bottom token frame is missing!")
-                .inner
-                .push((TokenTree::Token(token), spacing)),
+            token => {
+                stack
+                    .last_mut()
+                    .expect("Bottom token frame is missing!")
+                    .inner
+                    .push((TokenTree::Token(token), spacing));
+            }
         }
     }
-    let final_buf = stack.pop().expect("Missing final buf!");
+    let mut final_buf = stack.pop().expect("Missing final buf!");
+    final_buf.inner.extend(append_unglued_token);
     assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
     TokenStream::new(final_buf.inner)
 }
```
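Note: the `append_unglued_token` bookkeeping is subtle, so a toy model of "breaking" a glued token, outside the compiler, may help. The type and function names here are invented for illustration:

```rust
// Minimal model of token ungluing: `>>` is one lexed token, but parsing
// `Option<Vec<u8>>` needs to consume it as two `>` tokens. If token
// capturing stops after the first `>`, the leftover half is what the
// new `append_unglued_token` field remembers.
#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Gt,  // `>`
    Shr, // `>>`
}

// Counterpart of rustc's `break_two_token_op` for this toy token set.
fn break_two(tok: &Tok) -> Option<(Tok, Tok)> {
    match tok {
        Tok::Shr => Some((Tok::Gt, Tok::Gt)),
        Tok::Gt => None,
    }
}

fn main() {
    let glued = Tok::Shr;
    let (first, second) = break_two(&glued).expect("`>>` splits into `>` `>`");
    // `first` closes `Vec<u8`; `second` is kept aside. If the parser never
    // consumes it through the normal cursor, it must be appended to the
    // captured stream when the `LazyTokenStream` is evaluated.
    assert_eq!((first, second), (Tok::Gt, Tok::Gt));
}
```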
```diff
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index c007f96a798..eb5d7075f00 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -4,6 +4,7 @@ use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
 use rustc_span::symbol::{kw, Ident};
 
+use crate::parser::pat::{GateOr, RecoverComma};
 use crate::parser::{FollowedByType, Parser, PathStyle};
 
 impl<'a> Parser<'a> {
@@ -27,6 +28,8 @@ impl<'a> Parser<'a> {
                 token.can_begin_expr()
                 // This exception is here for backwards compatibility.
                 && !token.is_keyword(kw::Let)
+                // This exception is here for backwards compatibility.
+                && !token.is_keyword(kw::Const)
             }
             NonterminalKind::Ty => token.can_begin_type(),
             NonterminalKind::Ident => get_macro_ident(token).is_some(),
@@ -55,7 +58,7 @@ impl<'a> Parser<'a> {
                 },
                 _ => false,
             },
-            NonterminalKind::Pat => match token.kind {
+            NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => match token.kind {
                 token::Ident(..) |                  // box, ref, mut, and other identifiers (can stricten)
                 token::OpenDelim(token::Paren) |    // tuple pattern
                 token::OpenDelim(token::Bracket) |  // slice pattern
@@ -68,6 +71,8 @@ impl<'a> Parser<'a> {
                 token::ModSep |                     // path
                 token::Lt |                         // path (UFCS constant)
                 token::BinOp(token::Shl) => true,   // path (double UFCS)
+                // leading vert `|` or-pattern
+                token::BinOp(token::Or) => matches!(kind, NonterminalKind::Pat2021 { .. }),
                 token::Interpolated(ref nt) => may_be_ident(nt),
                 _ => false,
             },
@@ -84,6 +89,7 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`).
     pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, Nonterminal> {
         // Any `Nonterminal` which stores its tokens (currently `NtItem` and `NtExpr`)
         // needs to have them force-captured here.
@@ -127,8 +133,14 @@ impl<'a> Parser<'a> {
                     }
                 }
             }
-            NonterminalKind::Pat => {
-                let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?;
+            NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
+                let (mut pat, tokens) = self.collect_tokens(|this| match kind {
+                    NonterminalKind::Pat2018 { .. } => this.parse_pat(None),
+                    NonterminalKind::Pat2021 { .. } => {
+                        this.parse_top_pat(GateOr::Yes, RecoverComma::No)
+                    }
+                    _ => unreachable!(),
+                })?;
                 // We have eaten an NtPat, which could already have tokens
                 if pat.tokens.is_none() {
                     pat.tokens = tokens;
```
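Note: the `Pat2018`/`Pat2021` split makes the `$p:pat` macro matcher edition-dependent: the 2021 flavor goes through `parse_top_pat`, so it accepts top-level or-patterns, including a leading `|`. An illustration (the commented call only matches under the 2021 behavior):

```rust
macro_rules! pat_to_str {
    ($p:pat) => {
        stringify!($p)
    };
}

fn main() {
    println!("{}", pat_to_str!(Some(_)));
    // println!("{}", pat_to_str!(Some(1) | None)); // 2021-flavor `:pat` only
}
```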
```diff
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index b62c7373800..456e32680fe 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -26,7 +26,7 @@ pub(super) enum GateOr {
 
 /// Whether or not to recover a `,` when parsing or-patterns.
 #[derive(PartialEq, Copy, Clone)]
-enum RecoverComma {
+pub(super) enum RecoverComma {
     Yes,
     No,
 }
@@ -43,13 +43,17 @@ impl<'a> Parser<'a> {
     /// Entry point to the main pattern parser.
     /// Corresponds to `top_pat` in RFC 2535 and allows or-pattern at the top level.
-    pub(super) fn parse_top_pat(&mut self, gate_or: GateOr) -> PResult<'a, P<Pat>> {
+    pub(super) fn parse_top_pat(
+        &mut self,
+        gate_or: GateOr,
+        rc: RecoverComma,
+    ) -> PResult<'a, P<Pat>> {
         // Allow a '|' before the pats (RFCs 1925, 2530, and 2535).
         let gated_leading_vert = self.eat_or_separator(None) && gate_or == GateOr::Yes;
         let leading_vert_span = self.prev_token.span;
 
         // Parse the possibly-or-pattern.
-        let pat = self.parse_pat_with_or(None, gate_or, RecoverComma::Yes)?;
+        let pat = self.parse_pat_with_or(None, gate_or, rc)?;
 
         // If we parsed a leading `|` which should be gated,
         // and no other gated or-pattern has been parsed thus far,
```
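Note: making `RecoverComma` `pub(super)` lets callers opt out of comma recovery; the 2021 `:pat` matcher above passes `RecoverComma::No` so macros see a clean match failure instead of a recovered pattern. The recovery it controls is, roughly, for code like the commented `let` below, where rustc can suggest wrapping a tuple pattern in parentheses:

```rust
fn main() {
    // let a, b = (1, 2); // error: unexpected `,` in pattern,
    //                    // with a suggestion to try `(a, b)`
    let (a, b) = (1, 2);
    assert_eq!(a + b, 3);
}
```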
```diff
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 17e5bcf7605..60a47ca12b8 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -1,4 +1,4 @@
-use super::ty::{AllowPlus, RecoverQPath};
+use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, TokenType};
 use crate::maybe_whole;
 use rustc_ast::ptr::P;
@@ -231,7 +231,8 @@ impl<'a> Parser<'a> {
                 // `(T, U) -> R`
                 let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
                 let span = ident.span.to(self.prev_token.span);
-                let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No)?;
+                let output =
+                    self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?;
                 ParenthesizedArgs { inputs, output, span }.into()
             };
 
@@ -500,10 +501,9 @@ impl<'a> Parser<'a> {
     pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
         match &expr.kind {
            ast::ExprKind::Block(_, _) | ast::ExprKind::Lit(_) => true,
-            ast::ExprKind::Unary(ast::UnOp::Neg, expr) => match &expr.kind {
-                ast::ExprKind::Lit(_) => true,
-                _ => false,
-            },
+            ast::ExprKind::Unary(ast::UnOp::Neg, expr) => {
+                matches!(expr.kind, ast::ExprKind::Lit(_))
+            }
             // We can only resolve single-segment paths at the moment, because multi-segment paths
             // require type-checking: see `visit_generic_arg` in `src/librustc_resolve/late.rs`.
             ast::ExprKind::Path(None, path)
```
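Note: `expr_is_valid_const_arg` is only restyled here with `matches!`, but it is worth recalling what it gates: the expression shapes allowed as unbraced const generic arguments are blocks, literals, negated literals, and single-segment paths. A sketch, assuming a toolchain where `min_const_generics` has stabilized:

```rust
fn head<const N: usize>(xs: [u8; N]) -> u8 {
    xs[0]
}

fn main() {
    const LEN: usize = 2;
    assert_eq!(head::<2>([7, 8]), 7);         // literal const argument
    assert_eq!(head::<LEN>([7, 8]), 7);       // single-segment path
    assert_eq!(head::<{ 1 + 1 }>([7, 8]), 7); // anything else needs braces
}
```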
```diff
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index e974556f43a..2942747991a 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -1,7 +1,7 @@
 use super::attr::DEFAULT_INNER_ATTR_FORBIDDEN;
 use super::diagnostics::{AttemptLocalParseRecovery, Error};
 use super::expr::LhsExpr;
-use super::pat::GateOr;
+use super::pat::{GateOr, RecoverComma};
 use super::path::PathStyle;
 use super::{BlockMode, Parser, Restrictions, SemiColonMode};
 use crate::maybe_whole;
@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
     /// Parses a local variable declaration.
     fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
         let lo = self.prev_token.span;
-        let pat = self.parse_top_pat(GateOr::Yes)?;
+        let pat = self.parse_top_pat(GateOr::Yes, RecoverComma::Yes)?;
 
         let (err, ty) = if self.eat(&token::Colon) {
             // Save the state of the parser before parsing type normally, in case there is a `:`
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 7a6ebca4e15..9553f5d09e8 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -43,6 +43,37 @@ pub(super) enum RecoverQPath {
     No,
 }
 
+/// Signals whether parsing a type should recover `->`.
+///
+/// More specifically, when parsing a function like:
+/// ```rust
+/// fn foo() => u8 { 0 }
+/// fn bar(): u8 { 0 }
+/// ```
+/// The compiler will try to recover interpreting `foo() => u8` as `foo() -> u8` when calling
+/// `parse_ty` with anything except `RecoverReturnSign::No`, and it will try to recover `bar(): u8`
+/// as `bar() -> u8` when passing `RecoverReturnSign::Yes` to `parse_ty`
+#[derive(Copy, Clone, PartialEq)]
+pub(super) enum RecoverReturnSign {
+    Yes,
+    OnlyFatArrow,
+    No,
+}
+
+impl RecoverReturnSign {
+    /// [RecoverReturnSign::Yes] allows for recovering `fn foo() => u8` and `fn foo(): u8`,
+    /// [RecoverReturnSign::OnlyFatArrow] allows for recovering only `fn foo() => u8` (recovering
+    /// colons can cause problems when parsing where clauses), and
+    /// [RecoverReturnSign::No] doesn't allow for any recovery of the return type arrow
+    fn can_recover(self, token: &TokenKind) -> bool {
+        match self {
+            Self::Yes => matches!(token, token::FatArrow | token::Colon),
+            Self::OnlyFatArrow => matches!(token, token::FatArrow),
+            Self::No => false,
+        }
+    }
+}
+
 // Is `...` (`CVarArgs`) legal at this level of type parsing?
 #[derive(PartialEq)]
 enum AllowCVariadic {
@@ -62,14 +93,24 @@ fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
 impl<'a> Parser<'a> {
     /// Parses a type.
     pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
-        self.parse_ty_common(AllowPlus::Yes, RecoverQPath::Yes, AllowCVariadic::No)
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+        )
     }
 
     /// Parse a type suitable for a function or function pointer parameter.
     /// The difference from `parse_ty` is that this version allows `...`
     /// (`CVarArgs`) at the top level of the type.
     pub(super) fn parse_ty_for_param(&mut self) -> PResult<'a, P<Ty>> {
-        self.parse_ty_common(AllowPlus::Yes, RecoverQPath::Yes, AllowCVariadic::Yes)
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::Yes,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+        )
     }
 
     /// Parses a type in restricted contexts where `+` is not permitted.
@@ -79,7 +120,22 @@ impl<'a> Parser<'a> {
     /// Example 2: `value1 as TYPE + value2`
     ///     `+` is prohibited to avoid interactions with expression grammar.
     pub(super) fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
-        self.parse_ty_common(AllowPlus::No, RecoverQPath::Yes, AllowCVariadic::No)
+        self.parse_ty_common(
+            AllowPlus::No,
+            AllowCVariadic::No,
+            RecoverQPath::Yes,
+            RecoverReturnSign::Yes,
+        )
+    }
+
+    /// Parse a type without recovering `:` as `->` to avoid breaking code such as `where fn() : for<'a>`
+    pub(super) fn parse_ty_for_where_clause(&mut self) -> PResult<'a, P<Ty>> {
+        self.parse_ty_common(
+            AllowPlus::Yes,
+            AllowCVariadic::Yes,
+            RecoverQPath::Yes,
+            RecoverReturnSign::OnlyFatArrow,
+        )
     }
 
     /// Parses an optional return type `[ -> TY ]` in a function declaration.
@@ -87,10 +143,35 @@ impl<'a> Parser<'a> {
         &mut self,
         allow_plus: AllowPlus,
         recover_qpath: RecoverQPath,
+        recover_return_sign: RecoverReturnSign,
     ) -> PResult<'a, FnRetTy> {
         Ok(if self.eat(&token::RArrow) {
             // FIXME(Centril): Can we unconditionally `allow_plus`?
-            let ty = self.parse_ty_common(allow_plus, recover_qpath, AllowCVariadic::No)?;
+            let ty = self.parse_ty_common(
+                allow_plus,
+                AllowCVariadic::No,
+                recover_qpath,
+                recover_return_sign,
+            )?;
+            FnRetTy::Ty(ty)
+        } else if recover_return_sign.can_recover(&self.token.kind) {
+            // Don't `eat` to prevent `=>` from being added as an expected token which isn't
+            // actually expected and could only confuse users
+            self.bump();
+            self.struct_span_err(self.prev_token.span, "return types are denoted using `->`")
+                .span_suggestion_short(
+                    self.prev_token.span,
+                    "use `->` instead",
+                    "->".to_string(),
+                    Applicability::MachineApplicable,
+                )
+                .emit();
+            let ty = self.parse_ty_common(
+                allow_plus,
+                AllowCVariadic::No,
+                recover_qpath,
+                recover_return_sign,
+            )?;
             FnRetTy::Ty(ty)
         } else {
             FnRetTy::Default(self.token.span.shrink_to_lo())
@@ -100,8 +181,9 @@ impl<'a> Parser<'a> {
     fn parse_ty_common(
         &mut self,
         allow_plus: AllowPlus,
-        recover_qpath: RecoverQPath,
         allow_c_variadic: AllowCVariadic,
+        recover_qpath: RecoverQPath,
+        recover_return_sign: RecoverReturnSign,
     ) -> PResult<'a, P<Ty>> {
         let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
         maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
@@ -129,14 +211,14 @@ impl<'a> Parser<'a> {
             TyKind::Infer
         } else if self.check_fn_front_matter() {
             // Function pointer type
-            self.parse_ty_bare_fn(lo, Vec::new())?
+            self.parse_ty_bare_fn(lo, Vec::new(), recover_return_sign)?
         } else if self.check_keyword(kw::For) {
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
             if self.check_fn_front_matter() {
-                self.parse_ty_bare_fn(lo, lifetime_defs)?
+                self.parse_ty_bare_fn(lo, lifetime_defs, recover_return_sign)?
             } else {
                 let path = self.parse_path(PathStyle::Type)?;
                 let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
@@ -338,9 +420,14 @@ impl<'a> Parser<'a> {
     ///            Function Style    ABI  Parameter types
     /// ```
     /// We actually parse `FnHeader FnDecl`, but we error on `const` and `async` qualifiers.
-    fn parse_ty_bare_fn(&mut self, lo: Span, params: Vec<GenericParam>) -> PResult<'a, TyKind> {
+    fn parse_ty_bare_fn(
+        &mut self,
+        lo: Span,
+        params: Vec<GenericParam>,
+        recover_return_sign: RecoverReturnSign,
+    ) -> PResult<'a, TyKind> {
         let ast::FnHeader { ext, unsafety, constness, asyncness } = self.parse_fn_front_matter()?;
-        let decl = self.parse_fn_decl(|_| false, AllowPlus::No)?;
+        let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
         let whole_span = lo.to(self.prev_token.span);
         if let ast::Const::Yes(span) = constness {
             self.error_fn_ptr_bad_qualifier(whole_span, span, "const");
```
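Note: the `RecoverReturnSign` machinery in `ty.rs` is easiest to read from the user's side. Both commented signatures below now produce the targeted error "return types are denoted using `->`" with a machine-applicable suggestion, and parsing recovers instead of bailing out:

```rust
// fn foo() => u8 { 0 } // error; suggestion: use `->` instead
// fn bar(): u8 { 0 }   // same error and suggestion
fn foo() -> u8 {
    0
}

fn main() {
    assert_eq!(foo(), 0);
}
```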