Diffstat (limited to 'src/libsyntax/parse/parser')
 src/libsyntax/parse/parser/attr.rs        |  350
 src/libsyntax/parse/parser/diagnostics.rs | 1494
 src/libsyntax/parse/parser/expr.rs        |  195
 src/libsyntax/parse/parser/generics.rs    |    2
 src/libsyntax/parse/parser/item.rs        |  700
 src/libsyntax/parse/parser/module.rs      |   12
 src/libsyntax/parse/parser/pat.rs         |   62
 src/libsyntax/parse/parser/path.rs        |   19
 src/libsyntax/parse/parser/stmt.rs        |   13
 src/libsyntax/parse/parser/ty.rs          |   15
 10 files changed, 2621 insertions(+), 241 deletions(-)
diff --git a/src/libsyntax/parse/parser/attr.rs b/src/libsyntax/parse/parser/attr.rs new file mode 100644 index 00000000000..188a144cac9 --- /dev/null +++ b/src/libsyntax/parse/parser/attr.rs @@ -0,0 +1,350 @@ +use super::{SeqSep, PResult, Parser, TokenType, PathStyle}; +use crate::attr; +use crate::ast; +use crate::parse::token::{self, Nonterminal, DelimToken}; +use crate::tokenstream::{TokenStream, TokenTree}; +use crate::source_map::Span; + +use log::debug; + +#[derive(Debug)] +enum InnerAttributeParsePolicy<'a> { + Permitted, + NotPermitted { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> }, +} + +const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ + permitted in this context"; + +impl<'a> Parser<'a> { + /// Parses attributes that appear before an item. + pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { + let mut attrs: Vec<ast::Attribute> = Vec::new(); + let mut just_parsed_doc_comment = false; + loop { + debug!("parse_outer_attributes: self.token={:?}", self.token); + match self.token.kind { + token::Pound => { + let inner_error_reason = if just_parsed_doc_comment { + "an inner attribute is not permitted following an outer doc comment" + } else if !attrs.is_empty() { + "an inner attribute is not permitted following an outer attribute" + } else { + DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG + }; + let inner_parse_policy = + InnerAttributeParsePolicy::NotPermitted { + reason: inner_error_reason, + saw_doc_comment: just_parsed_doc_comment, + prev_attr_sp: attrs.last().and_then(|a| Some(a.span)) + }; + let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; + attrs.push(attr); + just_parsed_doc_comment = false; + } + token::DocComment(s) => { + let attr = attr::mk_sugared_doc_attr(s, self.token.span); + if attr.style != ast::AttrStyle::Outer { + let mut err = self.fatal("expected outer doc comment"); + err.note("inner doc comments like this (starting with \ + `//!` or `/*!`) can only appear before items"); + return Err(err); + } + attrs.push(attr); + self.bump(); + just_parsed_doc_comment = true; + } + _ => break, + } + } + Ok(attrs) + } + + /// Matches `attribute = # ! [ meta_item ]`. + /// + /// If `permit_inner` is `true`, then a leading `!` indicates an inner + /// attribute. + pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> { + debug!("parse_attribute: permit_inner={:?} self.token={:?}", + permit_inner, + self.token); + let inner_parse_policy = if permit_inner { + InnerAttributeParsePolicy::Permitted + } else { + InnerAttributeParsePolicy::NotPermitted { + reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG, + saw_doc_comment: false, + prev_attr_sp: None + } + }; + self.parse_attribute_with_inner_parse_policy(inner_parse_policy) + } + + /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy` + /// that prescribes how to handle inner attributes. 
+ fn parse_attribute_with_inner_parse_policy( + &mut self, + inner_parse_policy: InnerAttributeParsePolicy<'_> + ) -> PResult<'a, ast::Attribute> { + debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", + inner_parse_policy, + self.token); + let (span, item, style) = match self.token.kind { + token::Pound => { + let lo = self.token.span; + self.bump(); + + if let InnerAttributeParsePolicy::Permitted = inner_parse_policy { + self.expected_tokens.push(TokenType::Token(token::Not)); + } + + let style = if self.token == token::Not { + self.bump(); + ast::AttrStyle::Inner + } else { + ast::AttrStyle::Outer + }; + + self.expect(&token::OpenDelim(token::Bracket))?; + let item = self.parse_attr_item()?; + self.expect(&token::CloseDelim(token::Bracket))?; + let hi = self.prev_span; + + let attr_sp = lo.to(hi); + + // Emit error if inner attribute is encountered and not permitted + if style == ast::AttrStyle::Inner { + if let InnerAttributeParsePolicy::NotPermitted { reason, + saw_doc_comment, prev_attr_sp } = inner_parse_policy { + let prev_attr_note = if saw_doc_comment { + "previous doc comment" + } else { + "previous outer attribute" + }; + + let mut diagnostic = self + .diagnostic() + .struct_span_err(attr_sp, reason); + + if let Some(prev_attr_sp) = prev_attr_sp { + diagnostic + .span_label(attr_sp, "not permitted following an outer attibute") + .span_label(prev_attr_sp, prev_attr_note); + } + + diagnostic + .note("inner attributes, like `#![no_std]`, annotate the item \ + enclosing them, and are usually found at the beginning of \ + source files. Outer attributes, like `#[test]`, annotate the \ + item following them.") + .emit() + } + } + + (attr_sp, item, style) + } + _ => { + let token_str = self.this_token_to_string(); + return Err(self.fatal(&format!("expected `#`, found `{}`", token_str))); + } + }; + + Ok(ast::Attribute { + item, + id: attr::mk_attr_id(), + style, + is_sugared_doc: false, + span, + }) + } + + /// Parses an inner part of an attribute (the path and following tokens). + /// The tokens must be either a delimited token stream, or empty token stream, + /// or the "legacy" key-value form. + /// PATH `(` TOKEN_STREAM `)` + /// PATH `[` TOKEN_STREAM `]` + /// PATH `{` TOKEN_STREAM `}` + /// PATH + /// PATH `=` UNSUFFIXED_LIT + /// The delimiters or `=` are still put into the resulting token stream. + pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { + let item = match self.token.kind { + token::Interpolated(ref nt) => match **nt { + Nonterminal::NtMeta(ref item) => Some(item.clone()), + _ => None, + }, + _ => None, + }; + Ok(if let Some(item) = item { + self.bump(); + item + } else { + let path = self.parse_path(PathStyle::Mod)?; + let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) || + self.check(&token::OpenDelim(DelimToken::Bracket)) || + self.check(&token::OpenDelim(DelimToken::Brace)) { + self.parse_token_tree().into() + } else if self.eat(&token::Eq) { + let eq = TokenTree::token(token::Eq, self.prev_span); + let mut is_interpolated_expr = false; + if let token::Interpolated(nt) = &self.token.kind { + if let token::NtExpr(..) = **nt { + is_interpolated_expr = true; + } + } + let token_tree = if is_interpolated_expr { + // We need to accept arbitrary interpolated expressions to continue + // supporting things like `doc = $expr` that work on stable. + // Non-literal interpolated expressions are rejected after expansion. 
+ self.parse_token_tree() + } else { + self.parse_unsuffixed_lit()?.token_tree() + }; + TokenStream::new(vec![eq.into(), token_tree.into()]) + } else { + TokenStream::default() + }; + ast::AttrItem { path, tokens } + }) + } + + /// Parses attributes that appear after the opening of an item. These should + /// be preceded by an exclamation mark, but we accept and warn about one + /// terminated by a semicolon. + /// + /// Matches `inner_attrs*`. + crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { + let mut attrs: Vec<ast::Attribute> = vec![]; + loop { + match self.token.kind { + token::Pound => { + // Don't even try to parse if it's not an inner attribute. + if !self.look_ahead(1, |t| t == &token::Not) { + break; + } + + let attr = self.parse_attribute(true)?; + assert_eq!(attr.style, ast::AttrStyle::Inner); + attrs.push(attr); + } + token::DocComment(s) => { + // We need to get the position of this token before we bump. + let attr = attr::mk_sugared_doc_attr(s, self.token.span); + if attr.style == ast::AttrStyle::Inner { + attrs.push(attr); + self.bump(); + } else { + break; + } + } + _ => break, + } + } + Ok(attrs) + } + + fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> { + let lit = self.parse_lit()?; + debug!("checking if {:?} is unusuffixed", lit); + + if !lit.kind.is_unsuffixed() { + let msg = "suffixed literals are not allowed in attributes"; + self.diagnostic().struct_span_err(lit.span, msg) + .help("instead of using a suffixed literal \ + (1u8, 1.0f32, etc.), use an unsuffixed version \ + (1, 1.0, etc.).") + .emit() + } + + Ok(lit) + } + + /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited. + crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> { + self.expect(&token::OpenDelim(token::Paren))?; + + let cfg_predicate = self.parse_meta_item()?; + self.expect(&token::Comma)?; + + // Presumably, the majority of the time there will only be one attr. + let mut expanded_attrs = Vec::with_capacity(1); + + while !self.check(&token::CloseDelim(token::Paren)) { + let lo = self.token.span.lo(); + let item = self.parse_attr_item()?; + expanded_attrs.push((item, self.prev_span.with_lo(lo))); + self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; + } + + self.expect(&token::CloseDelim(token::Paren))?; + Ok((cfg_predicate, expanded_attrs)) + } + + /// Matches the following grammar (per RFC 1559). + /// + /// meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; + /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; + pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { + let nt_meta = match self.token.kind { + token::Interpolated(ref nt) => match **nt { + token::NtMeta(ref e) => Some(e.clone()), + _ => None, + }, + _ => None, + }; + + if let Some(item) = nt_meta { + return match item.meta(item.path.span) { + Some(meta) => { + self.bump(); + Ok(meta) + } + None => self.unexpected(), + } + } + + let lo = self.token.span; + let path = self.parse_path(PathStyle::Mod)?; + let kind = self.parse_meta_item_kind()?; + let span = lo.to(self.prev_span); + Ok(ast::MetaItem { path, kind, span }) + } + + crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { + Ok(if self.eat(&token::Eq) { + ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) + } else if self.eat(&token::OpenDelim(token::Paren)) { + ast::MetaItemKind::List(self.parse_meta_seq()?) 
+ } else { + ast::MetaItemKind::Word + }) + } + + /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`. + fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> { + match self.parse_unsuffixed_lit() { + Ok(lit) => { + return Ok(ast::NestedMetaItem::Literal(lit)) + } + Err(ref mut err) => err.cancel(), + } + + match self.parse_meta_item() { + Ok(mi) => { + return Ok(ast::NestedMetaItem::MetaItem(mi)) + } + Err(ref mut err) => err.cancel(), + } + + let found = self.this_token_to_string(); + let msg = format!("expected unsuffixed literal or identifier, found `{}`", found); + Err(self.diagnostic().struct_span_err(self.token.span, &msg)) + } + + /// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`. + fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> { + self.parse_seq_to_end(&token::CloseDelim(token::Paren), + SeqSep::trailing_allowed(token::Comma), + |p: &mut Parser<'a>| p.parse_meta_item_inner()) + } +} diff --git a/src/libsyntax/parse/parser/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs new file mode 100644 index 00000000000..06982c789db --- /dev/null +++ b/src/libsyntax/parse/parser/diagnostics.rs @@ -0,0 +1,1494 @@ +use super::{ + BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, + SeqSep, PResult, Parser +}; +use crate::ast::{ + self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, + Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, +}; +use crate::parse::token::{self, TokenKind}; +use crate::print::pprust; +use crate::ptr::P; +use crate::symbol::{kw, sym}; +use crate::ThinVec; +use crate::util::parser::AssocOp; +use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralise}; +use rustc_data_structures::fx::FxHashSet; +use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError}; +use log::{debug, trace}; +use std::mem; + +const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments"; + +/// Creates a placeholder argument. 
+pub(super) fn dummy_arg(ident: Ident) -> Param { + let pat = P(Pat { + id: ast::DUMMY_NODE_ID, + kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), + span: ident.span, + }); + let ty = Ty { + kind: TyKind::Err, + span: ident.span, + id: ast::DUMMY_NODE_ID + }; + Param { + attrs: ThinVec::default(), + id: ast::DUMMY_NODE_ID, + pat, + span: ident.span, + ty: P(ty), + is_placeholder: false, + } +} + +pub enum Error { + FileNotFoundForModule { + mod_name: String, + default_path: String, + secondary_path: String, + dir_path: String, + }, + DuplicatePaths { + mod_name: String, + default_path: String, + secondary_path: String, + }, + UselessDocComment, + InclusiveRangeWithNoEnd, +} + +impl Error { + fn span_err<S: Into<MultiSpan>>( + self, + sp: S, + handler: &errors::Handler, + ) -> DiagnosticBuilder<'_> { + match self { + Error::FileNotFoundForModule { + ref mod_name, + ref default_path, + ref secondary_path, + ref dir_path, + } => { + let mut err = struct_span_err!( + handler, + sp, + E0583, + "file not found for module `{}`", + mod_name, + ); + err.help(&format!( + "name the file either {} or {} inside the directory \"{}\"", + default_path, + secondary_path, + dir_path, + )); + err + } + Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { + let mut err = struct_span_err!( + handler, + sp, + E0584, + "file for module `{}` found at both {} and {}", + mod_name, + default_path, + secondary_path, + ); + err.help("delete or rename one of them to remove the ambiguity"); + err + } + Error::UselessDocComment => { + let mut err = struct_span_err!( + handler, + sp, + E0585, + "found a documentation comment that doesn't document anything", + ); + err.help("doc comments must come before what they document, maybe a comment was \ + intended with `//`?"); + err + } + Error::InclusiveRangeWithNoEnd => { + let mut err = struct_span_err!( + handler, + sp, + E0586, + "inclusive range with no end", + ); + err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)"); + err + } + } + } +} + +pub(super) trait RecoverQPath: Sized + 'static { + const PATH_STYLE: PathStyle = PathStyle::Expr; + fn to_ty(&self) -> Option<P<Ty>>; + fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self; +} + +impl RecoverQPath for Ty { + const PATH_STYLE: PathStyle = PathStyle::Type; + fn to_ty(&self) -> Option<P<Ty>> { + Some(P(self.clone())) + } + fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self { + Self { + span: path.span, + kind: TyKind::Path(qself, path), + id: ast::DUMMY_NODE_ID, + } + } +} + +impl RecoverQPath for Pat { + fn to_ty(&self) -> Option<P<Ty>> { + self.to_ty() + } + fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self { + Self { + span: path.span, + kind: PatKind::Path(qself, path), + id: ast::DUMMY_NODE_ID, + } + } +} + +impl RecoverQPath for Expr { + fn to_ty(&self) -> Option<P<Ty>> { + self.to_ty() + } + fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self { + Self { + span: path.span, + kind: ExprKind::Path(qself, path), + attrs: ThinVec::new(), + id: ast::DUMMY_NODE_ID, + } + } +} + +impl<'a> Parser<'a> { + pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { + self.span_fatal(self.token.span, m) + } + + crate fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + self.sess.span_diagnostic.struct_span_fatal(sp, m) + } + + pub(super) fn span_fatal_err<S: Into<MultiSpan>>( + &self, + sp: S, + err: Error, + ) -> DiagnosticBuilder<'a> { + err.span_err(sp, self.diagnostic()) + } + + 
pub(super) fn bug(&self, m: &str) -> ! { + self.sess.span_diagnostic.span_bug(self.token.span, m) + } + + pub(super) fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { + self.sess.span_diagnostic.span_err(sp, m) + } + + pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + self.sess.span_diagnostic.struct_span_err(sp, m) + } + + pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! { + self.sess.span_diagnostic.span_bug(sp, m) + } + + pub(super) fn diagnostic(&self) -> &'a errors::Handler { + &self.sess.span_diagnostic + } + + pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> { + self.sess.source_map().span_to_snippet(span) + } + + pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { + let mut err = self.struct_span_err( + self.token.span, + &format!("expected identifier, found {}", self.this_token_descr()), + ); + if let token::Ident(name, false) = self.token.kind { + if Ident::new(name, self.token.span).is_raw_guess() { + err.span_suggestion( + self.token.span, + "you can escape reserved keywords to use them as identifiers", + format!("r#{}", name), + Applicability::MaybeIncorrect, + ); + } + } + if let Some(token_descr) = self.token_descr() { + err.span_label(self.token.span, format!("expected identifier, found {}", token_descr)); + } else { + err.span_label(self.token.span, "expected identifier"); + if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { + err.span_suggestion( + self.token.span, + "remove this comma", + String::new(), + Applicability::MachineApplicable, + ); + } + } + err + } + + pub(super) fn expected_one_of_not_found( + &mut self, + edible: &[TokenKind], + inedible: &[TokenKind], + ) -> PResult<'a, bool /* recovered */> { + fn tokens_to_string(tokens: &[TokenType]) -> String { + let mut i = tokens.iter(); + // This might be a sign we need a connect method on `Iterator`. 
+ let b = i.next() + .map_or(String::new(), |t| t.to_string()); + i.enumerate().fold(b, |mut b, (i, a)| { + if tokens.len() > 2 && i == tokens.len() - 2 { + b.push_str(", or "); + } else if tokens.len() == 2 && i == tokens.len() - 2 { + b.push_str(" or "); + } else { + b.push_str(", "); + } + b.push_str(&a.to_string()); + b + }) + } + + let mut expected = edible.iter() + .map(|x| TokenType::Token(x.clone())) + .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) + .chain(self.expected_tokens.iter().cloned()) + .collect::<Vec<_>>(); + expected.sort_by_cached_key(|x| x.to_string()); + expected.dedup(); + let expect = tokens_to_string(&expected[..]); + let actual = self.this_token_to_string(); + let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { + let short_expect = if expected.len() > 6 { + format!("{} possible tokens", expected.len()) + } else { + expect.clone() + }; + (format!("expected one of {}, found `{}`", expect, actual), + (self.sess.source_map().next_point(self.prev_span), + format!("expected one of {} here", short_expect))) + } else if expected.is_empty() { + (format!("unexpected token: `{}`", actual), + (self.prev_span, "unexpected token after this".to_string())) + } else { + (format!("expected {}, found `{}`", expect, actual), + (self.sess.source_map().next_point(self.prev_span), + format!("expected {} here", expect))) + }; + self.last_unexpected_token_span = Some(self.token.span); + let mut err = self.fatal(&msg_exp); + if self.token.is_ident_named(sym::and) { + err.span_suggestion_short( + self.token.span, + "use `&&` instead of `and` for the boolean operator", + "&&".to_string(), + Applicability::MaybeIncorrect, + ); + } + if self.token.is_ident_named(sym::or) { + err.span_suggestion_short( + self.token.span, + "use `||` instead of `or` for the boolean operator", + "||".to_string(), + Applicability::MaybeIncorrect, + ); + } + let sp = if self.token == token::Eof { + // This is EOF; don't want to point at the following char, but rather the last token. + self.prev_span + } else { + label_sp + }; + match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { + TokenType::Token(t) => Some(t.clone()), + _ => None, + }).collect::<Vec<_>>(), err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } + + let is_semi_suggestable = expected.iter().any(|t| match t { + TokenType::Token(token::Semi) => true, // We expect a `;` here. + _ => false, + }) && ( // A `;` would be expected before the current keyword. + self.token.is_keyword(kw::Break) || + self.token.is_keyword(kw::Continue) || + self.token.is_keyword(kw::For) || + self.token.is_keyword(kw::If) || + self.token.is_keyword(kw::Let) || + self.token.is_keyword(kw::Loop) || + self.token.is_keyword(kw::Match) || + self.token.is_keyword(kw::Return) || + self.token.is_keyword(kw::While) + ); + let sm = self.sess.source_map(); + match (sm.lookup_line(self.token.span.lo()), sm.lookup_line(sp.lo())) { + (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => { + // The spans are in different lines, expected `;` and found `let` or `return`. + // High likelihood that it is only a missing `;`. 
+ err.span_suggestion_short( + label_sp, + "a semicolon may be missing here", + ";".to_string(), + Applicability::MaybeIncorrect, + ); + err.emit(); + return Ok(true); + } + (Ok(ref a), Ok(ref b)) if a.line == b.line => { + // When the spans are in the same line, it means that the only content between + // them is whitespace, point at the found token in that case: + // + // X | () => { syntax error }; + // | ^^^^^ expected one of 8 possible tokens here + // + // instead of having: + // + // X | () => { syntax error }; + // | -^^^^^ unexpected token + // | | + // | expected one of 8 possible tokens here + err.span_label(self.token.span, label_exp); + } + _ if self.prev_span == syntax_pos::DUMMY_SP => { + // Account for macro context where the previous span might not be + // available to avoid incorrect output (#54841). + err.span_label(self.token.span, "unexpected token"); + } + _ => { + err.span_label(sp, label_exp); + err.span_label(self.token.span, "unexpected token"); + } + } + self.maybe_annotate_with_ascription(&mut err, false); + Err(err) + } + + pub fn maybe_annotate_with_ascription( + &self, + err: &mut DiagnosticBuilder<'_>, + maybe_expected_semicolon: bool, + ) { + if let Some((sp, likely_path)) = self.last_type_ascription { + let sm = self.sess.source_map(); + let next_pos = sm.lookup_char_pos(self.token.span.lo()); + let op_pos = sm.lookup_char_pos(sp.hi()); + + let allow_unstable = self.sess.unstable_features.is_nightly_build(); + + if likely_path { + err.span_suggestion( + sp, + "maybe write a path separator here", + "::".to_string(), + if allow_unstable { + Applicability::MaybeIncorrect + } else { + Applicability::MachineApplicable + }, + ); + } else if op_pos.line != next_pos.line && maybe_expected_semicolon { + err.span_suggestion( + sp, + "try using a semicolon", + ";".to_string(), + Applicability::MaybeIncorrect, + ); + } else if allow_unstable { + err.span_label(sp, "tried to parse a type due to this type ascription"); + } else { + err.span_label(sp, "tried to parse a type due to this"); + } + if allow_unstable { + // Give extra information about type ascription only if it's a nightly compiler. + err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \ + type: `<expr>: <type>`"); + err.note("for more information, see \ + https://github.com/rust-lang/rust/issues/23416"); + } + } + } + + /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, + /// passes through any errors encountered. Used for error recovery. + pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { + if let Err(ref mut err) = self.parse_seq_to_before_tokens( + kets, + SeqSep::none(), + TokenExpectType::Expect, + |p| Ok(p.parse_token_tree()), + ) { + err.cancel(); + } + } + + /// This function checks if there are trailing angle brackets and produces + /// a diagnostic to suggest removing them. + /// + /// ```ignore (diagnostic) + /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>(); + /// ^^ help: remove extra angle brackets + /// ``` + pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { + // This function is intended to be invoked after parsing a path segment where there are two + // cases: + // + // 1. A specific token is expected after the path segment. + // eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call), + // `Foo::`, or `Foo::<Bar>::` (mod sep - continued path). + // 2. No specific token is expected after the path segment. + // eg. 
`x.foo` (field access) + // + // This function is called after parsing `.foo` and before parsing the token `end` (if + // present). This includes any angle bracket arguments, such as `.foo::<u32>` or + // `Foo::<Bar>`. + + // We only care about trailing angle brackets if we previously parsed angle bracket + // arguments. This helps stop us incorrectly suggesting that extra angle brackets be + // removed in this case: + // + // `x.foo >> (3)` (where `x.foo` is a `u32` for example) + // + // This case is particularly tricky as we won't notice it just looking at the tokens - + // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will + // have already been parsed): + // + // `x.foo::<u32>>>(3)` + let parsed_angle_bracket_args = segment.args + .as_ref() + .map(|args| args.is_angle_bracketed()) + .unwrap_or(false); + + debug!( + "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}", + parsed_angle_bracket_args, + ); + if !parsed_angle_bracket_args { + return; + } + + // Keep the span at the start so we can highlight the sequence of `>` characters to be + // removed. + let lo = self.token.span; + + // We need to look-ahead to see if we have `>` characters without moving the cursor forward + // (since we might have the field access case and the characters we're eating are + // actual operators and not trailing characters - ie `x.foo >> 3`). + let mut position = 0; + + // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how + // many of each (so we can correctly pluralize our error messages) and continue to + // advance. + let mut number_of_shr = 0; + let mut number_of_gt = 0; + while self.look_ahead(position, |t| { + trace!("check_trailing_angle_brackets: t={:?}", t); + if *t == token::BinOp(token::BinOpToken::Shr) { + number_of_shr += 1; + true + } else if *t == token::Gt { + number_of_gt += 1; + true + } else { + false + } + }) { + position += 1; + } + + // If we didn't find any trailing `>` characters, then we have nothing to error about. + debug!( + "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}", + number_of_gt, number_of_shr, + ); + if number_of_gt < 1 && number_of_shr < 1 { + return; + } + + // Finally, double check that we have our end token as otherwise this is the + // second case. + if self.look_ahead(position, |t| { + trace!("check_trailing_angle_brackets: t={:?}", t); + *t == end + }) { + // Eat from where we started until the end token so that parsing can continue + // as if we didn't have those extra angle brackets. + self.eat_to_tokens(&[&end]); + let span = lo.until(self.token.span); + + let total_num_of_gt = number_of_gt + number_of_shr * 2; + self.diagnostic() + .struct_span_err( + span, + &format!("unmatched angle bracket{}", pluralise!(total_num_of_gt)), + ) + .span_suggestion( + span, + &format!("remove extra angle bracket{}", pluralise!(total_num_of_gt)), + String::new(), + Applicability::MachineApplicable, + ) + .emit(); + } + } + + /// Produces an error if comparison operators are chained (RFC #558). + /// We only need to check the LHS, not the RHS, because all comparison ops have same + /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`). + /// + /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used + /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the + /// case. 
+ /// + /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left + /// associative we can infer that we have: + /// + /// outer_op + /// / \ + /// inner_op r2 + /// / \ + /// l1 r1 + pub(super) fn check_no_chained_comparison( + &mut self, + lhs: &Expr, + outer_op: &AssocOp, + ) -> PResult<'a, Option<P<Expr>>> { + debug_assert!( + outer_op.is_comparison(), + "check_no_chained_comparison: {:?} is not comparison", + outer_op, + ); + + let mk_err_expr = |this: &Self, span| { + Ok(Some(this.mk_expr(span, ExprKind::Err, ThinVec::new()))) + }; + + match lhs.kind { + ExprKind::Binary(op, _, _) if op.node.is_comparison() => { + // Respan to include both operators. + let op_span = op.span.to(self.prev_span); + let mut err = self.struct_span_err( + op_span, + "chained comparison operators require parentheses", + ); + + let suggest = |err: &mut DiagnosticBuilder<'_>| { + err.span_suggestion_verbose( + op_span.shrink_to_lo(), + TURBOFISH, + "::".to_string(), + Applicability::MaybeIncorrect, + ); + }; + + if op.node == BinOpKind::Lt && + *outer_op == AssocOp::Less || // Include `<` to provide this recommendation + *outer_op == AssocOp::Greater // even in a case like the following: + { // Foo<Bar<Baz<Qux, ()>>> + if *outer_op == AssocOp::Less { + let snapshot = self.clone(); + self.bump(); + // So far we have parsed `foo<bar<`, consume the rest of the type args. + let modifiers = [ + (token::Lt, 1), + (token::Gt, -1), + (token::BinOp(token::Shr), -2), + ]; + self.consume_tts(1, &modifiers[..]); + + if !&[ + token::OpenDelim(token::Paren), + token::ModSep, + ].contains(&self.token.kind) { + // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the + // parser and bail out. + mem::replace(self, snapshot.clone()); + } + } + return if token::ModSep == self.token.kind { + // We have some certainty that this was a bad turbofish at this point. + // `foo< bar >::` + suggest(&mut err); + + let snapshot = self.clone(); + self.bump(); // `::` + + // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`. + match self.parse_expr() { + Ok(_) => { + // 99% certain that the suggestion is correct, continue parsing. + err.emit(); + // FIXME: actually check that the two expressions in the binop are + // paths and resynthesize new fn call expression instead of using + // `ExprKind::Err` placeholder. + mk_err_expr(self, lhs.span.to(self.prev_span)) + } + Err(mut expr_err) => { + expr_err.cancel(); + // Not entirely sure now, but we bubble the error up with the + // suggestion. + mem::replace(self, snapshot); + Err(err) + } + } + } else if token::OpenDelim(token::Paren) == self.token.kind { + // We have high certainty that this was a bad turbofish at this point. + // `foo< bar >(` + suggest(&mut err); + // Consume the fn call arguments. + match self.consume_fn_args() { + Err(()) => Err(err), + Ok(()) => { + err.emit(); + // FIXME: actually check that the two expressions in the binop are + // paths and resynthesize new fn call expression instead of using + // `ExprKind::Err` placeholder. + mk_err_expr(self, lhs.span.to(self.prev_span)) + } + } + } else { + // All we know is that this is `foo < bar >` and *nothing* else. Try to + // be helpful, but don't attempt to recover. + err.help(TURBOFISH); + err.help("or use `(...)` if you meant to specify fn arguments"); + // These cases cause too many knock-down errors, bail out (#61329). 
+ Err(err) + }; + } + err.emit(); + } + _ => {} + } + Ok(None) + } + + fn consume_fn_args(&mut self) -> Result<(), ()> { + let snapshot = self.clone(); + self.bump(); // `(` + + // Consume the fn call arguments. + let modifiers = [ + (token::OpenDelim(token::Paren), 1), + (token::CloseDelim(token::Paren), -1), + ]; + self.consume_tts(1, &modifiers[..]); + + if self.token.kind == token::Eof { + // Not entirely sure that what we consumed were fn arguments, rollback. + mem::replace(self, snapshot); + Err(()) + } else { + // 99% certain that the suggestion is correct, continue parsing. + Ok(()) + } + } + + pub(super) fn maybe_report_ambiguous_plus( + &mut self, + allow_plus: bool, + impl_dyn_multi: bool, + ty: &Ty, + ) { + if !allow_plus && impl_dyn_multi { + let sum_with_parens = format!("({})", pprust::ty_to_string(&ty)); + self.struct_span_err(ty.span, "ambiguous `+` in a type") + .span_suggestion( + ty.span, + "use parentheses to disambiguate", + sum_with_parens, + Applicability::MachineApplicable, + ) + .emit(); + } + } + + pub(super) fn maybe_recover_from_bad_type_plus( + &mut self, + allow_plus: bool, + ty: &Ty, + ) -> PResult<'a, ()> { + // Do not add `+` to expected tokens. + if !allow_plus || !self.token.is_like_plus() { + return Ok(()); + } + + self.bump(); // `+` + let bounds = self.parse_generic_bounds(None)?; + let sum_span = ty.span.to(self.prev_span); + + let mut err = struct_span_err!( + self.sess.span_diagnostic, + sum_span, + E0178, + "expected a path on the left-hand side of `+`, not `{}`", + pprust::ty_to_string(ty) + ); + + match ty.kind { + TyKind::Rptr(ref lifetime, ref mut_ty) => { + let sum_with_parens = pprust::to_string(|s| { + s.s.word("&"); + s.print_opt_lifetime(lifetime); + s.print_mutability(mut_ty.mutbl); + s.popen(); + s.print_type(&mut_ty.ty); + s.print_type_bounds(" +", &bounds); + s.pclose() + }); + err.span_suggestion( + sum_span, + "try adding parentheses", + sum_with_parens, + Applicability::MachineApplicable, + ); + } + TyKind::Ptr(..) | TyKind::BareFn(..) => { + err.span_label(sum_span, "perhaps you forgot parentheses?"); + } + _ => { + err.span_label(sum_span, "expected a path"); + } + } + err.emit(); + Ok(()) + } + + /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`. + /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem` + /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type. + pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>( + &mut self, + base: P<T>, + allow_recovery: bool, + ) -> PResult<'a, P<T>> { + // Do not add `::` to expected tokens. + if allow_recovery && self.token == token::ModSep { + if let Some(ty) = base.to_ty() { + return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty); + } + } + Ok(base) + } + + /// Given an already parsed `Ty`, parses the `::AssocItem` tail and + /// combines them into a `<Ty>::AssocItem` expression/pattern/type. 
+ pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>( + &mut self, + ty_span: Span, + ty: P<Ty>, + ) -> PResult<'a, P<T>> { + self.expect(&token::ModSep)?; + + let mut path = ast::Path { + segments: Vec::new(), + span: DUMMY_SP, + }; + self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?; + path.span = ty_span.to(self.prev_span); + + let ty_str = self + .span_to_snippet(ty_span) + .unwrap_or_else(|_| pprust::ty_to_string(&ty)); + self.diagnostic() + .struct_span_err(path.span, "missing angle brackets in associated item path") + .span_suggestion( + // This is a best-effort recovery. + path.span, + "try", + format!("<{}>::{}", ty_str, pprust::path_to_string(&path)), + Applicability::MaybeIncorrect, + ) + .emit(); + + let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`. + Ok(P(T::recovered( + Some(QSelf { + ty, + path_span, + position: 0, + }), + path, + ))) + } + + pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool { + if self.eat(&token::Semi) { + let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`"); + err.span_suggestion_short( + self.prev_span, + "remove this semicolon", + String::new(), + Applicability::MachineApplicable, + ); + if !items.is_empty() { + let previous_item = &items[items.len() - 1]; + let previous_item_kind_name = match previous_item.kind { + // Say "braced struct" because tuple-structs and + // braceless-empty-struct declarations do take a semicolon. + ItemKind::Struct(..) => Some("braced struct"), + ItemKind::Enum(..) => Some("enum"), + ItemKind::Trait(..) => Some("trait"), + ItemKind::Union(..) => Some("union"), + _ => None, + }; + if let Some(name) = previous_item_kind_name { + err.help(&format!( + "{} declarations are not followed by a semicolon", + name + )); + } + } + err.emit(); + true + } else { + false + } + } + + /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a + /// closing delimiter. + pub(super) fn unexpected_try_recover( + &mut self, + t: &TokenKind, + ) -> PResult<'a, bool /* recovered */> { + let token_str = pprust::token_kind_to_string(t); + let this_token_str = self.this_token_descr(); + let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { + // Point at the end of the macro call when reaching end of macro arguments. + (token::Eof, Some(_)) => { + let sp = self.sess.source_map().next_point(self.token.span); + (sp, sp) + } + // We don't want to point at the following span after DUMMY_SP. + // This happens when the parser finds an empty TokenStream. + _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span), + // EOF, don't want to point at the following char, but rather the last token. 
+ (token::Eof, None) => (self.prev_span, self.token.span), + _ => (self.sess.source_map().next_point(self.prev_span), self.token.span), + }; + let msg = format!( + "expected `{}`, found {}", + token_str, + match (&self.token.kind, self.subparser_name) { + (token::Eof, Some(origin)) => format!("end of {}", origin), + _ => this_token_str, + }, + ); + let mut err = self.struct_span_err(sp, &msg); + let label_exp = format!("expected `{}`", token_str); + match self.recover_closing_delimiter(&[t.clone()], err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } + let sm = self.sess.source_map(); + match (sm.lookup_line(prev_sp.lo()), sm.lookup_line(sp.lo())) { + (Ok(ref a), Ok(ref b)) if a.line == b.line => { + // When the spans are in the same line, it means that the only content + // between them is whitespace, point only at the found token. + err.span_label(sp, label_exp); + } + _ => { + err.span_label(prev_sp, label_exp); + err.span_label(sp, "unexpected token"); + } + } + Err(err) + } + + pub(super) fn parse_semi_or_incorrect_foreign_fn_body( + &mut self, + ident: &Ident, + extern_sp: Span, + ) -> PResult<'a, ()> { + if self.token != token::Semi { + // This might be an incorrect fn definition (#62109). + let parser_snapshot = self.clone(); + match self.parse_inner_attrs_and_block() { + Ok((_, body)) => { + self.struct_span_err(ident.span, "incorrect `fn` inside `extern` block") + .span_label(ident.span, "can't have a body") + .span_label(body.span, "this body is invalid here") + .span_label( + extern_sp, + "`extern` blocks define existing foreign functions and `fn`s \ + inside of them cannot have a body") + .help("you might have meant to write a function accessible through ffi, \ + which can be done by writing `extern fn` outside of the \ + `extern` block") + .note("for more information, visit \ + https://doc.rust-lang.org/std/keyword.extern.html") + .emit(); + } + Err(mut err) => { + err.cancel(); + mem::replace(self, parser_snapshot); + self.expect(&token::Semi)?; + } + } + } else { + self.bump(); + } + Ok(()) + } + + /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`, + /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`. + pub(super) fn parse_incorrect_await_syntax( + &mut self, + lo: Span, + await_sp: Span, + ) -> PResult<'a, (Span, ExprKind)> { + if self.token == token::Not { + // Handle `await!(<expr>)`. + self.expect(&token::Not)?; + self.expect(&token::OpenDelim(token::Paren))?; + let expr = self.parse_expr()?; + self.expect(&token::CloseDelim(token::Paren))?; + let sp = self.error_on_incorrect_await(lo, self.prev_span, &expr, false); + return Ok((sp, ExprKind::Await(expr))) + } + + let is_question = self.eat(&token::Question); // Handle `await? <expr>`. + let expr = if self.token == token::OpenDelim(token::Brace) { + // Handle `await { <expr> }`. + // This needs to be handled separatedly from the next arm to avoid + // interpreting `await { <expr> }?` as `<expr>?.await`. 
+ self.parse_block_expr( + None, + self.token.span, + BlockCheckMode::Default, + ThinVec::new(), + ) + } else { + self.parse_expr() + }.map_err(|mut err| { + err.span_label(await_sp, "while parsing this incorrect await expression"); + err + })?; + let sp = self.error_on_incorrect_await(lo, expr.span, &expr, is_question); + Ok((sp, ExprKind::Await(expr))) + } + + fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span { + let expr_str = self.span_to_snippet(expr.span) + .unwrap_or_else(|_| pprust::expr_to_string(&expr)); + let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" }); + let sp = lo.to(hi); + let app = match expr.kind { + ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?` + _ => Applicability::MachineApplicable, + }; + self.struct_span_err(sp, "incorrect use of `await`") + .span_suggestion(sp, "`await` is a postfix operation", suggestion, app) + .emit(); + sp + } + + /// If encountering `future.await()`, consumes and emits an error. + pub(super) fn recover_from_await_method_call(&mut self) { + if self.token == token::OpenDelim(token::Paren) && + self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) + { + // future.await() + let lo = self.token.span; + self.bump(); // ( + let sp = lo.to(self.token.span); + self.bump(); // ) + self.struct_span_err(sp, "incorrect use of `await`") + .span_suggestion( + sp, + "`await` is not a method call, remove the parentheses", + String::new(), + Applicability::MachineApplicable, + ).emit() + } + } + + /// Recovers a situation like `for ( $pat in $expr )` + /// and suggest writing `for $pat in $expr` instead. + /// + /// This should be called before parsing the `$block`. + pub(super) fn recover_parens_around_for_head( + &mut self, + pat: P<Pat>, + expr: &Expr, + begin_paren: Option<Span>, + ) -> P<Pat> { + match (&self.token.kind, begin_paren) { + (token::CloseDelim(token::Paren), Some(begin_par_sp)) => { + self.bump(); + + let pat_str = self + // Remove the `(` from the span of the pattern: + .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap()) + .unwrap_or_else(|_| pprust::pat_to_string(&pat)); + + self.struct_span_err(self.prev_span, "unexpected closing `)`") + .span_label(begin_par_sp, "opening `(`") + .span_suggestion( + begin_par_sp.to(self.prev_span), + "remove parenthesis in `for` loop", + format!("{} in {}", pat_str, pprust::expr_to_string(&expr)), + // With e.g. `for (x) in y)` this would replace `(x) in y)` + // with `x) in y)` which is syntactically invalid. + // However, this is prevented before we get here. + Applicability::MachineApplicable, + ) + .emit(); + + // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint. + pat.and_then(|pat| match pat.kind { + PatKind::Paren(pat) => pat, + _ => P(pat), + }) + } + _ => pat, + } + } + + pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool { + self.token.is_ident() && + if let ast::ExprKind::Path(..) 
= node { true } else { false } && + !self.token.is_reserved_ident() && // v `foo:bar(baz)` + self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) || + self.look_ahead(1, |t| t == &token::Lt) && // `foo:bar<baz` + self.look_ahead(2, |t| t.is_ident()) || + self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz` + self.look_ahead(2, |t| t.is_ident()) || + self.look_ahead(1, |t| t == &token::ModSep) && + (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz` + self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>` + } + + pub(super) fn recover_seq_parse_error( + &mut self, + delim: token::DelimToken, + lo: Span, + result: PResult<'a, P<Expr>>, + ) -> P<Expr> { + match result { + Ok(x) => x, + Err(mut err) => { + err.emit(); + // Recover from parse error. + self.consume_block(delim); + self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new()) + } + } + } + + pub(super) fn recover_closing_delimiter( + &mut self, + tokens: &[TokenKind], + mut err: DiagnosticBuilder<'a>, + ) -> PResult<'a, bool> { + let mut pos = None; + // We want to use the last closing delim that would apply. + for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { + if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) + && Some(self.token.span) > unmatched.unclosed_span + { + pos = Some(i); + } + } + match pos { + Some(pos) => { + // Recover and assume that the detected unclosed delimiter was meant for + // this location. Emit the diagnostic and act as if the delimiter was + // present for the parser's sake. + + // Don't attempt to recover from this unclosed delimiter more than once. + let unmatched = self.unclosed_delims.remove(pos); + let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); + + // We want to suggest the inclusion of the closing delimiter where it makes + // the most sense, which is immediately after the last token: + // + // {foo(bar {}} + // - ^ + // | | + // | help: `)` may belong here + // | + // unclosed delimiter + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "unclosed delimiter"); + } + err.span_suggestion_short( + self.sess.source_map().next_point(self.prev_span), + &format!("{} may belong here", delim.to_string()), + delim.to_string(), + Applicability::MaybeIncorrect, + ); + err.emit(); + self.expected_tokens.clear(); // reduce errors + Ok(true) + } + _ => Err(err), + } + } + + /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid. + pub(super) fn eat_bad_pub(&mut self) { + if self.token.is_keyword(kw::Pub) { + match self.parse_visibility(false) { + Ok(vis) => { + self.diagnostic() + .struct_span_err(vis.span, "unnecessary visibility qualifier") + .span_label(vis.span, "`pub` not permitted here") + .emit(); + } + Err(mut err) => err.emit(), + } + } + } + + /// Eats tokens until we can be relatively sure we reached the end of the + /// statement. This is something of a best-effort heuristic. + /// + /// We terminate when we find an unmatched `}` (without consuming it). + pub(super) fn recover_stmt(&mut self) { + self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore) + } + + /// If `break_on_semi` is `Break`, then we will stop consuming tokens after + /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is + /// approximate -- it can mean we break too early due to macros, but that + /// should only lead to sub-optimal recovery, not inaccurate parsing). 
+ /// + /// If `break_on_block` is `Break`, then we will stop consuming tokens + /// after finding (and consuming) a brace-delimited block. + pub(super) fn recover_stmt_( + &mut self, + break_on_semi: SemiColonMode, + break_on_block: BlockMode, + ) { + let mut brace_depth = 0; + let mut bracket_depth = 0; + let mut in_block = false; + debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", + break_on_semi, break_on_block); + loop { + debug!("recover_stmt_ loop {:?}", self.token); + match self.token.kind { + token::OpenDelim(token::DelimToken::Brace) => { + brace_depth += 1; + self.bump(); + if break_on_block == BlockMode::Break && + brace_depth == 1 && + bracket_depth == 0 { + in_block = true; + } + } + token::OpenDelim(token::DelimToken::Bracket) => { + bracket_depth += 1; + self.bump(); + } + token::CloseDelim(token::DelimToken::Brace) => { + if brace_depth == 0 { + debug!("recover_stmt_ return - close delim {:?}", self.token); + break; + } + brace_depth -= 1; + self.bump(); + if in_block && bracket_depth == 0 && brace_depth == 0 { + debug!("recover_stmt_ return - block end {:?}", self.token); + break; + } + } + token::CloseDelim(token::DelimToken::Bracket) => { + bracket_depth -= 1; + if bracket_depth < 0 { + bracket_depth = 0; + } + self.bump(); + } + token::Eof => { + debug!("recover_stmt_ return - Eof"); + break; + } + token::Semi => { + self.bump(); + if break_on_semi == SemiColonMode::Break && + brace_depth == 0 && + bracket_depth == 0 { + debug!("recover_stmt_ return - Semi"); + break; + } + } + token::Comma if break_on_semi == SemiColonMode::Comma && + brace_depth == 0 && + bracket_depth == 0 => + { + debug!("recover_stmt_ return - Semi"); + break; + } + _ => { + self.bump() + } + } + } + } + + pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) { + if self.eat_keyword(kw::In) { + // a common typo: `for _ in in bar {}` + self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`") + .span_suggestion_short( + in_span.until(self.prev_span), + "remove the duplicated `in`", + String::new(), + Applicability::MachineApplicable, + ) + .emit(); + } + } + + pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> { + let token_str = self.this_token_descr(); + let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); + err.span_label(self.token.span, "expected `;` or `{`"); + Err(err) + } + + pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) { + if let token::DocComment(_) = self.token.kind { + self.struct_span_err( + self.token.span, + "documentation comments cannot be applied to a function parameter's type", + ) + .span_label(self.token.span, "doc comments are not allowed here") + .emit(); + self.bump(); + } else if self.token == token::Pound && self.look_ahead(1, |t| { + *t == token::OpenDelim(token::Bracket) + }) { + let lo = self.token.span; + // Skip every token until next possible arg. + while self.token != token::CloseDelim(token::Bracket) { + self.bump(); + } + let sp = lo.to(self.token.span); + self.bump(); + self.struct_span_err( + sp, + "attributes cannot be applied to a function parameter's type", + ) + .span_label(sp, "attributes are not allowed here") + .emit(); + } + } + + pub(super) fn parameter_without_type( + &mut self, + err: &mut DiagnosticBuilder<'_>, + pat: P<ast::Pat>, + require_name: bool, + is_self_allowed: bool, + is_trait_item: bool, + ) -> Option<Ident> { + // If we find a pattern followed by an identifier, it could be an (incorrect) + // C-style parameter declaration. 
+ if self.check_ident() && self.look_ahead(1, |t| { + *t == token::Comma || *t == token::CloseDelim(token::Paren) + }) { // `fn foo(String s) {}` + let ident = self.parse_ident().unwrap(); + let span = pat.span.with_hi(ident.span.hi()); + + err.span_suggestion( + span, + "declare the type after the parameter binding", + String::from("<identifier>: <type>"), + Applicability::HasPlaceholders, + ); + return Some(ident); + } else if let PatKind::Ident(_, ident, _) = pat.kind { + if require_name && ( + is_trait_item || + self.token == token::Comma || + self.token == token::Lt || + self.token == token::CloseDelim(token::Paren) + ) { // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}` + if is_self_allowed { + err.span_suggestion( + pat.span, + "if this is a `self` type, give it a parameter name", + format!("self: {}", ident), + Applicability::MaybeIncorrect, + ); + } + // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to + // `fn foo(HashMap: TypeName<u32>)`. + if self.token != token::Lt { + err.span_suggestion( + pat.span, + "if this was a parameter name, give it a type", + format!("{}: TypeName", ident), + Applicability::HasPlaceholders, + ); + } + err.span_suggestion( + pat.span, + "if this is a type, explicitly ignore the parameter name", + format!("_: {}", ident), + Applicability::MachineApplicable, + ); + err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); + + // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name. + return if self.token == token::Lt { None } else { Some(ident) }; + } + } + None + } + + pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> { + let pat = self.parse_pat(Some("argument name"))?; + self.expect(&token::Colon)?; + let ty = self.parse_ty()?; + + self.diagnostic() + .struct_span_err_with_code( + pat.span, + "patterns aren't allowed in methods without bodies", + DiagnosticId::Error("E0642".into()), + ) + .span_suggestion_short( + pat.span, + "give this argument a name or use an underscore to ignore it", + "_".to_owned(), + Applicability::MachineApplicable, + ) + .emit(); + + // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. 
+ let pat = P(Pat { + kind: PatKind::Wild, + span: pat.span, + id: ast::DUMMY_NODE_ID + }); + Ok((pat, ty)) + } + + pub(super) fn recover_bad_self_param( + &mut self, + mut param: ast::Param, + is_trait_item: bool, + ) -> PResult<'a, ast::Param> { + let sp = param.pat.span; + param.ty.kind = TyKind::Err; + let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function"); + if is_trait_item { + err.span_label(sp, "must be the first associated function parameter"); + } else { + err.span_label(sp, "not valid as function parameter"); + err.note("`self` is only valid as the first parameter of an associated function"); + } + err.emit(); + Ok(param) + } + + pub(super) fn consume_block(&mut self, delim: token::DelimToken) { + let mut brace_depth = 0; + loop { + if self.eat(&token::OpenDelim(delim)) { + brace_depth += 1; + } else if self.eat(&token::CloseDelim(delim)) { + if brace_depth == 0 { + return; + } else { + brace_depth -= 1; + continue; + } + } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) { + return; + } else { + self.bump(); + } + } + } + + pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { + let (span, msg) = match (&self.token.kind, self.subparser_name) { + (&token::Eof, Some(origin)) => { + let sp = self.sess.source_map().next_point(self.token.span); + (sp, format!("expected expression, found end of {}", origin)) + } + _ => (self.token.span, format!( + "expected expression, found {}", + self.this_token_descr(), + )), + }; + let mut err = self.struct_span_err(span, &msg); + let sp = self.sess.source_map().start_point(self.token.span); + if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { + self.sess.expr_parentheses_needed(&mut err, *sp, None); + } + err.span_label(span, "expected expression"); + err + } + + fn consume_tts( + &mut self, + mut acc: i64, // `i64` because malformed code can have more closing delims than opening. + // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`. + modifier: &[(token::TokenKind, i64)], + ) { + while acc > 0 { + if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) { + acc += *val; + } + if self.token.kind == token::Eof { + break; + } + self.bump(); + } + } + + /// Replace duplicated recovered parameters with `_` pattern to avoid unecessary errors. + /// + /// This is necessary because at this point we don't know whether we parsed a function with + /// anonymous parameters or a function with names but no types. In order to minimize + /// unecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where + /// the parameters are *names* (so we don't emit errors about not being able to find `b` in + /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`, + /// we deduplicate them to not complain about duplicated parameter names. 
+ pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) { + let mut seen_inputs = FxHashSet::default(); + for input in fn_inputs.iter_mut() { + let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = ( + &input.pat.kind, &input.ty.kind, + ) { + Some(*ident) + } else { + None + }; + if let Some(ident) = opt_ident { + if seen_inputs.contains(&ident) { + input.pat.kind = PatKind::Wild; + } + seen_inputs.insert(ident); + } + } + } +} diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index b459782d237..67a530ec683 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1,18 +1,18 @@ -use super::{ - Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode, SemiColonMode, - SeqSep, TokenExpectType, -}; +use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode}; +use super::{SemiColonMode, SeqSep, TokenExpectType}; use super::pat::{GateOr, PARAM_EXPECTED}; +use super::diagnostics::Error; + +use crate::parse::literal::LitError; use crate::ast::{ self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode, Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind, - FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, + FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit, }; use crate::maybe_recover_from_interpolated_ty_qpath; use crate::parse::classify; -use crate::parse::token::{self, Token}; -use crate::parse::diagnostics::Error; +use crate::parse::token::{self, Token, TokenKind}; use crate::print::pprust; use crate::ptr::P; use crate::source_map::{self, Span}; @@ -20,6 +20,7 @@ use crate::symbol::{kw, sym}; use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; use errors::Applicability; +use syntax_pos::Symbol; use std::mem; use rustc_data_structures::thin_vec::ThinVec; @@ -422,7 +423,7 @@ impl<'a> Parser<'a> { self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator") .span_suggestion_short( span_of_tilde, - "use `!` to perform bitwise negation", + "use `!` to perform bitwise not", "!".to_owned(), Applicability::MachineApplicable ) @@ -552,8 +553,11 @@ impl<'a> Parser<'a> { // Report non-fatal diagnostics, keep `x as usize` as an expression // in AST and continue parsing. - let msg = format!("`<` is interpreted as a start of generic \ - arguments for `{}`, not a {}", path, op_noun); + let msg = format!( + "`<` is interpreted as a start of generic arguments for `{}`, not a {}", + pprust::path_to_string(&path), + op_noun, + ); let span_after_type = parser_snapshot_after_type.token.span; let expr = mk_expr(self, P(Ty { span: path.span, @@ -1069,8 +1073,167 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr, true) } + /// Matches `lit = true | false | token_lit`. + pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> { + let mut recovered = None; + if self.token == token::Dot { + // Attempt to recover `.4` as `0.4`. 
+ recovered = self.look_ahead(1, |next_token| { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) + = next_token.kind { + if self.token.span.hi() == next_token.span.lo() { + let s = String::from("0.") + &symbol.as_str(); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + return Some(Token::new(kind, self.token.span.to(next_token.span))); + } + } + None + }); + if let Some(token) = &recovered { + self.bump(); + self.struct_span_err(token.span, "float literals must have an integer part") + .span_suggestion( + token.span, + "must have an integer part", + pprust::token_to_string(token), + Applicability::MachineApplicable, + ) + .emit(); + } + } + + let token = recovered.as_ref().unwrap_or(&self.token); + match Lit::from_token(token) { + Ok(lit) => { + self.bump(); + Ok(lit) + } + Err(LitError::NotLiteral) => { + let msg = format!("unexpected token: {}", self.this_token_descr()); + Err(self.span_fatal(token.span, &msg)) + } + Err(err) => { + let (lit, span) = (token.expect_lit(), token.span); + self.bump(); + self.error_literal_from_token(err, lit, span); + // Pack possible quotes and prefixes from the original literal into + // the error literal's symbol so they can be pretty-printed faithfully. + let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); + let symbol = Symbol::intern(&suffixless_lit.to_string()); + let lit = token::Lit::new(token::Err, symbol, lit.suffix); + Lit::from_lit_token(lit, span).map_err(|_| unreachable!()) + } + } + } + + fn error_literal_from_token(&self, err: LitError, lit: token::Lit, span: Span) { + // Checks if `s` looks like i32 or u1234 etc. + fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { + s.len() > 1 + && s.starts_with(first_chars) + && s[1..].chars().all(|c| c.is_ascii_digit()) + } + + let token::Lit { kind, suffix, .. } = lit; + match err { + // `NotLiteral` is not an error by itself, so we don't report + // it and give the parser opportunity to try something else. + LitError::NotLiteral => {} + // `LexerError` *is* an error, but it was already reported + // by lexer, so here we don't report it the second time. + LitError::LexerError => {} + LitError::InvalidSuffix => { + self.expect_no_suffix( + span, + &format!("{} {} literal", kind.article(), kind.descr()), + suffix, + ); + } + LitError::InvalidIntSuffix => { + let suf = suffix.expect("suffix error with no suffix").as_str(); + if looks_like_width_suffix(&['i', 'u'], &suf) { + // If it looks like a width, try to be helpful. + let msg = format!("invalid width `{}` for integer literal", &suf[1..]); + self.struct_span_err(span, &msg) + .help("valid widths are 8, 16, 32, 64 and 128") + .emit(); + } else { + let msg = format!("invalid suffix `{}` for integer literal", suf); + self.struct_span_err(span, &msg) + .span_label(span, format!("invalid suffix `{}`", suf)) + .help("the suffix must be one of the integral types (`u32`, `isize`, etc)") + .emit(); + } + } + LitError::InvalidFloatSuffix => { + let suf = suffix.expect("suffix error with no suffix").as_str(); + if looks_like_width_suffix(&['f'], &suf) { + // If it looks like a width, try to be helpful. 
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]); + self.struct_span_err(span, &msg) + .help("valid widths are 32 and 64") + .emit(); + } else { + let msg = format!("invalid suffix `{}` for float literal", suf); + self.struct_span_err(span, &msg) + .span_label(span, format!("invalid suffix `{}`", suf)) + .help("valid suffixes are `f32` and `f64`") + .emit(); + } + } + LitError::NonDecimalFloat(base) => { + let descr = match base { + 16 => "hexadecimal", + 8 => "octal", + 2 => "binary", + _ => unreachable!(), + }; + self.struct_span_err(span, &format!("{} float literal is not supported", descr)) + .span_label(span, "not supported") + .emit(); + } + LitError::IntTooLarge => { + self.struct_span_err(span, "integer literal is too large") + .emit(); + } + } + } + + pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) { + if let Some(suf) = suffix { + let mut err = if kind == "a tuple index" + && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) + { + // #59553: warn instead of reject out of hand to allow the fix to percolate + // through the ecosystem when people fix their macros + let mut err = self.sess.span_diagnostic.struct_span_warn( + sp, + &format!("suffixes on {} are invalid", kind), + ); + err.note(&format!( + "`{}` is *temporarily* accepted on tuple index fields as it was \ + incorrectly accepted on stable for a few releases", + suf, + )); + err.help( + "on proc macros, you'll want to use `syn::Index::from` or \ + `proc_macro::Literal::*_unsuffixed` for code that will desugar \ + to tuple field access", + ); + err.note( + "for more context, see https://github.com/rust-lang/rust/issues/60210", + ); + err + } else { + self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind)) + }; + err.span_label(sp, format!("invalid suffix `{}`", suf)); + err.emit(); + } + } + /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). - crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { + pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); let minus_lo = self.token.span; @@ -1090,7 +1253,7 @@ impl<'a> Parser<'a> { } /// Parses a block or unsafe block. - crate fn parse_block_expr( + pub(super) fn parse_block_expr( &mut self, opt_label: Option<Label>, lo: Span, @@ -1395,7 +1558,7 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); } - crate fn parse_arm(&mut self) -> PResult<'a, Arm> { + pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> { let attrs = self.parse_outer_attributes()?; let lo = self.token.span; let pat = self.parse_top_pat(GateOr::No)?; @@ -1503,7 +1666,7 @@ impl<'a> Parser<'a> { } /// Parses an `async move? {...}` expression. 
- pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { + fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { let span_lo = self.token.span; self.expect_keyword(kw::Async)?; let capture_clause = self.parse_capture_clause(); @@ -1783,4 +1946,8 @@ impl<'a> Parser<'a> { crate fn mk_expr(&self, span: Span, kind: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> { P(Expr { kind, span, attrs, id: DUMMY_NODE_ID }) } + + pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> { + self.mk_expr(span, ExprKind::Err, ThinVec::new()) + } } diff --git a/src/libsyntax/parse/parser/generics.rs b/src/libsyntax/parse/parser/generics.rs index 2ecd9cca3c6..bfcb0042a75 100644 --- a/src/libsyntax/parse/parser/generics.rs +++ b/src/libsyntax/parse/parser/generics.rs @@ -74,7 +74,7 @@ impl<'a> Parser<'a> { /// Parses a (possibly empty) list of lifetime and type parameters, possibly including /// a trailing comma and erroneous trailing attributes. - crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { + pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { let mut params = Vec::new(); loop { let attrs = self.parse_outer_attributes()?; diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs index 3c60c88e2aa..73bd80e2a21 100644 --- a/src/libsyntax/parse/parser/item.rs +++ b/src/libsyntax/parse/parser/item.rs @@ -1,34 +1,28 @@ -use super::{Parser, PResult, PathStyle, SemiColonMode, BlockMode, ParamCfg}; +use super::{Parser, PResult, PathStyle, SemiColonMode, BlockMode}; +use super::diagnostics::{Error, dummy_arg}; use crate::maybe_whole; use crate::ptr::P; -use crate::ast::{ - self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle, - Item, ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, - UseTree, UseTreeKind, PathSegment, - IsAuto, Constness, IsAsync, Unsafety, Defaultness, - Visibility, VisibilityKind, Mutability, FnDecl, FnHeader, MethodSig, Block, - ForeignItem, ForeignItemKind, - Ty, TyKind, Generics, GenericBounds, TraitRef, - EnumDef, VariantData, StructField, AnonConst, - Mac, MacDelimiter, -}; -use crate::ext::base::DummyResult; +use crate::ast::{self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle, AnonConst, Item, ItemKind}; +use crate::ast::{ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind}; +use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness}; +use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind}; +use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField}; +use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, MethodSig, SelfKind, Param}; use crate::parse::token; -use crate::parse::parser::maybe_append; -use crate::parse::diagnostics::Error; use crate::tokenstream::{TokenTree, TokenStream}; -use crate::source_map::{respan, Span}; use crate::symbol::{kw, sym}; +use crate::source_map::{self, respan, Span}; +use crate::ThinVec; -use std::mem; use log::debug; +use std::mem; use rustc_target::spec::abi::Abi; use errors::{Applicability, DiagnosticBuilder, DiagnosticId, StashKey}; /// Whether the type alias or associated type is a concrete type or an opaque type. #[derive(Debug)] -pub enum AliasKind { +pub(super) enum AliasKind { /// Just a new name for the same type. Weak(P<Ty>), /// Only trait impls of the type will be usable, not the actual type itself. 
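(Aside on the `AliasKind` distinction shown in the context above: a weak alias is just a new name for an existing type, while an opaque alias only lets users rely on the listed trait bounds, not on the concrete type. A rough sketch of the two surface forms follows; the opaque form was feature-gated at the time, and the exact gate name `type_alias_impl_trait` is an assumption here, so that line is left as a comment.)

// Weak alias: `Meters` is fully interchangeable with `u64`.
type Meters = u64;

// Opaque alias (assumed gate `#![feature(type_alias_impl_trait)]`): callers may only
// use `Evens` through its `Iterator<Item = u32>` bound, not as the concrete type.
// type Evens = impl Iterator<Item = u32>;

fn main() {
    let distance: Meters = 42;
    println!("{} m", distance);
}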
@@ -412,7 +406,7 @@ impl<'a> Parser<'a> { self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, vis) } - fn mk_item_with_info( + pub(super) fn mk_item_with_info( &self, attrs: Vec<Attribute>, lo: Span, @@ -421,18 +415,15 @@ impl<'a> Parser<'a> { ) -> PResult<'a, Option<P<Item>>> { let (ident, item, extra_attrs) = info; let span = lo.to(self.prev_span); - let attrs = maybe_append(attrs, extra_attrs); + let attrs = Self::maybe_append(attrs, extra_attrs); Ok(Some(self.mk_item(span, ident, item, vis, attrs))) } - fn recover_first_param(&mut self) -> &'static str { - match self.parse_outer_attributes() - .and_then(|_| self.parse_self_param()) - .map_err(|mut e| e.cancel()) - { - Ok(Some(_)) => "method", - _ => "function", + fn maybe_append<T>(mut lhs: Vec<T>, mut rhs: Option<Vec<T>>) -> Vec<T> { + if let Some(ref mut rhs) = rhs { + lhs.append(rhs); } + lhs } /// This is the fall-through for parsing items. @@ -620,7 +611,7 @@ impl<'a> Parser<'a> { let ty_second = if self.token == token::DotDot { // We need to report this error after `cfg` expansion for compatibility reasons self.bump(); // `..`, do not add it to expected tokens - Some(DummyResult::raw_ty(self.prev_span, true)) + Some(self.mk_ty(self.prev_span, TyKind::Err)) } else if has_for || self.token.can_begin_type() { Some(self.parse_ty()?) } else { @@ -707,9 +698,11 @@ impl<'a> Parser<'a> { Ok(item) } - fn parse_impl_item_(&mut self, - at_end: &mut bool, - mut attrs: Vec<Attribute>) -> PResult<'a, ImplItem> { + fn parse_impl_item_( + &mut self, + at_end: &mut bool, + mut attrs: Vec<Attribute>, + ) -> PResult<'a, ImplItem> { let lo = self.token.span; let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness(); @@ -722,8 +715,11 @@ impl<'a> Parser<'a> { (name, kind, generics) } else if self.is_const_item() { self.parse_impl_const()? + } else if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(&vis), at_end)? { + // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction! + (Ident::invalid(), ast::ImplItemKind::Macro(mac), Generics::default()) } else { - let (name, inner_attrs, generics, kind) = self.parse_impl_method(&vis, at_end)?; + let (name, inner_attrs, generics, kind) = self.parse_impl_method(at_end)?; attrs.extend(inner_attrs); (name, kind, generics) }; @@ -783,71 +779,6 @@ impl<'a> Parser<'a> { Ok((name, ImplItemKind::Const(typ, expr), Generics::default())) } - /// Parses a method or a macro invocation in a trait impl. - fn parse_impl_method( - &mut self, - vis: &Visibility, - at_end: &mut bool - ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ImplItemKind)> { - // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction! - if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? { - // method macro - Ok((Ident::invalid(), vec![], Generics::default(), ast::ImplItemKind::Macro(mac))) - } else { - let (ident, sig, generics) = self.parse_method_sig(|_| true)?; - *at_end = true; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body))) - } - } - - /// Parse the "signature", including the identifier, parameters, and generics - /// of a method. The body is not parsed as that differs between `trait`s and `impl`s. 
- fn parse_method_sig( - &mut self, - is_name_required: fn(&token::Token) -> bool, - ) -> PResult<'a, (Ident, MethodSig, Generics)> { - let header = self.parse_fn_front_matter()?; - let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { - is_self_allowed: true, - allow_c_variadic: false, - is_name_required, - })?; - Ok((ident, MethodSig { header, decl }, generics)) - } - - /// Parses all the "front matter" for a `fn` declaration, up to - /// and including the `fn` keyword: - /// - /// - `const fn` - /// - `unsafe fn` - /// - `const unsafe fn` - /// - `extern fn` - /// - etc. - fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> { - let is_const_fn = self.eat_keyword(kw::Const); - let const_span = self.prev_span; - let asyncness = self.parse_asyncness(); - if let IsAsync::Async { .. } = asyncness { - self.ban_async_in_2015(self.prev_span); - } - let asyncness = respan(self.prev_span, asyncness); - let unsafety = self.parse_unsafety(); - let (constness, unsafety, abi) = if is_const_fn { - (respan(const_span, Constness::Const), unsafety, Abi::Rust) - } else { - let abi = self.parse_extern_abi()?; - (respan(self.prev_span, Constness::NotConst), unsafety, abi) - }; - if !self.eat_keyword(kw::Fn) { - // It is possible for `expect_one_of` to recover given the contents of - // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't - // account for this. - if !self.expect_one_of(&[], &[])? { unreachable!() } - } - Ok(FnHeader { constness, unsafety, asyncness, abi }) - } - /// Parses `auto? trait Foo { ... }` or `trait Foo = Bar;`. fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<'a, ItemInfo> { // Parse optional `auto` prefix. @@ -957,13 +888,7 @@ impl<'a> Parser<'a> { // trait item macro. (Ident::invalid(), TraitItemKind::Macro(mac), Generics::default()) } else { - // This is somewhat dubious; We don't want to allow - // argument names to be left off if there is a definition... - // - // We don't allow argument names to be left off in edition 2018. - let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?; - let body = self.parse_trait_method_body(at_end, &mut attrs)?; - (ident, TraitItemKind::Method(sig, body), generics) + self.parse_trait_item_method(at_end, &mut attrs)? }; Ok(TraitItem { @@ -991,43 +916,6 @@ impl<'a> Parser<'a> { Ok((ident, TraitItemKind::Const(ty, default), Generics::default())) } - /// Parse the "body" of a method in a trait item definition. - /// This can either be `;` when there's no body, - /// or e.g. a block when the method is a provided one. - fn parse_trait_method_body( - &mut self, - at_end: &mut bool, - attrs: &mut Vec<Attribute>, - ) -> PResult<'a, Option<P<Block>>> { - Ok(match self.token.kind { - token::Semi => { - debug!("parse_trait_method_body(): parsing required method"); - self.bump(); - *at_end = true; - None - } - token::OpenDelim(token::Brace) => { - debug!("parse_trait_method_body(): parsing provided method"); - *at_end = true; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(inner_attrs.iter().cloned()); - Some(body) - } - token::Interpolated(ref nt) => { - match **nt { - token::NtBlock(..) 
=> { - *at_end = true; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(inner_attrs.iter().cloned()); - Some(body) - } - _ => return self.expected_semi_or_open_brace(), - } - } - _ => return self.expected_semi_or_open_brace(), - }) - } - /// Parses the following grammar: /// /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty] @@ -1194,45 +1082,6 @@ impl<'a> Parser<'a> { Ok(ident) } - /// Parses an item-position function declaration. - fn parse_item_fn( - &mut self, - lo: Span, - vis: Visibility, - attrs: Vec<Attribute>, - header: FnHeader, - ) -> PResult<'a, Option<P<Item>>> { - let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { - is_self_allowed: false, - allow_c_variadic: header.abi == Abi::C && header.unsafety == Unsafety::Unsafe, - is_name_required: |_| true, - })?; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - let kind = ItemKind::Fn(decl, header, generics, body); - self.mk_item_with_info(attrs, lo, vis, (ident, kind, Some(inner_attrs))) - } - - /// Parse the "signature", including the identifier, parameters, and generics of a function. - fn parse_fn_sig(&mut self, cfg: ParamCfg) -> PResult<'a, (Ident, P<FnDecl>, Generics)> { - let ident = self.parse_ident()?; - let mut generics = self.parse_generics()?; - let decl = self.parse_fn_decl(cfg, true)?; - generics.where_clause = self.parse_where_clause()?; - Ok((ident, decl, generics)) - } - - /// Parses the parameter list and result type of a function declaration. - pub(super) fn parse_fn_decl( - &mut self, - cfg: ParamCfg, - ret_allow_plus: bool, - ) -> PResult<'a, P<FnDecl>> { - Ok(P(FnDecl { - inputs: self.parse_fn_params(cfg)?, - output: self.parse_ret_ty(ret_allow_plus)?, - })) - } - /// Parses `extern` for foreign ABIs modules. /// /// `extern` is expected to have been @@ -1273,7 +1122,7 @@ impl<'a> Parser<'a> { } /// Parses a foreign item. - crate fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> { + pub fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> { maybe_whole!(self, NtForeignItem, |ni| ni); let attrs = self.parse_outer_attributes()?; @@ -1344,32 +1193,6 @@ impl<'a> Parser<'a> { } } - /// Parses a function declaration from a foreign module. - fn parse_item_foreign_fn( - &mut self, - vis: ast::Visibility, - lo: Span, - attrs: Vec<Attribute>, - extern_sp: Span, - ) -> PResult<'a, ForeignItem> { - self.expect_keyword(kw::Fn)?; - let (ident, decl, generics) = self.parse_fn_sig(super::ParamCfg { - is_self_allowed: false, - allow_c_variadic: true, - is_name_required: |_| true, - })?; - let span = lo.to(self.token.span); - self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?; - Ok(ast::ForeignItem { - ident, - attrs, - kind: ForeignItemKind::Fn(decl, generics), - id: DUMMY_NODE_ID, - span, - vis, - }) - } - /// Parses a static item from a foreign module. /// Assumes that the `static` keyword is already parsed. fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) @@ -1910,3 +1733,466 @@ impl<'a> Parser<'a> { }) } } + +/// The parsing configuration used to parse a parameter list (see `parse_fn_params`). +pub(super) struct ParamCfg { + /// Is `self` is allowed as the first parameter? + pub is_self_allowed: bool, + /// Is `...` allowed as the tail of the parameter list? + pub allow_c_variadic: bool, + /// `is_name_required` decides if, per-parameter, + /// the parameter must have a pattern or just a type. 
+ pub is_name_required: fn(&token::Token) -> bool, +} + +/// Parsing of functions and methods. +impl<'a> Parser<'a> { + /// Parses an item-position function declaration. + fn parse_item_fn( + &mut self, + lo: Span, + vis: Visibility, + attrs: Vec<Attribute>, + header: FnHeader, + ) -> PResult<'a, Option<P<Item>>> { + let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { + is_self_allowed: false, + allow_c_variadic: header.abi == Abi::C && header.unsafety == Unsafety::Unsafe, + is_name_required: |_| true, + })?; + let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; + let kind = ItemKind::Fn(decl, header, generics, body); + self.mk_item_with_info(attrs, lo, vis, (ident, kind, Some(inner_attrs))) + } + + /// Parses a function declaration from a foreign module. + fn parse_item_foreign_fn( + &mut self, + vis: ast::Visibility, + lo: Span, + attrs: Vec<Attribute>, + extern_sp: Span, + ) -> PResult<'a, ForeignItem> { + self.expect_keyword(kw::Fn)?; + let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { + is_self_allowed: false, + allow_c_variadic: true, + is_name_required: |_| true, + })?; + let span = lo.to(self.token.span); + self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?; + Ok(ast::ForeignItem { + ident, + attrs, + kind: ForeignItemKind::Fn(decl, generics), + id: DUMMY_NODE_ID, + span, + vis, + }) + } + + /// Parses a method or a macro invocation in a trait impl. + fn parse_impl_method( + &mut self, + at_end: &mut bool, + ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ImplItemKind)> { + let (ident, sig, generics) = self.parse_method_sig(|_| true)?; + *at_end = true; + let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; + Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body))) + } + + fn parse_trait_item_method( + &mut self, + at_end: &mut bool, + attrs: &mut Vec<Attribute>, + ) -> PResult<'a, (Ident, TraitItemKind, Generics)> { + // This is somewhat dubious; We don't want to allow + // argument names to be left off if there is a definition... + // + // We don't allow argument names to be left off in edition 2018. + let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?; + let body = self.parse_trait_method_body(at_end, attrs)?; + Ok((ident, TraitItemKind::Method(sig, body), generics)) + } + + /// Parse the "body" of a method in a trait item definition. + /// This can either be `;` when there's no body, + /// or e.g. a block when the method is a provided one. + fn parse_trait_method_body( + &mut self, + at_end: &mut bool, + attrs: &mut Vec<Attribute>, + ) -> PResult<'a, Option<P<Block>>> { + Ok(match self.token.kind { + token::Semi => { + debug!("parse_trait_method_body(): parsing required method"); + self.bump(); + *at_end = true; + None + } + token::OpenDelim(token::Brace) => { + debug!("parse_trait_method_body(): parsing provided method"); + *at_end = true; + let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; + attrs.extend(inner_attrs.iter().cloned()); + Some(body) + } + token::Interpolated(ref nt) => { + match **nt { + token::NtBlock(..) => { + *at_end = true; + let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; + attrs.extend(inner_attrs.iter().cloned()); + Some(body) + } + _ => return self.expected_semi_or_open_brace(), + } + } + _ => return self.expected_semi_or_open_brace(), + }) + } + + /// Parse the "signature", including the identifier, parameters, and generics + /// of a method. The body is not parsed as that differs between `trait`s and `impl`s. 
+ fn parse_method_sig( + &mut self, + is_name_required: fn(&token::Token) -> bool, + ) -> PResult<'a, (Ident, MethodSig, Generics)> { + let header = self.parse_fn_front_matter()?; + let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { + is_self_allowed: true, + allow_c_variadic: false, + is_name_required, + })?; + Ok((ident, MethodSig { header, decl }, generics)) + } + + /// Parses all the "front matter" for a `fn` declaration, up to + /// and including the `fn` keyword: + /// + /// - `const fn` + /// - `unsafe fn` + /// - `const unsafe fn` + /// - `extern fn` + /// - etc. + fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> { + let is_const_fn = self.eat_keyword(kw::Const); + let const_span = self.prev_span; + let asyncness = self.parse_asyncness(); + if let IsAsync::Async { .. } = asyncness { + self.ban_async_in_2015(self.prev_span); + } + let asyncness = respan(self.prev_span, asyncness); + let unsafety = self.parse_unsafety(); + let (constness, unsafety, abi) = if is_const_fn { + (respan(const_span, Constness::Const), unsafety, Abi::Rust) + } else { + let abi = self.parse_extern_abi()?; + (respan(self.prev_span, Constness::NotConst), unsafety, abi) + }; + if !self.eat_keyword(kw::Fn) { + // It is possible for `expect_one_of` to recover given the contents of + // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't + // account for this. + if !self.expect_one_of(&[], &[])? { unreachable!() } + } + Ok(FnHeader { constness, unsafety, asyncness, abi }) + } + + /// Parse the "signature", including the identifier, parameters, and generics of a function. + fn parse_fn_sig(&mut self, cfg: ParamCfg) -> PResult<'a, (Ident, P<FnDecl>, Generics)> { + let ident = self.parse_ident()?; + let mut generics = self.parse_generics()?; + let decl = self.parse_fn_decl(cfg, true)?; + generics.where_clause = self.parse_where_clause()?; + Ok((ident, decl, generics)) + } + + /// Parses the parameter list and result type of a function declaration. + pub(super) fn parse_fn_decl( + &mut self, + cfg: ParamCfg, + ret_allow_plus: bool, + ) -> PResult<'a, P<FnDecl>> { + Ok(P(FnDecl { + inputs: self.parse_fn_params(cfg)?, + output: self.parse_ret_ty(ret_allow_plus)?, + })) + } + + /// Parses the parameter list of a function, including the `(` and `)` delimiters. + fn parse_fn_params(&mut self, mut cfg: ParamCfg) -> PResult<'a, Vec<Param>> { + let sp = self.token.span; + let is_trait_item = cfg.is_self_allowed; + let mut c_variadic = false; + // Parse the arguments, starting out with `self` being possibly allowed... + let (params, _) = self.parse_paren_comma_seq(|p| { + let param = p.parse_param_general(&cfg, is_trait_item); + // ...now that we've parsed the first argument, `self` is no longer allowed. + cfg.is_self_allowed = false; + + match param { + Ok(param) => Ok( + if let TyKind::CVarArgs = param.ty.kind { + c_variadic = true; + if p.token != token::CloseDelim(token::Paren) { + p.span_err( + p.token.span, + "`...` must be the last argument of a C-variadic function", + ); + // FIXME(eddyb) this should probably still push `CVarArgs`. + // Maybe AST validation/HIR lowering should emit the above error? + None + } else { + Some(param) + } + } else { + Some(param) + } + ), + Err(mut e) => { + e.emit(); + let lo = p.prev_span; + // Skip every token until next possible arg or end. + p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); + // Create a placeholder argument for proper arg count (issue #34264). 
+ let span = lo.to(p.prev_span);
+ Ok(Some(dummy_arg(Ident::new(kw::Invalid, span))))
+ }
+ }
+ })?;
+
+ let mut params: Vec<_> = params.into_iter().filter_map(|x| x).collect();
+
+ // Replace duplicated recovered params with `_` pattern to avoid unnecessary errors.
+ self.deduplicate_recovered_params_names(&mut params);
+
+ if c_variadic && params.len() <= 1 {
+ self.span_err(
+ sp,
+ "C-variadic function must be declared with at least one named argument",
+ );
+ }
+
+ Ok(params)
+ }
+
+ /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+ /// error.
+ /// This version of parse param doesn't necessarily require identifier names.
+ fn parse_param_general(&mut self, cfg: &ParamCfg, is_trait_item: bool) -> PResult<'a, Param> {
+ let lo = self.token.span;
+ let attrs = self.parse_outer_attributes()?;
+
+ // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
+ if let Some(mut param) = self.parse_self_param()? {
+ param.attrs = attrs.into();
+ return if cfg.is_self_allowed {
+ Ok(param)
+ } else {
+ self.recover_bad_self_param(param, is_trait_item)
+ };
+ }
+
+ let is_name_required = match self.token.kind {
+ token::DotDotDot => false,
+ _ => (cfg.is_name_required)(&self.token),
+ };
+ let (pat, ty) = if is_name_required || self.is_named_param() {
+ debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
+
+ let pat = self.parse_fn_param_pat()?;
+ if let Err(mut err) = self.expect(&token::Colon) {
+ return if let Some(ident) = self.parameter_without_type(
+ &mut err,
+ pat,
+ is_name_required,
+ cfg.is_self_allowed,
+ is_trait_item,
+ ) {
+ err.emit();
+ Ok(dummy_arg(ident))
+ } else {
+ Err(err)
+ };
+ }
+
+ self.eat_incorrect_doc_comment_for_param_type();
+ (pat, self.parse_ty_common(true, true, cfg.allow_c_variadic)?)
+ } else {
+ debug!("parse_param_general ident_to_pat");
+ let parser_snapshot_before_ty = self.clone();
+ self.eat_incorrect_doc_comment_for_param_type();
+ let mut ty = self.parse_ty_common(true, true, cfg.allow_c_variadic);
+ if ty.is_ok() && self.token != token::Comma &&
+ self.token != token::CloseDelim(token::Paren) {
+ // This wasn't actually a type, but a pattern looking like a type,
+ // so we are going to roll back and re-parse for recovery.
+ ty = self.unexpected();
+ }
+ match ty {
+ Ok(ty) => {
+ let ident = Ident::new(kw::Invalid, self.prev_span);
+ let bm = BindingMode::ByValue(Mutability::Immutable);
+ let pat = self.mk_pat_ident(ty.span, bm, ident);
+ (pat, ty)
+ }
+ // If this is a C-variadic argument and we hit an error, return the error.
+ Err(err) if self.token == token::DotDotDot => return Err(err),
+ // Recover from attempting to parse the argument as a type without pattern.
+ Err(mut err) => {
+ err.cancel();
+ mem::replace(self, parser_snapshot_before_ty);
+ self.recover_arg_parse()?
+ }
+ }
+ };
+
+ let span = lo.to(self.token.span);
+
+ Ok(Param {
+ attrs: attrs.into(),
+ id: ast::DUMMY_NODE_ID,
+ is_placeholder: false,
+ pat,
+ span,
+ ty,
+ })
+ }
+
+ /// Returns the parsed optional self parameter and whether a self shortcut was used.
+ ///
+ /// See `parse_self_param_with_attrs` to collect attributes.
+ fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
+ // Extract an identifier *after* having confirmed that the token is one.
+ let expect_self_ident = |this: &mut Self| {
+ match this.token.kind {
+ // Preserve hygienic context. 
+ token::Ident(name, _) => {
+ let span = this.token.span;
+ this.bump();
+ Ident::new(name, span)
+ }
+ _ => unreachable!(),
+ }
+ };
+ // Is `self` `n` tokens ahead?
+ let is_isolated_self = |this: &Self, n| {
+ this.is_keyword_ahead(n, &[kw::SelfLower])
+ && this.look_ahead(n + 1, |t| t != &token::ModSep)
+ };
+ // Is `mut self` `n` tokens ahead?
+ let is_isolated_mut_self = |this: &Self, n| {
+ this.is_keyword_ahead(n, &[kw::Mut])
+ && is_isolated_self(this, n + 1)
+ };
+ // Parse `self` or `self: TYPE`. We already know the current token is `self`.
+ let parse_self_possibly_typed = |this: &mut Self, m| {
+ let eself_ident = expect_self_ident(this);
+ let eself_hi = this.prev_span;
+ let eself = if this.eat(&token::Colon) {
+ SelfKind::Explicit(this.parse_ty()?, m)
+ } else {
+ SelfKind::Value(m)
+ };
+ Ok((eself, eself_ident, eself_hi))
+ };
+ // Recover for the grammar `*self`, `*const self`, and `*mut self`.
+ let recover_self_ptr = |this: &mut Self| {
+ let msg = "cannot pass `self` by raw pointer";
+ let span = this.token.span;
+ this.struct_span_err(span, msg)
+ .span_label(span, msg)
+ .emit();
+
+ Ok((SelfKind::Value(Mutability::Immutable), expect_self_ident(this), this.prev_span))
+ };
+
+ // Parse optional `self` parameter of a method.
+ // Only a limited set of initial token sequences is considered `self` parameters; anything
+ // else is parsed as a normal function parameter list, so some lookahead is required.
+ let eself_lo = self.token.span;
+ let (eself, eself_ident, eself_hi) = match self.token.kind {
+ token::BinOp(token::And) => {
+ let eself = if is_isolated_self(self, 1) {
+ // `&self`
+ self.bump();
+ SelfKind::Region(None, Mutability::Immutable)
+ } else if is_isolated_mut_self(self, 1) {
+ // `&mut self`
+ self.bump();
+ self.bump();
+ SelfKind::Region(None, Mutability::Mutable)
+ } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
+ // `&'lt self`
+ self.bump();
+ let lt = self.expect_lifetime();
+ SelfKind::Region(Some(lt), Mutability::Immutable)
+ } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
+ // `&'lt mut self`
+ self.bump();
+ let lt = self.expect_lifetime();
+ self.bump();
+ SelfKind::Region(Some(lt), Mutability::Mutable)
+ } else {
+ // `&not_self`
+ return Ok(None);
+ };
+ (eself, expect_self_ident(self), self.prev_span)
+ }
+ // `*self`
+ token::BinOp(token::Star) if is_isolated_self(self, 1) => {
+ self.bump();
+ recover_self_ptr(self)?
+ }
+ // `*mut self` and `*const self`
+ token::BinOp(token::Star) if
+ self.look_ahead(1, |t| t.is_mutability())
+ && is_isolated_self(self, 2) =>
+ {
+ self.bump();
+ self.bump();
+ recover_self_ptr(self)?
+ }
+ // `self` and `self: TYPE`
+ token::Ident(..) if is_isolated_self(self, 0) => {
+ parse_self_possibly_typed(self, Mutability::Immutable)?
+ }
+ // `mut self` and `mut self: TYPE`
+ token::Ident(..) if is_isolated_mut_self(self, 0) => {
+ self.bump();
+ parse_self_possibly_typed(self, Mutability::Mutable)?
+ }
+ _ => return Ok(None),
+ };
+
+ let eself = source_map::respan(eself_lo.to(eself_hi), eself);
+ Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident)))
+ }
+
+ fn is_named_param(&self) -> bool {
+ let offset = match self.token.kind {
+ token::Interpolated(ref nt) => match **nt {
+ token::NtPat(..) 
=> return self.look_ahead(1, |t| t == &token::Colon), + _ => 0, + } + token::BinOp(token::And) | token::AndAnd => 1, + _ if self.token.is_keyword(kw::Mut) => 1, + _ => 0, + }; + + self.look_ahead(offset, |t| t.is_ident()) && + self.look_ahead(offset + 1, |t| t == &token::Colon) + } + + fn recover_first_param(&mut self) -> &'static str { + match self.parse_outer_attributes() + .and_then(|_| self.parse_self_param()) + .map_err(|mut e| e.cancel()) + { + Ok(Some(_)) => "method", + _ => "function", + } + } +} diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs index 2d2fb487d7d..a0e4d2bbb7a 100644 --- a/src/libsyntax/parse/parser/module.rs +++ b/src/libsyntax/parse/parser/module.rs @@ -1,24 +1,24 @@ use super::{Parser, PResult}; use super::item::ItemInfo; +use super::diagnostics::Error; use crate::attr; use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate}; use crate::parse::{new_sub_parser_from_file, DirectoryOwnership}; use crate::parse::token::{self, TokenKind}; -use crate::parse::diagnostics::{Error}; use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName}; use crate::symbol::sym; use std::path::{self, Path, PathBuf}; /// Information about the path to a module. -pub struct ModulePath { +pub(super) struct ModulePath { name: String, path_exists: bool, pub result: Result<ModulePathSuccess, Error>, } -pub struct ModulePathSuccess { +pub(super) struct ModulePathSuccess { pub path: PathBuf, pub directory_ownership: DirectoryOwnership, warn: bool, @@ -39,6 +39,8 @@ impl<'a> Parser<'a> { /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> { let (in_cfg, outer_attrs) = { + // FIXME(Centril): This results in a cycle between config and parsing. + // Consider using dynamic dispatch via `self.sess` to disentangle the knot. let mut strip_unconfigured = crate::config::StripUnconfigured { sess: self.sess, features: None, // Don't perform gated feature checking. @@ -198,7 +200,7 @@ impl<'a> Parser<'a> { } } - pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { + pub(super) fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) { let s = s.as_str(); @@ -215,7 +217,7 @@ impl<'a> Parser<'a> { } /// Returns a path to a module. - pub fn default_submod_path( + pub(super) fn default_submod_path( id: ast::Ident, relative: Option<ast::Ident>, dir_path: &Path, diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 48f9e301610..af795e51792 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -22,7 +22,7 @@ const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here" /// Whether or not an or-pattern should be gated when occurring in the current context. #[derive(PartialEq)] -pub enum GateOr { Yes, No } +pub(super) enum GateOr { Yes, No } /// Whether or not to recover a `,` when parsing or-patterns. #[derive(PartialEq, Copy, Clone)] @@ -367,6 +367,7 @@ impl<'a> Parser<'a> { let pat = self.mk_pat(lo.to(self.prev_span), pat); let pat = self.maybe_recover_from_bad_qpath(pat, true)?; + let pat = self.recover_intersection_pat(pat)?; if !allow_range_pat { self.ban_pat_range_if_ambiguous(&pat)? @@ -375,6 +376,65 @@ impl<'a> Parser<'a> { Ok(pat) } + /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`. 
+ /// + /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs` + /// should already have been parsed by now at this point, + /// if the next token is `@` then we can try to parse the more general form. + /// + /// Consult `parse_pat_ident` for the `binding` grammar. + /// + /// The notion of intersection patterns are found in + /// e.g. [F#][and] where they are called AND-patterns. + /// + /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching + fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> { + if self.token.kind != token::At { + // Next token is not `@` so it's not going to be an intersection pattern. + return Ok(lhs); + } + + // At this point we attempt to parse `@ $pat_rhs` and emit an error. + self.bump(); // `@` + let mut rhs = self.parse_pat(None)?; + let sp = lhs.span.to(rhs.span); + + if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind { + // The user inverted the order, so help them fix that. + let mut applicability = Applicability::MachineApplicable; + lhs.walk(&mut |p| match p.kind { + // `check_match` is unhappy if the subpattern has a binding anywhere. + PatKind::Ident(..) => { + applicability = Applicability::MaybeIncorrect; + false // Short-circuit. + }, + _ => true, + }); + + let lhs_span = lhs.span; + // Move the LHS into the RHS as a subpattern. + // The RHS is now the full pattern. + *sub = Some(lhs); + + self.struct_span_err(sp, "pattern on wrong side of `@`") + .span_label(lhs_span, "pattern on the left, should be on the right") + .span_label(rhs.span, "binding on the right, should be on the left") + .span_suggestion(sp, "switch the order", pprust::pat_to_string(&rhs), applicability) + .emit(); + } else { + // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`. + rhs.kind = PatKind::Wild; + self.struct_span_err(sp, "left-hand side of `@` must be a binding") + .span_label(lhs.span, "interpreted as a pattern, not a binding") + .span_label(rhs.span, "also a pattern") + .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`") + .emit(); + } + + rhs.span = sp; + Ok(rhs) + } + /// Ban a range pattern if it has an ambiguous interpretation. fn ban_pat_range_if_ambiguous(&self, pat: &Pat) -> PResult<'a, ()> { match pat.kind { diff --git a/src/libsyntax/parse/parser/path.rs b/src/libsyntax/parse/parser/path.rs index ca823991a2e..639d61a2b5c 100644 --- a/src/libsyntax/parse/parser/path.rs +++ b/src/libsyntax/parse/parser/path.rs @@ -111,7 +111,7 @@ impl<'a> Parser<'a> { /// Like `parse_path`, but also supports parsing `Word` meta items into paths for /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` /// attributes. - pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> { + fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> { let meta_ident = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref item) => match item.tokens.is_empty() { @@ -129,7 +129,22 @@ impl<'a> Parser<'a> { self.parse_path(style) } - crate fn parse_path_segments( + /// Parse a list of paths inside `#[derive(path_0, ..., path_n)]`. 
+ crate fn parse_derive_paths(&mut self) -> PResult<'a, Vec<Path>> { + self.expect(&token::OpenDelim(token::Paren))?; + let mut list = Vec::new(); + while !self.eat(&token::CloseDelim(token::Paren)) { + let path = self.parse_path_allowing_meta(PathStyle::Mod)?; + list.push(path); + if !self.eat(&token::Comma) { + self.expect(&token::CloseDelim(token::Paren))?; + break + } + } + Ok(list) + } + + pub(super) fn parse_path_segments( &mut self, segments: &mut Vec<PathSegment>, style: PathStyle, diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index 855b03ddd6f..d54d9c4b8e9 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -2,14 +2,13 @@ use super::{Parser, PResult, Restrictions, PrevTokenKind, SemiColonMode, BlockMo use super::expr::LhsExpr; use super::path::PathStyle; use super::pat::GateOr; +use super::diagnostics::Error; use crate::ptr::P; use crate::{maybe_whole, ThinVec}; use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind}; use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter}; -use crate::ext::base::DummyResult; use crate::parse::{classify, DirectoryOwnership}; -use crate::parse::diagnostics::Error; use crate::parse::token; use crate::source_map::{respan, Span}; use crate::symbol::{kw, sym}; @@ -373,7 +372,9 @@ impl<'a> Parser<'a> { } /// Parses a block. Inner attributes are allowed. - crate fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> { + pub(super) fn parse_inner_attrs_and_block( + &mut self + ) -> PResult<'a, (Vec<Attribute>, P<Block>)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); let lo = self.token.span; @@ -400,7 +401,7 @@ impl<'a> Parser<'a> { self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); Some(Stmt { id: DUMMY_NODE_ID, - kind: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)), + kind: StmtKind::Expr(self.mk_expr_err(self.token.span)), span: self.token.span, }) } @@ -422,7 +423,7 @@ impl<'a> Parser<'a> { } /// Parses a statement, including the trailing semicolon. - crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { + pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { // Skip looking for a trailing semicolon when we have an interpolated statement. maybe_whole!(self, NtStmt, |x| Some(x)); @@ -443,7 +444,7 @@ impl<'a> Parser<'a> { self.recover_stmt(); // Don't complain about type errors in body tail after parse error (#57383). let sp = expr.span.to(self.prev_span); - stmt.kind = StmtKind::Expr(DummyResult::raw_expr(sp, true)); + stmt.kind = StmtKind::Expr(self.mk_expr_err(sp)); } } } diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs index 018b5951e6e..86c94b680b2 100644 --- a/src/libsyntax/parse/parser/ty.rs +++ b/src/libsyntax/parse/parser/ty.rs @@ -1,4 +1,5 @@ use super::{Parser, PResult, PathStyle, PrevTokenKind, TokenType}; +use super::item::ParamCfg; use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath}; use crate::ptr::P; @@ -209,7 +210,7 @@ impl<'a> Parser<'a> { }; let span = lo.to(self.prev_span); - let ty = P(Ty { kind, span, id: ast::DUMMY_NODE_ID }); + let ty = self.mk_ty(span, kind); // Try to recover from use of `+` with incorrect priority. 
self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty); @@ -281,7 +282,7 @@ impl<'a> Parser<'a> { let unsafety = self.parse_unsafety(); let abi = self.parse_extern_abi()?; self.expect_keyword(kw::Fn)?; - let cfg = super::ParamCfg { + let cfg = ParamCfg { is_self_allowed: false, allow_c_variadic: true, is_name_required: |_| false, @@ -295,7 +296,7 @@ impl<'a> Parser<'a> { }))) } - crate fn parse_generic_bounds(&mut self, + pub(super) fn parse_generic_bounds(&mut self, colon_span: Option<Span>) -> PResult<'a, GenericBounds> { self.parse_generic_bounds_common(true, colon_span) } @@ -432,13 +433,13 @@ impl<'a> Parser<'a> { } } - crate fn check_lifetime(&mut self) -> bool { + pub fn check_lifetime(&mut self) -> bool { self.expected_tokens.push(TokenType::Lifetime); self.token.is_lifetime() } /// Parses a single lifetime `'a` or panics. - crate fn expect_lifetime(&mut self) -> Lifetime { + pub fn expect_lifetime(&mut self) -> Lifetime { if let Some(ident) = self.token.lifetime() { let span = self.token.span; self.bump(); @@ -447,4 +448,8 @@ impl<'a> Parser<'a> { self.span_bug(self.token.span, "not a lifetime") } } + + pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> { + P(Ty { kind, span, id: ast::DUMMY_NODE_ID }) + } } |
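As a closing note on the literal-suffix diagnostics added in expr.rs above: the width heuristic used by `error_literal_from_token` is small enough to read in isolation. The sketch below reproduces `looks_like_width_suffix` as it appears in the patch; the `main` driver and its sample suffixes are illustrative additions, not part of the patch.

fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
    // A "width-like" suffix is one of the given leading characters followed only by
    // ASCII digits, e.g. `i32` or `u1234`, but not `usize` or `f32x4`.
    s.len() > 1
        && s.starts_with(first_chars)
        && s[1..].chars().all(|c| c.is_ascii_digit())
}

fn main() {
    assert!(looks_like_width_suffix(&['i', 'u'], "u8"));
    assert!(looks_like_width_suffix(&['i', 'u'], "i1234")); // width-like even though `i1234` is not a real type
    assert!(!looks_like_width_suffix(&['i', 'u'], "usize")); // `size` is not all digits
    assert!(!looks_like_width_suffix(&['f'], "f32x4"));
}

With `&['i', 'u']` this check drives the "invalid width `{}` for integer literal" help, and with `&['f']` the float variant, matching the two call sites in the patch.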
