Diffstat (limited to 'src/libsyntax/parse/parser')
| Mode       | File                                      | Deletions |
|------------|-------------------------------------------|-----------|
| -rw-r--r-- | src/libsyntax/parse/parser/attr.rs        | 358       |
| -rw-r--r-- | src/libsyntax/parse/parser/diagnostics.rs | 1547      |
| -rw-r--r-- | src/libsyntax/parse/parser/expr.rs        | 1964      |
| -rw-r--r-- | src/libsyntax/parse/parser/generics.rs    | 309       |
| -rw-r--r-- | src/libsyntax/parse/parser/item.rs        | 2237      |
| -rw-r--r-- | src/libsyntax/parse/parser/mod.rs         | 1391      |
| -rw-r--r-- | src/libsyntax/parse/parser/module.rs      | 315       |
| -rw-r--r-- | src/libsyntax/parse/parser/pat.rs         | 1016      |
| -rw-r--r-- | src/libsyntax/parse/parser/path.rs        | 497       |
| -rw-r--r-- | src/libsyntax/parse/parser/stmt.rs        | 480       |
| -rw-r--r-- | src/libsyntax/parse/parser/ty.rs          | 458       |
11 files changed, 0 insertions(+), 10572 deletions(-)
diff --git a/src/libsyntax/parse/parser/attr.rs b/src/libsyntax/parse/parser/attr.rs deleted file mode 100644 index 0f9e573af82..00000000000 --- a/src/libsyntax/parse/parser/attr.rs +++ /dev/null @@ -1,358 +0,0 @@ -use super::{SeqSep, Parser, TokenType, PathStyle}; -use crate::attr; -use crate::ast; -use crate::util::comments; -use crate::token::{self, Nonterminal, DelimToken}; -use crate::tokenstream::{TokenStream, TokenTree}; -use crate::source_map::Span; - -use syntax_pos::Symbol; -use errors::PResult; - -use log::debug; - -#[derive(Debug)] -enum InnerAttributeParsePolicy<'a> { - Permitted, - NotPermitted { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> }, -} - -const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ - permitted in this context"; - -impl<'a> Parser<'a> { - /// Parses attributes that appear before an item. - pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { - let mut attrs: Vec<ast::Attribute> = Vec::new(); - let mut just_parsed_doc_comment = false; - loop { - debug!("parse_outer_attributes: self.token={:?}", self.token); - match self.token.kind { - token::Pound => { - let inner_error_reason = if just_parsed_doc_comment { - "an inner attribute is not permitted following an outer doc comment" - } else if !attrs.is_empty() { - "an inner attribute is not permitted following an outer attribute" - } else { - DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG - }; - let inner_parse_policy = - InnerAttributeParsePolicy::NotPermitted { - reason: inner_error_reason, - saw_doc_comment: just_parsed_doc_comment, - prev_attr_sp: attrs.last().and_then(|a| Some(a.span)) - }; - let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; - attrs.push(attr); - just_parsed_doc_comment = false; - } - token::DocComment(s) => { - let attr = self.mk_doc_comment(s); - if attr.style != ast::AttrStyle::Outer { - let mut err = self.fatal("expected outer doc comment"); - err.note("inner doc comments like this (starting with \ - `//!` or `/*!`) can only appear before items"); - return Err(err); - } - attrs.push(attr); - self.bump(); - just_parsed_doc_comment = true; - } - _ => break, - } - } - Ok(attrs) - } - - fn mk_doc_comment(&self, s: Symbol) -> ast::Attribute { - let style = comments::doc_comment_style(&s.as_str()); - attr::mk_doc_comment(style, s, self.token.span) - } - - /// Matches `attribute = # ! [ meta_item ]`. - /// - /// If `permit_inner` is `true`, then a leading `!` indicates an inner - /// attribute. - pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> { - debug!("parse_attribute: permit_inner={:?} self.token={:?}", - permit_inner, - self.token); - let inner_parse_policy = if permit_inner { - InnerAttributeParsePolicy::Permitted - } else { - InnerAttributeParsePolicy::NotPermitted { - reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG, - saw_doc_comment: false, - prev_attr_sp: None - } - }; - self.parse_attribute_with_inner_parse_policy(inner_parse_policy) - } - - /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy` - /// that prescribes how to handle inner attributes. 
- fn parse_attribute_with_inner_parse_policy( - &mut self, - inner_parse_policy: InnerAttributeParsePolicy<'_> - ) -> PResult<'a, ast::Attribute> { - debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", - inner_parse_policy, - self.token); - let (span, item, style) = match self.token.kind { - token::Pound => { - let lo = self.token.span; - self.bump(); - - if let InnerAttributeParsePolicy::Permitted = inner_parse_policy { - self.expected_tokens.push(TokenType::Token(token::Not)); - } - - let style = if self.token == token::Not { - self.bump(); - ast::AttrStyle::Inner - } else { - ast::AttrStyle::Outer - }; - - self.expect(&token::OpenDelim(token::Bracket))?; - let item = self.parse_attr_item()?; - self.expect(&token::CloseDelim(token::Bracket))?; - let hi = self.prev_span; - - let attr_sp = lo.to(hi); - - // Emit error if inner attribute is encountered and not permitted - if style == ast::AttrStyle::Inner { - if let InnerAttributeParsePolicy::NotPermitted { reason, - saw_doc_comment, prev_attr_sp } = inner_parse_policy { - let prev_attr_note = if saw_doc_comment { - "previous doc comment" - } else { - "previous outer attribute" - }; - - let mut diagnostic = self - .diagnostic() - .struct_span_err(attr_sp, reason); - - if let Some(prev_attr_sp) = prev_attr_sp { - diagnostic - .span_label(attr_sp, "not permitted following an outer attibute") - .span_label(prev_attr_sp, prev_attr_note); - } - - diagnostic - .note("inner attributes, like `#![no_std]`, annotate the item \ - enclosing them, and are usually found at the beginning of \ - source files. Outer attributes, like `#[test]`, annotate the \ - item following them.") - .emit() - } - } - - (attr_sp, item, style) - } - _ => { - let token_str = self.this_token_to_string(); - return Err(self.fatal(&format!("expected `#`, found `{}`", token_str))); - } - }; - - Ok(ast::Attribute { - kind: ast::AttrKind::Normal(item), - id: attr::mk_attr_id(), - style, - span, - }) - } - - /// Parses an inner part of an attribute (the path and following tokens). - /// The tokens must be either a delimited token stream, or empty token stream, - /// or the "legacy" key-value form. - /// PATH `(` TOKEN_STREAM `)` - /// PATH `[` TOKEN_STREAM `]` - /// PATH `{` TOKEN_STREAM `}` - /// PATH - /// PATH `=` UNSUFFIXED_LIT - /// The delimiters or `=` are still put into the resulting token stream. - pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { - let item = match self.token.kind { - token::Interpolated(ref nt) => match **nt { - Nonterminal::NtMeta(ref item) => Some(item.clone()), - _ => None, - }, - _ => None, - }; - Ok(if let Some(item) = item { - self.bump(); - item - } else { - let path = self.parse_path(PathStyle::Mod)?; - let tokens = if self.check(&token::OpenDelim(DelimToken::Paren)) || - self.check(&token::OpenDelim(DelimToken::Bracket)) || - self.check(&token::OpenDelim(DelimToken::Brace)) { - self.parse_token_tree().into() - } else if self.eat(&token::Eq) { - let eq = TokenTree::token(token::Eq, self.prev_span); - let mut is_interpolated_expr = false; - if let token::Interpolated(nt) = &self.token.kind { - if let token::NtExpr(..) = **nt { - is_interpolated_expr = true; - } - } - let token_tree = if is_interpolated_expr { - // We need to accept arbitrary interpolated expressions to continue - // supporting things like `doc = $expr` that work on stable. - // Non-literal interpolated expressions are rejected after expansion. 
- self.parse_token_tree() - } else { - self.parse_unsuffixed_lit()?.token_tree() - }; - TokenStream::new(vec![eq.into(), token_tree.into()]) - } else { - TokenStream::default() - }; - ast::AttrItem { path, tokens } - }) - } - - /// Parses attributes that appear after the opening of an item. These should - /// be preceded by an exclamation mark, but we accept and warn about one - /// terminated by a semicolon. - /// - /// Matches `inner_attrs*`. - crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { - let mut attrs: Vec<ast::Attribute> = vec![]; - loop { - match self.token.kind { - token::Pound => { - // Don't even try to parse if it's not an inner attribute. - if !self.look_ahead(1, |t| t == &token::Not) { - break; - } - - let attr = self.parse_attribute(true)?; - assert_eq!(attr.style, ast::AttrStyle::Inner); - attrs.push(attr); - } - token::DocComment(s) => { - // We need to get the position of this token before we bump. - let attr = self.mk_doc_comment(s); - if attr.style == ast::AttrStyle::Inner { - attrs.push(attr); - self.bump(); - } else { - break; - } - } - _ => break, - } - } - Ok(attrs) - } - - fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> { - let lit = self.parse_lit()?; - debug!("checking if {:?} is unusuffixed", lit); - - if !lit.kind.is_unsuffixed() { - let msg = "suffixed literals are not allowed in attributes"; - self.diagnostic().struct_span_err(lit.span, msg) - .help("instead of using a suffixed literal \ - (1u8, 1.0f32, etc.), use an unsuffixed version \ - (1, 1.0, etc.).") - .emit() - } - - Ok(lit) - } - - /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited. - pub fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> { - self.expect(&token::OpenDelim(token::Paren))?; - - let cfg_predicate = self.parse_meta_item()?; - self.expect(&token::Comma)?; - - // Presumably, the majority of the time there will only be one attr. - let mut expanded_attrs = Vec::with_capacity(1); - - while !self.check(&token::CloseDelim(token::Paren)) { - let lo = self.token.span.lo(); - let item = self.parse_attr_item()?; - expanded_attrs.push((item, self.prev_span.with_lo(lo))); - self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; - } - - self.expect(&token::CloseDelim(token::Paren))?; - Ok((cfg_predicate, expanded_attrs)) - } - - /// Matches the following grammar (per RFC 1559). - /// - /// meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; - /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; - pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { - let nt_meta = match self.token.kind { - token::Interpolated(ref nt) => match **nt { - token::NtMeta(ref e) => Some(e.clone()), - _ => None, - }, - _ => None, - }; - - if let Some(item) = nt_meta { - return match item.meta(item.path.span) { - Some(meta) => { - self.bump(); - Ok(meta) - } - None => self.unexpected(), - } - } - - let lo = self.token.span; - let path = self.parse_path(PathStyle::Mod)?; - let kind = self.parse_meta_item_kind()?; - let span = lo.to(self.prev_span); - Ok(ast::MetaItem { path, kind, span }) - } - - crate fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { - Ok(if self.eat(&token::Eq) { - ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) - } else if self.eat(&token::OpenDelim(token::Paren)) { - ast::MetaItemKind::List(self.parse_meta_seq()?) 
- } else { - ast::MetaItemKind::Word - }) - } - - /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`. - fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> { - match self.parse_unsuffixed_lit() { - Ok(lit) => { - return Ok(ast::NestedMetaItem::Literal(lit)) - } - Err(ref mut err) => err.cancel(), - } - - match self.parse_meta_item() { - Ok(mi) => { - return Ok(ast::NestedMetaItem::MetaItem(mi)) - } - Err(ref mut err) => err.cancel(), - } - - let found = self.this_token_to_string(); - let msg = format!("expected unsuffixed literal or identifier, found `{}`", found); - Err(self.diagnostic().struct_span_err(self.token.span, &msg)) - } - - /// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`. - fn parse_meta_seq(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> { - self.parse_seq_to_end(&token::CloseDelim(token::Paren), - SeqSep::trailing_allowed(token::Comma), - |p: &mut Parser<'a>| p.parse_meta_item_inner()) - } -} diff --git a/src/libsyntax/parse/parser/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs deleted file mode 100644 index 5df24804a76..00000000000 --- a/src/libsyntax/parse/parser/diagnostics.rs +++ /dev/null @@ -1,1547 +0,0 @@ -use super::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, SeqSep, Parser}; -use crate::ast::{ - self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, - Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, -}; -use crate::token::{self, TokenKind, token_can_begin_expr}; -use crate::print::pprust; -use crate::ptr::P; -use crate::symbol::{kw, sym}; -use crate::ThinVec; -use crate::util::parser::AssocOp; - -use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, pluralize}; -use rustc_data_structures::fx::FxHashSet; -use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError}; -use log::{debug, trace}; -use std::mem; - -const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments"; - -/// Creates a placeholder argument. 
-pub(super) fn dummy_arg(ident: Ident) -> Param { - let pat = P(Pat { - id: ast::DUMMY_NODE_ID, - kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), - span: ident.span, - }); - let ty = Ty { - kind: TyKind::Err, - span: ident.span, - id: ast::DUMMY_NODE_ID - }; - Param { - attrs: ThinVec::default(), - id: ast::DUMMY_NODE_ID, - pat, - span: ident.span, - ty: P(ty), - is_placeholder: false, - } -} - -pub enum Error { - FileNotFoundForModule { - mod_name: String, - default_path: String, - secondary_path: String, - dir_path: String, - }, - DuplicatePaths { - mod_name: String, - default_path: String, - secondary_path: String, - }, - UselessDocComment, - InclusiveRangeWithNoEnd, -} - -impl Error { - fn span_err<S: Into<MultiSpan>>( - self, - sp: S, - handler: &errors::Handler, - ) -> DiagnosticBuilder<'_> { - match self { - Error::FileNotFoundForModule { - ref mod_name, - ref default_path, - ref secondary_path, - ref dir_path, - } => { - let mut err = struct_span_err!( - handler, - sp, - E0583, - "file not found for module `{}`", - mod_name, - ); - err.help(&format!( - "name the file either {} or {} inside the directory \"{}\"", - default_path, - secondary_path, - dir_path, - )); - err - } - Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { - let mut err = struct_span_err!( - handler, - sp, - E0584, - "file for module `{}` found at both {} and {}", - mod_name, - default_path, - secondary_path, - ); - err.help("delete or rename one of them to remove the ambiguity"); - err - } - Error::UselessDocComment => { - let mut err = struct_span_err!( - handler, - sp, - E0585, - "found a documentation comment that doesn't document anything", - ); - err.help("doc comments must come before what they document, maybe a comment was \ - intended with `//`?"); - err - } - Error::InclusiveRangeWithNoEnd => { - let mut err = struct_span_err!( - handler, - sp, - E0586, - "inclusive range with no end", - ); - err.help("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)"); - err - } - } - } -} - -pub(super) trait RecoverQPath: Sized + 'static { - const PATH_STYLE: PathStyle = PathStyle::Expr; - fn to_ty(&self) -> Option<P<Ty>>; - fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self; -} - -impl RecoverQPath for Ty { - const PATH_STYLE: PathStyle = PathStyle::Type; - fn to_ty(&self) -> Option<P<Ty>> { - Some(P(self.clone())) - } - fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self { - Self { - span: path.span, - kind: TyKind::Path(qself, path), - id: ast::DUMMY_NODE_ID, - } - } -} - -impl RecoverQPath for Pat { - fn to_ty(&self) -> Option<P<Ty>> { - self.to_ty() - } - fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self { - Self { - span: path.span, - kind: PatKind::Path(qself, path), - id: ast::DUMMY_NODE_ID, - } - } -} - -impl RecoverQPath for Expr { - fn to_ty(&self) -> Option<P<Ty>> { - self.to_ty() - } - fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self { - Self { - span: path.span, - kind: ExprKind::Path(qself, path), - attrs: ThinVec::new(), - id: ast::DUMMY_NODE_ID, - } - } -} - -/// Control whether the closing delimiter should be consumed when calling `Parser::consume_block`. 
-crate enum ConsumeClosingDelim { - Yes, - No, -} - -impl<'a> Parser<'a> { - pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { - self.span_fatal(self.token.span, m) - } - - crate fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { - self.sess.span_diagnostic.struct_span_fatal(sp, m) - } - - pub(super) fn span_fatal_err<S: Into<MultiSpan>>( - &self, - sp: S, - err: Error, - ) -> DiagnosticBuilder<'a> { - err.span_err(sp, self.diagnostic()) - } - - pub(super) fn bug(&self, m: &str) -> ! { - self.sess.span_diagnostic.span_bug(self.token.span, m) - } - - pub(super) fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { - self.sess.span_diagnostic.span_err(sp, m) - } - - pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { - self.sess.span_diagnostic.struct_span_err(sp, m) - } - - pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! { - self.sess.span_diagnostic.span_bug(sp, m) - } - - pub(super) fn diagnostic(&self) -> &'a errors::Handler { - &self.sess.span_diagnostic - } - - pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> { - self.sess.source_map().span_to_snippet(span) - } - - pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { - let mut err = self.struct_span_err( - self.token.span, - &format!("expected identifier, found {}", self.this_token_descr()), - ); - if let token::Ident(name, false) = self.token.kind { - if Ident::new(name, self.token.span).is_raw_guess() { - err.span_suggestion( - self.token.span, - "you can escape reserved keywords to use them as identifiers", - format!("r#{}", name), - Applicability::MaybeIncorrect, - ); - } - } - if let Some(token_descr) = self.token_descr() { - err.span_label(self.token.span, format!("expected identifier, found {}", token_descr)); - } else { - err.span_label(self.token.span, "expected identifier"); - if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { - err.span_suggestion( - self.token.span, - "remove this comma", - String::new(), - Applicability::MachineApplicable, - ); - } - } - err - } - - pub(super) fn expected_one_of_not_found( - &mut self, - edible: &[TokenKind], - inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { - fn tokens_to_string(tokens: &[TokenType]) -> String { - let mut i = tokens.iter(); - // This might be a sign we need a connect method on `Iterator`. 
- let b = i.next() - .map_or(String::new(), |t| t.to_string()); - i.enumerate().fold(b, |mut b, (i, a)| { - if tokens.len() > 2 && i == tokens.len() - 2 { - b.push_str(", or "); - } else if tokens.len() == 2 && i == tokens.len() - 2 { - b.push_str(" or "); - } else { - b.push_str(", "); - } - b.push_str(&a.to_string()); - b - }) - } - - let mut expected = edible.iter() - .map(|x| TokenType::Token(x.clone())) - .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) - .chain(self.expected_tokens.iter().cloned()) - .collect::<Vec<_>>(); - expected.sort_by_cached_key(|x| x.to_string()); - expected.dedup(); - let expect = tokens_to_string(&expected[..]); - let actual = self.this_token_descr(); - let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { - let short_expect = if expected.len() > 6 { - format!("{} possible tokens", expected.len()) - } else { - expect.clone() - }; - (format!("expected one of {}, found {}", expect, actual), - (self.sess.source_map().next_point(self.prev_span), - format!("expected one of {}", short_expect))) - } else if expected.is_empty() { - (format!("unexpected token: {}", actual), - (self.prev_span, "unexpected token after this".to_string())) - } else { - (format!("expected {}, found {}", expect, actual), - (self.sess.source_map().next_point(self.prev_span), - format!("expected {}", expect))) - }; - self.last_unexpected_token_span = Some(self.token.span); - let mut err = self.fatal(&msg_exp); - if self.token.is_ident_named(sym::and) { - err.span_suggestion_short( - self.token.span, - "use `&&` instead of `and` for the boolean operator", - "&&".to_string(), - Applicability::MaybeIncorrect, - ); - } - if self.token.is_ident_named(sym::or) { - err.span_suggestion_short( - self.token.span, - "use `||` instead of `or` for the boolean operator", - "||".to_string(), - Applicability::MaybeIncorrect, - ); - } - let sp = if self.token == token::Eof { - // This is EOF; don't want to point at the following char, but rather the last token. - self.prev_span - } else { - label_sp - }; - match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { - TokenType::Token(t) => Some(t.clone()), - _ => None, - }).collect::<Vec<_>>(), err) { - Err(e) => err = e, - Ok(recovered) => { - return Ok(recovered); - } - } - - let sm = self.sess.source_map(); - if self.prev_span == DUMMY_SP { - // Account for macro context where the previous span might not be - // available to avoid incorrect output (#54841). 
- err.span_label(self.token.span, label_exp); - } else if !sm.is_multiline(self.token.span.shrink_to_hi().until(sp.shrink_to_lo())) { - // When the spans are in the same line, it means that the only content between - // them is whitespace, point at the found token in that case: - // - // X | () => { syntax error }; - // | ^^^^^ expected one of 8 possible tokens here - // - // instead of having: - // - // X | () => { syntax error }; - // | -^^^^^ unexpected token - // | | - // | expected one of 8 possible tokens here - err.span_label(self.token.span, label_exp); - } else { - err.span_label(sp, label_exp); - err.span_label(self.token.span, "unexpected token"); - } - self.maybe_annotate_with_ascription(&mut err, false); - Err(err) - } - - pub fn maybe_annotate_with_ascription( - &mut self, - err: &mut DiagnosticBuilder<'_>, - maybe_expected_semicolon: bool, - ) { - if let Some((sp, likely_path)) = self.last_type_ascription.take() { - let sm = self.sess.source_map(); - let next_pos = sm.lookup_char_pos(self.token.span.lo()); - let op_pos = sm.lookup_char_pos(sp.hi()); - - let allow_unstable = self.sess.unstable_features.is_nightly_build(); - - if likely_path { - err.span_suggestion( - sp, - "maybe write a path separator here", - "::".to_string(), - if allow_unstable { - Applicability::MaybeIncorrect - } else { - Applicability::MachineApplicable - }, - ); - } else if op_pos.line != next_pos.line && maybe_expected_semicolon { - err.span_suggestion( - sp, - "try using a semicolon", - ";".to_string(), - Applicability::MaybeIncorrect, - ); - } else if allow_unstable { - err.span_label(sp, "tried to parse a type due to this type ascription"); - } else { - err.span_label(sp, "tried to parse a type due to this"); - } - if allow_unstable { - // Give extra information about type ascription only if it's a nightly compiler. - err.note("`#![feature(type_ascription)]` lets you annotate an expression with a \ - type: `<expr>: <type>`"); - err.note("for more information, see \ - https://github.com/rust-lang/rust/issues/23416"); - } - } - } - - /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, - /// passes through any errors encountered. Used for error recovery. - pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { - if let Err(ref mut err) = self.parse_seq_to_before_tokens( - kets, - SeqSep::none(), - TokenExpectType::Expect, - |p| Ok(p.parse_token_tree()), - ) { - err.cancel(); - } - } - - /// This function checks if there are trailing angle brackets and produces - /// a diagnostic to suggest removing them. - /// - /// ```ignore (diagnostic) - /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>(); - /// ^^ help: remove extra angle brackets - /// ``` - pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { - // This function is intended to be invoked after parsing a path segment where there are two - // cases: - // - // 1. A specific token is expected after the path segment. - // eg. `x.foo(`, `x.foo::<u32>(` (parenthesis - method call), - // `Foo::`, or `Foo::<Bar>::` (mod sep - continued path). - // 2. No specific token is expected after the path segment. - // eg. `x.foo` (field access) - // - // This function is called after parsing `.foo` and before parsing the token `end` (if - // present). This includes any angle bracket arguments, such as `.foo::<u32>` or - // `Foo::<Bar>`. - - // We only care about trailing angle brackets if we previously parsed angle bracket - // arguments. 
This helps stop us incorrectly suggesting that extra angle brackets be - // removed in this case: - // - // `x.foo >> (3)` (where `x.foo` is a `u32` for example) - // - // This case is particularly tricky as we won't notice it just looking at the tokens - - // it will appear the same (in terms of upcoming tokens) as below (since the `::<u32>` will - // have already been parsed): - // - // `x.foo::<u32>>>(3)` - let parsed_angle_bracket_args = segment.args - .as_ref() - .map(|args| args.is_angle_bracketed()) - .unwrap_or(false); - - debug!( - "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}", - parsed_angle_bracket_args, - ); - if !parsed_angle_bracket_args { - return; - } - - // Keep the span at the start so we can highlight the sequence of `>` characters to be - // removed. - let lo = self.token.span; - - // We need to look-ahead to see if we have `>` characters without moving the cursor forward - // (since we might have the field access case and the characters we're eating are - // actual operators and not trailing characters - ie `x.foo >> 3`). - let mut position = 0; - - // We can encounter `>` or `>>` tokens in any order, so we need to keep track of how - // many of each (so we can correctly pluralize our error messages) and continue to - // advance. - let mut number_of_shr = 0; - let mut number_of_gt = 0; - while self.look_ahead(position, |t| { - trace!("check_trailing_angle_brackets: t={:?}", t); - if *t == token::BinOp(token::BinOpToken::Shr) { - number_of_shr += 1; - true - } else if *t == token::Gt { - number_of_gt += 1; - true - } else { - false - } - }) { - position += 1; - } - - // If we didn't find any trailing `>` characters, then we have nothing to error about. - debug!( - "check_trailing_angle_brackets: number_of_gt={:?} number_of_shr={:?}", - number_of_gt, number_of_shr, - ); - if number_of_gt < 1 && number_of_shr < 1 { - return; - } - - // Finally, double check that we have our end token as otherwise this is the - // second case. - if self.look_ahead(position, |t| { - trace!("check_trailing_angle_brackets: t={:?}", t); - *t == end - }) { - // Eat from where we started until the end token so that parsing can continue - // as if we didn't have those extra angle brackets. - self.eat_to_tokens(&[&end]); - let span = lo.until(self.token.span); - - let total_num_of_gt = number_of_gt + number_of_shr * 2; - self.diagnostic() - .struct_span_err( - span, - &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)), - ) - .span_suggestion( - span, - &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)), - String::new(), - Applicability::MachineApplicable, - ) - .emit(); - } - } - - /// Produces an error if comparison operators are chained (RFC #558). - /// We only need to check the LHS, not the RHS, because all comparison ops have same - /// precedence (see `fn precedence`) and are left-associative (see `fn fixity`). - /// - /// This can also be hit if someone incorrectly writes `foo<bar>()` when they should have used - /// the turbofish (`foo::<bar>()`) syntax. We attempt some heuristic recovery if that is the - /// case. 
- /// - /// Keep in mind that given that `outer_op.is_comparison()` holds and comparison ops are left - /// associative we can infer that we have: - /// - /// outer_op - /// / \ - /// inner_op r2 - /// / \ - /// l1 r1 - pub(super) fn check_no_chained_comparison( - &mut self, - lhs: &Expr, - outer_op: &AssocOp, - ) -> PResult<'a, Option<P<Expr>>> { - debug_assert!( - outer_op.is_comparison(), - "check_no_chained_comparison: {:?} is not comparison", - outer_op, - ); - - let mk_err_expr = |this: &Self, span| { - Ok(Some(this.mk_expr(span, ExprKind::Err, ThinVec::new()))) - }; - - match lhs.kind { - ExprKind::Binary(op, _, _) if op.node.is_comparison() => { - // Respan to include both operators. - let op_span = op.span.to(self.prev_span); - let mut err = self.struct_span_err( - op_span, - "chained comparison operators require parentheses", - ); - - let suggest = |err: &mut DiagnosticBuilder<'_>| { - err.span_suggestion_verbose( - op_span.shrink_to_lo(), - TURBOFISH, - "::".to_string(), - Applicability::MaybeIncorrect, - ); - }; - - if op.node == BinOpKind::Lt && - *outer_op == AssocOp::Less || // Include `<` to provide this recommendation - *outer_op == AssocOp::Greater // even in a case like the following: - { // Foo<Bar<Baz<Qux, ()>>> - if *outer_op == AssocOp::Less { - let snapshot = self.clone(); - self.bump(); - // So far we have parsed `foo<bar<`, consume the rest of the type args. - let modifiers = [ - (token::Lt, 1), - (token::Gt, -1), - (token::BinOp(token::Shr), -2), - ]; - self.consume_tts(1, &modifiers[..]); - - if !&[ - token::OpenDelim(token::Paren), - token::ModSep, - ].contains(&self.token.kind) { - // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the - // parser and bail out. - mem::replace(self, snapshot.clone()); - } - } - return if token::ModSep == self.token.kind { - // We have some certainty that this was a bad turbofish at this point. - // `foo< bar >::` - suggest(&mut err); - - let snapshot = self.clone(); - self.bump(); // `::` - - // Consume the rest of the likely `foo<bar>::new()` or return at `foo<bar>`. - match self.parse_expr() { - Ok(_) => { - // 99% certain that the suggestion is correct, continue parsing. - err.emit(); - // FIXME: actually check that the two expressions in the binop are - // paths and resynthesize new fn call expression instead of using - // `ExprKind::Err` placeholder. - mk_err_expr(self, lhs.span.to(self.prev_span)) - } - Err(mut expr_err) => { - expr_err.cancel(); - // Not entirely sure now, but we bubble the error up with the - // suggestion. - mem::replace(self, snapshot); - Err(err) - } - } - } else if token::OpenDelim(token::Paren) == self.token.kind { - // We have high certainty that this was a bad turbofish at this point. - // `foo< bar >(` - suggest(&mut err); - // Consume the fn call arguments. - match self.consume_fn_args() { - Err(()) => Err(err), - Ok(()) => { - err.emit(); - // FIXME: actually check that the two expressions in the binop are - // paths and resynthesize new fn call expression instead of using - // `ExprKind::Err` placeholder. - mk_err_expr(self, lhs.span.to(self.prev_span)) - } - } - } else { - // All we know is that this is `foo < bar >` and *nothing* else. Try to - // be helpful, but don't attempt to recover. - err.help(TURBOFISH); - err.help("or use `(...)` if you meant to specify fn arguments"); - // These cases cause too many knock-down errors, bail out (#61329). 
- Err(err) - }; - } - err.emit(); - } - _ => {} - } - Ok(None) - } - - fn consume_fn_args(&mut self) -> Result<(), ()> { - let snapshot = self.clone(); - self.bump(); // `(` - - // Consume the fn call arguments. - let modifiers = [ - (token::OpenDelim(token::Paren), 1), - (token::CloseDelim(token::Paren), -1), - ]; - self.consume_tts(1, &modifiers[..]); - - if self.token.kind == token::Eof { - // Not entirely sure that what we consumed were fn arguments, rollback. - mem::replace(self, snapshot); - Err(()) - } else { - // 99% certain that the suggestion is correct, continue parsing. - Ok(()) - } - } - - pub(super) fn maybe_report_ambiguous_plus( - &mut self, - allow_plus: bool, - impl_dyn_multi: bool, - ty: &Ty, - ) { - if !allow_plus && impl_dyn_multi { - let sum_with_parens = format!("({})", pprust::ty_to_string(&ty)); - self.struct_span_err(ty.span, "ambiguous `+` in a type") - .span_suggestion( - ty.span, - "use parentheses to disambiguate", - sum_with_parens, - Applicability::MachineApplicable, - ) - .emit(); - } - } - - pub(super) fn maybe_recover_from_bad_type_plus( - &mut self, - allow_plus: bool, - ty: &Ty, - ) -> PResult<'a, ()> { - // Do not add `+` to expected tokens. - if !allow_plus || !self.token.is_like_plus() { - return Ok(()); - } - - self.bump(); // `+` - let bounds = self.parse_generic_bounds(None)?; - let sum_span = ty.span.to(self.prev_span); - - let mut err = struct_span_err!( - self.sess.span_diagnostic, - sum_span, - E0178, - "expected a path on the left-hand side of `+`, not `{}`", - pprust::ty_to_string(ty) - ); - - match ty.kind { - TyKind::Rptr(ref lifetime, ref mut_ty) => { - let sum_with_parens = pprust::to_string(|s| { - s.s.word("&"); - s.print_opt_lifetime(lifetime); - s.print_mutability(mut_ty.mutbl); - s.popen(); - s.print_type(&mut_ty.ty); - s.print_type_bounds(" +", &bounds); - s.pclose() - }); - err.span_suggestion( - sum_span, - "try adding parentheses", - sum_with_parens, - Applicability::MachineApplicable, - ); - } - TyKind::Ptr(..) | TyKind::BareFn(..) => { - err.span_label(sum_span, "perhaps you forgot parentheses?"); - } - _ => { - err.span_label(sum_span, "expected a path"); - } - } - err.emit(); - Ok(()) - } - - /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`. - /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem` - /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type. - pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>( - &mut self, - base: P<T>, - allow_recovery: bool, - ) -> PResult<'a, P<T>> { - // Do not add `::` to expected tokens. - if allow_recovery && self.token == token::ModSep { - if let Some(ty) = base.to_ty() { - return self.maybe_recover_from_bad_qpath_stage_2(ty.span, ty); - } - } - Ok(base) - } - - /// Given an already parsed `Ty`, parses the `::AssocItem` tail and - /// combines them into a `<Ty>::AssocItem` expression/pattern/type. 
- pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>( - &mut self, - ty_span: Span, - ty: P<Ty>, - ) -> PResult<'a, P<T>> { - self.expect(&token::ModSep)?; - - let mut path = ast::Path { - segments: Vec::new(), - span: DUMMY_SP, - }; - self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?; - path.span = ty_span.to(self.prev_span); - - let ty_str = self - .span_to_snippet(ty_span) - .unwrap_or_else(|_| pprust::ty_to_string(&ty)); - self.diagnostic() - .struct_span_err(path.span, "missing angle brackets in associated item path") - .span_suggestion( - // This is a best-effort recovery. - path.span, - "try", - format!("<{}>::{}", ty_str, pprust::path_to_string(&path)), - Applicability::MaybeIncorrect, - ) - .emit(); - - let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`. - Ok(P(T::recovered( - Some(QSelf { - ty, - path_span, - position: 0, - }), - path, - ))) - } - - pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool { - if self.eat(&token::Semi) { - let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`"); - err.span_suggestion_short( - self.prev_span, - "remove this semicolon", - String::new(), - Applicability::MachineApplicable, - ); - if !items.is_empty() { - let previous_item = &items[items.len() - 1]; - let previous_item_kind_name = match previous_item.kind { - // Say "braced struct" because tuple-structs and - // braceless-empty-struct declarations do take a semicolon. - ItemKind::Struct(..) => Some("braced struct"), - ItemKind::Enum(..) => Some("enum"), - ItemKind::Trait(..) => Some("trait"), - ItemKind::Union(..) => Some("union"), - _ => None, - }; - if let Some(name) = previous_item_kind_name { - err.help(&format!( - "{} declarations are not followed by a semicolon", - name - )); - } - } - err.emit(); - true - } else { - false - } - } - - /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a - /// closing delimiter. - pub(super) fn unexpected_try_recover( - &mut self, - t: &TokenKind, - ) -> PResult<'a, bool /* recovered */> { - let token_str = pprust::token_kind_to_string(t); - let this_token_str = self.this_token_descr(); - let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { - // Point at the end of the macro call when reaching end of macro arguments. - (token::Eof, Some(_)) => { - let sp = self.sess.source_map().next_point(self.token.span); - (sp, sp) - } - // We don't want to point at the following span after DUMMY_SP. - // This happens when the parser finds an empty TokenStream. - _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span), - // EOF, don't want to point at the following char, but rather the last token. 
- (token::Eof, None) => (self.prev_span, self.token.span), - _ => (self.sess.source_map().next_point(self.prev_span), self.token.span), - }; - let msg = format!( - "expected `{}`, found {}", - token_str, - match (&self.token.kind, self.subparser_name) { - (token::Eof, Some(origin)) => format!("end of {}", origin), - _ => this_token_str, - }, - ); - let mut err = self.struct_span_err(sp, &msg); - let label_exp = format!("expected `{}`", token_str); - match self.recover_closing_delimiter(&[t.clone()], err) { - Err(e) => err = e, - Ok(recovered) => { - return Ok(recovered); - } - } - let sm = self.sess.source_map(); - if !sm.is_multiline(prev_sp.until(sp)) { - // When the spans are in the same line, it means that the only content - // between them is whitespace, point only at the found token. - err.span_label(sp, label_exp); - } else { - err.span_label(prev_sp, label_exp); - err.span_label(sp, "unexpected token"); - } - Err(err) - } - - pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> { - if self.eat(&token::Semi) { - return Ok(()); - } - let sm = self.sess.source_map(); - let msg = format!("expected `;`, found `{}`", self.this_token_descr()); - let appl = Applicability::MachineApplicable; - if self.token.span == DUMMY_SP || self.prev_span == DUMMY_SP { - // Likely inside a macro, can't provide meaninful suggestions. - return self.expect(&token::Semi).map(|_| ()); - } else if !sm.is_multiline(self.prev_span.until(self.token.span)) { - // The current token is in the same line as the prior token, not recoverable. - } else if self.look_ahead(1, |t| t == &token::CloseDelim(token::Brace) - || token_can_begin_expr(t) && t.kind != token::Colon - ) && [token::Comma, token::Colon].contains(&self.token.kind) { - // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is - // either `,` or `:`, and the next token could either start a new statement or is a - // block close. For example: - // - // let x = 32: - // let y = 42; - self.bump(); - let sp = self.prev_span; - self.struct_span_err(sp, &msg) - .span_suggestion(sp, "change this to `;`", ";".to_string(), appl) - .emit(); - return Ok(()) - } else if self.look_ahead(0, |t| t == &token::CloseDelim(token::Brace) || ( - token_can_begin_expr(t) - && t != &token::Semi - && t != &token::Pound // Avoid triggering with too many trailing `#` in raw string. - )) { - // Missing semicolon typo. This is triggered if the next token could either start a - // new statement or is a block close. For example: - // - // let x = 32 - // let y = 42; - let sp = self.prev_span.shrink_to_hi(); - self.struct_span_err(sp, &msg) - .span_label(self.token.span, "unexpected token") - .span_suggestion_short(sp, "add `;` here", ";".to_string(), appl) - .emit(); - return Ok(()) - } - self.expect(&token::Semi).map(|_| ()) // Error unconditionally - } - - pub(super) fn parse_semi_or_incorrect_foreign_fn_body( - &mut self, - ident: &Ident, - extern_sp: Span, - ) -> PResult<'a, ()> { - if self.token != token::Semi { - // This might be an incorrect fn definition (#62109). 
- let parser_snapshot = self.clone(); - match self.parse_inner_attrs_and_block() { - Ok((_, body)) => { - self.struct_span_err(ident.span, "incorrect `fn` inside `extern` block") - .span_label(ident.span, "can't have a body") - .span_label(body.span, "this body is invalid here") - .span_label( - extern_sp, - "`extern` blocks define existing foreign functions and `fn`s \ - inside of them cannot have a body") - .help("you might have meant to write a function accessible through ffi, \ - which can be done by writing `extern fn` outside of the \ - `extern` block") - .note("for more information, visit \ - https://doc.rust-lang.org/std/keyword.extern.html") - .emit(); - } - Err(mut err) => { - err.cancel(); - mem::replace(self, parser_snapshot); - self.expect_semi()?; - } - } - } else { - self.bump(); - } - Ok(()) - } - - /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`, - /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`. - pub(super) fn parse_incorrect_await_syntax( - &mut self, - lo: Span, - await_sp: Span, - ) -> PResult<'a, (Span, ExprKind)> { - if self.token == token::Not { - // Handle `await!(<expr>)`. - self.expect(&token::Not)?; - self.expect(&token::OpenDelim(token::Paren))?; - let expr = self.parse_expr()?; - self.expect(&token::CloseDelim(token::Paren))?; - let sp = self.error_on_incorrect_await(lo, self.prev_span, &expr, false); - return Ok((sp, ExprKind::Await(expr))) - } - - let is_question = self.eat(&token::Question); // Handle `await? <expr>`. - let expr = if self.token == token::OpenDelim(token::Brace) { - // Handle `await { <expr> }`. - // This needs to be handled separatedly from the next arm to avoid - // interpreting `await { <expr> }?` as `<expr>?.await`. - self.parse_block_expr( - None, - self.token.span, - BlockCheckMode::Default, - ThinVec::new(), - ) - } else { - self.parse_expr() - }.map_err(|mut err| { - err.span_label(await_sp, "while parsing this incorrect await expression"); - err - })?; - let sp = self.error_on_incorrect_await(lo, expr.span, &expr, is_question); - Ok((sp, ExprKind::Await(expr))) - } - - fn error_on_incorrect_await(&self, lo: Span, hi: Span, expr: &Expr, is_question: bool) -> Span { - let expr_str = self.span_to_snippet(expr.span) - .unwrap_or_else(|_| pprust::expr_to_string(&expr)); - let suggestion = format!("{}.await{}", expr_str, if is_question { "?" } else { "" }); - let sp = lo.to(hi); - let app = match expr.kind { - ExprKind::Try(_) => Applicability::MaybeIncorrect, // `await <expr>?` - _ => Applicability::MachineApplicable, - }; - self.struct_span_err(sp, "incorrect use of `await`") - .span_suggestion(sp, "`await` is a postfix operation", suggestion, app) - .emit(); - sp - } - - /// If encountering `future.await()`, consumes and emits an error. - pub(super) fn recover_from_await_method_call(&mut self) { - if self.token == token::OpenDelim(token::Paren) && - self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) - { - // future.await() - let lo = self.token.span; - self.bump(); // ( - let sp = lo.to(self.token.span); - self.bump(); // ) - self.struct_span_err(sp, "incorrect use of `await`") - .span_suggestion( - sp, - "`await` is not a method call, remove the parentheses", - String::new(), - Applicability::MachineApplicable, - ).emit() - } - } - - /// Recovers a situation like `for ( $pat in $expr )` - /// and suggest writing `for $pat in $expr` instead. - /// - /// This should be called before parsing the `$block`. 
- pub(super) fn recover_parens_around_for_head( - &mut self, - pat: P<Pat>, - expr: &Expr, - begin_paren: Option<Span>, - ) -> P<Pat> { - match (&self.token.kind, begin_paren) { - (token::CloseDelim(token::Paren), Some(begin_par_sp)) => { - self.bump(); - - let pat_str = self - // Remove the `(` from the span of the pattern: - .span_to_snippet(pat.span.trim_start(begin_par_sp).unwrap()) - .unwrap_or_else(|_| pprust::pat_to_string(&pat)); - - self.struct_span_err(self.prev_span, "unexpected closing `)`") - .span_label(begin_par_sp, "opening `(`") - .span_suggestion( - begin_par_sp.to(self.prev_span), - "remove parenthesis in `for` loop", - format!("{} in {}", pat_str, pprust::expr_to_string(&expr)), - // With e.g. `for (x) in y)` this would replace `(x) in y)` - // with `x) in y)` which is syntactically invalid. - // However, this is prevented before we get here. - Applicability::MachineApplicable, - ) - .emit(); - - // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint. - pat.and_then(|pat| match pat.kind { - PatKind::Paren(pat) => pat, - _ => P(pat), - }) - } - _ => pat, - } - } - - pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool { - (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish. - self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()) - ) || - self.token.is_ident() && - match node { - // `foo::` → `foo:` or `foo.bar::` → `foo.bar:` - ast::ExprKind::Path(..) | ast::ExprKind::Field(..) => true, - _ => false, - } && - !self.token.is_reserved_ident() && // v `foo:bar(baz)` - self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) || - self.look_ahead(1, |t| t == &token::Lt) && // `foo:bar<baz` - self.look_ahead(2, |t| t.is_ident()) || - self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz` - self.look_ahead(2, |t| t.is_ident()) || - self.look_ahead(1, |t| t == &token::ModSep) && - (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz` - self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>` - } - - pub(super) fn recover_seq_parse_error( - &mut self, - delim: token::DelimToken, - lo: Span, - result: PResult<'a, P<Expr>>, - ) -> P<Expr> { - match result { - Ok(x) => x, - Err(mut err) => { - err.emit(); - // Recover from parse error, callers expect the closing delim to be consumed. - self.consume_block(delim, ConsumeClosingDelim::Yes); - self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new()) - } - } - } - - pub(super) fn recover_closing_delimiter( - &mut self, - tokens: &[TokenKind], - mut err: DiagnosticBuilder<'a>, - ) -> PResult<'a, bool> { - let mut pos = None; - // We want to use the last closing delim that would apply. - for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { - if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) - && Some(self.token.span) > unmatched.unclosed_span - { - pos = Some(i); - } - } - match pos { - Some(pos) => { - // Recover and assume that the detected unclosed delimiter was meant for - // this location. Emit the diagnostic and act as if the delimiter was - // present for the parser's sake. - - // Don't attempt to recover from this unclosed delimiter more than once. - let unmatched = self.unclosed_delims.remove(pos); - let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); - if unmatched.found_delim.is_none() { - // We encountered `Eof`, set this fact here to avoid complaining about missing - // `fn main()` when we found place to suggest the closing brace. 
- *self.sess.reached_eof.borrow_mut() = true; - } - - // We want to suggest the inclusion of the closing delimiter where it makes - // the most sense, which is immediately after the last token: - // - // {foo(bar {}} - // - ^ - // | | - // | help: `)` may belong here - // | - // unclosed delimiter - if let Some(sp) = unmatched.unclosed_span { - err.span_label(sp, "unclosed delimiter"); - } - err.span_suggestion_short( - self.sess.source_map().next_point(self.prev_span), - &format!("{} may belong here", delim.to_string()), - delim.to_string(), - Applicability::MaybeIncorrect, - ); - if unmatched.found_delim.is_none() { - // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown - // errors which would be emitted elsewhere in the parser and let other error - // recovery consume the rest of the file. - Err(err) - } else { - err.emit(); - self.expected_tokens.clear(); // Reduce the number of errors. - Ok(true) - } - } - _ => Err(err), - } - } - - /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid. - pub(super) fn eat_bad_pub(&mut self) { - // When `unclosed_delims` is populated, it means that the code being parsed is already - // quite malformed, which might mean that, for example, a pub struct definition could be - // parsed as being a trait item, which is invalid and this error would trigger - // unconditionally, resulting in misleading diagnostics. Because of this, we only attempt - // this nice to have recovery for code that is otherwise well formed. - if self.token.is_keyword(kw::Pub) && self.unclosed_delims.is_empty() { - match self.parse_visibility(false) { - Ok(vis) => { - self.diagnostic() - .struct_span_err(vis.span, "unnecessary visibility qualifier") - .span_label(vis.span, "`pub` not permitted here") - .emit(); - } - Err(mut err) => err.emit(), - } - } - } - - /// Eats tokens until we can be relatively sure we reached the end of the - /// statement. This is something of a best-effort heuristic. - /// - /// We terminate when we find an unmatched `}` (without consuming it). - pub(super) fn recover_stmt(&mut self) { - self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore) - } - - /// If `break_on_semi` is `Break`, then we will stop consuming tokens after - /// finding (and consuming) a `;` outside of `{}` or `[]` (note that this is - /// approximate -- it can mean we break too early due to macros, but that - /// should only lead to sub-optimal recovery, not inaccurate parsing). - /// - /// If `break_on_block` is `Break`, then we will stop consuming tokens - /// after finding (and consuming) a brace-delimited block. 
- pub(super) fn recover_stmt_( - &mut self, - break_on_semi: SemiColonMode, - break_on_block: BlockMode, - ) { - let mut brace_depth = 0; - let mut bracket_depth = 0; - let mut in_block = false; - debug!("recover_stmt_ enter loop (semi={:?}, block={:?})", - break_on_semi, break_on_block); - loop { - debug!("recover_stmt_ loop {:?}", self.token); - match self.token.kind { - token::OpenDelim(token::DelimToken::Brace) => { - brace_depth += 1; - self.bump(); - if break_on_block == BlockMode::Break && - brace_depth == 1 && - bracket_depth == 0 { - in_block = true; - } - } - token::OpenDelim(token::DelimToken::Bracket) => { - bracket_depth += 1; - self.bump(); - } - token::CloseDelim(token::DelimToken::Brace) => { - if brace_depth == 0 { - debug!("recover_stmt_ return - close delim {:?}", self.token); - break; - } - brace_depth -= 1; - self.bump(); - if in_block && bracket_depth == 0 && brace_depth == 0 { - debug!("recover_stmt_ return - block end {:?}", self.token); - break; - } - } - token::CloseDelim(token::DelimToken::Bracket) => { - bracket_depth -= 1; - if bracket_depth < 0 { - bracket_depth = 0; - } - self.bump(); - } - token::Eof => { - debug!("recover_stmt_ return - Eof"); - break; - } - token::Semi => { - self.bump(); - if break_on_semi == SemiColonMode::Break && - brace_depth == 0 && - bracket_depth == 0 { - debug!("recover_stmt_ return - Semi"); - break; - } - } - token::Comma if break_on_semi == SemiColonMode::Comma && - brace_depth == 0 && - bracket_depth == 0 => - { - debug!("recover_stmt_ return - Semi"); - break; - } - _ => { - self.bump() - } - } - } - } - - pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) { - if self.eat_keyword(kw::In) { - // a common typo: `for _ in in bar {}` - self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`") - .span_suggestion_short( - in_span.until(self.prev_span), - "remove the duplicated `in`", - String::new(), - Applicability::MachineApplicable, - ) - .emit(); - } - } - - pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> { - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); - err.span_label(self.token.span, "expected `;` or `{`"); - Err(err) - } - - pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) { - if let token::DocComment(_) = self.token.kind { - self.struct_span_err( - self.token.span, - "documentation comments cannot be applied to a function parameter's type", - ) - .span_label(self.token.span, "doc comments are not allowed here") - .emit(); - self.bump(); - } else if self.token == token::Pound && self.look_ahead(1, |t| { - *t == token::OpenDelim(token::Bracket) - }) { - let lo = self.token.span; - // Skip every token until next possible arg. - while self.token != token::CloseDelim(token::Bracket) { - self.bump(); - } - let sp = lo.to(self.token.span); - self.bump(); - self.struct_span_err( - sp, - "attributes cannot be applied to a function parameter's type", - ) - .span_label(sp, "attributes are not allowed here") - .emit(); - } - } - - pub(super) fn parameter_without_type( - &mut self, - err: &mut DiagnosticBuilder<'_>, - pat: P<ast::Pat>, - require_name: bool, - is_self_allowed: bool, - is_trait_item: bool, - ) -> Option<Ident> { - // If we find a pattern followed by an identifier, it could be an (incorrect) - // C-style parameter declaration. 
- if self.check_ident() && self.look_ahead(1, |t| { - *t == token::Comma || *t == token::CloseDelim(token::Paren) - }) { // `fn foo(String s) {}` - let ident = self.parse_ident().unwrap(); - let span = pat.span.with_hi(ident.span.hi()); - - err.span_suggestion( - span, - "declare the type after the parameter binding", - String::from("<identifier>: <type>"), - Applicability::HasPlaceholders, - ); - return Some(ident); - } else if let PatKind::Ident(_, ident, _) = pat.kind { - if require_name && ( - is_trait_item || - self.token == token::Comma || - self.token == token::Lt || - self.token == token::CloseDelim(token::Paren) - ) { // `fn foo(a, b) {}`, `fn foo(a<x>, b<y>) {}` or `fn foo(usize, usize) {}` - if is_self_allowed { - err.span_suggestion( - pat.span, - "if this is a `self` type, give it a parameter name", - format!("self: {}", ident), - Applicability::MaybeIncorrect, - ); - } - // Avoid suggesting that `fn foo(HashMap<u32>)` is fixed with a change to - // `fn foo(HashMap: TypeName<u32>)`. - if self.token != token::Lt { - err.span_suggestion( - pat.span, - "if this was a parameter name, give it a type", - format!("{}: TypeName", ident), - Applicability::HasPlaceholders, - ); - } - err.span_suggestion( - pat.span, - "if this is a type, explicitly ignore the parameter name", - format!("_: {}", ident), - Applicability::MachineApplicable, - ); - err.note("anonymous parameters are removed in the 2018 edition (see RFC 1685)"); - - // Don't attempt to recover by using the `X` in `X<Y>` as the parameter name. - return if self.token == token::Lt { None } else { Some(ident) }; - } - } - None - } - - pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> { - let pat = self.parse_pat(Some("argument name"))?; - self.expect(&token::Colon)?; - let ty = self.parse_ty()?; - - self.diagnostic() - .struct_span_err_with_code( - pat.span, - "patterns aren't allowed in methods without bodies", - DiagnosticId::Error("E0642".into()), - ) - .span_suggestion_short( - pat.span, - "give this argument a name or use an underscore to ignore it", - "_".to_owned(), - Applicability::MachineApplicable, - ) - .emit(); - - // Pretend the pattern is `_`, to avoid duplicate errors from AST validation. - let pat = P(Pat { - kind: PatKind::Wild, - span: pat.span, - id: ast::DUMMY_NODE_ID - }); - Ok((pat, ty)) - } - - pub(super) fn recover_bad_self_param( - &mut self, - mut param: ast::Param, - is_trait_item: bool, - ) -> PResult<'a, ast::Param> { - let sp = param.pat.span; - param.ty.kind = TyKind::Err; - let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function"); - if is_trait_item { - err.span_label(sp, "must be the first associated function parameter"); - } else { - err.span_label(sp, "not valid as function parameter"); - err.note("`self` is only valid as the first parameter of an associated function"); - } - err.emit(); - Ok(param) - } - - pub(super) fn consume_block( - &mut self, - delim: token::DelimToken, - consume_close: ConsumeClosingDelim, - ) { - let mut brace_depth = 0; - loop { - if self.eat(&token::OpenDelim(delim)) { - brace_depth += 1; - } else if self.check(&token::CloseDelim(delim)) { - if brace_depth == 0 { - if let ConsumeClosingDelim::Yes = consume_close { - // Some of the callers of this method expect to be able to parse the - // closing delimiter themselves, so we leave it alone. Otherwise we advance - // the parser. 
- self.bump(); - } - return; - } else { - self.bump(); - brace_depth -= 1; - continue; - } - } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) { - return; - } else { - self.bump(); - } - } - } - - pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { - let (span, msg) = match (&self.token.kind, self.subparser_name) { - (&token::Eof, Some(origin)) => { - let sp = self.sess.source_map().next_point(self.token.span); - (sp, format!("expected expression, found end of {}", origin)) - } - _ => (self.token.span, format!( - "expected expression, found {}", - self.this_token_descr(), - )), - }; - let mut err = self.struct_span_err(span, &msg); - let sp = self.sess.source_map().start_point(self.token.span); - if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { - self.sess.expr_parentheses_needed(&mut err, *sp, None); - } - err.span_label(span, "expected expression"); - err - } - - fn consume_tts( - &mut self, - mut acc: i64, // `i64` because malformed code can have more closing delims than opening. - // Not using `FxHashMap` due to `token::TokenKind: !Eq + !Hash`. - modifier: &[(token::TokenKind, i64)], - ) { - while acc > 0 { - if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) { - acc += *val; - } - if self.token.kind == token::Eof { - break; - } - self.bump(); - } - } - - /// Replace duplicated recovered parameters with `_` pattern to avoid unecessary errors. - /// - /// This is necessary because at this point we don't know whether we parsed a function with - /// anonymous parameters or a function with names but no types. In order to minimize - /// unecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where - /// the parameters are *names* (so we don't emit errors about not being able to find `b` in - /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`, - /// we deduplicate them to not complain about duplicated parameter names. 
- pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) { - let mut seen_inputs = FxHashSet::default(); - for input in fn_inputs.iter_mut() { - let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = ( - &input.pat.kind, &input.ty.kind, - ) { - Some(*ident) - } else { - None - }; - if let Some(ident) = opt_ident { - if seen_inputs.contains(&ident) { - input.pat.kind = PatKind::Wild; - } - seen_inputs.insert(ident); - } - } - } -} diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs deleted file mode 100644 index 800074035ce..00000000000 --- a/src/libsyntax/parse/parser/expr.rs +++ /dev/null @@ -1,1964 +0,0 @@ -use super::{Parser, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode}; -use super::{SemiColonMode, SeqSep, TokenExpectType}; -use super::pat::{GateOr, PARAM_EXPECTED}; -use super::diagnostics::Error; - -use crate::ast::{ - self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode, - Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind, - FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit, -}; -use crate::maybe_recover_from_interpolated_ty_qpath; -use crate::token::{self, Token, TokenKind}; -use crate::print::pprust; -use crate::ptr::P; -use crate::source_map::{self, Span}; -use crate::util::classify; -use crate::util::literal::LitError; -use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; - -use errors::{PResult, Applicability}; -use syntax_pos::symbol::{kw, sym}; -use syntax_pos::Symbol; -use std::mem; -use rustc_data_structures::thin_vec::ThinVec; - -/// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression -/// dropped into the token stream, which happens while parsing the result of -/// macro expansion). Placement of these is not as complex as I feared it would -/// be. The important thing is to make sure that lookahead doesn't balk at -/// `token::Interpolated` tokens. -macro_rules! maybe_whole_expr { - ($p:expr) => { - if let token::Interpolated(nt) = &$p.token.kind { - match &**nt { - token::NtExpr(e) | token::NtLiteral(e) => { - let e = e.clone(); - $p.bump(); - return Ok(e); - } - token::NtPath(path) => { - let path = path.clone(); - $p.bump(); - return Ok($p.mk_expr( - $p.token.span, ExprKind::Path(None, path), ThinVec::new() - )); - } - token::NtBlock(block) => { - let block = block.clone(); - $p.bump(); - return Ok($p.mk_expr( - $p.token.span, ExprKind::Block(block, None), ThinVec::new() - )); - } - // N.B., `NtIdent(ident)` is normalized to `Ident` in `fn bump`. - _ => {}, - }; - } - } -} - -#[derive(Debug)] -pub(super) enum LhsExpr { - NotYetParsed, - AttributesParsed(ThinVec<Attribute>), - AlreadyParsed(P<Expr>), -} - -impl From<Option<ThinVec<Attribute>>> for LhsExpr { - /// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)` - /// and `None` into `LhsExpr::NotYetParsed`. - /// - /// This conversion does not allocate. - fn from(o: Option<ThinVec<Attribute>>) -> Self { - if let Some(attrs) = o { - LhsExpr::AttributesParsed(attrs) - } else { - LhsExpr::NotYetParsed - } - } -} - -impl From<P<Expr>> for LhsExpr { - /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed(expr)`. - /// - /// This conversion does not allocate. - fn from(expr: P<Expr>) -> Self { - LhsExpr::AlreadyParsed(expr) - } -} - -impl<'a> Parser<'a> { - /// Parses an expression. 
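// The `maybe_whole_expr!` macro above handles "tokens" that are really pre-parsed
// AST fragments spliced back into the stream by macro expansion: when one is seen,
// the finished node is returned directly instead of being re-parsed. A standalone
// illustration of that short-circuit with invented types (not rustc's):
#[derive(Clone, Debug)]
enum SketchExpr { Lit(i64), Neg(Box<SketchExpr>) }

#[derive(Clone, Debug)]
enum SketchToken { Num(i64), Minus, Interpolated(SketchExpr) }

fn parse_primary(tokens: &[SketchToken], pos: &mut usize) -> Option<SketchExpr> {
    let tok = tokens.get(*pos)?.clone();
    *pos += 1;
    match tok {
        // The interpolated case: splice the already-built node in as-is, which is
        // all the macro does before ordinary expression parsing even starts.
        SketchToken::Interpolated(e) => Some(e),
        SketchToken::Num(n) => Some(SketchExpr::Lit(n)),
        SketchToken::Minus => parse_primary(tokens, pos).map(|e| SketchExpr::Neg(Box::new(e))),
    }
}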
- #[inline] - pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> { - self.parse_expr_res(Restrictions::empty(), None) - } - - fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> { - self.parse_paren_comma_seq(|p| { - match p.parse_expr() { - Ok(expr) => Ok(expr), - Err(mut err) => match p.token.kind { - token::Ident(name, false) - if name == kw::Underscore && p.look_ahead(1, |t| { - t == &token::Comma - }) => { - // Special-case handling of `foo(_, _, _)` - err.emit(); - let sp = p.token.span; - p.bump(); - Ok(p.mk_expr(sp, ExprKind::Err, ThinVec::new())) - } - _ => Err(err), - }, - } - }).map(|(r, _)| r) - } - - /// Parses an expression, subject to the given restrictions. - #[inline] - pub(super) fn parse_expr_res( - &mut self, - r: Restrictions, - already_parsed_attrs: Option<ThinVec<Attribute>> - ) -> PResult<'a, P<Expr>> { - self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs)) - } - - /// Parses an associative expression. - /// - /// This parses an expression accounting for associativity and precedence of the operators in - /// the expression. - #[inline] - fn parse_assoc_expr( - &mut self, - already_parsed_attrs: Option<ThinVec<Attribute>>, - ) -> PResult<'a, P<Expr>> { - self.parse_assoc_expr_with(0, already_parsed_attrs.into()) - } - - /// Parses an associative expression with operators of at least `min_prec` precedence. - pub(super) fn parse_assoc_expr_with( - &mut self, - min_prec: usize, - lhs: LhsExpr, - ) -> PResult<'a, P<Expr>> { - let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs { - expr - } else { - let attrs = match lhs { - LhsExpr::AttributesParsed(attrs) => Some(attrs), - _ => None, - }; - if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) { - return self.parse_prefix_range_expr(attrs); - } else { - self.parse_prefix_expr(attrs)? - } - }; - let last_type_ascription_set = self.last_type_ascription.is_some(); - - match (self.expr_is_complete(&lhs), AssocOp::from_token(&self.token)) { - (true, None) => { - self.last_type_ascription = None; - // Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071 - return Ok(lhs); - } - (false, _) => {} // continue parsing the expression - // An exhaustive check is done in the following block, but these are checked first - // because they *are* ambiguous but also reasonable looking incorrect syntax, so we - // want to keep their span info to improve diagnostics in these cases in a later stage. - (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3` - (true, Some(AssocOp::Subtract)) | // `{ 42 } -5` - (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) - (true, Some(AssocOp::Add)) // `{ 42 } + 42 - // If the next token is a keyword, then the tokens above *are* unambiguously incorrect: - // `if x { a } else { b } && if y { c } else { d }` - if !self.look_ahead(1, |t| t.is_reserved_ident()) => { - self.last_type_ascription = None; - // These cases are ambiguous and can't be identified in the parser alone - let sp = self.sess.source_map().start_point(self.token.span); - self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span); - return Ok(lhs); - } - (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => { - self.last_type_ascription = None; - return Ok(lhs); - } - (true, Some(_)) => { - // We've found an expression that would be parsed as a statement, but the next - // token implies this should be parsed as an expression. 
- // For example: `if let Some(x) = x { x } else { 0 } / 2` - let mut err = self.struct_span_err(self.token.span, &format!( - "expected expression, found `{}`", - pprust::token_to_string(&self.token), - )); - err.span_label(self.token.span, "expected expression"); - self.sess.expr_parentheses_needed( - &mut err, - lhs.span, - Some(pprust::expr_to_string(&lhs), - )); - err.emit(); - } - } - self.expected_tokens.push(TokenType::Operator); - while let Some(op) = AssocOp::from_token(&self.token) { - - // Adjust the span for interpolated LHS to point to the `$lhs` token and not to what - // it refers to. Interpolated identifiers are unwrapped early and never show up here - // as `PrevTokenKind::Interpolated` so if LHS is a single identifier we always process - // it as "interpolated", it doesn't change the answer for non-interpolated idents. - let lhs_span = match (self.prev_token_kind, &lhs.kind) { - (PrevTokenKind::Interpolated, _) => self.prev_span, - (PrevTokenKind::Ident, &ExprKind::Path(None, ref path)) - if path.segments.len() == 1 => self.prev_span, - _ => lhs.span, - }; - - let cur_op_span = self.token.span; - let restrictions = if op.is_assign_like() { - self.restrictions & Restrictions::NO_STRUCT_LITERAL - } else { - self.restrictions - }; - let prec = op.precedence(); - if prec < min_prec { - break; - } - // Check for deprecated `...` syntax - if self.token == token::DotDotDot && op == AssocOp::DotDotEq { - self.err_dotdotdot_syntax(self.token.span); - } - - if self.token == token::LArrow { - self.err_larrow_operator(self.token.span); - } - - self.bump(); - if op.is_comparison() { - if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? { - return Ok(expr); - } - } - // Special cases: - if op == AssocOp::As { - lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?; - continue - } else if op == AssocOp::Colon { - let maybe_path = self.could_ascription_be_path(&lhs.kind); - self.last_type_ascription = Some((self.prev_span, maybe_path)); - - lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?; - self.sess.gated_spans.gate(sym::type_ascription, lhs.span); - continue - } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq { - // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to - // generalise it to the Fixity::None code. - // - // We have 2 alternatives here: `x..y`/`x..=y` and `x..`/`x..=` The other - // two variants are handled with `parse_prefix_range_expr` call above. - let rhs = if self.is_at_start_of_range_notation_rhs() { - Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?) - } else { - None - }; - let (lhs_span, rhs_span) = (lhs.span, if let Some(ref x) = rhs { - x.span - } else { - cur_op_span - }); - let limits = if op == AssocOp::DotDot { - RangeLimits::HalfOpen - } else { - RangeLimits::Closed - }; - - let r = self.mk_range(Some(lhs), rhs, limits)?; - lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new()); - break - } - - let fixity = op.fixity(); - let prec_adjustment = match fixity { - Fixity::Right => 0, - Fixity::Left => 1, - // We currently have no non-associative operators that are not handled above by - // the special cases. The code is here only for future convenience. - Fixity::None => 1, - }; - let rhs = self.with_res( - restrictions - Restrictions::STMT_EXPR, - |this| this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed) - )?; - - // Make sure that the span of the parent node is larger than the span of lhs and rhs, - // including the attributes. 
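// `parse_assoc_expr_with` above is precedence climbing: after a prefix/primary
// expression, operators are folded in only while their precedence is at least
// `min_prec`, and for left-associative operators the right-hand side is parsed
// with `prec + 1` (the `Fixity::Left` adjustment). A self-contained sketch of the
// same loop over single-digit integer arithmetic, with helper names invented here:
fn sketch_precedence(op: char) -> Option<u8> {
    match op { '+' | '-' => Some(1), '*' | '/' => Some(2), _ => None }
}

fn sketch_parse_expr(tokens: &[char], pos: &mut usize, min_prec: u8) -> i64 {
    // A single decimal digit stands in for `parse_prefix_expr`.
    let mut lhs = tokens[*pos].to_digit(10).expect("digit") as i64;
    *pos += 1;
    while *pos < tokens.len() {
        let op = tokens[*pos];
        let prec = match sketch_precedence(op) {
            Some(p) if p >= min_prec => p,
            _ => break, // lower-precedence operator: let an outer call fold it in
        };
        *pos += 1;
        // Left-associative: the RHS may only use strictly tighter operators.
        let rhs = sketch_parse_expr(tokens, pos, prec + 1);
        lhs = match op {
            '+' => lhs + rhs,
            '-' => lhs - rhs,
            '*' => lhs * rhs,
            '/' => lhs / rhs,
            _ => unreachable!(),
        };
    }
    lhs
}

// `sketch_parse_expr(&"1+2*3-4".chars().collect::<Vec<_>>(), &mut 0, 0)` yields 3,
// with `2*3` bound tighter and `-` applied left-to-right.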
- let lhs_span = lhs - .attrs - .iter() - .filter(|a| a.style == AttrStyle::Outer) - .next() - .map_or(lhs_span, |a| a.span); - let span = lhs_span.to(rhs.span); - lhs = match op { - AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | - AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | - AssocOp::BitAnd | AssocOp::BitOr | AssocOp::ShiftLeft | AssocOp::ShiftRight | - AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual | - AssocOp::Greater | AssocOp::GreaterEqual => { - let ast_op = op.to_ast_binop().unwrap(); - let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); - self.mk_expr(span, binary, ThinVec::new()) - } - AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), - AssocOp::AssignOp(k) => { - let aop = match k { - token::Plus => BinOpKind::Add, - token::Minus => BinOpKind::Sub, - token::Star => BinOpKind::Mul, - token::Slash => BinOpKind::Div, - token::Percent => BinOpKind::Rem, - token::Caret => BinOpKind::BitXor, - token::And => BinOpKind::BitAnd, - token::Or => BinOpKind::BitOr, - token::Shl => BinOpKind::Shl, - token::Shr => BinOpKind::Shr, - }; - let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs); - self.mk_expr(span, aopexpr, ThinVec::new()) - } - AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => { - self.bug("AssocOp should have been handled by special case") - } - }; - - if let Fixity::None = fixity { break } - } - if last_type_ascription_set { - self.last_type_ascription = None; - } - Ok(lhs) - } - - /// Checks if this expression is a successfully parsed statement. - fn expr_is_complete(&self, e: &Expr) -> bool { - self.restrictions.contains(Restrictions::STMT_EXPR) && - !classify::expr_requires_semi_to_be_stmt(e) - } - - fn is_at_start_of_range_notation_rhs(&self) -> bool { - if self.token.can_begin_expr() { - // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`. - if self.token == token::OpenDelim(token::Brace) { - return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); - } - true - } else { - false - } - } - - /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`. - fn parse_prefix_range_expr( - &mut self, - already_parsed_attrs: Option<ThinVec<Attribute>> - ) -> PResult<'a, P<Expr>> { - // Check for deprecated `...` syntax. - if self.token == token::DotDotDot { - self.err_dotdotdot_syntax(self.token.span); - } - - debug_assert!([token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind), - "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq", - self.token); - let tok = self.token.clone(); - let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.token.span; - let mut hi = self.token.span; - self.bump(); - let opt_end = if self.is_at_start_of_range_notation_rhs() { - // RHS must be parsed with more associativity than the dots. - let next_prec = AssocOp::from_token(&tok).unwrap().precedence() + 1; - Some(self.parse_assoc_expr_with(next_prec, LhsExpr::NotYetParsed) - .map(|x| { - hi = x.span; - x - })?) - } else { - None - }; - let limits = if tok == token::DotDot { - RangeLimits::HalfOpen - } else { - RangeLimits::Closed - }; - - let r = self.mk_range(None, opt_end, limits)?; - Ok(self.mk_expr(lo.to(hi), r, attrs)) - } - - /// Parses a prefix-unary-operator expr. 
- fn parse_prefix_expr( - &mut self, - already_parsed_attrs: Option<ThinVec<Attribute>> - ) -> PResult<'a, P<Expr>> { - let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.token.span; - // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() - let (hi, ex) = match self.token.kind { - token::Not => { - self.bump(); - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - (lo.to(span), self.mk_unary(UnOp::Not, e)) - } - // Suggest `!` for bitwise negation when encountering a `~` - token::Tilde => { - self.bump(); - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - let span_of_tilde = lo; - self.struct_span_err(span_of_tilde, "`~` cannot be used as a unary operator") - .span_suggestion_short( - span_of_tilde, - "use `!` to perform bitwise not", - "!".to_owned(), - Applicability::MachineApplicable - ) - .emit(); - (lo.to(span), self.mk_unary(UnOp::Not, e)) - } - token::BinOp(token::Minus) => { - self.bump(); - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - (lo.to(span), self.mk_unary(UnOp::Neg, e)) - } - token::BinOp(token::Star) => { - self.bump(); - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - (lo.to(span), self.mk_unary(UnOp::Deref, e)) - } - token::BinOp(token::And) | token::AndAnd => { - self.expect_and()?; - let m = self.parse_mutability(); - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - (lo.to(span), ExprKind::AddrOf(m, e)) - } - token::Ident(..) if self.token.is_keyword(kw::Box) => { - self.bump(); - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - let span = lo.to(span); - self.sess.gated_spans.gate(sym::box_syntax, span); - (span, ExprKind::Box(e)) - } - token::Ident(..) if self.token.is_ident_named(sym::not) => { - // `not` is just an ordinary identifier in Rust-the-language, - // but as `rustc`-the-compiler, we can issue clever diagnostics - // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &Token| match t.kind { - // These tokens can start an expression after `!`, but - // can't continue an expression after an ident - token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), - token::Literal(..) | token::Pound => true, - _ => t.is_whole_expr(), - }; - let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr); - if cannot_continue_expr { - self.bump(); - // Emit the error ... - self.struct_span_err( - self.token.span, - &format!("unexpected {} after identifier",self.this_token_descr()) - ) - .span_suggestion_short( - // Span the `not` plus trailing whitespace to avoid - // trailing whitespace after the `!` in our suggestion - self.sess.source_map() - .span_until_non_whitespace(lo.to(self.token.span)), - "use `!` to perform logical negation", - "!".to_owned(), - Applicability::MachineApplicable - ) - .emit(); - // —and recover! 
(just as if we were in the block - // for the `token::Not` arm) - let e = self.parse_prefix_expr(None); - let (span, e) = self.interpolated_or_expr_span(e)?; - (lo.to(span), self.mk_unary(UnOp::Not, e)) - } else { - return self.parse_dot_or_call_expr(Some(attrs)); - } - } - _ => { return self.parse_dot_or_call_expr(Some(attrs)); } - }; - return Ok(self.mk_expr(lo.to(hi), ex, attrs)); - } - - /// Returns the span of expr, if it was not interpolated or the span of the interpolated token. - fn interpolated_or_expr_span( - &self, - expr: PResult<'a, P<Expr>>, - ) -> PResult<'a, (Span, P<Expr>)> { - expr.map(|e| { - if self.prev_token_kind == PrevTokenKind::Interpolated { - (self.prev_span, e) - } else { - (e.span, e) - } - }) - } - - fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span, - expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind) - -> PResult<'a, P<Expr>> { - let mk_expr = |this: &mut Self, rhs: P<Ty>| { - this.mk_expr(lhs_span.to(rhs.span), expr_kind(lhs, rhs), ThinVec::new()) - }; - - // Save the state of the parser before parsing type normally, in case there is a - // LessThan comparison after this cast. - let parser_snapshot_before_type = self.clone(); - match self.parse_ty_no_plus() { - Ok(rhs) => { - Ok(mk_expr(self, rhs)) - } - Err(mut type_err) => { - // Rewind to before attempting to parse the type with generics, to recover - // from situations like `x as usize < y` in which we first tried to parse - // `usize < y` as a type with generic arguments. - let parser_snapshot_after_type = self.clone(); - mem::replace(self, parser_snapshot_before_type); - - match self.parse_path(PathStyle::Expr) { - Ok(path) => { - let (op_noun, op_verb) = match self.token.kind { - token::Lt => ("comparison", "comparing"), - token::BinOp(token::Shl) => ("shift", "shifting"), - _ => { - // We can end up here even without `<` being the next token, for - // example because `parse_ty_no_plus` returns `Err` on keywords, - // but `parse_path` returns `Ok` on them due to error recovery. - // Return original error and parser state. - mem::replace(self, parser_snapshot_after_type); - return Err(type_err); - } - }; - - // Successfully parsed the type path leaving a `<` yet to parse. - type_err.cancel(); - - // Report non-fatal diagnostics, keep `x as usize` as an expression - // in AST and continue parsing. - let msg = format!( - "`<` is interpreted as a start of generic arguments for `{}`, not a {}", - pprust::path_to_string(&path), - op_noun, - ); - let span_after_type = parser_snapshot_after_type.token.span; - let expr = mk_expr(self, P(Ty { - span: path.span, - kind: TyKind::Path(None, path), - id: DUMMY_NODE_ID, - })); - - let expr_str = self.span_to_snippet(expr.span) - .unwrap_or_else(|_| pprust::expr_to_string(&expr)); - - self.struct_span_err(self.token.span, &msg) - .span_label( - self.look_ahead(1, |t| t.span).to(span_after_type), - "interpreted as generic arguments" - ) - .span_label(self.token.span, format!("not interpreted as {}", op_noun)) - .span_suggestion( - expr.span, - &format!("try {} the cast value", op_verb), - format!("({})", expr_str), - Applicability::MachineApplicable, - ) - .emit(); - - Ok(expr) - } - Err(mut path_err) => { - // Couldn't parse as a path, return original error and parser state. - path_err.cancel(); - mem::replace(self, parser_snapshot_after_type); - Err(type_err) - } - } - } - } - } - - /// Parses `a.b` or `a(13)` or `a[4]` or just `a`. 
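// The `x as usize < y` recovery in `parse_assoc_op_cast` above clones the whole
// parser, tries one interpretation, and swaps the snapshot back in when that
// interpretation has to be abandoned. A standalone sketch of the same
// snapshot-and-rollback pattern with a toy cursor type (names invented here):
#[derive(Clone)]
struct SketchCursor<'a> { tokens: &'a [&'a str], pos: usize }

impl<'a> SketchCursor<'a> {
    fn next(&mut self) -> Option<&'a str> {
        let t = self.tokens.get(self.pos).copied();
        if t.is_some() { self.pos += 1; }
        t
    }

    /// Runs `f`; if it fails, restores the cursor to where it was beforehand,
    /// much like `mem::replace(self, parser_snapshot_before_type)` above.
    fn try_parse<T>(&mut self, f: impl FnOnce(&mut Self) -> Option<T>) -> Option<T> {
        let snapshot = self.clone();
        match f(self) {
            Some(v) => Some(v),
            None => {
                *self = snapshot; // rewind: nothing `f` consumed stays consumed
                None
            }
        }
    }
}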
- fn parse_dot_or_call_expr( - &mut self, - already_parsed_attrs: Option<ThinVec<Attribute>>, - ) -> PResult<'a, P<Expr>> { - let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - - let b = self.parse_bottom_expr(); - let (span, b) = self.interpolated_or_expr_span(b)?; - self.parse_dot_or_call_expr_with(b, span, attrs) - } - - pub(super) fn parse_dot_or_call_expr_with( - &mut self, - e0: P<Expr>, - lo: Span, - mut attrs: ThinVec<Attribute>, - ) -> PResult<'a, P<Expr>> { - // Stitch the list of outer attributes onto the return value. - // A little bit ugly, but the best way given the current code - // structure - self.parse_dot_or_call_expr_with_(e0, lo).map(|expr| - expr.map(|mut expr| { - attrs.extend::<Vec<_>>(expr.attrs.into()); - expr.attrs = attrs; - match expr.kind { - ExprKind::If(..) if !expr.attrs.is_empty() => { - // Just point to the first attribute in there... - let span = expr.attrs[0].span; - self.span_err(span, "attributes are not yet allowed on `if` expressions"); - } - _ => {} - } - expr - }) - ) - } - - fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - let mut e = e0; - let mut hi; - loop { - // expr? - while self.eat(&token::Question) { - let hi = self.prev_span; - e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new()); - } - - // expr.f - if self.eat(&token::Dot) { - match self.token.kind { - token::Ident(..) => { - e = self.parse_dot_suffix(e, lo)?; - } - token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => { - let span = self.token.span; - self.bump(); - let field = ExprKind::Field(e, Ident::new(symbol, span)); - e = self.mk_expr(lo.to(span), field, ThinVec::new()); - - self.expect_no_suffix(span, "a tuple index", suffix); - } - token::Literal(token::Lit { kind: token::Float, symbol, .. }) => { - self.bump(); - let fstr = symbol.as_str(); - let msg = format!("unexpected token: `{}`", symbol); - let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg); - err.span_label(self.prev_span, "unexpected token"); - if fstr.chars().all(|x| "0123456789.".contains(x)) { - let float = match fstr.parse::<f64>().ok() { - Some(f) => f, - None => continue, - }; - let sugg = pprust::to_string(|s| { - s.popen(); - s.print_expr(&e); - s.s.word( "."); - s.print_usize(float.trunc() as usize); - s.pclose(); - s.s.word("."); - s.s.word(fstr.splitn(2, ".").last().unwrap().to_string()) - }); - err.span_suggestion( - lo.to(self.prev_span), - "try parenthesizing the first index", - sugg, - Applicability::MachineApplicable - ); - } - return Err(err); - - } - _ => { - // FIXME Could factor this out into non_fatal_unexpected or something. - let actual = self.this_token_to_string(); - self.span_err(self.token.span, &format!("unexpected token: `{}`", actual)); - } - } - continue; - } - if self.expr_is_complete(&e) { break; } - match self.token.kind { - // expr(...) - token::OpenDelim(token::Paren) => { - let seq = self.parse_paren_expr_seq().map(|es| { - let nd = self.mk_call(e, es); - let hi = self.prev_span; - self.mk_expr(lo.to(hi), nd, ThinVec::new()) - }); - e = self.recover_seq_parse_error(token::Paren, lo, seq); - } - - // expr[...] - // Could be either an index expression or a slicing expression. 
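// Each pass through the loop above wraps the expression built so far in one more
// postfix form: `?`, `.field`, a call `(args)`, or an index `[expr]` all take the
// previously parsed expression as their receiver. A standalone sketch of that
// shape over a toy AST (types invented here, not rustc's):
enum Trailer { Try, Field(String), Call(Vec<PostfixAst>), Index(Box<PostfixAst>) }

enum PostfixAst { Name(String), Postfix(Box<PostfixAst>, Trailer) }

fn apply_trailers(mut e: PostfixAst, trailers: Vec<Trailer>) -> PostfixAst {
    // Left to right, so `a.b(c)[0]?` nests as ((((a.b)(c))[0])?), mirroring the
    // repeated `e = self.mk_expr(lo.to(hi), ..., ...)` assignments above.
    for t in trailers {
        e = PostfixAst::Postfix(Box::new(e), t);
    }
    e
}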
- token::OpenDelim(token::Bracket) => { - self.bump(); - let ix = self.parse_expr()?; - hi = self.token.span; - self.expect(&token::CloseDelim(token::Bracket))?; - let index = self.mk_index(e, ix); - e = self.mk_expr(lo.to(hi), index, ThinVec::new()) - } - _ => return Ok(e) - } - } - return Ok(e); - } - - /// Assuming we have just parsed `.`, continue parsing into an expression. - fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - if self.token.span.rust_2018() && self.eat_keyword(kw::Await) { - return self.mk_await_expr(self_arg, lo); - } - - let segment = self.parse_path_segment(PathStyle::Expr)?; - self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren)); - - Ok(match self.token.kind { - token::OpenDelim(token::Paren) => { - // Method call `expr.f()` - let mut args = self.parse_paren_expr_seq()?; - args.insert(0, self_arg); - - let span = lo.to(self.prev_span); - self.mk_expr(span, ExprKind::MethodCall(segment, args), ThinVec::new()) - } - _ => { - // Field access `expr.f` - if let Some(args) = segment.args { - self.span_err(args.span(), - "field expressions may not have generic arguments"); - } - - let span = lo.to(self.prev_span); - self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new()) - } - }) - } - - /// At the bottom (top?) of the precedence hierarchy, - /// Parses things like parenthesized exprs, macros, `return`, etc. - /// - /// N.B., this does not parse outer attributes, and is private because it only works - /// correctly if called from `parse_dot_or_call_expr()`. - fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> { - maybe_recover_from_interpolated_ty_qpath!(self, true); - maybe_whole_expr!(self); - - // Outer attributes are already parsed and will be - // added to the return value after the fact. - // - // Therefore, prevent sub-parser from parsing - // attributes by giving them a empty "already-parsed" list. - let mut attrs = ThinVec::new(); - - let lo = self.token.span; - let mut hi = self.token.span; - - let ex: ExprKind; - - macro_rules! parse_lit { - () => { - match self.parse_lit() { - Ok(literal) => { - hi = self.prev_span; - ex = ExprKind::Lit(literal); - } - Err(mut err) => { - err.cancel(); - return Err(self.expected_expression_found()); - } - } - } - } - - // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. - match self.token.kind { - // This match arm is a special-case of the `_` match arm below and - // could be removed without changing functionality, but it's faster - // to have it here, especially for programs with large constants. - token::Literal(_) => { - parse_lit!() - } - token::OpenDelim(token::Paren) => { - self.bump(); - - attrs.extend(self.parse_inner_attributes()?); - - // `(e)` is parenthesized `e`. - // `(e,)` is a tuple with only one field, `e`. - let mut es = vec![]; - let mut trailing_comma = false; - let mut recovered = false; - while self.token != token::CloseDelim(token::Paren) { - es.push(match self.parse_expr() { - Ok(es) => es, - Err(mut err) => { - // Recover from parse error in tuple list. 
- match self.token.kind { - token::Ident(name, false) - if name == kw::Underscore && self.look_ahead(1, |t| { - t == &token::Comma - }) => { - // Special-case handling of `Foo<(_, _, _)>` - err.emit(); - let sp = self.token.span; - self.bump(); - self.mk_expr(sp, ExprKind::Err, ThinVec::new()) - } - _ => return Ok( - self.recover_seq_parse_error(token::Paren, lo, Err(err)), - ), - } - } - }); - recovered = self.expect_one_of( - &[], - &[token::Comma, token::CloseDelim(token::Paren)], - )?; - if self.eat(&token::Comma) { - trailing_comma = true; - } else { - trailing_comma = false; - break; - } - } - if !recovered { - self.bump(); - } - - hi = self.prev_span; - ex = if es.len() == 1 && !trailing_comma { - ExprKind::Paren(es.into_iter().nth(0).unwrap()) - } else { - ExprKind::Tup(es) - }; - } - token::OpenDelim(token::Brace) => { - return self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs); - } - token::BinOp(token::Or) | token::OrOr => { - return self.parse_closure_expr(attrs); - } - token::OpenDelim(token::Bracket) => { - self.bump(); - - attrs.extend(self.parse_inner_attributes()?); - - if self.eat(&token::CloseDelim(token::Bracket)) { - // Empty vector - ex = ExprKind::Array(Vec::new()); - } else { - // Non-empty vector - let first_expr = self.parse_expr()?; - if self.eat(&token::Semi) { - // Repeating array syntax: `[ 0; 512 ]` - let count = AnonConst { - id: DUMMY_NODE_ID, - value: self.parse_expr()?, - }; - self.expect(&token::CloseDelim(token::Bracket))?; - ex = ExprKind::Repeat(first_expr, count); - } else if self.eat(&token::Comma) { - // Vector with two or more elements - let remaining_exprs = self.parse_seq_to_end( - &token::CloseDelim(token::Bracket), - SeqSep::trailing_allowed(token::Comma), - |p| Ok(p.parse_expr()?) - )?; - let mut exprs = vec![first_expr]; - exprs.extend(remaining_exprs); - ex = ExprKind::Array(exprs); - } else { - // Vector with one element - self.expect(&token::CloseDelim(token::Bracket))?; - ex = ExprKind::Array(vec![first_expr]); - } - } - hi = self.prev_span; - } - _ => { - if self.eat_lt() { - let (qself, path) = self.parse_qpath(PathStyle::Expr)?; - hi = path.span; - return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs)); - } - if self.token.is_path_start() { - let path = self.parse_path(PathStyle::Expr)?; - - // `!`, as an operator, is prefix, so we know this isn't that. 
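// The parenthesis handling above is a comma-separated sequence with an optional
// trailing comma, and that trailing comma is what distinguishes a parenthesized
// expression `(e)` from a one-element tuple `(e,)`. A standalone sketch of the
// rule over pre-split string elements (names invented for the sketch):
enum ParenGroup { Parenthesized(String), Tuple(Vec<String>) }

/// Splits `"a, b,"`-style input on commas, tolerating one trailing comma and
/// reporting whether it was present.
fn split_comma_seq(input: &str) -> (Vec<String>, bool) {
    let trailing_comma = input.trim_end().ends_with(',');
    let elements = input
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .map(|s| s.to_string())
        .collect();
    (elements, trailing_comma)
}

fn classify_paren_group(elements: Vec<String>, trailing_comma: bool) -> ParenGroup {
    if elements.len() == 1 && !trailing_comma {
        ParenGroup::Parenthesized(elements.into_iter().next().unwrap())
    } else {
        ParenGroup::Tuple(elements) // covers `()`, `(e,)` and `(a, b, ...)`
    }
}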
- if self.eat(&token::Not) { - // MACRO INVOCATION expression - let (delim, tts) = self.expect_delimited_token_tree()?; - hi = self.prev_span; - ex = ExprKind::Mac(Mac { - path, - tts, - delim, - span: lo.to(hi), - prior_type_ascription: self.last_type_ascription, - }); - } else if self.check(&token::OpenDelim(token::Brace)) { - if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) { - return expr; - } else { - hi = path.span; - ex = ExprKind::Path(None, path); - } - } else { - hi = path.span; - ex = ExprKind::Path(None, path); - } - - let expr = self.mk_expr(lo.to(hi), ex, attrs); - return self.maybe_recover_from_bad_qpath(expr, true); - } - if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) { - return self.parse_closure_expr(attrs); - } - if self.eat_keyword(kw::If) { - return self.parse_if_expr(attrs); - } - if self.eat_keyword(kw::For) { - let lo = self.prev_span; - return self.parse_for_expr(None, lo, attrs); - } - if self.eat_keyword(kw::While) { - let lo = self.prev_span; - return self.parse_while_expr(None, lo, attrs); - } - if let Some(label) = self.eat_label() { - let lo = label.ident.span; - self.expect(&token::Colon)?; - if self.eat_keyword(kw::While) { - return self.parse_while_expr(Some(label), lo, attrs) - } - if self.eat_keyword(kw::For) { - return self.parse_for_expr(Some(label), lo, attrs) - } - if self.eat_keyword(kw::Loop) { - return self.parse_loop_expr(Some(label), lo, attrs) - } - if self.token == token::OpenDelim(token::Brace) { - return self.parse_block_expr(Some(label), - lo, - BlockCheckMode::Default, - attrs); - } - let msg = "expected `while`, `for`, `loop` or `{` after a label"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - return Err(err); - } - if self.eat_keyword(kw::Loop) { - let lo = self.prev_span; - return self.parse_loop_expr(None, lo, attrs); - } - if self.eat_keyword(kw::Continue) { - let label = self.eat_label(); - let ex = ExprKind::Continue(label); - let hi = self.prev_span; - return Ok(self.mk_expr(lo.to(hi), ex, attrs)); - } - if self.eat_keyword(kw::Match) { - let match_sp = self.prev_span; - return self.parse_match_expr(attrs).map_err(|mut err| { - err.span_label(match_sp, "while parsing this match expression"); - err - }); - } - if self.eat_keyword(kw::Unsafe) { - return self.parse_block_expr( - None, - lo, - BlockCheckMode::Unsafe(ast::UserProvided), - attrs); - } - if self.is_do_catch_block() { - let mut db = self.fatal("found removed `do catch` syntax"); - db.help("following RFC #2388, the new non-placeholder syntax is `try`"); - return Err(db); - } - if self.is_try_block() { - let lo = self.token.span; - assert!(self.eat_keyword(kw::Try)); - return self.parse_try_block(lo, attrs); - } - - // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly. - let is_span_rust_2018 = self.token.span.rust_2018(); - if is_span_rust_2018 && self.check_keyword(kw::Async) { - return if self.is_async_block() { // Check for `async {` and `async move {`. - self.parse_async_block(attrs) - } else { - self.parse_closure_expr(attrs) - }; - } - if self.eat_keyword(kw::Return) { - if self.token.can_begin_expr() { - let e = self.parse_expr()?; - hi = e.span; - ex = ExprKind::Ret(Some(e)); - } else { - ex = ExprKind::Ret(None); - } - } else if self.eat_keyword(kw::Break) { - let label = self.eat_label(); - let e = if self.token.can_begin_expr() - && !(self.token == token::OpenDelim(token::Brace) - && self.restrictions.contains( - Restrictions::NO_STRUCT_LITERAL)) { - Some(self.parse_expr()?) 
- } else { - None - }; - ex = ExprKind::Break(label, e); - hi = self.prev_span; - } else if self.eat_keyword(kw::Yield) { - if self.token.can_begin_expr() { - let e = self.parse_expr()?; - hi = e.span; - ex = ExprKind::Yield(Some(e)); - } else { - ex = ExprKind::Yield(None); - } - - let span = lo.to(hi); - self.sess.gated_spans.gate(sym::generators, span); - } else if self.eat_keyword(kw::Let) { - return self.parse_let_expr(attrs); - } else if is_span_rust_2018 && self.eat_keyword(kw::Await) { - let (await_hi, e_kind) = self.parse_incorrect_await_syntax(lo, self.prev_span)?; - hi = await_hi; - ex = e_kind; - } else { - if !self.unclosed_delims.is_empty() && self.check(&token::Semi) { - // Don't complain about bare semicolons after unclosed braces - // recovery in order to keep the error count down. Fixing the - // delimiters will possibly also fix the bare semicolon found in - // expression context. For example, silence the following error: - // - // error: expected expression, found `;` - // --> file.rs:2:13 - // | - // 2 | foo(bar(; - // | ^ expected expression - self.bump(); - return Ok(self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new())); - } - parse_lit!() - } - } - } - - let expr = self.mk_expr(lo.to(hi), ex, attrs); - self.maybe_recover_from_bad_qpath(expr, true) - } - - /// Matches `lit = true | false | token_lit`. - pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> { - let mut recovered = None; - if self.token == token::Dot { - // Attempt to recover `.4` as `0.4`. - recovered = self.look_ahead(1, |next_token| { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) - = next_token.kind { - if self.token.span.hi() == next_token.span.lo() { - let s = String::from("0.") + &symbol.as_str(); - let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.token.span.to(next_token.span))); - } - } - None - }); - if let Some(token) = &recovered { - self.bump(); - self.struct_span_err(token.span, "float literals must have an integer part") - .span_suggestion( - token.span, - "must have an integer part", - pprust::token_to_string(token), - Applicability::MachineApplicable, - ) - .emit(); - } - } - - let token = recovered.as_ref().unwrap_or(&self.token); - match Lit::from_token(token) { - Ok(lit) => { - self.bump(); - Ok(lit) - } - Err(LitError::NotLiteral) => { - let msg = format!("unexpected token: {}", self.this_token_descr()); - Err(self.span_fatal(token.span, &msg)) - } - Err(err) => { - let span = token.span; - let lit = match token.kind { - token::Literal(lit) => lit, - _ => unreachable!(), - }; - self.bump(); - self.error_literal_from_token(err, lit, span); - // Pack possible quotes and prefixes from the original literal into - // the error literal's symbol so they can be pretty-printed faithfully. - let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); - let symbol = Symbol::intern(&suffixless_lit.to_string()); - let lit = token::Lit::new(token::Err, symbol, lit.suffix); - Lit::from_lit_token(lit, span).map_err(|_| unreachable!()) - } - } - } - - fn error_literal_from_token(&self, err: LitError, lit: token::Lit, span: Span) { - // Checks if `s` looks like i32 or u1234 etc. - fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { - s.len() > 1 - && s.starts_with(first_chars) - && s[1..].chars().all(|c| c.is_ascii_digit()) - } - - let token::Lit { kind, suffix, .. 
} = lit; - match err { - // `NotLiteral` is not an error by itself, so we don't report - // it and give the parser opportunity to try something else. - LitError::NotLiteral => {} - // `LexerError` *is* an error, but it was already reported - // by lexer, so here we don't report it the second time. - LitError::LexerError => {} - LitError::InvalidSuffix => { - self.expect_no_suffix( - span, - &format!("{} {} literal", kind.article(), kind.descr()), - suffix, - ); - } - LitError::InvalidIntSuffix => { - let suf = suffix.expect("suffix error with no suffix").as_str(); - if looks_like_width_suffix(&['i', 'u'], &suf) { - // If it looks like a width, try to be helpful. - let msg = format!("invalid width `{}` for integer literal", &suf[1..]); - self.struct_span_err(span, &msg) - .help("valid widths are 8, 16, 32, 64 and 128") - .emit(); - } else { - let msg = format!("invalid suffix `{}` for integer literal", suf); - self.struct_span_err(span, &msg) - .span_label(span, format!("invalid suffix `{}`", suf)) - .help("the suffix must be one of the integral types (`u32`, `isize`, etc)") - .emit(); - } - } - LitError::InvalidFloatSuffix => { - let suf = suffix.expect("suffix error with no suffix").as_str(); - if looks_like_width_suffix(&['f'], &suf) { - // If it looks like a width, try to be helpful. - let msg = format!("invalid width `{}` for float literal", &suf[1..]); - self.struct_span_err(span, &msg) - .help("valid widths are 32 and 64") - .emit(); - } else { - let msg = format!("invalid suffix `{}` for float literal", suf); - self.struct_span_err(span, &msg) - .span_label(span, format!("invalid suffix `{}`", suf)) - .help("valid suffixes are `f32` and `f64`") - .emit(); - } - } - LitError::NonDecimalFloat(base) => { - let descr = match base { - 16 => "hexadecimal", - 8 => "octal", - 2 => "binary", - _ => unreachable!(), - }; - self.struct_span_err(span, &format!("{} float literal is not supported", descr)) - .span_label(span, "not supported") - .emit(); - } - LitError::IntTooLarge => { - self.struct_span_err(span, "integer literal is too large") - .emit(); - } - } - } - - pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) { - if let Some(suf) = suffix { - let mut err = if kind == "a tuple index" - && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) - { - // #59553: warn instead of reject out of hand to allow the fix to percolate - // through the ecosystem when people fix their macros - let mut err = self.sess.span_diagnostic.struct_span_warn( - sp, - &format!("suffixes on {} are invalid", kind), - ); - err.note(&format!( - "`{}` is *temporarily* accepted on tuple index fields as it was \ - incorrectly accepted on stable for a few releases", - suf, - )); - err.help( - "on proc macros, you'll want to use `syn::Index::from` or \ - `proc_macro::Literal::*_unsuffixed` for code that will desugar \ - to tuple field access", - ); - err.note( - "for more context, see https://github.com/rust-lang/rust/issues/60210", - ); - err - } else { - self.struct_span_err(sp, &format!("suffixes on {} are invalid", kind)) - }; - err.span_label(sp, format!("invalid suffix `{}`", suf)); - err.emit(); - } - } - - /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). 
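// The suffix diagnostics above separate a suffix that merely looks like an integer
// width (`u7`, `i99`: report the valid widths) from an arbitrary unknown suffix
// (`1foo`). A standalone restatement of that classification, returning a plain
// string instead of a rustc diagnostic (names invented for the sketch):
fn describe_int_suffix(suffix: &str) -> String {
    let looks_like_width = suffix.len() > 1
        && (suffix.starts_with('i') || suffix.starts_with('u'))
        && suffix[1..].chars().all(|c| c.is_ascii_digit());
    if looks_like_width {
        format!(
            "invalid width `{}` for integer literal; valid widths are 8, 16, 32, 64 and 128",
            &suffix[1..]
        )
    } else {
        format!("invalid suffix `{}` for integer literal", suffix)
    }
}

// `describe_int_suffix("u7")` points at the width, while `describe_int_suffix("foo")`
// reports the whole suffix as unknown.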
- pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { - maybe_whole_expr!(self); - - let minus_lo = self.token.span; - let minus_present = self.eat(&token::BinOp(token::Minus)); - let lo = self.token.span; - let literal = self.parse_lit()?; - let hi = self.prev_span; - let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); - - if minus_present { - let minus_hi = self.prev_span; - let unary = self.mk_unary(UnOp::Neg, expr); - Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new())) - } else { - Ok(expr) - } - } - - /// Parses a block or unsafe block. - pub(super) fn parse_block_expr( - &mut self, - opt_label: Option<Label>, - lo: Span, - blk_mode: BlockCheckMode, - outer_attrs: ThinVec<Attribute>, - ) -> PResult<'a, P<Expr>> { - if let Some(label) = opt_label { - self.sess.gated_spans.gate(sym::label_break_value, label.ident.span); - } - - self.expect(&token::OpenDelim(token::Brace))?; - - let mut attrs = outer_attrs; - attrs.extend(self.parse_inner_attributes()?); - - let blk = self.parse_block_tail(lo, blk_mode)?; - Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs)) - } - - /// Parses a closure expression (e.g., `move |args| expr`). - fn parse_closure_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { - let lo = self.token.span; - - let movability = if self.eat_keyword(kw::Static) { - Movability::Static - } else { - Movability::Movable - }; - - let asyncness = if self.token.span.rust_2018() { - self.parse_asyncness() - } else { - IsAsync::NotAsync - }; - if asyncness.is_async() { - // Feature-gate `async ||` closures. - self.sess.gated_spans.gate(sym::async_closure, self.prev_span); - } - - let capture_clause = self.parse_capture_clause(); - let decl = self.parse_fn_block_decl()?; - let decl_hi = self.prev_span; - let body = match decl.output { - FunctionRetTy::Default(_) => { - let restrictions = self.restrictions - Restrictions::STMT_EXPR; - self.parse_expr_res(restrictions, None)? - }, - _ => { - // If an explicit return type is given, require a block to appear (RFC 968). - let body_lo = self.token.span; - self.parse_block_expr(None, body_lo, BlockCheckMode::Default, ThinVec::new())? - } - }; - - Ok(self.mk_expr( - lo.to(body.span), - ExprKind::Closure(capture_clause, asyncness, movability, decl, body, lo.to(decl_hi)), - attrs)) - } - - /// Parses an optional `move` prefix to a closure lke construct. - fn parse_capture_clause(&mut self) -> CaptureBy { - if self.eat_keyword(kw::Move) { - CaptureBy::Value - } else { - CaptureBy::Ref - } - } - - /// Parses the `|arg, arg|` header of a closure. - fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> { - let inputs_captures = { - if self.eat(&token::OrOr) { - Vec::new() - } else { - self.expect(&token::BinOp(token::Or))?; - let args = self.parse_seq_to_before_tokens( - &[&token::BinOp(token::Or), &token::OrOr], - SeqSep::trailing_allowed(token::Comma), - TokenExpectType::NoExpect, - |p| p.parse_fn_block_param() - )?.0; - self.expect_or()?; - args - } - }; - let output = self.parse_ret_ty(true)?; - - Ok(P(FnDecl { - inputs: inputs_captures, - output, - })) - } - - /// Parses a parameter in a closure header (e.g., `|arg, arg|`). - fn parse_fn_block_param(&mut self) -> PResult<'a, Param> { - let lo = self.token.span; - let attrs = self.parse_outer_attributes()?; - let pat = self.parse_pat(PARAM_EXPECTED)?; - let t = if self.eat(&token::Colon) { - self.parse_ty()? 
- } else { - P(Ty { - id: DUMMY_NODE_ID, - kind: TyKind::Infer, - span: self.prev_span, - }) - }; - let span = lo.to(self.token.span); - Ok(Param { - attrs: attrs.into(), - ty: t, - pat, - span, - id: DUMMY_NODE_ID, - is_placeholder: false, - }) - } - - /// Parses an `if` expression (`if` token already eaten). - fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { - let lo = self.prev_span; - let cond = self.parse_cond_expr()?; - - // Verify that the parsed `if` condition makes sense as a condition. If it is a block, then - // verify that the last statement is either an implicit return (no `;`) or an explicit - // return. This won't catch blocks with an explicit `return`, but that would be caught by - // the dead code lint. - if self.eat_keyword(kw::Else) || !cond.returns() { - let sp = self.sess.source_map().next_point(lo); - let mut err = self.diagnostic() - .struct_span_err(sp, "missing condition for `if` expression"); - err.span_label(sp, "expected if condition here"); - return Err(err) - } - let not_block = self.token != token::OpenDelim(token::Brace); - let thn = self.parse_block().map_err(|mut err| { - if not_block { - err.span_label(lo, "this `if` statement has a condition, but no block"); - } - err - })?; - let mut els: Option<P<Expr>> = None; - let mut hi = thn.span; - if self.eat_keyword(kw::Else) { - let elexpr = self.parse_else_expr()?; - hi = elexpr.span; - els = Some(elexpr); - } - Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs)) - } - - /// Parses the condition of a `if` or `while` expression. - fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> { - let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; - - if let ExprKind::Let(..) = cond.kind { - // Remove the last feature gating of a `let` expression since it's stable. - self.sess.gated_spans.ungate_last(sym::let_chains, cond.span); - } - - Ok(cond) - } - - /// Parses a `let $pat = $expr` pseudo-expression. - /// The `let` token has already been eaten. - fn parse_let_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { - let lo = self.prev_span; - let pat = self.parse_top_pat(GateOr::No)?; - self.expect(&token::Eq)?; - let expr = self.with_res( - Restrictions::NO_STRUCT_LITERAL, - |this| this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into()) - )?; - let span = lo.to(expr.span); - self.sess.gated_spans.gate(sym::let_chains, span); - Ok(self.mk_expr(span, ExprKind::Let(pat, expr), attrs)) - } - - /// Parses an `else { ... }` expression (`else` token already eaten). - fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> { - if self.eat_keyword(kw::If) { - return self.parse_if_expr(ThinVec::new()); - } else { - let blk = self.parse_block()?; - return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), ThinVec::new())); - } - } - - /// Parses a `for ... in` expression (`for` token already eaten). - fn parse_for_expr( - &mut self, - opt_label: Option<Label>, - span_lo: Span, - mut attrs: ThinVec<Attribute> - ) -> PResult<'a, P<Expr>> { - // Parse: `for <src_pat> in <src_expr> <src_loop_block>` - - // Record whether we are about to parse `for (`. - // This is used below for recovery in case of `for ( $stuff ) $block` - // in which case we will suggest `for $stuff $block`. 
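// Conditions and loop heads above are parsed under `Restrictions::NO_STRUCT_LITERAL`
// so that in `if x == S { ... }` the `{` opens the block rather than a struct
// literal, and `with_res` swaps the restriction set in only for the sub-parse.
// A standalone sketch of that save-and-restore pattern using a plain bitflag
// (names invented for the sketch):
const NO_STRUCT_LITERAL: u8 = 0b0001;

struct SketchParser { restrictions: u8 }

impl SketchParser {
    fn with_restrictions<T>(&mut self, r: u8, f: impl FnOnce(&mut Self) -> T) -> T {
        let old = self.restrictions;
        self.restrictions = r;
        let result = f(self);
        self.restrictions = old; // restored even if `f` recursed into nested parses
        result
    }

    fn struct_literal_allowed(&self) -> bool {
        self.restrictions & NO_STRUCT_LITERAL == 0
    }
}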
- let begin_paren = match self.token.kind { - token::OpenDelim(token::Paren) => Some(self.token.span), - _ => None, - }; - - let pat = self.parse_top_pat(GateOr::Yes)?; - if !self.eat_keyword(kw::In) { - let in_span = self.prev_span.between(self.token.span); - self.struct_span_err(in_span, "missing `in` in `for` loop") - .span_suggestion_short( - in_span, - "try adding `in` here", " in ".into(), - // has been misleading, at least in the past (closed Issue #48492) - Applicability::MaybeIncorrect - ) - .emit(); - } - let in_span = self.prev_span; - self.check_for_for_in_in_typo(in_span); - let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; - - let pat = self.recover_parens_around_for_head(pat, &expr, begin_paren); - - let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; - attrs.extend(iattrs); - - let hi = self.prev_span; - Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs)) - } - - /// Parses a `while` or `while let` expression (`while` token already eaten). - fn parse_while_expr( - &mut self, - opt_label: Option<Label>, - span_lo: Span, - mut attrs: ThinVec<Attribute> - ) -> PResult<'a, P<Expr>> { - let cond = self.parse_cond_expr()?; - let (iattrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(iattrs); - let span = span_lo.to(body.span); - Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs)) - } - - /// Parses `loop { ... }` (`loop` token already eaten). - fn parse_loop_expr( - &mut self, - opt_label: Option<Label>, - span_lo: Span, - mut attrs: ThinVec<Attribute> - ) -> PResult<'a, P<Expr>> { - let (iattrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(iattrs); - let span = span_lo.to(body.span); - Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs)) - } - - fn eat_label(&mut self) -> Option<Label> { - if let Some(ident) = self.token.lifetime() { - let span = self.token.span; - self.bump(); - Some(Label { ident: Ident::new(ident.name, span) }) - } else { - None - } - } - - /// Parses a `match ... { ... }` expression (`match` token already eaten). - fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { - let match_span = self.prev_span; - let lo = self.prev_span; - let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; - if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { - if self.token == token::Semi { - e.span_suggestion_short( - match_span, - "try removing this `match`", - String::new(), - Applicability::MaybeIncorrect // speculative - ); - } - return Err(e) - } - attrs.extend(self.parse_inner_attributes()?); - - let mut arms: Vec<Arm> = Vec::new(); - while self.token != token::CloseDelim(token::Brace) { - match self.parse_arm() { - Ok(arm) => arms.push(arm), - Err(mut e) => { - // Recover by skipping to the end of the block. - e.emit(); - self.recover_stmt(); - let span = lo.to(self.token.span); - if self.token == token::CloseDelim(token::Brace) { - self.bump(); - } - return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs)); - } - } - } - let hi = self.token.span; - self.bump(); - return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); - } - - pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> { - let attrs = self.parse_outer_attributes()?; - let lo = self.token.span; - let pat = self.parse_top_pat(GateOr::No)?; - let guard = if self.eat_keyword(kw::If) { - Some(self.parse_expr()?) 
- } else { - None - }; - let arrow_span = self.token.span; - self.expect(&token::FatArrow)?; - let arm_start_span = self.token.span; - - let expr = self.parse_expr_res(Restrictions::STMT_EXPR, None) - .map_err(|mut err| { - err.span_label(arrow_span, "while parsing the `match` arm starting here"); - err - })?; - - let require_comma = classify::expr_requires_semi_to_be_stmt(&expr) - && self.token != token::CloseDelim(token::Brace); - - let hi = self.token.span; - - if require_comma { - let cm = self.sess.source_map(); - self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) - .map_err(|mut err| { - match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) { - (Ok(ref expr_lines), Ok(ref arm_start_lines)) - if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col - && expr_lines.lines.len() == 2 - && self.token == token::FatArrow => { - // We check whether there's any trailing code in the parse span, - // if there isn't, we very likely have the following: - // - // X | &Y => "y" - // | -- - missing comma - // | | - // | arrow_span - // X | &X => "x" - // | - ^^ self.token.span - // | | - // | parsed until here as `"y" & X` - err.span_suggestion_short( - cm.next_point(arm_start_span), - "missing a comma here to end this `match` arm", - ",".to_owned(), - Applicability::MachineApplicable - ); - } - _ => { - err.span_label(arrow_span, - "while parsing the `match` arm starting here"); - } - } - err - })?; - } else { - self.eat(&token::Comma); - } - - Ok(ast::Arm { - attrs, - pat, - guard, - body: expr, - span: lo.to(hi), - id: DUMMY_NODE_ID, - is_placeholder: false, - }) - } - - /// Parses a `try {...}` expression (`try` token already eaten). - fn parse_try_block( - &mut self, - span_lo: Span, - mut attrs: ThinVec<Attribute> - ) -> PResult<'a, P<Expr>> { - let (iattrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(iattrs); - if self.eat_keyword(kw::Catch) { - let mut error = self.struct_span_err(self.prev_span, - "keyword `catch` cannot follow a `try` block"); - error.help("try using `match` on the result of the `try` block instead"); - error.emit(); - Err(error) - } else { - let span = span_lo.to(body.span); - self.sess.gated_spans.gate(sym::try_blocks, span); - Ok(self.mk_expr(span, ExprKind::TryBlock(body), attrs)) - } - } - - fn is_do_catch_block(&self) -> bool { - self.token.is_keyword(kw::Do) && - self.is_keyword_ahead(1, &[kw::Catch]) && - self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) && - !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) - } - - fn is_try_block(&self) -> bool { - self.token.is_keyword(kw::Try) && - self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) && - self.token.span.rust_2018() && - // Prevent `while try {} {}`, `if try {} {} else {}`, etc. - !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) - } - - /// Parses an `async move? {...}` expression. 
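// `is_do_catch_block` and `is_try_block` above, and `is_async_block` below, decide
// between a contextual keyword and an ordinary identifier purely by peeking ahead
// without consuming anything, so `try` not followed by `{` still parses as a name.
// A standalone sketch of that lookahead shape over string tokens (invented here):
struct SketchPeeker<'a> { tokens: &'a [&'a str], pos: usize }

impl<'a> SketchPeeker<'a> {
    /// Looks `dist` tokens past the current one, like `Parser::look_ahead`.
    fn look_ahead(&self, dist: usize) -> Option<&'a str> {
        self.tokens.get(self.pos + dist).copied()
    }

    /// `async {` or `async move {` starts an async block; a bare `async` does not.
    fn is_async_block(&self) -> bool {
        self.look_ahead(0) == Some("async")
            && (self.look_ahead(1) == Some("{")
                || (self.look_ahead(1) == Some("move") && self.look_ahead(2) == Some("{")))
    }
}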
- fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { - let span_lo = self.token.span; - self.expect_keyword(kw::Async)?; - let capture_clause = self.parse_capture_clause(); - let (iattrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(iattrs); - Ok(self.mk_expr( - span_lo.to(body.span), - ExprKind::Async(capture_clause, DUMMY_NODE_ID, body), attrs)) - } - - fn is_async_block(&self) -> bool { - self.token.is_keyword(kw::Async) && - ( - ( // `async move {` - self.is_keyword_ahead(1, &[kw::Move]) && - self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) - ) || ( // `async {` - self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) - ) - ) - } - - fn maybe_parse_struct_expr( - &mut self, - lo: Span, - path: &ast::Path, - attrs: &ThinVec<Attribute>, - ) -> Option<PResult<'a, P<Expr>>> { - let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL); - let certainly_not_a_block = || self.look_ahead(1, |t| t.is_ident()) && ( - // `{ ident, ` cannot start a block. - self.look_ahead(2, |t| t == &token::Comma) || - self.look_ahead(2, |t| t == &token::Colon) && ( - // `{ ident: token, ` cannot start a block. - self.look_ahead(4, |t| t == &token::Comma) || - // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`. - self.look_ahead(3, |t| !t.can_begin_type()) - ) - ); - - if struct_allowed || certainly_not_a_block() { - // This is a struct literal, but we don't can't accept them here. - let expr = self.parse_struct_expr(lo, path.clone(), attrs.clone()); - if let (Ok(expr), false) = (&expr, struct_allowed) { - self.struct_span_err( - expr.span, - "struct literals are not allowed here", - ) - .multipart_suggestion( - "surround the struct literal with parentheses", - vec![ - (lo.shrink_to_lo(), "(".to_string()), - (expr.span.shrink_to_hi(), ")".to_string()), - ], - Applicability::MachineApplicable, - ) - .emit(); - } - return Some(expr); - } - None - } - - pub(super) fn parse_struct_expr( - &mut self, - lo: Span, - pth: ast::Path, - mut attrs: ThinVec<Attribute> - ) -> PResult<'a, P<Expr>> { - let struct_sp = lo.to(self.prev_span); - self.bump(); - let mut fields = Vec::new(); - let mut base = None; - - attrs.extend(self.parse_inner_attributes()?); - - while self.token != token::CloseDelim(token::Brace) { - if self.eat(&token::DotDot) { - let exp_span = self.prev_span; - match self.parse_expr() { - Ok(e) => { - base = Some(e); - } - Err(mut e) => { - e.emit(); - self.recover_stmt(); - } - } - if self.token == token::Comma { - self.struct_span_err( - exp_span.to(self.prev_span), - "cannot use a comma after the base struct", - ) - .span_suggestion_short( - self.token.span, - "remove this comma", - String::new(), - Applicability::MachineApplicable - ) - .note("the base struct must always be the last field") - .emit(); - self.recover_stmt(); - } - break; - } - - let mut recovery_field = None; - if let token::Ident(name, _) = self.token.kind { - if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) { - // Use in case of error after field-looking code: `S { foo: () with a }`. 
- recovery_field = Some(ast::Field { - ident: Ident::new(name, self.token.span), - span: self.token.span, - expr: self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()), - is_shorthand: false, - attrs: ThinVec::new(), - id: DUMMY_NODE_ID, - is_placeholder: false, - }); - } - } - let mut parsed_field = None; - match self.parse_field() { - Ok(f) => parsed_field = Some(f), - Err(mut e) => { - e.span_label(struct_sp, "while parsing this struct"); - e.emit(); - - // If the next token is a comma, then try to parse - // what comes next as additional fields, rather than - // bailing out until next `}`. - if self.token != token::Comma { - self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore); - if self.token != token::Comma { - break; - } - } - } - } - - match self.expect_one_of(&[token::Comma], - &[token::CloseDelim(token::Brace)]) { - Ok(_) => if let Some(f) = parsed_field.or(recovery_field) { - // Only include the field if there's no parse error for the field name. - fields.push(f); - } - Err(mut e) => { - if let Some(f) = recovery_field { - fields.push(f); - } - e.span_label(struct_sp, "while parsing this struct"); - e.emit(); - self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore); - self.eat(&token::Comma); - } - } - } - - let span = lo.to(self.token.span); - self.expect(&token::CloseDelim(token::Brace))?; - return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs)); - } - - /// Parses `ident (COLON expr)?`. - fn parse_field(&mut self) -> PResult<'a, Field> { - let attrs = self.parse_outer_attributes()?; - let lo = self.token.span; - - // Check if a colon exists one ahead. This means we're parsing a fieldname. - let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| { - t == &token::Colon || t == &token::Eq - }) { - let fieldname = self.parse_field_name()?; - - // Check for an equals token. This means the source incorrectly attempts to - // initialize a field with an eq rather than a colon. - if self.token == token::Eq { - self.diagnostic() - .struct_span_err(self.token.span, "expected `:`, found `=`") - .span_suggestion( - fieldname.span.shrink_to_hi().to(self.token.span), - "replace equals symbol with a colon", - ":".to_string(), - Applicability::MachineApplicable, - ) - .emit(); - } - self.bump(); // `:` - (fieldname, self.parse_expr()?, false) - } else { - let fieldname = self.parse_ident_common(false)?; - - // Mimic `x: x` for the `x` field shorthand. 
- let path = ast::Path::from_ident(fieldname); - let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new()); - (fieldname, expr, true) - }; - Ok(ast::Field { - ident: fieldname, - span: lo.to(expr.span), - expr, - is_shorthand, - attrs: attrs.into(), - id: DUMMY_NODE_ID, - is_placeholder: false, - }) - } - - fn err_dotdotdot_syntax(&self, span: Span) { - self.struct_span_err(span, "unexpected token: `...`") - .span_suggestion( - span, - "use `..` for an exclusive range", "..".to_owned(), - Applicability::MaybeIncorrect - ) - .span_suggestion( - span, - "or `..=` for an inclusive range", "..=".to_owned(), - Applicability::MaybeIncorrect - ) - .emit(); - } - - fn err_larrow_operator(&self, span: Span) { - self.struct_span_err( - span, - "unexpected token: `<-`" - ).span_suggestion( - span, - "if you meant to write a comparison against a negative value, add a \ - space in between `<` and `-`", - "< -".to_string(), - Applicability::MaybeIncorrect - ).emit(); - } - - fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind { - ExprKind::AssignOp(binop, lhs, rhs) - } - - fn mk_range( - &self, - start: Option<P<Expr>>, - end: Option<P<Expr>>, - limits: RangeLimits - ) -> PResult<'a, ExprKind> { - if end.is_none() && limits == RangeLimits::Closed { - Err(self.span_fatal_err(self.token.span, Error::InclusiveRangeWithNoEnd)) - } else { - Ok(ExprKind::Range(start, end, limits)) - } - } - - fn mk_unary(&self, unop: UnOp, expr: P<Expr>) -> ExprKind { - ExprKind::Unary(unop, expr) - } - - fn mk_binary(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind { - ExprKind::Binary(binop, lhs, rhs) - } - - fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ExprKind { - ExprKind::Index(expr, idx) - } - - fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ExprKind { - ExprKind::Call(f, args) - } - - fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - let span = lo.to(self.prev_span); - let await_expr = self.mk_expr(span, ExprKind::Await(self_arg), ThinVec::new()); - self.recover_from_await_method_call(); - Ok(await_expr) - } - - crate fn mk_expr(&self, span: Span, kind: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> { - P(Expr { kind, span, attrs, id: DUMMY_NODE_ID }) - } - - pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> { - self.mk_expr(span, ExprKind::Err, ThinVec::new()) - } -} diff --git a/src/libsyntax/parse/parser/generics.rs b/src/libsyntax/parse/parser/generics.rs deleted file mode 100644 index ae9ecd8fe39..00000000000 --- a/src/libsyntax/parse/parser/generics.rs +++ /dev/null @@ -1,309 +0,0 @@ -use super::Parser; - -use crate::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute}; -use crate::token; -use crate::source_map::DUMMY_SP; - -use syntax_pos::symbol::{kw, sym}; - -use errors::PResult; - -impl<'a> Parser<'a> { - /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. - /// - /// ``` - /// BOUND = LT_BOUND (e.g., `'a`) - /// ``` - fn parse_lt_param_bounds(&mut self) -> GenericBounds { - let mut lifetimes = Vec::new(); - while self.check_lifetime() { - lifetimes.push(ast::GenericBound::Outlives(self.expect_lifetime())); - - if !self.eat_plus() { - break - } - } - lifetimes - } - - /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`. - fn parse_ty_param(&mut self, - preceding_attrs: Vec<Attribute>) - -> PResult<'a, GenericParam> { - let ident = self.parse_ident()?; - - // Parse optional colon and param bounds. 
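The expression forms handled by `parse_field`, `err_dotdotdot_syntax`, and `mk_range` above look like this when written out; only the accepted spellings are shown, and `Point` and its bindings are invented names:

```
struct Point { x: i32, y: i32 }

fn main() {
    let (x, y) = (1, 4);

    // Field initializers use `:`; `Point { x = 1 }` is recovered with
    // "expected `:`, found `=`". The shorthand `Point { x, y }` is treated
    // exactly like `Point { x: x, y: y }`.
    let p = Point { x, y };

    // `..` is an exclusive range and `..=` an inclusive one; `...` is
    // rejected in expressions, and an inclusive range must have an end.
    let exclusive: Vec<i32> = (0..p.x).collect();
    let inclusive: Vec<i32> = (0..=p.y).collect();
    println!("{:?} {:?}", exclusive, inclusive);
}
```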
- let bounds = if self.eat(&token::Colon) { - self.parse_generic_bounds(Some(self.prev_span))? - } else { - Vec::new() - }; - - let default = if self.eat(&token::Eq) { - Some(self.parse_ty()?) - } else { - None - }; - - Ok(GenericParam { - ident, - id: ast::DUMMY_NODE_ID, - attrs: preceding_attrs.into(), - bounds, - kind: GenericParamKind::Type { - default, - }, - is_placeholder: false - }) - } - - fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> { - let lo = self.token.span; - - self.expect_keyword(kw::Const)?; - let ident = self.parse_ident()?; - self.expect(&token::Colon)?; - let ty = self.parse_ty()?; - - self.sess.gated_spans.gate(sym::const_generics, lo.to(self.prev_span)); - - Ok(GenericParam { - ident, - id: ast::DUMMY_NODE_ID, - attrs: preceding_attrs.into(), - bounds: Vec::new(), - kind: GenericParamKind::Const { - ty, - }, - is_placeholder: false - }) - } - - /// Parses a (possibly empty) list of lifetime and type parameters, possibly including - /// a trailing comma and erroneous trailing attributes. - pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { - let mut params = Vec::new(); - loop { - let attrs = self.parse_outer_attributes()?; - if self.check_lifetime() { - let lifetime = self.expect_lifetime(); - // Parse lifetime parameter. - let bounds = if self.eat(&token::Colon) { - self.parse_lt_param_bounds() - } else { - Vec::new() - }; - params.push(ast::GenericParam { - ident: lifetime.ident, - id: lifetime.id, - attrs: attrs.into(), - bounds, - kind: ast::GenericParamKind::Lifetime, - is_placeholder: false - }); - } else if self.check_keyword(kw::Const) { - // Parse const parameter. - params.push(self.parse_const_param(attrs)?); - } else if self.check_ident() { - // Parse type parameter. - params.push(self.parse_ty_param(attrs)?); - } else if self.token.can_begin_type() { - // Trying to write an associated type bound? (#26271) - let snapshot = self.clone(); - match self.parse_ty_where_predicate() { - Ok(where_predicate) => { - self.struct_span_err( - where_predicate.span(), - "bounds on associated types do not belong here", - ) - .span_label(where_predicate.span(), "belongs in `where` clause") - .emit(); - } - Err(mut err) => { - err.cancel(); - std::mem::replace(self, snapshot); - break - } - } - } else { - // Check for trailing attributes and stop parsing. - if !attrs.is_empty() { - if !params.is_empty() { - self.struct_span_err( - attrs[0].span, - "trailing attribute after generic parameter", - ) - .span_label(attrs[0].span, "attributes must go before parameters") - .emit(); - } else { - self.struct_span_err( - attrs[0].span, - &format!("attribute without generic parameters"), - ) - .span_label( - attrs[0].span, - "attributes are only permitted when preceding parameters", - ) - .emit(); - } - } - break - } - - if !self.eat(&token::Comma) { - break - } - } - Ok(params) - } - - /// Parses a set of optional generic type parameter declarations. Where - /// clauses are not parsed here, and must be added later via - /// `parse_where_clause()`. - /// - /// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > ) - /// | ( < lifetimes , typaramseq ( , )? 
> ) - /// where typaramseq = ( typaram ) | ( typaram , typaramseq ) - pub(super) fn parse_generics(&mut self) -> PResult<'a, ast::Generics> { - let span_lo = self.token.span; - let (params, span) = if self.eat_lt() { - let params = self.parse_generic_params()?; - self.expect_gt()?; - (params, span_lo.to(self.prev_span)) - } else { - (vec![], self.prev_span.between(self.token.span)) - }; - Ok(ast::Generics { - params, - where_clause: WhereClause { - predicates: Vec::new(), - span: DUMMY_SP, - }, - span, - }) - } - - /// Parses an optional where-clause and places it in `generics`. - /// - /// ```ignore (only-for-syntax-highlight) - /// where T : Trait<U, V> + 'b, 'a : 'b - /// ``` - pub(super) fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> { - let mut where_clause = WhereClause { - predicates: Vec::new(), - span: self.prev_span.to(self.prev_span), - }; - - if !self.eat_keyword(kw::Where) { - return Ok(where_clause); - } - let lo = self.prev_span; - - // We are considering adding generics to the `where` keyword as an alternative higher-rank - // parameter syntax (as in `where<'a>` or `where<T>`. To avoid that being a breaking - // change we parse those generics now, but report an error. - if self.choose_generics_over_qpath() { - let generics = self.parse_generics()?; - self.struct_span_err( - generics.span, - "generic parameters on `where` clauses are reserved for future use", - ) - .span_label(generics.span, "currently unsupported") - .emit(); - } - - loop { - let lo = self.token.span; - if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) { - let lifetime = self.expect_lifetime(); - // Bounds starting with a colon are mandatory, but possibly empty. - self.expect(&token::Colon)?; - let bounds = self.parse_lt_param_bounds(); - where_clause.predicates.push(ast::WherePredicate::RegionPredicate( - ast::WhereRegionPredicate { - span: lo.to(self.prev_span), - lifetime, - bounds, - } - )); - } else if self.check_type() { - where_clause.predicates.push(self.parse_ty_where_predicate()?); - } else { - break - } - - if !self.eat(&token::Comma) { - break - } - } - - where_clause.span = lo.to(self.prev_span); - Ok(where_clause) - } - - fn parse_ty_where_predicate(&mut self) -> PResult<'a, ast::WherePredicate> { - let lo = self.token.span; - // Parse optional `for<'a, 'b>`. - // This `for` is parsed greedily and applies to the whole predicate, - // the bounded type can have its own `for` applying only to it. - // Examples: - // * `for<'a> Trait1<'a>: Trait2<'a /* ok */>` - // * `(for<'a> Trait1<'a>): Trait2<'a /* not ok */>` - // * `for<'a> for<'b> Trait1<'a, 'b>: Trait2<'a /* ok */, 'b /* not ok */>` - let lifetime_defs = self.parse_late_bound_lifetime_defs()?; - - // Parse type with mandatory colon and (possibly empty) bounds, - // or with mandatory equality sign and the second type. - let ty = self.parse_ty()?; - if self.eat(&token::Colon) { - let bounds = self.parse_generic_bounds(Some(self.prev_span))?; - Ok(ast::WherePredicate::BoundPredicate( - ast::WhereBoundPredicate { - span: lo.to(self.prev_span), - bound_generic_params: lifetime_defs, - bounded_ty: ty, - bounds, - } - )) - // FIXME: Decide what should be used here, `=` or `==`. - // FIXME: We are just dropping the binders in lifetime_defs on the floor here. 
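The generic-parameter and `where`-clause grammar sketched in the comments above accepts declarations like the following; all item names are invented, and `const` generic parameters are left out because they were still gated behind `const_generics` at this point:

```
use std::fmt::Debug;

// Lifetime parameters with bounds, a type parameter with bounds and a
// default, and a trailing comma are all accepted by `parse_generic_params`.
struct Wrapper<'a, 'b: 'a, T: Debug + 'a = i32,> {
    long_lived: &'a T,
    short_lived: &'b str,
}

// `parse_where_clause` handles region predicates (`'b: 'a`) and bound
// predicates, including higher-ranked ones introduced with `for<'c>`.
fn show<'a, 'b, T, F>(w: &Wrapper<'a, 'b, T>, f: F)
where
    'b: 'a,
    T: Debug,
    F: for<'c> Fn(&'c T),
{
    f(w.long_lived);
    println!("{} / {:?}", w.short_lived, w.long_lived);
}

fn main() {
    let value = 42;
    let w = Wrapper { long_lived: &value, short_lived: "label" };
    show(&w, |t| println!("callback sees {:?}", t));
}
```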
- } else if self.eat(&token::Eq) || self.eat(&token::EqEq) { - let rhs_ty = self.parse_ty()?; - Ok(ast::WherePredicate::EqPredicate( - ast::WhereEqPredicate { - span: lo.to(self.prev_span), - lhs_ty: ty, - rhs_ty, - id: ast::DUMMY_NODE_ID, - } - )) - } else { - self.unexpected() - } - } - - pub(super) fn choose_generics_over_qpath(&self) -> bool { - // There's an ambiguity between generic parameters and qualified paths in impls. - // If we see `<` it may start both, so we have to inspect some following tokens. - // The following combinations can only start generics, - // but not qualified paths (with one exception): - // `<` `>` - empty generic parameters - // `<` `#` - generic parameters with attributes - // `<` (LIFETIME|IDENT) `>` - single generic parameter - // `<` (LIFETIME|IDENT) `,` - first generic parameter in a list - // `<` (LIFETIME|IDENT) `:` - generic parameter with bounds - // `<` (LIFETIME|IDENT) `=` - generic parameter with a default - // `<` const - generic const parameter - // The only truly ambiguous case is - // `<` IDENT `>` `::` IDENT ... - // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`) - // because this is what almost always expected in practice, qualified paths in impls - // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment. - self.token == token::Lt && - (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) || - self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) && - self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma || - t == &token::Colon || t == &token::Eq) || - self.is_keyword_ahead(1, &[kw::Const])) - } -} diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs deleted file mode 100644 index 531ad532a54..00000000000 --- a/src/libsyntax/parse/parser/item.rs +++ /dev/null @@ -1,2237 +0,0 @@ -use super::{Parser, PathStyle}; -use super::diagnostics::{Error, dummy_arg, ConsumeClosingDelim}; - -use crate::maybe_whole; -use crate::ptr::P; -use crate::ast::{self, Abi, DUMMY_NODE_ID, Ident, Attribute, AttrKind, AttrStyle, AnonConst, Item}; -use crate::ast::{ItemKind, ImplItem, ImplItemKind, TraitItem, TraitItemKind, UseTree, UseTreeKind}; -use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness}; -use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind}; -use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField}; -use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, FnSig, SelfKind, Param}; -use crate::parse::token; -use crate::tokenstream::{TokenTree, TokenStream}; -use crate::symbol::{kw, sym}; -use crate::source_map::{self, respan, Span}; -use crate::ThinVec; - -use log::debug; -use std::mem; -use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, StashKey}; -use syntax_pos::BytePos; - -/// Whether the type alias or associated type is a concrete type or an opaque type. -#[derive(Debug)] -pub(super) enum AliasKind { - /// Just a new name for the same type. - Weak(P<Ty>), - /// Only trait impls of the type will be usable, not the actual type itself. 
- OpaqueTy(GenericBounds), -} - -pub(super) type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>); - -impl<'a> Parser<'a> { - pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> { - let attrs = self.parse_outer_attributes()?; - self.parse_item_(attrs, true, false) - } - - pub(super) fn parse_item_( - &mut self, - attrs: Vec<Attribute>, - macros_allowed: bool, - attributes_allowed: bool, - ) -> PResult<'a, Option<P<Item>>> { - let mut unclosed_delims = vec![]; - let (ret, tokens) = self.collect_tokens(|this| { - let item = this.parse_item_implementation(attrs, macros_allowed, attributes_allowed); - unclosed_delims.append(&mut this.unclosed_delims); - item - })?; - self.unclosed_delims.append(&mut unclosed_delims); - - // Once we've parsed an item and recorded the tokens we got while - // parsing we may want to store `tokens` into the item we're about to - // return. Note, though, that we specifically didn't capture tokens - // related to outer attributes. The `tokens` field here may later be - // used with procedural macros to convert this item back into a token - // stream, but during expansion we may be removing attributes as we go - // along. - // - // If we've got inner attributes then the `tokens` we've got above holds - // these inner attributes. If an inner attribute is expanded we won't - // actually remove it from the token stream, so we'll just keep yielding - // it (bad!). To work around this case for now we just avoid recording - // `tokens` if we detect any inner attributes. This should help keep - // expansion correct, but we should fix this bug one day! - Ok(ret.map(|item| { - item.map(|mut i| { - if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { - i.tokens = Some(tokens); - } - i - }) - })) - } - - /// Parses one of the items allowed by the flags. 
- fn parse_item_implementation( - &mut self, - attrs: Vec<Attribute>, - macros_allowed: bool, - attributes_allowed: bool, - ) -> PResult<'a, Option<P<Item>>> { - maybe_whole!(self, NtItem, |item| { - let mut item = item.into_inner(); - let mut attrs = attrs; - mem::swap(&mut item.attrs, &mut attrs); - item.attrs.extend(attrs); - Some(P(item)) - }); - - let lo = self.token.span; - - let vis = self.parse_visibility(false)?; - - if self.eat_keyword(kw::Use) { - // USE ITEM - let item_ = ItemKind::Use(P(self.parse_use_tree()?)); - self.expect_semi()?; - - let span = lo.to(self.prev_span); - let item = self.mk_item(span, Ident::invalid(), item_, vis, attrs); - return Ok(Some(item)); - } - - if self.eat_keyword(kw::Extern) { - let extern_sp = self.prev_span; - if self.eat_keyword(kw::Crate) { - return Ok(Some(self.parse_item_extern_crate(lo, vis, attrs)?)); - } - - let abi = self.parse_opt_abi()?; - - if self.eat_keyword(kw::Fn) { - // EXTERN FUNCTION ITEM - let fn_span = self.prev_span; - let header = FnHeader { - unsafety: Unsafety::Normal, - asyncness: respan(fn_span, IsAsync::NotAsync), - constness: respan(fn_span, Constness::NotConst), - abi, - }; - return self.parse_item_fn(lo, vis, attrs, header); - } else if self.check(&token::OpenDelim(token::Brace)) { - return Ok(Some( - self.parse_item_foreign_mod(lo, abi, vis, attrs, extern_sp)?, - )); - } - - self.unexpected()?; - } - - if self.is_static_global() { - self.bump(); - // STATIC ITEM - let m = self.parse_mutability(); - let info = self.parse_item_const(Some(m))?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if self.eat_keyword(kw::Const) { - let const_span = self.prev_span; - if [kw::Fn, kw::Unsafe, kw::Extern].iter().any(|k| self.check_keyword(*k)) { - // CONST FUNCTION ITEM - let unsafety = self.parse_unsafety(); - - if self.check_keyword(kw::Extern) { - self.sess.gated_spans.gate(sym::const_extern_fn, lo.to(self.token.span)); - } - let abi = self.parse_extern_abi()?; - self.bump(); // `fn` - - let header = FnHeader { - unsafety, - asyncness: respan(const_span, IsAsync::NotAsync), - constness: respan(const_span, Constness::Const), - abi, - }; - return self.parse_item_fn(lo, vis, attrs, header); - } - - // CONST ITEM - if self.eat_keyword(kw::Mut) { - let prev_span = self.prev_span; - self.struct_span_err(prev_span, "const globals cannot be mutable") - .span_label(prev_span, "cannot be mutable") - .span_suggestion( - const_span, - "you might want to declare a static instead", - "static".to_owned(), - Applicability::MaybeIncorrect, - ) - .emit(); - } - - let info = self.parse_item_const(None)?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - // Parses `async unsafe? fn`. - if self.check_keyword(kw::Async) { - let async_span = self.token.span; - if self.is_keyword_ahead(1, &[kw::Fn]) - || self.is_keyword_ahead(2, &[kw::Fn]) - { - // ASYNC FUNCTION ITEM - self.bump(); // `async` - let unsafety = self.parse_unsafety(); // `unsafe`? 
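The item headers assembled on this path correspond to source like the sketch below (all names invented; the gated `const extern fn` combination is omitted). The recovery above also turns `const mut` into a suggestion to use `static` instead:

```
// Statics and constants: a `const` global cannot be `mut`; the recovery
// above points at `static` instead ("you might want to declare a static").
static GREETING: &str = "hello";
static mut COUNTER: u32 = 0;
const MAX_RETRIES: u32 = 3;

// Function headers built by this code: plain, `const`, `async`, `unsafe`,
// and `extern "C"` functions. `async fn` additionally requires the 2018
// edition or later.
const fn double(x: u32) -> u32 { x * 2 }
async fn fetch() -> u32 { MAX_RETRIES }
unsafe fn bump_counter() { COUNTER += 1; }
extern "C" fn callback(code: i32) -> i32 { code + 1 }

fn main() {
    let _pending = fetch(); // built but never awaited in this sketch
    unsafe { bump_counter(); }
    println!("{} {} {}", GREETING, double(MAX_RETRIES), callback(1));
}
```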
- self.expect_keyword(kw::Fn)?; // `fn` - let fn_span = self.prev_span; - let asyncness = respan(async_span, IsAsync::Async { - closure_id: DUMMY_NODE_ID, - return_impl_trait_id: DUMMY_NODE_ID, - }); - self.ban_async_in_2015(async_span); - let header = FnHeader { - unsafety, - asyncness, - constness: respan(fn_span, Constness::NotConst), - abi: Abi::new(sym::Rust, fn_span), - }; - return self.parse_item_fn(lo, vis, attrs, header); - } - } - - if self.check_keyword(kw::Unsafe) && - self.is_keyword_ahead(1, &[kw::Trait, kw::Auto]) - { - // UNSAFE TRAIT ITEM - self.bump(); // `unsafe` - let info = self.parse_item_trait(lo, Unsafety::Unsafe)?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if self.check_keyword(kw::Impl) || - self.check_keyword(kw::Unsafe) && - self.is_keyword_ahead(1, &[kw::Impl]) || - self.check_keyword(kw::Default) && - self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe]) - { - // IMPL ITEM - let defaultness = self.parse_defaultness(); - let unsafety = self.parse_unsafety(); - self.expect_keyword(kw::Impl)?; - let info = self.parse_item_impl(unsafety, defaultness)?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if self.check_keyword(kw::Fn) { - // FUNCTION ITEM - self.bump(); - let fn_span = self.prev_span; - let header = FnHeader { - unsafety: Unsafety::Normal, - asyncness: respan(fn_span, IsAsync::NotAsync), - constness: respan(fn_span, Constness::NotConst), - abi: Abi::new(sym::Rust, fn_span), - }; - return self.parse_item_fn(lo, vis, attrs, header); - } - - if self.check_keyword(kw::Unsafe) - && self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) - { - // UNSAFE FUNCTION ITEM - self.bump(); // `unsafe` - // `{` is also expected after `unsafe`; in case of error, include it in the diagnostic. - self.check(&token::OpenDelim(token::Brace)); - let abi = self.parse_extern_abi()?; - self.expect_keyword(kw::Fn)?; - let fn_span = self.prev_span; - let header = FnHeader { - unsafety: Unsafety::Unsafe, - asyncness: respan(fn_span, IsAsync::NotAsync), - constness: respan(fn_span, Constness::NotConst), - abi, - }; - return self.parse_item_fn(lo, vis, attrs, header); - } - - if self.eat_keyword(kw::Mod) { - // MODULE ITEM - let info = self.parse_item_mod(&attrs[..])?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if let Some(type_) = self.eat_type() { - let (ident, alias, generics) = type_?; - // TYPE ITEM - let item_ = match alias { - AliasKind::Weak(ty) => ItemKind::TyAlias(ty, generics), - AliasKind::OpaqueTy(bounds) => ItemKind::OpaqueTy(bounds, generics), - }; - let span = lo.to(self.prev_span); - return Ok(Some(self.mk_item(span, ident, item_, vis, attrs))); - } - - if self.eat_keyword(kw::Enum) { - // ENUM ITEM - let info = self.parse_item_enum()?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if self.check_keyword(kw::Trait) - || (self.check_keyword(kw::Auto) - && self.is_keyword_ahead(1, &[kw::Trait])) - { - // TRAIT ITEM - let info = self.parse_item_trait(lo, Unsafety::Normal)?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if self.eat_keyword(kw::Struct) { - // STRUCT ITEM - let info = self.parse_item_struct()?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if self.is_union_item() { - // UNION ITEM - self.bump(); - let info = self.parse_item_union()?; - return self.mk_item_with_info(attrs, lo, vis, info); - } - - if let Some(macro_def) = self.eat_macro_def(&attrs, &vis, lo)? 
{ - return Ok(Some(macro_def)); - } - - // Verify whether we have encountered a struct or method definition where the user forgot to - // add the `struct` or `fn` keyword after writing `pub`: `pub S {}` - if vis.node.is_pub() && - self.check_ident() && - self.look_ahead(1, |t| *t != token::Not) - { - // Space between `pub` keyword and the identifier - // - // pub S {} - // ^^^ `sp` points here - let sp = self.prev_span.between(self.token.span); - let full_sp = self.prev_span.to(self.token.span); - let ident_sp = self.token.span; - if self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) { - // possible public struct definition where `struct` was forgotten - let ident = self.parse_ident().unwrap(); - let msg = format!("add `struct` here to parse `{}` as a public struct", - ident); - let mut err = self.diagnostic() - .struct_span_err(sp, "missing `struct` for struct definition"); - err.span_suggestion_short( - sp, &msg, " struct ".into(), Applicability::MaybeIncorrect // speculative - ); - return Err(err); - } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) { - let ident = self.parse_ident().unwrap(); - self.bump(); // `(` - let kw_name = self.recover_first_param(); - self.consume_block(token::Paren, ConsumeClosingDelim::Yes); - let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) { - self.eat_to_tokens(&[&token::OpenDelim(token::Brace)]); - self.bump(); // `{` - ("fn", kw_name, false) - } else if self.check(&token::OpenDelim(token::Brace)) { - self.bump(); // `{` - ("fn", kw_name, false) - } else if self.check(&token::Colon) { - let kw = "struct"; - (kw, kw, false) - } else { - ("fn` or `struct", "function or struct", true) - }; - - let msg = format!("missing `{}` for {} definition", kw, kw_name); - let mut err = self.diagnostic().struct_span_err(sp, &msg); - if !ambiguous { - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); - let suggestion = format!("add `{}` here to parse `{}` as a public {}", - kw, - ident, - kw_name); - err.span_suggestion_short( - sp, &suggestion, format!(" {} ", kw), Applicability::MachineApplicable - ); - } else { - if let Ok(snippet) = self.span_to_snippet(ident_sp) { - err.span_suggestion( - full_sp, - "if you meant to call a macro, try", - format!("{}!", snippet), - // this is the `ambiguous` conditional branch - Applicability::MaybeIncorrect - ); - } else { - err.help("if you meant to call a macro, remove the `pub` \ - and add a trailing `!` after the identifier"); - } - } - return Err(err); - } else if self.look_ahead(1, |t| *t == token::Lt) { - let ident = self.parse_ident().unwrap(); - self.eat_to_tokens(&[&token::Gt]); - self.bump(); // `>` - let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) { - ("fn", self.recover_first_param(), false) - } else if self.check(&token::OpenDelim(token::Brace)) { - ("struct", "struct", false) - } else { - ("fn` or `struct", "function or struct", true) - }; - let msg = format!("missing `{}` for {} definition", kw, kw_name); - let mut err = self.diagnostic().struct_span_err(sp, &msg); - if !ambiguous { - err.span_suggestion_short( - sp, - &format!("add `{}` here to parse `{}` as a public {}", kw, ident, kw_name), - format!(" {} ", kw), - Applicability::MachineApplicable, - ); - } - return Err(err); - } - } - self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, vis) - } - - pub(super) fn mk_item_with_info( - &self, - attrs: Vec<Attribute>, - lo: Span, - vis: Visibility, - info: ItemInfo, - ) -> PResult<'a, Option<P<Item>>> { - let 
(ident, item, extra_attrs) = info; - let span = lo.to(self.prev_span); - let attrs = Self::maybe_append(attrs, extra_attrs); - Ok(Some(self.mk_item(span, ident, item, vis, attrs))) - } - - fn maybe_append<T>(mut lhs: Vec<T>, mut rhs: Option<Vec<T>>) -> Vec<T> { - if let Some(ref mut rhs) = rhs { - lhs.append(rhs); - } - lhs - } - - /// This is the fall-through for parsing items. - fn parse_macro_use_or_failure( - &mut self, - attrs: Vec<Attribute> , - macros_allowed: bool, - attributes_allowed: bool, - lo: Span, - visibility: Visibility - ) -> PResult<'a, Option<P<Item>>> { - if macros_allowed && self.token.is_path_start() && - !(self.is_async_fn() && self.token.span.rust_2015()) { - // MACRO INVOCATION ITEM - - let prev_span = self.prev_span; - self.complain_if_pub_macro(&visibility.node, prev_span); - - let mac_lo = self.token.span; - - // Item macro - let path = self.parse_path(PathStyle::Mod)?; - self.expect(&token::Not)?; - let (delim, tts) = self.expect_delimited_token_tree()?; - if delim != MacDelimiter::Brace && !self.eat(&token::Semi) { - self.report_invalid_macro_expansion_item(); - } - - let hi = self.prev_span; - let mac = Mac { - path, - tts, - delim, - span: mac_lo.to(hi), - prior_type_ascription: self.last_type_ascription, - }; - let item = - self.mk_item(lo.to(hi), Ident::invalid(), ItemKind::Mac(mac), visibility, attrs); - return Ok(Some(item)); - } - - // FAILURE TO PARSE ITEM - match visibility.node { - VisibilityKind::Inherited => {} - _ => { - return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`")); - } - } - - if !attributes_allowed && !attrs.is_empty() { - self.expected_item_err(&attrs)?; - } - Ok(None) - } - - /// Emits an expected-item-after-attributes error. - fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> { - let message = match attrs.last() { - Some(&Attribute { kind: AttrKind::DocComment(_), .. }) => - "expected item after doc comment", - _ => - "expected item after attributes", - }; - - let mut err = self.diagnostic().struct_span_err(self.prev_span, message); - if attrs.last().unwrap().is_doc_comment() { - err.span_label(self.prev_span, "this doc comment doesn't document anything"); - } - Err(err) - } - - pub(super) fn is_async_fn(&self) -> bool { - self.token.is_keyword(kw::Async) && - self.is_keyword_ahead(1, &[kw::Fn]) - } - - /// Parses a macro invocation inside a `trait`, `impl` or `extern` block. 
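The brace-or-semicolon rule applied to item-position macro invocations above plays out like this; `make_fn!`, `alpha`, and `beta` are invented for the example:

```
// A toy macro that expands to an item, just to drive the example.
macro_rules! make_fn {
    ($name:ident) => {
        fn $name() -> &'static str { stringify!($name) }
    };
}

// At item position a brace-delimited invocation needs no semicolon...
make_fn! { alpha }
// ...while `()` or `[]` delimiters require a trailing `;`, otherwise the
// parser reports that item macros "must be delimited with braces or
// followed by a semicolon".
make_fn!(beta);

fn main() {
    println!("{} {}", alpha(), beta());
}
```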
- fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>, - at_end: &mut bool) -> PResult<'a, Option<Mac>> - { - if self.token.is_path_start() && - !(self.is_async_fn() && self.token.span.rust_2015()) { - let prev_span = self.prev_span; - let lo = self.token.span; - let path = self.parse_path(PathStyle::Mod)?; - - if path.segments.len() == 1 { - if !self.eat(&token::Not) { - return Err(self.missing_assoc_item_kind_err(item_kind, prev_span)); - } - } else { - self.expect(&token::Not)?; - } - - if let Some(vis) = vis { - self.complain_if_pub_macro(&vis.node, prev_span); - } - - *at_end = true; - - // eat a matched-delimiter token tree: - let (delim, tts) = self.expect_delimited_token_tree()?; - if delim != MacDelimiter::Brace { - self.expect_semi()?; - } - - Ok(Some(Mac { - path, - tts, - delim, - span: lo.to(self.prev_span), - prior_type_ascription: self.last_type_ascription, - })) - } else { - Ok(None) - } - } - - fn missing_assoc_item_kind_err(&self, item_type: &str, prev_span: Span) - -> DiagnosticBuilder<'a> - { - let expected_kinds = if item_type == "extern" { - "missing `fn`, `type`, or `static`" - } else { - "missing `fn`, `type`, or `const`" - }; - - // Given this code `path(`, it seems like this is not - // setting the visibility of a macro invocation, but rather - // a mistyped method declaration. - // Create a diagnostic pointing out that `fn` is missing. - // - // x | pub path(&self) { - // | ^ missing `fn`, `type`, or `const` - // pub path( - // ^^ `sp` below will point to this - let sp = prev_span.between(self.prev_span); - let mut err = self.diagnostic().struct_span_err( - sp, - &format!("{} for {}-item declaration", - expected_kinds, item_type)); - err.span_label(sp, expected_kinds); - err - } - - /// Parses an implementation item, `impl` keyword is already parsed. - /// - /// impl<'a, T> TYPE { /* impl items */ } - /// impl<'a, T> TRAIT for TYPE { /* impl items */ } - /// impl<'a, T> !TRAIT for TYPE { /* impl items */ } - /// - /// We actually parse slightly more relaxed grammar for better error reporting and recovery. - /// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}` - /// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}` - fn parse_item_impl(&mut self, unsafety: Unsafety, defaultness: Defaultness) - -> PResult<'a, ItemInfo> { - // First, parse generic parameters if necessary. - let mut generics = if self.choose_generics_over_qpath() { - self.parse_generics()? - } else { - Generics::default() - }; - - // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type. - let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) { - self.bump(); // `!` - ast::ImplPolarity::Negative - } else { - ast::ImplPolarity::Positive - }; - - // Parse both types and traits as a type, then reinterpret if necessary. - let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span)); - let ty_first = if self.token.is_keyword(kw::For) && - self.look_ahead(1, |t| t != &token::Lt) { - let span = self.prev_span.between(self.token.span); - self.struct_span_err(span, "missing trait in a trait impl").emit(); - P(Ty { kind: TyKind::Path(None, err_path(span)), span, id: DUMMY_NODE_ID }) - } else { - self.parse_ty()? - }; - - // If `for` is missing we try to recover. 
- let has_for = self.eat_keyword(kw::For); - let missing_for_span = self.prev_span.between(self.token.span); - - let ty_second = if self.token == token::DotDot { - // We need to report this error after `cfg` expansion for compatibility reasons - self.bump(); // `..`, do not add it to expected tokens - Some(self.mk_ty(self.prev_span, TyKind::Err)) - } else if has_for || self.token.can_begin_type() { - Some(self.parse_ty()?) - } else { - None - }; - - generics.where_clause = self.parse_where_clause()?; - - let (impl_items, attrs) = self.parse_impl_body()?; - - let item_kind = match ty_second { - Some(ty_second) => { - // impl Trait for Type - if !has_for { - self.struct_span_err(missing_for_span, "missing `for` in a trait impl") - .span_suggestion_short( - missing_for_span, - "add `for` here", - " for ".to_string(), - Applicability::MachineApplicable, - ).emit(); - } - - let ty_first = ty_first.into_inner(); - let path = match ty_first.kind { - // This notably includes paths passed through `ty` macro fragments (#46438). - TyKind::Path(None, path) => path, - _ => { - self.span_err(ty_first.span, "expected a trait, found type"); - err_path(ty_first.span) - } - }; - let trait_ref = TraitRef { path, ref_id: ty_first.id }; - - ItemKind::Impl(unsafety, polarity, defaultness, - generics, Some(trait_ref), ty_second, impl_items) - } - None => { - // impl Type - ItemKind::Impl(unsafety, polarity, defaultness, - generics, None, ty_first, impl_items) - } - }; - - Ok((Ident::invalid(), item_kind, Some(attrs))) - } - - fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> { - self.expect(&token::OpenDelim(token::Brace))?; - let attrs = self.parse_inner_attributes()?; - - let mut impl_items = Vec::new(); - while !self.eat(&token::CloseDelim(token::Brace)) { - let mut at_end = false; - match self.parse_impl_item(&mut at_end) { - Ok(impl_item) => impl_items.push(impl_item), - Err(mut err) => { - err.emit(); - if !at_end { - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); - break; - } - } - } - } - Ok((impl_items, attrs)) - } - - /// Parses an impl item. - pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> { - maybe_whole!(self, NtImplItem, |x| x); - let attrs = self.parse_outer_attributes()?; - let mut unclosed_delims = vec![]; - let (mut item, tokens) = self.collect_tokens(|this| { - let item = this.parse_impl_item_(at_end, attrs); - unclosed_delims.append(&mut this.unclosed_delims); - item - })?; - self.unclosed_delims.append(&mut unclosed_delims); - - // See `parse_item` for why this clause is here. - if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { - item.tokens = Some(tokens); - } - Ok(item) - } - - fn parse_impl_item_( - &mut self, - at_end: &mut bool, - mut attrs: Vec<Attribute>, - ) -> PResult<'a, ImplItem> { - let lo = self.token.span; - let vis = self.parse_visibility(false)?; - let defaultness = self.parse_defaultness(); - let (name, kind, generics) = if let Some(type_) = self.eat_type() { - let (name, alias, generics) = type_?; - let kind = match alias { - AliasKind::Weak(typ) => ast::ImplItemKind::TyAlias(typ), - AliasKind::OpaqueTy(bounds) => ast::ImplItemKind::OpaqueTy(bounds), - }; - (name, kind, generics) - } else if self.is_const_item() { - self.parse_impl_const()? - } else if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(&vis), at_end)? { - // FIXME: code copied from `parse_macro_use_or_failure` -- use abstraction! 
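The two `impl` forms described by the grammar comment above, with generics and a `where` clause, in a stand-alone sketch; `Pair` is an invented type, and negative impls (`impl !Trait for Type`) are omitted because they were not generally available:

```
use std::fmt::Debug;

struct Pair<T> { left: T, right: T }

// Inherent impl: `impl` GENERICS TYPE `{ ... }`.
impl<T: Copy> Pair<T> {
    fn first(&self) -> T { self.left }
}

// Trait impl: `impl` GENERICS TRAIT `for` TYPE (`where` ...) `{ ... }`.
// Forgetting the trait or the `for` triggers the recoveries above
// ("missing trait in a trait impl" / "missing `for` in a trait impl").
impl<T> Default for Pair<T>
where
    T: Default + Debug,
{
    fn default() -> Self {
        Pair { left: T::default(), right: T::default() }
    }
}

fn main() {
    let p: Pair<i32> = Pair::default();
    println!("{}", p.first() + p.right);
}
```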
- (Ident::invalid(), ast::ImplItemKind::Macro(mac), Generics::default()) - } else { - let (name, inner_attrs, generics, kind) = self.parse_impl_method(at_end)?; - attrs.extend(inner_attrs); - (name, kind, generics) - }; - - Ok(ImplItem { - id: DUMMY_NODE_ID, - span: lo.to(self.prev_span), - ident: name, - vis, - defaultness, - attrs, - generics, - kind, - tokens: None, - }) - } - - /// Parses defaultness (i.e., `default` or nothing). - fn parse_defaultness(&mut self) -> Defaultness { - // `pub` is included for better error messages - if self.check_keyword(kw::Default) && - self.is_keyword_ahead(1, &[ - kw::Impl, - kw::Const, - kw::Async, - kw::Fn, - kw::Unsafe, - kw::Extern, - kw::Type, - kw::Pub, - ]) - { - self.bump(); // `default` - Defaultness::Default - } else { - Defaultness::Final - } - } - - /// Returns `true` if we are looking at `const ID` - /// (returns `false` for things like `const fn`, etc.). - fn is_const_item(&self) -> bool { - self.token.is_keyword(kw::Const) && - !self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe]) - } - - /// This parses the grammar: - /// ImplItemConst = "const" Ident ":" Ty "=" Expr ";" - fn parse_impl_const(&mut self) -> PResult<'a, (Ident, ImplItemKind, Generics)> { - self.expect_keyword(kw::Const)?; - let name = self.parse_ident()?; - self.expect(&token::Colon)?; - let typ = self.parse_ty()?; - self.expect(&token::Eq)?; - let expr = self.parse_expr()?; - self.expect_semi()?; - Ok((name, ImplItemKind::Const(typ, expr), Generics::default())) - } - - /// Parses `auto? trait Foo { ... }` or `trait Foo = Bar;`. - fn parse_item_trait(&mut self, lo: Span, unsafety: Unsafety) -> PResult<'a, ItemInfo> { - // Parse optional `auto` prefix. - let is_auto = if self.eat_keyword(kw::Auto) { - IsAuto::Yes - } else { - IsAuto::No - }; - - self.expect_keyword(kw::Trait)?; - let ident = self.parse_ident()?; - let mut tps = self.parse_generics()?; - - // Parse optional colon and supertrait bounds. - let had_colon = self.eat(&token::Colon); - let span_at_colon = self.prev_span; - let bounds = if had_colon { - self.parse_generic_bounds(Some(self.prev_span))? - } else { - Vec::new() - }; - - let span_before_eq = self.prev_span; - if self.eat(&token::Eq) { - // It's a trait alias. - if had_colon { - let span = span_at_colon.to(span_before_eq); - self.struct_span_err(span, "bounds are not allowed on trait aliases") - .emit(); - } - - let bounds = self.parse_generic_bounds(None)?; - tps.where_clause = self.parse_where_clause()?; - self.expect_semi()?; - - let whole_span = lo.to(self.prev_span); - if is_auto == IsAuto::Yes { - let msg = "trait aliases cannot be `auto`"; - self.struct_span_err(whole_span, msg) - .span_label(whole_span, msg) - .emit(); - } - if unsafety != Unsafety::Normal { - let msg = "trait aliases cannot be `unsafe`"; - self.struct_span_err(whole_span, msg) - .span_label(whole_span, msg) - .emit(); - } - - self.sess.gated_spans.gate(sym::trait_alias, whole_span); - - Ok((ident, ItemKind::TraitAlias(tps, bounds), None)) - } else { - // It's a normal trait. 
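A sketch of the trait syntax this code distinguishes: a normal trait with a supertrait bound and an associated const, versus a trait alias, which was still gated behind `trait_alias` and so appears only as a comment; `Labelled` and `KIND` are invented names:

```
use std::fmt::Display;

// `trait NAME : BOUNDS { ... }` — the optional colon introduces supertrait
// bounds; the body may contain associated consts and methods.
trait Labelled: Display {
    const KIND: &'static str;
    fn label(&self) -> String {
        format!("{}: {}", Self::KIND, self)
    }
}

impl Labelled for u32 {
    const KIND: &'static str = "number";
}

// `trait Alias = Display + Send;` would be a trait alias; at this point it
// required `#![feature(trait_alias)]`, and the code above also rejects
// `auto`/`unsafe` and supertrait bounds on aliases.

fn main() {
    println!("{}", 7u32.label());
}
```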
- tps.where_clause = self.parse_where_clause()?; - self.expect(&token::OpenDelim(token::Brace))?; - let mut trait_items = vec![]; - while !self.eat(&token::CloseDelim(token::Brace)) { - if let token::DocComment(_) = self.token.kind { - if self.look_ahead(1, - |tok| tok == &token::CloseDelim(token::Brace)) { - self.diagnostic().struct_span_err_with_code( - self.token.span, - "found a documentation comment that doesn't document anything", - DiagnosticId::Error("E0584".into()), - ) - .help( - "doc comments must come before what they document, maybe a \ - comment was intended with `//`?", - ) - .emit(); - self.bump(); - continue; - } - } - let mut at_end = false; - match self.parse_trait_item(&mut at_end) { - Ok(item) => trait_items.push(item), - Err(mut e) => { - e.emit(); - if !at_end { - self.consume_block(token::Brace, ConsumeClosingDelim::Yes); - break; - } - } - } - } - Ok((ident, ItemKind::Trait(is_auto, unsafety, tps, bounds, trait_items), None)) - } - } - - /// Parses the items in a trait declaration. - pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> { - maybe_whole!(self, NtTraitItem, |x| x); - let attrs = self.parse_outer_attributes()?; - let mut unclosed_delims = vec![]; - let (mut item, tokens) = self.collect_tokens(|this| { - let item = this.parse_trait_item_(at_end, attrs); - unclosed_delims.append(&mut this.unclosed_delims); - item - })?; - self.unclosed_delims.append(&mut unclosed_delims); - // See `parse_item` for why this clause is here. - if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { - item.tokens = Some(tokens); - } - Ok(item) - } - - fn parse_trait_item_( - &mut self, - at_end: &mut bool, - mut attrs: Vec<Attribute>, - ) -> PResult<'a, TraitItem> { - let lo = self.token.span; - self.eat_bad_pub(); - let (name, kind, generics) = if self.eat_keyword(kw::Type) { - self.parse_trait_item_assoc_ty()? - } else if self.is_const_item() { - self.parse_trait_item_const()? - } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? { - // trait item macro. - (Ident::invalid(), TraitItemKind::Macro(mac), Generics::default()) - } else { - self.parse_trait_item_method(at_end, &mut attrs)? - }; - - Ok(TraitItem { - id: DUMMY_NODE_ID, - ident: name, - attrs, - generics, - kind, - span: lo.to(self.prev_span), - tokens: None, - }) - } - - fn parse_trait_item_const(&mut self) -> PResult<'a, (Ident, TraitItemKind, Generics)> { - self.expect_keyword(kw::Const)?; - let ident = self.parse_ident()?; - self.expect(&token::Colon)?; - let ty = self.parse_ty()?; - let default = if self.eat(&token::Eq) { - Some(self.parse_expr()?) - } else { - None - }; - self.expect_semi()?; - Ok((ident, TraitItemKind::Const(ty, default), Generics::default())) - } - - /// Parses the following grammar: - /// - /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty] - fn parse_trait_item_assoc_ty(&mut self) -> PResult<'a, (Ident, TraitItemKind, Generics)> { - let ident = self.parse_ident()?; - let mut generics = self.parse_generics()?; - - // Parse optional colon and param bounds. - let bounds = if self.eat(&token::Colon) { - self.parse_generic_bounds(None)? - } else { - Vec::new() - }; - generics.where_clause = self.parse_where_clause()?; - - let default = if self.eat(&token::Eq) { - Some(self.parse_ty()?) - } else { - None - }; - self.expect_semi()?; - - Ok((ident, TraitItemKind::Type(bounds, default), generics)) - } - - /// Parses a `UseTree`. 
- /// - /// ``` - /// USE_TREE = [`::`] `*` | - /// [`::`] `{` USE_TREE_LIST `}` | - /// PATH `::` `*` | - /// PATH `::` `{` USE_TREE_LIST `}` | - /// PATH [`as` IDENT] - /// ``` - fn parse_use_tree(&mut self) -> PResult<'a, UseTree> { - let lo = self.token.span; - - let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo() }; - let kind = if self.check(&token::OpenDelim(token::Brace)) || - self.check(&token::BinOp(token::Star)) || - self.is_import_coupler() { - // `use *;` or `use ::*;` or `use {...};` or `use ::{...};` - let mod_sep_ctxt = self.token.span.ctxt(); - if self.eat(&token::ModSep) { - prefix.segments.push( - PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)) - ); - } - - self.parse_use_tree_glob_or_nested()? - } else { - // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;` - prefix = self.parse_path(PathStyle::Mod)?; - - if self.eat(&token::ModSep) { - self.parse_use_tree_glob_or_nested()? - } else { - UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID) - } - }; - - Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) }) - } - - /// Parses `*` or `{...}`. - fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> { - Ok(if self.eat(&token::BinOp(token::Star)) { - UseTreeKind::Glob - } else { - UseTreeKind::Nested(self.parse_use_tree_list()?) - }) - } - - /// Parses a `UseTreeKind::Nested(list)`. - /// - /// ``` - /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`] - /// ``` - fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> { - self.parse_delim_comma_seq(token::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID))) - .map(|(r, _)| r) - } - - fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> { - if self.eat_keyword(kw::As) { - self.parse_ident_or_underscore().map(Some) - } else { - Ok(None) - } - } - - fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { - match self.token.kind { - token::Ident(name, false) if name == kw::Underscore => { - let span = self.token.span; - self.bump(); - Ok(Ident::new(name, span)) - } - _ => self.parse_ident(), - } - } - - /// Parses `extern crate` links. - /// - /// # Examples - /// - /// ``` - /// extern crate foo; - /// extern crate bar as foo; - /// ``` - fn parse_item_extern_crate( - &mut self, - lo: Span, - visibility: Visibility, - attrs: Vec<Attribute> - ) -> PResult<'a, P<Item>> { - // Accept `extern crate name-like-this` for better diagnostics - let orig_name = self.parse_crate_name_with_dashes()?; - let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? { - (rename, Some(orig_name.name)) - } else { - (orig_name, None) - }; - self.expect_semi()?; - - let span = lo.to(self.prev_span); - Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs)) - } - - fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, ast::Ident> { - let error_msg = "crate name using dashes are not valid in `extern crate` statements"; - let suggestion_msg = "if the original crate name uses dashes you need to use underscores \ - in the code"; - let mut ident = if self.token.is_keyword(kw::SelfLower) { - self.parse_path_segment_ident() - } else { - self.parse_ident() - }?; - let mut idents = vec![]; - let mut replacement = vec![]; - let mut fixed_crate_name = false; - // Accept `extern crate name-like-this` for better diagnostics. - let dash = token::BinOp(token::BinOpToken::Minus); - if self.token == dash { // Do not include `-` as part of the expected tokens list. 
- while self.eat(&dash) { - fixed_crate_name = true; - replacement.push((self.prev_span, "_".to_string())); - idents.push(self.parse_ident()?); - } - } - if fixed_crate_name { - let fixed_name_sp = ident.span.to(idents.last().unwrap().span); - let mut fixed_name = format!("{}", ident.name); - for part in idents { - fixed_name.push_str(&format!("_{}", part.name)); - } - ident = Ident::from_str_and_span(&fixed_name, fixed_name_sp); - - self.struct_span_err(fixed_name_sp, error_msg) - .span_label(fixed_name_sp, "dash-separated idents are not valid") - .multipart_suggestion(suggestion_msg, replacement, Applicability::MachineApplicable) - .emit(); - } - Ok(ident) - } - - /// Parses `extern` for foreign ABIs modules. - /// - /// `extern` is expected to have been - /// consumed before calling this method. - /// - /// # Examples - /// - /// ```ignore (only-for-syntax-highlight) - /// extern "C" {} - /// extern {} - /// ``` - fn parse_item_foreign_mod( - &mut self, - lo: Span, - abi: Abi, - visibility: Visibility, - mut attrs: Vec<Attribute>, - extern_sp: Span, - ) -> PResult<'a, P<Item>> { - self.expect(&token::OpenDelim(token::Brace))?; - - attrs.extend(self.parse_inner_attributes()?); - - let mut foreign_items = vec![]; - while !self.eat(&token::CloseDelim(token::Brace)) { - foreign_items.push(self.parse_foreign_item(extern_sp)?); - } - - let prev_span = self.prev_span; - let m = ast::ForeignMod { - abi, - items: foreign_items - }; - let invalid = Ident::invalid(); - Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs)) - } - - /// Parses a foreign item. - pub fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> { - maybe_whole!(self, NtForeignItem, |ni| ni); - - let attrs = self.parse_outer_attributes()?; - let lo = self.token.span; - let visibility = self.parse_visibility(false)?; - - // FOREIGN STATIC ITEM - // Treat `const` as `static` for error recovery, but don't add it to expected tokens. - if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) { - if self.token.is_keyword(kw::Const) { - let mut err = self - .struct_span_err(self.token.span, "extern items cannot be `const`"); - - - // The user wrote 'const fn' - if self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe]) { - err.emit(); - // Consume `const` - self.bump(); - // Consume `unsafe` if present, since `extern` blocks - // don't allow it. This will leave behind a plain 'fn' - self.eat_keyword(kw::Unsafe); - // Treat 'const fn` as a plain `fn` for error recovery purposes. - // We've already emitted an error, so compilation is guaranteed - // to fail - return Ok(self.parse_item_foreign_fn(visibility, lo, attrs, extern_sp)?); - } - err.span_suggestion( - self.token.span, - "try using a static value", - "static".to_owned(), - Applicability::MachineApplicable - ); - err.emit(); - } - self.bump(); // `static` or `const` - return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?); - } - // FOREIGN FUNCTION ITEM - if self.check_keyword(kw::Fn) { - return Ok(self.parse_item_foreign_fn(visibility, lo, attrs, extern_sp)?); - } - // FOREIGN TYPE ITEM - if self.check_keyword(kw::Type) { - return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?); - } - - match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? 
{ - Some(mac) => { - Ok( - ForeignItem { - ident: Ident::invalid(), - span: lo.to(self.prev_span), - id: DUMMY_NODE_ID, - attrs, - vis: visibility, - kind: ForeignItemKind::Macro(mac), - } - ) - } - None => { - if !attrs.is_empty() { - self.expected_item_err(&attrs)?; - } - - self.unexpected() - } - } - } - - /// Parses a static item from a foreign module. - /// Assumes that the `static` keyword is already parsed. - fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) - -> PResult<'a, ForeignItem> { - let mutbl = self.parse_mutability(); - let ident = self.parse_ident()?; - self.expect(&token::Colon)?; - let ty = self.parse_ty()?; - let hi = self.token.span; - self.expect_semi()?; - Ok(ForeignItem { - ident, - attrs, - kind: ForeignItemKind::Static(ty, mutbl), - id: DUMMY_NODE_ID, - span: lo.to(hi), - vis, - }) - } - - /// Parses a type from a foreign module. - fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) - -> PResult<'a, ForeignItem> { - self.expect_keyword(kw::Type)?; - - let ident = self.parse_ident()?; - let hi = self.token.span; - self.expect_semi()?; - Ok(ast::ForeignItem { - ident, - attrs, - kind: ForeignItemKind::Ty, - id: DUMMY_NODE_ID, - span: lo.to(hi), - vis - }) - } - - fn is_static_global(&mut self) -> bool { - if self.check_keyword(kw::Static) { - // Check if this could be a closure. - !self.look_ahead(1, |token| { - if token.is_keyword(kw::Move) { - return true; - } - match token.kind { - token::BinOp(token::Or) | token::OrOr => true, - _ => false, - } - }) - } else { - false - } - } - - /// Parse `["const" | ("static" "mut"?)] $ident ":" $ty = $expr` with - /// `["const" | ("static" "mut"?)]` already parsed and stored in `m`. - /// - /// When `m` is `"const"`, `$ident` may also be `"_"`. - fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<'a, ItemInfo> { - let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?; - - // Parse the type of a `const` or `static mut?` item. - // That is, the `":" $ty` fragment. - let ty = if self.token == token::Eq { - self.recover_missing_const_type(id, m) - } else { - // Not `=` so expect `":"" $ty` as usual. - self.expect(&token::Colon)?; - self.parse_ty()? - }; - - self.expect(&token::Eq)?; - let e = self.parse_expr()?; - self.expect_semi()?; - let item = match m { - Some(m) => ItemKind::Static(ty, m, e), - None => ItemKind::Const(ty, e), - }; - Ok((id, item, None)) - } - - /// We were supposed to parse `:` but instead, we're already at `=`. - /// This means that the type is missing. - fn recover_missing_const_type(&mut self, id: Ident, m: Option<Mutability>) -> P<Ty> { - // Construct the error and stash it away with the hope - // that typeck will later enrich the error with a type. - let kind = match m { - Some(Mutability::Mutable) => "static mut", - Some(Mutability::Immutable) => "static", - None => "const", - }; - let mut err = self.struct_span_err(id.span, &format!("missing type for `{}` item", kind)); - err.span_suggestion( - id.span, - "provide a type for the item", - format!("{}: <type>", id), - Applicability::HasPlaceholders, - ); - err.stash(id.span, StashKey::ItemNoType); - - // The user intended that the type be inferred, - // so treat this as if the user wrote e.g. `const A: _ = expr;`. - P(Ty { - kind: TyKind::Infer, - span: id.span, - id: ast::DUMMY_NODE_ID, - }) - } - - /// Parses `type Foo = Bar;` or returns `None` - /// without modifying the parser state. 
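The foreign-item kinds accepted above map onto an `extern` block like this sketch. `strlen` is a real C-library function so the example links and runs; the static is an invented name that is never referenced, so it need not exist at link time. Foreign `type` items were still gated behind `extern_types`, and `const` items are rejected outright:

```
use std::ffi::CString;
use std::os::raw::{c_char, c_int};

extern "C" {
    // FOREIGN FUNCTION ITEM — `strlen` comes from the C library that std
    // already links against on common platforms.
    fn strlen(s: *const c_char) -> usize;

    // FOREIGN STATIC ITEM — declared but never referenced here, so no
    // symbol with this (invented) name has to exist at link time. Writing
    // `const` instead is rejected: "extern items cannot be `const`".
    static FOREIGN_FLAG: c_int;
}

// Foreign `type` items (`type Opaque;`) are also parsed above, but they
// required `#![feature(extern_types)]`.

fn main() {
    let s = CString::new("parser").unwrap();
    let len = unsafe { strlen(s.as_ptr()) };
    println!("{}", len);
}
```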
- fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, Generics)>> { - // This parses the grammar: - // Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";" - if self.eat_keyword(kw::Type) { - Some(self.parse_type_alias()) - } else { - None - } - } - - /// Parses a type alias or opaque type. - fn parse_type_alias(&mut self) -> PResult<'a, (Ident, AliasKind, Generics)> { - let ident = self.parse_ident()?; - let mut tps = self.parse_generics()?; - tps.where_clause = self.parse_where_clause()?; - self.expect(&token::Eq)?; - let alias = if self.check_keyword(kw::Impl) { - self.bump(); - let bounds = self.parse_generic_bounds(Some(self.prev_span))?; - AliasKind::OpaqueTy(bounds) - } else { - let ty = self.parse_ty()?; - AliasKind::Weak(ty) - }; - self.expect_semi()?; - Ok((ident, alias, tps)) - } - - /// Parses an enum declaration. - fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { - let id = self.parse_ident()?; - let mut generics = self.parse_generics()?; - generics.where_clause = self.parse_where_clause()?; - self.expect(&token::OpenDelim(token::Brace))?; - - let enum_definition = self.parse_enum_def(&generics).map_err(|e| { - self.recover_stmt(); - self.eat(&token::CloseDelim(token::Brace)); - e - })?; - Ok((id, ItemKind::Enum(enum_definition, generics), None)) - } - - /// Parses the part of an enum declaration following the `{`. - fn parse_enum_def(&mut self, _generics: &Generics) -> PResult<'a, EnumDef> { - let mut variants = Vec::new(); - while self.token != token::CloseDelim(token::Brace) { - let variant_attrs = self.parse_outer_attributes()?; - let vlo = self.token.span; - - self.eat_bad_pub(); - let ident = self.parse_ident()?; - - let struct_def = if self.check(&token::OpenDelim(token::Brace)) { - // Parse a struct variant. - let (fields, recovered) = self.parse_record_struct_body()?; - VariantData::Struct(fields, recovered) - } else if self.check(&token::OpenDelim(token::Paren)) { - VariantData::Tuple( - self.parse_tuple_struct_body()?, - DUMMY_NODE_ID, - ) - } else { - VariantData::Unit(DUMMY_NODE_ID) - }; - - let disr_expr = if self.eat(&token::Eq) { - Some(AnonConst { - id: DUMMY_NODE_ID, - value: self.parse_expr()?, - }) - } else { - None - }; - - let vr = ast::Variant { - ident, - id: DUMMY_NODE_ID, - attrs: variant_attrs, - data: struct_def, - disr_expr, - span: vlo.to(self.prev_span), - is_placeholder: false, - }; - variants.push(vr); - - if !self.eat(&token::Comma) { - if self.token.is_ident() && !self.token.is_reserved_ident() { - let sp = self.sess.source_map().next_point(self.prev_span); - self.struct_span_err(sp, "missing comma") - .span_suggestion_short( - sp, - "missing comma", - ",".to_owned(), - Applicability::MaybeIncorrect, - ) - .emit(); - } else { - break; - } - } - } - self.expect(&token::CloseDelim(token::Brace))?; - - Ok(ast::EnumDef { variants }) - } - - /// Parses `struct Foo { ... }`. - fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> { - let class_name = self.parse_ident()?; - - let mut generics = self.parse_generics()?; - - // There is a special case worth noting here, as reported in issue #17904. - // If we are parsing a tuple struct it is the case that the where clause - // should follow the field list. Like so: - // - // struct Foo<T>(T) where T: Copy; - // - // If we are parsing a normal record-style struct it is the case - // that the where clause comes before the body, and after the generics. - // So if we look ahead and see a brace or a where-clause we begin - // parsing a record style struct. 
- // - // Otherwise if we look ahead and see a paren we parse a tuple-style - // struct. - - let vdata = if self.token.is_keyword(kw::Where) { - generics.where_clause = self.parse_where_clause()?; - if self.eat(&token::Semi) { - // If we see a: `struct Foo<T> where T: Copy;` style decl. - VariantData::Unit(DUMMY_NODE_ID) - } else { - // If we see: `struct Foo<T> where T: Copy { ... }` - let (fields, recovered) = self.parse_record_struct_body()?; - VariantData::Struct(fields, recovered) - } - // No `where` so: `struct Foo<T>;` - } else if self.eat(&token::Semi) { - VariantData::Unit(DUMMY_NODE_ID) - // Record-style struct definition - } else if self.token == token::OpenDelim(token::Brace) { - let (fields, recovered) = self.parse_record_struct_body()?; - VariantData::Struct(fields, recovered) - // Tuple-style struct definition with optional where-clause. - } else if self.token == token::OpenDelim(token::Paren) { - let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); - generics.where_clause = self.parse_where_clause()?; - self.expect_semi()?; - body - } else { - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!( - "expected `where`, `{{`, `(`, or `;` after struct name, found {}", - token_str - )); - err.span_label(self.token.span, "expected `where`, `{`, `(`, or `;` after struct name"); - return Err(err); - }; - - Ok((class_name, ItemKind::Struct(vdata, generics), None)) - } - - /// Parses `union Foo { ... }`. - fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> { - let class_name = self.parse_ident()?; - - let mut generics = self.parse_generics()?; - - let vdata = if self.token.is_keyword(kw::Where) { - generics.where_clause = self.parse_where_clause()?; - let (fields, recovered) = self.parse_record_struct_body()?; - VariantData::Struct(fields, recovered) - } else if self.token == token::OpenDelim(token::Brace) { - let (fields, recovered) = self.parse_record_struct_body()?; - VariantData::Struct(fields, recovered) - } else { - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!( - "expected `where` or `{{` after union name, found {}", token_str)); - err.span_label(self.token.span, "expected `where` or `{` after union name"); - return Err(err); - }; - - Ok((class_name, ItemKind::Union(vdata, generics), None)) - } - - pub(super) fn is_union_item(&self) -> bool { - self.token.is_keyword(kw::Union) && - self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()) - } - - fn parse_record_struct_body( - &mut self, - ) -> PResult<'a, (Vec<StructField>, /* recovered */ bool)> { - let mut fields = Vec::new(); - let mut recovered = false; - if self.eat(&token::OpenDelim(token::Brace)) { - while self.token != token::CloseDelim(token::Brace) { - let field = self.parse_struct_decl_field().map_err(|e| { - self.consume_block(token::Brace, ConsumeClosingDelim::No); - recovered = true; - e - }); - match field { - Ok(field) => fields.push(field), - Err(mut err) => { - err.emit(); - break; - } - } - } - self.eat(&token::CloseDelim(token::Brace)); - } else { - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!( - "expected `where`, or `{{` after struct name, found {}", token_str)); - err.span_label(self.token.span, "expected `where`, or `{` after struct name"); - return Err(err); - } - - Ok((fields, recovered)) - } - - fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<StructField>> { - // This is the case where we find `struct Foo<T>(T) where T: Copy;` - // Unit like structs are handled in 
parse_item_struct function - self.parse_paren_comma_seq(|p| { - let attrs = p.parse_outer_attributes()?; - let lo = p.token.span; - let vis = p.parse_visibility(true)?; - let ty = p.parse_ty()?; - Ok(StructField { - span: lo.to(ty.span), - vis, - ident: None, - id: DUMMY_NODE_ID, - ty, - attrs, - is_placeholder: false, - }) - }).map(|(r, _)| r) - } - - /// Parses an element of a struct declaration. - fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { - let attrs = self.parse_outer_attributes()?; - let lo = self.token.span; - let vis = self.parse_visibility(false)?; - self.parse_single_struct_field(lo, vis, attrs) - } - - /// Parses a structure field declaration. - fn parse_single_struct_field(&mut self, - lo: Span, - vis: Visibility, - attrs: Vec<Attribute> ) - -> PResult<'a, StructField> { - let mut seen_comma: bool = false; - let a_var = self.parse_name_and_ty(lo, vis, attrs)?; - if self.token == token::Comma { - seen_comma = true; - } - match self.token.kind { - token::Comma => { - self.bump(); - } - token::CloseDelim(token::Brace) => {} - token::DocComment(_) => { - let previous_span = self.prev_span; - let mut err = self.span_fatal_err(self.token.span, Error::UselessDocComment); - self.bump(); // consume the doc comment - let comma_after_doc_seen = self.eat(&token::Comma); - // `seen_comma` is always false, because we are inside doc block - // condition is here to make code more readable - if seen_comma == false && comma_after_doc_seen == true { - seen_comma = true; - } - if comma_after_doc_seen || self.token == token::CloseDelim(token::Brace) { - err.emit(); - } else { - if seen_comma == false { - let sp = self.sess.source_map().next_point(previous_span); - err.span_suggestion( - sp, - "missing comma here", - ",".into(), - Applicability::MachineApplicable - ); - } - return Err(err); - } - } - _ => { - let sp = self.sess.source_map().next_point(self.prev_span); - let mut err = self.struct_span_err(sp, &format!("expected `,`, or `}}`, found {}", - self.this_token_descr())); - if self.token.is_ident() { - // This is likely another field; emit the diagnostic and keep going - err.span_suggestion( - sp, - "try adding a comma", - ",".into(), - Applicability::MachineApplicable, - ); - err.emit(); - } else { - return Err(err) - } - } - } - Ok(a_var) - } - - /// Parses a structure field. 
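The recovery paths above deal with doc comments and missing commas between record fields; in well-formed source the fields look like this (an illustrative sketch, not from the deleted file):

```rust
struct Config {
    /// Doc comments go before the field they document; one that documents
    /// nothing is reported as an error by the code above.
    name: String,
    retries: u32, // a missing `,` between fields is recovered with a
                  // machine-applicable "try adding a comma" suggestion
    verbose: bool,
}
```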
- fn parse_name_and_ty( - &mut self, - lo: Span, - vis: Visibility, - attrs: Vec<Attribute> - ) -> PResult<'a, StructField> { - let name = self.parse_ident()?; - self.expect(&token::Colon)?; - let ty = self.parse_ty()?; - Ok(StructField { - span: lo.to(self.prev_span), - ident: Some(name), - vis, - id: DUMMY_NODE_ID, - ty, - attrs, - is_placeholder: false, - }) - } - - pub(super) fn eat_macro_def( - &mut self, - attrs: &[Attribute], - vis: &Visibility, - lo: Span - ) -> PResult<'a, Option<P<Item>>> { - let token_lo = self.token.span; - let (ident, def) = if self.eat_keyword(kw::Macro) { - let ident = self.parse_ident()?; - let tokens = if self.check(&token::OpenDelim(token::Brace)) { - match self.parse_token_tree() { - TokenTree::Delimited(_, _, tts) => tts, - _ => unreachable!(), - } - } else if self.check(&token::OpenDelim(token::Paren)) { - let args = self.parse_token_tree(); - let body = if self.check(&token::OpenDelim(token::Brace)) { - self.parse_token_tree() - } else { - self.unexpected()?; - unreachable!() - }; - TokenStream::new(vec![ - args.into(), - TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(), - body.into(), - ]) - } else { - self.unexpected()?; - unreachable!() - }; - - (ident, ast::MacroDef { tokens: tokens.into(), legacy: false }) - } else if self.check_keyword(sym::macro_rules) && - self.look_ahead(1, |t| *t == token::Not) && - self.look_ahead(2, |t| t.is_ident()) { - let prev_span = self.prev_span; - self.complain_if_pub_macro(&vis.node, prev_span); - self.bump(); - self.bump(); - - let ident = self.parse_ident()?; - let (delim, tokens) = self.expect_delimited_token_tree()?; - if delim != MacDelimiter::Brace && !self.eat(&token::Semi) { - self.report_invalid_macro_expansion_item(); - } - - (ident, ast::MacroDef { tokens, legacy: true }) - } else { - return Ok(None); - }; - - let span = lo.to(self.prev_span); - - if !def.legacy { - self.sess.gated_spans.gate(sym::decl_macro, span); - } - - Ok(Some(self.mk_item(span, ident, ItemKind::MacroDef(def), vis.clone(), attrs.to_vec()))) - } - - fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) { - match *vis { - VisibilityKind::Inherited => {} - _ => { - let mut err = if self.token.is_keyword(sym::macro_rules) { - let mut err = self.diagnostic() - .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`"); - err.span_suggestion( - sp, - "try exporting the macro", - "#[macro_export]".to_owned(), - Applicability::MaybeIncorrect // speculative - ); - err - } else { - let mut err = self.diagnostic() - .struct_span_err(sp, "can't qualify macro invocation with `pub`"); - err.help("try adjusting the macro to put `pub` inside the invocation"); - err - }; - err.emit(); - } - } - } - - fn report_invalid_macro_expansion_item(&self) { - self.struct_span_err( - self.prev_span, - "macros that expand to items must be delimited with braces or followed by a semicolon", - ).multipart_suggestion( - "change the delimiters to curly braces", - vec![ - (self.prev_span.with_hi(self.prev_span.lo() + BytePos(1)), String::from(" {")), - (self.prev_span.with_lo(self.prev_span.hi() - BytePos(1)), '}'.to_string()), - ], - Applicability::MaybeIncorrect, - ).span_suggestion( - self.sess.source_map().next_point(self.prev_span), - "add a semicolon", - ';'.to_string(), - Applicability::MaybeIncorrect, - ).emit(); - } - - fn mk_item(&self, span: Span, ident: Ident, kind: ItemKind, vis: Visibility, - attrs: Vec<Attribute>) -> P<Item> { - P(Item { - ident, - attrs, - id: DUMMY_NODE_ID, - kind, - vis, - span, - tokens: 
None, - }) - } -} - -/// The parsing configuration used to parse a parameter list (see `parse_fn_params`). -pub(super) struct ParamCfg { - /// Is `self` is allowed as the first parameter? - pub is_self_allowed: bool, - /// Is `...` allowed as the tail of the parameter list? - pub allow_c_variadic: bool, - /// `is_name_required` decides if, per-parameter, - /// the parameter must have a pattern or just a type. - pub is_name_required: fn(&token::Token) -> bool, -} - -/// Parsing of functions and methods. -impl<'a> Parser<'a> { - /// Parses an item-position function declaration. - fn parse_item_fn( - &mut self, - lo: Span, - vis: Visibility, - attrs: Vec<Attribute>, - header: FnHeader, - ) -> PResult<'a, Option<P<Item>>> { - let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { - is_self_allowed: false, - allow_c_variadic: header.abi.symbol == sym::C && header.unsafety == Unsafety::Unsafe, - is_name_required: |_| true, - })?; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - let kind = ItemKind::Fn(FnSig { decl, header }, generics, body); - self.mk_item_with_info(attrs, lo, vis, (ident, kind, Some(inner_attrs))) - } - - /// Parses a function declaration from a foreign module. - fn parse_item_foreign_fn( - &mut self, - vis: ast::Visibility, - lo: Span, - attrs: Vec<Attribute>, - extern_sp: Span, - ) -> PResult<'a, ForeignItem> { - self.expect_keyword(kw::Fn)?; - let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { - is_self_allowed: false, - allow_c_variadic: true, - is_name_required: |_| true, - })?; - let span = lo.to(self.token.span); - self.parse_semi_or_incorrect_foreign_fn_body(&ident, extern_sp)?; - Ok(ast::ForeignItem { - ident, - attrs, - kind: ForeignItemKind::Fn(decl, generics), - id: DUMMY_NODE_ID, - span, - vis, - }) - } - - /// Parses a method or a macro invocation in a trait impl. - fn parse_impl_method( - &mut self, - at_end: &mut bool, - ) -> PResult<'a, (Ident, Vec<Attribute>, Generics, ImplItemKind)> { - let (ident, sig, generics) = self.parse_method_sig(|_| true)?; - *at_end = true; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - Ok((ident, inner_attrs, generics, ast::ImplItemKind::Method(sig, body))) - } - - fn parse_trait_item_method( - &mut self, - at_end: &mut bool, - attrs: &mut Vec<Attribute>, - ) -> PResult<'a, (Ident, TraitItemKind, Generics)> { - // This is somewhat dubious; We don't want to allow - // argument names to be left off if there is a definition... - // - // We don't allow argument names to be left off in edition 2018. - let (ident, sig, generics) = self.parse_method_sig(|t| t.span.rust_2018())?; - let body = self.parse_trait_method_body(at_end, attrs)?; - Ok((ident, TraitItemKind::Method(sig, body), generics)) - } - - /// Parse the "body" of a method in a trait item definition. - /// This can either be `;` when there's no body, - /// or e.g. a block when the method is a provided one. - fn parse_trait_method_body( - &mut self, - at_end: &mut bool, - attrs: &mut Vec<Attribute>, - ) -> PResult<'a, Option<P<Block>>> { - Ok(match self.token.kind { - token::Semi => { - debug!("parse_trait_method_body(): parsing required method"); - self.bump(); - *at_end = true; - None - } - token::OpenDelim(token::Brace) => { - debug!("parse_trait_method_body(): parsing provided method"); - *at_end = true; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(inner_attrs.iter().cloned()); - Some(body) - } - token::Interpolated(ref nt) => { - match **nt { - token::NtBlock(..) 
=> { - *at_end = true; - let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; - attrs.extend(inner_attrs.iter().cloned()); - Some(body) - } - _ => return self.expected_semi_or_open_brace(), - } - } - _ => return self.expected_semi_or_open_brace(), - }) - } - - /// Parse the "signature", including the identifier, parameters, and generics - /// of a method. The body is not parsed as that differs between `trait`s and `impl`s. - fn parse_method_sig( - &mut self, - is_name_required: fn(&token::Token) -> bool, - ) -> PResult<'a, (Ident, FnSig, Generics)> { - let header = self.parse_fn_front_matter()?; - let (ident, decl, generics) = self.parse_fn_sig(ParamCfg { - is_self_allowed: true, - allow_c_variadic: false, - is_name_required, - })?; - Ok((ident, FnSig { header, decl }, generics)) - } - - /// Parses all the "front matter" for a `fn` declaration, up to - /// and including the `fn` keyword: - /// - /// - `const fn` - /// - `unsafe fn` - /// - `const unsafe fn` - /// - `extern fn` - /// - etc. - fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> { - let is_const_fn = self.eat_keyword(kw::Const); - let const_span = self.prev_span; - let asyncness = self.parse_asyncness(); - if let IsAsync::Async { .. } = asyncness { - self.ban_async_in_2015(self.prev_span); - } - let asyncness = respan(self.prev_span, asyncness); - let unsafety = self.parse_unsafety(); - let (constness, unsafety, abi) = if is_const_fn { - (respan(const_span, Constness::Const), unsafety, Abi::default()) - } else { - let abi = self.parse_extern_abi()?; - (respan(self.prev_span, Constness::NotConst), unsafety, abi) - }; - if !self.eat_keyword(kw::Fn) { - // It is possible for `expect_one_of` to recover given the contents of - // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't - // account for this. - if !self.expect_one_of(&[], &[])? { unreachable!() } - } - Ok(FnHeader { constness, unsafety, asyncness, abi }) - } - - /// Parse the "signature", including the identifier, parameters, and generics of a function. - fn parse_fn_sig(&mut self, cfg: ParamCfg) -> PResult<'a, (Ident, P<FnDecl>, Generics)> { - let ident = self.parse_ident()?; - let mut generics = self.parse_generics()?; - let decl = self.parse_fn_decl(cfg, true)?; - generics.where_clause = self.parse_where_clause()?; - Ok((ident, decl, generics)) - } - - /// Parses the parameter list and result type of a function declaration. - pub(super) fn parse_fn_decl( - &mut self, - cfg: ParamCfg, - ret_allow_plus: bool, - ) -> PResult<'a, P<FnDecl>> { - Ok(P(FnDecl { - inputs: self.parse_fn_params(cfg)?, - output: self.parse_ret_ty(ret_allow_plus)?, - })) - } - - /// Parses the parameter list of a function, including the `(` and `)` delimiters. - fn parse_fn_params(&mut self, mut cfg: ParamCfg) -> PResult<'a, Vec<Param>> { - let sp = self.token.span; - let is_trait_item = cfg.is_self_allowed; - let mut c_variadic = false; - // Parse the arguments, starting out with `self` being possibly allowed... - let (params, _) = self.parse_paren_comma_seq(|p| { - let param = p.parse_param_general(&cfg, is_trait_item); - // ...now that we've parsed the first argument, `self` is no longer allowed. - cfg.is_self_allowed = false; - - match param { - Ok(param) => Ok( - if let TyKind::CVarArgs = param.ty.kind { - c_variadic = true; - if p.token != token::CloseDelim(token::Paren) { - p.span_err( - p.token.span, - "`...` must be the last argument of a C-variadic function", - ); - // FIXME(eddyb) this should probably still push `CVarArgs`. 
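The two `...` checks enforced here can be seen against foreign-function declarations, which always allow C-variadic parameters, whereas free functions in this code only allow them when declared `unsafe extern "C"`. The snippet is an illustrative sketch and `c_log` is a made-up foreign function, not anything from the deleted file:

```rust
extern "C" {
    // Accepted: at least one named parameter, and `...` comes last.
    fn c_log(fmt: *const u8, ...) -> i32;

    // Rejected by the checks above:
    // fn bad1(...);                          // no named parameter before `...`
    // fn bad2(fmt: *const u8, ..., n: i32);  // `...` is not the last parameter
}
```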
- // Maybe AST validation/HIR lowering should emit the above error? - None - } else { - Some(param) - } - } else { - Some(param) - } - ), - Err(mut e) => { - e.emit(); - let lo = p.prev_span; - // Skip every token until next possible arg or end. - p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); - // Create a placeholder argument for proper arg count (issue #34264). - let span = lo.to(p.prev_span); - Ok(Some(dummy_arg(Ident::new(kw::Invalid, span)))) - } - } - })?; - - let mut params: Vec<_> = params.into_iter().filter_map(|x| x).collect(); - - // Replace duplicated recovered params with `_` pattern to avoid unecessary errors. - self.deduplicate_recovered_params_names(&mut params); - - if c_variadic && params.len() <= 1 { - self.span_err( - sp, - "C-variadic function must be declared with at least one named argument", - ); - } - - Ok(params) - } - - /// Skips unexpected attributes and doc comments in this position and emits an appropriate - /// error. - /// This version of parse param doesn't necessarily require identifier names. - fn parse_param_general(&mut self, cfg: &ParamCfg, is_trait_item: bool) -> PResult<'a, Param> { - let lo = self.token.span; - let attrs = self.parse_outer_attributes()?; - - // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here. - if let Some(mut param) = self.parse_self_param()? { - param.attrs = attrs.into(); - return if cfg.is_self_allowed { - Ok(param) - } else { - self.recover_bad_self_param(param, is_trait_item) - }; - } - - let is_name_required = match self.token.kind { - token::DotDotDot => false, - _ => (cfg.is_name_required)(&self.token), - }; - let (pat, ty) = if is_name_required || self.is_named_param() { - debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required); - - let pat = self.parse_fn_param_pat()?; - if let Err(mut err) = self.expect(&token::Colon) { - return if let Some(ident) = self.parameter_without_type( - &mut err, - pat, - is_name_required, - cfg.is_self_allowed, - is_trait_item, - ) { - err.emit(); - Ok(dummy_arg(ident)) - } else { - Err(err) - }; - } - - self.eat_incorrect_doc_comment_for_param_type(); - (pat, self.parse_ty_common(true, true, cfg.allow_c_variadic)?) - } else { - debug!("parse_param_general ident_to_pat"); - let parser_snapshot_before_ty = self.clone(); - self.eat_incorrect_doc_comment_for_param_type(); - let mut ty = self.parse_ty_common(true, true, cfg.allow_c_variadic); - if ty.is_ok() && self.token != token::Comma && - self.token != token::CloseDelim(token::Paren) { - // This wasn't actually a type, but a pattern looking like a type, - // so we are going to rollback and re-parse for recovery. - ty = self.unexpected(); - } - match ty { - Ok(ty) => { - let ident = Ident::new(kw::Invalid, self.prev_span); - let bm = BindingMode::ByValue(Mutability::Immutable); - let pat = self.mk_pat_ident(ty.span, bm, ident); - (pat, ty) - } - // If this is a C-variadic argument and we hit an error, return the error. - Err(err) if self.token == token::DotDotDot => return Err(err), - // Recover from attempting to parse the argument as a type without pattern. - Err(mut err) => { - err.cancel(); - mem::replace(self, parser_snapshot_before_ty); - self.recover_arg_parse()? - } - } - }; - - let span = lo.to(self.token.span); - - Ok(Param { - attrs: attrs.into(), - id: ast::DUMMY_NODE_ID, - is_placeholder: false, - pat, - span, - ty, - }) - } - - /// Returns the parsed optional self parameter and whether a self shortcut was used. 
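The `self` shortcuts recognized below cover the usual receiver forms; this is an illustrative sketch of accepted source, not code from the deleted file:

```rust
struct Widget;

impl Widget {
    fn by_value(self) {}
    fn by_mut_value(mut self) {}
    fn by_ref(&self) {}
    fn by_mut_ref(&mut self) {}
    fn with_lifetime<'a>(&'a self) {}
    fn with_explicit_type(self: Box<Self>) {}
    // `*self`, `*const self` and `*mut self` are recognized only so the
    // parser can emit "cannot pass `self` by raw pointer".
}
```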
- /// - /// See `parse_self_param_with_attrs` to collect attributes. - fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> { - // Extract an identifier *after* having confirmed that the token is one. - let expect_self_ident = |this: &mut Self| { - match this.token.kind { - // Preserve hygienic context. - token::Ident(name, _) => { - let span = this.token.span; - this.bump(); - Ident::new(name, span) - } - _ => unreachable!(), - } - }; - // Is `self` `n` tokens ahead? - let is_isolated_self = |this: &Self, n| { - this.is_keyword_ahead(n, &[kw::SelfLower]) - && this.look_ahead(n + 1, |t| t != &token::ModSep) - }; - // Is `mut self` `n` tokens ahead? - let is_isolated_mut_self = |this: &Self, n| { - this.is_keyword_ahead(n, &[kw::Mut]) - && is_isolated_self(this, n + 1) - }; - // Parse `self` or `self: TYPE`. We already know the current token is `self`. - let parse_self_possibly_typed = |this: &mut Self, m| { - let eself_ident = expect_self_ident(this); - let eself_hi = this.prev_span; - let eself = if this.eat(&token::Colon) { - SelfKind::Explicit(this.parse_ty()?, m) - } else { - SelfKind::Value(m) - }; - Ok((eself, eself_ident, eself_hi)) - }; - // Recover for the grammar `*self`, `*const self`, and `*mut self`. - let recover_self_ptr = |this: &mut Self| { - let msg = "cannot pass `self` by raw pointer"; - let span = this.token.span; - this.struct_span_err(span, msg) - .span_label(span, msg) - .emit(); - - Ok((SelfKind::Value(Mutability::Immutable), expect_self_ident(this), this.prev_span)) - }; - - // Parse optional `self` parameter of a method. - // Only a limited set of initial token sequences is considered `self` parameters; anything - // else is parsed as a normal function parameter list, so some lookahead is required. - let eself_lo = self.token.span; - let (eself, eself_ident, eself_hi) = match self.token.kind { - token::BinOp(token::And) => { - let eself = if is_isolated_self(self, 1) { - // `&self` - self.bump(); - SelfKind::Region(None, Mutability::Immutable) - } else if is_isolated_mut_self(self, 1) { - // `&mut self` - self.bump(); - self.bump(); - SelfKind::Region(None, Mutability::Mutable) - } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) { - // `&'lt self` - self.bump(); - let lt = self.expect_lifetime(); - SelfKind::Region(Some(lt), Mutability::Immutable) - } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) { - // `&'lt mut self` - self.bump(); - let lt = self.expect_lifetime(); - self.bump(); - SelfKind::Region(Some(lt), Mutability::Mutable) - } else { - // `¬_self` - return Ok(None); - }; - (eself, expect_self_ident(self), self.prev_span) - } - // `*self` - token::BinOp(token::Star) if is_isolated_self(self, 1) => { - self.bump(); - recover_self_ptr(self)? - } - // `*mut self` and `*const self` - token::BinOp(token::Star) if - self.look_ahead(1, |t| t.is_mutability()) - && is_isolated_self(self, 2) => - { - self.bump(); - self.bump(); - recover_self_ptr(self)? - } - // `self` and `self: TYPE` - token::Ident(..) if is_isolated_self(self, 0) => { - parse_self_possibly_typed(self, Mutability::Immutable)? - } - // `mut self` and `mut self: TYPE` - token::Ident(..) if is_isolated_mut_self(self, 0) => { - self.bump(); - parse_self_possibly_typed(self, Mutability::Mutable)? 
- } - _ => return Ok(None), - }; - - let eself = source_map::respan(eself_lo.to(eself_hi), eself); - Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident))) - } - - fn is_named_param(&self) -> bool { - let offset = match self.token.kind { - token::Interpolated(ref nt) => match **nt { - token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon), - _ => 0, - } - token::BinOp(token::And) | token::AndAnd => 1, - _ if self.token.is_keyword(kw::Mut) => 1, - _ => 0, - }; - - self.look_ahead(offset, |t| t.is_ident()) && - self.look_ahead(offset + 1, |t| t == &token::Colon) - } - - fn recover_first_param(&mut self) -> &'static str { - match self.parse_outer_attributes() - .and_then(|_| self.parse_self_param()) - .map_err(|mut e| e.cancel()) - { - Ok(Some(_)) => "method", - _ => "function", - } - } -} diff --git a/src/libsyntax/parse/parser/mod.rs b/src/libsyntax/parse/parser/mod.rs deleted file mode 100644 index 455f4172f5f..00000000000 --- a/src/libsyntax/parse/parser/mod.rs +++ /dev/null @@ -1,1391 +0,0 @@ -pub mod attr; -mod expr; -mod pat; -mod item; -mod module; -mod ty; -mod path; -pub use path::PathStyle; -mod stmt; -mod generics; -mod diagnostics; -use diagnostics::Error; - -use crate::ast::{ - self, Abi, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident, - IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety, -}; -use crate::parse::{Directory, DirectoryOwnership}; -use crate::parse::lexer::UnmatchedBrace; -use crate::util::comments::{doc_comment_style, strip_doc_comment_decoration}; -use crate::token::{self, Token, TokenKind, DelimToken}; -use crate::print::pprust; -use crate::ptr::P; -use crate::sess::ParseSess; -use crate::source_map::respan; -use crate::symbol::{kw, sym, Symbol}; -use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; -use crate::ThinVec; - -use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; -use syntax_pos::{Span, BytePos, DUMMY_SP, FileName}; -use log::debug; - -use std::borrow::Cow; -use std::{cmp, mem, slice}; -use std::path::PathBuf; - -bitflags::bitflags! { - struct Restrictions: u8 { - const STMT_EXPR = 1 << 0; - const NO_STRUCT_LITERAL = 1 << 1; - } -} - -#[derive(Clone, Copy, PartialEq, Debug)] -enum SemiColonMode { - Break, - Ignore, - Comma, -} - -#[derive(Clone, Copy, PartialEq, Debug)] -enum BlockMode { - Break, - Ignore, -} - -/// Like `maybe_whole_expr`, but for things other than expressions. -#[macro_export] -macro_rules! maybe_whole { - ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - if let token::Interpolated(nt) = &$p.token.kind { - if let token::$constructor(x) = &**nt { - let $x = x.clone(); - $p.bump(); - return Ok($e); - } - } - }; -} - -/// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`. -#[macro_export] -macro_rules! maybe_recover_from_interpolated_ty_qpath { - ($self: expr, $allow_qpath_recovery: expr) => { - if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) { - if let token::Interpolated(nt) = &$self.token.kind { - if let token::NtTy(ty) = &**nt { - let ty = ty.clone(); - $self.bump(); - return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_span, ty); - } - } - } - } -} - -#[derive(Debug, Clone, Copy, PartialEq)] -enum PrevTokenKind { - DocComment, - Comma, - Plus, - Interpolated, - Eof, - Ident, - BitOr, - Other, -} - -// NOTE: `Ident`s are handled by `common.rs`. - -#[derive(Clone)] -pub struct Parser<'a> { - pub sess: &'a ParseSess, - /// The current normalized token. 
- /// "Normalized" means that some interpolated tokens - /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced - /// with non-interpolated identifier and lifetime tokens they refer to. - /// Perhaps the normalized / non-normalized setup can be simplified somehow. - pub token: Token, - /// The span of the current non-normalized token. - meta_var_span: Option<Span>, - /// The span of the previous non-normalized token. - pub prev_span: Span, - /// The kind of the previous normalized token (in simplified form). - prev_token_kind: PrevTokenKind, - restrictions: Restrictions, - /// Used to determine the path to externally loaded source files. - pub(super) directory: Directory<'a>, - /// `true` to parse sub-modules in other files. - pub(super) recurse_into_file_modules: bool, - /// Name of the root module this parser originated from. If `None`, then the - /// name is not known. This does not change while the parser is descending - /// into modules, and sub-parsers have new values for this name. - pub root_module_name: Option<String>, - expected_tokens: Vec<TokenType>, - token_cursor: TokenCursor, - desugar_doc_comments: bool, - /// `true` we should configure out of line modules as we parse. - cfg_mods: bool, - /// This field is used to keep track of how many left angle brackets we have seen. This is - /// required in order to detect extra leading left angle brackets (`<` characters) and error - /// appropriately. - /// - /// See the comments in the `parse_path_segment` function for more details. - unmatched_angle_bracket_count: u32, - max_angle_bracket_count: u32, - /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery - /// it gets removed from here. Every entry left at the end gets emitted as an independent - /// error. - pub(super) unclosed_delims: Vec<UnmatchedBrace>, - last_unexpected_token_span: Option<Span>, - pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, - /// If present, this `Parser` is not parsing Rust code but rather a macro call. - subparser_name: Option<&'static str>, -} - -impl<'a> Drop for Parser<'a> { - fn drop(&mut self) { - emit_unclosed_delims(&mut self.unclosed_delims, &self.sess); - } -} - -#[derive(Clone)] -struct TokenCursor { - frame: TokenCursorFrame, - stack: Vec<TokenCursorFrame>, -} - -#[derive(Clone)] -struct TokenCursorFrame { - delim: token::DelimToken, - span: DelimSpan, - open_delim: bool, - tree_cursor: tokenstream::Cursor, - close_delim: bool, - last_token: LastToken, -} - -/// This is used in `TokenCursorFrame` above to track tokens that are consumed -/// by the parser, and then that's transitively used to record the tokens that -/// each parse AST item is created with. -/// -/// Right now this has two states, either collecting tokens or not collecting -/// tokens. If we're collecting tokens we just save everything off into a local -/// `Vec`. This should eventually though likely save tokens from the original -/// token stream and just use slicing of token streams to avoid creation of a -/// whole new vector. -/// -/// The second state is where we're passively not recording tokens, but the last -/// token is still tracked for when we want to start recording tokens. This -/// "last token" means that when we start recording tokens we'll want to ensure -/// that this, the first token, is included in the output. -/// -/// You can find some more example usage of this in the `collect_tokens` method -/// on the parser. 
-#[derive(Clone)] -enum LastToken { - Collecting(Vec<TreeAndJoint>), - Was(Option<TreeAndJoint>), -} - -impl TokenCursorFrame { - fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { - TokenCursorFrame { - delim, - span, - open_delim: delim == token::NoDelim, - tree_cursor: tts.clone().into_trees(), - close_delim: delim == token::NoDelim, - last_token: LastToken::Was(None), - } - } -} - -impl TokenCursor { - fn next(&mut self) -> Token { - loop { - let tree = if !self.frame.open_delim { - self.frame.open_delim = true; - TokenTree::open_tt(self.frame.span, self.frame.delim) - } else if let Some(tree) = self.frame.tree_cursor.next() { - tree - } else if !self.frame.close_delim { - self.frame.close_delim = true; - TokenTree::close_tt(self.frame.span, self.frame.delim) - } else if let Some(frame) = self.stack.pop() { - self.frame = frame; - continue - } else { - return Token::new(token::Eof, DUMMY_SP); - }; - - match self.frame.last_token { - LastToken::Collecting(ref mut v) => v.push(tree.clone().into()), - LastToken::Was(ref mut t) => *t = Some(tree.clone().into()), - } - - match tree { - TokenTree::Token(token) => return token, - TokenTree::Delimited(sp, delim, tts) => { - let frame = TokenCursorFrame::new(sp, delim, &tts); - self.stack.push(mem::replace(&mut self.frame, frame)); - } - } - } - } - - fn next_desugared(&mut self) -> Token { - let (name, sp) = match self.next() { - Token { kind: token::DocComment(name), span } => (name, span), - tok => return tok, - }; - - let stripped = strip_doc_comment_decoration(&name.as_str()); - - // Searches for the occurrences of `"#*` and returns the minimum number of `#`s - // required to wrap the text. - let mut num_of_hashes = 0; - let mut count = 0; - for ch in stripped.chars() { - count = match ch { - '"' => 1, - '#' if count > 0 => count + 1, - _ => 0, - }; - num_of_hashes = cmp::max(num_of_hashes, count); - } - - let delim_span = DelimSpan::from_single(sp); - let body = TokenTree::Delimited( - delim_span, - token::Bracket, - [ - TokenTree::token(token::Ident(sym::doc, false), sp), - TokenTree::token(token::Eq, sp), - TokenTree::token(TokenKind::lit( - token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None - ), sp), - ] - .iter().cloned().collect::<TokenStream>().into(), - ); - - self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new( - delim_span, - token::NoDelim, - &if doc_comment_style(&name.as_str()) == AttrStyle::Inner { - [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] - .iter().cloned().collect::<TokenStream>() - } else { - [TokenTree::token(token::Pound, sp), body] - .iter().cloned().collect::<TokenStream>() - }, - ))); - - self.next() - } -} - -#[derive(Clone, PartialEq)] -enum TokenType { - Token(TokenKind), - Keyword(Symbol), - Operator, - Lifetime, - Ident, - Path, - Type, - Const, -} - -impl TokenType { - fn to_string(&self) -> String { - match *self { - TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)), - TokenType::Keyword(kw) => format!("`{}`", kw), - TokenType::Operator => "an operator".to_string(), - TokenType::Lifetime => "lifetime".to_string(), - TokenType::Ident => "identifier".to_string(), - TokenType::Path => "path".to_string(), - TokenType::Type => "type".to_string(), - TokenType::Const => "const".to_string(), - } - } -} - -#[derive(Copy, Clone, Debug)] -enum TokenExpectType { - Expect, - NoExpect, -} - -/// A sequence separator. -struct SeqSep { - /// The separator token. 
- sep: Option<TokenKind>, - /// `true` if a trailing separator is allowed. - trailing_sep_allowed: bool, -} - -impl SeqSep { - fn trailing_allowed(t: TokenKind) -> SeqSep { - SeqSep { - sep: Some(t), - trailing_sep_allowed: true, - } - } - - fn none() -> SeqSep { - SeqSep { - sep: None, - trailing_sep_allowed: false, - } - } -} - -impl<'a> Parser<'a> { - pub fn new( - sess: &'a ParseSess, - tokens: TokenStream, - directory: Option<Directory<'a>>, - recurse_into_file_modules: bool, - desugar_doc_comments: bool, - subparser_name: Option<&'static str>, - ) -> Self { - let mut parser = Parser { - sess, - token: Token::dummy(), - prev_span: DUMMY_SP, - meta_var_span: None, - prev_token_kind: PrevTokenKind::Other, - restrictions: Restrictions::empty(), - recurse_into_file_modules, - directory: Directory { - path: Cow::from(PathBuf::new()), - ownership: DirectoryOwnership::Owned { relative: None } - }, - root_module_name: None, - expected_tokens: Vec::new(), - token_cursor: TokenCursor { - frame: TokenCursorFrame::new( - DelimSpan::dummy(), - token::NoDelim, - &tokens.into(), - ), - stack: Vec::new(), - }, - desugar_doc_comments, - cfg_mods: true, - unmatched_angle_bracket_count: 0, - max_angle_bracket_count: 0, - unclosed_delims: Vec::new(), - last_unexpected_token_span: None, - last_type_ascription: None, - subparser_name, - }; - - parser.token = parser.next_tok(); - - if let Some(directory) = directory { - parser.directory = directory; - } else if !parser.token.span.is_dummy() { - if let Some(FileName::Real(path)) = - &sess.source_map().lookup_char_pos(parser.token.span.lo()).file.unmapped_path { - if let Some(directory_path) = path.parent() { - parser.directory.path = Cow::from(directory_path.to_path_buf()); - } - } - } - - parser.process_potential_macro_variable(); - parser - } - - fn next_tok(&mut self) -> Token { - let mut next = if self.desugar_doc_comments { - self.token_cursor.next_desugared() - } else { - self.token_cursor.next() - }; - if next.span.is_dummy() { - // Tweak the location for better diagnostics, but keep syntactic context intact. - next.span = self.prev_span.with_ctxt(next.span.ctxt()); - } - next - } - - /// Converts the current token to a string using `self`'s reader. - pub fn this_token_to_string(&self) -> String { - pprust::token_to_string(&self.token) - } - - fn token_descr(&self) -> Option<&'static str> { - Some(match &self.token.kind { - _ if self.token.is_special_ident() => "reserved identifier", - _ if self.token.is_used_keyword() => "keyword", - _ if self.token.is_unused_keyword() => "reserved keyword", - token::DocComment(..) => "doc comment", - _ => return None, - }) - } - - pub(super) fn this_token_descr(&self) -> String { - if let Some(prefix) = self.token_descr() { - format!("{} `{}`", prefix, self.this_token_to_string()) - } else { - format!("`{}`", self.this_token_to_string()) - } - } - - crate fn unexpected<T>(&mut self) -> PResult<'a, T> { - match self.expect_one_of(&[], &[]) { - Err(e) => Err(e), - Ok(_) => unreachable!(), - } - } - - /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { - if self.expected_tokens.is_empty() { - if self.token == *t { - self.bump(); - Ok(false) - } else { - self.unexpected_try_recover(t) - } - } else { - self.expect_one_of(slice::from_ref(t), &[]) - } - } - - /// Expect next token to be edible or inedible token. If edible, - /// then consume it; if inedible, then return without consuming - /// anything. 
Signal a fatal error if next token is unexpected. - pub fn expect_one_of( - &mut self, - edible: &[TokenKind], - inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { - if edible.contains(&self.token.kind) { - self.bump(); - Ok(false) - } else if inedible.contains(&self.token.kind) { - // leave it in the input - Ok(false) - } else if self.last_unexpected_token_span == Some(self.token.span) { - FatalError.raise(); - } else { - self.expected_one_of_not_found(edible, inedible) - } - } - - fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { - self.parse_ident_common(true) - } - - fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { - match self.token.kind { - token::Ident(name, _) => { - if self.token.is_reserved_ident() { - let mut err = self.expected_ident_found(); - if recover { - err.emit(); - } else { - return Err(err); - } - } - let span = self.token.span; - self.bump(); - Ok(Ident::new(name, span)) - } - _ => { - Err(if self.prev_token_kind == PrevTokenKind::DocComment { - self.span_fatal_err(self.prev_span, Error::UselessDocComment) - } else { - self.expected_ident_found() - }) - } - } - } - - /// Checks if the next token is `tok`, and returns `true` if so. - /// - /// This method will automatically add `tok` to `expected_tokens` if `tok` is not - /// encountered. - fn check(&mut self, tok: &TokenKind) -> bool { - let is_present = self.token == *tok; - if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } - is_present - } - - /// Consumes a token 'tok' if it exists. Returns whether the given token was present. - pub fn eat(&mut self, tok: &TokenKind) -> bool { - let is_present = self.check(tok); - if is_present { self.bump() } - is_present - } - - /// If the next token is the given keyword, returns `true` without eating it. - /// An expectation is also added for diagnostics purposes. - fn check_keyword(&mut self, kw: Symbol) -> bool { - self.expected_tokens.push(TokenType::Keyword(kw)); - self.token.is_keyword(kw) - } - - /// If the next token is the given keyword, eats it and returns `true`. - /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes. - fn eat_keyword(&mut self, kw: Symbol) -> bool { - if self.check_keyword(kw) { - self.bump(); - true - } else { - false - } - } - - fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool { - if self.token.is_keyword(kw) { - self.bump(); - true - } else { - false - } - } - - /// If the given word is not a keyword, signals an error. - /// If the next token is not the given word, signals an error. - /// Otherwise, eats it. - fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> { - if !self.eat_keyword(kw) { - self.unexpected() - } else { - Ok(()) - } - } - - fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool { - if ok { - true - } else { - self.expected_tokens.push(typ); - false - } - } - - fn check_ident(&mut self) -> bool { - self.check_or_expected(self.token.is_ident(), TokenType::Ident) - } - - fn check_path(&mut self) -> bool { - self.check_or_expected(self.token.is_path_start(), TokenType::Path) - } - - fn check_type(&mut self) -> bool { - self.check_or_expected(self.token.can_begin_type(), TokenType::Type) - } - - fn check_const_arg(&mut self) -> bool { - self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const) - } - - /// Checks to see if the next token is either `+` or `+=`. - /// Otherwise returns `false`. 
- fn check_plus(&mut self) -> bool { - self.check_or_expected( - self.token.is_like_plus(), - TokenType::Token(token::BinOp(token::Plus)), - ) - } - - /// Expects and consumes a `+`. if `+=` is seen, replaces it with a `=` - /// and continues. If a `+` is not seen, returns `false`. - /// - /// This is used when token-splitting `+=` into `+`. - /// See issue #47856 for an example of when this may occur. - fn eat_plus(&mut self) -> bool { - self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); - match self.token.kind { - token::BinOp(token::Plus) => { - self.bump(); - true - } - token::BinOpEq(token::Plus) => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - self.bump_with(token::Eq, span); - true - } - _ => false, - } - } - - /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single - /// `&` and continues. If an `&` is not seen, signals an error. - fn expect_and(&mut self) -> PResult<'a, ()> { - self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); - match self.token.kind { - token::BinOp(token::And) => { - self.bump(); - Ok(()) - } - token::AndAnd => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - Ok(self.bump_with(token::BinOp(token::And), span)) - } - _ => self.unexpected() - } - } - - /// Expects and consumes an `|`. If `||` is seen, replaces it with a single - /// `|` and continues. If an `|` is not seen, signals an error. - fn expect_or(&mut self) -> PResult<'a, ()> { - self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); - match self.token.kind { - token::BinOp(token::Or) => { - self.bump(); - Ok(()) - } - token::OrOr => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - Ok(self.bump_with(token::BinOp(token::Or), span)) - } - _ => self.unexpected() - } - } - - /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single - /// `<` and continue. If `<-` is seen, replaces it with a single `<` - /// and continue. If a `<` is not seen, returns false. - /// - /// This is meant to be used when parsing generics on a path to get the - /// starting token. - fn eat_lt(&mut self) -> bool { - self.expected_tokens.push(TokenType::Token(token::Lt)); - let ate = match self.token.kind { - token::Lt => { - self.bump(); - true - } - token::BinOp(token::Shl) => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - self.bump_with(token::Lt, span); - true - } - token::LArrow => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - self.bump_with(token::BinOp(token::Minus), span); - true - } - _ => false, - }; - - if ate { - // See doc comment for `unmatched_angle_bracket_count`. - self.unmatched_angle_bracket_count += 1; - self.max_angle_bracket_count += 1; - debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); - } - - ate - } - - fn expect_lt(&mut self) -> PResult<'a, ()> { - if !self.eat_lt() { - self.unexpected() - } else { - Ok(()) - } - } - - /// Expects and consumes a single `>` token. if a `>>` is seen, replaces it - /// with a single `>` and continues. If a `>` is not seen, signals an error. 
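A concrete case for the token splitting below: closing two generic argument lists that the lexer has glued into a single `>>` token (illustrative sketch, not from the deleted file):

```rust
// The lexer emits one `>>` token at the end of the return type; the parser
// splits it so each of the two nested argument lists can consume one `>`.
fn nested() -> Vec<Vec<u8>> {
    vec![vec![1, 2, 3]]
}

fn main() {
    assert_eq!(nested().len(), 1);
}
```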
- fn expect_gt(&mut self) -> PResult<'a, ()> { - self.expected_tokens.push(TokenType::Token(token::Gt)); - let ate = match self.token.kind { - token::Gt => { - self.bump(); - Some(()) - } - token::BinOp(token::Shr) => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - Some(self.bump_with(token::Gt, span)) - } - token::BinOpEq(token::Shr) => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - Some(self.bump_with(token::Ge, span)) - } - token::Ge => { - let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); - Some(self.bump_with(token::Eq, span)) - } - _ => None, - }; - - match ate { - Some(_) => { - // See doc comment for `unmatched_angle_bracket_count`. - if self.unmatched_angle_bracket_count > 0 { - self.unmatched_angle_bracket_count -= 1; - debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); - } - - Ok(()) - }, - None => self.unexpected(), - } - } - - /// Parses a sequence, including the closing delimiter. The function - /// `f` must consume tokens until reaching the next separator or - /// closing bracket. - fn parse_seq_to_end<T>( - &mut self, - ket: &TokenKind, - sep: SeqSep, - f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, Vec<T>> { - let (val, _, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; - if !recovered { - self.bump(); - } - Ok(val) - } - - /// Parses a sequence, not including the closing delimiter. The function - /// `f` must consume tokens until reaching the next separator or - /// closing bracket. - fn parse_seq_to_before_end<T>( - &mut self, - ket: &TokenKind, - sep: SeqSep, - f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (Vec<T>, bool, bool)> { - self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) - } - - fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool { - kets.iter().any(|k| { - match expect { - TokenExpectType::Expect => self.check(k), - TokenExpectType::NoExpect => self.token == **k, - } - }) - } - - fn parse_seq_to_before_tokens<T>( - &mut self, - kets: &[&TokenKind], - sep: SeqSep, - expect: TokenExpectType, - mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> { - let mut first = true; - let mut recovered = false; - let mut trailing = false; - let mut v = vec![]; - while !self.expect_any_with_type(kets, expect) { - if let token::CloseDelim(..) | token::Eof = self.token.kind { - break - } - if let Some(ref t) = sep.sep { - if first { - first = false; - } else { - match self.expect(t) { - Ok(false) => {} - Ok(true) => { - recovered = true; - break; - } - Err(mut e) => { - // Attempt to keep parsing if it was a similar separator. - if let Some(ref tokens) = t.similar_tokens() { - if tokens.contains(&self.token.kind) { - self.bump(); - } - } - e.emit(); - // Attempt to keep parsing if it was an omitted separator. - match f(self) { - Ok(t) => { - v.push(t); - continue; - }, - Err(mut e) => { - e.cancel(); - break; - } - } - } - } - } - } - if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { - trailing = true; - break; - } - - let t = f(self)?; - v.push(t); - } - - Ok((v, trailing, recovered)) - } - - /// Parses a sequence, including the closing delimiter. The function - /// `f` must consume tokens until reaching the next separator or - /// closing bracket. 
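The `SeqSep` configuration is what makes trailing separators legal in most delimited lists; for example (illustrative, ordinary Rust source rather than the deleted implementation):

```rust
// With `SeqSep::trailing_allowed(token::Comma)`, both calls below parse
// identically: the trailing comma before the closing delimiter is eaten
// by the sequence-parsing helpers rather than by each element parser.
fn take(values: (u8, u8), more: &[u8]) -> usize {
    values.0 as usize + more.len()
}

fn main() {
    let a = take((1, 2), &[3, 4]);
    let b = take((1, 2,), &[3, 4,],);
    assert_eq!(a, b);
}
```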
- fn parse_unspanned_seq<T>( - &mut self, - bra: &TokenKind, - ket: &TokenKind, - sep: SeqSep, - f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (Vec<T>, bool)> { - self.expect(bra)?; - let (result, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; - if !recovered { - self.eat(ket); - } - Ok((result, trailing)) - } - - fn parse_delim_comma_seq<T>( - &mut self, - delim: DelimToken, - f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (Vec<T>, bool)> { - self.parse_unspanned_seq( - &token::OpenDelim(delim), - &token::CloseDelim(delim), - SeqSep::trailing_allowed(token::Comma), - f, - ) - } - - fn parse_paren_comma_seq<T>( - &mut self, - f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (Vec<T>, bool)> { - self.parse_delim_comma_seq(token::Paren, f) - } - - /// Advance the parser by one token. - pub fn bump(&mut self) { - if self.prev_token_kind == PrevTokenKind::Eof { - // Bumping after EOF is a bad sign, usually an infinite loop. - self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); - } - - self.prev_span = self.meta_var_span.take().unwrap_or(self.token.span); - - // Record last token kind for possible error recovery. - self.prev_token_kind = match self.token.kind { - token::DocComment(..) => PrevTokenKind::DocComment, - token::Comma => PrevTokenKind::Comma, - token::BinOp(token::Plus) => PrevTokenKind::Plus, - token::BinOp(token::Or) => PrevTokenKind::BitOr, - token::Interpolated(..) => PrevTokenKind::Interpolated, - token::Eof => PrevTokenKind::Eof, - token::Ident(..) => PrevTokenKind::Ident, - _ => PrevTokenKind::Other, - }; - - self.token = self.next_tok(); - self.expected_tokens.clear(); - // Check after each token. - self.process_potential_macro_variable(); - } - - /// Advances the parser using provided token as a next one. Use this when - /// consuming a part of a token. For example a single `<` from `<<`. - fn bump_with(&mut self, next: TokenKind, span: Span) { - self.prev_span = self.token.span.with_hi(span.lo()); - // It would be incorrect to record the kind of the current token, but - // fortunately for tokens currently using `bump_with`, the - // `prev_token_kind` will be of no use anyway. - self.prev_token_kind = PrevTokenKind::Other; - self.token = Token::new(next, span); - self.expected_tokens.clear(); - } - - /// Look-ahead `dist` tokens of `self.token` and get access to that token there. - /// When `dist == 0` then the current token is looked at. - pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R { - if dist == 0 { - return looker(&self.token); - } - - let frame = &self.token_cursor.frame; - looker(&match frame.tree_cursor.look_ahead(dist - 1) { - Some(tree) => match tree { - TokenTree::Token(token) => token, - TokenTree::Delimited(dspan, delim, _) => - Token::new(token::OpenDelim(delim), dspan.open), - } - None => Token::new(token::CloseDelim(frame.delim), frame.span.close) - }) - } - - /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. - fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { - self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) - } - - /// Parses asyncness: `async` or nothing. - fn parse_asyncness(&mut self) -> IsAsync { - if self.eat_keyword(kw::Async) { - IsAsync::Async { - closure_id: DUMMY_NODE_ID, - return_impl_trait_id: DUMMY_NODE_ID, - } - } else { - IsAsync::NotAsync - } - } - - /// Parses unsafety: `unsafe` or nothing. 
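The qualifiers read by `parse_asyncness` above and `parse_unsafety` below appear in source in a fixed order as part of the `fn` front matter; this sketch assumes edition 2018 for `async fn` and is not taken from the deleted file:

```rust
// `const`, `async`, `unsafe` and an explicit ABI all precede the `fn` keyword.
const fn front_const() -> u8 { 7 }
async fn front_async() {}              // rejected with E0670 on edition 2015
unsafe fn front_unsafe() {}
unsafe extern "C" fn front_extern() {}

fn main() {
    assert_eq!(front_const(), 7);
    let _future = front_async();       // constructed but never awaited
    unsafe {
        front_unsafe();
        front_extern();
    }
}
```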
- fn parse_unsafety(&mut self) -> Unsafety { - if self.eat_keyword(kw::Unsafe) { - Unsafety::Unsafe - } else { - Unsafety::Normal - } - } - - /// Parses mutability (`mut` or nothing). - fn parse_mutability(&mut self) -> Mutability { - if self.eat_keyword(kw::Mut) { - Mutability::Mutable - } else { - Mutability::Immutable - } - } - - /// Possibly parses mutability (`const` or `mut`). - fn parse_const_or_mut(&mut self) -> Option<Mutability> { - if self.eat_keyword(kw::Mut) { - Some(Mutability::Mutable) - } else if self.eat_keyword(kw::Const) { - Some(Mutability::Immutable) - } else { - None - } - } - - fn parse_field_name(&mut self) -> PResult<'a, Ident> { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = - self.token.kind { - self.expect_no_suffix(self.token.span, "a tuple index", suffix); - self.bump(); - Ok(Ident::new(symbol, self.prev_span)) - } else { - self.parse_ident_common(false) - } - } - - fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { - let delim = match self.token.kind { - token::OpenDelim(delim) => delim, - _ => { - let msg = "expected open delimiter"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - return Err(err) - } - }; - let tts = match self.parse_token_tree() { - TokenTree::Delimited(_, _, tts) => tts, - _ => unreachable!(), - }; - let delim = match delim { - token::Paren => MacDelimiter::Parenthesis, - token::Bracket => MacDelimiter::Bracket, - token::Brace => MacDelimiter::Brace, - token::NoDelim => self.bug("unexpected no delimiter"), - }; - Ok((delim, tts.into())) - } - - fn parse_or_use_outer_attributes( - &mut self, - already_parsed_attrs: Option<ThinVec<Attribute>>, - ) -> PResult<'a, ThinVec<Attribute>> { - if let Some(attrs) = already_parsed_attrs { - Ok(attrs) - } else { - self.parse_outer_attributes().map(|a| a.into()) - } - } - - pub fn process_potential_macro_variable(&mut self) { - self.token = match self.token.kind { - token::Dollar if self.token.span.from_expansion() && - self.look_ahead(1, |t| t.is_ident()) => { - self.bump(); - let name = match self.token.kind { - token::Ident(name, _) => name, - _ => unreachable!() - }; - let span = self.prev_span.to(self.token.span); - self.diagnostic() - .struct_span_fatal(span, &format!("unknown macro variable `{}`", name)) - .span_label(span, "unknown macro variable") - .emit(); - self.bump(); - return - } - token::Interpolated(ref nt) => { - self.meta_var_span = Some(self.token.span); - // Interpolated identifier and lifetime tokens are replaced with usual identifier - // and lifetime tokens, so the former are never encountered during normal parsing. - match **nt { - token::NtIdent(ident, is_raw) => - Token::new(token::Ident(ident.name, is_raw), ident.span), - token::NtLifetime(ident) => - Token::new(token::Lifetime(ident.name), ident.span), - _ => return, - } - } - _ => return, - }; - } - - /// Parses a single token tree from the input. - pub fn parse_token_tree(&mut self) -> TokenTree { - match self.token.kind { - token::OpenDelim(..) => { - let frame = mem::replace(&mut self.token_cursor.frame, - self.token_cursor.stack.pop().unwrap()); - self.token.span = frame.span.entire(); - self.bump(); - TokenTree::Delimited( - frame.span, - frame.delim, - frame.tree_cursor.stream.into(), - ) - }, - token::CloseDelim(_) | token::Eof => unreachable!(), - _ => { - let token = self.token.take(); - self.bump(); - TokenTree::Token(token) - } - } - } - - /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF. 
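A `TokenTree` is either a single token or a whole delimited group, and the `tt` macro fragment exposes the same structure at the surface; the following is an illustrative sketch, not part of the deleted file:

```rust
// Each `tt` matcher captures exactly one token tree: a lone token such as
// `a`, or a delimited group such as `(b c)` or `[d]`.
macro_rules! count_tts {
    () => { 0usize };
    ($head:tt $($rest:tt)*) => { 1usize + count_tts!($($rest)*) };
}

fn main() {
    assert_eq!(count_tts!(a (b c) [d]), 3);
}
```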
- pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> { - let mut tts = Vec::new(); - while self.token != token::Eof { - tts.push(self.parse_token_tree()); - } - Ok(tts) - } - - pub fn parse_tokens(&mut self) -> TokenStream { - let mut result = Vec::new(); - loop { - match self.token.kind { - token::Eof | token::CloseDelim(..) => break, - _ => result.push(self.parse_token_tree().into()), - } - } - TokenStream::new(result) - } - - /// Evaluates the closure with restrictions in place. - /// - /// Afters the closure is evaluated, restrictions are reset. - fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T { - let old = self.restrictions; - self.restrictions = res; - let res = f(self); - self.restrictions = old; - res - } - - fn is_crate_vis(&self) -> bool { - self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep) - } - - /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`, - /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. - /// If the following element can't be a tuple (i.e., it's a function definition), then - /// it's not a tuple struct field), and the contents within the parentheses isn't valid, - /// so emit a proper diagnostic. - pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> { - maybe_whole!(self, NtVis, |x| x); - - self.expected_tokens.push(TokenType::Keyword(kw::Crate)); - if self.is_crate_vis() { - self.bump(); // `crate` - self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_span); - return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate))); - } - - if !self.eat_keyword(kw::Pub) { - // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no - // keyword to grab a span from for inherited visibility; an empty span at the - // beginning of the current token would seem to be the "Schelling span". - return Ok(respan(self.token.span.shrink_to_lo(), VisibilityKind::Inherited)) - } - let lo = self.prev_span; - - if self.check(&token::OpenDelim(token::Paren)) { - // We don't `self.bump()` the `(` yet because this might be a struct definition where - // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. - // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so - // by the following tokens. - if self.is_keyword_ahead(1, &[kw::Crate]) - && self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)` - { - // Parse `pub(crate)`. - self.bump(); // `(` - self.bump(); // `crate` - self.expect(&token::CloseDelim(token::Paren))?; // `)` - let vis = VisibilityKind::Crate(CrateSugar::PubCrate); - return Ok(respan(lo.to(self.prev_span), vis)); - } else if self.is_keyword_ahead(1, &[kw::In]) { - // Parse `pub(in path)`. - self.bump(); // `(` - self.bump(); // `in` - let path = self.parse_path(PathStyle::Mod)?; // `path` - self.expect(&token::CloseDelim(token::Paren))?; // `)` - let vis = VisibilityKind::Restricted { - path: P(path), - id: ast::DUMMY_NODE_ID, - }; - return Ok(respan(lo.to(self.prev_span), vis)); - } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) - && self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower]) - { - // Parse `pub(self)` or `pub(super)`. 
- self.bump(); // `(` - let path = self.parse_path(PathStyle::Mod)?; // `super`/`self` - self.expect(&token::CloseDelim(token::Paren))?; // `)` - let vis = VisibilityKind::Restricted { - path: P(path), - id: ast::DUMMY_NODE_ID, - }; - return Ok(respan(lo.to(self.prev_span), vis)); - } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct. - self.recover_incorrect_vis_restriction()?; - // Emit diagnostic, but continue with public visibility. - } - } - - Ok(respan(lo, VisibilityKind::Public)) - } - - /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }` - fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { - self.bump(); // `(` - let path = self.parse_path(PathStyle::Mod)?; - self.expect(&token::CloseDelim(token::Paren))?; // `)` - - let msg = "incorrect visibility restriction"; - let suggestion = r##"some possible visibility restrictions are: -`pub(crate)`: visible only on the current crate -`pub(super)`: visible only in the current module's parent -`pub(in path::to::module)`: visible only on the specified path"##; - - let path_str = pprust::path_to_string(&path); - - struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg) - .help(suggestion) - .span_suggestion( - path.span, - &format!("make this visible only to module `{}` with `in`", path_str), - format!("in {}", path_str), - Applicability::MachineApplicable, - ) - .emit(); - - Ok(()) - } - - /// Parses `extern string_literal?`. - /// If `extern` is not found, the Rust ABI is used. - /// If `extern` is found and a `string_literal` does not follow, the C ABI is used. - fn parse_extern_abi(&mut self) -> PResult<'a, Abi> { - Ok(if self.eat_keyword(kw::Extern) { - self.parse_opt_abi()? - } else { - Abi::default() - }) - } - - /// Parses a string literal as an ABI spec. - /// If one is not found, the "C" ABI is used. - fn parse_opt_abi(&mut self) -> PResult<'a, Abi> { - let span = if self.token.can_begin_literal_or_bool() { - let ast::Lit { span, kind, .. } = self.parse_lit()?; - match kind { - ast::LitKind::Str(symbol, _) => return Ok(Abi::new(symbol, span)), - ast::LitKind::Err(_) => {} - _ => { - self.struct_span_err(span, "non-string ABI literal") - .span_suggestion( - span, - "specify the ABI with a string literal", - "\"C\"".to_string(), - Applicability::MaybeIncorrect, - ) - .emit(); - } - } - span - } else { - self.prev_span - }; - Ok(Abi::new(sym::C, span)) - } - - /// We are parsing `async fn`. If we are on Rust 2015, emit an error. - fn ban_async_in_2015(&self, async_span: Span) { - if async_span.rust_2015() { - self.diagnostic() - .struct_span_err_with_code( - async_span, - "`async fn` is not permitted in the 2015 edition", - DiagnosticId::Error("E0670".into()) - ) - .emit(); - } - } - - fn collect_tokens<R>( - &mut self, - f: impl FnOnce(&mut Self) -> PResult<'a, R>, - ) -> PResult<'a, (R, TokenStream)> { - // Record all tokens we parse when parsing this item. 
- let mut tokens = Vec::new(); - let prev_collecting = match self.token_cursor.frame.last_token { - LastToken::Collecting(ref mut list) => { - Some(mem::take(list)) - } - LastToken::Was(ref mut last) => { - tokens.extend(last.take()); - None - } - }; - self.token_cursor.frame.last_token = LastToken::Collecting(tokens); - let prev = self.token_cursor.stack.len(); - let ret = f(self); - let last_token = if self.token_cursor.stack.len() == prev { - &mut self.token_cursor.frame.last_token - } else if self.token_cursor.stack.get(prev).is_none() { - // This can happen due to a bad interaction of two unrelated recovery mechanisms with - // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(` - // (#62881). - return Ok((ret?, TokenStream::default())); - } else { - &mut self.token_cursor.stack[prev].last_token - }; - - // Pull out the tokens that we've collected from the call to `f` above. - let mut collected_tokens = match *last_token { - LastToken::Collecting(ref mut v) => mem::take(v), - LastToken::Was(ref was) => { - let msg = format!("our vector went away? - found Was({:?})", was); - debug!("collect_tokens: {}", msg); - self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg); - // This can happen due to a bad interaction of two unrelated recovery mechanisms - // with mismatched delimiters *and* recovery lookahead on the likely typo - // `pub ident(` (#62895, different but similar to the case above). - return Ok((ret?, TokenStream::default())); - } - }; - - // If we're not at EOF our current token wasn't actually consumed by - // `f`, but it'll still be in our list that we pulled out. In that case - // put it back. - let extra_token = if self.token != token::Eof { - collected_tokens.pop() - } else { - None - }; - - // If we were previously collecting tokens, then this was a recursive - // call. In that case we need to record all the tokens we collected in - // our parent list as well. To do that we push a clone of our stream - // onto the previous list. - match prev_collecting { - Some(mut list) => { - list.extend(collected_tokens.iter().cloned()); - list.extend(extra_token); - *last_token = LastToken::Collecting(list); - } - None => { - *last_token = LastToken::Was(extra_token); - } - } - - Ok((ret?, TokenStream::new(collected_tokens))) - } - - /// `::{` or `::*` - fn is_import_coupler(&mut self) -> bool { - self.check(&token::ModSep) && - self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) || - *t == token::BinOp(token::Star)) - } - - fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> { - let ret = match self.token.kind { - token::Literal(token::Lit { kind: token::Str, symbol, suffix }) => - (symbol, ast::StrStyle::Cooked, suffix), - token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) => - (symbol, ast::StrStyle::Raw(n), suffix), - _ => return None - }; - self.bump(); - Some(ret) - } - - pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> { - match self.parse_optional_str() { - Some((s, style, suf)) => { - let sp = self.prev_span; - self.expect_no_suffix(sp, "a string literal", suf); - Ok((s, style)) - } - _ => { - let msg = "expected string literal"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - Err(err) - } - } - } -} - -crate fn make_unclosed_delims_error( - unmatched: UnmatchedBrace, - sess: &ParseSess, -) -> Option<DiagnosticBuilder<'_>> { - // `None` here means an `Eof` was found. 
We already emit those errors elsewhere, we add them to - // `unmatched_braces` only for error recovery in the `Parser`. - let found_delim = unmatched.found_delim?; - let mut err = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( - "incorrect close delimiter: `{}`", - pprust::token_kind_to_string(&token::CloseDelim(found_delim)), - )); - err.span_label(unmatched.found_span, "incorrect close delimiter"); - if let Some(sp) = unmatched.candidate_span { - err.span_label(sp, "close delimiter possibly meant for this"); - } - if let Some(sp) = unmatched.unclosed_span { - err.span_label(sp, "un-closed delimiter"); - } - Some(err) -} - -pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) { - *sess.reached_eof.borrow_mut() |= unclosed_delims.iter() - .any(|unmatched_delim| unmatched_delim.found_delim.is_none()); - for unmatched in unclosed_delims.drain(..) { - make_unclosed_delims_error(unmatched, sess).map(|mut e| e.emit()); - } -} diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs deleted file mode 100644 index ad72b3a1dea..00000000000 --- a/src/libsyntax/parse/parser/module.rs +++ /dev/null @@ -1,315 +0,0 @@ -use super::Parser; -use super::item::ItemInfo; -use super::diagnostics::Error; - -use crate::attr; -use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate}; -use crate::parse::{new_sub_parser_from_file, DirectoryOwnership}; -use crate::token::{self, TokenKind}; -use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName}; - -use syntax_pos::symbol::sym; -use errors::PResult; - -use std::path::{self, Path, PathBuf}; - -/// Information about the path to a module. -pub(super) struct ModulePath { - name: String, - path_exists: bool, - pub result: Result<ModulePathSuccess, Error>, -} - -pub(super) struct ModulePathSuccess { - pub path: PathBuf, - pub directory_ownership: DirectoryOwnership, -} - -impl<'a> Parser<'a> { - /// Parses a source module as a crate. This is the main entry point for the parser. - pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { - let lo = self.token.span; - let krate = Ok(ast::Crate { - attrs: self.parse_inner_attributes()?, - module: self.parse_mod_items(&token::Eof, lo)?, - span: lo.to(self.token.span), - }); - krate - } - - /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. - pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> { - // HACK(Centril): See documentation on `ParseSess::process_cfg_mod`. - let (in_cfg, outer_attrs) = (self.sess.process_cfg_mod)( - self.sess, - self.cfg_mods, - outer_attrs, - ); - - let id_span = self.token.span; - let id = self.parse_ident()?; - if self.eat(&token::Semi) { - if in_cfg && self.recurse_into_file_modules { - // This mod is in an external file. Let's go get it! 
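// A hypothetical sketch (not from the deleted file) of the two `mod` forms that
// `parse_item_mod` above distinguishes. The out-of-line forms are commented out because
// they would require the referenced files to exist on disk.
mod inline_example {            // `mod name { ... }`: parsed in place, `Mod::inline == true`
    pub fn hello() {}
}
// mod data;                    // `mod name;`: the parser loads ./data.rs or ./data/mod.rs
// #[path = "some/other.rs"]    // a `#[path]` attribute overrides that lookup entirely
// mod renamed;

fn main() {
    inline_example::hello();
}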
- let ModulePathSuccess { path, directory_ownership } = - self.submod_path(id, &outer_attrs, id_span)?; - let (module, attrs) = - self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)?; - Ok((id, ItemKind::Mod(module), Some(attrs))) - } else { - let placeholder = ast::Mod { - inner: DUMMY_SP, - items: Vec::new(), - inline: false - }; - Ok((id, ItemKind::Mod(placeholder), None)) - } - } else { - let old_directory = self.directory.clone(); - self.push_directory(id, &outer_attrs); - - self.expect(&token::OpenDelim(token::Brace))?; - let mod_inner_lo = self.token.span; - let attrs = self.parse_inner_attributes()?; - let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; - - self.directory = old_directory; - Ok((id, ItemKind::Mod(module), Some(attrs))) - } - } - - /// Given a termination token, parses all of the items in a module. - fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> { - let mut items = vec![]; - while let Some(item) = self.parse_item()? { - items.push(item); - self.maybe_consume_incorrect_semicolon(&items); - } - - if !self.eat(term) { - let token_str = self.this_token_descr(); - if !self.maybe_consume_incorrect_semicolon(&items) { - let mut err = self.fatal(&format!("expected item, found {}", token_str)); - err.span_label(self.token.span, "expected item"); - return Err(err); - } - } - - let hi = if self.token.span.is_dummy() { - inner_lo - } else { - self.prev_span - }; - - Ok(Mod { - inner: inner_lo.to(hi), - items, - inline: true - }) - } - - fn submod_path( - &mut self, - id: ast::Ident, - outer_attrs: &[Attribute], - id_sp: Span - ) -> PResult<'a, ModulePathSuccess> { - if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) { - return Ok(ModulePathSuccess { - directory_ownership: match path.file_name().and_then(|s| s.to_str()) { - // All `#[path]` files are treated as though they are a `mod.rs` file. - // This means that `mod foo;` declarations inside `#[path]`-included - // files are siblings, - // - // Note that this will produce weirdness when a file named `foo.rs` is - // `#[path]` included and contains a `mod foo;` declaration. - // If you encounter this, it's your own darn fault :P - Some(_) => DirectoryOwnership::Owned { relative: None }, - _ => DirectoryOwnership::UnownedViaMod, - }, - path, - }); - } - - let relative = match self.directory.ownership { - DirectoryOwnership::Owned { relative } => relative, - DirectoryOwnership::UnownedViaBlock | - DirectoryOwnership::UnownedViaMod => None, - }; - let paths = Parser::default_submod_path( - id, relative, &self.directory.path, self.sess.source_map()); - - match self.directory.ownership { - DirectoryOwnership::Owned { .. 
} => { - paths.result.map_err(|err| self.span_fatal_err(id_sp, err)) - }, - DirectoryOwnership::UnownedViaBlock => { - let msg = - "Cannot declare a non-inline module inside a block \ - unless it has a path attribute"; - let mut err = self.diagnostic().struct_span_err(id_sp, msg); - if paths.path_exists { - let msg = format!("Maybe `use` the module `{}` instead of redeclaring it", - paths.name); - err.span_note(id_sp, &msg); - } - Err(err) - } - DirectoryOwnership::UnownedViaMod => { - let mut err = self.diagnostic().struct_span_err(id_sp, - "cannot declare a new module at this location"); - if !id_sp.is_dummy() { - let src_path = self.sess.source_map().span_to_filename(id_sp); - if let FileName::Real(src_path) = src_path { - if let Some(stem) = src_path.file_stem() { - let mut dest_path = src_path.clone(); - dest_path.set_file_name(stem); - dest_path.push("mod.rs"); - err.span_note(id_sp, - &format!("maybe move this module `{}` to its own \ - directory via `{}`", src_path.display(), - dest_path.display())); - } - } - } - if paths.path_exists { - err.span_note(id_sp, - &format!("... or maybe `use` the module `{}` instead \ - of possibly redeclaring it", - paths.name)); - } - Err(err) - } - } - } - - pub(super) fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { - if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) { - let s = s.as_str(); - - // On windows, the base path might have the form - // `\\?\foo\bar` in which case it does not tolerate - // mixed `/` and `\` separators, so canonicalize - // `/` to `\`. - #[cfg(windows)] - let s = s.replace("/", "\\"); - Some(dir_path.join(&*s)) - } else { - None - } - } - - /// Returns a path to a module. - pub(super) fn default_submod_path( - id: ast::Ident, - relative: Option<ast::Ident>, - dir_path: &Path, - source_map: &SourceMap) -> ModulePath - { - // If we're in a foo.rs file instead of a mod.rs file, - // we need to look for submodules in - // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than - // `./<id>.rs` and `./<id>/mod.rs`. 
- let relative_prefix_string; - let relative_prefix = if let Some(ident) = relative { - relative_prefix_string = format!("{}{}", ident, path::MAIN_SEPARATOR); - &relative_prefix_string - } else { - "" - }; - - let mod_name = id.to_string(); - let default_path_str = format!("{}{}.rs", relative_prefix, mod_name); - let secondary_path_str = format!("{}{}{}mod.rs", - relative_prefix, mod_name, path::MAIN_SEPARATOR); - let default_path = dir_path.join(&default_path_str); - let secondary_path = dir_path.join(&secondary_path_str); - let default_exists = source_map.file_exists(&default_path); - let secondary_exists = source_map.file_exists(&secondary_path); - - let result = match (default_exists, secondary_exists) { - (true, false) => Ok(ModulePathSuccess { - path: default_path, - directory_ownership: DirectoryOwnership::Owned { - relative: Some(id), - }, - }), - (false, true) => Ok(ModulePathSuccess { - path: secondary_path, - directory_ownership: DirectoryOwnership::Owned { - relative: None, - }, - }), - (false, false) => Err(Error::FileNotFoundForModule { - mod_name: mod_name.clone(), - default_path: default_path_str, - secondary_path: secondary_path_str, - dir_path: dir_path.display().to_string(), - }), - (true, true) => Err(Error::DuplicatePaths { - mod_name: mod_name.clone(), - default_path: default_path_str, - secondary_path: secondary_path_str, - }), - }; - - ModulePath { - name: mod_name, - path_exists: default_exists || secondary_exists, - result, - } - } - - /// Reads a module from a source file. - fn eval_src_mod( - &mut self, - path: PathBuf, - directory_ownership: DirectoryOwnership, - name: String, - id_sp: Span, - ) -> PResult<'a, (Mod, Vec<Attribute>)> { - let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut(); - if let Some(i) = included_mod_stack.iter().position(|p| *p == path) { - let mut err = String::from("circular modules: "); - let len = included_mod_stack.len(); - for p in &included_mod_stack[i.. len] { - err.push_str(&p.to_string_lossy()); - err.push_str(" -> "); - } - err.push_str(&path.to_string_lossy()); - return Err(self.span_fatal(id_sp, &err[..])); - } - included_mod_stack.push(path.clone()); - drop(included_mod_stack); - - let mut p0 = - new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp); - p0.cfg_mods = self.cfg_mods; - let mod_inner_lo = p0.token.span; - let mod_attrs = p0.parse_inner_attributes()?; - let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; - m0.inline = false; - self.sess.included_mod_stack.borrow_mut().pop(); - Ok((m0, mod_attrs)) - } - - fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) { - if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) { - self.directory.path.to_mut().push(&*path.as_str()); - self.directory.ownership = DirectoryOwnership::Owned { relative: None }; - } else { - // We have to push on the current module name in the case of relative - // paths in order to ensure that any additional module paths from inline - // `mod x { ... }` come after the relative extension. - // - // For example, a `mod z { ... }` inside `x/y.rs` should set the current - // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`. 
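// A standalone sketch (hypothetical names; not from the deleted file) of the lookup rule
// implemented by `default_submod_path` above: for `mod <id>;` the parser tries
// `<prefix><id>.rs` first and `<prefix><id>/mod.rs` second, where `<prefix>` is the owning
// module's name when the parent file is a `foo.rs`-style (non-`mod.rs`) file.
use std::path::{Path, PathBuf, MAIN_SEPARATOR};

fn candidate_paths(id: &str, relative: Option<&str>, dir_path: &Path) -> (PathBuf, PathBuf) {
    let prefix = relative
        .map(|r| format!("{}{}", r, MAIN_SEPARATOR))
        .unwrap_or_default();
    let default_path = dir_path.join(format!("{}{}.rs", prefix, id));
    let secondary_path = dir_path.join(format!("{}{}{}mod.rs", prefix, id, MAIN_SEPARATOR));
    (default_path, secondary_path)
}

fn main() {
    // `mod bar;` inside `src/foo.rs` (relative offset `foo`):
    let (a, b) = candidate_paths("bar", Some("foo"), Path::new("src"));
    println!("{} or {}", a.display(), b.display()); // src/foo/bar.rs or src/foo/bar/mod.rs
}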
- if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership { - if let Some(ident) = relative.take() { // remove the relative offset - self.directory.path.to_mut().push(&*ident.as_str()); - } - } - self.directory.path.to_mut().push(&*id.as_str()); - } - } -} diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs deleted file mode 100644 index f347300da71..00000000000 --- a/src/libsyntax/parse/parser/pat.rs +++ /dev/null @@ -1,1016 +0,0 @@ -use super::{Parser, PathStyle}; - -use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; -use crate::ptr::P; -use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac}; -use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind}; -use crate::mut_visit::{noop_visit_pat, noop_visit_mac, MutVisitor}; -use crate::token; -use crate::print::pprust; -use crate::source_map::{respan, Span, Spanned}; -use crate::ThinVec; -use syntax_pos::symbol::{kw, sym}; -use errors::{PResult, Applicability, DiagnosticBuilder}; - -type Expected = Option<&'static str>; - -/// `Expected` for function and lambda parameter patterns. -pub(super) const PARAM_EXPECTED: Expected = Some("parameter name"); - -const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here"; - -/// Whether or not an or-pattern should be gated when occurring in the current context. -#[derive(PartialEq)] -pub(super) enum GateOr { Yes, No } - -/// Whether or not to recover a `,` when parsing or-patterns. -#[derive(PartialEq, Copy, Clone)] -enum RecoverComma { Yes, No } - -impl<'a> Parser<'a> { - /// Parses a pattern. - /// - /// Corresponds to `pat<no_top_alt>` in RFC 2535 and does not admit or-patterns - /// at the top level. Used when parsing the parameters of lambda expressions, - /// functions, function pointers, and `pat` macro fragments. - pub fn parse_pat(&mut self, expected: Expected) -> PResult<'a, P<Pat>> { - self.parse_pat_with_range_pat(true, expected) - } - - /// Entry point to the main pattern parser. - /// Corresponds to `top_pat` in RFC 2535 and allows or-pattern at the top level. - pub(super) fn parse_top_pat(&mut self, gate_or: GateOr) -> PResult<'a, P<Pat>> { - // Allow a '|' before the pats (RFCs 1925, 2530, and 2535). - let gated_leading_vert = self.eat_or_separator(None) && gate_or == GateOr::Yes; - let leading_vert_span = self.prev_span; - - // Parse the possibly-or-pattern. - let pat = self.parse_pat_with_or(None, gate_or, RecoverComma::Yes)?; - - // If we parsed a leading `|` which should be gated, - // and no other gated or-pattern has been parsed thus far, - // then we should really gate the leading `|`. - // This complicated procedure is done purely for diagnostics UX. - if gated_leading_vert && self.sess.gated_spans.is_ungated(sym::or_patterns) { - self.sess.gated_spans.gate(sym::or_patterns, leading_vert_span); - } - - Ok(pat) - } - - /// Parse the pattern for a function or function pointer parameter. - /// Special recovery is provided for or-patterns and leading `|`. - pub(super) fn parse_fn_param_pat(&mut self) -> PResult<'a, P<Pat>> { - self.recover_leading_vert(None, "not allowed in a parameter pattern"); - let pat = self.parse_pat_with_or(PARAM_EXPECTED, GateOr::No, RecoverComma::No)?; - - if let PatKind::Or(..) = &pat.kind { - self.ban_illegal_fn_param_or_pat(&pat); - } - - Ok(pat) - } - - /// Ban `A | B` immediately in a parameter pattern and suggest wrapping in parens. 
- fn ban_illegal_fn_param_or_pat(&self, pat: &Pat) { - let msg = "wrap the pattern in parenthesis"; - let fix = format!("({})", pprust::pat_to_string(pat)); - self.struct_span_err(pat.span, "an or-pattern parameter must be wrapped in parenthesis") - .span_suggestion(pat.span, msg, fix, Applicability::MachineApplicable) - .emit(); - } - - /// Parses a pattern, that may be a or-pattern (e.g. `Foo | Bar` in `Some(Foo | Bar)`). - /// Corresponds to `pat<allow_top_alt>` in RFC 2535. - fn parse_pat_with_or( - &mut self, - expected: Expected, - gate_or: GateOr, - rc: RecoverComma, - ) -> PResult<'a, P<Pat>> { - // Parse the first pattern (`p_0`). - let first_pat = self.parse_pat(expected)?; - self.maybe_recover_unexpected_comma(first_pat.span, rc)?; - - // If the next token is not a `|`, - // this is not an or-pattern and we should exit here. - if !self.check(&token::BinOp(token::Or)) && self.token != token::OrOr { - return Ok(first_pat) - } - - // Parse the patterns `p_1 | ... | p_n` where `n > 0`. - let lo = first_pat.span; - let mut pats = vec![first_pat]; - while self.eat_or_separator(Some(lo)) { - let pat = self.parse_pat(expected).map_err(|mut err| { - err.span_label(lo, WHILE_PARSING_OR_MSG); - err - })?; - self.maybe_recover_unexpected_comma(pat.span, rc)?; - pats.push(pat); - } - let or_pattern_span = lo.to(self.prev_span); - - // Feature gate the or-pattern if instructed: - if gate_or == GateOr::Yes { - self.sess.gated_spans.gate(sym::or_patterns, or_pattern_span); - } - - Ok(self.mk_pat(or_pattern_span, PatKind::Or(pats))) - } - - /// Eat the or-pattern `|` separator. - /// If instead a `||` token is encountered, recover and pretend we parsed `|`. - fn eat_or_separator(&mut self, lo: Option<Span>) -> bool { - if self.recover_trailing_vert(lo) { - return false; - } - - match self.token.kind { - token::OrOr => { - // Found `||`; Recover and pretend we parsed `|`. - self.ban_unexpected_or_or(lo); - self.bump(); - true - } - _ => self.eat(&token::BinOp(token::Or)), - } - } - - /// Recover if `|` or `||` is the current token and we have one of the - /// tokens `=>`, `if`, `=`, `:`, `;`, `,`, `]`, `)`, or `}` ahead of us. - /// - /// These tokens all indicate that we reached the end of the or-pattern - /// list and can now reliably say that the `|` was an illegal trailing vert. - /// Note that there are more tokens such as `@` for which we know that the `|` - /// is an illegal parse. However, the user's intent is less clear in that case. - fn recover_trailing_vert(&mut self, lo: Option<Span>) -> bool { - let is_end_ahead = self.look_ahead(1, |token| match &token.kind { - token::FatArrow // e.g. `a | => 0,`. - | token::Ident(kw::If, false) // e.g. `a | if expr`. - | token::Eq // e.g. `let a | = 0`. - | token::Semi // e.g. `let a |;`. - | token::Colon // e.g. `let a | :`. - | token::Comma // e.g. `let (a |,)`. - | token::CloseDelim(token::Bracket) // e.g. `let [a | ]`. - | token::CloseDelim(token::Paren) // e.g. `let (a | )`. - | token::CloseDelim(token::Brace) => true, // e.g. `let A { f: a | }`. - _ => false, - }); - match (is_end_ahead, &self.token.kind) { - (true, token::BinOp(token::Or)) | (true, token::OrOr) => { - self.ban_illegal_vert(lo, "trailing", "not allowed in an or-pattern"); - self.bump(); - true - } - _ => false, - } - } - - /// We have parsed `||` instead of `|`. Error and suggest `|` instead. 
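// A hypothetical sketch (not from the deleted file) of the contexts the or-pattern code
// above distinguishes: a leading `|` is tolerated in a top-level pattern, `||` is rejected
// with a suggestion to use a single `|`, and an or-pattern used as a function parameter
// must be wrapped in parentheses (`ban_illegal_fn_param_or_pat`).
fn unwrap_either((Ok(n) | Err(n)): Result<i32, i32>) -> i32 { n }

fn main() {
    let r: Result<i32, i32> = Ok(3);
    match r {
        | Ok(0) | Err(0) => println!("zero"),    // leading `|` accepted in a match arm
        // Ok(1) || Err(1) => println!("one"),   // rejected: use a single `|`
        _ => println!("{}", unwrap_either(r)),
    }
}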
- fn ban_unexpected_or_or(&mut self, lo: Option<Span>) { - let mut err = self.struct_span_err(self.token.span, "unexpected token `||` after pattern"); - err.span_suggestion( - self.token.span, - "use a single `|` to separate multiple alternative patterns", - "|".to_owned(), - Applicability::MachineApplicable - ); - if let Some(lo) = lo { - err.span_label(lo, WHILE_PARSING_OR_MSG); - } - err.emit(); - } - - /// Some special error handling for the "top-level" patterns in a match arm, - /// `for` loop, `let`, &c. (in contrast to subpatterns within such). - fn maybe_recover_unexpected_comma(&mut self, lo: Span, rc: RecoverComma) -> PResult<'a, ()> { - if rc == RecoverComma::No || self.token != token::Comma { - return Ok(()); - } - - // An unexpected comma after a top-level pattern is a clue that the - // user (perhaps more accustomed to some other language) forgot the - // parentheses in what should have been a tuple pattern; return a - // suggestion-enhanced error here rather than choking on the comma later. - let comma_span = self.token.span; - self.bump(); - if let Err(mut err) = self.skip_pat_list() { - // We didn't expect this to work anyway; we just wanted to advance to the - // end of the comma-sequence so we know the span to suggest parenthesizing. - err.cancel(); - } - let seq_span = lo.to(self.prev_span); - let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern"); - if let Ok(seq_snippet) = self.span_to_snippet(seq_span) { - err.span_suggestion( - seq_span, - "try adding parentheses to match on a tuple..", - format!("({})", seq_snippet), - Applicability::MachineApplicable - ) - .span_suggestion( - seq_span, - "..or a vertical bar to match on multiple alternatives", - format!("{}", seq_snippet.replace(",", " |")), - Applicability::MachineApplicable - ); - } - Err(err) - } - - /// Parse and throw away a parentesized comma separated - /// sequence of patterns until `)` is reached. - fn skip_pat_list(&mut self) -> PResult<'a, ()> { - while !self.check(&token::CloseDelim(token::Paren)) { - self.parse_pat(None)?; - if !self.eat(&token::Comma) { - return Ok(()) - } - } - Ok(()) - } - - /// Recursive possibly-or-pattern parser with recovery for an erroneous leading `|`. - /// See `parse_pat_with_or` for details on parsing or-patterns. - fn parse_pat_with_or_inner(&mut self) -> PResult<'a, P<Pat>> { - self.recover_leading_vert(None, "only allowed in a top-level pattern"); - self.parse_pat_with_or(None, GateOr::Yes, RecoverComma::No) - } - - /// Recover if `|` or `||` is here. - /// The user is thinking that a leading `|` is allowed in this position. - fn recover_leading_vert(&mut self, lo: Option<Span>, ctx: &str) { - if let token::BinOp(token::Or) | token::OrOr = self.token.kind { - self.ban_illegal_vert(lo, "leading", ctx); - self.bump(); - } - } - - /// A `|` or possibly `||` token shouldn't be here. Ban it. - fn ban_illegal_vert(&mut self, lo: Option<Span>, pos: &str, ctx: &str) { - let span = self.token.span; - let mut err = self.struct_span_err(span, &format!("a {} `|` is {}", pos, ctx)); - err.span_suggestion( - span, - &format!("remove the `{}`", pprust::token_to_string(&self.token)), - String::new(), - Applicability::MachineApplicable, - ); - if let Some(lo) = lo { - err.span_label(lo, WHILE_PARSING_OR_MSG); - } - if let token::OrOr = self.token.kind { - err.note("alternatives in or-patterns are separated with `|`, not `||`"); - } - err.emit(); - } - - /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are - /// allowed). 
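// A hypothetical sketch (not from the deleted file) of the recovery performed by
// `maybe_recover_unexpected_comma` above: a stray `,` in a top-level pattern gets two
// suggestions, parenthesize to match on a tuple, or replace `,` with `|` for alternatives.
fn main() {
    // let x, y = (1, 2);            // rejected: unexpected `,` in pattern
    let (x, y) = (1, 2);             // suggested: add parentheses to match on a tuple
    let v = Some(2);
    match v {
        // Some(1), Some(2) => {}    // rejected: unexpected `,` in pattern
        Some(1) | Some(2) => {}      // suggested: `|` to match on multiple alternatives
        _ => {}
    }
    let _ = (x, y, v);
}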
- fn parse_pat_with_range_pat( - &mut self, - allow_range_pat: bool, - expected: Expected, - ) -> PResult<'a, P<Pat>> { - maybe_recover_from_interpolated_ty_qpath!(self, true); - maybe_whole!(self, NtPat, |x| x); - - let lo = self.token.span; - let pat = match self.token.kind { - token::BinOp(token::And) | token::AndAnd => self.parse_pat_deref(expected)?, - token::OpenDelim(token::Paren) => self.parse_pat_tuple_or_parens()?, - token::OpenDelim(token::Bracket) => { - // Parse `[pat, pat,...]` as a slice pattern. - let (pats, _) = self.parse_delim_comma_seq( - token::Bracket, - |p| p.parse_pat_with_or_inner(), - )?; - PatKind::Slice(pats) - } - token::DotDot => { - self.bump(); - if self.is_pat_range_end_start() { - // Parse `..42` for recovery. - self.parse_pat_range_to(RangeEnd::Excluded, "..")? - } else { - // A rest pattern `..`. - PatKind::Rest - } - } - token::DotDotEq => { - // Parse `..=42` for recovery. - self.bump(); - self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")? - } - token::DotDotDot => { - // Parse `...42` for recovery. - self.bump(); - self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")? - } - // At this point, token != `&`, `&&`, `(`, `[`, `..`, `..=`, or `...`. - _ => if self.eat_keyword(kw::Underscore) { - // Parse _ - PatKind::Wild - } else if self.eat_keyword(kw::Mut) { - self.parse_pat_ident_mut()? - } else if self.eat_keyword(kw::Ref) { - // Parse ref ident @ pat / ref mut ident @ pat - let mutbl = self.parse_mutability(); - self.parse_pat_ident(BindingMode::ByRef(mutbl))? - } else if self.eat_keyword(kw::Box) { - // Parse `box pat` - let pat = self.parse_pat_with_range_pat(false, None)?; - self.sess.gated_spans.gate(sym::box_patterns, lo.to(self.prev_span)); - PatKind::Box(pat) - } else if self.can_be_ident_pat() { - // Parse `ident @ pat` - // This can give false positives and parse nullary enums, - // they are dealt with later in resolve. - self.parse_pat_ident(BindingMode::ByValue(Mutability::Immutable))? - } else if self.is_start_of_pat_with_path() { - // Parse pattern starting with a path - let (qself, path) = if self.eat_lt() { - // Parse a qualified path - let (qself, path) = self.parse_qpath(PathStyle::Expr)?; - (Some(qself), path) - } else { - // Parse an unqualified path - (None, self.parse_path(PathStyle::Expr)?) - }; - match self.token.kind { - token::Not if qself.is_none() => self.parse_pat_mac_invoc(lo, path)?, - token::DotDotDot | token::DotDotEq | token::DotDot => { - self.parse_pat_range_starting_with_path(lo, qself, path)? - } - token::OpenDelim(token::Brace) => self.parse_pat_struct(qself, path)?, - token::OpenDelim(token::Paren) => self.parse_pat_tuple_struct(qself, path)?, - _ => PatKind::Path(qself, path), - } - } else { - // Try to parse everything else as literal with optional minus - match self.parse_literal_maybe_minus() { - Ok(begin) - if self.check(&token::DotDot) - || self.check(&token::DotDotEq) - || self.check(&token::DotDotDot) => - { - self.parse_pat_range_starting_with_lit(begin)? - } - Ok(begin) => PatKind::Lit(begin), - Err(err) => return self.fatal_unexpected_non_pat(err, expected), - } - } - }; - - let pat = self.mk_pat(lo.to(self.prev_span), pat); - let pat = self.maybe_recover_from_bad_qpath(pat, true)?; - let pat = self.recover_intersection_pat(pat)?; - - if !allow_range_pat { - self.ban_pat_range_if_ambiguous(&pat)? - } - - Ok(pat) - } - - /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`. 
- /// - /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs` - /// should already have been parsed by now at this point, - /// if the next token is `@` then we can try to parse the more general form. - /// - /// Consult `parse_pat_ident` for the `binding` grammar. - /// - /// The notion of intersection patterns are found in - /// e.g. [F#][and] where they are called AND-patterns. - /// - /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching - fn recover_intersection_pat(&mut self, lhs: P<Pat>) -> PResult<'a, P<Pat>> { - if self.token.kind != token::At { - // Next token is not `@` so it's not going to be an intersection pattern. - return Ok(lhs); - } - - // At this point we attempt to parse `@ $pat_rhs` and emit an error. - self.bump(); // `@` - let mut rhs = self.parse_pat(None)?; - let sp = lhs.span.to(rhs.span); - - if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind { - // The user inverted the order, so help them fix that. - let mut applicability = Applicability::MachineApplicable; - lhs.walk(&mut |p| match p.kind { - // `check_match` is unhappy if the subpattern has a binding anywhere. - PatKind::Ident(..) => { - applicability = Applicability::MaybeIncorrect; - false // Short-circuit. - }, - _ => true, - }); - - let lhs_span = lhs.span; - // Move the LHS into the RHS as a subpattern. - // The RHS is now the full pattern. - *sub = Some(lhs); - - self.struct_span_err(sp, "pattern on wrong side of `@`") - .span_label(lhs_span, "pattern on the left, should be on the right") - .span_label(rhs.span, "binding on the right, should be on the left") - .span_suggestion(sp, "switch the order", pprust::pat_to_string(&rhs), applicability) - .emit(); - } else { - // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`. - rhs.kind = PatKind::Wild; - self.struct_span_err(sp, "left-hand side of `@` must be a binding") - .span_label(lhs.span, "interpreted as a pattern, not a binding") - .span_label(rhs.span, "also a pattern") - .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`") - .emit(); - } - - rhs.span = sp; - Ok(rhs) - } - - /// Ban a range pattern if it has an ambiguous interpretation. - fn ban_pat_range_if_ambiguous(&self, pat: &Pat) -> PResult<'a, ()> { - match pat.kind { - PatKind::Range( - .., Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. } - ) => return Ok(()), - PatKind::Range(..) => {} - _ => return Ok(()), - } - - let mut err = self.struct_span_err( - pat.span, - "the range pattern here has ambiguous interpretation", - ); - err.span_suggestion( - pat.span, - "add parentheses to clarify the precedence", - format!("({})", pprust::pat_to_string(&pat)), - // "ambiguous interpretation" implies that we have to be guessing - Applicability::MaybeIncorrect - ); - Err(err) - } - - /// Parse `&pat` / `&mut pat`. - fn parse_pat_deref(&mut self, expected: Expected) -> PResult<'a, PatKind> { - self.expect_and()?; - let mutbl = self.parse_mutability(); - - if let token::Lifetime(name) = self.token.kind { - let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name)); - err.span_label(self.token.span, "unexpected lifetime"); - return Err(err); - } - - let subpat = self.parse_pat_with_range_pat(false, expected)?; - Ok(PatKind::Ref(subpat, mutbl)) - } - - /// Parse a tuple or parenthesis pattern. 
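// A hypothetical sketch (not from the deleted file) of what `recover_intersection_pat`
// above enforces: the binding belongs on the left of `@` and the pattern on the right.
// The `&pat` form is handled by the neighbouring `parse_pat_deref`.
fn main() {
    let n = 7;
    match n {
        // 1..=9 @ d => println!("{}", d),    // rejected: pattern on wrong side of `@`
        d @ 1..=9 => println!("digit {}", d), // binding on the left, pattern on the right
        _ => println!("larger"),
    }
    let r = &5;
    let &x = r;                               // `&pat` parsed by `parse_pat_deref`
    let _ = x;
}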
- fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> { - let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or_inner())?; - - // Here, `(pat,)` is a tuple pattern. - // For backward compatibility, `(..)` is a tuple pattern as well. - Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { - PatKind::Paren(fields.into_iter().nth(0).unwrap()) - } else { - PatKind::Tuple(fields) - }) - } - - /// Parse a mutable binding with the `mut` token already eaten. - fn parse_pat_ident_mut(&mut self) -> PResult<'a, PatKind> { - let mut_span = self.prev_span; - - if self.eat_keyword(kw::Ref) { - return self.recover_mut_ref_ident(mut_span) - } - - self.recover_additional_muts(); - - // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`. - if let token::Interpolated(ref nt) = self.token.kind { - if let token::NtPat(_) = **nt { - self.expected_ident_found().emit(); - } - } - - // Parse the pattern we hope to be an identifier. - let mut pat = self.parse_pat(Some("identifier"))?; - - // Add `mut` to any binding in the parsed pattern. - let changed_any_binding = Self::make_all_value_bindings_mutable(&mut pat); - - // Unwrap; If we don't have `mut $ident`, error. - let pat = pat.into_inner(); - match &pat.kind { - PatKind::Ident(..) => {} - _ => self.ban_mut_general_pat(mut_span, &pat, changed_any_binding), - } - - Ok(pat.kind) - } - - /// Recover on `mut ref? ident @ pat` and suggest - /// that the order of `mut` and `ref` is incorrect. - fn recover_mut_ref_ident(&mut self, lo: Span) -> PResult<'a, PatKind> { - let mutref_span = lo.to(self.prev_span); - self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect") - .span_suggestion( - mutref_span, - "try switching the order", - "ref mut".into(), - Applicability::MachineApplicable - ) - .emit(); - - self.parse_pat_ident(BindingMode::ByRef(Mutability::Mutable)) - } - - /// Turn all by-value immutable bindings in a pattern into mutable bindings. - /// Returns `true` if any change was made. - fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool { - struct AddMut(bool); - impl MutVisitor for AddMut { - fn visit_mac(&mut self, mac: &mut Mac) { - noop_visit_mac(mac, self); - } - - fn visit_pat(&mut self, pat: &mut P<Pat>) { - if let PatKind::Ident(BindingMode::ByValue(ref mut m @ Mutability::Immutable), ..) - = pat.kind - { - *m = Mutability::Mutable; - self.0 = true; - } - noop_visit_pat(pat, self); - } - } - - let mut add_mut = AddMut(false); - add_mut.visit_pat(pat); - add_mut.0 - } - - /// Error on `mut $pat` where `$pat` is not an ident. - fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) { - let span = lo.to(pat.span); - let fix = pprust::pat_to_string(&pat); - let (problem, suggestion) = if changed_any_binding { - ("`mut` must be attached to each individual binding", "add `mut` to each binding") - } else { - ("`mut` must be followed by a named binding", "remove the `mut` prefix") - }; - self.struct_span_err(span, problem) - .span_suggestion(span, suggestion, fix, Applicability::MachineApplicable) - .note("`mut` may be followed by `variable` and `variable @ pattern`") - .emit() - } - - /// Eat any extraneous `mut`s and error + recover if we ate any. 
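// A hypothetical sketch (not from the deleted file) of the `mut` rules the recovery code
// above enforces: `mut` attaches to a single binding, and `ref` comes before `mut`
// (`recover_mut_ref_ident` suggests swapping `mut ref` to `ref mut`).
fn main() {
    let mut x = 1;                    // `mut $ident` is the accepted form
    // let mut (a, b) = (1, 2);       // rejected: `mut` must be attached to each binding
    let (mut a, mut b) = (1, 2);      // the suggested rewrite
    // let mut ref y = 0;             // rejected: the order of `mut` and `ref` is incorrect
    let ref mut y = 0;                // the suggested rewrite
    x += 1; a += 1; b += 1; *y += 1;
    let _ = (x, a, b, *y);
}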
- fn recover_additional_muts(&mut self) { - let lo = self.token.span; - while self.eat_keyword(kw::Mut) {} - if lo == self.token.span { - return; - } - - let span = lo.to(self.prev_span); - self.struct_span_err(span, "`mut` on a binding may not be repeated") - .span_suggestion( - span, - "remove the additional `mut`s", - String::new(), - Applicability::MachineApplicable, - ) - .emit(); - } - - /// Parse macro invocation - fn parse_pat_mac_invoc(&mut self, lo: Span, path: Path) -> PResult<'a, PatKind> { - self.bump(); - let (delim, tts) = self.expect_delimited_token_tree()?; - let mac = Mac { - path, - tts, - delim, - span: lo.to(self.prev_span), - prior_type_ascription: self.last_type_ascription, - }; - Ok(PatKind::Mac(mac)) - } - - fn excluded_range_end(&self, span: Span) -> RangeEnd { - self.sess.gated_spans.gate(sym::exclusive_range_pattern, span); - RangeEnd::Excluded - } - - /// Parse a range pattern `$path $form $end?` where `$form = ".." | "..." | "..=" ;`. - /// The `$path` has already been parsed and the next token is the `$form`. - fn parse_pat_range_starting_with_path( - &mut self, - lo: Span, - qself: Option<QSelf>, - path: Path - ) -> PResult<'a, PatKind> { - let (end_kind, form) = match self.token.kind { - token::DotDot => (self.excluded_range_end(self.token.span), ".."), - token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."), - token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="), - _ => panic!("can only parse `..`/`...`/`..=` for ranges (checked above)"), - }; - let op_span = self.token.span; - // Parse range - let span = lo.to(self.prev_span); - let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); - self.bump(); - let end = self.parse_pat_range_end_opt(&begin, form)?; - Ok(PatKind::Range(begin, end, respan(op_span, end_kind))) - } - - /// Parse a range pattern `$literal $form $end?` where `$form = ".." | "..." | "..=" ;`. - /// The `$path` has already been parsed and the next token is the `$form`. - fn parse_pat_range_starting_with_lit(&mut self, begin: P<Expr>) -> PResult<'a, PatKind> { - let op_span = self.token.span; - let (end_kind, form) = if self.eat(&token::DotDotDot) { - (RangeEnd::Included(RangeSyntax::DotDotDot), "...") - } else if self.eat(&token::DotDotEq) { - (RangeEnd::Included(RangeSyntax::DotDotEq), "..=") - } else if self.eat(&token::DotDot) { - (self.excluded_range_end(op_span), "..") - } else { - panic!("impossible case: we already matched on a range-operator token") - }; - let end = self.parse_pat_range_end_opt(&begin, form)?; - Ok(PatKind::Range(begin, end, respan(op_span, end_kind))) - } - - fn fatal_unexpected_non_pat( - &mut self, - mut err: DiagnosticBuilder<'a>, - expected: Expected, - ) -> PResult<'a, P<Pat>> { - err.cancel(); - - let expected = expected.unwrap_or("pattern"); - let msg = format!("expected {}, found {}", expected, self.this_token_descr()); - - let mut err = self.fatal(&msg); - err.span_label(self.token.span, format!("expected {}", expected)); - - let sp = self.sess.source_map().start_point(self.token.span); - if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { - self.sess.expr_parentheses_needed(&mut err, *sp, None); - } - - Err(err) - } - - /// Is the current token suitable as the start of a range patterns end? - fn is_pat_range_end_start(&self) -> bool { - self.token.is_path_start() // e.g. `MY_CONST`; - || self.token == token::Dot // e.g. `.5` for recovery; - || self.token.can_begin_literal_or_bool() // e.g. `42`. 
- || self.token.is_whole_expr() - } - - /// Parse a range-to pattern, e.g. `..X` and `..=X` for recovery. - fn parse_pat_range_to(&mut self, re: RangeEnd, form: &str) -> PResult<'a, PatKind> { - let lo = self.prev_span; - let end = self.parse_pat_range_end()?; - let range_span = lo.to(end.span); - let begin = self.mk_expr(range_span, ExprKind::Err, ThinVec::new()); - - self.diagnostic() - .struct_span_err(range_span, &format!("`{}X` range patterns are not supported", form)) - .span_suggestion( - range_span, - "try using the minimum value for the type", - format!("MIN{}{}", form, pprust::expr_to_string(&end)), - Applicability::HasPlaceholders, - ) - .emit(); - - Ok(PatKind::Range(begin, end, respan(lo, re))) - } - - /// Parse the end of a `X..Y`, `X..=Y`, or `X...Y` range pattern or recover - /// if that end is missing treating it as `X..`, `X..=`, or `X...` respectively. - fn parse_pat_range_end_opt(&mut self, begin: &Expr, form: &str) -> PResult<'a, P<Expr>> { - if self.is_pat_range_end_start() { - // Parsing e.g. `X..=Y`. - self.parse_pat_range_end() - } else { - // Parsing e.g. `X..`. - let range_span = begin.span.to(self.prev_span); - - self.diagnostic() - .struct_span_err( - range_span, - &format!("`X{}` range patterns are not supported", form), - ) - .span_suggestion( - range_span, - "try using the maximum value for the type", - format!("{}{}MAX", pprust::expr_to_string(&begin), form), - Applicability::HasPlaceholders, - ) - .emit(); - - Ok(self.mk_expr(range_span, ExprKind::Err, ThinVec::new())) - } - } - - fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> { - if self.token.is_path_start() { - let lo = self.token.span; - let (qself, path) = if self.eat_lt() { - // Parse a qualified path - let (qself, path) = self.parse_qpath(PathStyle::Expr)?; - (Some(qself), path) - } else { - // Parse an unqualified path - (None, self.parse_path(PathStyle::Expr)?) - }; - let hi = self.prev_span; - Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new())) - } else { - self.parse_literal_maybe_minus() - } - } - - /// Is this the start of a pattern beginning with a path? - fn is_start_of_pat_with_path(&mut self) -> bool { - self.check_path() - // Just for recovery (see `can_be_ident`). - || self.token.is_ident() && !self.token.is_bool_lit() && !self.token.is_keyword(kw::In) - } - - /// Would `parse_pat_ident` be appropriate here? - fn can_be_ident_pat(&mut self) -> bool { - self.check_ident() - && !self.token.is_bool_lit() // Avoid `true` or `false` as a binding as it is a literal. - && !self.token.is_path_segment_keyword() // Avoid e.g. `Self` as it is a path. - // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`. - && !self.token.is_keyword(kw::In) - && self.look_ahead(1, |t| match t.kind { // Try to do something more complex? - token::OpenDelim(token::Paren) // A tuple struct pattern. - | token::OpenDelim(token::Brace) // A struct pattern. - | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. - | token::ModSep // A tuple / struct variant pattern. - | token::Not => false, // A macro expanding to a pattern. - _ => true, - }) - } - - /// Parses `ident` or `ident @ pat`. - /// Used by the copy foo and ref foo patterns to give a good - /// error message when parsing mistakes like `ref foo(a, b)`. - fn parse_pat_ident(&mut self, binding_mode: BindingMode) -> PResult<'a, PatKind> { - let ident = self.parse_ident()?; - let sub = if self.eat(&token::At) { - Some(self.parse_pat(Some("binding pattern"))?) 
- } else { - None - }; - - // Just to be friendly, if they write something like `ref Some(i)`, - // we end up here with `(` as the current token. - // This shortly leads to a parse error. Note that if there is no explicit - // binding mode then we do not end up here, because the lookahead - // will direct us over to `parse_enum_variant()`. - if self.token == token::OpenDelim(token::Paren) { - return Err(self.span_fatal( - self.prev_span, - "expected identifier, found enum pattern", - )) - } - - Ok(PatKind::Ident(binding_mode, ident, sub)) - } - - /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`). - fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> { - if qself.is_some() { - let msg = "unexpected `{` after qualified path"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - return Err(err); - } - - self.bump(); - let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| { - e.emit(); - self.recover_stmt(); - (vec![], true) - }); - self.bump(); - Ok(PatKind::Struct(path, fields, etc)) - } - - /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`). - fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> { - if qself.is_some() { - let msg = "unexpected `(` after qualified path"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - return Err(err); - } - let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or_inner())?; - Ok(PatKind::TupleStruct(path, fields)) - } - - /// Parses the fields of a struct-like pattern. - fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<FieldPat>, bool)> { - let mut fields = Vec::new(); - let mut etc = false; - let mut ate_comma = true; - let mut delayed_err: Option<DiagnosticBuilder<'a>> = None; - let mut etc_span = None; - - while self.token != token::CloseDelim(token::Brace) { - let attrs = match self.parse_outer_attributes() { - Ok(attrs) => attrs, - Err(err) => { - if let Some(mut delayed) = delayed_err { - delayed.emit(); - } - return Err(err); - }, - }; - let lo = self.token.span; - - // check that a comma comes after every field - if !ate_comma { - let err = self.struct_span_err(self.prev_span, "expected `,`"); - if let Some(mut delayed) = delayed_err { - delayed.emit(); - } - return Err(err); - } - ate_comma = false; - - if self.check(&token::DotDot) || self.token == token::DotDotDot { - etc = true; - let mut etc_sp = self.token.span; - - self.recover_one_fewer_dotdot(); - self.bump(); // `..` || `...` - - if self.token == token::CloseDelim(token::Brace) { - etc_span = Some(etc_sp); - break; - } - let token_str = self.this_token_descr(); - let mut err = self.fatal(&format!("expected `}}`, found {}", token_str)); - - err.span_label(self.token.span, "expected `}`"); - let mut comma_sp = None; - if self.token == token::Comma { // Issue #49257 - let nw_span = self.sess.source_map().span_until_non_whitespace(self.token.span); - etc_sp = etc_sp.to(nw_span); - err.span_label(etc_sp, - "`..` must be at the end and cannot have a trailing comma"); - comma_sp = Some(self.token.span); - self.bump(); - ate_comma = true; - } - - etc_span = Some(etc_sp.until(self.token.span)); - if self.token == token::CloseDelim(token::Brace) { - // If the struct looks otherwise well formed, recover and continue. 
- if let Some(sp) = comma_sp { - err.span_suggestion_short( - sp, - "remove this comma", - String::new(), - Applicability::MachineApplicable, - ); - } - err.emit(); - break; - } else if self.token.is_ident() && ate_comma { - // Accept fields coming after `..,`. - // This way we avoid "pattern missing fields" errors afterwards. - // We delay this error until the end in order to have a span for a - // suggested fix. - if let Some(mut delayed_err) = delayed_err { - delayed_err.emit(); - return Err(err); - } else { - delayed_err = Some(err); - } - } else { - if let Some(mut err) = delayed_err { - err.emit(); - } - return Err(err); - } - } - - fields.push(match self.parse_pat_field(lo, attrs) { - Ok(field) => field, - Err(err) => { - if let Some(mut delayed_err) = delayed_err { - delayed_err.emit(); - } - return Err(err); - } - }); - ate_comma = self.eat(&token::Comma); - } - - if let Some(mut err) = delayed_err { - if let Some(etc_span) = etc_span { - err.multipart_suggestion( - "move the `..` to the end of the field list", - vec![ - (etc_span, String::new()), - (self.token.span, format!("{}.. }}", if ate_comma { "" } else { ", " })), - ], - Applicability::MachineApplicable, - ); - } - err.emit(); - } - return Ok((fields, etc)); - } - - /// Recover on `...` as if it were `..` to avoid further errors. - /// See issue #46718. - fn recover_one_fewer_dotdot(&self) { - if self.token != token::DotDotDot { - return; - } - - self.struct_span_err(self.token.span, "expected field pattern, found `...`") - .span_suggestion( - self.token.span, - "to omit remaining fields, use one fewer `.`", - "..".to_owned(), - Applicability::MachineApplicable - ) - .emit(); - } - - fn parse_pat_field(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, FieldPat> { - // Check if a colon exists one ahead. This means we're parsing a fieldname. - let hi; - let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { - // Parsing a pattern of the form `fieldname: pat`. - let fieldname = self.parse_field_name()?; - self.bump(); - let pat = self.parse_pat_with_or_inner()?; - hi = pat.span; - (pat, fieldname, false) - } else { - // Parsing a pattern of the form `(box) (ref) (mut) fieldname`. 
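// A hypothetical sketch (not from the deleted file) of the field-pattern forms handled by
// `parse_pat_fields`/`parse_pat_field` above: `..` must come last and may not carry a
// trailing comma, and shorthand fields may take `ref`/`mut`/`box` prefixes.
struct Point { x: i32, y: i32, z: i32 }

fn main() {
    let p = Point { x: 1, y: 2, z: 3 };
    // let Point { x, .., z } = p;     // rejected: `..` must be at the end of the list
    // let Point { x, .., } = p;       // rejected: `..` cannot have a trailing comma
    let Point { x: ref a, y, .. } = p; // explicit `field: pat`, shorthand `y`, then `..`
    println!("{} {}", a, y);
}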
- let is_box = self.eat_keyword(kw::Box); - let boxed_span = self.token.span; - let is_ref = self.eat_keyword(kw::Ref); - let is_mut = self.eat_keyword(kw::Mut); - let fieldname = self.parse_ident()?; - hi = self.prev_span; - - let bind_type = match (is_ref, is_mut) { - (true, true) => BindingMode::ByRef(Mutability::Mutable), - (true, false) => BindingMode::ByRef(Mutability::Immutable), - (false, true) => BindingMode::ByValue(Mutability::Mutable), - (false, false) => BindingMode::ByValue(Mutability::Immutable), - }; - - let fieldpat = self.mk_pat_ident(boxed_span.to(hi), bind_type, fieldname); - let subpat = if is_box { - self.mk_pat(lo.to(hi), PatKind::Box(fieldpat)) - } else { - fieldpat - }; - (subpat, fieldname, true) - }; - - Ok(FieldPat { - ident: fieldname, - pat: subpat, - is_shorthand, - attrs: attrs.into(), - id: ast::DUMMY_NODE_ID, - span: lo.to(hi), - is_placeholder: false, - }) - } - - pub(super) fn mk_pat_ident(&self, span: Span, bm: BindingMode, ident: Ident) -> P<Pat> { - self.mk_pat(span, PatKind::Ident(bm, ident, None)) - } - - fn mk_pat(&self, span: Span, kind: PatKind) -> P<Pat> { - P(Pat { kind, span, id: ast::DUMMY_NODE_ID }) - } -} diff --git a/src/libsyntax/parse/parser/path.rs b/src/libsyntax/parse/parser/path.rs deleted file mode 100644 index 9ceb3ba1eb4..00000000000 --- a/src/libsyntax/parse/parser/path.rs +++ /dev/null @@ -1,497 +0,0 @@ -use super::{Parser, TokenType}; - -use crate::{maybe_whole, ThinVec}; -use crate::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs}; -use crate::ast::{AnonConst, GenericArg, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode}; -use crate::token::{self, Token}; -use crate::source_map::{Span, BytePos}; -use syntax_pos::symbol::{kw, sym}; - -use std::mem; -use log::debug; -use errors::{PResult, Applicability, pluralize}; - -/// Specifies how to parse a path. -#[derive(Copy, Clone, PartialEq)] -pub enum PathStyle { - /// In some contexts, notably in expressions, paths with generic arguments are ambiguous - /// with something else. For example, in expressions `segment < ....` can be interpreted - /// as a comparison and `segment ( ....` can be interpreted as a function call. - /// In all such contexts the non-path interpretation is preferred by default for practical - /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g. - /// `x<y>` - comparisons, `x::<y>` - unambiguously a path. - Expr, - /// In other contexts, notably in types, no ambiguity exists and paths can be written - /// without the disambiguator, e.g., `x<y>` - unambiguously a path. - /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too. - Type, - /// A path with generic arguments disallowed, e.g., `foo::bar::Baz`, used in imports, - /// visibilities or attributes. - /// Technically, this variant is unnecessary and e.g., `Expr` can be used instead - /// (paths in "mod" contexts have to be checked later for absence of generic arguments - /// anyway, due to macros), but it is used to avoid weird suggestions about expected - /// tokens when something goes wrong. - Mod, -} - -impl<'a> Parser<'a> { - /// Parses a qualified path. - /// Assumes that the leading `<` has been parsed already. 
- /// - /// `qualified_path = <type [as trait_ref]>::path` - /// - /// # Examples - /// `<T>::default` - /// `<T as U>::a` - /// `<T as U>::F::a<S>` (without disambiguator) - /// `<T as U>::F::a::<S>` (with disambiguator) - pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> { - let lo = self.prev_span; - let ty = self.parse_ty()?; - - // `path` will contain the prefix of the path up to the `>`, - // if any (e.g., `U` in the `<T as U>::*` examples - // above). `path_span` has the span of that path, or an empty - // span in the case of something like `<T>::Bar`. - let (mut path, path_span); - if self.eat_keyword(kw::As) { - let path_lo = self.token.span; - path = self.parse_path(PathStyle::Type)?; - path_span = path_lo.to(self.prev_span); - } else { - path_span = self.token.span.to(self.token.span); - path = ast::Path { segments: Vec::new(), span: path_span }; - } - - // See doc comment for `unmatched_angle_bracket_count`. - self.expect(&token::Gt)?; - if self.unmatched_angle_bracket_count > 0 { - self.unmatched_angle_bracket_count -= 1; - debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count); - } - - self.expect(&token::ModSep)?; - - let qself = QSelf { ty, path_span, position: path.segments.len() }; - self.parse_path_segments(&mut path.segments, style)?; - - Ok((qself, Path { segments: path.segments, span: lo.to(self.prev_span) })) - } - - /// Parses simple paths. - /// - /// `path = [::] segment+` - /// `segment = ident | ident[::]<args> | ident[::](args) [-> type]` - /// - /// # Examples - /// `a::b::C<D>` (without disambiguator) - /// `a::b::C::<D>` (with disambiguator) - /// `Fn(Args)` (without disambiguator) - /// `Fn::(Args)` (with disambiguator) - pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, Path> { - maybe_whole!(self, NtPath, |path| { - if style == PathStyle::Mod && - path.segments.iter().any(|segment| segment.args.is_some()) { - self.diagnostic().span_err(path.span, "unexpected generic arguments in path"); - } - path - }); - - let lo = self.meta_var_span.unwrap_or(self.token.span); - let mut segments = Vec::new(); - let mod_sep_ctxt = self.token.span.ctxt(); - if self.eat(&token::ModSep) { - segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))); - } - self.parse_path_segments(&mut segments, style)?; - - Ok(Path { segments, span: lo.to(self.prev_span) }) - } - - /// Like `parse_path`, but also supports parsing `Word` meta items into paths for - /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` - /// attributes. - fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> { - let meta_ident = match self.token.kind { - token::Interpolated(ref nt) => match **nt { - token::NtMeta(ref item) => match item.tokens.is_empty() { - true => Some(item.path.clone()), - false => None, - }, - _ => None, - }, - _ => None, - }; - if let Some(path) = meta_ident { - self.bump(); - return Ok(path); - } - self.parse_path(style) - } - - /// Parse a list of paths inside `#[derive(path_0, ..., path_n)]`. 
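// A hypothetical sketch (not from the deleted file) of the three `PathStyle` contexts
// described above: in type position generic arguments need no disambiguator, in expression
// position they need the turbofish `::<>`, and `parse_qpath` covers `<type as trait>::item`.
// `parse_derive_paths` reads plain paths such as `Clone` inside `#[derive(...)]`.
#[derive(Clone, Debug)]                     // `PathStyle::Mod`-style paths in `#[derive]`
struct Wrapper(Vec<u8>);

fn main() {
    let v: Vec<u8> = Vec::new();            // type position: `Vec<u8>`, no `::<>` needed
    let w = Vec::<u8>::new();               // expression position: turbofish disambiguates
    let d = <u8 as Default>::default();     // qualified path: `<type as trait>::item`
    println!("{:?}", (Wrapper(v).clone(), w, d));
}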
- pub fn parse_derive_paths(&mut self) -> PResult<'a, Vec<Path>> { - self.expect(&token::OpenDelim(token::Paren))?; - let mut list = Vec::new(); - while !self.eat(&token::CloseDelim(token::Paren)) { - let path = self.parse_path_allowing_meta(PathStyle::Mod)?; - list.push(path); - if !self.eat(&token::Comma) { - self.expect(&token::CloseDelim(token::Paren))?; - break - } - } - Ok(list) - } - - pub(super) fn parse_path_segments( - &mut self, - segments: &mut Vec<PathSegment>, - style: PathStyle, - ) -> PResult<'a, ()> { - loop { - let segment = self.parse_path_segment(style)?; - if style == PathStyle::Expr { - // In order to check for trailing angle brackets, we must have finished - // recursing (`parse_path_segment` can indirectly call this function), - // that is, the next token must be the highlighted part of the below example: - // - // `Foo::<Bar as Baz<T>>::Qux` - // ^ here - // - // As opposed to the below highlight (if we had only finished the first - // recursion): - // - // `Foo::<Bar as Baz<T>>::Qux` - // ^ here - // - // `PathStyle::Expr` is only provided at the root invocation and never in - // `parse_path_segment` to recurse and therefore can be checked to maintain - // this invariant. - self.check_trailing_angle_brackets(&segment, token::ModSep); - } - segments.push(segment); - - if self.is_import_coupler() || !self.eat(&token::ModSep) { - return Ok(()); - } - } - } - - pub(super) fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> { - let ident = self.parse_path_segment_ident()?; - - let is_args_start = |token: &Token| match token.kind { - token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) - | token::LArrow => true, - _ => false, - }; - let check_args_start = |this: &mut Self| { - this.expected_tokens.extend_from_slice( - &[TokenType::Token(token::Lt), TokenType::Token(token::OpenDelim(token::Paren))] - ); - is_args_start(&this.token) - }; - - Ok(if style == PathStyle::Type && check_args_start(self) || - style != PathStyle::Mod && self.check(&token::ModSep) - && self.look_ahead(1, |t| is_args_start(t)) { - // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If - // it isn't, then we reset the unmatched angle bracket count as we're about to start - // parsing a new path. - if style == PathStyle::Expr { - self.unmatched_angle_bracket_count = 0; - self.max_angle_bracket_count = 0; - } - - // Generic arguments are found - `<`, `(`, `::<` or `::(`. - self.eat(&token::ModSep); - let lo = self.token.span; - let args = if self.eat_lt() { - // `<'a, T, A = U>` - let (args, constraints) = - self.parse_generic_args_with_leaning_angle_bracket_recovery(style, lo)?; - self.expect_gt()?; - let span = lo.to(self.prev_span); - AngleBracketedArgs { args, constraints, span }.into() - } else { - // `(T, U) -> R` - let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?; - let span = ident.span.to(self.prev_span); - let output = if self.eat(&token::RArrow) { - Some(self.parse_ty_common(false, false, false)?) - } else { - None - }; - ParenthesizedArgs { inputs, output, span }.into() - }; - - PathSegment { ident, args, id: ast::DUMMY_NODE_ID } - } else { - // Generic arguments are not found. 
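// A hypothetical sketch (not from the deleted file) of the `(T, U) -> R` branch of
// `parse_path_segment` above: parenthesized inputs plus an optional `-> R` output are the
// `Fn`-style sugar that stands in for angle-bracketed generic arguments.
fn apply(f: impl Fn(i32, i32) -> i32) -> i32 {
    f(2, 3)
}

fn main() {
    println!("{}", apply(|a, b| a + b));    // prints 5
}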
- PathSegment::from_ident(ident) - }) - } - - pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> { - match self.token.kind { - token::Ident(name, _) if name.is_path_segment_keyword() => { - let span = self.token.span; - self.bump(); - Ok(Ident::new(name, span)) - } - _ => self.parse_ident(), - } - } - - /// Parses generic args (within a path segment) with recovery for extra leading angle brackets. - /// For the purposes of understanding the parsing logic of generic arguments, this function - /// can be thought of being the same as just calling `self.parse_generic_args()` if the source - /// had the correct amount of leading angle brackets. - /// - /// ```ignore (diagnostics) - /// bar::<<<<T as Foo>::Output>(); - /// ^^ help: remove extra angle brackets - /// ``` - fn parse_generic_args_with_leaning_angle_bracket_recovery( - &mut self, - style: PathStyle, - lo: Span, - ) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> { - // We need to detect whether there are extra leading left angle brackets and produce an - // appropriate error and suggestion. This cannot be implemented by looking ahead at - // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens - // then there won't be matching `>` tokens to find. - // - // To explain how this detection works, consider the following example: - // - // ```ignore (diagnostics) - // bar::<<<<T as Foo>::Output>(); - // ^^ help: remove extra angle brackets - // ``` - // - // Parsing of the left angle brackets starts in this function. We start by parsing the - // `<` token (incrementing the counter of unmatched angle brackets on `Parser` via - // `eat_lt`): - // - // *Upcoming tokens:* `<<<<T as Foo>::Output>;` - // *Unmatched count:* 1 - // *`parse_path_segment` calls deep:* 0 - // - // This has the effect of recursing as this function is called if a `<` character - // is found within the expected generic arguments: - // - // *Upcoming tokens:* `<<<T as Foo>::Output>;` - // *Unmatched count:* 2 - // *`parse_path_segment` calls deep:* 1 - // - // Eventually we will have recursed until having consumed all of the `<` tokens and - // this will be reflected in the count: - // - // *Upcoming tokens:* `T as Foo>::Output>;` - // *Unmatched count:* 4 - // `parse_path_segment` calls deep:* 3 - // - // The parser will continue until reaching the first `>` - this will decrement the - // unmatched angle bracket count and return to the parent invocation of this function - // having succeeded in parsing: - // - // *Upcoming tokens:* `::Output>;` - // *Unmatched count:* 3 - // *`parse_path_segment` calls deep:* 2 - // - // This will continue until the next `>` character which will also return successfully - // to the parent invocation of this function and decrement the count: - // - // *Upcoming tokens:* `;` - // *Unmatched count:* 2 - // *`parse_path_segment` calls deep:* 1 - // - // At this point, this function will expect to find another matching `>` character but - // won't be able to and will return an error. This will continue all the way up the - // call stack until the first invocation: - // - // *Upcoming tokens:* `;` - // *Unmatched count:* 2 - // *`parse_path_segment` calls deep:* 0 - // - // In doing this, we have managed to work out how many unmatched leading left angle - // brackets there are, but we cannot recover as the unmatched angle brackets have - // already been consumed. To remedy this, we keep a snapshot of the parser state - // before we do the above. 
We can then inspect whether we ended up with a parsing error - // and unmatched left angle brackets and if so, restore the parser state before we - // consumed any `<` characters to emit an error and consume the erroneous tokens to - // recover by attempting to parse again. - // - // In practice, the recursion of this function is indirect and there will be other - // locations that consume some `<` characters - as long as we update the count when - // this happens, it isn't an issue. - - let is_first_invocation = style == PathStyle::Expr; - // Take a snapshot before attempting to parse - we can restore this later. - let snapshot = if is_first_invocation { - Some(self.clone()) - } else { - None - }; - - debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)"); - match self.parse_generic_args() { - Ok(value) => Ok(value), - Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => { - // Cancel error from being unable to find `>`. We know the error - // must have been this due to a non-zero unmatched angle bracket - // count. - e.cancel(); - - // Swap `self` with our backup of the parser state before attempting to parse - // generic arguments. - let snapshot = mem::replace(self, snapshot.unwrap()); - - debug!( - "parse_generic_args_with_leading_angle_bracket_recovery: (snapshot failure) \ - snapshot.count={:?}", - snapshot.unmatched_angle_bracket_count, - ); - - // Eat the unmatched angle brackets. - for _ in 0..snapshot.unmatched_angle_bracket_count { - self.eat_lt(); - } - - // Make a span over ${unmatched angle bracket count} characters. - let span = lo.with_hi( - lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count) - ); - self.diagnostic() - .struct_span_err( - span, - &format!( - "unmatched angle bracket{}", - pluralize!(snapshot.unmatched_angle_bracket_count) - ), - ) - .span_suggestion( - span, - &format!( - "remove extra angle bracket{}", - pluralize!(snapshot.unmatched_angle_bracket_count) - ), - String::new(), - Applicability::MachineApplicable, - ) - .emit(); - - // Try again without unmatched angle bracket characters. - self.parse_generic_args() - }, - Err(e) => Err(e), - } - } - - /// Parses (possibly empty) list of lifetime and type arguments and associated type bindings, - /// possibly including trailing comma. - fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> { - let mut args = Vec::new(); - let mut constraints = Vec::new(); - let mut misplaced_assoc_ty_constraints: Vec<Span> = Vec::new(); - let mut assoc_ty_constraints: Vec<Span> = Vec::new(); - - let args_lo = self.token.span; - - loop { - if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) { - // Parse lifetime argument. - args.push(GenericArg::Lifetime(self.expect_lifetime())); - misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints); - } else if self.check_ident() - && self.look_ahead(1, |t| t == &token::Eq || t == &token::Colon) - { - // Parse associated type constraint. - let lo = self.token.span; - let ident = self.parse_ident()?; - let kind = if self.eat(&token::Eq) { - AssocTyConstraintKind::Equality { - ty: self.parse_ty()?, - } - } else if self.eat(&token::Colon) { - AssocTyConstraintKind::Bound { - bounds: self.parse_generic_bounds(Some(self.prev_span))?, - } - } else { - unreachable!(); - }; - - let span = lo.to(self.prev_span); - - // Gate associated type bounds, e.g., `Iterator<Item: Ord>`. - if let AssocTyConstraintKind::Bound { .. 
} = kind { - self.sess.gated_spans.gate(sym::associated_type_bounds, span); - } - - constraints.push(AssocTyConstraint { - id: ast::DUMMY_NODE_ID, - ident, - kind, - span, - }); - assoc_ty_constraints.push(span); - } else if self.check_const_arg() { - // Parse const argument. - let expr = if let token::OpenDelim(token::Brace) = self.token.kind { - self.parse_block_expr( - None, self.token.span, BlockCheckMode::Default, ThinVec::new() - )? - } else if self.token.is_ident() { - // FIXME(const_generics): to distinguish between idents for types and consts, - // we should introduce a GenericArg::Ident in the AST and distinguish when - // lowering to the HIR. For now, idents for const args are not permitted. - if self.token.is_bool_lit() { - self.parse_literal_maybe_minus()? - } else { - return Err( - self.fatal("identifiers may currently not be used for const generics") - ); - } - } else { - self.parse_literal_maybe_minus()? - }; - let value = AnonConst { - id: ast::DUMMY_NODE_ID, - value: expr, - }; - args.push(GenericArg::Const(value)); - misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints); - } else if self.check_type() { - // Parse type argument. - args.push(GenericArg::Type(self.parse_ty()?)); - misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints); - } else { - break - } - - if !self.eat(&token::Comma) { - break - } - } - - // FIXME: we would like to report this in ast_validation instead, but we currently do not - // preserve ordering of generic parameters with respect to associated type binding, so we - // lose that information after parsing. - if misplaced_assoc_ty_constraints.len() > 0 { - let mut err = self.struct_span_err( - args_lo.to(self.prev_span), - "associated type bindings must be declared after generic parameters", - ); - for span in misplaced_assoc_ty_constraints { - err.span_label( - span, - "this associated type binding should be moved after the generic parameters", - ); - } - err.emit(); - } - - Ok((args, constraints)) - } -} diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs deleted file mode 100644 index 30e47b7a0b2..00000000000 --- a/src/libsyntax/parse/parser/stmt.rs +++ /dev/null @@ -1,480 +0,0 @@ -use super::{Parser, Restrictions, PrevTokenKind, SemiColonMode, BlockMode}; -use super::expr::LhsExpr; -use super::path::PathStyle; -use super::pat::GateOr; -use super::diagnostics::Error; - -use crate::ptr::P; -use crate::{maybe_whole, ThinVec}; -use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind}; -use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter}; -use crate::parse::DirectoryOwnership; -use crate::util::classify; -use crate::token; -use crate::source_map::{respan, Span}; -use crate::symbol::{kw, sym}; - -use std::mem; -use errors::{PResult, Applicability}; - -impl<'a> Parser<'a> { - /// Parses a statement. This stops just before trailing semicolons on everything but items. - /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed. 
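As a point of reference, the statement kinds distinguished by the doc comment above look like this side by side; a minimal sketch with invented names:

```rust
// Illustrative only: one of each statement kind named in `StmtKind`.
macro_rules! trace {
    ($e:expr) => { println!("{}", $e) };
}

fn demo() -> i32 {
    let x = 1;                // `StmtKind::Local`
    fn helper() -> i32 { 41 } // `StmtKind::Item`
    trace!(x);                // `StmtKind::Mac` (macro statement)
    helper();                 // `StmtKind::Semi`: expression statement with its `;`
    helper() + x              // `StmtKind::Expr`: tail expression, `;` left unconsumed
}

fn main() {
    println!("{}", demo());
}
```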
-    pub fn parse_stmt(&mut self) -> PResult<'a, Option<Stmt>> {
-        Ok(self.parse_stmt_(true))
-    }
-
-    fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
-        self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
-            e.emit();
-            self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
-            None
-        })
-    }
-
-    fn parse_stmt_without_recovery(
-        &mut self,
-        macro_legacy_warnings: bool,
-    ) -> PResult<'a, Option<Stmt>> {
-        maybe_whole!(self, NtStmt, |x| Some(x));
-
-        let attrs = self.parse_outer_attributes()?;
-        let lo = self.token.span;
-
-        Ok(Some(if self.eat_keyword(kw::Let) {
-            Stmt {
-                id: DUMMY_NODE_ID,
-                kind: StmtKind::Local(self.parse_local(attrs.into())?),
-                span: lo.to(self.prev_span),
-            }
-        } else if let Some(macro_def) = self.eat_macro_def(
-            &attrs,
-            &respan(lo, VisibilityKind::Inherited),
-            lo,
-        )? {
-            Stmt {
-                id: DUMMY_NODE_ID,
-                kind: StmtKind::Item(macro_def),
-                span: lo.to(self.prev_span),
-            }
-        // This starts like a simple path, but we must be careful to avoid contextual keywords,
-        // such as union items, items with `crate` visibility, or auto trait items.
-        // Our goal here is to parse an arbitrary path `a::b::c`, but not something that merely
-        // starts like a path (one token) yet is in fact not a path.
-        // `union::b::c` - path, `union U { ... }` - not a path.
-        // `crate::b::c` - path, `crate struct S;` - not a path.
-        } else if self.token.is_path_start() &&
-                  !self.token.is_qpath_start() &&
-                  !self.is_union_item() &&
-                  !self.is_crate_vis() &&
-                  !self.is_auto_trait_item() &&
-                  !self.is_async_fn() {
-            let path = self.parse_path(PathStyle::Expr)?;
-
-            if !self.eat(&token::Not) {
-                let expr = if self.check(&token::OpenDelim(token::Brace)) {
-                    self.parse_struct_expr(lo, path, ThinVec::new())?
-                } else {
-                    let hi = self.prev_span;
-                    self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new())
-                };
-
-                let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
-                    let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
-                    this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
-                })?;
-
-                return Ok(Some(Stmt {
-                    id: DUMMY_NODE_ID,
-                    kind: StmtKind::Expr(expr),
-                    span: lo.to(self.prev_span),
-                }));
-            }
-
-            let (delim, tts) = self.expect_delimited_token_tree()?;
-            let hi = self.prev_span;
-
-            let style = if delim == MacDelimiter::Brace {
-                MacStmtStyle::Braces
-            } else {
-                MacStmtStyle::NoBraces
-            };
-
-            let mac = Mac {
-                path,
-                tts,
-                delim,
-                span: lo.to(hi),
-                prior_type_ascription: self.last_type_ascription,
-            };
-            let kind = if delim == MacDelimiter::Brace ||
-                          self.token == token::Semi || self.token == token::Eof {
-                StmtKind::Mac(P((mac, style, attrs.into())))
-            }
-            // We used to incorrectly stop parsing macro-expanded statements here.
-            // If the next token will be an error anyway but could have parsed with the
-            // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-            else if macro_legacy_warnings && self.token.can_begin_expr() &&
-                match self.token.kind {
-                    // These can continue an expression, so we can't stop parsing and warn.
-                    token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
-                    token::BinOp(token::Minus) | token::BinOp(token::Star) |
-                    token::BinOp(token::And) | token::BinOp(token::Or) |
-                    token::AndAnd | token::OrOr |
-                    token::DotDot | token::DotDotDot | token::DotDotEq => false,
-                    _ => true,
-                }
-            {
-                self.warn_missing_semicolon();
-                StmtKind::Mac(P((mac, style, attrs.into())))
-            } else {
-                let e = self.mk_expr(mac.span, ExprKind::Mac(mac), ThinVec::new());
-                let e = self.maybe_recover_from_bad_qpath(e, true)?;
-                let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
-                let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
-                StmtKind::Expr(e)
-            };
-            Stmt {
-                id: DUMMY_NODE_ID,
-                span: lo.to(hi),
-                kind,
-            }
-        } else {
-            // FIXME: Bad copy of attrs
-            let old_directory_ownership =
-                mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
-            let item = self.parse_item_(attrs.clone(), false, true)?;
-            self.directory.ownership = old_directory_ownership;
-
-            match item {
-                Some(i) => Stmt {
-                    id: DUMMY_NODE_ID,
-                    span: lo.to(i.span),
-                    kind: StmtKind::Item(i),
-                },
-                None => {
-                    let unused_attrs = |attrs: &[Attribute], s: &mut Self| {
-                        if !attrs.is_empty() {
-                            if s.prev_token_kind == PrevTokenKind::DocComment {
-                                s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
-                            } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
-                                s.span_err(
-                                    s.token.span, "expected statement after outer attribute"
-                                );
-                            }
-                        }
-                    };
-
-                    // Do not attempt to parse an expression if we're done here.
-                    if self.token == token::Semi {
-                        unused_attrs(&attrs, self);
-                        self.bump();
-                        let mut last_semi = lo;
-                        while self.token == token::Semi {
-                            last_semi = self.token.span;
-                            self.bump();
-                        }
-                        // We are encoding a string of semicolons as an
-                        // empty tuple that spans the excess semicolons
-                        // to preserve this info until the lint stage.
-                        return Ok(Some(Stmt {
-                            id: DUMMY_NODE_ID,
-                            span: lo.to(last_semi),
-                            kind: StmtKind::Semi(self.mk_expr(lo.to(last_semi),
-                                ExprKind::Tup(Vec::new()),
-                                ThinVec::new()
-                            )),
-                        }));
-                    }
-
-                    if self.token == token::CloseDelim(token::Brace) {
-                        unused_attrs(&attrs, self);
-                        return Ok(None);
-                    }
-
-                    // Remainder are line-expr stmts.
-                    let e = self.parse_expr_res(
-                        Restrictions::STMT_EXPR, Some(attrs.into()))?;
-                    Stmt {
-                        id: DUMMY_NODE_ID,
-                        span: lo.to(e.span),
-                        kind: StmtKind::Expr(e),
-                    }
-                }
-            }
-        }))
-    }
-
-    /// Parses a local variable declaration.
-    fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
-        let lo = self.prev_span;
-        let pat = self.parse_top_pat(GateOr::Yes)?;
-
-        let (err, ty) = if self.eat(&token::Colon) {
-            // Save the state of the parser before parsing the type normally, in case there is
-            // a `:` instead of an `=` typo.
-            let parser_snapshot_before_type = self.clone();
-            let colon_sp = self.prev_span;
-            match self.parse_ty() {
-                Ok(ty) => (None, Some(ty)),
-                Err(mut err) => {
-                    // Rewind to before attempting to parse the type and continue parsing.
- let parser_snapshot_after_type = self.clone(); - mem::replace(self, parser_snapshot_before_type); - - let snippet = self.span_to_snippet(pat.span).unwrap(); - err.span_label(pat.span, format!("while parsing the type for `{}`", snippet)); - (Some((parser_snapshot_after_type, colon_sp, err)), None) - } - } - } else { - (None, None) - }; - let init = match (self.parse_initializer(err.is_some()), err) { - (Ok(init), None) => { // init parsed, ty parsed - init - } - (Ok(init), Some((_, colon_sp, mut err))) => { // init parsed, ty error - // Could parse the type as if it were the initializer, it is likely there was a - // typo in the code: `:` instead of `=`. Add suggestion and emit the error. - err.span_suggestion_short( - colon_sp, - "use `=` if you meant to assign", - " =".to_string(), - Applicability::MachineApplicable - ); - err.emit(); - // As this was parsed successfully, continue as if the code has been fixed for the - // rest of the file. It will still fail due to the emitted error, but we avoid - // extra noise. - init - } - (Err(mut init_err), Some((snapshot, _, ty_err))) => { // init error, ty error - init_err.cancel(); - // Couldn't parse the type nor the initializer, only raise the type error and - // return to the parser state before parsing the type as the initializer. - // let x: <parse_error>; - mem::replace(self, snapshot); - return Err(ty_err); - } - (Err(err), None) => { // init error, ty parsed - // Couldn't parse the initializer and we're not attempting to recover a failed - // parse of the type, return the error. - return Err(err); - } - }; - let hi = if self.token == token::Semi { - self.token.span - } else { - self.prev_span - }; - Ok(P(ast::Local { - ty, - pat, - init, - id: DUMMY_NODE_ID, - span: lo.to(hi), - attrs, - })) - } - - /// Parses the RHS of a local variable declaration (e.g., '= 14;'). - fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> { - if self.eat(&token::Eq) { - Ok(Some(self.parse_expr()?)) - } else if skip_eq { - Ok(Some(self.parse_expr()?)) - } else { - Ok(None) - } - } - - fn is_auto_trait_item(&self) -> bool { - // auto trait - (self.token.is_keyword(kw::Auto) && - self.is_keyword_ahead(1, &[kw::Trait])) - || // unsafe auto trait - (self.token.is_keyword(kw::Unsafe) && - self.is_keyword_ahead(1, &[kw::Auto]) && - self.is_keyword_ahead(2, &[kw::Trait])) - } - - /// Parses a block. No inner attributes are allowed. - pub fn parse_block(&mut self) -> PResult<'a, P<Block>> { - maybe_whole!(self, NtBlock, |x| x); - - let lo = self.token.span; - - if !self.eat(&token::OpenDelim(token::Brace)) { - let sp = self.token.span; - let tok = self.this_token_descr(); - let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok)); - let do_not_suggest_help = - self.token.is_keyword(kw::In) || self.token == token::Colon; - - if self.token.is_ident_named(sym::and) { - e.span_suggestion_short( - self.token.span, - "use `&&` instead of `and` for the boolean operator", - "&&".to_string(), - Applicability::MaybeIncorrect, - ); - } - if self.token.is_ident_named(sym::or) { - e.span_suggestion_short( - self.token.span, - "use `||` instead of `or` for the boolean operator", - "||".to_string(), - Applicability::MaybeIncorrect, - ); - } - - // Check to see if the user has written something like - // - // if (cond) - // bar; - // - // which is valid in other languages, but not Rust. 
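The shape being recovered from, next to the block form that the suggestion produces (a sketch; the condition and body are invented):

```rust
fn main() {
    let cond = true;

    // Not Rust (C-style), which the check above detects:
    //
    //     if (cond)
    //         println!("hi");
    //
    // The emitted suggestion wraps the statement in a block instead:
    if cond {
        println!("hi");
    }
}
```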
- match self.parse_stmt_without_recovery(false) { - Ok(Some(stmt)) => { - if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) - || do_not_suggest_help { - // If the next token is an open brace (e.g., `if a b {`), the place- - // inside-a-block suggestion would be more likely wrong than right. - e.span_label(sp, "expected `{`"); - return Err(e); - } - let mut stmt_span = stmt.span; - // Expand the span to include the semicolon, if it exists. - if self.eat(&token::Semi) { - stmt_span = stmt_span.with_hi(self.prev_span.hi()); - } - if let Ok(snippet) = self.span_to_snippet(stmt_span) { - e.span_suggestion( - stmt_span, - "try placing this code inside a block", - format!("{{ {} }}", snippet), - // Speculative; has been misleading in the past (#46836). - Applicability::MaybeIncorrect, - ); - } - } - Err(mut e) => { - self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore); - e.cancel(); - } - _ => () - } - e.span_label(sp, "expected `{`"); - return Err(e); - } - - self.parse_block_tail(lo, BlockCheckMode::Default) - } - - /// Parses a block. Inner attributes are allowed. - pub(super) fn parse_inner_attrs_and_block( - &mut self - ) -> PResult<'a, (Vec<Attribute>, P<Block>)> { - maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); - - let lo = self.token.span; - self.expect(&token::OpenDelim(token::Brace))?; - Ok((self.parse_inner_attributes()?, - self.parse_block_tail(lo, BlockCheckMode::Default)?)) - } - - /// Parses the rest of a block expression or function body. - /// Precondition: already parsed the '{'. - pub(super) fn parse_block_tail( - &mut self, - lo: Span, - s: BlockCheckMode - ) -> PResult<'a, P<Block>> { - let mut stmts = vec![]; - while !self.eat(&token::CloseDelim(token::Brace)) { - if self.token == token::Eof { - break; - } - let stmt = match self.parse_full_stmt(false) { - Err(mut err) => { - self.maybe_annotate_with_ascription(&mut err, false); - err.emit(); - self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); - Some(Stmt { - id: DUMMY_NODE_ID, - kind: StmtKind::Expr(self.mk_expr_err(self.token.span)), - span: self.token.span, - }) - } - Ok(stmt) => stmt, - }; - if let Some(stmt) = stmt { - stmts.push(stmt); - } else { - // Found only `;` or `}`. - continue; - }; - } - Ok(P(ast::Block { - stmts, - id: DUMMY_NODE_ID, - rules: s, - span: lo.to(self.prev_span), - })) - } - - /// Parses a statement, including the trailing semicolon. - pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { - // Skip looking for a trailing semicolon when we have an interpolated statement. - maybe_whole!(self, NtStmt, |x| Some(x)); - - let mut stmt = match self.parse_stmt_without_recovery(macro_legacy_warnings)? { - Some(stmt) => stmt, - None => return Ok(None), - }; - - let mut eat_semi = true; - match stmt.kind { - StmtKind::Expr(ref expr) if self.token != token::Eof => { - // expression without semicolon - if classify::expr_requires_semi_to_be_stmt(expr) { - // Just check for errors and recover; do not eat semicolon yet. - if let Err(mut e) = - self.expect_one_of(&[], &[token::Semi, token::CloseDelim(token::Brace)]) - { - e.emit(); - self.recover_stmt(); - // Don't complain about type errors in body tail after parse error (#57383). - let sp = expr.span.to(self.prev_span); - stmt.kind = StmtKind::Expr(self.mk_expr_err(sp)); - } - } - } - StmtKind::Local(..) => { - // We used to incorrectly allow a macro-expanded let statement to lack a semicolon. 
- if macro_legacy_warnings && self.token != token::Semi { - self.warn_missing_semicolon(); - } else { - self.expect_semi()?; - eat_semi = false; - } - } - _ => {} - } - - if eat_semi && self.eat(&token::Semi) { - stmt = stmt.add_trailing_semicolon(); - } - stmt.span = stmt.span.to(self.prev_span); - Ok(Some(stmt)) - } - - fn warn_missing_semicolon(&self) { - self.diagnostic().struct_span_warn(self.token.span, { - &format!("expected `;`, found {}", self.this_token_descr()) - }).note({ - "this was erroneously allowed and will become a hard error in a future release" - }).emit(); - } -} diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs deleted file mode 100644 index a891634e611..00000000000 --- a/src/libsyntax/parse/parser/ty.rs +++ /dev/null @@ -1,458 +0,0 @@ -use super::{Parser, PathStyle, PrevTokenKind, TokenType}; -use super::item::ParamCfg; - -use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath}; -use crate::ptr::P; -use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident}; -use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef}; -use crate::ast::{Mutability, AnonConst, Mac}; -use crate::token::{self, Token}; -use crate::source_map::Span; -use crate::symbol::{kw}; - -use errors::{PResult, Applicability, pluralize}; - -/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`, -/// `IDENT<<u8 as Trait>::AssocTy>`. -/// -/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes -/// that `IDENT` is not the ident of a fn trait. -fn can_continue_type_after_non_fn_ident(t: &Token) -> bool { - t == &token::ModSep || t == &token::Lt || - t == &token::BinOp(token::Shl) -} - -impl<'a> Parser<'a> { - /// Parses a type. - pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> { - self.parse_ty_common(true, true, false) - } - - /// Parses a type in restricted contexts where `+` is not permitted. - /// - /// Example 1: `&'a TYPE` - /// `+` is prohibited to maintain operator priority (P(+) < P(&)). - /// Example 2: `value1 as TYPE + value2` - /// `+` is prohibited to avoid interactions with expression grammar. - pub(super) fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> { - self.parse_ty_common(false, true, false) - } - - /// Parses an optional return type `[ -> TY ]` in a function declaration. - pub(super) fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> { - if self.eat(&token::RArrow) { - Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?)) - } else { - Ok(FunctionRetTy::Default(self.token.span.shrink_to_lo())) - } - } - - pub(super) fn parse_ty_common(&mut self, allow_plus: bool, allow_qpath_recovery: bool, - allow_c_variadic: bool) -> PResult<'a, P<Ty>> { - maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery); - maybe_whole!(self, NtTy, |x| x); - - let lo = self.token.span; - let mut impl_dyn_multi = false; - let kind = if self.eat(&token::OpenDelim(token::Paren)) { - // `(TYPE)` is a parenthesized type. - // `(TYPE,)` is a tuple with a single field of type TYPE. 
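A small sketch of the `(TYPE)` versus `(TYPE,)` distinction drawn in the comment above (values invented):

```rust
fn main() {
    let a: (i32) = 5;     // same type as `i32`; the compiler even warns that
                          // the parentheses are unnecessary
    let b: (i32,) = (5,); // a one-element tuple
    println!("{} {}", a, b.0);
}
```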
- let mut ts = vec![]; - let mut last_comma = false; - while self.token != token::CloseDelim(token::Paren) { - ts.push(self.parse_ty()?); - if self.eat(&token::Comma) { - last_comma = true; - } else { - last_comma = false; - break; - } - } - let trailing_plus = self.prev_token_kind == PrevTokenKind::Plus; - self.expect(&token::CloseDelim(token::Paren))?; - - if ts.len() == 1 && !last_comma { - let ty = ts.into_iter().nth(0).unwrap().into_inner(); - let maybe_bounds = allow_plus && self.token.is_like_plus(); - match ty.kind { - // `(TY_BOUND_NOPAREN) + BOUND + ...`. - TyKind::Path(None, ref path) if maybe_bounds => { - self.parse_remaining_bounds(Vec::new(), path.clone(), lo, true)? - } - TyKind::TraitObject(ref bounds, TraitObjectSyntax::None) - if maybe_bounds && bounds.len() == 1 && !trailing_plus => { - let path = match bounds[0] { - GenericBound::Trait(ref pt, ..) => pt.trait_ref.path.clone(), - GenericBound::Outlives(..) => self.bug("unexpected lifetime bound"), - }; - self.parse_remaining_bounds(Vec::new(), path, lo, true)? - } - // `(TYPE)` - _ => TyKind::Paren(P(ty)) - } - } else { - TyKind::Tup(ts) - } - } else if self.eat(&token::Not) { - // Never type `!` - TyKind::Never - } else if self.eat(&token::BinOp(token::Star)) { - // Raw pointer - TyKind::Ptr(self.parse_ptr()?) - } else if self.eat(&token::OpenDelim(token::Bracket)) { - // Array or slice - let t = self.parse_ty()?; - // Parse optional `; EXPR` in `[TYPE; EXPR]` - let t = match self.maybe_parse_fixed_length_of_vec()? { - None => TyKind::Slice(t), - Some(length) => TyKind::Array(t, AnonConst { - id: ast::DUMMY_NODE_ID, - value: length, - }), - }; - self.expect(&token::CloseDelim(token::Bracket))?; - t - } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) { - // Reference - self.expect_and()?; - self.parse_borrowed_pointee()? - } else if self.eat_keyword_noexpect(kw::Typeof) { - // `typeof(EXPR)` - // In order to not be ambiguous, the type must be surrounded by parens. - self.expect(&token::OpenDelim(token::Paren))?; - let e = AnonConst { - id: ast::DUMMY_NODE_ID, - value: self.parse_expr()?, - }; - self.expect(&token::CloseDelim(token::Paren))?; - TyKind::Typeof(e) - } else if self.eat_keyword(kw::Underscore) { - // A type to be inferred `_` - TyKind::Infer - } else if self.token_is_bare_fn_keyword() { - // Function pointer type - self.parse_ty_bare_fn(Vec::new())? - } else if self.check_keyword(kw::For) { - // Function pointer type or bound list (trait object type) starting with a poly-trait. - // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` - // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let lo = self.token.span; - let lifetime_defs = self.parse_late_bound_lifetime_defs()?; - if self.token_is_bare_fn_keyword() { - self.parse_ty_bare_fn(lifetime_defs)? - } else { - let path = self.parse_path(PathStyle::Type)?; - let parse_plus = allow_plus && self.check_plus(); - self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)? - } - } else if self.eat_keyword(kw::Impl) { - // Always parse bounds greedily for better error recovery. - let bounds = self.parse_generic_bounds(None)?; - impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; - TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds) - } else if self.check_keyword(kw::Dyn) && - (self.token.span.rust_2018() || - self.look_ahead(1, |t| t.can_begin_bound() && - !can_continue_type_after_non_fn_ident(t))) { - self.bump(); // `dyn` - // Always parse bounds greedily for better error recovery. 
- let bounds = self.parse_generic_bounds(None)?; - impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; - TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn) - } else if self.check(&token::Question) || - self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus()) { - // Bound list (trait object type) - TyKind::TraitObject(self.parse_generic_bounds_common(allow_plus, None)?, - TraitObjectSyntax::None) - } else if self.eat_lt() { - // Qualified path - let (qself, path) = self.parse_qpath(PathStyle::Type)?; - TyKind::Path(Some(qself), path) - } else if self.token.is_path_start() { - // Simple path - let path = self.parse_path(PathStyle::Type)?; - if self.eat(&token::Not) { - // Macro invocation in type position - let (delim, tts) = self.expect_delimited_token_tree()?; - let mac = Mac { - path, - tts, - delim, - span: lo.to(self.prev_span), - prior_type_ascription: self.last_type_ascription, - }; - TyKind::Mac(mac) - } else { - // Just a type path or bound list (trait object type) starting with a trait. - // `Type` - // `Trait1 + Trait2 + 'a` - if allow_plus && self.check_plus() { - self.parse_remaining_bounds(Vec::new(), path, lo, true)? - } else { - TyKind::Path(None, path) - } - } - } else if self.check(&token::DotDotDot) { - if allow_c_variadic { - self.eat(&token::DotDotDot); - TyKind::CVarArgs - } else { - return Err(struct_span_fatal!( - self.sess.span_diagnostic, - self.token.span, - E0743, - "only foreign functions are allowed to be C-variadic", - )); - } - } else { - let msg = format!("expected type, found {}", self.this_token_descr()); - let mut err = self.fatal(&msg); - err.span_label(self.token.span, "expected type"); - self.maybe_annotate_with_ascription(&mut err, true); - return Err(err); - }; - - let span = lo.to(self.prev_span); - let ty = self.mk_ty(span, kind); - - // Try to recover from use of `+` with incorrect priority. 
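A few of the type forms handled by `parse_ty_common`, including the parenthesized trait-object form that the ambiguous-`+` recovery steers users toward (a sketch; the functions are invented):

```rust
use std::fmt::Display;

// `impl Trait + Bound` and `dyn Trait + Bound` are the multi-bound forms
// parsed greedily above.
fn boxed(d: impl Display + Send + 'static) -> Box<dyn Display + Send> {
    Box::new(d)
}

// `&dyn Display + Send` would be the ambiguous-`+` form that gets reported;
// the parenthesized version is the accepted spelling.
fn borrowed(d: &(dyn Display + Send)) -> String {
    format!("{}", d)
}

fn main() {
    let b = boxed(7u32);
    println!("{} {}", borrowed(&*b), b);
}
```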
- self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty); - self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?; - self.maybe_recover_from_bad_qpath(ty, allow_qpath_recovery) - } - - fn parse_remaining_bounds(&mut self, generic_params: Vec<GenericParam>, path: ast::Path, - lo: Span, parse_plus: bool) -> PResult<'a, TyKind> { - let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_span)); - let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)]; - if parse_plus { - self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded - bounds.append(&mut self.parse_generic_bounds(Some(self.prev_span))?); - } - Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None)) - } - - fn parse_ptr(&mut self) -> PResult<'a, MutTy> { - let mutbl = self.parse_const_or_mut().unwrap_or_else(|| { - let span = self.prev_span; - let msg = "expected mut or const in raw pointer type"; - self.struct_span_err(span, msg) - .span_label(span, msg) - .help("use `*mut T` or `*const T` as appropriate") - .emit(); - Mutability::Immutable - }); - let t = self.parse_ty_no_plus()?; - Ok(MutTy { ty: t, mutbl }) - } - - fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option<P<ast::Expr>>> { - if self.eat(&token::Semi) { - Ok(Some(self.parse_expr()?)) - } else { - Ok(None) - } - } - - fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> { - let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None }; - let mutbl = self.parse_mutability(); - let ty = self.parse_ty_no_plus()?; - return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl })); - } - - /// Is the current token one of the keywords that signals a bare function type? - fn token_is_bare_fn_keyword(&mut self) -> bool { - self.check_keyword(kw::Fn) || - self.check_keyword(kw::Unsafe) || - self.check_keyword(kw::Extern) - } - - /// Parses a `TyKind::BareFn` type. - fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> { - /* - - [unsafe] [extern "ABI"] fn (S) -> T - ^~~~^ ^~~~^ ^~^ ^ - | | | | - | | | Return type - | | Argument types - | | - | ABI - Function Style - */ - - let unsafety = self.parse_unsafety(); - let abi = self.parse_extern_abi()?; - self.expect_keyword(kw::Fn)?; - let cfg = ParamCfg { - is_self_allowed: false, - allow_c_variadic: true, - is_name_required: |_| false, - }; - let decl = self.parse_fn_decl(cfg, false)?; - Ok(TyKind::BareFn(P(BareFnTy { - abi, - unsafety, - generic_params, - decl, - }))) - } - - pub(super) fn parse_generic_bounds(&mut self, - colon_span: Option<Span>) -> PResult<'a, GenericBounds> { - self.parse_generic_bounds_common(true, colon_span) - } - - /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`. - /// - /// ``` - /// BOUND = TY_BOUND | LT_BOUND - /// LT_BOUND = LIFETIME (e.g., `'a`) - /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN) - /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`) - /// ``` - fn parse_generic_bounds_common(&mut self, - allow_plus: bool, - colon_span: Option<Span>) -> PResult<'a, GenericBounds> { - let mut bounds = Vec::new(); - let mut negative_bounds = Vec::new(); - let mut last_plus_span = None; - let mut was_negative = false; - loop { - // This needs to be synchronized with `TokenKind::can_begin_bound`. 
- let is_bound_start = self.check_path() || self.check_lifetime() || - self.check(&token::Not) || // used for error reporting only - self.check(&token::Question) || - self.check_keyword(kw::For) || - self.check(&token::OpenDelim(token::Paren)); - if is_bound_start { - let lo = self.token.span; - let has_parens = self.eat(&token::OpenDelim(token::Paren)); - let inner_lo = self.token.span; - let is_negative = self.eat(&token::Not); - let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None }; - if self.token.is_lifetime() { - if let Some(question_span) = question { - self.span_err(question_span, - "`?` may only modify trait bounds, not lifetime bounds"); - } - bounds.push(GenericBound::Outlives(self.expect_lifetime())); - if has_parens { - let inner_span = inner_lo.to(self.prev_span); - self.expect(&token::CloseDelim(token::Paren))?; - let mut err = self.struct_span_err( - lo.to(self.prev_span), - "parenthesized lifetime bounds are not supported" - ); - if let Ok(snippet) = self.span_to_snippet(inner_span) { - err.span_suggestion_short( - lo.to(self.prev_span), - "remove the parentheses", - snippet.to_owned(), - Applicability::MachineApplicable - ); - } - err.emit(); - } - } else { - let lifetime_defs = self.parse_late_bound_lifetime_defs()?; - let path = self.parse_path(PathStyle::Type)?; - if has_parens { - self.expect(&token::CloseDelim(token::Paren))?; - } - let poly_span = lo.to(self.prev_span); - if is_negative { - was_negative = true; - if let Some(sp) = last_plus_span.or(colon_span) { - negative_bounds.push(sp.to(poly_span)); - } - } else { - let poly_trait = PolyTraitRef::new(lifetime_defs, path, poly_span); - let modifier = if question.is_some() { - TraitBoundModifier::Maybe - } else { - TraitBoundModifier::None - }; - bounds.push(GenericBound::Trait(poly_trait, modifier)); - } - } - } else { - break - } - - if !allow_plus || !self.eat_plus() { - break - } else { - last_plus_span = Some(self.prev_span); - } - } - - if !negative_bounds.is_empty() || was_negative { - let negative_bounds_len = negative_bounds.len(); - let last_span = negative_bounds.last().map(|sp| *sp); - let mut err = self.struct_span_err( - negative_bounds, - "negative trait bounds are not supported", - ); - if let Some(sp) = last_span { - err.span_label(sp, "negative trait bounds are not supported"); - } - if let Some(bound_list) = colon_span { - let bound_list = bound_list.to(self.prev_span); - let mut new_bound_list = String::new(); - if !bounds.is_empty() { - let mut snippets = bounds.iter().map(|bound| bound.span()) - .map(|span| self.span_to_snippet(span)); - while let Some(Ok(snippet)) = snippets.next() { - new_bound_list.push_str(" + "); - new_bound_list.push_str(&snippet); - } - new_bound_list = new_bound_list.replacen(" +", ":", 1); - } - err.span_suggestion_hidden( - bound_list, - &format!("remove the trait bound{}", pluralize!(negative_bounds_len)), - new_bound_list, - Applicability::MachineApplicable, - ); - } - err.emit(); - } - - return Ok(bounds); - } - - pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> { - if self.eat_keyword(kw::For) { - self.expect_lt()?; - let params = self.parse_generic_params()?; - self.expect_gt()?; - // We rely on AST validation to rule out invalid cases: There must not be type - // parameters, and the lifetime parameters must not have bounds. 
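The bound forms covered by the grammar comment above, including a higher-ranked `for<'a>` bound of the kind `parse_late_bound_lifetime_defs` handles, in a minimal sketch (the function and its parameters are invented):

```rust
fn describe<T, F>(value: &T, render: F) -> usize
where
    T: ?Sized + ToString,            // `?` maybe-bound plus an ordinary trait bound
    T: 'static,                      // lifetime (outlives) bound
    F: for<'a> Fn(&'a str) -> usize, // higher-ranked bound: `for<LT_PARAM_DEFS> PATH`
{
    render(&value.to_string())
}

fn main() {
    let n = describe("hello", |s| s.len());
    println!("{}", n);
}
```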
- Ok(params) - } else { - Ok(Vec::new()) - } - } - - pub fn check_lifetime(&mut self) -> bool { - self.expected_tokens.push(TokenType::Lifetime); - self.token.is_lifetime() - } - - /// Parses a single lifetime `'a` or panics. - pub fn expect_lifetime(&mut self) -> Lifetime { - if let Some(ident) = self.token.lifetime() { - let span = self.token.span; - self.bump(); - Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID } - } else { - self.span_bug(self.token.span, "not a lifetime") - } - } - - pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> { - P(Ty { kind, span, id: ast::DUMMY_NODE_ID }) - } -} |
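Finally, the argument kinds collected by `parse_generic_args` back in path.rs map onto these surface forms on a current compiler (a sketch only; `Buffer` and `first_doubled` are invented, and const generic parameters were still feature-gated when this code was written):

```rust
// Lifetime, type, and const arguments, plus an `Ident = Type` constraint.
struct Buffer<'a, T, const N: usize> {
    items: &'a [T; N],
}

fn first_doubled(it: impl Iterator<Item = u32>) -> Option<u32> {
    // `Item = u32` is an associated type *equality* constraint; the
    // `Item: Bound` form is the one gated behind `associated_type_bounds`.
    it.map(|x| x * 2).next()
}

fn main() {
    let data = [1u32, 2, 3];
    let buf = Buffer::<u32, 3> { items: &data };
    println!("{:?} {:?}", buf.items, first_doubled(data.iter().copied()));
}
```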
