| field | value | date |
|---|---|---|
| author | Baoshan <pangbw@gmail.com> | 2019-09-01 17:52:09 -0700 |
| committer | GitHub <noreply@github.com> | 2019-09-01 17:52:09 -0700 |
| commit | d5ef9df032ec32c48d6c59050735352c48ff16f8 (patch) | |
| tree | 31a87971a84ffc967645099773910d6498d0cb0b /src/libsyntax/parse | |
| parent | 7726b54c059db0759e9725a58e222118eacec6d9 (diff) | |
| parent | dfd43f0fdd4e6969c7d82c0670d70bf305fbccf8 (diff) | |
| download | rust-d5ef9df032ec32c48d6c59050735352c48ff16f8.tar.gz rust-d5ef9df032ec32c48d6c59050735352c48ff16f8.zip | |
Merge pull request #13 from rust-lang/master
sync with rust-lang/rust
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 7 |
| -rw-r--r-- | src/libsyntax/parse/diagnostics.rs | 34 |
| -rw-r--r-- | src/libsyntax/parse/lexer/tests.rs | 78 |
| -rw-r--r-- | src/libsyntax/parse/literal.rs | 4 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 32 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 114 |
| -rw-r--r-- | src/libsyntax/parse/parser/expr.rs | 37 |
| -rw-r--r-- | src/libsyntax/parse/parser/item.rs | 11 |
| -rw-r--r-- | src/libsyntax/parse/parser/pat.rs | 443 |
| -rw-r--r-- | src/libsyntax/parse/parser/path.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/parser/stmt.rs | 3 |
| -rw-r--r-- | src/libsyntax/parse/parser/ty.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 43 |
13 files changed, 520 insertions, 290 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index a42da112360..671178223f5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -19,11 +19,10 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ permitted in this context"; impl<'a> Parser<'a> { - crate fn parse_arg_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { + crate fn parse_param_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let attrs = self.parse_outer_attributes()?; - attrs.iter().for_each(|a| - self.sess.param_attr_spans.borrow_mut().push(a.span) - ); + self.sess.gated_spans.param_attrs.borrow_mut() + .extend(attrs.iter().map(|a| a.span)); Ok(attrs) } diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 1fbf28fb830..d4e661d1a38 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -1,5 +1,5 @@ use crate::ast::{ - self, Arg, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, + self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, VariantData, }; use crate::feature_gate::{feature_err, UnstableFeatures}; @@ -18,7 +18,7 @@ use log::{debug, trace}; use std::mem; /// Creates a placeholder argument. -crate fn dummy_arg(ident: Ident) -> Arg { +crate fn dummy_arg(ident: Ident) -> Param { let pat = P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), @@ -29,7 +29,7 @@ crate fn dummy_arg(ident: Ident) -> Arg { span: ident.span, id: ast::DUMMY_NODE_ID }; - Arg { attrs: ThinVec::default(), id: ast::DUMMY_NODE_ID, pat, span: ident.span, ty: P(ty) } + Param { attrs: ThinVec::default(), id: ast::DUMMY_NODE_ID, pat, span: ident.span, ty: P(ty) } } pub enum Error { @@ -1183,7 +1183,7 @@ impl<'a> Parser<'a> { Err(err) } - crate fn eat_incorrect_doc_comment_for_arg_type(&mut self) { + crate fn eat_incorrect_doc_comment_for_param_type(&mut self) { if let token::DocComment(_) = self.token.kind { self.struct_span_err( self.token.span, @@ -1211,7 +1211,7 @@ impl<'a> Parser<'a> { } } - crate fn argument_without_type( + crate fn parameter_without_type( &mut self, err: &mut DiagnosticBuilder<'_>, pat: P<ast::Pat>, @@ -1286,13 +1286,13 @@ impl<'a> Parser<'a> { Ok((pat, ty)) } - crate fn recover_bad_self_arg( + crate fn recover_bad_self_param( &mut self, - mut arg: ast::Arg, + mut param: ast::Param, is_trait_item: bool, - ) -> PResult<'a, ast::Arg> { - let sp = arg.pat.span; - arg.ty.node = TyKind::Err; + ) -> PResult<'a, ast::Param> { + let sp = param.pat.span; + param.ty.node = TyKind::Err; let mut err = self.struct_span_err(sp, "unexpected `self` parameter in function"); if is_trait_item { err.span_label(sp, "must be the first associated function parameter"); @@ -1301,7 +1301,7 @@ impl<'a> Parser<'a> { err.note("`self` is only valid as the first parameter of an associated function"); } err.emit(); - Ok(arg) + Ok(param) } crate fn consume_block(&mut self, delim: token::DelimToken) { @@ -1344,15 +1344,15 @@ impl<'a> Parser<'a> { err } - /// Replace duplicated recovered arguments with `_` pattern to avoid unecessary errors. + /// Replace duplicated recovered parameters with `_` pattern to avoid unecessary errors. /// /// This is necessary because at this point we don't know whether we parsed a function with - /// anonymous arguments or a function with names but no types. 
In order to minimize - /// unecessary errors, we assume the arguments are in the shape of `fn foo(a, b, c)` where - /// the arguments are *names* (so we don't emit errors about not being able to find `b` in + /// anonymous parameters or a function with names but no types. In order to minimize + /// unecessary errors, we assume the parameters are in the shape of `fn foo(a, b, c)` where + /// the parameters are *names* (so we don't emit errors about not being able to find `b` in /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`, - /// we deduplicate them to not complain about duplicated argument names. - crate fn deduplicate_recovered_arg_names(&self, fn_inputs: &mut Vec<Arg>) { + /// we deduplicate them to not complain about duplicated parameter names. + crate fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) { let mut seen_inputs = FxHashSet::default(); for input in fn_inputs.iter_mut() { let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = ( diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs index a915aa42fd1..652ae95c853 100644 --- a/src/libsyntax/parse/lexer/tests.rs +++ b/src/libsyntax/parse/lexer/tests.rs @@ -10,7 +10,14 @@ use errors::{Handler, emitter::EmitterWriter}; use syntax_pos::{BytePos, Span}; fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess { - let emitter = EmitterWriter::new(Box::new(io::sink()), Some(sm.clone()), false, false, false); + let emitter = EmitterWriter::new( + Box::new(io::sink()), + Some(sm.clone()), + false, + false, + false, + None, + ); ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm) } @@ -28,10 +35,11 @@ fn t1() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - let mut string_reader = setup(&sm, - &sh, - "/* my source file */ fn main() { println!(\"zebra\"); }\n" - .to_string()); + let mut string_reader = setup( + &sm, + &sh, + "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(), + ); assert_eq!(string_reader.next_token(), token::Comment); assert_eq!(string_reader.next_token(), token::Whitespace); let tok1 = string_reader.next_token(); @@ -127,8 +135,10 @@ fn character_a() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(), - mk_lit(token::Char, "a", None)); + assert_eq!( + setup(&sm, &sh, "'a'".to_string()).next_token(), + mk_lit(token::Char, "a", None), + ); }) } @@ -137,8 +147,10 @@ fn character_space() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(), - mk_lit(token::Char, " ", None)); + assert_eq!( + setup(&sm, &sh, "' '".to_string()).next_token(), + mk_lit(token::Char, " ", None), + ); }) } @@ -147,8 +159,10 @@ fn character_escaped() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(), - mk_lit(token::Char, "\\n", None)); + assert_eq!( + setup(&sm, &sh, "'\\n'".to_string()).next_token(), + mk_lit(token::Char, "\\n", None), + ); }) } @@ -157,8 +171,10 @@ fn lifetime_name() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, 
"'abc".to_string()).next_token(), - token::Lifetime(Symbol::intern("'abc"))); + assert_eq!( + setup(&sm, &sh, "'abc".to_string()).next_token(), + token::Lifetime(Symbol::intern("'abc")), + ); }) } @@ -167,8 +183,10 @@ fn raw_string() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(), - mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None)); + assert_eq!( + setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(), + mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None), + ); }) } @@ -179,11 +197,15 @@ fn literal_suffixes() { let sh = mk_sess(sm.clone()); macro_rules! test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ - assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(), - mk_lit(token::$tok_type, $tok_contents, Some("suffix"))); + assert_eq!( + setup(&sm, &sh, format!("{}suffix", $input)).next_token(), + mk_lit(token::$tok_type, $tok_contents, Some("suffix")), + ); // with a whitespace separator: - assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(), - mk_lit(token::$tok_type, $tok_contents, None)); + assert_eq!( + setup(&sm, &sh, format!("{} suffix", $input)).next_token(), + mk_lit(token::$tok_type, $tok_contents, None), + ); }} } @@ -197,12 +219,18 @@ fn literal_suffixes() { test!("1.0", Float, "1.0"); test!("1.0e10", Float, "1.0e10"); - assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(), - mk_lit(token::Integer, "2", Some("us"))); - assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(), - mk_lit(token::StrRaw(3), "raw", Some("suffix"))); - assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(), - mk_lit(token::ByteStrRaw(3), "raw", Some("suffix"))); + assert_eq!( + setup(&sm, &sh, "2us".to_string()).next_token(), + mk_lit(token::Integer, "2", Some("us")), + ); + assert_eq!( + setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(), + mk_lit(token::StrRaw(3), "raw", Some("suffix")), + ); + assert_eq!( + setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(), + mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")), + ); }) } diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 6409acba573..36233de3cfb 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -104,7 +104,7 @@ impl LitKind { Ok(match kind { token::Bool => { - assert!(symbol == kw::True || symbol == kw::False); + assert!(symbol.is_bool_lit()); LitKind::Bool(symbol == kw::True) } token::Byte => return unescape_byte(&symbol.as_str()) @@ -261,7 +261,7 @@ impl Lit { /// Converts arbitrary token into an AST literal. crate fn from_token(token: &Token) -> Result<Lit, LitError> { let lit = match token.kind { - token::Ident(name, false) if name == kw::True || name == kw::False => + token::Ident(name, false) if name.is_bool_lit() => token::Lit::new(token::Bool, name, None), token::Literal(lit) => lit, diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index b1f3612a839..b1af4806e2d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -39,6 +39,22 @@ crate mod unescape_error_reporting; pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>; +/// Collected spans during parsing for places where a certain feature was +/// used and should be feature gated accordingly in `check_crate`. 
+#[derive(Default)] +pub struct GatedSpans { + /// Spans collected for gating `param_attrs`, e.g. `fn foo(#[attr] x: u8) {}`. + pub param_attrs: Lock<Vec<Span>>, + /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`. + pub let_chains: Lock<Vec<Span>>, + /// Spans collected for gating `async_closure`, e.g. `async || ..`. + pub async_closure: Lock<Vec<Span>>, + /// Spans collected for gating `yield e?` expressions (`generators` gate). + pub yields: Lock<Vec<Span>>, + /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`. + pub or_patterns: Lock<Vec<Span>>, +} + /// Info about a parsing session. pub struct ParseSess { pub span_diagnostic: Handler, @@ -58,16 +74,8 @@ pub struct ParseSess { /// operation token that followed it, but that the parser cannot identify without further /// analysis. pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>, - pub param_attr_spans: Lock<Vec<Span>>, - // Places where `let` exprs were used and should be feature gated according to `let_chains`. - pub let_chains_spans: Lock<Vec<Span>>, - // Places where `async || ..` exprs were used and should be feature gated. - pub async_closure_spans: Lock<Vec<Span>>, - // Places where `yield e?` exprs were used and should be feature gated. - pub yield_spans: Lock<Vec<Span>>, pub injected_crate_name: Once<Symbol>, - // Places where or-patterns e.g. `Some(Foo | Bar)` were used and should be feature gated. - pub or_pattern_spans: Lock<Vec<Span>>, + pub gated_spans: GatedSpans, } impl ParseSess { @@ -93,12 +101,8 @@ impl ParseSess { buffered_lints: Lock::new(vec![]), edition: ExpnId::root().expn_data().edition, ambiguous_block_expr_parse: Lock::new(FxHashMap::default()), - param_attr_spans: Lock::new(Vec::new()), - let_chains_spans: Lock::new(Vec::new()), - async_closure_spans: Lock::new(Vec::new()), - yield_spans: Lock::new(Vec::new()), injected_crate_name: Once::new(), - or_pattern_spans: Lock::new(Vec::new()), + gated_spans: GatedSpans::default(), } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 89725d8b339..49b05551bae 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -10,7 +10,7 @@ pub use path::PathStyle; mod stmt; mod generics; -use crate::ast::{self, AttrStyle, Attribute, Arg, BindingMode, StrStyle, SelfKind}; +use crate::ast::{self, AttrStyle, Attribute, Param, BindingMode, StrStyle, SelfKind}; use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind}; use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar}; use crate::source_map::{self, respan}; @@ -375,10 +375,11 @@ impl<'a> Parser<'a> { if let Some(directory) = directory { parser.directory = directory; } else if !parser.token.span.is_dummy() { - if let FileName::Real(mut path) = - sess.source_map().span_to_unmapped_path(parser.token.span) { - path.pop(); - parser.directory.path = Cow::from(path); + if let Some(FileName::Real(path)) = + &sess.source_map().lookup_char_pos(parser.token.span.lo()).file.unmapped_path { + if let Some(directory_path) = path.parent() { + parser.directory.path = Cow::from(directory_path.to_path_buf()); + } } } @@ -970,30 +971,27 @@ impl<'a> Parser<'a> { /// Skips unexpected attributes and doc comments in this position and emits an appropriate /// error. - /// This version of parse arg doesn't necessarily require identifier names. - fn parse_arg_general<F>( + /// This version of parse param doesn't necessarily require identifier names. 
+ fn parse_param_general( &mut self, is_trait_item: bool, allow_c_variadic: bool, - is_name_required: F, - ) -> PResult<'a, Arg> - where - F: Fn(&token::Token) -> bool - { + is_name_required: impl Fn(&token::Token) -> bool, + ) -> PResult<'a, Param> { let lo = self.token.span; - let attrs = self.parse_arg_attributes()?; - if let Some(mut arg) = self.parse_self_arg()? { - arg.attrs = attrs.into(); - return self.recover_bad_self_arg(arg, is_trait_item); + let attrs = self.parse_param_attributes()?; + if let Some(mut param) = self.parse_self_param()? { + param.attrs = attrs.into(); + return self.recover_bad_self_param(param, is_trait_item); } let is_name_required = is_name_required(&self.token); let (pat, ty) = if is_name_required || self.is_named_argument() { - debug!("parse_arg_general parse_pat (is_name_required:{})", is_name_required); + debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required); - let pat = self.parse_pat(Some("argument name"))?; + let pat = self.parse_fn_param_pat()?; if let Err(mut err) = self.expect(&token::Colon) { - if let Some(ident) = self.argument_without_type( + if let Some(ident) = self.parameter_without_type( &mut err, pat, is_name_required, @@ -1006,12 +1004,12 @@ impl<'a> Parser<'a> { } } - self.eat_incorrect_doc_comment_for_arg_type(); + self.eat_incorrect_doc_comment_for_param_type(); (pat, self.parse_ty_common(true, true, allow_c_variadic)?) } else { - debug!("parse_arg_general ident_to_pat"); + debug!("parse_param_general ident_to_pat"); let parser_snapshot_before_ty = self.clone(); - self.eat_incorrect_doc_comment_for_arg_type(); + self.eat_incorrect_doc_comment_for_param_type(); let mut ty = self.parse_ty_common(true, true, allow_c_variadic); if ty.is_ok() && self.token != token::Comma && self.token != token::CloseDelim(token::Paren) { @@ -1042,7 +1040,7 @@ impl<'a> Parser<'a> { let span = lo.to(self.token.span); - Ok(Arg { attrs: attrs.into(), id: ast::DUMMY_NODE_ID, pat, span, ty }) + Ok(Param { attrs: attrs.into(), id: ast::DUMMY_NODE_ID, pat, span, ty }) } /// Parses mutability (`mut` or nothing). 
@@ -1188,26 +1186,26 @@ impl<'a> Parser<'a> { } - fn parse_fn_args(&mut self, named_args: bool, allow_c_variadic: bool) - -> PResult<'a, (Vec<Arg> , bool)> { + fn parse_fn_params(&mut self, named_params: bool, allow_c_variadic: bool) + -> PResult<'a, (Vec<Param> , bool)> { let sp = self.token.span; let mut c_variadic = false; - let (args, _): (Vec<Option<Arg>>, _) = self.parse_paren_comma_seq(|p| { + let (params, _): (Vec<Option<Param>>, _) = self.parse_paren_comma_seq(|p| { let do_not_enforce_named_arguments_for_c_variadic = |token: &token::Token| -> bool { if token == &token::DotDotDot { false } else { - named_args + named_params } }; - match p.parse_arg_general( + match p.parse_param_general( false, allow_c_variadic, do_not_enforce_named_arguments_for_c_variadic ) { - Ok(arg) => { - if let TyKind::CVarArgs = arg.ty.node { + Ok(param) => { + if let TyKind::CVarArgs = param.ty.node { c_variadic = true; if p.token != token::CloseDelim(token::Paren) { let span = p.token.span; @@ -1215,10 +1213,10 @@ impl<'a> Parser<'a> { "`...` must be the last argument of a C-variadic function"); Ok(None) } else { - Ok(Some(arg)) + Ok(Some(param)) } } else { - Ok(Some(arg)) + Ok(Some(param)) } }, Err(mut e) => { @@ -1233,20 +1231,20 @@ impl<'a> Parser<'a> { } })?; - let args: Vec<_> = args.into_iter().filter_map(|x| x).collect(); + let params: Vec<_> = params.into_iter().filter_map(|x| x).collect(); - if c_variadic && args.len() <= 1 { + if c_variadic && params.len() <= 1 { self.span_err(sp, "C-variadic function must be declared with at least one named argument"); } - Ok((args, c_variadic)) + Ok((params, c_variadic)) } - /// Returns the parsed optional self argument and whether a self shortcut was used. + /// Returns the parsed optional self parameter and whether a self shortcut was used. /// - /// See `parse_self_arg_with_attrs` to collect attributes. - fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> { + /// See `parse_self_param_with_attrs` to collect attributes. + fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> { let expect_ident = |this: &mut Self| match this.token.kind { // Preserve hygienic context. token::Ident(name, _) => @@ -1351,49 +1349,51 @@ impl<'a> Parser<'a> { }; let eself = source_map::respan(eself_lo.to(eself_hi), eself); - Ok(Some(Arg::from_self(ThinVec::default(), eself, eself_ident))) + Ok(Some(Param::from_self(ThinVec::default(), eself, eself_ident))) } - /// Returns the parsed optional self argument with attributes and whether a self + /// Returns the parsed optional self parameter with attributes and whether a self /// shortcut was used. - fn parse_self_arg_with_attrs(&mut self) -> PResult<'a, Option<Arg>> { - let attrs = self.parse_arg_attributes()?; - let arg_opt = self.parse_self_arg()?; - Ok(arg_opt.map(|mut arg| { - arg.attrs = attrs.into(); - arg + fn parse_self_parameter_with_attrs(&mut self) -> PResult<'a, Option<Param>> { + let attrs = self.parse_param_attributes()?; + let param_opt = self.parse_self_param()?; + Ok(param_opt.map(|mut param| { + param.attrs = attrs.into(); + param })) } /// Parses the parameter list and result type of a function that may have a `self` parameter. - fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>> - where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>, + fn parse_fn_decl_with_self<F>(&mut self, parse_param_fn: F) -> PResult<'a, P<FnDecl>> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, Param>, { self.expect(&token::OpenDelim(token::Paren))?; // Parse optional self argument. 
- let self_arg = self.parse_self_arg_with_attrs()?; + let self_param = self.parse_self_parameter_with_attrs()?; // Parse the rest of the function parameter list. let sep = SeqSep::trailing_allowed(token::Comma); - let (mut fn_inputs, recovered) = if let Some(self_arg) = self_arg { + let (mut fn_inputs, recovered) = if let Some(self_param) = self_param { if self.check(&token::CloseDelim(token::Paren)) { - (vec![self_arg], false) + (vec![self_param], false) } else if self.eat(&token::Comma) { - let mut fn_inputs = vec![self_arg]; + let mut fn_inputs = vec![self_param]; let (mut input, _, recovered) = self.parse_seq_to_before_end( - &token::CloseDelim(token::Paren), sep, parse_arg_fn)?; + &token::CloseDelim(token::Paren), sep, parse_param_fn)?; fn_inputs.append(&mut input); (fn_inputs, recovered) } else { match self.expect_one_of(&[], &[]) { Err(err) => return Err(err), - Ok(recovered) => (vec![self_arg], recovered), + Ok(recovered) => (vec![self_param], recovered), } } } else { let (input, _, recovered) = - self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?; + self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), + sep, + parse_param_fn)?; (input, recovered) }; @@ -1401,8 +1401,8 @@ impl<'a> Parser<'a> { // Parse closing paren and return type. self.expect(&token::CloseDelim(token::Paren))?; } - // Replace duplicated recovered arguments with `_` pattern to avoid unecessary errors. - self.deduplicate_recovered_arg_names(&mut fn_inputs); + // Replace duplicated recovered params with `_` pattern to avoid unecessary errors. + self.deduplicate_recovered_params_names(&mut fn_inputs); Ok(P(FnDecl { inputs: fn_inputs, diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index ccc6bd15067..5b9f0f1df67 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1,12 +1,13 @@ use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle}; use super::{BlockMode, SemiColonMode}; use super::{SeqSep, TokenExpectType}; +use super::pat::{GateOr, PARAM_EXPECTED}; use crate::maybe_recover_from_interpolated_ty_qpath; use crate::ptr::P; use crate::ast::{self, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode}; use crate::ast::{Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm}; -use crate::ast::{Ty, TyKind, FunctionRetTy, Arg, FnDecl}; +use crate::ast::{Ty, TyKind, FunctionRetTy, Param, FnDecl}; use crate::ast::{BinOpKind, BinOp, UnOp}; use crate::ast::{Mac, AnonConst, Field}; @@ -999,7 +1000,7 @@ impl<'a> Parser<'a> { } let span = lo.to(hi); - self.sess.yield_spans.borrow_mut().push(span); + self.sess.gated_spans.yields.borrow_mut().push(span); } else if self.eat_keyword(kw::Let) { return self.parse_let_expr(attrs); } else if is_span_rust_2018 && self.eat_keyword(kw::Await) { @@ -1111,7 +1112,7 @@ impl<'a> Parser<'a> { }; if asyncness.is_async() { // Feature gate `async ||` closures. - self.sess.async_closure_spans.borrow_mut().push(self.prev_span); + self.sess.gated_spans.async_closure.borrow_mut().push(self.prev_span); } let capture_clause = self.parse_capture_clause(); @@ -1156,7 +1157,7 @@ impl<'a> Parser<'a> { &[&token::BinOp(token::Or), &token::OrOr], SeqSep::trailing_allowed(token::Comma), TokenExpectType::NoExpect, - |p| p.parse_fn_block_arg() + |p| p.parse_fn_block_param() )?.0; self.expect_or()?; args @@ -1171,11 +1172,11 @@ impl<'a> Parser<'a> { })) } - /// Parses an argument in a lambda header (e.g., `|arg, arg|`). 
- fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { + /// Parses a parameter in a lambda header (e.g., `|arg, arg|`). + fn parse_fn_block_param(&mut self) -> PResult<'a, Param> { let lo = self.token.span; - let attrs = self.parse_arg_attributes()?; - let pat = self.parse_pat(Some("argument name"))?; + let attrs = self.parse_param_attributes()?; + let pat = self.parse_pat(PARAM_EXPECTED)?; let t = if self.eat(&token::Colon) { self.parse_ty()? } else { @@ -1186,7 +1187,7 @@ impl<'a> Parser<'a> { }) }; let span = lo.to(self.token.span); - Ok(Arg { + Ok(Param { attrs: attrs.into(), ty: t, pat, @@ -1234,26 +1235,27 @@ impl<'a> Parser<'a> { if let ExprKind::Let(..) = cond.node { // Remove the last feature gating of a `let` expression since it's stable. - let last = self.sess.let_chains_spans.borrow_mut().pop(); + let last = self.sess.gated_spans.let_chains.borrow_mut().pop(); debug_assert_eq!(cond.span, last.unwrap()); } Ok(cond) } - /// Parses a `let $pats = $expr` pseudo-expression. + /// Parses a `let $pat = $expr` pseudo-expression. /// The `let` token has already been eaten. fn parse_let_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { let lo = self.prev_span; - let pats = self.parse_pats()?; + // FIXME(or_patterns, Centril | dlrobertson): use `parse_top_pat` instead. + let pat = self.parse_top_pat_unpack(GateOr::No)?; self.expect(&token::Eq)?; let expr = self.with_res( Restrictions::NO_STRUCT_LITERAL, |this| this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into()) )?; let span = lo.to(expr.span); - self.sess.let_chains_spans.borrow_mut().push(span); - Ok(self.mk_expr(span, ExprKind::Let(pats, expr), attrs)) + self.sess.gated_spans.let_chains.borrow_mut().push(span); + Ok(self.mk_expr(span, ExprKind::Let(pat, expr), attrs)) } /// `else` token already eaten @@ -1283,7 +1285,7 @@ impl<'a> Parser<'a> { _ => None, }; - let pat = self.parse_top_level_pat()?; + let pat = self.parse_top_pat(GateOr::Yes)?; if !self.eat_keyword(kw::In) { let in_span = self.prev_span.between(self.token.span); self.struct_span_err(in_span, "missing `in` in `for` loop") @@ -1387,7 +1389,8 @@ impl<'a> Parser<'a> { crate fn parse_arm(&mut self) -> PResult<'a, Arm> { let attrs = self.parse_outer_attributes()?; let lo = self.token.span; - let pats = self.parse_pats()?; + // FIXME(or_patterns, Centril | dlrobertson): use `parse_top_pat` instead. + let pat = self.parse_top_pat_unpack(GateOr::No)?; let guard = if self.eat_keyword(kw::If) { Some(self.parse_expr()?) } else { @@ -1448,7 +1451,7 @@ impl<'a> Parser<'a> { Ok(ast::Arm { attrs, - pats, + pats: pat, // FIXME(or_patterns, Centril | dlrobertson): this should just be `pat,`. 
guard, body: expr, span: lo.to(hi), diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs index 72819c99660..59a3ade9c30 100644 --- a/src/libsyntax/parse/parser/item.rs +++ b/src/libsyntax/parse/parser/item.rs @@ -422,7 +422,7 @@ impl<'a> Parser<'a> { } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) { let ident = self.parse_ident().unwrap(); self.bump(); // `(` - let kw_name = if let Ok(Some(_)) = self.parse_self_arg_with_attrs() + let kw_name = if let Ok(Some(_)) = self.parse_self_parameter_with_attrs() .map_err(|mut e| e.cancel()) { "method" @@ -475,7 +475,7 @@ impl<'a> Parser<'a> { self.eat_to_tokens(&[&token::Gt]); self.bump(); // `>` let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(token::Paren)) { - if let Ok(Some(_)) = self.parse_self_arg_with_attrs() + if let Ok(Some(_)) = self.parse_self_parameter_with_attrs() .map_err(|mut e| e.cancel()) { ("fn", "method", false) @@ -825,6 +825,7 @@ impl<'a> Parser<'a> { self.is_keyword_ahead(1, &[ kw::Impl, kw::Const, + kw::Async, kw::Fn, kw::Unsafe, kw::Extern, @@ -860,7 +861,7 @@ impl<'a> Parser<'a> { let ident = self.parse_ident()?; let mut generics = self.parse_generics()?; let decl = self.parse_fn_decl_with_self(|p| { - p.parse_arg_general(true, false, |_| true) + p.parse_param_general(true, false, |_| true) })?; generics.where_clause = self.parse_where_clause()?; *at_end = true; @@ -1039,7 +1040,7 @@ impl<'a> Parser<'a> { // We don't allow argument names to be left off in edition 2018. let is_name_required = p.token.span.rust_2018(); - p.parse_arg_general(true, false, |_| is_name_required) + p.parse_param_general(true, false, |_| is_name_required) })?; generics.where_clause = self.parse_where_clause()?; @@ -1290,7 +1291,7 @@ impl<'a> Parser<'a> { /// Parses the argument list and result type of a function declaration. fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P<FnDecl>> { - let (args, c_variadic) = self.parse_fn_args(true, allow_c_variadic)?; + let (args, c_variadic) = self.parse_fn_params(true, allow_c_variadic)?; let ret_ty = self.parse_ret_ty(true)?; Ok(P(FnDecl { diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index fd458aec743..823f880337d 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -4,6 +4,7 @@ use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::ptr::P; use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac}; use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind}; +use crate::mut_visit::{noop_visit_pat, MutVisitor}; use crate::parse::token::{self}; use crate::print::pprust; use crate::source_map::{respan, Span, Spanned}; @@ -12,80 +13,184 @@ use crate::ThinVec; use errors::{Applicability, DiagnosticBuilder}; +type Expected = Option<&'static str>; + +/// `Expected` for function and lambda parameter patterns. +pub(super) const PARAM_EXPECTED: Expected = Some("parameter name"); + +/// Whether or not an or-pattern should be gated when occurring in the current context. +#[derive(PartialEq)] +pub enum GateOr { Yes, No } + +/// Whether or not to recover a `,` when parsing or-patterns. +#[derive(PartialEq, Copy, Clone)] +enum RecoverComma { Yes, No } + impl<'a> Parser<'a> { /// Parses a pattern. - pub fn parse_pat( + /// + /// Corresponds to `pat<no_top_alt>` in RFC 2535 and does not admit or-patterns + /// at the top level. 
Used when parsing the parameters of lambda expressions, + /// functions, function pointers, and `pat` macro fragments. + pub fn parse_pat(&mut self, expected: Expected) -> PResult<'a, P<Pat>> { + self.parse_pat_with_range_pat(true, expected) + } + + // FIXME(or_patterns, Centril | dlrobertson): + // remove this and use `parse_top_pat` everywhere it is used instead. + pub(super) fn parse_top_pat_unpack(&mut self, gate_or: GateOr) -> PResult<'a, Vec<P<Pat>>> { + self.parse_top_pat(gate_or) + .map(|pat| pat.and_then(|pat| match pat.node { + PatKind::Or(pats) => pats, + node => vec![self.mk_pat(pat.span, node)], + })) + } + + /// Entry point to the main pattern parser. + /// Corresponds to `top_pat` in RFC 2535 and allows or-pattern at the top level. + pub(super) fn parse_top_pat(&mut self, gate_or: GateOr) -> PResult<'a, P<Pat>> { + // Allow a '|' before the pats (RFCs 1925, 2530, and 2535). + let gated_leading_vert = self.eat_or_separator() && gate_or == GateOr::Yes; + let leading_vert_span = self.prev_span; + + // Parse the possibly-or-pattern. + let pat = self.parse_pat_with_or(None, gate_or, RecoverComma::Yes)?; + + // If we parsed a leading `|` which should be gated, + // and no other gated or-pattern has been parsed thus far, + // then we should really gate the leading `|`. + // This complicated procedure is done purely for diagnostics UX. + if gated_leading_vert { + let mut or_pattern_spans = self.sess.gated_spans.or_patterns.borrow_mut(); + if or_pattern_spans.is_empty() { + or_pattern_spans.push(leading_vert_span); + } + } + + Ok(pat) + } + + /// Parse the pattern for a function or function pointer parameter. + /// Special recovery is provided for or-patterns and leading `|`. + pub(super) fn parse_fn_param_pat(&mut self) -> PResult<'a, P<Pat>> { + self.recover_leading_vert("not allowed in a parameter pattern"); + let pat = self.parse_pat_with_or(PARAM_EXPECTED, GateOr::No, RecoverComma::No)?; + + if let PatKind::Or(..) = &pat.node { + self.ban_illegal_fn_param_or_pat(&pat); + } + + Ok(pat) + } + + /// Ban `A | B` immediately in a parameter pattern and suggest wrapping in parens. + fn ban_illegal_fn_param_or_pat(&self, pat: &Pat) { + let msg = "wrap the pattern in parenthesis"; + let fix = format!("({})", pprust::pat_to_string(pat)); + self.struct_span_err(pat.span, "an or-pattern parameter must be wrapped in parenthesis") + .span_suggestion(pat.span, msg, fix, Applicability::MachineApplicable) + .emit(); + } + + /// Parses a pattern, that may be a or-pattern (e.g. `Foo | Bar` in `Some(Foo | Bar)`). + /// Corresponds to `pat<allow_top_alt>` in RFC 2535. + fn parse_pat_with_or( &mut self, - expected: Option<&'static str> + expected: Expected, + gate_or: GateOr, + rc: RecoverComma, ) -> PResult<'a, P<Pat>> { - self.parse_pat_with_range_pat(true, expected) + // Parse the first pattern. + let first_pat = self.parse_pat(expected)?; + self.maybe_recover_unexpected_comma(first_pat.span, rc)?; + + // If the next token is not a `|`, + // this is not an or-pattern and we should exit here. 
+ if !self.check(&token::BinOp(token::Or)) && self.token != token::OrOr { + return Ok(first_pat) + } + + let lo = first_pat.span; + let mut pats = vec![first_pat]; + while self.eat_or_separator() { + let pat = self.parse_pat(expected).map_err(|mut err| { + err.span_label(lo, "while parsing this or-pattern starting here"); + err + })?; + self.maybe_recover_unexpected_comma(pat.span, rc)?; + pats.push(pat); + } + let or_pattern_span = lo.to(self.prev_span); + + // Feature gate the or-pattern if instructed: + if gate_or == GateOr::Yes { + self.sess.gated_spans.or_patterns.borrow_mut().push(or_pattern_span); + } + + Ok(self.mk_pat(or_pattern_span, PatKind::Or(pats))) } - /// Parses patterns, separated by '|' s. - pub(super) fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> { - // Allow a '|' before the pats (RFC 1925 + RFC 2530) - self.eat(&token::BinOp(token::Or)); - - let mut pats = Vec::new(); - loop { - pats.push(self.parse_top_level_pat()?); - - if self.token == token::OrOr { - self.struct_span_err(self.token.span, "unexpected token `||` after pattern") - .span_suggestion( - self.token.span, - "use a single `|` to specify multiple patterns", - "|".to_owned(), - Applicability::MachineApplicable - ) - .emit(); + /// Eat the or-pattern `|` separator. + /// If instead a `||` token is encountered, recover and pretend we parsed `|`. + fn eat_or_separator(&mut self) -> bool { + match self.token.kind { + token::OrOr => { + // Found `||`; Recover and pretend we parsed `|`. + self.ban_unexpected_or_or(); self.bump(); - } else if self.eat(&token::BinOp(token::Or)) { - // This is a No-op. Continue the loop to parse the next - // pattern. - } else { - return Ok(pats); + true } - }; + _ => self.eat(&token::BinOp(token::Or)), + } } - /// A wrapper around `parse_pat` with some special error handling for the - /// "top-level" patterns in a match arm, `for` loop, `let`, &c. (in contrast - /// to subpatterns within such). - pub(super) fn parse_top_level_pat(&mut self) -> PResult<'a, P<Pat>> { - let pat = self.parse_pat(None)?; - if self.token == token::Comma { - // An unexpected comma after a top-level pattern is a clue that the - // user (perhaps more accustomed to some other language) forgot the - // parentheses in what should have been a tuple pattern; return a - // suggestion-enhanced error here rather than choking on the comma - // later. - let comma_span = self.token.span; - self.bump(); - if let Err(mut err) = self.skip_pat_list() { - // We didn't expect this to work anyway; we just wanted - // to advance to the end of the comma-sequence so we know - // the span to suggest parenthesizing - err.cancel(); - } - let seq_span = pat.span.to(self.prev_span); - let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern"); - if let Ok(seq_snippet) = self.span_to_snippet(seq_span) { - err.span_suggestion( - seq_span, - "try adding parentheses to match on a tuple..", - format!("({})", seq_snippet), - Applicability::MachineApplicable - ).span_suggestion( - seq_span, - "..or a vertical bar to match on multiple alternatives", - format!("{}", seq_snippet.replace(",", " |")), - Applicability::MachineApplicable - ); - } - return Err(err); + /// We have parsed `||` instead of `|`. Error and suggest `|` instead. 
+ fn ban_unexpected_or_or(&mut self) { + self.struct_span_err(self.token.span, "unexpected token `||` after pattern") + .span_suggestion( + self.token.span, + "use a single `|` to separate multiple alternative patterns", + "|".to_owned(), + Applicability::MachineApplicable + ) + .emit(); + } + + /// Some special error handling for the "top-level" patterns in a match arm, + /// `for` loop, `let`, &c. (in contrast to subpatterns within such). + fn maybe_recover_unexpected_comma(&mut self, lo: Span, rc: RecoverComma) -> PResult<'a, ()> { + if rc == RecoverComma::No || self.token != token::Comma { + return Ok(()); } - Ok(pat) + + // An unexpected comma after a top-level pattern is a clue that the + // user (perhaps more accustomed to some other language) forgot the + // parentheses in what should have been a tuple pattern; return a + // suggestion-enhanced error here rather than choking on the comma later. + let comma_span = self.token.span; + self.bump(); + if let Err(mut err) = self.skip_pat_list() { + // We didn't expect this to work anyway; we just wanted to advance to the + // end of the comma-sequence so we know the span to suggest parenthesizing. + err.cancel(); + } + let seq_span = lo.to(self.prev_span); + let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern"); + if let Ok(seq_snippet) = self.span_to_snippet(seq_span) { + err.span_suggestion( + seq_span, + "try adding parentheses to match on a tuple..", + format!("({})", seq_snippet), + Applicability::MachineApplicable + ) + .span_suggestion( + seq_span, + "..or a vertical bar to match on multiple alternatives", + format!("{}", seq_snippet.replace(",", " |")), + Applicability::MachineApplicable + ); + } + Err(err) } /// Parse and throw away a parentesized comma separated @@ -100,32 +205,26 @@ impl<'a> Parser<'a> { Ok(()) } - /// Parses a pattern, that may be a or-pattern (e.g. `Some(Foo | Bar)`). - fn parse_pat_with_or(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> { - // Parse the first pattern. - let first_pat = self.parse_pat(expected)?; - - // If the next token is not a `|`, this is not an or-pattern and - // we should exit here. - if !self.check(&token::BinOp(token::Or)) { - return Ok(first_pat) - } + /// Recursive possibly-or-pattern parser with recovery for an erroneous leading `|`. + /// See `parse_pat_with_or` for details on parsing or-patterns. + fn parse_pat_with_or_inner(&mut self) -> PResult<'a, P<Pat>> { + self.recover_leading_vert("only allowed in a top-level pattern"); + self.parse_pat_with_or(None, GateOr::Yes, RecoverComma::No) + } - let lo = first_pat.span; + /// Recover if `|` or `||` is here. + /// The user is thinking that a leading `|` is allowed in this position. 
+ fn recover_leading_vert(&mut self, ctx: &str) { + if let token::BinOp(token::Or) | token::OrOr = self.token.kind { + let span = self.token.span; + let rm_msg = format!("remove the `{}`", pprust::token_to_string(&self.token)); - let mut pats = vec![first_pat]; + self.struct_span_err(span, &format!("a leading `|` is {}", ctx)) + .span_suggestion(span, &rm_msg, String::new(), Applicability::MachineApplicable) + .emit(); - while self.eat(&token::BinOp(token::Or)) { - pats.push(self.parse_pat_with_range_pat( - true, expected - )?); + self.bump(); } - - let or_pattern_span = lo.to(self.prev_span); - - self.sess.or_pattern_spans.borrow_mut().push(or_pattern_span); - - Ok(self.mk_pat(or_pattern_span, PatKind::Or(pats))) } /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are @@ -133,7 +232,7 @@ impl<'a> Parser<'a> { fn parse_pat_with_range_pat( &mut self, allow_range_pat: bool, - expected: Option<&'static str>, + expected: Expected, ) -> PResult<'a, P<Pat>> { maybe_recover_from_interpolated_ty_qpath!(self, true); maybe_whole!(self, NtPat, |x| x); @@ -144,7 +243,11 @@ impl<'a> Parser<'a> { token::OpenDelim(token::Paren) => self.parse_pat_tuple_or_parens()?, token::OpenDelim(token::Bracket) => { // Parse `[pat, pat,...]` as a slice pattern. - PatKind::Slice(self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?.0) + let (pats, _) = self.parse_delim_comma_seq( + token::Bracket, + |p| p.parse_pat_with_or_inner(), + )?; + PatKind::Slice(pats) } token::DotDot => { self.bump(); @@ -171,7 +274,7 @@ impl<'a> Parser<'a> { // Parse _ PatKind::Wild } else if self.eat_keyword(kw::Mut) { - self.recover_pat_ident_mut_first()? + self.parse_pat_ident_mut()? } else if self.eat_keyword(kw::Ref) { // Parse ref ident @ pat / ref mut ident @ pat let mutbl = self.parse_mutability(); @@ -179,13 +282,12 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(kw::Box) { // Parse `box pat` PatKind::Box(self.parse_pat_with_range_pat(false, None)?) - } else if self.token.is_ident() && !self.token.is_reserved_ident() && - self.parse_as_ident() { + } else if self.can_be_ident_pat() { // Parse `ident @ pat` // This can give false positives and parse nullary enums, // they are dealt with later in resolve. self.parse_pat_ident(BindingMode::ByValue(Mutability::Immutable))? - } else if self.token.is_path_start() { + } else if self.is_start_of_pat_with_path() { // Parse pattern starting with a path let (qself, path) = if self.eat_lt() { // Parse a qualified path @@ -255,7 +357,7 @@ impl<'a> Parser<'a> { } /// Parse `&pat` / `&mut pat`. - fn parse_pat_deref(&mut self, expected: Option<&'static str>) -> PResult<'a, PatKind> { + fn parse_pat_deref(&mut self, expected: Expected) -> PResult<'a, PatKind> { self.expect_and()?; let mutbl = self.parse_mutability(); @@ -271,9 +373,7 @@ impl<'a> Parser<'a> { /// Parse a tuple or parenthesis pattern. fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> { - let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| { - p.parse_pat_with_or(None) - })?; + let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or_inner())?; // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. @@ -284,24 +384,108 @@ impl<'a> Parser<'a> { }) } + /// Parse a mutable binding with the `mut` token already eaten. 
+ fn parse_pat_ident_mut(&mut self) -> PResult<'a, PatKind> { + let mut_span = self.prev_span; + + if self.eat_keyword(kw::Ref) { + return self.recover_mut_ref_ident(mut_span) + } + + self.recover_additional_muts(); + + // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`. + if let token::Interpolated(ref nt) = self.token.kind { + if let token::NtPat(_) = **nt { + self.expected_ident_found().emit(); + } + } + + // Parse the pattern we hope to be an identifier. + let mut pat = self.parse_pat(Some("identifier"))?; + + // Add `mut` to any binding in the parsed pattern. + let changed_any_binding = Self::make_all_value_bindings_mutable(&mut pat); + + // Unwrap; If we don't have `mut $ident`, error. + let pat = pat.into_inner(); + match &pat.node { + PatKind::Ident(..) => {} + _ => self.ban_mut_general_pat(mut_span, &pat, changed_any_binding), + } + + Ok(pat.node) + } + /// Recover on `mut ref? ident @ pat` and suggest /// that the order of `mut` and `ref` is incorrect. - fn recover_pat_ident_mut_first(&mut self) -> PResult<'a, PatKind> { - let mutref_span = self.prev_span.to(self.token.span); - let binding_mode = if self.eat_keyword(kw::Ref) { - self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect") - .span_suggestion( - mutref_span, - "try switching the order", - "ref mut".into(), - Applicability::MachineApplicable - ) - .emit(); - BindingMode::ByRef(Mutability::Mutable) + fn recover_mut_ref_ident(&mut self, lo: Span) -> PResult<'a, PatKind> { + let mutref_span = lo.to(self.prev_span); + self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect") + .span_suggestion( + mutref_span, + "try switching the order", + "ref mut".into(), + Applicability::MachineApplicable + ) + .emit(); + + self.parse_pat_ident(BindingMode::ByRef(Mutability::Mutable)) + } + + /// Turn all by-value immutable bindings in a pattern into mutable bindings. + /// Returns `true` if any change was made. + fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool { + struct AddMut(bool); + impl MutVisitor for AddMut { + fn visit_pat(&mut self, pat: &mut P<Pat>) { + if let PatKind::Ident(BindingMode::ByValue(ref mut m @ Mutability::Immutable), ..) + = pat.node + { + *m = Mutability::Mutable; + self.0 = true; + } + noop_visit_pat(pat, self); + } + } + + let mut add_mut = AddMut(false); + add_mut.visit_pat(pat); + add_mut.0 + } + + /// Error on `mut $pat` where `$pat` is not an ident. + fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) { + let span = lo.to(pat.span); + let fix = pprust::pat_to_string(&pat); + let (problem, suggestion) = if changed_any_binding { + ("`mut` must be attached to each individual binding", "add `mut` to each binding") } else { - BindingMode::ByValue(Mutability::Mutable) + ("`mut` must be followed by a named binding", "remove the `mut` prefix") }; - self.parse_pat_ident(binding_mode) + self.struct_span_err(span, problem) + .span_suggestion(span, suggestion, fix, Applicability::MachineApplicable) + .note("`mut` may be followed by `variable` and `variable @ pattern`") + .emit() + } + + /// Eat any extraneous `mut`s and error + recover if we ate any. 
+ fn recover_additional_muts(&mut self) { + let lo = self.token.span; + while self.eat_keyword(kw::Mut) {} + if lo == self.token.span { + return; + } + + let span = lo.to(self.prev_span); + self.struct_span_err(span, "`mut` on a binding may not be repeated") + .span_suggestion( + span, + "remove the additional `mut`s", + String::new(), + Applicability::MachineApplicable, + ) + .emit(); } /// Parse macro invocation @@ -361,7 +545,7 @@ impl<'a> Parser<'a> { fn fatal_unexpected_non_pat( &mut self, mut err: DiagnosticBuilder<'a>, - expected: Option<&'static str>, + expected: Expected, ) -> PResult<'a, P<Pat>> { self.cancel(&mut err); @@ -379,17 +563,6 @@ impl<'a> Parser<'a> { Err(err) } - // Helper function to decide whether to parse as ident binding - // or to try to do something more complex like range patterns. - fn parse_as_ident(&mut self) -> bool { - self.look_ahead(1, |t| match t.kind { - token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) | - token::DotDotDot | token::DotDotEq | token::DotDot | - token::ModSep | token::Not => false, - _ => true, - }) - } - /// Is the current token suitable as the start of a range patterns end? fn is_pat_range_end_start(&self) -> bool { self.token.is_path_start() // e.g. `MY_CONST`; @@ -463,6 +636,30 @@ impl<'a> Parser<'a> { } } + /// Is this the start of a pattern beginning with a path? + fn is_start_of_pat_with_path(&mut self) -> bool { + self.check_path() + // Just for recovery (see `can_be_ident`). + || self.token.is_ident() && !self.token.is_bool_lit() && !self.token.is_keyword(kw::In) + } + + /// Would `parse_pat_ident` be appropriate here? + fn can_be_ident_pat(&mut self) -> bool { + self.check_ident() + && !self.token.is_bool_lit() // Avoid `true` or `false` as a binding as it is a literal. + && !self.token.is_path_segment_keyword() // Avoid e.g. `Self` as it is a path. + // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`. + && !self.token.is_keyword(kw::In) + && self.look_ahead(1, |t| match t.kind { // Try to do something more complex? + token::OpenDelim(token::Paren) // A tuple struct pattern. + | token::OpenDelim(token::Brace) // A struct pattern. + | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern. + | token::ModSep // A tuple / struct variant pattern. + | token::Not => false, // A macro expanding to a pattern. + _ => true, + }) + } + /// Parses `ident` or `ident @ pat`. /// Used by the copy foo and ref foo patterns to give a good /// error message when parsing mistakes like `ref foo(a, b)`. 
@@ -516,7 +713,7 @@ impl<'a> Parser<'a> { err.span_label(self.token.span, msg); return Err(err); } - let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or(None))?; + let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or_inner())?; Ok(PatKind::TupleStruct(path, fields)) } @@ -660,7 +857,7 @@ impl<'a> Parser<'a> { // Parsing a pattern of the form "fieldname: pat" let fieldname = self.parse_field_name()?; self.bump(); - let pat = self.parse_pat_with_or(None)?; + let pat = self.parse_pat_with_or_inner()?; hi = pat.span; (pat, fieldname, false) } else { diff --git a/src/libsyntax/parse/parser/path.rs b/src/libsyntax/parse/parser/path.rs index 3eb4d45045a..d4b13cc2e01 100644 --- a/src/libsyntax/parse/parser/path.rs +++ b/src/libsyntax/parse/parser/path.rs @@ -423,7 +423,7 @@ impl<'a> Parser<'a> { // FIXME(const_generics): to distinguish between idents for types and consts, // we should introduce a GenericArg::Ident in the AST and distinguish when // lowering to the HIR. For now, idents for const args are not permitted. - if self.token.is_keyword(kw::True) || self.token.is_keyword(kw::False) { + if self.token.is_bool_lit() { self.parse_literal_maybe_minus()? } else { return Err( diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index c911caba4cd..651ebf6342e 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -1,6 +1,7 @@ use super::{Parser, PResult, Restrictions, PrevTokenKind, SemiColonMode, BlockMode}; use super::expr::LhsExpr; use super::path::PathStyle; +use super::pat::GateOr; use crate::ptr::P; use crate::{maybe_whole, ThinVec}; @@ -207,7 +208,7 @@ impl<'a> Parser<'a> { /// Parses a local variable declaration. fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> { let lo = self.prev_span; - let pat = self.parse_top_level_pat()?; + let pat = self.parse_top_pat(GateOr::Yes)?; let (err, ty) = if self.eat(&token::Colon) { // Save the state of the parser before parsing type normally, in case there is a `:` diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs index 337702b8d30..465e31ac57e 100644 --- a/src/libsyntax/parse/parser/ty.rs +++ b/src/libsyntax/parse/parser/ty.rs @@ -292,7 +292,7 @@ impl<'a> Parser<'a> { }; self.expect_keyword(kw::Fn)?; - let (inputs, c_variadic) = self.parse_fn_args(false, true)?; + let (inputs, c_variadic) = self.parse_fn_params(false, true)?; let ret_ty = self.parse_ret_ty(false)?; let decl = P(FnDecl { inputs, diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 1865f925165..fe3b51aa246 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -409,7 +409,7 @@ impl Token { crate fn expect_lit(&self) -> Lit { match self.kind { Literal(lit) => lit, - _=> panic!("`expect_lit` called on non-literal"), + _ => panic!("`expect_lit` called on non-literal"), } } @@ -417,10 +417,8 @@ impl Token { /// for example a '-42', or one of the boolean idents). crate fn can_begin_literal_or_bool(&self) -> bool { match self.kind { - Literal(..) => true, - BinOp(Minus) => true, - Ident(name, false) if name == kw::True => true, - Ident(name, false) if name == kw::False => true, + Literal(..) | BinOp(Minus) => true, + Ident(name, false) if name.is_bool_lit() => true, Interpolated(ref nt) => match **nt { NtLiteral(..) => true, _ => false, @@ -457,6 +455,7 @@ impl Token { pub fn is_ident(&self) -> bool { self.ident().is_some() } + /// Returns `true` if the token is a lifetime. 
crate fn is_lifetime(&self) -> bool { self.lifetime().is_some() @@ -508,45 +507,43 @@ impl Token { /// Returns `true` if the token is a given keyword, `kw`. pub fn is_keyword(&self, kw: Symbol) -> bool { - self.ident().map(|(id, is_raw)| id.name == kw && !is_raw).unwrap_or(false) + self.is_non_raw_ident_where(|id| id.name == kw) } crate fn is_path_segment_keyword(&self) -> bool { - match self.ident() { - Some((id, false)) => id.is_path_segment_keyword(), - _ => false, - } + self.is_non_raw_ident_where(ast::Ident::is_path_segment_keyword) } // Returns true for reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. crate fn is_special_ident(&self) -> bool { - match self.ident() { - Some((id, false)) => id.is_special(), - _ => false, - } + self.is_non_raw_ident_where(ast::Ident::is_special) } /// Returns `true` if the token is a keyword used in the language. crate fn is_used_keyword(&self) -> bool { - match self.ident() { - Some((id, false)) => id.is_used_keyword(), - _ => false, - } + self.is_non_raw_ident_where(ast::Ident::is_used_keyword) } /// Returns `true` if the token is a keyword reserved for possible future use. crate fn is_unused_keyword(&self) -> bool { - match self.ident() { - Some((id, false)) => id.is_unused_keyword(), - _ => false, - } + self.is_non_raw_ident_where(ast::Ident::is_unused_keyword) } /// Returns `true` if the token is either a special identifier or a keyword. pub fn is_reserved_ident(&self) -> bool { + self.is_non_raw_ident_where(ast::Ident::is_reserved) + } + + /// Returns `true` if the token is the identifier `true` or `false`. + crate fn is_bool_lit(&self) -> bool { + self.is_non_raw_ident_where(|id| id.name.is_bool_lit()) + } + + /// Returns `true` if the token is a non-raw identifier for which `pred` holds. + fn is_non_raw_ident_where(&self, pred: impl FnOnce(ast::Ident) -> bool) -> bool { match self.ident() { - Some((id, false)) => id.is_reserved(), + Some((id, false)) => pred(id), _ => false, } } |
