Diffstat (limited to 'src/librustc_parse')
| -rw-r--r-- | src/librustc_parse/Cargo.toml | 1 |
| -rw-r--r-- | src/librustc_parse/config.rs | 549 |
| -rw-r--r-- | src/librustc_parse/lexer/mod.rs | 124 |
| -rw-r--r-- | src/librustc_parse/lexer/tokentrees.rs | 7 |
| -rw-r--r-- | src/librustc_parse/lib.rs | 83 |
| -rw-r--r-- | src/librustc_parse/parser/attr.rs | 231 |
| -rw-r--r-- | src/librustc_parse/parser/diagnostics.rs | 245 |
| -rw-r--r-- | src/librustc_parse/parser/expr.rs | 327 |
| -rw-r--r-- | src/librustc_parse/parser/generics.rs | 19 |
| -rw-r--r-- | src/librustc_parse/parser/item.rs | 233 |
| -rw-r--r-- | src/librustc_parse/parser/mod.rs | 96 |
| -rw-r--r-- | src/librustc_parse/parser/module.rs | 306 |
| -rw-r--r-- | src/librustc_parse/parser/pat.rs | 43 |
| -rw-r--r-- | src/librustc_parse/parser/path.rs | 234 |
| -rw-r--r-- | src/librustc_parse/parser/stmt.rs | 91 |
| -rw-r--r-- | src/librustc_parse/parser/ty.rs | 112 |
| -rw-r--r-- | src/librustc_parse/validate_attr.rs | 2 |
17 files changed, 1001 insertions, 1702 deletions
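Most of the parser changes below tighten diagnostics (raw-string terminators, postfix operators after `as` casts, chained comparisons). As a rough illustration only — the snippet is hypothetical input, not code from the patch — this is the kind of source the new messages quoted in the hunks below are aimed at:

```rust
fn main() {
    // The new `check_too_many_raw_str_terminators` path reports
    // "too many `#` when terminating raw string" and suggests
    // removing the stray trailing `#`.
    let _s = r#"hello"##;

    // The new `parse_and_disallow_postfix_after_cast` helper rejects this with
    // "casts cannot be followed by a method call" and suggests parenthesizing
    // the cast, i.e. `(1i32 as f64).sqrt()`.
    let _x = 1i32 as f64.sqrt();
}
```

Both error messages appear verbatim in the `parser/diagnostics.rs` and `parser/expr.rs` hunks further down.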
diff --git a/src/librustc_parse/Cargo.toml b/src/librustc_parse/Cargo.toml index b02cabab0a8..a73d30e860b 100644 --- a/src/librustc_parse/Cargo.toml +++ b/src/librustc_parse/Cargo.toml @@ -13,7 +13,6 @@ doctest = false bitflags = "1.0" log = "0.4" rustc_ast_pretty = { path = "../librustc_ast_pretty" } -rustc_attr = { path = "../librustc_attr" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_feature = { path = "../librustc_feature" } rustc_lexer = { path = "../librustc_lexer" } diff --git a/src/librustc_parse/config.rs b/src/librustc_parse/config.rs deleted file mode 100644 index 17b9e78e5df..00000000000 --- a/src/librustc_parse/config.rs +++ /dev/null @@ -1,549 +0,0 @@ -//! Process the potential `cfg` attributes on a module. -//! Also determine if the module should be included in this configuration. -//! -//! This module properly belongs in rustc_expand, but for now it's tied into -//! parsing, so we leave it here to avoid complicated out-of-line dependencies. -//! -//! A principled solution to this wrong location would be to implement [#64197]. -//! -//! [#64197]: https://github.com/rust-lang/rust/issues/64197 - -use crate::{parse_in, validate_attr}; -use rustc_ast::ast::{self, AttrItem, Attribute, MetaItem}; -use rustc_ast::attr::HasAttrs; -use rustc_ast::mut_visit::*; -use rustc_ast::ptr::P; -use rustc_ast::util::map_in_place::MapInPlace; -use rustc_attr as attr; -use rustc_data_structures::fx::FxHashMap; -use rustc_errors::{error_code, struct_span_err, Applicability, Handler}; -use rustc_feature::{Feature, Features, State as FeatureState}; -use rustc_feature::{ - ACCEPTED_FEATURES, ACTIVE_FEATURES, REMOVED_FEATURES, STABLE_REMOVED_FEATURES, -}; -use rustc_session::parse::{feature_err, ParseSess}; -use rustc_span::edition::{Edition, ALL_EDITIONS}; -use rustc_span::symbol::{sym, Symbol}; -use rustc_span::{Span, DUMMY_SP}; - -use smallvec::SmallVec; - -/// A folder that strips out items that do not belong in the current configuration. -pub struct StripUnconfigured<'a> { - pub sess: &'a ParseSess, - pub features: Option<&'a Features>, -} - -fn get_features( - span_handler: &Handler, - krate_attrs: &[ast::Attribute], - crate_edition: Edition, - allow_features: &Option<Vec<String>>, -) -> Features { - fn feature_removed(span_handler: &Handler, span: Span, reason: Option<&str>) { - let mut err = struct_span_err!(span_handler, span, E0557, "feature has been removed"); - err.span_label(span, "feature has been removed"); - if let Some(reason) = reason { - err.note(reason); - } - err.emit(); - } - - fn active_features_up_to(edition: Edition) -> impl Iterator<Item = &'static Feature> { - ACTIVE_FEATURES.iter().filter(move |feature| { - if let Some(feature_edition) = feature.edition { - feature_edition <= edition - } else { - false - } - }) - } - - let mut features = Features::default(); - let mut edition_enabled_features = FxHashMap::default(); - - for &edition in ALL_EDITIONS { - if edition <= crate_edition { - // The `crate_edition` implies its respective umbrella feature-gate - // (i.e., `#![feature(rust_20XX_preview)]` isn't needed on edition 20XX). - edition_enabled_features.insert(edition.feature_name(), edition); - } - } - - for feature in active_features_up_to(crate_edition) { - feature.set(&mut features, DUMMY_SP); - edition_enabled_features.insert(feature.name, crate_edition); - } - - // Process the edition umbrella feature-gates first, to ensure - // `edition_enabled_features` is completed before it's queried. 
- for attr in krate_attrs { - if !attr.check_name(sym::feature) { - continue; - } - - let list = match attr.meta_item_list() { - Some(list) => list, - None => continue, - }; - - for mi in list { - if !mi.is_word() { - continue; - } - - let name = mi.name_or_empty(); - - let edition = ALL_EDITIONS.iter().find(|e| name == e.feature_name()).copied(); - if let Some(edition) = edition { - if edition <= crate_edition { - continue; - } - - for feature in active_features_up_to(edition) { - // FIXME(Manishearth) there is currently no way to set - // lib features by edition - feature.set(&mut features, DUMMY_SP); - edition_enabled_features.insert(feature.name, edition); - } - } - } - } - - for attr in krate_attrs { - if !attr.check_name(sym::feature) { - continue; - } - - let list = match attr.meta_item_list() { - Some(list) => list, - None => continue, - }; - - let bad_input = |span| { - struct_span_err!(span_handler, span, E0556, "malformed `feature` attribute input") - }; - - for mi in list { - let name = match mi.ident() { - Some(ident) if mi.is_word() => ident.name, - Some(ident) => { - bad_input(mi.span()) - .span_suggestion( - mi.span(), - "expected just one word", - format!("{}", ident.name), - Applicability::MaybeIncorrect, - ) - .emit(); - continue; - } - None => { - bad_input(mi.span()).span_label(mi.span(), "expected just one word").emit(); - continue; - } - }; - - if let Some(edition) = edition_enabled_features.get(&name) { - let msg = - &format!("the feature `{}` is included in the Rust {} edition", name, edition); - span_handler.struct_span_warn_with_code(mi.span(), msg, error_code!(E0705)).emit(); - continue; - } - - if ALL_EDITIONS.iter().any(|e| name == e.feature_name()) { - // Handled in the separate loop above. - continue; - } - - let removed = REMOVED_FEATURES.iter().find(|f| name == f.name); - let stable_removed = STABLE_REMOVED_FEATURES.iter().find(|f| name == f.name); - if let Some(Feature { state, .. }) = removed.or(stable_removed) { - if let FeatureState::Removed { reason } | FeatureState::Stabilized { reason } = - state - { - feature_removed(span_handler, mi.span(), *reason); - continue; - } - } - - if let Some(Feature { since, .. }) = ACCEPTED_FEATURES.iter().find(|f| name == f.name) { - let since = Some(Symbol::intern(since)); - features.declared_lang_features.push((name, mi.span(), since)); - continue; - } - - if let Some(allowed) = allow_features.as_ref() { - if allowed.iter().find(|&f| name.as_str() == *f).is_none() { - struct_span_err!( - span_handler, - mi.span(), - E0725, - "the feature `{}` is not in the list of allowed features", - name - ) - .emit(); - continue; - } - } - - if let Some(f) = ACTIVE_FEATURES.iter().find(|f| name == f.name) { - f.set(&mut features, mi.span()); - features.declared_lang_features.push((name, mi.span(), None)); - continue; - } - - features.declared_lib_features.push((name, mi.span())); - } - } - - features -} - -// `cfg_attr`-process the crate's attributes and compute the crate's features. -pub fn features( - mut krate: ast::Crate, - sess: &ParseSess, - edition: Edition, - allow_features: &Option<Vec<String>>, -) -> (ast::Crate, Features) { - let mut strip_unconfigured = StripUnconfigured { sess, features: None }; - - let unconfigured_attrs = krate.attrs.clone(); - let diag = &sess.span_diagnostic; - let err_count = diag.err_count(); - let features = match strip_unconfigured.configure(krate.attrs) { - None => { - // The entire crate is unconfigured. 
- krate.attrs = Vec::new(); - krate.module.items = Vec::new(); - Features::default() - } - Some(attrs) => { - krate.attrs = attrs; - let features = get_features(diag, &krate.attrs, edition, allow_features); - if err_count == diag.err_count() { - // Avoid reconfiguring malformed `cfg_attr`s. - strip_unconfigured.features = Some(&features); - strip_unconfigured.configure(unconfigured_attrs); - } - features - } - }; - (krate, features) -} - -#[macro_export] -macro_rules! configure { - ($this:ident, $node:ident) => { - match $this.configure($node) { - Some(node) => node, - None => return Default::default(), - } - }; -} - -const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]"; -const CFG_ATTR_NOTE_REF: &str = "for more information, visit \ - <https://doc.rust-lang.org/reference/conditional-compilation.html\ - #the-cfg_attr-attribute>"; - -impl<'a> StripUnconfigured<'a> { - pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> { - self.process_cfg_attrs(&mut node); - self.in_cfg(node.attrs()).then_some(node) - } - - /// Parse and expand all `cfg_attr` attributes into a list of attributes - /// that are within each `cfg_attr` that has a true configuration predicate. - /// - /// Gives compiler warnigns if any `cfg_attr` does not contain any - /// attributes and is in the original source code. Gives compiler errors if - /// the syntax of any `cfg_attr` is incorrect. - pub fn process_cfg_attrs<T: HasAttrs>(&mut self, node: &mut T) { - node.visit_attrs(|attrs| { - attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr)); - }); - } - - /// Parse and expand a single `cfg_attr` attribute into a list of attributes - /// when the configuration predicate is true, or otherwise expand into an - /// empty list of attributes. - /// - /// Gives a compiler warning when the `cfg_attr` contains no attributes and - /// is in the original source file. Gives a compiler error if the syntax of - /// the attribute is incorrect. - fn process_cfg_attr(&mut self, attr: Attribute) -> Vec<Attribute> { - if !attr.has_name(sym::cfg_attr) { - return vec![attr]; - } - - let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) { - None => return vec![], - Some(r) => r, - }; - - // Lint on zero attributes in source. - if expanded_attrs.is_empty() { - return vec![attr]; - } - - // At this point we know the attribute is considered used. - attr::mark_used(&attr); - - if !attr::cfg_matches(&cfg_predicate, self.sess, self.features) { - return vec![]; - } - - // We call `process_cfg_attr` recursively in case there's a - // `cfg_attr` inside of another `cfg_attr`. E.g. - // `#[cfg_attr(false, cfg_attr(true, some_attr))]`. 
- expanded_attrs - .into_iter() - .flat_map(|(item, span)| { - let attr = attr::mk_attr_from_item(attr.style, item, span); - self.process_cfg_attr(attr) - }) - .collect() - } - - fn parse_cfg_attr(&self, attr: &Attribute) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> { - match attr.get_normal_item().args { - ast::MacArgs::Delimited(dspan, delim, ref tts) if !tts.is_empty() => { - let msg = "wrong `cfg_attr` delimiters"; - validate_attr::check_meta_bad_delim(self.sess, dspan, delim, msg); - match parse_in(self.sess, tts.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) { - Ok(r) => return Some(r), - Err(mut e) => { - e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP)) - .note(CFG_ATTR_NOTE_REF) - .emit(); - } - } - } - _ => self.error_malformed_cfg_attr_missing(attr.span), - } - None - } - - fn error_malformed_cfg_attr_missing(&self, span: Span) { - self.sess - .span_diagnostic - .struct_span_err(span, "malformed `cfg_attr` attribute input") - .span_suggestion( - span, - "missing condition and attribute", - CFG_ATTR_GRAMMAR_HELP.to_string(), - Applicability::HasPlaceholders, - ) - .note(CFG_ATTR_NOTE_REF) - .emit(); - } - - /// Determines if a node with the given attributes should be included in this configuration. - pub fn in_cfg(&self, attrs: &[Attribute]) -> bool { - attrs.iter().all(|attr| { - if !is_cfg(attr) { - return true; - } - let meta_item = match validate_attr::parse_meta(self.sess, attr) { - Ok(meta_item) => meta_item, - Err(mut err) => { - err.emit(); - return true; - } - }; - let error = |span, msg, suggestion: &str| { - let mut err = self.sess.span_diagnostic.struct_span_err(span, msg); - if !suggestion.is_empty() { - err.span_suggestion( - span, - "expected syntax is", - suggestion.into(), - Applicability::MaybeIncorrect, - ); - } - err.emit(); - true - }; - let span = meta_item.span; - match meta_item.meta_item_list() { - None => error(span, "`cfg` is not followed by parentheses", "cfg(/* predicate */)"), - Some([]) => error(span, "`cfg` predicate is not specified", ""), - Some([_, .., l]) => error(l.span(), "multiple `cfg` predicates are specified", ""), - Some([single]) => match single.meta_item() { - Some(meta_item) => attr::cfg_matches(meta_item, self.sess, self.features), - None => error(single.span(), "`cfg` predicate key cannot be a literal", ""), - }, - } - }) - } - - /// Visit attributes on expression and statements (but not attributes on items in blocks). - fn visit_expr_attrs(&mut self, attrs: &[Attribute]) { - // flag the offending attributes - for attr in attrs.iter() { - self.maybe_emit_expr_attr_err(attr); - } - } - - /// If attributes are not allowed on expressions, emit an error for `attr` - pub fn maybe_emit_expr_attr_err(&self, attr: &Attribute) { - if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) { - let mut err = feature_err( - self.sess, - sym::stmt_expr_attributes, - attr.span, - "attributes on expressions are experimental", - ); - - if attr.is_doc_comment() { - err.help("`///` is for documentation comments. 
For a plain comment, use `//`."); - } - - err.emit(); - } - } - - pub fn configure_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) { - let ast::ForeignMod { abi: _, items } = foreign_mod; - items.flat_map_in_place(|item| self.configure(item)); - } - - pub fn configure_generic_params(&mut self, params: &mut Vec<ast::GenericParam>) { - params.flat_map_in_place(|param| self.configure(param)); - } - - fn configure_variant_data(&mut self, vdata: &mut ast::VariantData) { - match vdata { - ast::VariantData::Struct(fields, ..) | ast::VariantData::Tuple(fields, _) => { - fields.flat_map_in_place(|field| self.configure(field)) - } - ast::VariantData::Unit(_) => {} - } - } - - pub fn configure_item_kind(&mut self, item: &mut ast::ItemKind) { - match item { - ast::ItemKind::Struct(def, _generics) | ast::ItemKind::Union(def, _generics) => { - self.configure_variant_data(def) - } - ast::ItemKind::Enum(ast::EnumDef { variants }, _generics) => { - variants.flat_map_in_place(|variant| self.configure(variant)); - for variant in variants { - self.configure_variant_data(&mut variant.data); - } - } - _ => {} - } - } - - pub fn configure_expr_kind(&mut self, expr_kind: &mut ast::ExprKind) { - match expr_kind { - ast::ExprKind::Match(_m, arms) => { - arms.flat_map_in_place(|arm| self.configure(arm)); - } - ast::ExprKind::Struct(_path, fields, _base) => { - fields.flat_map_in_place(|field| self.configure(field)); - } - _ => {} - } - } - - pub fn configure_expr(&mut self, expr: &mut P<ast::Expr>) { - self.visit_expr_attrs(expr.attrs()); - - // If an expr is valid to cfg away it will have been removed by the - // outer stmt or expression folder before descending in here. - // Anything else is always required, and thus has to error out - // in case of a cfg attr. - // - // N.B., this is intentionally not part of the visit_expr() function - // in order for filter_map_expr() to be able to avoid this check - if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a)) { - let msg = "removing an expression is not supported in this position"; - self.sess.span_diagnostic.span_err(attr.span, msg); - } - - self.process_cfg_attrs(expr) - } - - pub fn configure_pat(&mut self, pat: &mut P<ast::Pat>) { - if let ast::PatKind::Struct(_path, fields, _etc) = &mut pat.kind { - fields.flat_map_in_place(|field| self.configure(field)); - } - } - - pub fn configure_fn_decl(&mut self, fn_decl: &mut ast::FnDecl) { - fn_decl.inputs.flat_map_in_place(|arg| self.configure(arg)); - } -} - -impl<'a> MutVisitor for StripUnconfigured<'a> { - fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) { - self.configure_foreign_mod(foreign_mod); - noop_visit_foreign_mod(foreign_mod, self); - } - - fn visit_item_kind(&mut self, item: &mut ast::ItemKind) { - self.configure_item_kind(item); - noop_visit_item_kind(item, self); - } - - fn visit_expr(&mut self, expr: &mut P<ast::Expr>) { - self.configure_expr(expr); - self.configure_expr_kind(&mut expr.kind); - noop_visit_expr(expr, self); - } - - fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> { - let mut expr = configure!(self, expr); - self.configure_expr_kind(&mut expr.kind); - noop_visit_expr(&mut expr, self); - Some(expr) - } - - fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { - noop_flat_map_stmt(configure!(self, stmt), self) - } - - fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { - noop_flat_map_item(configure!(self, item), self) - } - - fn flat_map_impl_item(&mut self, item: P<ast::AssocItem>) 
-> SmallVec<[P<ast::AssocItem>; 1]> { - noop_flat_map_assoc_item(configure!(self, item), self) - } - - fn flat_map_trait_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> { - noop_flat_map_assoc_item(configure!(self, item), self) - } - - fn visit_mac(&mut self, _mac: &mut ast::Mac) { - // Don't configure interpolated AST (cf. issue #34171). - // Interpolated AST will get configured once the surrounding tokens are parsed. - } - - fn visit_pat(&mut self, pat: &mut P<ast::Pat>) { - self.configure_pat(pat); - noop_visit_pat(pat, self) - } - - fn visit_fn_decl(&mut self, mut fn_decl: &mut P<ast::FnDecl>) { - self.configure_fn_decl(&mut fn_decl); - noop_visit_fn_decl(fn_decl, self); - } -} - -fn is_cfg(attr: &Attribute) -> bool { - attr.check_name(sym::cfg) -} - -/// Process the potential `cfg` attributes on a module. -/// Also determine if the module should be included in this configuration. -pub fn process_configure_mod(sess: &ParseSess, cfg_mods: bool, attrs: &mut Vec<Attribute>) -> bool { - // Don't perform gated feature checking. - let mut strip_unconfigured = StripUnconfigured { sess, features: None }; - strip_unconfigured.process_cfg_attrs(attrs); - !cfg_mods || strip_unconfigured.in_cfg(&attrs) -} diff --git a/src/librustc_parse/lexer/mod.rs b/src/librustc_parse/lexer/mod.rs index f7fb704fcbc..96321ef2145 100644 --- a/src/librustc_parse/lexer/mod.rs +++ b/src/librustc_parse/lexer/mod.rs @@ -1,20 +1,20 @@ use rustc_ast::token::{self, Token, TokenKind}; use rustc_ast::util::comments; use rustc_data_structures::sync::Lrc; -use rustc_errors::{error_code, DiagnosticBuilder, FatalError}; -use rustc_lexer::unescape; +use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError}; use rustc_lexer::Base; +use rustc_lexer::{unescape, LexRawStrError, UnvalidatedRawStr, ValidatedRawStr}; use rustc_session::parse::ParseSess; use rustc_span::symbol::{sym, Symbol}; use rustc_span::{BytePos, Pos, Span}; use log::debug; use std::char; -use std::convert::TryInto; mod tokentrees; mod unescape_error_reporting; mod unicode_chars; + use unescape_error_reporting::{emit_unescape_error, push_escaped_char}; #[derive(Clone, Debug)] @@ -46,12 +46,20 @@ impl<'a> StringReader<'a> { source_file: Lrc<rustc_span::SourceFile>, override_span: Option<Span>, ) -> Self { - if source_file.src.is_none() { + // Make sure external source is loaded first, before accessing it. + // While this can't show up during normal parsing, `retokenize` may + // be called with a source file from an external crate. + sess.source_map().ensure_source_file_source_present(source_file.clone()); + + // FIXME(eddyb) use `Lrc<str>` or similar to avoid cloning the `String`. + let src = if let Some(src) = &source_file.src { + src.clone() + } else if let Some(src) = source_file.external_src.borrow().get_source() { + src.clone() + } else { sess.span_diagnostic .bug(&format!("cannot lex `source_file` without source: {}", source_file.name)); - } - - let src = (*source_file.src.as_ref().unwrap()).clone(); + }; StringReader { sess, @@ -85,9 +93,6 @@ impl<'a> StringReader<'a> { } /// Returns the next token, including trivia like whitespace or comments. - /// - /// `Err(())` means that some errors were encountered, which can be - /// retrieved using `buffer_fatal_errors`. 
pub fn next_token(&mut self) -> Token { let start_src_index = self.src_index(self.pos); let text: &str = &self.src[start_src_index..self.end_src_index]; @@ -179,14 +184,12 @@ impl<'a> StringReader<'a> { rustc_lexer::TokenKind::LineComment => { let string = self.str_from(start); // comments with only more "/"s are not doc comments - let tok = if comments::is_line_doc_comment(string) { + if comments::is_line_doc_comment(string) { self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment"); token::DocComment(Symbol::intern(string)) } else { token::Comment - }; - - tok + } } rustc_lexer::TokenKind::BlockComment { terminated } => { let string = self.str_from(start); @@ -204,14 +207,12 @@ impl<'a> StringReader<'a> { self.fatal_span_(start, last_bpos, msg).raise(); } - let tok = if is_doc_comment { + if is_doc_comment { self.forbid_bare_cr(start, string, "bare CR not allowed in block doc-comment"); token::DocComment(Symbol::intern(string)) } else { token::Comment - }; - - tok + } } rustc_lexer::TokenKind::Whitespace => token::Whitespace, rustc_lexer::TokenKind::Ident | rustc_lexer::TokenKind::RawIdent => { @@ -372,30 +373,22 @@ impl<'a> StringReader<'a> { let id = self.symbol_from_to(content_start, content_end); (token::ByteStr, id) } - rustc_lexer::LiteralKind::RawStr { n_hashes, started, terminated } => { - if !started { - self.report_non_started_raw_string(start); - } - if !terminated { - self.report_unterminated_raw_string(start, n_hashes) - } - let n_hashes: u16 = self.restrict_n_hashes(start, n_hashes); + rustc_lexer::LiteralKind::RawStr(unvalidated_raw_str) => { + let valid_raw_str = self.validate_and_report_errors(start, unvalidated_raw_str); + let n_hashes = valid_raw_str.num_hashes(); let n = u32::from(n_hashes); + let content_start = start + BytePos(2 + n); let content_end = suffix_start - BytePos(1 + n); self.validate_raw_str_escape(content_start, content_end); let id = self.symbol_from_to(content_start, content_end); (token::StrRaw(n_hashes), id) } - rustc_lexer::LiteralKind::RawByteStr { n_hashes, started, terminated } => { - if !started { - self.report_non_started_raw_string(start); - } - if !terminated { - self.report_unterminated_raw_string(start, n_hashes) - } - let n_hashes: u16 = self.restrict_n_hashes(start, n_hashes); + rustc_lexer::LiteralKind::RawByteStr(unvalidated_raw_str) => { + let validated_raw_str = self.validate_and_report_errors(start, unvalidated_raw_str); + let n_hashes = validated_raw_str.num_hashes(); let n = u32::from(n_hashes); + let content_start = start + BytePos(3 + n); let content_end = suffix_start - BytePos(1 + n); self.validate_raw_byte_str_escape(content_start, content_end); @@ -481,6 +474,26 @@ impl<'a> StringReader<'a> { } } + fn validate_and_report_errors( + &self, + start: BytePos, + unvalidated_raw_str: UnvalidatedRawStr, + ) -> ValidatedRawStr { + match unvalidated_raw_str.validate() { + Err(LexRawStrError::InvalidStarter) => self.report_non_started_raw_string(start), + Err(LexRawStrError::NoTerminator { expected, found, possible_terminator_offset }) => { + self.report_unterminated_raw_string( + start, + expected, + possible_terminator_offset, + found, + ) + } + Err(LexRawStrError::TooManyDelimiters) => self.report_too_many_hashes(start), + Ok(valid) => valid, + } + } + fn report_non_started_raw_string(&self, start: BytePos) -> ! 
{ let bad_char = self.str_from(start).chars().last().unwrap(); self.struct_fatal_span_char( @@ -494,38 +507,51 @@ impl<'a> StringReader<'a> { FatalError.raise() } - fn report_unterminated_raw_string(&self, start: BytePos, n_hashes: usize) -> ! { + fn report_unterminated_raw_string( + &self, + start: BytePos, + n_hashes: usize, + possible_offset: Option<usize>, + found_terminators: usize, + ) -> ! { let mut err = self.sess.span_diagnostic.struct_span_fatal_with_code( self.mk_sp(start, start), "unterminated raw string", error_code!(E0748), ); + err.span_label(self.mk_sp(start, start), "unterminated raw string"); if n_hashes > 0 { err.note(&format!( "this raw string should be terminated with `\"{}`", - "#".repeat(n_hashes as usize) + "#".repeat(n_hashes) )); } + if let Some(possible_offset) = possible_offset { + let lo = start + BytePos(possible_offset as u32); + let hi = lo + BytePos(found_terminators as u32); + let span = self.mk_sp(lo, hi); + err.span_suggestion( + span, + "consider terminating the string here", + "#".repeat(n_hashes), + Applicability::MaybeIncorrect, + ); + } + err.emit(); FatalError.raise() } - fn restrict_n_hashes(&self, start: BytePos, n_hashes: usize) -> u16 { - match n_hashes.try_into() { - Ok(n_hashes) => n_hashes, - Err(_) => { - self.fatal_span_( - start, - self.pos, - "too many `#` symbols: raw strings may be \ - delimited by up to 65535 `#` symbols", - ) - .raise(); - } - } + fn report_too_many_hashes(&self, start: BytePos) -> ! { + self.fatal_span_( + start, + self.pos, + "too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols", + ) + .raise(); } fn validate_char_escape(&self, content_start: BytePos, content_end: BytePos) { diff --git a/src/librustc_parse/lexer/tokentrees.rs b/src/librustc_parse/lexer/tokentrees.rs index 6c0acd0302f..b65b8941728 100644 --- a/src/librustc_parse/lexer/tokentrees.rs +++ b/src/librustc_parse/lexer/tokentrees.rs @@ -40,6 +40,7 @@ struct TokenTreesReader<'a> { /// Used only for error recovery when arriving to EOF with mismatched braces. matching_delim_spans: Vec<(token::DelimToken, Span, Span)>, last_unclosed_found_span: Option<Span>, + /// Collect empty block spans that might have been auto-inserted by editors. last_delim_empty_block_spans: FxHashMap<token::DelimToken, Span>, } @@ -138,7 +139,11 @@ impl<'a> TokenTreesReader<'a> { if tts.is_empty() { let empty_block_span = open_brace_span.to(close_brace_span); - self.last_delim_empty_block_spans.insert(delim, empty_block_span); + if !sm.is_multiline(empty_block_span) { + // Only track if the block is in the form of `{}`, otherwise it is + // likely that it was written on purpose. 
+ self.last_delim_empty_block_spans.insert(delim, empty_block_span); + } } if self.open_braces.is_empty() { diff --git a/src/librustc_parse/lib.rs b/src/librustc_parse/lib.rs index 25f9f8fd3ad..8e2a9513d6b 100644 --- a/src/librustc_parse/lib.rs +++ b/src/librustc_parse/lib.rs @@ -2,9 +2,12 @@ #![feature(bool_to_option)] #![feature(crate_visibility_modifier)] +#![feature(bindings_after_at)] +#![feature(try_blocks)] +#![feature(or_patterns)] use rustc_ast::ast; -use rustc_ast::token::{self, Nonterminal, Token}; +use rustc_ast::token::{self, Nonterminal}; use rustc_ast::tokenstream::{self, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; @@ -12,7 +15,7 @@ use rustc_errors::{Diagnostic, FatalError, Level, PResult}; use rustc_session::parse::ParseSess; use rustc_span::{FileName, SourceFile, Span}; -use std::path::{Path, PathBuf}; +use std::path::Path; use std::str; use log::info; @@ -24,24 +27,6 @@ pub mod parser; use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser}; pub mod lexer; pub mod validate_attr; -#[macro_use] -pub mod config; - -#[derive(Clone)] -pub struct Directory { - pub path: PathBuf, - pub ownership: DirectoryOwnership, -} - -#[derive(Copy, Clone)] -pub enum DirectoryOwnership { - Owned { - // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`. - relative: Option<ast::Ident>, - }, - UnownedViaBlock, - UnownedViaMod, -} // A bunch of utility functions of the form `parse_<thing>_from_<source>` // where <thing> includes crate, expr, item, stmt, tts, and one that @@ -66,7 +51,7 @@ macro_rules! panictry_buffer { } pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> { - let mut parser = new_parser_from_file(sess, input); + let mut parser = new_parser_from_file(sess, input, None); parser.parse_crate_mod() } @@ -74,7 +59,7 @@ pub fn parse_crate_attrs_from_file<'a>( input: &Path, sess: &'a ParseSess, ) -> PResult<'a, Vec<ast::Attribute>> { - let mut parser = new_parser_from_file(sess, input); + let mut parser = new_parser_from_file(sess, input, None); parser.parse_inner_attributes() } @@ -118,15 +103,13 @@ pub fn maybe_new_parser_from_source_str( name: FileName, source: String, ) -> Result<Parser<'_>, Vec<Diagnostic>> { - let mut parser = - maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))?; - parser.recurse_into_file_modules = false; - Ok(parser) + maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source)) } /// Creates a new parser, handling errors as appropriate if the file doesn't exist. -pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> { - source_file_to_parser(sess, file_to_source_file(sess, path, None)) +/// If a span is given, that is used on an error as the as the source of the problem. +pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> { + source_file_to_parser(sess, file_to_source_file(sess, path, sp)) } /// Creates a new parser, returning buffered diagnostics if the file doesn't exist, @@ -139,22 +122,6 @@ pub fn maybe_new_parser_from_file<'a>( maybe_source_file_to_parser(sess, file) } -/// Given a session, a crate config, a path, and a span, add -/// the file at the given path to the `source_map`, and returns a parser. -/// On an error, uses the given span as the source of the problem. 
-pub fn new_sub_parser_from_file<'a>( - sess: &'a ParseSess, - path: &Path, - directory_ownership: DirectoryOwnership, - module_name: Option<String>, - sp: Span, -) -> Parser<'a> { - let mut p = source_file_to_parser(sess, file_to_source_file(sess, path, Some(sp))); - p.directory.ownership = directory_ownership; - p.root_module_name = module_name; - p -} - /// Given a `source_file` and config, returns a parser. fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> { panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file)) @@ -171,8 +138,7 @@ fn maybe_source_file_to_parser( let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof { - let span = Span::new(end_pos, end_pos, parser.token.span.ctxt()); - parser.set_token(Token::new(token::Eof, span)); + parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt()); } Ok(parser) @@ -257,26 +223,7 @@ pub fn stream_to_parser<'a>( stream: TokenStream, subparser_name: Option<&'static str>, ) -> Parser<'a> { - Parser::new(sess, stream, None, true, false, subparser_name) -} - -/// Given a stream, the `ParseSess` and the base directory, produces a parser. -/// -/// Use this function when you are creating a parser from the token stream -/// and also care about the current working directory of the parser (e.g., -/// you are trying to resolve modules defined inside a macro invocation). -/// -/// # Note -/// -/// The main usage of this function is outside of rustc, for those who uses -/// librustc_ast as a library. Please do not remove this function while refactoring -/// just because it is not used in rustc codebase! -pub fn stream_to_parser_with_base_dir<'a>( - sess: &'a ParseSess, - stream: TokenStream, - base_dir: Directory, -) -> Parser<'a> { - Parser::new(sess, stream, Some(base_dir), true, false, None) + Parser::new(sess, stream, false, subparser_name) } /// Runs the given subparser `f` on the tokens of the given `attr`'s item. 
@@ -286,7 +233,7 @@ pub fn parse_in<'a, T>( name: &'static str, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, T> { - let mut parser = Parser::new(sess, tts, None, false, false, Some(name)); + let mut parser = Parser::new(sess, tts, false, Some(name)); let result = f(&mut parser)?; if parser.token != token::Eof { parser.unexpected()?; @@ -361,7 +308,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke going with stringified version" ); } - return tokens_for_real; + tokens_for_real } fn prepend_attrs( diff --git a/src/librustc_parse/parser/attr.rs b/src/librustc_parse/parser/attr.rs index c5f8b2dd862..b56dd30739d 100644 --- a/src/librustc_parse/parser/attr.rs +++ b/src/librustc_parse/parser/attr.rs @@ -1,4 +1,4 @@ -use super::{Parser, PathStyle, TokenType}; +use super::{Parser, PathStyle}; use rustc_ast::ast; use rustc_ast::attr; use rustc_ast::token::{self, Nonterminal}; @@ -10,14 +10,20 @@ use rustc_span::{Span, Symbol}; use log::debug; #[derive(Debug)] -enum InnerAttributeParsePolicy<'a> { +pub(super) enum InnerAttrPolicy<'a> { Permitted, - NotPermitted { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> }, + Forbidden { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> }, } const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ permitted in this context"; +pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPolicy::Forbidden { + reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG, + saw_doc_comment: false, + prev_attr_sp: None, +}; + impl<'a> Parser<'a> { /// Parses attributes that appear before an item. pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { @@ -25,48 +31,44 @@ impl<'a> Parser<'a> { let mut just_parsed_doc_comment = false; loop { debug!("parse_outer_attributes: self.token={:?}", self.token); - match self.token.kind { - token::Pound => { - let inner_error_reason = if just_parsed_doc_comment { - "an inner attribute is not permitted following an outer doc comment" - } else if !attrs.is_empty() { - "an inner attribute is not permitted following an outer attribute" - } else { - DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG - }; - let inner_parse_policy = InnerAttributeParsePolicy::NotPermitted { - reason: inner_error_reason, - saw_doc_comment: just_parsed_doc_comment, - prev_attr_sp: attrs.last().and_then(|a| Some(a.span)), - }; - let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; - attrs.push(attr); - just_parsed_doc_comment = false; - } - token::DocComment(s) => { - let attr = self.mk_doc_comment(s); - if attr.style != ast::AttrStyle::Outer { - let span = self.token.span; - let mut err = self.struct_span_err(span, "expected outer doc comment"); - err.note( + if self.check(&token::Pound) { + let inner_error_reason = if just_parsed_doc_comment { + "an inner attribute is not permitted following an outer doc comment" + } else if !attrs.is_empty() { + "an inner attribute is not permitted following an outer attribute" + } else { + DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG + }; + let inner_parse_policy = InnerAttrPolicy::Forbidden { + reason: inner_error_reason, + saw_doc_comment: just_parsed_doc_comment, + prev_attr_sp: attrs.last().map(|a| a.span), + }; + let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; + attrs.push(attr); + just_parsed_doc_comment = false; + } else if let token::DocComment(s) = self.token.kind { + let attr = self.mk_doc_comment(s); + if attr.style != 
ast::AttrStyle::Outer { + self.struct_span_err(self.token.span, "expected outer doc comment") + .note( "inner doc comments like this (starting with \ - `//!` or `/*!`) can only appear before items", - ); - return Err(err); - } - attrs.push(attr); - self.bump(); - just_parsed_doc_comment = true; + `//!` or `/*!`) can only appear before items", + ) + .emit(); } - _ => break, + attrs.push(attr); + self.bump(); + just_parsed_doc_comment = true; + } else { + break; } } Ok(attrs) } fn mk_doc_comment(&self, s: Symbol) -> ast::Attribute { - let style = comments::doc_comment_style(&s.as_str()); - attr::mk_doc_comment(style, s, self.token.span) + attr::mk_doc_comment(comments::doc_comment_style(&s.as_str()), s, self.token.span) } /// Matches `attribute = # ! [ meta_item ]`. @@ -75,96 +77,67 @@ impl<'a> Parser<'a> { /// attribute. pub fn parse_attribute(&mut self, permit_inner: bool) -> PResult<'a, ast::Attribute> { debug!("parse_attribute: permit_inner={:?} self.token={:?}", permit_inner, self.token); - let inner_parse_policy = if permit_inner { - InnerAttributeParsePolicy::Permitted - } else { - InnerAttributeParsePolicy::NotPermitted { - reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG, - saw_doc_comment: false, - prev_attr_sp: None, - } - }; + let inner_parse_policy = + if permit_inner { InnerAttrPolicy::Permitted } else { DEFAULT_INNER_ATTR_FORBIDDEN }; self.parse_attribute_with_inner_parse_policy(inner_parse_policy) } - /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy` + /// The same as `parse_attribute`, except it takes in an `InnerAttrPolicy` /// that prescribes how to handle inner attributes. fn parse_attribute_with_inner_parse_policy( &mut self, - inner_parse_policy: InnerAttributeParsePolicy<'_>, + inner_parse_policy: InnerAttrPolicy<'_>, ) -> PResult<'a, ast::Attribute> { debug!( "parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token ); - let (span, item, style) = match self.token.kind { - token::Pound => { - let lo = self.token.span; - self.bump(); - - if let InnerAttributeParsePolicy::Permitted = inner_parse_policy { - self.expected_tokens.push(TokenType::Token(token::Not)); - } - - let style = if self.token == token::Not { - self.bump(); - ast::AttrStyle::Inner - } else { - ast::AttrStyle::Outer - }; + let lo = self.token.span; + let (span, item, style) = if self.eat(&token::Pound) { + let style = + if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; - self.expect(&token::OpenDelim(token::Bracket))?; - let item = self.parse_attr_item()?; - self.expect(&token::CloseDelim(token::Bracket))?; - let hi = self.prev_token.span; - - let attr_sp = lo.to(hi); - - // Emit error if inner attribute is encountered and not permitted - if style == ast::AttrStyle::Inner { - if let InnerAttributeParsePolicy::NotPermitted { - reason, - saw_doc_comment, - prev_attr_sp, - } = inner_parse_policy - { - let prev_attr_note = if saw_doc_comment { - "previous doc comment" - } else { - "previous outer attribute" - }; - - let mut diagnostic = self.struct_span_err(attr_sp, reason); - - if let Some(prev_attr_sp) = prev_attr_sp { - diagnostic - .span_label(attr_sp, "not permitted following an outer attibute") - .span_label(prev_attr_sp, prev_attr_note); - } - - diagnostic - .note( - "inner attributes, like `#![no_std]`, annotate the item \ - enclosing them, and are usually found at the beginning of \ - source files. 
Outer attributes, like `#[test]`, annotate the \ - item following them.", - ) - .emit(); - } - } + self.expect(&token::OpenDelim(token::Bracket))?; + let item = self.parse_attr_item()?; + self.expect(&token::CloseDelim(token::Bracket))?; + let attr_sp = lo.to(self.prev_token.span); - (attr_sp, item, style) - } - _ => { - let token_str = pprust::token_to_string(&self.token); - let msg = &format!("expected `#`, found `{}`", token_str); - return Err(self.struct_span_err(self.token.span, msg)); + // Emit error if inner attribute is encountered and forbidden. + if style == ast::AttrStyle::Inner { + self.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy); } + + (attr_sp, item, style) + } else { + let token_str = pprust::token_to_string(&self.token); + let msg = &format!("expected `#`, found `{}`", token_str); + return Err(self.struct_span_err(self.token.span, msg)); }; Ok(attr::mk_attr_from_item(style, item, span)) } + pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) { + if let InnerAttrPolicy::Forbidden { reason, saw_doc_comment, prev_attr_sp } = policy { + let prev_attr_note = + if saw_doc_comment { "previous doc comment" } else { "previous outer attribute" }; + + let mut diag = self.struct_span_err(attr_sp, reason); + + if let Some(prev_attr_sp) = prev_attr_sp { + diag.span_label(attr_sp, "not permitted following an outer attribute") + .span_label(prev_attr_sp, prev_attr_note); + } + + diag.note( + "inner attributes, like `#![no_std]`, annotate the item enclosing them, \ + and are usually found at the beginning of source files. \ + Outer attributes, like `#[test]`, annotate the item following them.", + ) + .emit(); + } + } + /// Parses an inner part of an attribute (the path and following tokens). /// The tokens must be either a delimited token stream, or empty token stream, /// or the "legacy" key-value form. @@ -200,28 +173,22 @@ impl<'a> Parser<'a> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = vec![]; loop { - match self.token.kind { - token::Pound => { - // Don't even try to parse if it's not an inner attribute. - if !self.look_ahead(1, |t| t == &token::Not) { - break; - } - - let attr = self.parse_attribute(true)?; - assert_eq!(attr.style, ast::AttrStyle::Inner); + // Only try to parse if it is an inner attribute (has `!`). + if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { + let attr = self.parse_attribute(true)?; + assert_eq!(attr.style, ast::AttrStyle::Inner); + attrs.push(attr); + } else if let token::DocComment(s) = self.token.kind { + // We need to get the position of this token before we bump. + let attr = self.mk_doc_comment(s); + if attr.style == ast::AttrStyle::Inner { attrs.push(attr); + self.bump(); + } else { + break; } - token::DocComment(s) => { - // We need to get the position of this token before we bump. 
- let attr = self.mk_doc_comment(s); - if attr.style == ast::AttrStyle::Inner { - attrs.push(attr); - self.bump(); - } else { - break; - } - } - _ => break, + } else { + break; } } Ok(attrs) @@ -232,12 +199,10 @@ impl<'a> Parser<'a> { debug!("checking if {:?} is unusuffixed", lit); if !lit.kind.is_unsuffixed() { - let msg = "suffixed literals are not allowed in attributes"; - self.struct_span_err(lit.span, msg) + self.struct_span_err(lit.span, "suffixed literals are not allowed in attributes") .help( - "instead of using a suffixed literal \ - (`1u8`, `1.0f32`, etc.), use an unsuffixed version \ - (`1`, `1.0`, etc.)", + "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \ + use an unsuffixed version (`1`, `1.0`, etc.)", ) .emit(); } diff --git a/src/librustc_parse/parser/diagnostics.rs b/src/librustc_parse/parser/diagnostics.rs index 0759c43d452..12b9b682682 100644 --- a/src/librustc_parse/parser/diagnostics.rs +++ b/src/librustc_parse/parser/diagnostics.rs @@ -6,7 +6,7 @@ use rustc_ast::ast::{ }; use rustc_ast::ast::{AttrVec, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind}; use rustc_ast::ptr::P; -use rustc_ast::token::{self, TokenKind}; +use rustc_ast::token::{self, Lit, LitKind, TokenKind}; use rustc_ast::util::parser::AssocOp; use rustc_ast_pretty::pprust; use rustc_data_structures::fx::FxHashSet; @@ -17,9 +17,8 @@ use rustc_span::symbol::kw; use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP}; use log::{debug, trace}; -use std::mem; -const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments"; +const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments"; /// Creates a placeholder argument. pub(super) fn dummy_arg(ident: Ident) -> Param { @@ -40,55 +39,12 @@ pub(super) fn dummy_arg(ident: Ident) -> Param { } pub enum Error { - FileNotFoundForModule { - mod_name: String, - default_path: String, - secondary_path: String, - dir_path: String, - }, - DuplicatePaths { - mod_name: String, - default_path: String, - secondary_path: String, - }, UselessDocComment, } impl Error { fn span_err(self, sp: impl Into<MultiSpan>, handler: &Handler) -> DiagnosticBuilder<'_> { match self { - Error::FileNotFoundForModule { - ref mod_name, - ref default_path, - ref secondary_path, - ref dir_path, - } => { - let mut err = struct_span_err!( - handler, - sp, - E0583, - "file not found for module `{}`", - mod_name, - ); - err.help(&format!( - "name the file either {} or {} inside the directory \"{}\"", - default_path, secondary_path, dir_path, - )); - err - } - Error::DuplicatePaths { ref mod_name, ref default_path, ref secondary_path } => { - let mut err = struct_span_err!( - handler, - sp, - E0584, - "file for module `{}` found at both {} and {}", - mod_name, - default_path, - secondary_path, - ); - err.help("delete or rename one of them to remove the ambiguity"); - err - } Error::UselessDocComment => { let mut err = struct_span_err!( handler, @@ -192,17 +148,19 @@ impl<'a> Parser<'a> { TokenKind::CloseDelim(token::DelimToken::Brace), TokenKind::CloseDelim(token::DelimToken::Paren), ]; - if let token::Ident(name, false) = self.normalized_token.kind { - if Ident::new(name, self.normalized_token.span).is_raw_guess() - && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) + match self.token.ident() { + Some((ident, false)) + if ident.is_raw_guess() + && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) => { err.span_suggestion( - self.normalized_token.span, + ident.span, "you can escape reserved keywords to 
use them as identifiers", - format!("r#{}", name), + format!("r#{}", ident.name), Applicability::MaybeIncorrect, ); } + _ => {} } if let Some(token_descr) = super::token_descr_opt(&self.token) { err.span_label(self.token.span, format!("expected identifier, found {}", token_descr)); @@ -297,6 +255,10 @@ impl<'a> Parser<'a> { } } + if self.check_too_many_raw_str_terminators(&mut err) { + return Err(err); + } + let sm = self.sess.source_map(); if self.prev_token.span == DUMMY_SP { // Account for macro context where the previous span might not be @@ -324,6 +286,29 @@ impl<'a> Parser<'a> { Err(err) } + fn check_too_many_raw_str_terminators(&mut self, err: &mut DiagnosticBuilder<'_>) -> bool { + match (&self.prev_token.kind, &self.token.kind) { + ( + TokenKind::Literal(Lit { + kind: LitKind::StrRaw(n_hashes) | LitKind::ByteStrRaw(n_hashes), + .. + }), + TokenKind::Pound, + ) => { + err.set_primary_message("too many `#` when terminating raw string"); + err.span_suggestion( + self.token.span, + "remove the extra `#`", + String::new(), + Applicability::MachineApplicable, + ); + err.note(&format!("the raw string started with {} `#`s", n_hashes)); + true + } + _ => false, + } + } + pub fn maybe_annotate_with_ascription( &mut self, err: &mut DiagnosticBuilder<'_>, @@ -500,9 +485,28 @@ impl<'a> Parser<'a> { err: &mut DiagnosticBuilder<'_>, inner_op: &Expr, outer_op: &Spanned<AssocOp>, - ) { + ) -> bool /* advanced the cursor */ { if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind { - match (op.node, &outer_op.node) { + if let ExprKind::Field(_, ident) = l1.kind { + if ident.as_str().parse::<i32>().is_err() && !matches!(r1.kind, ExprKind::Lit(_)) { + // The parser has encountered `foo.bar<baz`, the likelihood of the turbofish + // suggestion being the only one to apply is high. + return false; + } + } + let mut enclose = |left: Span, right: Span| { + err.multipart_suggestion( + "parenthesize the comparison", + vec![ + (left.shrink_to_lo(), "(".to_string()), + (right.shrink_to_hi(), ")".to_string()), + ], + Applicability::MaybeIncorrect, + ); + }; + return match (op.node, &outer_op.node) { + // `x == y == z` + (BinOpKind::Eq, AssocOp::Equal) | // `x < y < z` and friends. (BinOpKind::Lt, AssocOp::Less) | (BinOpKind::Lt, AssocOp::LessEqual) | (BinOpKind::Le, AssocOp::LessEqual) | (BinOpKind::Le, AssocOp::Less) | @@ -513,35 +517,55 @@ impl<'a> Parser<'a> { self.span_to_snippet(e.span) .unwrap_or_else(|_| pprust::expr_to_string(&e)) }; - err.span_suggestion( - inner_op.span.to(outer_op.span), - "split the comparison into two...", - format!( - "{} {} {} && {} {}", - expr_to_str(&l1), - op.node.to_string(), - expr_to_str(&r1), - expr_to_str(&r1), - outer_op.node.to_ast_binop().unwrap().to_string(), - ), - Applicability::MaybeIncorrect, - ); - err.span_suggestion( - inner_op.span.to(outer_op.span), - "...or parenthesize one of the comparisons", - format!( - "({} {} {}) {}", - expr_to_str(&l1), - op.node.to_string(), - expr_to_str(&r1), - outer_op.node.to_ast_binop().unwrap().to_string(), - ), + err.span_suggestion_verbose( + inner_op.span.shrink_to_hi(), + "split the comparison into two", + format!(" && {}", expr_to_str(&r1)), Applicability::MaybeIncorrect, ); + false // Keep the current parse behavior, where the AST is `(x < y) < z`. } - _ => {} - } + // `x == y < z` + (BinOpKind::Eq, AssocOp::Less) | (BinOpKind::Eq, AssocOp::LessEqual) | + (BinOpKind::Eq, AssocOp::Greater) | (BinOpKind::Eq, AssocOp::GreaterEqual) => { + // Consume `z`/outer-op-rhs. 
+ let snapshot = self.clone(); + match self.parse_expr() { + Ok(r2) => { + // We are sure that outer-op-rhs could be consumed, the suggestion is + // likely correct. + enclose(r1.span, r2.span); + true + } + Err(mut expr_err) => { + expr_err.cancel(); + *self = snapshot; + false + } + } + } + // `x > y == z` + (BinOpKind::Lt, AssocOp::Equal) | (BinOpKind::Le, AssocOp::Equal) | + (BinOpKind::Gt, AssocOp::Equal) | (BinOpKind::Ge, AssocOp::Equal) => { + let snapshot = self.clone(); + // At this point it is always valid to enclose the lhs in parentheses, no + // further checks are necessary. + match self.parse_expr() { + Ok(_) => { + enclose(l1.span, r1.span); + true + } + Err(mut expr_err) => { + expr_err.cancel(); + *self = snapshot; + false + } + } + } + _ => false, + }; } + false } /// Produces an error if comparison operators are chained (RFC #558). @@ -575,31 +599,26 @@ impl<'a> Parser<'a> { |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new()))); match inner_op.kind { - ExprKind::Binary(op, _, _) if op.node.is_comparison() => { - // Respan to include both operators. - let op_span = op.span.to(self.prev_token.span); - let mut err = - self.struct_span_err(op_span, "comparison operators cannot be chained"); - - // If it looks like a genuine attempt to chain operators (as opposed to a - // misformatted turbofish, for instance), suggest a correct form. - self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); + ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => { + let mut err = self.struct_span_err( + vec![op.span, self.prev_token.span], + "comparison operators cannot be chained", + ); let suggest = |err: &mut DiagnosticBuilder<'_>| { err.span_suggestion_verbose( - op_span.shrink_to_lo(), + op.span.shrink_to_lo(), TURBOFISH, "::".to_string(), Applicability::MaybeIncorrect, ); }; - if op.node == BinOpKind::Lt && - outer_op.node == AssocOp::Less || // Include `<` to provide this recommendation - outer_op.node == AssocOp::Greater - // even in a case like the following: + // Include `<` to provide this recommendation even in a case like + // `Foo<Bar<Baz<Qux, ()>>>` + if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less + || outer_op.node == AssocOp::Greater { - // Foo<Bar<Baz<Qux, ()>>> if outer_op.node == AssocOp::Less { let snapshot = self.clone(); self.bump(); @@ -613,7 +632,7 @@ impl<'a> Parser<'a> { { // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the // parser and bail out. - mem::replace(self, snapshot.clone()); + *self = snapshot.clone(); } } return if token::ModSep == self.token.kind { @@ -638,7 +657,7 @@ impl<'a> Parser<'a> { expr_err.cancel(); // Not entirely sure now, but we bubble the error up with the // suggestion. - mem::replace(self, snapshot); + *self = snapshot; Err(err) } } @@ -658,15 +677,33 @@ impl<'a> Parser<'a> { } } } else { - // All we know is that this is `foo < bar >` and *nothing* else. Try to - // be helpful, but don't attempt to recover. - err.help(TURBOFISH); - err.help("or use `(...)` if you meant to specify fn arguments"); - // These cases cause too many knock-down errors, bail out (#61329). - Err(err) + if !matches!(l1.kind, ExprKind::Lit(_)) + && !matches!(r1.kind, ExprKind::Lit(_)) + { + // All we know is that this is `foo < bar >` and *nothing* else. Try to + // be helpful, but don't attempt to recover. 
+ err.help(TURBOFISH); + err.help("or use `(...)` if you meant to specify fn arguments"); + } + + // If it looks like a genuine attempt to chain operators (as opposed to a + // misformatted turbofish, for instance), suggest a correct form. + if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op) + { + err.emit(); + mk_err_expr(self, inner_op.span.to(self.prev_token.span)) + } else { + // These cases cause too many knock-down errors, bail out (#61329). + Err(err) + } }; } + let recover = + self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); err.emit(); + if recover { + return mk_err_expr(self, inner_op.span.to(self.prev_token.span)); + } } _ => {} } @@ -684,7 +721,7 @@ impl<'a> Parser<'a> { if self.token.kind == token::Eof { // Not entirely sure that what we consumed were fn arguments, rollback. - mem::replace(self, snapshot); + *self = snapshot; Err(()) } else { // 99% certain that the suggestion is correct, continue parsing. @@ -895,7 +932,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `;`, found `{}`", super::token_descr(&self.token)); let appl = Applicability::MachineApplicable; if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP { - // Likely inside a macro, can't provide meaninful suggestions. + // Likely inside a macro, can't provide meaningful suggestions. return self.expect(&token::Semi).map(drop); } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) { // The current token is in the same line as the prior token, not recoverable. @@ -1076,7 +1113,7 @@ impl<'a> Parser<'a> { self.look_ahead(2, |t| t.is_ident()) || self.look_ahead(1, |t| t == &token::ModSep) && (self.look_ahead(2, |t| t.is_ident()) || // `foo:bar::baz` - self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>` + self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>` } pub(super) fn recover_seq_parse_error( diff --git a/src/librustc_parse/parser/expr.rs b/src/librustc_parse/parser/expr.rs index 18ddd23588e..cbff99f8da6 100644 --- a/src/librustc_parse/parser/expr.rs +++ b/src/librustc_parse/parser/expr.rs @@ -4,8 +4,8 @@ use super::{BlockMode, Parser, PathStyle, Restrictions, TokenType}; use super::{SemiColonMode, SeqSep, TokenExpectType}; use crate::maybe_recover_from_interpolated_ty_qpath; -use rustc_ast::ast::{self, AttrStyle, AttrVec, CaptureBy, Field, Ident, Lit, DUMMY_NODE_ID}; -use rustc_ast::ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, Mac, Param, Ty, TyKind, UnOp}; +use rustc_ast::ast::{self, AttrStyle, AttrVec, CaptureBy, Field, Ident, Lit, UnOp, DUMMY_NODE_ID}; +use rustc_ast::ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind}; use rustc_ast::ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits}; use rustc_ast::ptr::P; use rustc_ast::token::{self, Token, TokenKind}; @@ -50,7 +50,6 @@ macro_rules! maybe_whole_expr { AttrVec::new(), )); } - // N.B., `NtIdent(ident)` is normalized to `Ident` in `fn bump`. _ => {} }; } @@ -97,9 +96,9 @@ impl<'a> Parser<'a> { fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> { match self.parse_expr() { Ok(expr) => Ok(expr), - Err(mut err) => match self.normalized_token.kind { - token::Ident(name, false) - if name == kw::Underscore && self.look_ahead(1, |t| t == &token::Comma) => + Err(mut err) => match self.token.ident() { + Some((Ident { name: kw::Underscore, .. 
}, false)) + if self.look_ahead(1, |t| t == &token::Comma) => { // Special-case handling of `foo(_, _, _)` err.emit(); @@ -331,21 +330,19 @@ impl<'a> Parser<'a> { /// /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively. fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> { - Some(Spanned { - node: match (AssocOp::from_token(&self.token), &self.normalized_token.kind) { - (Some(op), _) => op, - (None, token::Ident(sym::and, false)) => { - self.error_bad_logical_op("and", "&&", "conjunction"); - AssocOp::LAnd - } - (None, token::Ident(sym::or, false)) => { - self.error_bad_logical_op("or", "||", "disjunction"); - AssocOp::LOr - } - _ => return None, - }, - span: self.normalized_token.span, - }) + let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) { + (Some(op), _) => (op, self.token.span), + (None, Some((Ident { name: sym::and, span }, false))) => { + self.error_bad_logical_op("and", "&&", "conjunction"); + (AssocOp::LAnd, span) + } + (None, Some((Ident { name: sym::or, span }, false))) => { + self.error_bad_logical_op("or", "||", "disjunction"); + (AssocOp::LOr, span) + } + _ => return None, + }; + Some(source_map::respan(span, op)) } /// Error on `and` and `or` suggesting `&&` and `||` respectively. @@ -436,7 +433,7 @@ impl<'a> Parser<'a> { let attrs = self.parse_or_use_outer_attributes(attrs)?; let lo = self.token.span; // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() - let (hi, ex) = match self.normalized_token.kind { + let (hi, ex) = match self.token.uninterpolate().kind { token::Not => self.parse_unary_expr(lo, UnOp::Not), // `!expr` token::Tilde => self.recover_tilde_expr(lo), // `~expr` token::BinOp(token::Minus) => self.parse_unary_expr(lo, UnOp::Neg), // `-expr` @@ -483,7 +480,7 @@ impl<'a> Parser<'a> { } fn is_mistaken_not_ident_negation(&self) -> bool { - let token_cannot_continue_expr = |t: &Token| match t.kind { + let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), @@ -544,8 +541,8 @@ impl<'a> Parser<'a> { // Save the state of the parser before parsing type normally, in case there is a // LessThan comparison after this cast. let parser_snapshot_before_type = self.clone(); - match self.parse_ty_no_plus() { - Ok(rhs) => Ok(mk_expr(self, rhs)), + let cast_expr = match self.parse_ty_no_plus() { + Ok(rhs) => mk_expr(self, rhs), Err(mut type_err) => { // Rewind to before attempting to parse the type with generics, to recover // from situations like `x as usize < y` in which we first tried to parse @@ -599,17 +596,71 @@ impl<'a> Parser<'a> { ) .emit(); - Ok(expr) + expr } Err(mut path_err) => { // Couldn't parse as a path, return original error and parser state. path_err.cancel(); mem::replace(self, parser_snapshot_after_type); - Err(type_err) + return Err(type_err); } } } - } + }; + + self.parse_and_disallow_postfix_after_cast(cast_expr) + } + + /// Parses a postfix operators such as `.`, `?`, or index (`[]`) after a cast, + /// then emits an error and returns the newly parsed tree. + /// The resulting parse tree for `&x as T[0]` has a precedence of `((&x) as T)[0]`. + fn parse_and_disallow_postfix_after_cast( + &mut self, + cast_expr: P<Expr>, + ) -> PResult<'a, P<Expr>> { + // Save the memory location of expr before parsing any following postfix operators. 
+ // This will be compared with the memory location of the output expression. + // If they different we can assume we parsed another expression because the existing expression is not reallocated. + let addr_before = &*cast_expr as *const _ as usize; + let span = cast_expr.span; + let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?; + let changed = addr_before != &*with_postfix as *const _ as usize; + + // Check if an illegal postfix operator has been added after the cast. + // If the resulting expression is not a cast, or has a different memory location, it is an illegal postfix operator. + if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) || changed { + let msg = format!( + "casts cannot be followed by {}", + match with_postfix.kind { + ExprKind::Index(_, _) => "indexing", + ExprKind::Try(_) => "?", + ExprKind::Field(_, _) => "a field access", + ExprKind::MethodCall(_, _) => "a method call", + ExprKind::Call(_, _) => "a function call", + ExprKind::Await(_) => "`.await`", + ExprKind::Err => return Ok(with_postfix), + _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"), + } + ); + let mut err = self.struct_span_err(span, &msg); + // If type ascription is "likely an error", the user will already be getting a useful + // help message, and doesn't need a second. + if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) { + self.maybe_annotate_with_ascription(&mut err, false); + } else { + let suggestions = vec![ + (span.shrink_to_lo(), "(".to_string()), + (span.shrink_to_hi(), ")".to_string()), + ]; + err.multipart_suggestion( + "try surrounding the expression in parentheses", + suggestions, + Applicability::MachineApplicable, + ); + } + err.emit(); + }; + Ok(with_postfix) } fn parse_assoc_op_ascribe(&mut self, lhs: P<Expr>, lhs_span: Span) -> PResult<'a, P<Expr>> { @@ -623,10 +674,28 @@ impl<'a> Parser<'a> { /// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`. fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { self.expect_and()?; + let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon); + let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below. let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo); let expr = self.parse_prefix_expr(None); - let (span, expr) = self.interpolated_or_expr_span(expr)?; - Ok((lo.to(span), ExprKind::AddrOf(borrow_kind, mutbl, expr))) + let (hi, expr) = self.interpolated_or_expr_span(expr)?; + let span = lo.to(hi); + if let Some(lt) = lifetime { + self.error_remove_borrow_lifetime(span, lt.ident.span); + } + Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) + } + + fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) { + self.struct_span_err(span, "borrow expressions cannot be annotated with lifetimes") + .span_label(lt_span, "annotated with lifetime here") + .span_suggestion( + lt_span, + "remove the lifetime annotation", + String::new(), + Applicability::MachineApplicable, + ) + .emit(); } /// Parse `mut?` or `raw [ const | mut ]`. @@ -665,20 +734,11 @@ impl<'a> Parser<'a> { expr.map(|mut expr| { attrs.extend::<Vec<_>>(expr.attrs.into()); expr.attrs = attrs; - self.error_attr_on_if_expr(&expr); expr }) }) } - fn error_attr_on_if_expr(&self, expr: &Expr) { - if let (ExprKind::If(..), [a0, ..]) = (&expr.kind, &*expr.attrs) { - // Just point to the first attribute in there... 
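The address comparison in `parse_and_disallow_postfix_after_cast` above works because every expression lives behind a pointer (`P<Expr>`), so a node that was passed through unchanged keeps its heap allocation. A minimal standalone sketch of the same detection idea, using `Box` in place of `P` (the helper name is illustrative, not compiler code):

```rust
// Detect whether a pass-through transformation produced a new node by
// comparing heap addresses before and after. Moving a `Box` moves only the
// pointer, so an untouched value keeps the same address.
fn bump_if_odd(n: Box<i32>) -> Box<i32> {
    if *n % 2 != 0 { Box::new(*n + 1) } else { n }
}

fn main() {
    let before = Box::new(4);
    let addr_before = &*before as *const i32 as usize;
    let after = bump_if_odd(before);
    let changed = addr_before != &*after as *const i32 as usize;
    assert!(!changed); // the even input came back unchanged, same allocation
}
```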
- self.struct_span_err(a0.span, "attributes are not yet allowed on `if` expressions") - .emit(); - } - } - fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { loop { if self.eat(&token::Question) { @@ -703,7 +763,7 @@ impl<'a> Parser<'a> { } fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> { - match self.normalized_token.kind { + match self.token.uninterpolate().kind { token::Ident(..) => self.parse_dot_suffix(base, lo), token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => { Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix)) @@ -797,7 +857,7 @@ impl<'a> Parser<'a> { /// Assuming we have just parsed `.`, continue parsing into an expression. fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> { - if self.normalized_token.span.rust_2018() && self.eat_keyword(kw::Await) { + if self.token.uninterpolated_span().rust_2018() && self.eat_keyword(kw::Await) { return self.mk_await_expr(self_arg, lo); } @@ -860,14 +920,23 @@ impl<'a> Parser<'a> { } else if self.eat_lt() { let (qself, path) = self.parse_qpath(PathStyle::Expr)?; Ok(self.mk_expr(lo.to(path.span), ExprKind::Path(Some(qself), path), attrs)) - } else if self.token.is_path_start() { + } else if self.check_path() { self.parse_path_start_expr(attrs) } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) { self.parse_closure_expr(attrs) } else if self.eat_keyword(kw::If) { self.parse_if_expr(attrs) - } else if self.eat_keyword(kw::For) { - self.parse_for_expr(None, self.prev_token.span, attrs) + } else if self.check_keyword(kw::For) { + if self.choose_generics_over_qpath(1) { + // NOTE(Centril, eddyb): DO NOT REMOVE! Beyond providing parser recovery, + // this is an insurance policy in case we allow qpaths in (tuple-)struct patterns. + // When `for <Foo as Bar>::Proj in $expr $block` is wanted, + // you can disambiguate in favor of a pattern with `(...)`. + self.recover_quantified_closure_expr(attrs) + } else { + assert!(self.eat_keyword(kw::For)); + self.parse_for_expr(None, self.prev_token.span, attrs) + } } else if self.eat_keyword(kw::While) { self.parse_while_expr(None, self.prev_token.span, attrs) } else if let Some(label) = self.eat_label() { @@ -911,7 +980,7 @@ impl<'a> Parser<'a> { // | ^ expected expression self.bump(); Ok(self.mk_expr_err(self.token.span)) - } else if self.normalized_token.span.rust_2018() { + } else if self.token.uninterpolated_span().rust_2018() { // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly. if self.check_keyword(kw::Async) { if self.is_async_block() { @@ -937,7 +1006,7 @@ impl<'a> Parser<'a> { let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal), attrs); self.maybe_recover_from_bad_qpath(expr, true) } - None => return Err(self.expected_expression_found()), + None => Err(self.expected_expression_found()), } } @@ -955,7 +1024,7 @@ impl<'a> Parser<'a> { }; let kind = if es.len() == 1 && !trailing_comma { // `(e)` is parenthesized `e`. - ExprKind::Paren(es.into_iter().nth(0).unwrap()) + ExprKind::Paren(es.into_iter().next().unwrap()) } else { // `(e,)` is a tuple with only one field, `e`. ExprKind::Tup(es) @@ -1006,12 +1075,12 @@ impl<'a> Parser<'a> { // `!`, as an operator, is prefix, so we know this isn't that. 
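Several hunks in this file replace reads of the eagerly maintained `normalized_token` with on-demand calls like `self.token.uninterpolate()` and `uninterpolated_span()`. A minimal sketch of that lazy pattern with toy tokens (the enum and helper are hypothetical stand-ins, not rustc's `Token` API):

```rust
use std::borrow::Cow;

#[derive(Clone, PartialEq, Debug)]
enum Tok {
    Ident(String),
    Comma,
    // Stand-in for an interpolated `$i:ident` metavariable.
    Interpolated(Box<Tok>),
}

// Unwrap interpolation only when asked, borrowing when nothing changes.
fn uninterpolate(t: &Tok) -> Cow<'_, Tok> {
    match t {
        Tok::Interpolated(inner) => Cow::Owned((**inner).clone()),
        _ => Cow::Borrowed(t),
    }
}

fn main() {
    let wrapped = Tok::Interpolated(Box::new(Tok::Ident("x".into())));
    assert_eq!(*uninterpolate(&wrapped), Tok::Ident("x".into()));
    // Plain tokens come back by reference, with no clone.
    assert!(matches!(uninterpolate(&Tok::Comma), Cow::Borrowed(_)));
}
```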
let (hi, kind) = if self.eat(&token::Not) { // MACRO INVOCATION expression - let mac = Mac { + let mac = MacCall { path, args: self.parse_mac_args()?, prior_type_ascription: self.last_type_ascription, }; - (self.prev_token.span, ExprKind::Mac(mac)) + (self.prev_token.span, ExprKind::MacCall(mac)) } else if self.check(&token::OpenDelim(token::Brace)) { if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) { return expr; @@ -1026,26 +1095,44 @@ impl<'a> Parser<'a> { self.maybe_recover_from_bad_qpath(expr, true) } + /// Parse `'label: $expr`. The label is already parsed. fn parse_labeled_expr(&mut self, label: Label, attrs: AttrVec) -> PResult<'a, P<Expr>> { let lo = label.ident.span; - self.expect(&token::Colon)?; - if self.eat_keyword(kw::While) { - return self.parse_while_expr(Some(label), lo, attrs); - } - if self.eat_keyword(kw::For) { - return self.parse_for_expr(Some(label), lo, attrs); - } - if self.eat_keyword(kw::Loop) { - return self.parse_loop_expr(Some(label), lo, attrs); - } - if self.token == token::OpenDelim(token::Brace) { - return self.parse_block_expr(Some(label), lo, BlockCheckMode::Default, attrs); + let label = Some(label); + let ate_colon = self.eat(&token::Colon); + let expr = if self.eat_keyword(kw::While) { + self.parse_while_expr(label, lo, attrs) + } else if self.eat_keyword(kw::For) { + self.parse_for_expr(label, lo, attrs) + } else if self.eat_keyword(kw::Loop) { + self.parse_loop_expr(label, lo, attrs) + } else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() { + self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs) + } else { + let msg = "expected `while`, `for`, `loop` or `{` after a label"; + self.struct_span_err(self.token.span, msg).span_label(self.token.span, msg).emit(); + // Continue as an expression in an effort to recover on `'label: non_block_expr`. + self.parse_expr() + }?; + + if !ate_colon { + self.error_labeled_expr_must_be_followed_by_colon(lo, expr.span); } - let msg = "expected `while`, `for`, `loop` or `{` after a label"; - self.struct_span_err(self.token.span, msg).span_label(self.token.span, msg).emit(); - // Continue as an expression in an effort to recover on `'label: non_block_expr`. - self.parse_expr() + Ok(expr) + } + + fn error_labeled_expr_must_be_followed_by_colon(&self, lo: Span, span: Span) { + self.struct_span_err(span, "labeled expression must be followed by `:`") + .span_label(lo, "the label") + .span_suggestion_short( + lo.shrink_to_hi(), + "add `:` after the label", + ": ".to_string(), + Applicability::MachineApplicable, + ) + .note("labels are used before loops and blocks, allowing e.g., `break 'label` to them") + .emit(); } /// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead. @@ -1297,6 +1384,7 @@ impl<'a> Parser<'a> { } /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). + /// Keep this in sync with `Token::can_begin_literal_maybe_minus`. 
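The doc comment above gives the whole grammar, `'-' lit | lit`; it is small enough to sketch against a toy token slice (the types here are illustrative, not the parser's own):

```rust
enum Tok { Minus, Num(i64) }

// Parse `'-' lit | lit` from the front of a token slice, returning the
// signed value and the number of tokens consumed.
fn literal_maybe_minus(toks: &[Tok]) -> Option<(i64, usize)> {
    match toks {
        [Tok::Minus, Tok::Num(n), ..] => Some((-n, 2)),
        [Tok::Num(n), ..] => Some((*n, 1)),
        _ => None,
    }
}

fn main() {
    assert_eq!(literal_maybe_minus(&[Tok::Minus, Tok::Num(5)]), Some((-5, 2)));
    assert_eq!(literal_maybe_minus(&[Tok::Num(7), Tok::Minus]), Some((7, 1)));
    assert_eq!(literal_maybe_minus(&[Tok::Minus]), None);
}
```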
pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); @@ -1322,21 +1410,43 @@ impl<'a> Parser<'a> { opt_label: Option<Label>, lo: Span, blk_mode: BlockCheckMode, - outer_attrs: AttrVec, + mut attrs: AttrVec, ) -> PResult<'a, P<Expr>> { if let Some(label) = opt_label { self.sess.gated_spans.gate(sym::label_break_value, label.ident.span); } - self.expect(&token::OpenDelim(token::Brace))?; - - let mut attrs = outer_attrs; - attrs.extend(self.parse_inner_attributes()?); + if self.token.is_whole_block() { + self.struct_span_err(self.token.span, "cannot use a `block` macro fragment here") + .span_label(lo.to(self.token.span), "the `block` fragment is within this context") + .emit(); + } - let blk = self.parse_block_tail(lo, blk_mode)?; + let (inner_attrs, blk) = self.parse_block_common(lo, blk_mode)?; + attrs.extend(inner_attrs); Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs)) } + /// Recover on an explicitly quantified closure expression, e.g., `for<'a> |x: &'a u8| *x + 1`. + fn recover_quantified_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> { + let lo = self.token.span; + let _ = self.parse_late_bound_lifetime_defs()?; + let span_for = lo.to(self.prev_token.span); + let closure = self.parse_closure_expr(attrs)?; + + self.struct_span_err(span_for, "cannot introduce explicit parameters for a closure") + .span_label(closure.span, "the parameters are attached to this closure") + .span_suggestion( + span_for, + "remove the parameters", + String::new(), + Applicability::MachineApplicable, + ) + .emit(); + + Ok(self.mk_expr_err(lo.to(closure.span))) + } + /// Parses a closure expression (e.g., `move |args| expr`). fn parse_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> { let lo = self.token.span; @@ -1344,11 +1454,14 @@ impl<'a> Parser<'a> { let movability = if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable }; - let asyncness = - if self.normalized_token.span.rust_2018() { self.parse_asyncness() } else { Async::No }; - if asyncness.is_async() { + let asyncness = if self.token.uninterpolated_span().rust_2018() { + self.parse_asyncness() + } else { + Async::No + }; + if let Async::Yes { span, .. } = asyncness { // Feature-gate `async ||` closures. - self.sess.gated_spans.gate(sym::async_closure, self.normalized_prev_token.span); + self.sess.gated_spans.gate(sym::async_closure, span); } let capture_clause = self.parse_capture_clause(); @@ -1373,7 +1486,7 @@ impl<'a> Parser<'a> { )) } - /// Parses an optional `move` prefix to a closure lke construct. + /// Parses an optional `move` prefix to a closure-like construct. fn parse_capture_clause(&mut self) -> CaptureBy { if self.eat_keyword(kw::Move) { CaptureBy::Value } else { CaptureBy::Ref } } @@ -1432,13 +1545,16 @@ impl<'a> Parser<'a> { let thn = if self.eat_keyword(kw::Else) || !cond.returns() { self.error_missing_if_cond(lo, cond.span) } else { + let attrs = self.parse_outer_attributes()?; // For recovery. let not_block = self.token != token::OpenDelim(token::Brace); - self.parse_block().map_err(|mut err| { + let block = self.parse_block().map_err(|mut err| { if not_block { err.span_label(lo, "this `if` expression has a condition, but no block"); } err - })? + })?; + self.error_on_if_block_attrs(lo, false, block.span, &attrs); + block }; let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) 
} else { None }; Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els), attrs)) @@ -1480,12 +1596,40 @@ impl<'a> Parser<'a> { /// Parses an `else { ... }` expression (`else` token already eaten). fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> { - if self.eat_keyword(kw::If) { - self.parse_if_expr(AttrVec::new()) + let ctx_span = self.prev_token.span; // `else` + let attrs = self.parse_outer_attributes()?; // For recovery. + let expr = if self.eat_keyword(kw::If) { + self.parse_if_expr(AttrVec::new())? } else { let blk = self.parse_block()?; - Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new())) - } + self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()) + }; + self.error_on_if_block_attrs(ctx_span, true, expr.span, &attrs); + Ok(expr) + } + + fn error_on_if_block_attrs( + &self, + ctx_span: Span, + is_ctx_else: bool, + branch_span: Span, + attrs: &[ast::Attribute], + ) { + let (span, last) = match attrs { + [] => return, + [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span), + }; + let ctx = if is_ctx_else { "else" } else { "if" }; + self.struct_span_err(last, "outer attributes are not allowed on `if` and `else` branches") + .span_label(branch_span, "the attributes are attached to this branch") + .span_label(ctx_span, format!("the branch belongs to this `{}`", ctx)) + .span_suggestion( + span, + "remove the attributes", + String::new(), + Applicability::MachineApplicable, + ) + .emit(); } /// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten). @@ -1600,7 +1744,7 @@ impl<'a> Parser<'a> { } let hi = self.token.span; self.bump(); - return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs)); + Ok(self.mk_expr(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs)) } pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> { @@ -1704,7 +1848,7 @@ impl<'a> Parser<'a> { fn is_try_block(&self) -> bool { self.token.is_keyword(kw::Try) && self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) && - self.normalized_token.span.rust_2018() && + self.token.uninterpolated_span().rust_2018() && // Prevent `while try {} {}`, `if try {} {} else {}`, etc. !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL) } @@ -1854,20 +1998,23 @@ impl<'a> Parser<'a> { /// Use in case of error after field-looking code: `S { foo: () with a }`. fn find_struct_error_after_field_looking_code(&self) -> Option<Field> { - if let token::Ident(name, _) = self.normalized_token.kind { - if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) { - return Some(ast::Field { - ident: Ident::new(name, self.normalized_token.span), + match self.token.ident() { + Some((ident, is_raw)) + if (is_raw || !ident.is_reserved()) + && self.look_ahead(1, |t| *t == token::Colon) => + { + Some(ast::Field { + ident, span: self.token.span, expr: self.mk_expr_err(self.token.span), is_shorthand: false, attrs: AttrVec::new(), id: DUMMY_NODE_ID, is_placeholder: false, - }); + }) } + _ => None, } - None } fn recover_struct_comma_after_dotdot(&mut self, span: Span) { diff --git a/src/librustc_parse/parser/generics.rs b/src/librustc_parse/parser/generics.rs index 59fd5f7c4be..3442c5081c1 100644 --- a/src/librustc_parse/parser/generics.rs +++ b/src/librustc_parse/parser/generics.rs @@ -181,7 +181,7 @@ impl<'a> Parser<'a> { // We are considering adding generics to the `where` keyword as an alternative higher-rank // parameter syntax (as in `where<'a>` or `where<T>`. 
To avoid that being a breaking // change we parse those generics now, but report an error. - if self.choose_generics_over_qpath() { + if self.choose_generics_over_qpath(0) { let generics = self.parse_generics()?; self.struct_span_err( generics.span, @@ -257,7 +257,7 @@ impl<'a> Parser<'a> { } } - pub(super) fn choose_generics_over_qpath(&self) -> bool { + pub(super) fn choose_generics_over_qpath(&self, start: usize) -> bool { // There's an ambiguity between generic parameters and qualified paths in impls. // If we see `<` it may start both, so we have to inspect some following tokens. // The following combinations can only start generics, @@ -274,15 +274,12 @@ impl<'a> Parser<'a> { // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`) // because this is what almost always expected in practice, qualified paths in impls // (`impl <Type>::AssocTy { ... }`) aren't even allowed by type checker at the moment. - self.token == token::Lt - && (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) - || self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) - && self.look_ahead(2, |t| { - t == &token::Gt - || t == &token::Comma - || t == &token::Colon - || t == &token::Eq + self.look_ahead(start, |t| t == &token::Lt) + && (self.look_ahead(start + 1, |t| t == &token::Pound || t == &token::Gt) + || self.look_ahead(start + 1, |t| t.is_lifetime() || t.is_ident()) + && self.look_ahead(start + 2, |t| { + matches!(t.kind, token::Gt | token::Comma | token::Colon | token::Eq) }) - || self.is_keyword_ahead(1, &[kw::Const])) + || self.is_keyword_ahead(start + 1, &[kw::Const])) } } diff --git a/src/librustc_parse/parser/item.rs b/src/librustc_parse/parser/item.rs index 9bca1d09901..798eb85f36f 100644 --- a/src/librustc_parse/parser/item.rs +++ b/src/librustc_parse/parser/item.rs @@ -5,17 +5,14 @@ use super::{FollowedByType, Parser, PathStyle}; use crate::maybe_whole; use rustc_ast::ast::{self, AttrStyle, AttrVec, Attribute, Ident, DUMMY_NODE_ID}; -use rustc_ast::ast::{AssocItem, AssocItemKind, ForeignItemKind, Item, ItemKind}; -use rustc_ast::ast::{ - Async, Const, Defaultness, IsAuto, PathSegment, Unsafe, UseTree, UseTreeKind, -}; -use rustc_ast::ast::{ - BindingMode, Block, FnDecl, FnSig, Mac, MacArgs, MacDelimiter, Param, SelfKind, -}; +use rustc_ast::ast::{AssocItem, AssocItemKind, ForeignItemKind, Item, ItemKind, Mod}; +use rustc_ast::ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind}; +use rustc_ast::ast::{BindingMode, Block, FnDecl, FnSig, Param, SelfKind}; use rustc_ast::ast::{EnumDef, Generics, StructField, TraitRef, Ty, TyKind, Variant, VariantData}; -use rustc_ast::ast::{FnHeader, ForeignItem, Mutability, Visibility, VisibilityKind}; +use rustc_ast::ast::{FnHeader, ForeignItem, PathSegment, Visibility, VisibilityKind}; +use rustc_ast::ast::{MacArgs, MacCall, MacDelimiter}; use rustc_ast::ptr::P; -use rustc_ast::token; +use rustc_ast::token::{self, TokenKind}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; use rustc_errors::{struct_span_err, Applicability, PResult, StashKey}; @@ -24,8 +21,64 @@ use rustc_span::source_map::{self, Span}; use rustc_span::symbol::{kw, sym, Symbol}; use log::debug; +use std::convert::TryFrom; use std::mem; +impl<'a> Parser<'a> { + /// Parses a source module as a crate. This is the main entry point for the parser. 
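`choose_generics_over_qpath` now takes a `start` offset so the same token probe can run either at the cursor or one token past a `for` keyword (as in `recover_quantified_closure_expr`). A standalone sketch of offset-based lookahead over hypothetical toy tokens:

```rust
#[derive(PartialEq)]
enum Tok { Lt, Gt, Ident, Comma, For, Other }

struct Cursor { toks: Vec<Tok>, pos: usize }

impl Cursor {
    fn look_ahead(&self, dist: usize, f: impl FnOnce(&Tok) -> bool) -> bool {
        self.toks.get(self.pos + dist).map_or(false, f)
    }

    // Every probe is shifted by `start`, so a caller can ask whether the
    // tokens *after* the next one look like generic parameters.
    fn looks_like_generics(&self, start: usize) -> bool {
        self.look_ahead(start, |t| *t == Tok::Lt)
            && (self.look_ahead(start + 1, |t| *t == Tok::Gt)
                || self.look_ahead(start + 1, |t| *t == Tok::Ident)
                    && self.look_ahead(start + 2, |t| matches!(t, Tok::Gt | Tok::Comma)))
    }
}

fn main() {
    // `for <T>` : probe for generics starting one token past the cursor.
    let c = Cursor { toks: vec![Tok::For, Tok::Lt, Tok::Ident, Tok::Gt, Tok::Other], pos: 0 };
    assert!(c.looks_like_generics(1));
    assert!(!c.looks_like_generics(0)); // the cursor itself sits on `for`

    // `<T, U>` : the old zero-offset behaviour still works.
    let g = Cursor { toks: vec![Tok::Lt, Tok::Ident, Tok::Comma, Tok::Ident, Tok::Gt], pos: 0 };
    assert!(g.looks_like_generics(0));
}
```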
+ pub fn parse_crate_mod(&mut self) -> PResult<'a, ast::Crate> { + let lo = self.token.span; + let (module, attrs) = self.parse_mod(&token::Eof)?; + let span = lo.to(self.token.span); + let proc_macros = Vec::new(); // Filled in by `proc_macro_harness::inject()`. + Ok(ast::Crate { attrs, module, span, proc_macros }) + } + + /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. + fn parse_item_mod(&mut self, attrs: &mut Vec<Attribute>) -> PResult<'a, ItemInfo> { + let id = self.parse_ident()?; + let (module, mut inner_attrs) = if self.eat(&token::Semi) { + Default::default() + } else { + self.expect(&token::OpenDelim(token::Brace))?; + self.parse_mod(&token::CloseDelim(token::Brace))? + }; + attrs.append(&mut inner_attrs); + Ok((id, ItemKind::Mod(module))) + } + + /// Parses the contents of a module (inner attributes followed by module items). + pub fn parse_mod(&mut self, term: &TokenKind) -> PResult<'a, (Mod, Vec<Attribute>)> { + let lo = self.token.span; + let attrs = self.parse_inner_attributes()?; + let module = self.parse_mod_items(term, lo)?; + Ok((module, attrs)) + } + + /// Given a termination token, parses all of the items in a module. + fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> { + let mut items = vec![]; + while let Some(item) = self.parse_item()? { + items.push(item); + self.maybe_consume_incorrect_semicolon(&items); + } + + if !self.eat(term) { + let token_str = super::token_descr(&self.token); + if !self.maybe_consume_incorrect_semicolon(&items) { + let msg = &format!("expected item, found {}", token_str); + let mut err = self.struct_span_err(self.token.span, msg); + err.span_label(self.token.span, "expected item"); + return Err(err); + } + } + + let hi = if self.token.span.is_dummy() { inner_lo } else { self.prev_token.span }; + + Ok(Mod { inner: inner_lo.to(hi), items, inline: true }) + } +} + pub(super) type ItemInfo = (Ident, ItemKind); impl<'a> Parser<'a> { @@ -217,9 +270,9 @@ impl<'a> Parser<'a> { } else if vis.node.is_pub() && self.isnt_macro_invocation() { self.recover_missing_kw_before_item()?; return Ok(None); - } else if macros_allowed && self.token.is_path_start() { + } else if macros_allowed && self.check_path() { // MACRO INVOCATION ITEM - (Ident::invalid(), ItemKind::Mac(self.parse_item_macro(vis)?)) + (Ident::invalid(), ItemKind::MacCall(self.parse_item_macro(vis)?)) } else { return Ok(None); }; @@ -261,7 +314,7 @@ impl<'a> Parser<'a> { " struct ".into(), Applicability::MaybeIncorrect, // speculative ); - return Err(err); + Err(err) } else if self.look_ahead(1, |t| *t == token::OpenDelim(token::Paren)) { let ident = self.parse_ident().unwrap(); self.bump(); // `(` @@ -309,7 +362,7 @@ impl<'a> Parser<'a> { ); } } - return Err(err); + Err(err) } else if self.look_ahead(1, |t| *t == token::Lt) { let ident = self.parse_ident().unwrap(); self.eat_to_tokens(&[&token::Gt]); @@ -331,28 +384,27 @@ impl<'a> Parser<'a> { Applicability::MachineApplicable, ); } - return Err(err); + Err(err) } else { Ok(()) } } /// Parses an item macro, e.g., `item!();`. - fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, Mac> { + fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> { let path = self.parse_path(PathStyle::Mod)?; // `foo::bar` self.expect(&token::Not)?; // `!` let args = self.parse_mac_args()?; // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`. 
self.eat_semi_for_macro_if_needed(&args); self.complain_if_pub_macro(vis, false); - Ok(Mac { path, args, prior_type_ascription: self.last_type_ascription }) + Ok(MacCall { path, args, prior_type_ascription: self.last_type_ascription }) } /// Recover if we parsed attributes and expected an item but there was none. fn recover_attrs_no_item(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> { let (start, end) = match attrs { [] => return Ok(()), - [x0] => (x0, x0), - [x0, .., xn] => (x0, xn), + [x0 @ xn] | [x0, .., xn] => (x0, xn), }; let msg = if end.is_doc_comment() { "expected item after doc comment" @@ -373,6 +425,16 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw::Async) && self.is_keyword_ahead(1, &[kw::Fn]) } + fn parse_polarity(&mut self) -> ast::ImplPolarity { + // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type. + if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) { + self.bump(); // `!` + ast::ImplPolarity::Negative(self.prev_token.span) + } else { + ast::ImplPolarity::Positive + } + } + /// Parses an implementation item. /// /// ``` @@ -396,7 +458,7 @@ impl<'a> Parser<'a> { self.expect_keyword(kw::Impl)?; // First, parse generic parameters if necessary. - let mut generics = if self.choose_generics_over_qpath() { + let mut generics = if self.choose_generics_over_qpath(0) { self.parse_generics()? } else { let mut generics = Generics::default(); @@ -411,13 +473,7 @@ impl<'a> Parser<'a> { self.sess.gated_spans.gate(sym::const_trait_impl, span); } - // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type. - let polarity = if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) { - self.bump(); // `!` - ast::ImplPolarity::Negative - } else { - ast::ImplPolarity::Positive - }; + let polarity = self.parse_polarity(); // Parse both types and traits as a type, then reinterpret if necessary. let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span)); @@ -573,7 +629,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As)) { self.bump(); // `default` - Defaultness::Default(self.normalized_prev_token.span) + Defaultness::Default(self.prev_token.uninterpolated_span()) } else { Defaultness::Final } @@ -650,16 +706,16 @@ impl<'a> Parser<'a> { /// Parses associated items. 
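`recover_attrs_no_item` above now collapses the one-attribute and many-attribute cases into the single arm `[x0 @ xn] | [x0, .., xn]`. The same slice-pattern trick in isolation (a plain helper, not parser code):

```rust
// Bind the first and last elements of a non-empty slice in one match arm.
// `[x0 @ xn]` covers the single-element case by giving that element both
// names (`@` followed by a binding needs Rust 1.56+ in user code); the
// second alternative covers slices of length two or more.
fn first_and_last<T>(items: &[T]) -> Option<(&T, &T)> {
    match items {
        [] => None,
        [x0 @ xn] | [x0, .., xn] => Some((x0, xn)),
    }
}

fn main() {
    assert_eq!(first_and_last::<i32>(&[]), None);
    assert_eq!(first_and_last(&[1]), Some((&1, &1)));
    assert_eq!(first_and_last(&[1, 2, 3]), Some((&1, &3)));
}
```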
fn parse_assoc_item(&mut self, req_name: ReqName) -> PResult<'a, Option<Option<P<AssocItem>>>> { Ok(self.parse_item_(req_name)?.map(|Item { attrs, id, span, vis, ident, kind, tokens }| { - let kind = match kind { - ItemKind::Mac(a) => AssocItemKind::Macro(a), - ItemKind::Fn(a, b, c, d) => AssocItemKind::Fn(a, b, c, d), - ItemKind::TyAlias(a, b, c, d) => AssocItemKind::TyAlias(a, b, c, d), - ItemKind::Const(a, b, c) => AssocItemKind::Const(a, b, c), - ItemKind::Static(a, _, b) => { - self.struct_span_err(span, "associated `static` items are not allowed").emit(); - AssocItemKind::Const(Defaultness::Final, a, b) - } - _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"), + let kind = match AssocItemKind::try_from(kind) { + Ok(kind) => kind, + Err(kind) => match kind { + ItemKind::Static(a, _, b) => { + self.struct_span_err(span, "associated `static` items are not allowed") + .emit(); + AssocItemKind::Const(Defaultness::Final, a, b) + } + _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"), + }, }; Some(P(Item { attrs, id, span, vis, ident, kind, tokens })) })) @@ -749,10 +805,10 @@ impl<'a> Parser<'a> { } fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { - match self.normalized_token.kind { - token::Ident(name @ kw::Underscore, false) => { + match self.token.ident() { + Some((ident @ Ident { name: kw::Underscore, .. }, false)) => { self.bump(); - Ok(Ident::new(name, self.normalized_prev_token.span)) + Ok(ident) } _ => self.parse_ident(), } @@ -836,26 +892,27 @@ impl<'a> Parser<'a> { /// Parses a foreign item (one in an `extern { ... }` block). pub fn parse_foreign_item(&mut self) -> PResult<'a, Option<Option<P<ForeignItem>>>> { Ok(self.parse_item_(|_| true)?.map(|Item { attrs, id, span, vis, ident, kind, tokens }| { - let kind = match kind { - ItemKind::Mac(a) => ForeignItemKind::Macro(a), - ItemKind::Fn(a, b, c, d) => ForeignItemKind::Fn(a, b, c, d), - ItemKind::TyAlias(a, b, c, d) => ForeignItemKind::TyAlias(a, b, c, d), - ItemKind::Static(a, b, c) => ForeignItemKind::Static(a, b, c), - ItemKind::Const(_, a, b) => { - self.error_on_foreign_const(span, ident); - ForeignItemKind::Static(a, Mutability::Not, b) - } - _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"), + let kind = match ForeignItemKind::try_from(kind) { + Ok(kind) => kind, + Err(kind) => match kind { + ItemKind::Const(_, a, b) => { + self.error_on_foreign_const(span, ident); + ForeignItemKind::Static(a, Mutability::Not, b) + } + _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"), + }, }; Some(P(Item { attrs, id, span, vis, ident, kind, tokens })) })) } fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &str) -> Option<T> { - let span = self.sess.source_map().def_span(span); - let msg = format!("{} is not supported in {}", kind.descr(), ctx); - self.struct_span_err(span, &msg).emit(); - return None; + let span = self.sess.source_map().guess_head_span(span); + let descr = kind.descr(); + self.struct_span_err(span, &format!("{} is not supported in {}", descr, ctx)) + .help(&format!("consider moving the {} out to a nearby module scope", descr)) + .emit(); + None } fn error_on_foreign_const(&self, span: Span, ident: Ident) { @@ -1261,7 +1318,7 @@ impl<'a> Parser<'a> { }; self.sess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span)); - Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, legacy: false }))) + Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false }))) } /// Is this unambiguously the 
start of a `macro_rules! foo` item defnition? @@ -1271,7 +1328,7 @@ impl<'a> Parser<'a> { && self.look_ahead(2, |t| t.is_ident()) } - /// Parses a legacy `macro_rules! foo { ... }` declarative macro. + /// Parses a `macro_rules! foo { ... }` declarative macro. fn parse_item_macro_rules(&mut self, vis: &Visibility) -> PResult<'a, ItemInfo> { self.expect_keyword(kw::MacroRules)?; // `macro_rules` self.expect(&token::Not)?; // `!` @@ -1281,7 +1338,7 @@ impl<'a> Parser<'a> { self.eat_semi_for_macro_if_needed(&body); self.complain_if_pub_macro(vis, true); - Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, legacy: true }))) + Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: true }))) } /// Item macro invocations or `macro_rules!` definitions need inherited visibility. @@ -1411,30 +1468,35 @@ impl<'a> Parser<'a> { /// This can either be `;` when there's no body, /// or e.g. a block when the function is a provided one. fn parse_fn_body(&mut self, attrs: &mut Vec<Attribute>) -> PResult<'a, Option<P<Block>>> { - let (inner_attrs, body) = match self.token.kind { - token::Semi => { - self.bump(); - (Vec::new(), None) - } - token::OpenDelim(token::Brace) => { - let (attrs, body) = self.parse_inner_attrs_and_block()?; - (attrs, Some(body)) - } - token::Interpolated(ref nt) => match **nt { - token::NtBlock(..) => { - let (attrs, body) = self.parse_inner_attrs_and_block()?; - (attrs, Some(body)) - } - _ => return self.expected_semi_or_open_brace(), - }, - _ => return self.expected_semi_or_open_brace(), + let (inner_attrs, body) = if self.check(&token::Semi) { + self.bump(); // `;` + (Vec::new(), None) + } else if self.check(&token::OpenDelim(token::Brace)) || self.token.is_whole_block() { + self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))? + } else if self.token.kind == token::Eq { + // Recover `fn foo() = $expr;`. + self.bump(); // `=` + let eq_sp = self.prev_token.span; + let _ = self.parse_expr()?; + self.expect_semi()?; // `;` + let span = eq_sp.to(self.prev_token.span); + self.struct_span_err(span, "function body cannot be `= expression;`") + .multipart_suggestion( + "surround the expression with `{` and `}` instead of `=` and `;`", + vec![(eq_sp, "{".to_string()), (self.prev_token.span, " }".to_string())], + Applicability::MachineApplicable, + ) + .emit(); + (Vec::new(), Some(self.mk_block_err(span))) + } else { + return self.expected_semi_or_open_brace(); }; attrs.extend(inner_attrs); Ok(body) } /// Is the current token the start of an `FnHeader` / not a valid parse? - fn check_fn_front_matter(&mut self) -> bool { + pub(super) fn check_fn_front_matter(&mut self) -> bool { // We use an over-approximation here. // `const const`, `fn const` won't parse, but we're not stepping over other syntax either. const QUALS: [Symbol; 4] = [kw::Const, kw::Async, kw::Unsafe, kw::Extern]; @@ -1449,7 +1511,7 @@ impl<'a> Parser<'a> { }) // `extern ABI fn` || self.check_keyword(kw::Extern) - && self.look_ahead(1, |t| t.can_begin_literal_or_bool()) + && self.look_ahead(1, |t| t.can_begin_literal_maybe_minus()) && self.look_ahead(2, |t| t.is_keyword(kw::Fn)) } @@ -1461,7 +1523,7 @@ impl<'a> Parser<'a> { /// FnQual = "const"? "async"? "unsafe"? Extern? ; /// FnFrontMatter = FnQual? 
"fn" ; /// ``` - fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> { + pub(super) fn parse_fn_front_matter(&mut self) -> PResult<'a, FnHeader> { let constness = self.parse_constness(); let asyncness = self.parse_asyncness(); let unsafety = self.parse_unsafety(); @@ -1544,7 +1606,7 @@ impl<'a> Parser<'a> { let is_name_required = match self.token.kind { token::DotDotDot => false, - _ => req_name(self.normalized_token.span.edition()), + _ => req_name(self.token.span.edition()), }; let (pat, ty) = if is_name_required || self.is_named_param() { debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required); @@ -1609,15 +1671,12 @@ impl<'a> Parser<'a> { /// Returns the parsed optional self parameter and whether a self shortcut was used. fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> { // Extract an identifier *after* having confirmed that the token is one. - let expect_self_ident = |this: &mut Self| { - match this.normalized_token.kind { - // Preserve hygienic context. - token::Ident(name, _) => { - this.bump(); - Ident::new(name, this.normalized_prev_token.span) - } - _ => unreachable!(), + let expect_self_ident = |this: &mut Self| match this.token.ident() { + Some((ident, false)) => { + this.bump(); + ident } + _ => unreachable!(), }; // Is `self` `n` tokens ahead? let is_isolated_self = |this: &Self, n| { @@ -1651,7 +1710,7 @@ impl<'a> Parser<'a> { // Only a limited set of initial token sequences is considered `self` parameters; anything // else is parsed as a normal function parameter list, so some lookahead is required. let eself_lo = self.token.span; - let (eself, eself_ident, eself_hi) = match self.normalized_token.kind { + let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind { token::BinOp(token::And) => { let eself = if is_isolated_self(self, 1) { // `&self` diff --git a/src/librustc_parse/parser/mod.rs b/src/librustc_parse/parser/mod.rs index 74101fef8e3..b987813e38d 100644 --- a/src/librustc_parse/parser/mod.rs +++ b/src/librustc_parse/parser/mod.rs @@ -1,8 +1,6 @@ pub mod attr; mod expr; mod item; -mod module; -pub use module::{ModulePath, ModulePathSuccess}; mod pat; mod path; mod ty; @@ -13,7 +11,6 @@ mod stmt; use diagnostics::Error; use crate::lexer::UnmatchedBrace; -use crate::{Directory, DirectoryOwnership}; use log::debug; use rustc_ast::ast::DUMMY_NODE_ID; @@ -28,11 +25,9 @@ use rustc_ast::util::comments::{doc_comment_style, strip_doc_comment_decoration} use rustc_ast_pretty::pprust; use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError, PResult}; use rustc_session::parse::ParseSess; -use rustc_span::source_map::respan; +use rustc_span::source_map::{respan, Span, DUMMY_SP}; use rustc_span::symbol::{kw, sym, Symbol}; -use rustc_span::{FileName, Span, DUMMY_SP}; -use std::path::PathBuf; use std::{cmp, mem, slice}; bitflags::bitflags! { @@ -88,37 +83,14 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// The current non-normalized token. + /// The current token. pub token: Token, - /// The current normalized token. - /// "Normalized" means that some interpolated tokens - /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced - /// with non-interpolated identifier and lifetime tokens they refer to. - /// Use this if you need to check for `token::Ident` or `token::Lifetime` specifically, - /// this also includes edition checks for edition-specific keyword identifiers. 
- pub normalized_token: Token, - /// The previous non-normalized token. + /// The previous token. pub prev_token: Token, - /// The previous normalized token. - /// Use this if you need to check for `token::Ident` or `token::Lifetime` specifically, - /// this also includes edition checks for edition-specific keyword identifiers. - pub normalized_prev_token: Token, restrictions: Restrictions, - /// Used to determine the path to externally loaded source files. - pub(super) directory: Directory, - /// `true` to parse sub-modules in other files. - // Public for rustfmt usage. - pub recurse_into_file_modules: bool, - /// Name of the root module this parser originated from. If `None`, then the - /// name is not known. This does not change while the parser is descending - /// into modules, and sub-parsers have new values for this name. - pub root_module_name: Option<String>, expected_tokens: Vec<TokenType>, token_cursor: TokenCursor, desugar_doc_comments: bool, - /// `true` we should configure out of line modules as we parse. - // Public for rustfmt usage. - pub cfg_mods: bool, /// This field is used to keep track of how many left angle brackets we have seen. This is /// required in order to detect extra leading left angle brackets (`<` characters) and error /// appropriately. @@ -366,31 +338,20 @@ impl<'a> Parser<'a> { pub fn new( sess: &'a ParseSess, tokens: TokenStream, - directory: Option<Directory>, - recurse_into_file_modules: bool, desugar_doc_comments: bool, subparser_name: Option<&'static str>, ) -> Self { let mut parser = Parser { sess, token: Token::dummy(), - normalized_token: Token::dummy(), prev_token: Token::dummy(), - normalized_prev_token: Token::dummy(), restrictions: Restrictions::empty(), - recurse_into_file_modules, - directory: Directory { - path: PathBuf::new(), - ownership: DirectoryOwnership::Owned { relative: None }, - }, - root_module_name: None, expected_tokens: Vec::new(), token_cursor: TokenCursor { frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens), stack: Vec::new(), }, desugar_doc_comments, - cfg_mods: true, unmatched_angle_bracket_count: 0, max_angle_bracket_count: 0, unclosed_delims: Vec::new(), @@ -402,18 +363,6 @@ impl<'a> Parser<'a> { // Make parser point to the first token. parser.bump(); - if let Some(directory) = directory { - parser.directory = directory; - } else if !parser.token.span.is_dummy() { - if let Some(FileName::Real(path)) = - &sess.source_map().lookup_char_pos(parser.token.span.lo()).file.unmapped_path - { - if let Some(directory_path) = path.parent() { - parser.directory.path = directory_path.to_path_buf(); - } - } - } - parser } @@ -480,9 +429,9 @@ impl<'a> Parser<'a> { } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { - match self.normalized_token.kind { - token::Ident(name, _) => { - if self.token.is_reserved_ident() { + match self.token.ident() { + Some((ident, is_raw)) => { + if !is_raw && ident.is_reserved() { let mut err = self.expected_ident_found(); if recover { err.emit(); @@ -491,7 +440,7 @@ impl<'a> Parser<'a> { } } self.bump(); - Ok(Ident::new(name, self.normalized_prev_token.span)) + Ok(ident) } _ => Err(match self.prev_token.kind { TokenKind::DocComment(..) 
=> { @@ -609,7 +558,7 @@ impl<'a> Parser<'a> { Some((first, second)) if first == expected => { let first_span = self.sess.source_map().start_point(self.token.span); let second_span = self.token.span.with_lo(first_span.hi()); - self.set_token(Token::new(first, first_span)); + self.token = Token::new(first, first_span); self.bump_with(Token::new(second, second_span)); true } @@ -817,23 +766,6 @@ impl<'a> Parser<'a> { self.parse_delim_comma_seq(token::Paren, f) } - // Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`) - // tokens are replaced with usual identifier and lifetime tokens, - // so the former are never encountered during normal parsing. - crate fn set_token(&mut self, token: Token) { - self.token = token; - self.normalized_token = match &self.token.kind { - token::Interpolated(nt) => match **nt { - token::NtIdent(ident, is_raw) => { - Token::new(token::Ident(ident.name, is_raw), ident.span) - } - token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span), - _ => self.token.clone(), - }, - _ => self.token.clone(), - } - } - /// Advance the parser by one token using provided token as the next one. fn bump_with(&mut self, next_token: Token) { // Bumping after EOF is a bad sign, usually an infinite loop. @@ -843,9 +775,7 @@ impl<'a> Parser<'a> { } // Update the current and previous tokens. - self.prev_token = self.token.take(); - self.normalized_prev_token = self.normalized_token.take(); - self.set_token(next_token); + self.prev_token = mem::replace(&mut self.token, next_token); // Diagnostics. self.expected_tokens.clear(); @@ -884,7 +814,7 @@ impl<'a> Parser<'a> { /// Parses asyncness: `async` or nothing. fn parse_asyncness(&mut self) -> Async { if self.eat_keyword(kw::Async) { - let span = self.normalized_prev_token.span; + let span = self.prev_token.uninterpolated_span(); Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID } } else { Async::No @@ -894,7 +824,7 @@ impl<'a> Parser<'a> { /// Parses unsafety: `unsafe` or nothing. fn parse_unsafety(&mut self) -> Unsafe { if self.eat_keyword(kw::Unsafe) { - Unsafe::Yes(self.normalized_prev_token.span) + Unsafe::Yes(self.prev_token.uninterpolated_span()) } else { Unsafe::No } @@ -903,7 +833,7 @@ impl<'a> Parser<'a> { /// Parses constness: `const` or nothing. 
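`bump_with` above now rotates tokens with a single `mem::replace`, moving the current token into `prev_token` while installing the next one, with no clone and no separate `take`. The idiom on its own (toy cursor, illustrative names only):

```rust
use std::mem;

struct Cursor { prev: String, cur: String }

impl Cursor {
    // Advance by one token: the old current value becomes `prev`,
    // and `next` becomes current, all in one move.
    fn bump(&mut self, next: String) {
        self.prev = mem::replace(&mut self.cur, next);
    }
}

fn main() {
    let mut c = Cursor { prev: String::new(), cur: "fn".to_string() };
    c.bump("main".to_string());
    assert_eq!((c.prev.as_str(), c.cur.as_str()), ("fn", "main"));
}
```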
fn parse_constness(&mut self) -> Const { if self.eat_keyword(kw::Const) { - Const::Yes(self.normalized_prev_token.span) + Const::Yes(self.prev_token.uninterpolated_span()) } else { Const::No } @@ -1005,7 +935,7 @@ impl<'a> Parser<'a> { &mut self.token_cursor.frame, self.token_cursor.stack.pop().unwrap(), ); - self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close)); + self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close); self.bump(); TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream) } diff --git a/src/librustc_parse/parser/module.rs b/src/librustc_parse/parser/module.rs deleted file mode 100644 index b436f1969bb..00000000000 --- a/src/librustc_parse/parser/module.rs +++ /dev/null @@ -1,306 +0,0 @@ -use super::diagnostics::Error; -use super::item::ItemInfo; -use super::Parser; - -use crate::{new_sub_parser_from_file, DirectoryOwnership}; - -use rustc_ast::ast::{self, Attribute, Crate, Ident, ItemKind, Mod}; -use rustc_ast::attr; -use rustc_ast::token::{self, TokenKind}; -use rustc_errors::PResult; -use rustc_span::source_map::{FileName, SourceMap, Span, DUMMY_SP}; -use rustc_span::symbol::sym; - -use std::path::{self, Path, PathBuf}; - -/// Information about the path to a module. -// Public for rustfmt usage. -pub struct ModulePath { - name: String, - path_exists: bool, - pub result: Result<ModulePathSuccess, Error>, -} - -// Public for rustfmt usage. -pub struct ModulePathSuccess { - pub path: PathBuf, - pub directory_ownership: DirectoryOwnership, -} - -impl<'a> Parser<'a> { - /// Parses a source module as a crate. This is the main entry point for the parser. - pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { - let lo = self.token.span; - let krate = Ok(ast::Crate { - attrs: self.parse_inner_attributes()?, - module: self.parse_mod_items(&token::Eof, lo)?, - span: lo.to(self.token.span), - // Filled in by proc_macro_harness::inject() - proc_macros: Vec::new(), - }); - krate - } - - /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. - pub(super) fn parse_item_mod(&mut self, attrs: &mut Vec<Attribute>) -> PResult<'a, ItemInfo> { - let in_cfg = crate::config::process_configure_mod(self.sess, self.cfg_mods, attrs); - - let id_span = self.token.span; - let id = self.parse_ident()?; - let (module, mut inner_attrs) = if self.eat(&token::Semi) { - if in_cfg && self.recurse_into_file_modules { - // This mod is in an external file. Let's go get it! - let ModulePathSuccess { path, directory_ownership } = - self.submod_path(id, &attrs, id_span)?; - self.eval_src_mod(path, directory_ownership, id.to_string(), id_span)? - } else { - (ast::Mod { inner: DUMMY_SP, items: Vec::new(), inline: false }, Vec::new()) - } - } else { - let old_directory = self.directory.clone(); - self.push_directory(id, &attrs); - - self.expect(&token::OpenDelim(token::Brace))?; - let mod_inner_lo = self.token.span; - let inner_attrs = self.parse_inner_attributes()?; - let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; - - self.directory = old_directory; - (module, inner_attrs) - }; - attrs.append(&mut inner_attrs); - Ok((id, ItemKind::Mod(module))) - } - - /// Given a termination token, parses all of the items in a module. - fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> { - let mut items = vec![]; - while let Some(item) = self.parse_item()? 
{ - items.push(item); - self.maybe_consume_incorrect_semicolon(&items); - } - - if !self.eat(term) { - let token_str = super::token_descr(&self.token); - if !self.maybe_consume_incorrect_semicolon(&items) { - let msg = &format!("expected item, found {}", token_str); - let mut err = self.struct_span_err(self.token.span, msg); - err.span_label(self.token.span, "expected item"); - return Err(err); - } - } - - let hi = if self.token.span.is_dummy() { inner_lo } else { self.prev_token.span }; - - Ok(Mod { inner: inner_lo.to(hi), items, inline: true }) - } - - fn submod_path( - &mut self, - id: ast::Ident, - outer_attrs: &[Attribute], - id_sp: Span, - ) -> PResult<'a, ModulePathSuccess> { - if let Some(path) = Parser::submod_path_from_attr(outer_attrs, &self.directory.path) { - return Ok(ModulePathSuccess { - directory_ownership: match path.file_name().and_then(|s| s.to_str()) { - // All `#[path]` files are treated as though they are a `mod.rs` file. - // This means that `mod foo;` declarations inside `#[path]`-included - // files are siblings, - // - // Note that this will produce weirdness when a file named `foo.rs` is - // `#[path]` included and contains a `mod foo;` declaration. - // If you encounter this, it's your own darn fault :P - Some(_) => DirectoryOwnership::Owned { relative: None }, - _ => DirectoryOwnership::UnownedViaMod, - }, - path, - }); - } - - let relative = match self.directory.ownership { - DirectoryOwnership::Owned { relative } => relative, - DirectoryOwnership::UnownedViaBlock | DirectoryOwnership::UnownedViaMod => None, - }; - let paths = - Parser::default_submod_path(id, relative, &self.directory.path, self.sess.source_map()); - - match self.directory.ownership { - DirectoryOwnership::Owned { .. } => { - paths.result.map_err(|err| self.span_fatal_err(id_sp, err)) - } - DirectoryOwnership::UnownedViaBlock => { - let msg = "Cannot declare a non-inline module inside a block \ - unless it has a path attribute"; - let mut err = self.struct_span_err(id_sp, msg); - if paths.path_exists { - let msg = format!( - "Maybe `use` the module `{}` instead of redeclaring it", - paths.name - ); - err.span_note(id_sp, &msg); - } - Err(err) - } - DirectoryOwnership::UnownedViaMod => { - let mut err = - self.struct_span_err(id_sp, "cannot declare a new module at this location"); - if !id_sp.is_dummy() { - let src_path = self.sess.source_map().span_to_filename(id_sp); - if let FileName::Real(src_path) = src_path { - if let Some(stem) = src_path.file_stem() { - let mut dest_path = src_path.clone(); - dest_path.set_file_name(stem); - dest_path.push("mod.rs"); - err.span_note( - id_sp, - &format!( - "maybe move this module `{}` to its own \ - directory via `{}`", - src_path.display(), - dest_path.display() - ), - ); - } - } - } - if paths.path_exists { - err.span_note( - id_sp, - &format!( - "... or maybe `use` the module `{}` instead \ - of possibly redeclaring it", - paths.name - ), - ); - } - Err(err) - } - } - } - - // Public for rustfmt usage. - pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { - if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) { - let s = s.as_str(); - - // On windows, the base path might have the form - // `\\?\foo\bar` in which case it does not tolerate - // mixed `/` and `\` separators, so canonicalize - // `/` to `\`. - #[cfg(windows)] - let s = s.replace("/", "\\"); - Some(dir_path.join(&*s)) - } else { - None - } - } - - /// Returns a path to a module. - // Public for rustfmt usage. 
- pub fn default_submod_path( - id: ast::Ident, - relative: Option<ast::Ident>, - dir_path: &Path, - source_map: &SourceMap, - ) -> ModulePath { - // If we're in a foo.rs file instead of a mod.rs file, - // we need to look for submodules in - // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than - // `./<id>.rs` and `./<id>/mod.rs`. - let relative_prefix_string; - let relative_prefix = if let Some(ident) = relative { - relative_prefix_string = format!("{}{}", ident.name, path::MAIN_SEPARATOR); - &relative_prefix_string - } else { - "" - }; - - let mod_name = id.name.to_string(); - let default_path_str = format!("{}{}.rs", relative_prefix, mod_name); - let secondary_path_str = - format!("{}{}{}mod.rs", relative_prefix, mod_name, path::MAIN_SEPARATOR); - let default_path = dir_path.join(&default_path_str); - let secondary_path = dir_path.join(&secondary_path_str); - let default_exists = source_map.file_exists(&default_path); - let secondary_exists = source_map.file_exists(&secondary_path); - - let result = match (default_exists, secondary_exists) { - (true, false) => Ok(ModulePathSuccess { - path: default_path, - directory_ownership: DirectoryOwnership::Owned { relative: Some(id) }, - }), - (false, true) => Ok(ModulePathSuccess { - path: secondary_path, - directory_ownership: DirectoryOwnership::Owned { relative: None }, - }), - (false, false) => Err(Error::FileNotFoundForModule { - mod_name: mod_name.clone(), - default_path: default_path_str, - secondary_path: secondary_path_str, - dir_path: dir_path.display().to_string(), - }), - (true, true) => Err(Error::DuplicatePaths { - mod_name: mod_name.clone(), - default_path: default_path_str, - secondary_path: secondary_path_str, - }), - }; - - ModulePath { name: mod_name, path_exists: default_exists || secondary_exists, result } - } - - /// Reads a module from a source file. - fn eval_src_mod( - &mut self, - path: PathBuf, - directory_ownership: DirectoryOwnership, - name: String, - id_sp: Span, - ) -> PResult<'a, (Mod, Vec<Attribute>)> { - let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut(); - if let Some(i) = included_mod_stack.iter().position(|p| *p == path) { - let mut err = String::from("circular modules: "); - let len = included_mod_stack.len(); - for p in &included_mod_stack[i..len] { - err.push_str(&p.to_string_lossy()); - err.push_str(" -> "); - } - err.push_str(&path.to_string_lossy()); - return Err(self.struct_span_err(id_sp, &err[..])); - } - included_mod_stack.push(path.clone()); - drop(included_mod_stack); - - let mut p0 = - new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp); - p0.cfg_mods = self.cfg_mods; - let mod_inner_lo = p0.token.span; - let mod_attrs = p0.parse_inner_attributes()?; - let mut m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; - m0.inline = false; - self.sess.included_mod_stack.borrow_mut().pop(); - Ok((m0, mod_attrs)) - } - - fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) { - if let Some(path) = attr::first_attr_value_str_by_name(attrs, sym::path) { - self.directory.path.push(&*path.as_str()); - self.directory.ownership = DirectoryOwnership::Owned { relative: None }; - } else { - // We have to push on the current module name in the case of relative - // paths in order to ensure that any additional module paths from inline - // `mod x { ... }` come after the relative extension. - // - // For example, a `mod z { ... }` inside `x/y.rs` should set the current - // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`. 
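The removed `default_submod_path` above encodes the `<name>.rs` vs `<name>/mod.rs` lookup, with an extra `parent/` prefix when the declaring file is itself `parent.rs` rather than a `mod.rs`. A standalone sketch of just the candidate computation (hypothetical helper, standard library only):

```rust
use std::path::{Path, PathBuf, MAIN_SEPARATOR};

// Compute the two candidate files for `mod <name>;` declared inside `dir`.
// `relative` is Some("parent") when the declaring file is `parent.rs`,
// so the candidates live under a `parent/` subdirectory.
fn submod_candidates(dir: &Path, name: &str, relative: Option<&str>) -> (PathBuf, PathBuf) {
    let prefix = relative.map(|r| format!("{}{}", r, MAIN_SEPARATOR)).unwrap_or_default();
    let default = dir.join(format!("{}{}.rs", prefix, name));
    let secondary = dir.join(format!("{}{}{}mod.rs", prefix, name, MAIN_SEPARATOR));
    (default, secondary)
}

fn main() {
    let (d, s) = submod_candidates(Path::new("src"), "foo", Some("outer"));
    // On Unix: "src/outer/foo.rs" and "src/outer/foo/mod.rs".
    println!("{} or {}", d.display(), s.display());
}
```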
- if let DirectoryOwnership::Owned { relative } = &mut self.directory.ownership { - if let Some(ident) = relative.take() { - // remove the relative offset - self.directory.path.push(&*ident.as_str()); - } - } - self.directory.path.push(&*id.as_str()); - } - } -} diff --git a/src/librustc_parse/parser/pat.rs b/src/librustc_parse/parser/pat.rs index 45d1aacdd3c..983aa43916f 100644 --- a/src/librustc_parse/parser/pat.rs +++ b/src/librustc_parse/parser/pat.rs @@ -1,9 +1,7 @@ use super::{Parser, PathStyle}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; -use rustc_ast::ast::{ - self, AttrVec, Attribute, FieldPat, Mac, Pat, PatKind, RangeEnd, RangeSyntax, -}; -use rustc_ast::ast::{BindingMode, Expr, ExprKind, Ident, Mutability, Path, QSelf}; +use rustc_ast::ast::{self, AttrVec, Attribute, FieldPat, MacCall, Pat, PatKind, RangeEnd}; +use rustc_ast::ast::{BindingMode, Expr, ExprKind, Ident, Mutability, Path, QSelf, RangeSyntax}; use rustc_ast::mut_visit::{noop_visit_mac, noop_visit_pat, MutVisitor}; use rustc_ast::ptr::P; use rustc_ast::token; @@ -151,7 +149,7 @@ impl<'a> Parser<'a> { /// Note that there are more tokens such as `@` for which we know that the `|` /// is an illegal parse. However, the user's intent is less clear in that case. fn recover_trailing_vert(&mut self, lo: Option<Span>) -> bool { - let is_end_ahead = self.look_ahead(1, |token| match &token.kind { + let is_end_ahead = self.look_ahead(1, |token| match &token.uninterpolate().kind { token::FatArrow // e.g. `a | => 0,`. | token::Ident(kw::If, false) // e.g. `a | if expr`. | token::Eq // e.g. `let a | = 0`. @@ -297,6 +295,8 @@ impl<'a> Parser<'a> { // A rest pattern `..`. self.bump(); // `..` PatKind::Rest + } else if self.check(&token::DotDotDot) && !self.is_pat_range_end_start(1) { + self.recover_dotdotdot_rest_pat(lo) } else if let Some(form) = self.parse_range_end() { self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`. } else if self.eat_keyword(kw::Underscore) { @@ -364,6 +364,25 @@ impl<'a> Parser<'a> { Ok(pat) } + /// Recover from a typoed `...` pattern that was encountered + /// Ref: Issue #70388 + fn recover_dotdotdot_rest_pat(&mut self, lo: Span) -> PatKind { + // A typoed rest pattern `...`. + self.bump(); // `...` + + // The user probably mistook `...` for a rest pattern `..`. + self.struct_span_err(lo, "unexpected `...`") + .span_label(lo, "not a valid pattern") + .span_suggestion_short( + lo, + "for a rest pattern, use `..` instead of `...`", + "..".to_owned(), + Applicability::MachineApplicable, + ) + .emit(); + PatKind::Rest + } + /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`. /// /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs` @@ -479,7 +498,7 @@ impl<'a> Parser<'a> { // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. 
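The comments above state the disambiguation rule: a single element with no trailing comma is a parenthesized pattern, everything else is a tuple (the expression parser applies the same rule to `(e)` vs `(e,)`). A toy version of the decision, ignoring the `..` rest-pattern special case and assuming the element list and trailing-comma flag were already parsed:

```rust
#[derive(Debug, PartialEq)]
enum Parsed { Paren(i32), Tuple(Vec<i32>) }

// One element and no trailing comma means plain parentheses; a trailing
// comma (or any other length) means a tuple.
fn classify(elems: Vec<i32>, trailing_comma: bool) -> Parsed {
    if elems.len() == 1 && !trailing_comma {
        Parsed::Paren(elems.into_iter().next().unwrap())
    } else {
        Parsed::Tuple(elems)
    }
}

fn main() {
    assert_eq!(classify(vec![1], false), Parsed::Paren(1));      // `(1)`
    assert_eq!(classify(vec![1], true), Parsed::Tuple(vec![1])); // `(1,)`
    assert_eq!(classify(vec![], false), Parsed::Tuple(vec![]));  // `()`
}
```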
         Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
-            PatKind::Paren(fields.into_iter().nth(0).unwrap())
+            PatKind::Paren(fields.into_iter().next().unwrap())
         } else {
             PatKind::Tuple(fields)
         })
@@ -540,7 +559,7 @@ impl<'a> Parser<'a> {
     fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
         struct AddMut(bool);
         impl MutVisitor for AddMut {
-            fn visit_mac(&mut self, mac: &mut Mac) {
+            fn visit_mac(&mut self, mac: &mut MacCall) {
                 noop_visit_mac(mac, self);
             }

@@ -597,8 +616,8 @@ impl<'a> Parser<'a> {
     fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
         self.bump();
         let args = self.parse_mac_args()?;
-        let mac = Mac { path, args, prior_type_ascription: self.last_type_ascription };
-        Ok(PatKind::Mac(mac))
+        let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription };
+        Ok(PatKind::MacCall(mac))
     }

     fn fatal_unexpected_non_pat(
@@ -698,13 +717,13 @@ impl<'a> Parser<'a> {
         self.look_ahead(dist, |t| {
             t.is_path_start() // e.g. `MY_CONST`;
             || t.kind == token::Dot // e.g. `.5` for recovery;
-            || t.can_begin_literal_or_bool() // e.g. `42`.
+            || t.can_begin_literal_maybe_minus() // e.g. `42`.
             || t.is_whole_expr()
         })
     }

     fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
-        if self.token.is_path_start() {
+        if self.check_path() {
             let lo = self.token.span;
             let (qself, path) = if self.eat_lt() {
                 // Parse a qualified path
@@ -920,7 +939,7 @@ impl<'a> Parser<'a> {
             }
             err.emit();
         }
-        return Ok((fields, etc));
+        Ok((fields, etc))
     }

     /// Recover on `...` as if it were `..` to avoid further errors.
diff --git a/src/librustc_parse/parser/path.rs b/src/librustc_parse/parser/path.rs
index 355b6429a74..9fa7bc027b8 100644
--- a/src/librustc_parse/parser/path.rs
+++ b/src/librustc_parse/parser/path.rs
@@ -1,12 +1,10 @@
 use super::ty::{AllowPlus, RecoverQPath};
 use super::{Parser, TokenType};
 use crate::maybe_whole;
-use rustc_ast::ast::{
-    self, AngleBracketedArgs, Ident, ParenthesizedArgs, Path, PathSegment, QSelf,
-};
-use rustc_ast::ast::{
-    AnonConst, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode, GenericArg,
-};
+use rustc_ast::ast::{self, AngleBracketedArg, AngleBracketedArgs, GenericArg, ParenthesizedArgs};
+use rustc_ast::ast::{AnonConst, AssocTyConstraint, AssocTyConstraintKind, BlockCheckMode};
+use rustc_ast::ast::{Ident, Path, PathSegment, QSelf};
+use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Token};
 use rustc_errors::{pluralize, Applicability, PResult};
 use rustc_span::source_map::{BytePos, Span};
@@ -218,11 +216,11 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         let args = if self.eat_lt() {
             // `<'a, T, A = U>`
-            let (args, constraints) =
-                self.parse_generic_args_with_leading_angle_bracket_recovery(style, lo)?;
+            let args =
+                self.parse_angle_args_with_leading_angle_bracket_recovery(style, lo)?;
             self.expect_gt()?;
             let span = lo.to(self.prev_token.span);
-            AngleBracketedArgs { args, constraints, span }.into()
+            AngleBracketedArgs { args, span }.into()
         } else {
             // `(T, U) -> R`
             let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
@@ -240,10 +238,10 @@ impl<'a> Parser<'a> {
     }

     pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
-        match self.normalized_token.kind {
-            token::Ident(name, _) if name.is_path_segment_keyword() => {
+        match self.token.ident() {
+            Some((ident, false)) if ident.is_path_segment_keyword() => {
                 self.bump();
-                Ok(Ident::new(name, self.normalized_prev_token.span))
+                Ok(ident)
             }
             _ => self.parse_ident(),
         }
@@ -251,18 +249,18 @@ impl<'a> Parser<'a> {

     /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
     /// For the purposes of understanding the parsing logic of generic arguments, this function
-    /// can be thought of being the same as just calling `self.parse_generic_args()` if the source
+    /// can be thought of being the same as just calling `self.parse_angle_args()` if the source
     /// had the correct amount of leading angle brackets.
     ///
     /// ```ignore (diagnostics)
     /// bar::<<<<T as Foo>::Output>();
     ///      ^^ help: remove extra angle brackets
     /// ```
-    fn parse_generic_args_with_leading_angle_bracket_recovery(
+    fn parse_angle_args_with_leading_angle_bracket_recovery(
         &mut self,
         style: PathStyle,
         lo: Span,
-    ) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
+    ) -> PResult<'a, Vec<AngleBracketedArg>> {
         // We need to detect whether there are extra leading left angle brackets and produce an
         // appropriate error and suggestion. This cannot be implemented by looking ahead at
         // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
@@ -337,8 +335,8 @@ impl<'a> Parser<'a> {
         let snapshot = if is_first_invocation { Some(self.clone()) } else { None };

         debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
-        match self.parse_generic_args() {
-            Ok(value) => Ok(value),
+        match self.parse_angle_args() {
+            Ok(args) => Ok(args),
             Err(ref mut e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
                 // Cancel error from being unable to find `>`. We know the error
                 // must have been this due to a non-zero unmatched angle bracket
@@ -381,110 +379,136 @@ impl<'a> Parser<'a> {
                     .emit();

                 // Try again without unmatched angle bracket characters.
-                self.parse_generic_args()
+                self.parse_angle_args()
             }
             Err(e) => Err(e),
         }
     }

-    /// Parses (possibly empty) list of lifetime and type arguments and associated type bindings,
+    /// Parses (possibly empty) list of generic arguments / associated item constraints,
     /// possibly including trailing comma.
-    fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<AssocTyConstraint>)> {
+    fn parse_angle_args(&mut self) -> PResult<'a, Vec<AngleBracketedArg>> {
         let mut args = Vec::new();
-        let mut constraints = Vec::new();
-        let mut misplaced_assoc_ty_constraints: Vec<Span> = Vec::new();
-        let mut assoc_ty_constraints: Vec<Span> = Vec::new();
-
-        let args_lo = self.token.span;
-
-        loop {
-            if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
-                // Parse lifetime argument.
-                args.push(GenericArg::Lifetime(self.expect_lifetime()));
-                misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
-            } else if self.check_ident()
-                && self.look_ahead(1, |t| t == &token::Eq || t == &token::Colon)
-            {
-                // Parse associated type constraint.
-                let lo = self.token.span;
-                let ident = self.parse_ident()?;
-                let kind = if self.eat(&token::Eq) {
-                    AssocTyConstraintKind::Equality { ty: self.parse_ty()? }
-                } else if self.eat(&token::Colon) {
-                    AssocTyConstraintKind::Bound {
-                        bounds: self.parse_generic_bounds(Some(self.prev_token.span))?,
-                    }
-                } else {
-                    unreachable!();
-                };
+        while let Some(arg) = self.parse_angle_arg()? {
+            args.push(arg);
+            if !self.eat(&token::Comma) {
+                break;
+            }
+        }
+        Ok(args)
+    }

-                let span = lo.to(self.prev_token.span);
+    /// Parses a single argument in the angle arguments `<...>` of a path segment.
+    fn parse_angle_arg(&mut self) -> PResult<'a, Option<AngleBracketedArg>> {
+        if self.check_ident() && self.look_ahead(1, |t| matches!(t.kind, token::Eq | token::Colon))
+        {
+            // Parse associated type constraint.
+            let lo = self.token.span;
+            let ident = self.parse_ident()?;
+            let kind = if self.eat(&token::Eq) {
+                let ty = self.parse_assoc_equality_term(ident, self.prev_token.span)?;
+                AssocTyConstraintKind::Equality { ty }
+            } else if self.eat(&token::Colon) {
+                let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?;
+                AssocTyConstraintKind::Bound { bounds }
+            } else {
+                unreachable!();
+            };

-                // Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
-                if let AssocTyConstraintKind::Bound { .. } = kind {
-                    self.sess.gated_spans.gate(sym::associated_type_bounds, span);
-                }
+            let span = lo.to(self.prev_token.span);

-                constraints.push(AssocTyConstraint { id: ast::DUMMY_NODE_ID, ident, kind, span });
-                assoc_ty_constraints.push(span);
-            } else if self.check_const_arg() {
-                // Parse const argument.
-                let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
-                    self.parse_block_expr(
-                        None,
-                        self.token.span,
-                        BlockCheckMode::Default,
-                        ast::AttrVec::new(),
-                    )?
-                } else if self.token.is_ident() {
-                    // FIXME(const_generics): to distinguish between idents for types and consts,
-                    // we should introduce a GenericArg::Ident in the AST and distinguish when
-                    // lowering to the HIR. For now, idents for const args are not permitted.
-                    if self.token.is_bool_lit() {
-                        self.parse_literal_maybe_minus()?
-                    } else {
-                        let span = self.token.span;
-                        let msg = "identifiers may currently not be used for const generics";
-                        self.struct_span_err(span, msg).emit();
-                        let block = self.mk_block_err(span);
-                        self.mk_expr(span, ast::ExprKind::Block(block, None), ast::AttrVec::new())
-                    }
-                } else {
-                    self.parse_literal_maybe_minus()?
-                };
-                let value = AnonConst { id: ast::DUMMY_NODE_ID, value: expr };
-                args.push(GenericArg::Const(value));
-                misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
-            } else if self.check_type() {
-                // Parse type argument.
-                args.push(GenericArg::Type(self.parse_ty()?));
-                misplaced_assoc_ty_constraints.append(&mut assoc_ty_constraints);
-            } else {
-                break;
+            // Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
+            if let AssocTyConstraintKind::Bound { .. } = kind {
+                self.sess.gated_spans.gate(sym::associated_type_bounds, span);
             }

-            if !self.eat(&token::Comma) {
-                break;
-            }
+            let constraint = AssocTyConstraint { id: ast::DUMMY_NODE_ID, ident, kind, span };
+            Ok(Some(AngleBracketedArg::Constraint(constraint)))
+        } else {
+            Ok(self.parse_generic_arg()?.map(AngleBracketedArg::Arg))
         }
+    }

-        // FIXME: we would like to report this in ast_validation instead, but we currently do not
-        // preserve ordering of generic parameters with respect to associated type binding, so we
-        // lose that information after parsing.
-        if !misplaced_assoc_ty_constraints.is_empty() {
-            let mut err = self.struct_span_err(
-                args_lo.to(self.prev_token.span),
-                "associated type bindings must be declared after generic parameters",
-            );
-            for span in misplaced_assoc_ty_constraints {
-                err.span_label(
-                    span,
-                    "this associated type binding should be moved after the generic parameters",
-                );
+    /// Parse the term to the right of an associated item equality constraint.
+    /// That is, parse `<term>` in `Item = <term>`.
+    /// Right now, this only admits types in `<term>`.
+    fn parse_assoc_equality_term(&mut self, ident: Ident, eq: Span) -> PResult<'a, P<ast::Ty>> {
+        let arg = self.parse_generic_arg()?;
+        let span = ident.span.to(self.prev_token.span);
+        match arg {
+            Some(GenericArg::Type(ty)) => return Ok(ty),
+            Some(GenericArg::Const(expr)) => {
+                self.struct_span_err(span, "cannot constrain an associated constant to a value")
+                    .span_label(ident.span, "this associated constant...")
+                    .span_label(expr.value.span, "...cannot be constrained to this value")
+                    .emit();
+            }
+            Some(GenericArg::Lifetime(lt)) => {
+                self.struct_span_err(span, "associated lifetimes are not supported")
+                    .span_label(lt.ident.span, "the lifetime is given here")
+                    .help("if you meant to specify a trait object, write `dyn Trait + 'lifetime`")
+                    .emit();
+            }
+            None => {
+                let after_eq = eq.shrink_to_hi();
+                let before_next = self.token.span.shrink_to_lo();
+                self.struct_span_err(after_eq.to(before_next), "missing type to the right of `=`")
+                    .span_suggestion(
+                        self.sess.source_map().next_point(eq).to(before_next),
+                        "to constrain the associated type, add a type after `=`",
+                        " TheType".to_string(),
+                        Applicability::HasPlaceholders,
+                    )
+                    .span_suggestion(
+                        eq.to(before_next),
+                        &format!("remove the `=` if `{}` is a type", ident),
+                        String::new(),
+                        Applicability::MaybeIncorrect,
+                    )
+                    .emit();
+            }
-        }
+        }
+        Ok(self.mk_ty(span, ast::TyKind::Err))
+    }

-        Ok((args, constraints))
+    /// Parse a generic argument in a path segment.
+    /// This does not include constraints, e.g., `Item = u8`, which is handled in `parse_angle_arg`.
+    fn parse_generic_arg(&mut self) -> PResult<'a, Option<GenericArg>> {
+        let arg = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
+            // Parse lifetime argument.
+            GenericArg::Lifetime(self.expect_lifetime())
+        } else if self.check_const_arg() {
+            // Parse const argument.
+            let expr = if let token::OpenDelim(token::Brace) = self.token.kind {
+                self.parse_block_expr(
+                    None,
+                    self.token.span,
+                    BlockCheckMode::Default,
+                    ast::AttrVec::new(),
+                )?
+            } else if self.token.is_ident() {
+                // FIXME(const_generics): to distinguish between idents for types and consts,
+                // we should introduce a GenericArg::Ident in the AST and distinguish when
+                // lowering to the HIR. For now, idents for const args are not permitted.
+                if self.token.is_bool_lit() {
+                    self.parse_literal_maybe_minus()?
+                } else {
+                    let span = self.token.span;
+                    let msg = "identifiers may currently not be used for const generics";
+                    self.struct_span_err(span, msg).emit();
+                    let block = self.mk_block_err(span);
+                    self.mk_expr(span, ast::ExprKind::Block(block, None), ast::AttrVec::new())
+                }
+            } else {
+                self.parse_literal_maybe_minus()?
+            };
+            GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value: expr })
+        } else if self.check_type() {
+            // Parse type argument.
+            GenericArg::Type(self.parse_ty()?)
+        } else {
+            return Ok(None);
+        };
+        Ok(Some(arg))
     }
 }
diff --git a/src/librustc_parse/parser/stmt.rs b/src/librustc_parse/parser/stmt.rs
index 3864ec3aaa1..b3764d2d47b 100644
--- a/src/librustc_parse/parser/stmt.rs
+++ b/src/librustc_parse/parser/stmt.rs
@@ -1,13 +1,13 @@
+use super::attr::DEFAULT_INNER_ATTR_FORBIDDEN;
 use super::diagnostics::Error;
 use super::expr::LhsExpr;
 use super::pat::GateOr;
 use super::path::PathStyle;
 use super::{BlockMode, Parser, Restrictions, SemiColonMode};
 use crate::maybe_whole;
-use crate::DirectoryOwnership;
 use rustc_ast::ast;
-use rustc_ast::ast::{AttrStyle, AttrVec, Attribute, Mac, MacStmtStyle};
+use rustc_ast::ast::{AttrStyle, AttrVec, Attribute, MacCall, MacStmtStyle};
 use rustc_ast::ast::{Block, BlockCheckMode, Expr, ExprKind, Local, Stmt, StmtKind, DUMMY_NODE_ID};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, TokenKind};
@@ -47,16 +47,13 @@ impl<'a> Parser<'a> {
             self.bump(); // `var`
             let msg = "write `let` instead of `var` to introduce a new variable";
             self.recover_stmt_local(lo, attrs.into(), msg, "let")?
-        } else if self.token.is_path_start()
-            && !self.token.is_qpath_start()
-            && !self.is_path_start_item()
-        {
+        } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
             // We have avoided contextual keywords like `union`, items with `crate` visibility,
             // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
             // that starts like a path (1 token), but it fact not a path.
             // Also, we avoid stealing syntax from `parse_item_`.
             self.parse_stmt_path_start(lo, attrs)?
-        } else if let Some(item) = self.parse_stmt_item(attrs.clone())? {
+        } else if let Some(item) = self.parse_item_common(attrs.clone(), false, true, |_| true)? {
             // FIXME: Bad copy of attrs
             self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
         } else if self.eat(&token::Semi) {
@@ -74,13 +71,6 @@ impl<'a> Parser<'a> {
         Ok(Some(stmt))
     }

-    fn parse_stmt_item(&mut self, attrs: Vec<Attribute>) -> PResult<'a, Option<ast::Item>> {
-        let old = mem::replace(&mut self.directory.ownership, DirectoryOwnership::UnownedViaBlock);
-        let item = self.parse_item_common(attrs, false, true, |_| true)?;
-        self.directory.ownership = old;
-        Ok(item)
-    }
-
     fn parse_stmt_path_start(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, Stmt> {
         let path = self.parse_path(PathStyle::Expr)?;

@@ -112,14 +102,14 @@ impl<'a> Parser<'a> {
         let style =
             if delim == token::Brace { MacStmtStyle::Braces } else { MacStmtStyle::NoBraces };

-        let mac = Mac { path, args, prior_type_ascription: self.last_type_ascription };
+        let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription };

         let kind =
             if delim == token::Brace || self.token == token::Semi || self.token == token::Eof {
-                StmtKind::Mac(P((mac, style, attrs)))
+                StmtKind::MacCall(P((mac, style, attrs)))
             } else {
                 // Since none of the above applied, this is an expression statement macro.
-                let e = self.mk_expr(lo.to(hi), ExprKind::Mac(mac), AttrVec::new());
+                let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
                 let e = self.maybe_recover_from_bad_qpath(e, true)?;
                 let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
                 let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
@@ -175,9 +165,9 @@ impl<'a> Parser<'a> {
                 // Rewind to before attempting to parse the type and continue parsing.
                 let parser_snapshot_after_type = self.clone();
                 mem::replace(self, parser_snapshot_before_type);
-
-                let snippet = self.span_to_snippet(pat.span).unwrap();
-                err.span_label(pat.span, format!("while parsing the type for `{}`", snippet));
+                if let Ok(snip) = self.span_to_snippet(pat.span) {
+                    err.span_label(pat.span, format!("while parsing the type for `{}`", snip));
+                }
                 (Some((parser_snapshot_after_type, colon_sp, err)), None)
             }
         }
@@ -227,26 +217,16 @@ impl<'a> Parser<'a> {

     /// Parses the RHS of a local variable declaration (e.g., '= 14;').
     fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
-        if self.eat(&token::Eq) {
-            Ok(Some(self.parse_expr()?))
-        } else if skip_eq {
-            Ok(Some(self.parse_expr()?))
-        } else {
-            Ok(None)
-        }
+        if self.eat(&token::Eq) || skip_eq { Ok(Some(self.parse_expr()?)) } else { Ok(None) }
     }

     /// Parses a block. No inner attributes are allowed.
     pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
-        maybe_whole!(self, NtBlock, |x| x);
-
-        let lo = self.token.span;
-
-        if !self.eat(&token::OpenDelim(token::Brace)) {
-            return self.error_block_no_opening_brace();
+        let (attrs, block) = self.parse_inner_attrs_and_block()?;
+        if let [.., last] = &*attrs {
+            self.error_on_forbidden_inner_attr(last.span, DEFAULT_INNER_ATTR_FORBIDDEN);
         }
-
-        self.parse_block_tail(lo, BlockCheckMode::Default)
+        Ok(block)
     }

     fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
@@ -262,16 +242,14 @@ impl<'a> Parser<'a> {
         //
         // which is valid in other languages, but not Rust.
         match self.parse_stmt_without_recovery() {
-            Ok(Some(stmt)) => {
+            // If the next token is an open brace (e.g., `if a b {`), the place-
+            // inside-a-block suggestion would be more likely wrong than right.
+            Ok(Some(_))
                 if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace))
-                    || do_not_suggest_help
-                {
-                    // If the next token is an open brace (e.g., `if a b {`), the place-
-                    // inside-a-block suggestion would be more likely wrong than right.
-                    e.span_label(sp, "expected `{`");
-                    return Err(e);
-                }
-                let stmt_span = if self.eat(&token::Semi) {
+                    || do_not_suggest_help => {}
+            Ok(Some(stmt)) => {
+                let stmt_own_line = self.sess.source_map().is_line_before_span_empty(sp);
+                let stmt_span = if stmt_own_line && self.eat(&token::Semi) {
                     // Expand the span to include the semicolon.
                     stmt.span.with_hi(self.prev_token.span.hi())
                 } else {
@@ -294,27 +272,34 @@ impl<'a> Parser<'a> {
             _ => {}
         }
         e.span_label(sp, "expected `{`");
-        return Err(e);
+        Err(e)
     }

     /// Parses a block. Inner attributes are allowed.
     pub(super) fn parse_inner_attrs_and_block(
         &mut self,
     ) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
+        self.parse_block_common(self.token.span, BlockCheckMode::Default)
+    }
+
+    /// Parses a block. Inner attributes are allowed.
+    pub(super) fn parse_block_common(
+        &mut self,
+        lo: Span,
+        blk_mode: BlockCheckMode,
+    ) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (Vec::new(), x));

-        let lo = self.token.span;
-        self.expect(&token::OpenDelim(token::Brace))?;
-        Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, BlockCheckMode::Default)?))
+        if !self.eat(&token::OpenDelim(token::Brace)) {
+            return self.error_block_no_opening_brace();
+        }
+
+        Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, blk_mode)?))
     }

     /// Parses the rest of a block expression or function body.
     /// Precondition: already parsed the '{'.
-    pub(super) fn parse_block_tail(
-        &mut self,
-        lo: Span,
-        s: BlockCheckMode,
-    ) -> PResult<'a, P<Block>> {
+    fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
         let mut stmts = vec![];
         while !self.eat(&token::CloseDelim(token::Brace)) {
             if self.token == token::Eof {
diff --git a/src/librustc_parse/parser/ty.rs b/src/librustc_parse/parser/ty.rs
index 3d2b0c014ac..a6015504a32 100644
--- a/src/librustc_parse/parser/ty.rs
+++ b/src/librustc_parse/parser/ty.rs
@@ -3,10 +3,8 @@ use super::{Parser, PathStyle, TokenType};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};

 use rustc_ast::ast::{self, BareFnTy, FnRetTy, GenericParam, Lifetime, MutTy, Ty, TyKind};
-use rustc_ast::ast::{
-    GenericBound, GenericBounds, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax,
-};
-use rustc_ast::ast::{Mac, Mutability};
+use rustc_ast::ast::{GenericBound, GenericBounds, MacCall, Mutability};
+use rustc_ast::ast::{PolyTraitRef, TraitBoundModifier, TraitObjectSyntax};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Token, TokenKind};
 use rustc_errors::{pluralize, struct_span_err, Applicability, PResult};
@@ -129,37 +127,33 @@ impl<'a> Parser<'a> {
         } else if self.eat_keyword(kw::Underscore) {
             // A type to be inferred `_`
             TyKind::Infer
-        } else if self.token_is_bare_fn_keyword() {
+        } else if self.check_fn_front_matter() {
             // Function pointer type
-            self.parse_ty_bare_fn(Vec::new())?
+            self.parse_ty_bare_fn(lo, Vec::new())?
         } else if self.check_keyword(kw::For) {
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
-            if self.token_is_bare_fn_keyword() {
-                self.parse_ty_bare_fn(lifetime_defs)?
+            if self.check_fn_front_matter() {
+                self.parse_ty_bare_fn(lo, lifetime_defs)?
             } else {
                 let path = self.parse_path(PathStyle::Type)?;
                 let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
-                self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
+                self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
             }
         } else if self.eat_keyword(kw::Impl) {
             self.parse_impl_ty(&mut impl_dyn_multi)?
         } else if self.is_explicit_dyn_type() {
             self.parse_dyn_ty(&mut impl_dyn_multi)?
-        } else if self.check(&token::Question)
-            || self.check_lifetime() && self.look_ahead(1, |t| t.is_like_plus())
-        {
-            // Bound list (trait object type)
-            let bounds = self.parse_generic_bounds_common(allow_plus, None)?;
-            TyKind::TraitObject(bounds, TraitObjectSyntax::None)
         } else if self.eat_lt() {
             // Qualified path
             let (qself, path) = self.parse_qpath(PathStyle::Type)?;
             TyKind::Path(Some(qself), path)
-        } else if self.token.is_path_start() {
+        } else if self.check_path() {
             self.parse_path_start_ty(lo, allow_plus)?
+        } else if self.can_begin_bound() {
+            self.parse_bare_trait_object(lo, allow_plus)?
         } else if self.eat(&token::DotDotDot) {
             if allow_c_variadic == AllowCVariadic::Yes {
                 TyKind::CVarArgs
@@ -198,26 +192,17 @@ impl<'a> Parser<'a> {
         })?;

         if ts.len() == 1 && !trailing {
-            let ty = ts.into_iter().nth(0).unwrap().into_inner();
+            let ty = ts.into_iter().next().unwrap().into_inner();
             let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus();
             match ty.kind {
                 // `(TY_BOUND_NOPAREN) + BOUND + ...`.
                 TyKind::Path(None, path) if maybe_bounds => {
-                    self.parse_remaining_bounds(Vec::new(), path, lo, true)
+                    self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
                 }
-                TyKind::TraitObject(mut bounds, TraitObjectSyntax::None)
+                TyKind::TraitObject(bounds, TraitObjectSyntax::None)
                     if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
                 {
-                    let path = match bounds.remove(0) {
-                        GenericBound::Trait(pt, ..) => pt.trait_ref.path,
-                        GenericBound::Outlives(..) => {
-                            return Err(self.struct_span_err(
-                                ty.span,
-                                "expected trait bound, not lifetime bound",
-                            ));
-                        }
-                    };
-                    self.parse_remaining_bounds(Vec::new(), path, lo, true)
+                    self.parse_remaining_bounds(bounds, true)
                 }
                 // `(TYPE)`
                 _ => Ok(TyKind::Paren(P(ty))),
@@ -227,18 +212,35 @@ impl<'a> Parser<'a> {
         }
     }

-    fn parse_remaining_bounds(
+    fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
+        let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
+        let bounds = self.parse_generic_bounds_common(allow_plus, None)?;
+        if lt_no_plus {
+            self.struct_span_err(lo, "lifetime in trait object type must be followed by `+`").emit()
+        }
+        Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
+    }
+
+    fn parse_remaining_bounds_path(
         &mut self,
         generic_params: Vec<GenericParam>,
         path: ast::Path,
         lo: Span,
         parse_plus: bool,
     ) -> PResult<'a, TyKind> {
-        assert_ne!(self.token, token::Question);
-
         let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_token.span));
-        let mut bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
-        if parse_plus {
+        let bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
+        self.parse_remaining_bounds(bounds, parse_plus)
+    }
+
+    /// Parse the remainder of a bare trait object type given an already parsed list.
+    fn parse_remaining_bounds(
+        &mut self,
+        mut bounds: GenericBounds,
+        plus: bool,
+    ) -> PResult<'a, TyKind> {
+        assert_ne!(self.token, token::Question);
+        if plus {
             self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
             bounds.append(&mut self.parse_generic_bounds(Some(self.prev_token.span))?);
         }
@@ -289,13 +291,6 @@ impl<'a> Parser<'a> {
         Ok(TyKind::Typeof(expr))
     }

-    /// Is the current token one of the keywords that signals a bare function type?
-    fn token_is_bare_fn_keyword(&mut self) -> bool {
-        self.check_keyword(kw::Fn)
-            || self.check_keyword(kw::Unsafe)
-            || self.check_keyword(kw::Extern)
-    }
-
     /// Parses a function pointer type (`TyKind::BareFn`).
     /// ```
     ///    [unsafe] [extern "ABI"] fn (S) -> T
@@ -304,12 +299,31 @@ impl<'a> Parser<'a> {
     ///       |               |    |    Return type
     /// Function Style    ABI  Parameter types
     /// ```
-    fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
-        let unsafety = self.parse_unsafety();
-        let ext = self.parse_extern()?;
-        self.expect_keyword(kw::Fn)?;
+    /// We actually parse `FnHeader FnDecl`, but we error on `const` and `async` qualifiers.
+    fn parse_ty_bare_fn(&mut self, lo: Span, params: Vec<GenericParam>) -> PResult<'a, TyKind> {
+        let ast::FnHeader { ext, unsafety, constness, asyncness } = self.parse_fn_front_matter()?;
         let decl = self.parse_fn_decl(|_| false, AllowPlus::No)?;
-        Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params, decl })))
+        let whole_span = lo.to(self.prev_token.span);
+        if let ast::Const::Yes(span) = constness {
+            self.error_fn_ptr_bad_qualifier(whole_span, span, "const");
+        }
+        if let ast::Async::Yes { span, .. } = asyncness {
+            self.error_fn_ptr_bad_qualifier(whole_span, span, "async");
+        }
+        Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl })))
+    }
+
+    /// Emit an error for the given bad function pointer qualifier.
+    fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
+        self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
+            .span_label(qual_span, format!("`{}` because of this", qual))
+            .span_suggestion_short(
+                qual_span,
+                &format!("remove the `{}` qualifier", qual),
+                String::new(),
+                Applicability::MaybeIncorrect,
+            )
+            .emit();
     }

     /// Parses an `impl B0 + ... + Bn` type.
@@ -323,7 +337,7 @@ impl<'a> Parser<'a> {
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
         self.check_keyword(kw::Dyn)
-            && (self.normalized_token.span.rust_2018()
+            && (self.token.uninterpolated_span().rust_2018()
                 || self.look_ahead(1, |t| {
                     t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t)
                 }))
@@ -351,14 +365,14 @@ impl<'a> Parser<'a> {
         let path = self.parse_path(PathStyle::Type)?;
         if self.eat(&token::Not) {
             // Macro invocation in type position
-            Ok(TyKind::Mac(Mac {
+            Ok(TyKind::MacCall(MacCall {
                 path,
                 args: self.parse_mac_args()?,
                 prior_type_ascription: self.last_type_ascription,
             }))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {
             // `Trait1 + Trait2 + 'a`
-            self.parse_remaining_bounds(Vec::new(), path, lo, true)
+            self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
         } else {
             // Just a type path.
             Ok(TyKind::Path(None, path))
diff --git a/src/librustc_parse/validate_attr.rs b/src/librustc_parse/validate_attr.rs
index 029aa5ed2ba..2512878ec65 100644
--- a/src/librustc_parse/validate_attr.rs
+++ b/src/librustc_parse/validate_attr.rs
@@ -57,7 +57,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
     })
 }

-crate fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: MacDelimiter, msg: &str) {
+pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: MacDelimiter, msg: &str) {
     if let ast::MacDelimiter::Parenthesis = delim {
         return;
     }
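
For illustration (not part of the patch): with the `parse_fn_front_matter` + `error_fn_ptr_bad_qualifier` handling added to `parse_ty_bare_fn` above, a qualified function pointer type is parsed and then rejected with a targeted message instead of a generic parse error. A minimal sketch of source that would hit these paths, assuming the error text quoted in the diff:

    // Hypothetical inputs, written only to exercise the new diagnostics.
    type ConstFnPtr = const fn() -> u8;  // error: an `fn` pointer type cannot be `const`
    type AsyncFnPtr = async fn() -> u8;  // error: an `fn` pointer type cannot be `async`

In both cases the suggestion attached to the qualifier span proposes removing the offending keyword.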
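Similarly for illustration (not part of the patch): `parse_assoc_equality_term` in path.rs only accepts a type to the right of `=` in an associated item constraint and reports dedicated errors for the other generic-argument kinds it can now see. A sketch of inputs that would trigger the messages quoted in the diff, assuming they surface as written:

    // Hypothetical inputs, written only to exercise the new diagnostics.
    fn f(_: impl Iterator<Item = >) {}         // error: missing type to the right of `=`
    fn g(_: impl Iterator<Item = 'static>) {}  // error: associated lifetimes are not supported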
