From 460bf55f8a649a7f19680df2ac67dbeb936f8700 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Mon, 6 Mar 2017 06:45:28 +0000 Subject: Cleanup. --- src/libsyntax/parse/parser.rs | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 6446d38e5ef..9872afd27b7 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -891,7 +891,7 @@ impl<'a> Parser<'a> { self.parse_seq_to_before_tokens(kets, SeqSep::none(), - |p| p.parse_token_tree(), + |p| Ok(p.parse_token_tree()), |mut e| handler.cancel(&mut e)); } @@ -1267,7 +1267,7 @@ impl<'a> Parser<'a> { break; } token::OpenDelim(token::Brace) => { - self.parse_token_tree()?; + self.parse_token_tree(); break; } _ => self.bump(), @@ -2101,10 +2101,10 @@ impl<'a> Parser<'a> { fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> { match self.token { - token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree { - TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()), + token::OpenDelim(delim) => match self.parse_token_tree() { + TokenTree::Delimited(_, delimited) => Ok((delim, delimited.stream().into())), _ => unreachable!(), - }), + }, _ => Err(self.fatal("expected open delimiter")), } } @@ -2643,24 +2643,23 @@ impl<'a> Parser<'a> { } /// parse a single token tree from the input. - pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> { + pub fn parse_token_tree(&mut self) -> TokenTree { match self.token { token::OpenDelim(..) 
=> { let frame = mem::replace(&mut self.token_cursor.frame, self.token_cursor.stack.pop().unwrap()); self.span = frame.span; self.bump(); - return Ok(TokenTree::Delimited(frame.span, Delimited { + TokenTree::Delimited(frame.span, Delimited { delim: frame.delim, tts: frame.tree_cursor.original_stream().into(), - })); + }) }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { let token = mem::replace(&mut self.token, token::Underscore); - let res = Ok(TokenTree::Token(self.span, token)); self.bump(); - res + TokenTree::Token(self.prev_span, token) } } } @@ -2670,7 +2669,7 @@ impl<'a> Parser<'a> { pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec> { let mut tts = Vec::new(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?); + tts.push(self.parse_token_tree()); } Ok(tts) } -- cgit 1.4.1-3-g733a5 From 68c1cc68b44bb987ec57251bc457a55292515d1d Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 3 Mar 2017 09:23:59 +0000 Subject: Refactor `Attribute` to use `Path` and `TokenStream` instead of `MetaItem`. 
--- src/librustc/hir/check_attr.rs | 11 +- src/librustc/hir/lowering.rs | 2 +- src/librustc/lint/context.rs | 4 +- src/librustc/middle/stability.rs | 4 +- src/librustc/traits/error_reporting.rs | 2 +- .../calculate_svh/svh_visitor.rs | 55 +--- src/librustc_incremental/persist/dirty_clean.rs | 8 +- src/librustc_lint/builtin.rs | 8 +- src/librustc_lint/lib.rs | 1 + src/librustc_lint/unused.rs | 5 +- src/librustc_metadata/creader.rs | 8 +- src/librustc_metadata/cstore.rs | 9 +- src/librustc_passes/ast_validation.rs | 4 +- src/librustc_resolve/lib.rs | 5 +- src/librustc_resolve/macros.rs | 43 ++- src/librustc_save_analysis/external_data.rs | 4 +- src/librustc_save_analysis/lib.rs | 5 +- src/librustc_trans/assert_module_sources.rs | 2 +- src/librustdoc/clean/mod.rs | 21 +- src/librustdoc/html/render.rs | 4 +- src/librustdoc/test.rs | 14 +- src/librustdoc/visit_ast.rs | 2 +- src/libsyntax/ast.rs | 9 +- src/libsyntax/attr.rs | 306 ++++++++++++++++++--- src/libsyntax/config.rs | 8 +- src/libsyntax/ext/derive.rs | 4 +- src/libsyntax/ext/expand.rs | 42 +-- src/libsyntax/ext/quote.rs | 14 +- src/libsyntax/feature_gate.rs | 28 +- src/libsyntax/fold.rs | 7 +- src/libsyntax/lib.rs | 10 + src/libsyntax/parse/attr.rs | 16 +- src/libsyntax/parse/mod.rs | 131 ++++++--- src/libsyntax/parse/parser.rs | 44 +-- src/libsyntax/parse/token.rs | 4 +- src/libsyntax/print/pprust.rs | 100 ++++--- src/libsyntax/std_inject.rs | 8 +- src/libsyntax/tokenstream.rs | 2 +- src/libsyntax_ext/deriving/custom.rs | 8 +- src/libsyntax_ext/deriving/generic/mod.rs | 2 +- src/libsyntax_ext/proc_macro_registrar.rs | 12 +- 41 files changed, 614 insertions(+), 362 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 6f5f548aa78..54ae9472140 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -120,11 +120,12 @@ impl<'a> CheckAttrVisitor<'a> { } fn check_attribute(&self, attr: &ast::Attribute, target: 
Target) { - let name: &str = &attr.name().as_str(); - match name { - "inline" => self.check_inline(attr, target), - "repr" => self.check_repr(attr, target), - _ => (), + if let Some(name) = attr.name() { + match &*name.as_str() { + "inline" => self.check_inline(attr, target), + "repr" => self.check_repr(attr, target), + _ => (), + } } } } diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index aa6614b0af4..a5c82130675 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -1277,7 +1277,7 @@ impl<'a> LoweringContext<'a> { let attrs = self.lower_attrs(&i.attrs); let mut vis = self.lower_visibility(&i.vis); if let ItemKind::MacroDef(ref tts) = i.node { - if i.attrs.iter().any(|attr| attr.name() == "macro_export") { + if i.attrs.iter().any(|attr| attr.path == "macro_export") { self.exported_macros.push(hir::MacroDef { name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(), }); diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 9279f24a57a..65e2fec0b8b 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -402,14 +402,14 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) -> Vec Vec> { let mut out = vec![]; - let level = match Level::from_str(&attr.name().as_str()) { + let level = match attr.name().and_then(|name| Level::from_str(&name.as_str())) { None => return out, Some(lvl) => lvl, }; + let meta = unwrap_or!(attr.meta(), return out); attr::mark_used(attr); - let meta = &attr.value; let metas = if let Some(metas) = meta.meta_item_list() { metas } else { diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index baa22d70614..1fb53714025 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -197,7 +197,7 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { } else { // Emit errors for non-staged-api crates. 
for attr in attrs { - let tag = attr.name(); + let tag = unwrap_or!(attr.name(), continue); if tag == "unstable" || tag == "stable" || tag == "rustc_deprecated" { attr::mark_used(attr); self.tcx.sess.span_err(attr.span(), "stability attributes may not be used \ @@ -402,7 +402,7 @@ impl<'a, 'tcx> Index<'tcx> { let mut is_staged_api = false; for attr in &krate.attrs { - if attr.name() == "stable" || attr.name() == "unstable" { + if attr.path == "stable" || attr.path == "unstable" { is_staged_api = true; break } diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 0e5c786cd8d..27525d550ff 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -274,7 +274,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { .filter(|a| a.check_name("rustc_on_unimplemented")) .next() { - let err_sp = item.meta().span.substitute_dummy(span); + let err_sp = item.span.substitute_dummy(span); let trait_str = self.tcx.item_path_str(trait_ref.def_id); if let Some(istring) = item.value_str() { let istring = &*istring.as_str(); diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index d0eedcac0c0..fac49b29598 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -18,16 +18,15 @@ use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; use syntax::parse::token; -use syntax::symbol::{Symbol, InternedString}; +use syntax::symbol::InternedString; use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax::tokenstream; use rustc::hir; use rustc::hir::*; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; -use rustc::hir::intravisit as visit; +use rustc::hir::intravisit::{self as visit, Visitor}; use rustc::ty::TyCtxt; -use rustc_data_structures::fnv; use std::hash::{Hash, Hasher}; use super::def_path_hash::DefPathHashes; 
@@ -559,7 +558,7 @@ macro_rules! hash_span { }); } -impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> { +impl<'a, 'hash, 'tcx> Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> visit::NestedVisitorMap<'this, 'tcx> { if self.hash_bodies { visit::NestedVisitorMap::OnlyBodies(&self.tcx.hir) @@ -960,50 +959,24 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { } } - fn hash_meta_item(&mut self, meta_item: &ast::MetaItem) { - debug!("hash_meta_item: st={:?}", self.st); - - // ignoring span information, it doesn't matter here - self.hash_discriminant(&meta_item.node); - meta_item.name.as_str().len().hash(self.st); - meta_item.name.as_str().hash(self.st); - - match meta_item.node { - ast::MetaItemKind::Word => {} - ast::MetaItemKind::NameValue(ref lit) => saw_lit(lit).hash(self.st), - ast::MetaItemKind::List(ref items) => { - // Sort subitems so the hash does not depend on their order - let indices = self.indices_sorted_by(&items, |p| { - (p.name().map(Symbol::as_str), fnv::hash(&p.literal().map(saw_lit))) - }); - items.len().hash(self.st); - for (index, &item_index) in indices.iter().enumerate() { - index.hash(self.st); - let nested_meta_item: &ast::NestedMetaItemKind = &items[item_index].node; - self.hash_discriminant(nested_meta_item); - match *nested_meta_item { - ast::NestedMetaItemKind::MetaItem(ref meta_item) => { - self.hash_meta_item(meta_item); - } - ast::NestedMetaItemKind::Literal(ref lit) => { - saw_lit(lit).hash(self.st); - } - } - } - } - } - } - pub fn hash_attributes(&mut self, attributes: &[ast::Attribute]) { debug!("hash_attributes: st={:?}", self.st); let indices = self.indices_sorted_by(attributes, |attr| attr.name()); for i in indices { let attr = &attributes[i]; - if !attr.is_sugared_doc && - !IGNORED_ATTRIBUTES.contains(&&*attr.value.name().as_str()) { + match attr.name() { + Some(name) if IGNORED_ATTRIBUTES.contains(&&*name.as_str()) 
=> continue, + _ => {} + }; + if !attr.is_sugared_doc { SawAttribute(attr.style).hash(self.st); - self.hash_meta_item(&attr.value); + for segment in &attr.path.segments { + SawIdent(segment.identifier.name.as_str()).hash(self.st); + } + for tt in attr.tokens.trees() { + self.hash_token_tree(&tt); + } } } } diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 156f8b9e7c4..929249df0b1 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -104,9 +104,9 @@ pub struct DirtyCleanVisitor<'a, 'tcx:'a> { impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode { - for item in attr.meta_item_list().unwrap_or(&[]) { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(LABEL) { - let value = expect_associated_value(self.tcx, item); + let value = expect_associated_value(self.tcx, &item); match DepNode::from_label_string(&value.as_str(), def_id) { Ok(def_id) => return def_id, Err(()) => { @@ -331,9 +331,9 @@ fn check_config(tcx: TyCtxt, attr: &Attribute) -> bool { debug!("check_config(attr={:?})", attr); let config = &tcx.sess.parse_sess.config; debug!("check_config: config={:?}", config); - for item in attr.meta_item_list().unwrap_or(&[]) { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(CFG) { - let value = expect_associated_value(tcx, item); + let value = expect_associated_value(tcx, &item); debug!("check_config: searching for cfg {:?}", value); return config.contains(&(value, None)); } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 58336f939d1..f0276f90f27 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -312,7 +312,7 @@ impl MissingDoc { } } - let has_doc = attrs.iter().any(|a| a.is_value_str() && a.name() == "doc"); + let has_doc = attrs.iter().any(|a| a.is_value_str() && 
a.check_name("doc")); if !has_doc { cx.span_lint(MISSING_DOCS, sp, @@ -635,7 +635,7 @@ impl LintPass for DeprecatedAttr { impl EarlyLintPass for DeprecatedAttr { fn check_attribute(&mut self, cx: &EarlyContext, attr: &ast::Attribute) { - let name = attr.name(); + let name = unwrap_or!(attr.name(), return); for &&(n, _, ref g) in &self.depr_attrs { if name == n { if let &AttributeGate::Gated(Stability::Deprecated(link), @@ -1121,8 +1121,8 @@ impl LintPass for UnstableFeatures { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnstableFeatures { fn check_attribute(&mut self, ctx: &LateContext, attr: &ast::Attribute) { - if attr.meta().check_name("feature") { - if let Some(items) = attr.meta().meta_item_list() { + if attr.check_name("feature") { + if let Some(items) = attr.meta_item_list() { for item in items { ctx.span_lint(UNSTABLE_FEATURES, item.span(), "unstable feature"); } diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 443a219928f..05dbbc09870 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -38,6 +38,7 @@ #![feature(slice_patterns)] #![feature(staged_api)] +#[macro_use] extern crate syntax; #[macro_use] extern crate rustc; diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index f9b7c685876..abba8afd9da 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -269,6 +269,7 @@ impl LintPass for UnusedAttributes { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes { fn check_attribute(&mut self, cx: &LateContext, attr: &ast::Attribute) { debug!("checking attribute: {:?}", attr); + let name = unwrap_or!(attr.name(), return); // Note that check_name() marks the attribute as used if it matches. for &(ref name, ty, _) in BUILTIN_ATTRIBUTES { @@ -294,13 +295,13 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes { cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute"); // Is it a builtin attribute that must be used at the crate level? 
let known_crate = BUILTIN_ATTRIBUTES.iter() - .find(|&&(name, ty, _)| attr.name() == name && ty == AttributeType::CrateLevel) + .find(|&&(builtin, ty, _)| name == builtin && ty == AttributeType::CrateLevel) .is_some(); // Has a plugin registered this attribute as one which must be used at // the crate level? let plugin_crate = plugin_attributes.iter() - .find(|&&(ref x, t)| attr.name() == &**x && AttributeType::CrateLevel == t) + .find(|&&(ref x, t)| name == &**x && AttributeType::CrateLevel == t) .is_some(); if known_crate || plugin_crate { let msg = match attr.style { diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 49dcffb4830..9f5ce00f408 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -973,9 +973,11 @@ impl<'a> CrateLoader<'a> { impl<'a> CrateLoader<'a> { pub fn preprocess(&mut self, krate: &ast::Crate) { - for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") { - if let Some(linkarg) = attr.value_str() { - self.cstore.add_used_link_args(&linkarg.as_str()); + for attr in &krate.attrs { + if attr.path == "link_args" { + if let Some(linkarg) = attr.value_str() { + self.cstore.add_used_link_args(&linkarg.as_str()); + } } } } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index bb30245df5f..17a6a706e0a 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -269,9 +269,12 @@ impl CrateMetadata { } pub fn is_staged_api(&self) -> bool { - self.get_item_attrs(CRATE_DEF_INDEX) - .iter() - .any(|attr| attr.name() == "stable" || attr.name() == "unstable") + for attr in self.get_item_attrs(CRATE_DEF_INDEX) { + if attr.path == "stable" || attr.path == "unstable" { + return true; + } + } + false } pub fn is_allocator(&self) -> bool { diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 0933fdfd357..8c45a666945 100644 --- a/src/librustc_passes/ast_validation.rs +++ 
b/src/librustc_passes/ast_validation.rs @@ -241,12 +241,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ItemKind::Mod(_) => { // Ensure that `path` attributes on modules are recorded as used (c.f. #35584). attr::first_attr_value_str_by_name(&item.attrs, "path"); - if let Some(attr) = - item.attrs.iter().find(|attr| attr.name() == "warn_directory_ownership") { + if item.attrs.iter().any(|attr| attr.check_name("warn_directory_ownership")) { let lint = lint::builtin::LEGACY_DIRECTORY_OWNERSHIP; let msg = "cannot declare a new module at this location"; self.session.add_lint(lint, item.id, item.span, msg.to_string()); - attr::mark_used(attr); } } ItemKind::Union(ref vdata, _) => { diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 0958748ed09..c3e471650a3 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -3360,8 +3360,9 @@ impl<'a> Resolver<'a> { if self.proc_macro_enabled { return; } for attr in attrs { - let maybe_binding = self.builtin_macros.get(&attr.name()).cloned().or_else(|| { - let ident = Ident::with_empty_ctxt(attr.name()); + let name = unwrap_or!(attr.name(), continue); + let maybe_binding = self.builtin_macros.get(&name).cloned().or_else(|| { + let ident = Ident::with_empty_ctxt(name); self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok() }); diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 7ad122d1c31..9e1dcd1bc35 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -30,6 +30,7 @@ use syntax::feature_gate::{self, emit_feature_err, GateIssue}; use syntax::fold::{self, Folder}; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; +use syntax::tokenstream::TokenStream; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::{Span, DUMMY_SP}; @@ -176,12 +177,14 @@ impl<'a> base::Resolver for Resolver<'a> { fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec) -> Option { for i in 0..attrs.len() { + let 
name = unwrap_or!(attrs[i].name(), continue); + if self.session.plugin_attributes.borrow().iter() - .any(|&(ref attr_nm, _)| attrs[i].name() == &**attr_nm) { + .any(|&(ref attr_nm, _)| name == &**attr_nm) { attr::mark_known(&attrs[i]); } - match self.builtin_macros.get(&attrs[i].name()).cloned() { + match self.builtin_macros.get(&name).cloned() { Some(binding) => match *binding.get_macro(self) { MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => { return Some(attrs.remove(i)) @@ -194,9 +197,11 @@ impl<'a> base::Resolver for Resolver<'a> { // Check for legacy derives for i in 0..attrs.len() { - if attrs[i].name() == "derive" { + let name = unwrap_or!(attrs[i].name(), continue); + + if name == "derive" { let mut traits = match attrs[i].meta_item_list() { - Some(traits) if !traits.is_empty() => traits.to_owned(), + Some(traits) => traits, _ => continue, }; @@ -213,18 +218,11 @@ impl<'a> base::Resolver for Resolver<'a> { if traits.is_empty() { attrs.remove(i); } else { - attrs[i].value = ast::MetaItem { - name: attrs[i].name(), - span: attrs[i].span, - node: ast::MetaItemKind::List(traits), - }; + attrs[i].tokens = ast::MetaItemKind::List(traits).tokens(attrs[i].span); } return Some(ast::Attribute { - value: ast::MetaItem { - name: legacy_name, - span: span, - node: ast::MetaItemKind::Word, - }, + path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)), + tokens: TokenStream::empty(), id: attr::mk_attr_id(), style: ast::AttrStyle::Outer, is_sugared_doc: false, @@ -270,19 +268,20 @@ impl<'a> Resolver<'a> { } }; - let (attr_name, path) = { - let attr = attr.as_ref().unwrap(); - (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name()))) - }; - let mut determined = true; + let path = attr.as_ref().unwrap().path.clone(); + let mut determinacy = Determinacy::Determined; match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) { Ok(def) => return Ok(def), - Err(Determinacy::Undetermined) => 
determined = false, + Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined, Err(Determinacy::Determined) if force => return Err(Determinacy::Determined), Err(Determinacy::Determined) => {} } + let attr_name = match path.segments.len() { + 1 => path.segments[0].identifier.name, + _ => return Err(determinacy), + }; for &(name, span) in traits { let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); match self.resolve_macro(scope, &path, MacroKind::Derive, force) { @@ -304,12 +303,12 @@ impl<'a> Resolver<'a> { } return Err(Determinacy::Undetermined); }, - Err(Determinacy::Undetermined) => determined = false, + Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined, Err(Determinacy::Determined) => {} } } - Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined }) + Err(determinacy) } fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool) diff --git a/src/librustc_save_analysis/external_data.rs b/src/librustc_save_analysis/external_data.rs index 41658dc5b1b..f038c2dc298 100644 --- a/src/librustc_save_analysis/external_data.rs +++ b/src/librustc_save_analysis/external_data.rs @@ -14,7 +14,6 @@ use rustc::ty::TyCtxt; use syntax::ast::{self, NodeId}; use syntax::codemap::CodeMap; use syntax::print::pprust; -use syntax::symbol::Symbol; use syntax_pos::Span; use data::{self, Visibility, SigElement}; @@ -77,10 +76,9 @@ impl Lower for Vec { type Target = Vec; fn lower(self, tcx: TyCtxt) -> Vec { - let doc = Symbol::intern("doc"); self.into_iter() // Only retain real attributes. Doc comments are lowered separately. - .filter(|attr| attr.name() != doc) + .filter(|attr| attr.path != "doc") .map(|mut attr| { // Remove the surrounding '#[..]' or '#![..]' of the pretty printed // attribute. 
First normalize all inner attribute (#![..]) to outer diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 111c8370be2..90ee19198c9 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -54,7 +54,7 @@ use std::path::{Path, PathBuf}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::parse::lexer::comments::strip_doc_comment_decoration; use syntax::parse::token; -use syntax::symbol::{Symbol, keywords}; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{ty_to_string, arg_to_string}; use syntax::codemap::MacroAttribute; @@ -829,11 +829,10 @@ impl<'a> Visitor<'a> for PathCollector { } fn docs_for_attrs(attrs: &[Attribute]) -> String { - let doc = Symbol::intern("doc"); let mut result = String::new(); for attr in attrs { - if attr.name() == doc { + if attr.check_name("doc") { if let Some(val) = attr.value_str() { if attr.is_sugared_doc { result.push_str(&strip_doc_comment_decoration(&val.as_str())); diff --git a/src/librustc_trans/assert_module_sources.rs b/src/librustc_trans/assert_module_sources.rs index 7a41f834109..8528482c785 100644 --- a/src/librustc_trans/assert_module_sources.rs +++ b/src/librustc_trans/assert_module_sources.rs @@ -113,7 +113,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> { } fn field(&self, attr: &ast::Attribute, name: &str) -> ast::Name { - for item in attr.meta_item_list().unwrap_or(&[]) { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(name) { if let Some(value) = item.value_str() { return value; diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 1294296840e..660fa647882 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -39,12 +39,11 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc::hir; +use std::{mem, slice, vec}; use std::path::PathBuf; use std::rc::Rc; -use std::slice; use std::sync::Arc; use 
std::u32; -use std::mem; use core::DocContext; use doctree; @@ -472,12 +471,12 @@ impl Clean for doctree::Module { pub struct ListAttributesIter<'a> { attrs: slice::Iter<'a, ast::Attribute>, - current_list: slice::Iter<'a, ast::NestedMetaItem>, + current_list: vec::IntoIter, name: &'a str } impl<'a> Iterator for ListAttributesIter<'a> { - type Item = &'a ast::NestedMetaItem; + type Item = ast::NestedMetaItem; fn next(&mut self) -> Option { if let Some(nested) = self.current_list.next() { @@ -485,9 +484,9 @@ impl<'a> Iterator for ListAttributesIter<'a> { } for attr in &mut self.attrs { - if let Some(ref list) = attr.meta_item_list() { + if let Some(list) = attr.meta_item_list() { if attr.check_name(self.name) { - self.current_list = list.iter(); + self.current_list = list.into_iter(); if let Some(nested) = self.current_list.next() { return Some(nested); } @@ -508,7 +507,7 @@ impl AttributesExt for [ast::Attribute] { fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a> { ListAttributesIter { attrs: self.iter(), - current_list: [].iter(), + current_list: Vec::new().into_iter(), name: name } } @@ -519,7 +518,7 @@ pub trait NestedAttributesExt { fn has_word(self, &str) -> bool; } -impl<'a, I: IntoIterator> NestedAttributesExt for I { +impl> NestedAttributesExt for I { fn has_word(self, word: &str) -> bool { self.into_iter().any(|attr| attr.is_word() && attr.check_name(word)) } @@ -2596,9 +2595,9 @@ impl Clean> for doctree::Import { // #[doc(no_inline)] attribute is present. // Don't inline doc(hidden) imports so they can be stripped at a later stage. 
let denied = self.vis != hir::Public || self.attrs.iter().any(|a| { - a.name() == "doc" && match a.meta_item_list() { - Some(l) => attr::list_contains_name(l, "no_inline") || - attr::list_contains_name(l, "hidden"), + a.name().unwrap() == "doc" && match a.meta_item_list() { + Some(l) => attr::list_contains_name(&l, "no_inline") || + attr::list_contains_name(&l, "hidden"), None => false, } }); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 44f71d89529..130a4526bf7 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -2620,11 +2620,11 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result { let mut attrs = String::new(); for attr in &it.attrs.other_attrs { - let name = attr.name(); + let name = attr.name().unwrap(); if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) { continue; } - if let Some(s) = render_attribute(attr.meta()) { + if let Some(s) = render_attribute(&attr.meta().unwrap()) { attrs.push_str(&format!("#[{}]\n", s)); } } diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index c1ecc241b7b..f6b7a07bdae 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -137,13 +137,13 @@ fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions { attrs: Vec::new(), }; - let attrs = krate.attrs.iter() - .filter(|a| a.check_name("doc")) - .filter_map(|a| a.meta_item_list()) - .flat_map(|l| l) - .filter(|a| a.check_name("test")) - .filter_map(|a| a.meta_item_list()) - .flat_map(|l| l); + let test_attrs: Vec<_> = krate.attrs.iter() + .filter(|a| a.check_name("doc")) + .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new)) + .filter(|a| a.check_name("test")) + .collect(); + let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[])); + for attr in attrs { if attr.check_name("no_crate_inject") { opts.no_crate_inject = true; diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index b80de3cc505..4a909f8e2a9 100644 
--- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -376,7 +376,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { if item.vis == hir::Public && self.inside_public_path { let please_inline = item.attrs.iter().any(|item| { match item.meta_item_list() { - Some(list) if item.check_name("doc") => { + Some(ref list) if item.check_name("doc") => { list.iter().any(|i| i.check_name("inline")) } _ => false, diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 981667337d5..5deb91ef53a 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -116,6 +116,12 @@ pub struct Path { pub segments: Vec, } +impl<'a> PartialEq<&'a str> for Path { + fn eq(&self, string: &&'a str) -> bool { + self.segments.len() == 1 && self.segments[0].identifier.name == *string + } +} + impl fmt::Debug for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "path({})", pprust::path_to_string(self)) @@ -1679,7 +1685,8 @@ pub struct AttrId(pub usize); pub struct Attribute { pub id: AttrId, pub style: AttrStyle, - pub value: MetaItem, + pub path: Path, + pub tokens: TokenStream, pub is_sugared_doc: bool, pub span: Span, } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 096657a6e7a..68f1f690a62 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -15,20 +15,24 @@ pub use self::ReprAttr::*; pub use self::IntType::*; use ast; -use ast::{AttrId, Attribute, Name}; +use ast::{AttrId, Attribute, Name, Ident}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; -use ast::{Lit, Expr, Item, Local, Stmt, StmtKind}; +use ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind}; use codemap::{Spanned, spanned, dummy_spanned, mk_sp}; use syntax_pos::{Span, BytePos, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use parse::ParseSess; +use parse::parser::Parser; +use parse::{self, ParseSess, PResult}; +use 
parse::token::{self, Token}; use ptr::P; use symbol::Symbol; +use tokenstream::{TokenStream, TokenTree, Delimited}; use util::ThinVec; use std::cell::{RefCell, Cell}; +use std::iter; thread_local! { static USED_ATTRS: RefCell> = RefCell::new(Vec::new()); @@ -185,26 +189,38 @@ impl NestedMetaItem { impl Attribute { pub fn check_name(&self, name: &str) -> bool { - let matches = self.name() == name; + let matches = self.path == name; if matches { mark_used(self); } matches } - pub fn name(&self) -> Name { self.meta().name() } + pub fn name(&self) -> Option { + match self.path.segments.len() { + 1 => Some(self.path.segments[0].identifier.name), + _ => None, + } + } pub fn value_str(&self) -> Option { - self.meta().value_str() + self.meta().and_then(|meta| meta.value_str()) } - pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { - self.meta().meta_item_list() + pub fn meta_item_list(&self) -> Option> { + match self.meta() { + Some(MetaItem { node: MetaItemKind::List(list), .. }) => Some(list), + _ => None + } } - pub fn is_word(&self) -> bool { self.meta().is_word() } + pub fn is_word(&self) -> bool { + self.path.segments.len() == 1 && self.tokens.is_empty() + } - pub fn span(&self) -> Span { self.meta().span } + pub fn span(&self) -> Span { + self.span + } pub fn is_meta_item_list(&self) -> bool { self.meta_item_list().is_some() @@ -225,7 +241,7 @@ impl MetaItem { match self.node { MetaItemKind::NameValue(ref v) => { match v.node { - ast::LitKind::Str(ref s, _) => Some((*s).clone()), + LitKind::Str(ref s, _) => Some((*s).clone()), _ => None, } }, @@ -264,8 +280,35 @@ impl MetaItem { impl Attribute { /// Extract the MetaItem from inside this Attribute. 
- pub fn meta(&self) -> &MetaItem { - &self.value + pub fn meta(&self) -> Option { + let mut tokens = self.tokens.trees().peekable(); + Some(MetaItem { + name: match self.path.segments.len() { + 1 => self.path.segments[0].identifier.name, + _ => return None, + }, + node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) { + if tokens.peek().is_some() { + return None; + } + node + } else { + return None; + }, + span: self.span, + }) + } + + pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> { + if self.path.segments.len() > 1 { + sess.span_diagnostic.span_err(self.path.span, "expected ident, found path"); + } + + Ok(MetaItem { + name: self.path.segments.last().unwrap().identifier.name, + node: Parser::new(sess, self.tokens.clone(), None, false).parse_meta_item_kind()?, + span: self.span, + }) } /// Convert self to a normal #[doc="foo"] comment, if it is a @@ -293,7 +336,7 @@ impl Attribute { /* Constructors */ pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem { - let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked)); + let value_lit = dummy_spanned(LitKind::Str(value, ast::StrStyle::Cooked)); mk_spanned_name_value_item(DUMMY_SP, name, value_lit) } @@ -348,7 +391,8 @@ pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute Attribute { id: id, style: ast::AttrStyle::Inner, - value: item, + path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)), + tokens: item.node.tokens(item.span), is_sugared_doc: false, span: sp, } @@ -365,7 +409,8 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute Attribute { id: id, style: ast::AttrStyle::Outer, - value: item, + path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)), + tokens: item.node.tokens(item.span), is_sugared_doc: false, span: sp, } @@ -374,32 +419,25 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute pub fn 
mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos) -> Attribute { let style = doc_comment_style(&text.as_str()); - let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked)); + let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked)); Attribute { id: id, style: style, - value: MetaItem { - span: mk_sp(lo, hi), - name: Symbol::intern("doc"), - node: MetaItemKind::NameValue(lit), - }, + path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")), + tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)), is_sugared_doc: true, span: mk_sp(lo, hi), } } pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool { - debug!("attr::list_contains_name (name={})", name); items.iter().any(|item| { - debug!(" testing: {:?}", item.name()); item.check_name(name) }) } pub fn contains_name(attrs: &[Attribute], name: &str) -> bool { - debug!("attr::contains_name (name={})", name); attrs.iter().any(|item| { - debug!(" testing: {}", item.name()); item.check_name(name) }) } @@ -452,8 +490,14 @@ pub enum InlineAttr { /// Determine what `#[inline]` attribute is present in `attrs`, if any. 
pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr { attrs.iter().fold(InlineAttr::None, |ia, attr| { - match attr.value.node { - _ if attr.value.name != "inline" => ia, + if attr.path != "inline" { + return ia; + } + let meta = match attr.meta() { + Some(meta) => meta.node, + None => return ia, + }; + match meta { MetaItemKind::Word => { mark_used(attr); InlineAttr::Hint @@ -574,14 +618,15 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, let mut rustc_depr: Option = None; 'outer: for attr in attrs_iter { - let tag = attr.name(); - if tag != "rustc_deprecated" && tag != "unstable" && tag != "stable" { + if attr.path != "rustc_deprecated" && attr.path != "unstable" && attr.path != "stable" { continue // not a stability level } mark_used(attr); - if let Some(metas) = attr.meta_item_list() { + let meta = attr.meta(); + if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta { + let meta = meta.as_ref().unwrap(); let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name())); @@ -596,7 +641,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, } }; - match &*tag.as_str() { + match &*meta.name.as_str() { "rustc_deprecated" => { if rustc_depr.is_some() { span_err!(diagnostic, item_sp, E0540, @@ -772,7 +817,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler, let mut depr: Option = None; 'outer: for attr in attrs_iter { - if attr.name() != "deprecated" { + if attr.path != "deprecated" { continue } @@ -847,8 +892,8 @@ pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute], /// structure layout, and `packed` to remove padding. 
pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec { let mut acc = Vec::new(); - match attr.value.node { - ast::MetaItemKind::List(ref items) if attr.value.name == "repr" => { + if attr.path == "repr" { + if let Some(items) = attr.meta_item_list() { mark_used(attr); for item in items { if !item.is_meta_item() { @@ -883,8 +928,6 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec } } } - // Not a "repr" hint: ignore. - _ => { } } acc } @@ -931,6 +974,195 @@ impl IntType { } } +impl MetaItem { + fn tokens(&self) -> TokenStream { + let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name))); + TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)]) + } + + fn from_tokens(tokens: &mut iter::Peekable) -> Option + where I: Iterator, + { + let (mut span, name) = match tokens.next() { + Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), + _ => return None, + }; + let node = match MetaItemKind::from_tokens(tokens) { + Some(node) => node, + _ => return None, + }; + if let Some(last_span) = node.last_span() { + span.hi = last_span.hi; + } + Some(MetaItem { name: name, span: span, node: node }) + } +} + +impl MetaItemKind { + fn last_span(&self) -> Option { + match *self { + MetaItemKind::Word => None, + MetaItemKind::List(ref list) => list.last().map(NestedMetaItem::span), + MetaItemKind::NameValue(ref lit) => Some(lit.span), + } + } + + pub fn tokens(&self, span: Span) -> TokenStream { + match *self { + MetaItemKind::Word => TokenStream::empty(), + MetaItemKind::NameValue(ref lit) => { + TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()]) + } + MetaItemKind::List(ref list) => { + let mut tokens = Vec::new(); + for (i, item) in list.iter().enumerate() { + if i > 0 { + tokens.push(TokenTree::Token(span, Token::Comma).into()); + } + tokens.push(item.node.tokens()); + } + TokenTree::Delimited(span, Delimited { + delim: token::Paren, + tts: 
TokenStream::concat(tokens).into(), + }).into() + } + } + } + + fn from_tokens(tokens: &mut iter::Peekable) -> Option + where I: Iterator, + { + let delimited = match tokens.peek().cloned() { + Some(TokenTree::Token(_, token::Eq)) => { + tokens.next(); + return if let Some(TokenTree::Token(span, token)) = tokens.next() { + LitKind::from_token(token) + .map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span })) + } else { + None + }; + } + Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => { + tokens.next(); + delimited.stream() + } + _ => return Some(MetaItemKind::Word), + }; + + let mut tokens = delimited.into_trees().peekable(); + let mut result = Vec::new(); + while let Some(..) = tokens.peek() { + match NestedMetaItemKind::from_tokens(&mut tokens) { + Some(item) => result.push(Spanned { span: item.span(), node: item }), + None => return None, + } + match tokens.next() { + None | Some(TokenTree::Token(_, Token::Comma)) => {} + _ => return None, + } + } + Some(MetaItemKind::List(result)) + } +} + +impl NestedMetaItemKind { + fn span(&self) -> Span { + match *self { + NestedMetaItemKind::MetaItem(ref item) => item.span, + NestedMetaItemKind::Literal(ref lit) => lit.span, + } + } + + fn tokens(&self) -> TokenStream { + match *self { + NestedMetaItemKind::MetaItem(ref item) => item.tokens(), + NestedMetaItemKind::Literal(ref lit) => lit.tokens(), + } + } + + fn from_tokens(tokens: &mut iter::Peekable) -> Option + where I: Iterator, + { + if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() { + if let Some(node) = LitKind::from_token(token) { + tokens.next(); + return Some(NestedMetaItemKind::Literal(Spanned { node: node, span: span })); + } + } + + MetaItem::from_tokens(tokens).map(NestedMetaItemKind::MetaItem) + } +} + +impl Lit { + fn tokens(&self) -> TokenStream { + TokenTree::Token(self.span, self.node.token()).into() + } +} + +impl LitKind { + fn token(&self) -> Token { + use std::ascii; + + match *self 
{ + LitKind::Str(string, ast::StrStyle::Cooked) => { + let mut escaped = String::new(); + for ch in string.as_str().chars() { + escaped.extend(ch.escape_unicode()); + } + Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None) + } + LitKind::Str(string, ast::StrStyle::Raw(n)) => { + Token::Literal(token::Lit::StrRaw(string, n), None) + } + LitKind::ByteStr(ref bytes) => { + let string = bytes.iter().cloned().flat_map(ascii::escape_default) + .map(Into::::into).collect::(); + Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None) + } + LitKind::Byte(byte) => { + let string: String = ascii::escape_default(byte).map(Into::::into).collect(); + Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None) + } + LitKind::Char(ch) => { + let string: String = ch.escape_default().map(Into::::into).collect(); + Token::Literal(token::Lit::Char(Symbol::intern(&string)), None) + } + LitKind::Int(n, ty) => { + let suffix = match ty { + ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())), + ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())), + ast::LitIntType::Unsuffixed => None, + }; + Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix) + } + LitKind::Float(symbol, ty) => { + Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string()))) + } + LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None), + LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value { + true => "true", + false => "false", + }))), + } + } + + fn from_token(token: Token) -> Option { + match token { + Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)), + Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)), + Token::Literal(lit, suf) => { + let (suffix_illegal, result) = parse::lit_token(lit, suf, None); + if suffix_illegal && suf.is_some() { + return None; + } + result + } + _ => None, + } + } +} + 
pub trait HasAttrs: Sized { fn attrs(&self) -> &[ast::Attribute]; fn map_attrs) -> Vec>(self, f: F) -> Self; diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index ea12a31770f..2591a576669 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -109,7 +109,8 @@ impl<'a> StripUnconfigured<'a> { self.process_cfg_attr(ast::Attribute { id: attr::mk_attr_id(), style: attr.style, - value: mi.clone(), + path: ast::Path::from_ident(mi.span, ast::Ident::with_empty_ctxt(mi.name)), + tokens: mi.node.tokens(mi.span), is_sugared_doc: false, span: mi.span, }) @@ -132,8 +133,9 @@ impl<'a> StripUnconfigured<'a> { return false; } - let mis = match attr.value.node { - ast::MetaItemKind::List(ref mis) if is_cfg(&attr) => mis, + let mis = attr.meta_item_list(); + let mis = match mis { + Some(ref mis) if is_cfg(&attr) => mis, _ => return true }; diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 77cc7bab031..5b253635f25 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -18,7 +18,7 @@ use syntax_pos::Span; pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec<(Symbol, Span)> { let mut result = Vec::new(); attrs.retain(|attr| { - if attr.name() != "derive" { + if attr.path != "derive" { return true; } @@ -27,7 +27,7 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec return false; } - let traits = attr.meta_item_list().unwrap_or(&[]).to_owned(); + let traits = attr.meta_item_list().unwrap_or_else(Vec::new); if traits.is_empty() { cx.span_warn(attr.span, "empty trait list in `derive`"); return false; diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 10168f010a0..c1095d34456 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -272,7 +272,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.collect_invocations(expansion, &[]) } else if let InvocationKind::Attr { attr: None, traits, item } = invoc.kind { let item = item - .map_attrs(|mut 
attrs| { attrs.retain(|a| a.name() != "derive"); attrs }); + .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs }); let item_with_markers = add_derived_markers(&mut self.cx, &traits, item.clone()); let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new); @@ -380,7 +380,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - let name = attr.name(); + let name = attr.path.segments[0].identifier.name; self.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { @@ -392,25 +392,25 @@ impl<'a, 'b> MacroExpander<'a, 'b> { match *ext { MultiModifier(ref mac) => { - let item = mac.expand(self.cx, attr.span, &attr.value, item); + let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + let item = mac.expand(self.cx, attr.span, &meta, item); kind.expect_from_annotatables(item) } MultiDecorator(ref mac) => { let mut items = Vec::new(); - mac.expand(self.cx, attr.span, &attr.value, &item, - &mut |item| items.push(item)); + let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item)); items.push(item); kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess); let item_toks = stream_for_item(&item, &self.cx.parse_sess); let span = Span { expn_id: self.cx.codemap().record_expansion(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - format: MacroAttribute(name), + format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), span: None, allow_internal_unstable: false, }, @@ -418,7 +418,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ..attr.span }; - let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks); + let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks); self.parse_expansion(tok_result, kind, name, span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) 
=> { @@ -784,32 +784,6 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream { string_to_stream(text, parse_sess) } -fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream { - use ast::MetaItemKind::*; - use print::pp::Breaks; - use print::pprust::PrintState; - - let token_string = match attr.value.node { - // For `#[foo]`, an empty token - Word => return TokenStream::empty(), - // For `#[foo(bar, baz)]`, returns `(bar, baz)` - List(ref items) => pprust::to_string(|s| { - s.popen()?; - s.commasep(Breaks::Consistent, - &items[..], - |s, i| s.print_meta_list_item(&i))?; - s.pclose() - }), - // For `#[foo = "bar"]`, returns `= "bar"` - NameValue(ref lit) => pprust::to_string(|s| { - s.word_space("=")?; - s.print_literal(lit) - }), - }; - - string_to_stream(token_string, parse_sess) -} - fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream { let filename = String::from(""); filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text)) diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 69ff726e719..10b7249743b 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -220,16 +220,24 @@ pub mod rt { } impl ToTokens for ast::Attribute { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let mut r = vec![]; // FIXME: The spans could be better r.push(TokenTree::Token(self.span, token::Pound)); if self.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } + let mut inner = Vec::new(); + for (i, segment) in self.path.segments.iter().enumerate() { + if i > 0 { + inner.push(TokenTree::Token(self.span, token::Colon).into()); + } + inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into()); + } + inner.push(self.tokens.clone()); + r.push(TokenTree::Delimited(self.span, tokenstream::Delimited { - delim: token::Bracket, - tts: 
self.value.to_tokens(cx).into_iter().collect::().into(), + delim: token::Bracket, tts: TokenStream::concat(inner).into() })); r } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index e7bf16eae9e..2c3ad98a6be 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -859,35 +859,34 @@ macro_rules! gate_feature { impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); - let name = &*attr.name().as_str(); + let name = unwrap_or!(attr.name(), return); + for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { - if n == name { + if name == n { if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { gate_feature_fn!(self, has_feature, attr.span, name, desc); } - debug!("check_attribute: {:?} is builtin, {:?}, {:?}", name, ty, gateage); + debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage); return; } } for &(ref n, ref ty) in self.plugin_attributes { - if n == name { + if attr.path == &**n { // Plugins can't gate attributes, so we don't check for it // unlike the code above; we only use this loop to // short-circuit to avoid the checks below - debug!("check_attribute: {:?} is registered by a plugin, {:?}", name, ty); + debug!("check_attribute: {:?} is registered by a plugin, {:?}", attr.path, ty); return; } } - if name.starts_with("rustc_") { + if name.as_str().starts_with("rustc_") { gate_feature!(self, rustc_attrs, attr.span, "unless otherwise specified, attributes \ with the prefix `rustc_` \ are reserved for internal compiler diagnostics"); - } else if name.starts_with("derive_") { + } else if name.as_str().starts_with("derive_") { gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE); - } else if attr::is_known(attr) { - debug!("check_attribute: {:?} is known", name); - } else { + } else if !attr::is_known(attr) { // Only run the custom attribute lint during regular // feature gate 
checking. Macro gating runs // before the plugin attributes are registered @@ -898,7 +897,7 @@ impl<'a> Context<'a> { unknown to the compiler and \ may have meaning \ added to it in the future", - name)); + attr.path)); } } } @@ -1097,7 +1096,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { self.context.check_attribute(attr, false); } - if contains_novel_literal(&attr.value) { + let meta = panictry!(attr.parse_meta(&self.context.parse_sess)); + if contains_novel_literal(&meta) { gate_feature_post!(&self, attr_literals, attr.span, "non-string literals in attributes, or string \ literals in top-level positions, are experimental"); @@ -1160,8 +1160,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { `#[repr(simd)]` instead"); } for attr in &i.attrs { - if attr.name() == "repr" { - for item in attr.meta_item_list().unwrap_or(&[]) { + if attr.path == "repr" { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name("simd") { gate_feature_post!(&self, repr_simd, i.span, "SIMD types are experimental \ diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index fb4eb19be2b..903dac1f379 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -488,7 +488,8 @@ pub fn noop_fold_attribute(attr: Attribute, fld: &mut T) -> Option(nt: token::Nonterminal, fld: &mut T) token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), token::NtIdent(id) => token::NtIdent(Spanned::{node: fld.fold_ident(id.node), ..id}), - token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), + token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)), token::NtPath(path) => token::NtPath(fld.fold_path(path)), token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)), token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), @@ -1369,7 +1370,7 @@ mod tests { matches_codepattern, "matches_codepattern", pprust::to_string(|s| fake_print_crate(s, &folded_crate)), - "#[a]mod zz{fn 
zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); + "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); } // even inside macro defs.... diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 39a9aff48bf..4c9a5d512af 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -65,6 +65,16 @@ macro_rules! panictry { }) } +#[macro_export] +macro_rules! unwrap_or { + ($opt:expr, $default:expr) => { + match $opt { + Some(x) => x, + None => $default, + } + } +} + #[macro_use] pub mod diagnostics { #[macro_use] diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index ded676da3c6..272cff7ad34 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -143,7 +143,8 @@ impl<'a> Parser<'a> { Ok(ast::Attribute { id: attr::mk_attr_id(), style: style, - value: value, + path: ast::Path::from_ident(value.span, ast::Ident::with_empty_ctxt(value.name)), + tokens: value.node.tokens(value.span), is_sugared_doc: false, span: span, }) @@ -221,15 +222,20 @@ impl<'a> Parser<'a> { let lo = self.span.lo; let ident = self.parse_ident()?; - let node = if self.eat(&token::Eq) { + let node = self.parse_meta_item_kind()?; + let hi = self.prev_span.hi; + Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + } + + pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { + Ok(if self.eat(&token::Eq) { ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) } else if self.token == token::OpenDelim(token::Paren) { ast::MetaItemKind::List(self.parse_meta_seq()?) 
} else { + self.eat(&token::OpenDelim(token::Paren)); ast::MetaItemKind::Word - }; - let hi = self.prev_span.hi; - Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + }) } /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index c00d2952b3b..2bdd3938d6b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -374,38 +374,80 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s[1..].chars().all(|c| '0' <= c && c <= '9') } -fn filtered_float_lit(data: Symbol, suffix: Option, sd: &Handler, sp: Span) - -> ast::LitKind { +macro_rules! err { + ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => { + match $opt_diag { + Some(($span, $diag)) => { $($body)* } + None => return None, + } + } +} + +pub fn lit_token(lit: token::Lit, suf: Option, diag: Option<(Span, &Handler)>) + -> (bool /* suffix illegal? */, Option) { + use ast::LitKind; + + match lit { + token::Byte(i) => (true, Some(LitKind::Byte(byte_lit(&i.as_str()).0))), + token::Char(i) => (true, Some(LitKind::Char(char_lit(&i.as_str()).0))), + + // There are some valid suffixes for integer and float literals, + // so all the handling is done internally. 
+ token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)), + token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)), + + token::Str_(s) => { + let s = Symbol::intern(&str_lit(&s.as_str())); + (true, Some(LitKind::Str(s, ast::StrStyle::Cooked))) + } + token::StrRaw(s, n) => { + let s = Symbol::intern(&raw_str_lit(&s.as_str())); + (true, Some(LitKind::Str(s, ast::StrStyle::Raw(n)))) + } + token::ByteStr(i) => { + (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str())))) + } + token::ByteStrRaw(i, _) => { + (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))) + } + } +} + +fn filtered_float_lit(data: Symbol, suffix: Option, diag: Option<(Span, &Handler)>) + -> Option { debug!("filtered_float_lit: {}, {:?}", data, suffix); let suffix = match suffix { Some(suffix) => suffix, - None => return ast::LitKind::FloatUnsuffixed(data), + None => return Some(ast::LitKind::FloatUnsuffixed(data)), }; - match &*suffix.as_str() { + Some(match &*suffix.as_str() { "f32" => ast::LitKind::Float(data, ast::FloatTy::F32), "f64" => ast::LitKind::Float(data, ast::FloatTy::F64), suf => { - if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { - // if it looks like a width, lets try to be helpful. - sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..])) - .help("valid widths are 32 and 64") - .emit(); - } else { - sd.struct_span_err(sp, &format!("invalid suffix `{}` for float literal", suf)) - .help("valid suffixes are `f32` and `f64`") - .emit(); - } + err!(diag, |span, diag| { + if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { + // if it looks like a width, lets try to be helpful. 
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]); + diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit() + } else { + let msg = format!("invalid suffix `{}` for float literal", suf); + diag.struct_span_err(span, &msg) + .help("valid suffixes are `f32` and `f64`") + .emit(); + } + }); ast::LitKind::FloatUnsuffixed(data) } - } + }) } -pub fn float_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> ast::LitKind { +pub fn float_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) + -> Option { debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is deferred until trans let s = s.chars().filter(|&c| c != '_').collect::(); - filtered_float_lit(Symbol::intern(&s), suffix, sd, sp) + filtered_float_lit(Symbol::intern(&s), suffix, diag) } /// Parse a string representing a byte literal into its final form. Similar to `char_lit` @@ -500,7 +542,8 @@ pub fn byte_str_lit(lit: &str) -> Rc> { Rc::new(res) } -pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> ast::LitKind { +pub fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) + -> Option { // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); @@ -524,13 +567,16 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a // 1f64 and 2f32 etc. are valid float literals. 
if let Some(suf) = suffix { if looks_like_width_suffix(&['f'], &suf.as_str()) { - match base { - 16 => sd.span_err(sp, "hexadecimal float literal is not supported"), - 8 => sd.span_err(sp, "octal float literal is not supported"), - 2 => sd.span_err(sp, "binary float literal is not supported"), - _ => () + let err = match base { + 16 => Some("hexadecimal float literal is not supported"), + 8 => Some("octal float literal is not supported"), + 2 => Some("binary float literal is not supported"), + _ => None, + }; + if let Some(err) = err { + err!(diag, |span, diag| diag.span_err(span, err)); } - return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp) + return filtered_float_lit(Symbol::intern(&s), Some(suf), diag) } } @@ -539,7 +585,9 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a } if let Some(suf) = suffix { - if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")} + if suf.as_str().is_empty() { + err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some")); + } ty = match &*suf.as_str() { "isize" => ast::LitIntType::Signed(ast::IntTy::Is), "i8" => ast::LitIntType::Signed(ast::IntTy::I8), @@ -556,17 +604,20 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a suf => { // i and u look like widths, so lets // give an error message along those lines - if looks_like_width_suffix(&['i', 'u'], suf) { - sd.struct_span_err(sp, &format!("invalid width `{}` for integer literal", - &suf[1..])) - .help("valid widths are 8, 16, 32, 64 and 128") - .emit(); - } else { - sd.struct_span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf)) - .help("the suffix must be one of the integral types \ - (`u32`, `isize`, etc)") - .emit(); - } + err!(diag, |span, diag| { + if looks_like_width_suffix(&['i', 'u'], suf) { + let msg = format!("invalid width `{}` for integer literal", &suf[1..]); + diag.struct_span_err(span, &msg) + .help("valid widths are 8, 16, 32, 64 and 
128") + .emit(); + } else { + let msg = format!("invalid suffix `{}` for numeric literal", suf); + diag.struct_span_err(span, &msg) + .help("the suffix must be one of the integral types \ + (`u32`, `isize`, etc)") + .emit(); + } + }); ty } @@ -576,7 +627,7 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \ string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix); - match u128::from_str_radix(s, base) { + Some(match u128::from_str_radix(s, base) { Ok(r) => ast::LitKind::Int(r, ty), Err(_) => { // small bases are lexed as if they were base 10, e.g, the string @@ -588,11 +639,11 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base)); if !already_errored { - sd.span_err(sp, "int literal is too large"); + err!(diag, |span, diag| diag.span_err(span, "int literal is too large")); } ast::LitKind::Int(0, ty) } - } + }) } #[cfg(test)] @@ -957,7 +1008,7 @@ mod tests { let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); - let docs = item.attrs.iter().filter(|a| a.name() == "doc") + let docs = item.attrs.iter().filter(|a| a.path == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; assert_eq!(&docs[..], b); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 9872afd27b7..ed512b89987 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -60,7 +60,6 @@ use util::ThinVec; use std::collections::HashSet; use std::{cmp, mem, slice}; use std::path::{Path, PathBuf}; -use std::rc::Rc; bitflags! 
{ flags Restrictions: u8 { @@ -1643,44 +1642,15 @@ impl<'a> Parser<'a> { _ => { return self.unexpected_last(&self.token); } }, token::Literal(lit, suf) => { - let (suffix_illegal, out) = match lit { - token::Byte(i) => (true, LitKind::Byte(parse::byte_lit(&i.as_str()).0)), - token::Char(i) => (true, LitKind::Char(parse::char_lit(&i.as_str()).0)), - - // there are some valid suffixes for integer and - // float literals, so all the handling is done - // internally. - token::Integer(s) => { - let diag = &self.sess.span_diagnostic; - (false, parse::integer_lit(&s.as_str(), suf, diag, self.span)) - } - token::Float(s) => { - let diag = &self.sess.span_diagnostic; - (false, parse::float_lit(&s.as_str(), suf, diag, self.span)) - } - - token::Str_(s) => { - let s = Symbol::intern(&parse::str_lit(&s.as_str())); - (true, LitKind::Str(s, ast::StrStyle::Cooked)) - } - token::StrRaw(s, n) => { - let s = Symbol::intern(&parse::raw_str_lit(&s.as_str())); - (true, LitKind::Str(s, ast::StrStyle::Raw(n))) - } - token::ByteStr(i) => { - (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))) - } - token::ByteStrRaw(i, _) => { - (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))) - } - }; + let diag = Some((self.span, &self.sess.span_diagnostic)); + let (suffix_illegal, result) = parse::lit_token(lit, suf, diag); if suffix_illegal { let sp = self.span; self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf) } - out + result.unwrap() } _ => { return self.unexpected_last(&self.token); } }; @@ -5135,11 +5105,9 @@ impl<'a> Parser<'a> { let attr = ast::Attribute { id: attr::mk_attr_id(), style: ast::AttrStyle::Outer, - value: ast::MetaItem { - name: Symbol::intern("warn_directory_ownership"), - node: ast::MetaItemKind::Word, - span: syntax_pos::DUMMY_SP, - }, + path: ast::Path::from_ident(syntax_pos::DUMMY_SP, + Ident::from_str("warn_directory_ownership")), + tokens: TokenStream::empty(), is_sugared_doc: false, span: syntax_pos::DUMMY_SP, }; diff --git 
a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 5b65aac92b8..38377004572 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -17,7 +17,7 @@ pub use self::Token::*; use ast::{self}; use ptr::P; use symbol::keywords; -use tokenstream; +use tokenstream::TokenTree; use std::fmt; use std::rc::Rc; @@ -348,7 +348,7 @@ pub enum Nonterminal { /// Stuff inside brackets for attributes NtMeta(ast::MetaItem), NtPath(ast::Path), - NtTT(tokenstream::TokenTree), + NtTT(TokenTree), // These are not exposed to macros, but are used by quasiquote. NtArm(ast::Arm), NtImplItem(ast::ImplItem), diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 3efadbd00d1..d8af95d8d30 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -28,7 +28,7 @@ use ptr::P; use std_inject; use symbol::{Symbol, keywords}; use syntax_pos::DUMMY_SP; -use tokenstream::{self, TokenTree}; +use tokenstream::{self, TokenStream, TokenTree}; use std::ascii; use std::io::{self, Write, Read}; @@ -329,6 +329,10 @@ pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String { to_string(|s| s.print_tts(tts.iter().cloned().collect())) } +pub fn tokens_to_string(tokens: TokenStream) -> String { + to_string(|s| s.print_tts(tokens)) +} + pub fn stmt_to_string(stmt: &ast::Stmt) -> String { to_string(|s| s.print_stmt(stmt)) } @@ -750,7 +754,21 @@ pub trait PrintState<'a> { ast::AttrStyle::Inner => word(self.writer(), "#![")?, ast::AttrStyle::Outer => word(self.writer(), "#[")?, } - self.print_meta_item(&attr.meta())?; + if let Some(mi) = attr.meta() { + self.print_meta_item(&mi)? + } else { + for (i, segment) in attr.path.segments.iter().enumerate() { + if i > 0 { + word(self.writer(), "::")? 
+ } + if segment.identifier.name != keywords::CrateRoot.name() && + segment.identifier.name != "$crate" { + word(self.writer(), &segment.identifier.name.as_str())?; + } + } + space(self.writer())?; + self.print_tts(attr.tokens.clone())?; + } word(self.writer(), "]") } } @@ -789,6 +807,45 @@ pub trait PrintState<'a> { self.end() } + /// This doesn't deserve to be called "pretty" printing, but it should be + /// meaning-preserving. A quick hack that might help would be to look at the + /// spans embedded in the TTs to decide where to put spaces and newlines. + /// But it'd be better to parse these according to the grammar of the + /// appropriate macro, transcribe back into the grammar we just parsed from, + /// and then pretty-print the resulting AST nodes (so, e.g., we print + /// expression arguments as expressions). It can be done! I think. + fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> { + match tt { + TokenTree::Token(_, ref tk) => { + word(self.writer(), &token_to_string(tk))?; + match *tk { + parse::token::DocComment(..) => { + hardbreak(self.writer()) + } + _ => Ok(()) + } + } + TokenTree::Delimited(_, ref delimed) => { + word(self.writer(), &token_to_string(&delimed.open_token()))?; + space(self.writer())?; + self.print_tts(delimed.stream())?; + space(self.writer())?; + word(self.writer(), &token_to_string(&delimed.close_token())) + }, + } + } + + fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> { + self.ibox(0)?; + for (i, tt) in tts.into_trees().enumerate() { + if i != 0 { + space(self.writer())?; + } + self.print_tt(tt)?; + } + self.end() + } + fn space_if_not_bol(&mut self) -> io::Result<()> { if !self.is_bol() { space(self.writer())?; } Ok(()) @@ -1458,45 +1515,6 @@ impl<'a> State<'a> { } } - /// This doesn't deserve to be called "pretty" printing, but it should be - /// meaning-preserving. 
A quick hack that might help would be to look at the - /// spans embedded in the TTs to decide where to put spaces and newlines. - /// But it'd be better to parse these according to the grammar of the - /// appropriate macro, transcribe back into the grammar we just parsed from, - /// and then pretty-print the resulting AST nodes (so, e.g., we print - /// expression arguments as expressions). It can be done! I think. - pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> { - match tt { - TokenTree::Token(_, ref tk) => { - word(&mut self.s, &token_to_string(tk))?; - match *tk { - parse::token::DocComment(..) => { - hardbreak(&mut self.s) - } - _ => Ok(()) - } - } - TokenTree::Delimited(_, ref delimed) => { - word(&mut self.s, &token_to_string(&delimed.open_token()))?; - space(&mut self.s)?; - self.print_tts(delimed.stream())?; - space(&mut self.s)?; - word(&mut self.s, &token_to_string(&delimed.close_token())) - }, - } - } - - pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> { - self.ibox(0)?; - for (i, tt) in tts.into_trees().enumerate() { - if i != 0 { - space(&mut self.s)?; - } - self.print_tt(tt)?; - } - self.end() - } - pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> { self.head("")?; let generics = ast::Generics::default(); diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 4a2dfaf6124..94954e2c429 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -15,6 +15,7 @@ use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; use parse::ParseSess; use ptr::P; +use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's /// call to codemap's is_internal check. 
@@ -70,11 +71,8 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, krate.module.items.insert(0, P(ast::Item { attrs: vec![ast::Attribute { style: ast::AttrStyle::Outer, - value: ast::MetaItem { - name: Symbol::intern("prelude_import"), - node: ast::MetaItemKind::Word, - span: span, - }, + path: ast::Path::from_ident(span, ast::Ident::from_str("prelude_import")), + tokens: TokenStream::empty(), id: attr::mk_attr_id(), is_sugared_doc: false, span: span, diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 2da442a1a53..35e4d9eb68a 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -360,7 +360,7 @@ impl PartialEq for ThinTokenStream { impl fmt::Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(&pprust::tts_to_string(&self.trees().collect::>())) + f.write_str(&pprust::tokens_to_string(self.clone())) } } diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index a7e2d82bb97..b01ef65e5fe 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -23,9 +23,11 @@ struct MarkAttrs<'a>(&'a [ast::Name]); impl<'a> Visitor<'a> for MarkAttrs<'a> { fn visit_attribute(&mut self, attr: &Attribute) { - if self.0.contains(&attr.name()) { - mark_used(attr); - mark_known(attr); + if let Some(name) = attr.name() { + if self.0.contains(&name) { + mark_used(attr); + mark_known(attr); + } } } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index fe492bd7fc8..48e7ff0d243 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -439,7 +439,7 @@ impl<'a> TraitDef<'a> { attrs.extend(item.attrs .iter() .filter(|a| { - match &*a.name().as_str() { + a.name().is_some() && match &*a.name().unwrap().as_str() { "allow" | "warn" | "deny" | "forbid" | "stable" | "unstable" => true, _ => false, } diff --git 
a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 5adaf470f23..2d815b3f1bb 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -248,7 +248,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { fn visit_item(&mut self, item: &'a ast::Item) { if let ast::ItemKind::MacroDef(..) = item.node { if self.is_proc_macro_crate && - item.attrs.iter().any(|attr| attr.name() == "macro_export") { + item.attrs.iter().any(|attr| attr.path == "macro_export") { let msg = "cannot export macro_rules! macros from a `proc-macro` crate type currently"; self.handler.span_err(item.span, msg); @@ -270,12 +270,12 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { for attr in &item.attrs { if is_proc_macro_attr(&attr) { if let Some(prev_attr) = found_attr { - let msg = if attr.name() == prev_attr.name() { + let msg = if attr.path == prev_attr.path { format!("Only one `#[{}]` attribute is allowed on any given function", - attr.name()) + attr.path) } else { format!("`#[{}]` and `#[{}]` attributes cannot both be applied \ - to the same function", attr.name(), prev_attr.name()) + to the same function", attr.path, prev_attr.path) }; self.handler.struct_span_err(attr.span(), &msg) @@ -299,7 +299,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { if !is_fn { let msg = format!("the `#[{}]` attribute may only be used on bare functions", - attr.name()); + attr.path); self.handler.span_err(attr.span(), &msg); return; @@ -311,7 +311,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { if !self.is_proc_macro_crate { let msg = format!("the `#[{}]` attribute is only usable with crates of the \ - `proc-macro` crate type", attr.name()); + `proc-macro` crate type", attr.path); self.handler.span_err(attr.span(), &msg); return; -- cgit 1.4.1-3-g733a5 From 839c2860ccb7cd3d381abf2838dfba566f52618e Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 8 Mar 2017 23:13:35 +0000 Subject: Liberalize attributes. 
--- src/librustc_resolve/lib.rs | 2 + src/librustc_resolve/macros.rs | 54 +++++++++----- src/libsyntax/attr.rs | 46 +++++++++++- src/libsyntax/config.rs | 72 +++++++++---------- src/libsyntax/ext/derive.rs | 45 ++++++------ src/libsyntax/ext/expand.rs | 94 +++++++++++++------------ src/libsyntax/ext/tt/macro_rules.rs | 3 +- src/libsyntax/feature_gate.rs | 4 ++ src/libsyntax/parse/attr.rs | 32 +++++++-- src/libsyntax/parse/parser.rs | 11 +++ src/test/compile-fail/macro-attribute.rs | 12 ++++ src/test/compile-fail/malformed-derive-entry.rs | 4 +- src/test/compile-fail/suffixed-literal-meta.rs | 25 +++++++ src/test/parse-fail/attr-bad-meta.rs | 6 +- src/test/parse-fail/macro-attribute.rs | 14 ---- src/test/parse-fail/suffixed-literal-meta.rs | 25 ------- src/test/ui/span/E0536.stderr | 2 +- src/test/ui/span/E0537.stderr | 2 +- 18 files changed, 269 insertions(+), 184 deletions(-) create mode 100644 src/test/compile-fail/macro-attribute.rs create mode 100644 src/test/compile-fail/suffixed-literal-meta.rs delete mode 100644 src/test/parse-fail/macro-attribute.rs delete mode 100644 src/test/parse-fail/suffixed-literal-meta.rs (limited to 'src/libsyntax/parse') diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index c3e471650a3..bf7115abd4e 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1165,6 +1165,7 @@ pub struct Resolver<'a> { privacy_errors: Vec>, ambiguity_errors: Vec>, + gated_errors: FxHashSet, disallowed_shadowing: Vec<&'a LegacyBinding<'a>>, arenas: &'a ResolverArenas<'a>, @@ -1355,6 +1356,7 @@ impl<'a> Resolver<'a> { privacy_errors: Vec::new(), ambiguity_errors: Vec::new(), + gated_errors: FxHashSet(), disallowed_shadowing: Vec::new(), arenas: arenas, diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 9e1dcd1bc35..67ce24efb3b 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -28,9 +28,11 @@ use syntax::ext::placeholders::placeholder; use 
syntax::ext::tt::macro_rules; use syntax::feature_gate::{self, emit_feature_err, GateIssue}; use syntax::fold::{self, Folder}; +use syntax::parse::parser::PathStyle; +use syntax::parse::token::{self, Token}; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; -use syntax::tokenstream::TokenStream; +use syntax::tokenstream::{TokenStream, TokenTree, Delimited}; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::{Span, DUMMY_SP}; @@ -200,16 +202,22 @@ impl<'a> base::Resolver for Resolver<'a> { let name = unwrap_or!(attrs[i].name(), continue); if name == "derive" { - let mut traits = match attrs[i].meta_item_list() { - Some(traits) => traits, - _ => continue, + let result = attrs[i].parse_list(&self.session.parse_sess, + |parser| parser.parse_path(PathStyle::Mod)); + let mut traits = match result { + Ok(traits) => traits, + Err(mut e) => { + e.cancel(); + continue + } }; for j in 0..traits.len() { - let legacy_name = Symbol::intern(&match traits[j].word() { - Some(..) 
=> format!("derive_{}", traits[j].name().unwrap()), - None => continue, - }); + if traits[j].segments.len() > 1 { + continue + } + let trait_name = traits[j].segments[0].identifier.name; + let legacy_name = Symbol::intern(&format!("derive_{}", trait_name)); if !self.builtin_macros.contains_key(&legacy_name) { continue } @@ -218,7 +226,23 @@ impl<'a> base::Resolver for Resolver<'a> { if traits.is_empty() { attrs.remove(i); } else { - attrs[i].tokens = ast::MetaItemKind::List(traits).tokens(attrs[i].span); + let mut tokens = Vec::new(); + for (i, path) in traits.iter().enumerate() { + if i > 0 { + tokens.push(TokenTree::Token(attrs[i].span, Token::Comma).into()); + } + for (j, segment) in path.segments.iter().enumerate() { + if j > 0 { + tokens.push(TokenTree::Token(path.span, Token::ModSep).into()); + } + let tok = Token::Ident(segment.identifier); + tokens.push(TokenTree::Token(path.span, tok).into()); + } + } + attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited { + delim: token::Paren, + tts: TokenStream::concat(tokens).into(), + }).into(); } return Some(ast::Attribute { path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)), @@ -262,9 +286,8 @@ impl<'a> Resolver<'a> { InvocationKind::Bang { ref mac, .. } => { return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force); } - InvocationKind::Derive { name, span, .. } => { - let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); - return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force); + InvocationKind::Derive { ref path, .. 
} => { + return self.resolve_macro_to_def(scope, path, MacroKind::Derive, force); } }; @@ -282,9 +305,8 @@ impl<'a> Resolver<'a> { 1 => path.segments[0].identifier.name, _ => return Err(determinacy), }; - for &(name, span) in traits { - let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); - match self.resolve_macro(scope, &path, MacroKind::Derive, force) { + for path in traits { + match self.resolve_macro(scope, path, MacroKind::Derive, force) { Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext { if inert_attrs.contains(&attr_name) { // FIXME(jseyfried) Avoid `mem::replace` here. @@ -327,7 +349,7 @@ impl<'a> Resolver<'a> { self.current_module = invocation.module.get(); if path.len() > 1 { - if !self.use_extern_macros { + if !self.use_extern_macros && self.gated_errors.insert(span) { let msg = "non-ident macro paths are experimental"; let feature = "use_extern_macros"; emit_feature_err(&self.session.parse_sess, feature, span, GateIssue::Language, msg); diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 68f1f690a62..2f1efd6ad00 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -17,7 +17,7 @@ pub use self::IntType::*; use ast; use ast::{AttrId, Attribute, Name, Ident}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; -use ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind}; +use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind}; use codemap::{Spanned, spanned, dummy_spanned, mk_sp}; use syntax_pos::{Span, BytePos, DUMMY_SP}; use errors::Handler; @@ -299,6 +299,37 @@ impl Attribute { }) } + pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, + { + let mut parser = Parser::new(sess, self.tokens.clone(), None, false); + let result = f(&mut parser)?; + if parser.token != token::Eof { + parser.unexpected()?; + } + Ok(result) + } + + pub fn parse_list<'a, T, F>(&self, sess: &'a 
ParseSess, mut f: F) -> PResult<'a, Vec> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, + { + if self.tokens.is_empty() { + return Ok(Vec::new()); + } + self.parse(sess, |parser| { + parser.expect(&token::OpenDelim(token::Paren))?; + let mut list = Vec::new(); + while !parser.eat(&token::CloseDelim(token::Paren)) { + list.push(f(parser)?); + if !parser.eat(&token::Comma) { + parser.expect(&token::CloseDelim(token::Paren))?; + break + } + } + Ok(list) + }) + } + pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> { if self.path.segments.len() > 1 { sess.span_diagnostic.span_err(self.path.span, "expected ident, found path"); @@ -306,7 +337,7 @@ impl Attribute { Ok(MetaItem { name: self.path.segments.last().unwrap().identifier.name, - node: Parser::new(sess, self.tokens.clone(), None, false).parse_meta_item_kind()?, + node: self.parse(sess, |parser| parser.parse_meta_item_kind())?, span: self.span, }) } @@ -985,6 +1016,10 @@ impl MetaItem { { let (mut span, name) = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), + Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt { + token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()), + _ => None, + }, _ => return None, }; let node = match MetaItemKind::from_tokens(tokens) { @@ -1151,6 +1186,13 @@ impl LitKind { match token { Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)), Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)), + Token::Interpolated(ref nt) => match **nt { + token::NtExpr(ref v) => match v.node { + ExprKind::Lit(ref lit) => Some(lit.node.clone()), + _ => None, + }, + _ => None, + }, Token::Literal(lit, suf) => { let (suffix_illegal, result) = parse::lit_token(lit, suf, None); if suffix_illegal && suf.is_some() { diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 2591a576669..ede8a33df65 100644 --- a/src/libsyntax/config.rs +++ 
b/src/libsyntax/config.rs @@ -13,9 +13,10 @@ use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features use {fold, attr}; use ast; use codemap::Spanned; -use parse::ParseSess; -use ptr::P; +use parse::{token, ParseSess}; +use syntax_pos::Span; +use ptr::P; use util::small_vector::SmallVector; /// A folder that strips out items that do not belong in the current configuration. @@ -84,44 +85,33 @@ impl<'a> StripUnconfigured<'a> { return Some(attr); } - let attr_list = match attr.meta_item_list() { - Some(attr_list) => attr_list, - None => { - let msg = "expected `#[cfg_attr(, )]`"; - self.sess.span_diagnostic.span_err(attr.span, msg); - return None; - } - }; - - let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) { - (2, Some(cfg), Some(mi)) => (cfg, mi), - _ => { - let msg = "expected `#[cfg_attr(, )]`"; - self.sess.span_diagnostic.span_err(attr.span, msg); + let (cfg, path, tokens, span) = match attr.parse(self.sess, |parser| { + parser.expect(&token::OpenDelim(token::Paren))?; + let cfg = parser.parse_meta_item()?; + parser.expect(&token::Comma)?; + let lo = parser.span.lo; + let (path, tokens) = parser.parse_path_and_tokens()?; + parser.expect(&token::CloseDelim(token::Paren))?; + Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span })) + }) { + Ok(result) => result, + Err(mut e) => { + e.emit(); return None; } }; - use attr::cfg_matches; - match (cfg.meta_item(), mi.meta_item()) { - (Some(cfg), Some(mi)) => - if cfg_matches(&cfg, self.sess, self.features) { - self.process_cfg_attr(ast::Attribute { - id: attr::mk_attr_id(), - style: attr.style, - path: ast::Path::from_ident(mi.span, ast::Ident::with_empty_ctxt(mi.name)), - tokens: mi.node.tokens(mi.span), - is_sugared_doc: false, - span: mi.span, - }) - } else { - None - }, - _ => { - let msg = "unexpected literal(s) in `#[cfg_attr(, )]`"; - self.sess.span_diagnostic.span_err(attr.span, msg); - None - } + if attr::cfg_matches(&cfg, self.sess, self.features) { + 
self.process_cfg_attr(ast::Attribute { + id: attr::mk_attr_id(), + style: attr.style, + path: path, + tokens: tokens, + is_sugared_doc: false, + span: span, + }) + } else { + None } } @@ -133,10 +123,12 @@ impl<'a> StripUnconfigured<'a> { return false; } - let mis = attr.meta_item_list(); - let mis = match mis { - Some(ref mis) if is_cfg(&attr) => mis, - _ => return true + let mis = if !is_cfg(&attr) { + return true; + } else if let Some(mis) = attr.meta_item_list() { + mis + } else { + return true; }; if mis.len() != 1 { diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 5b253635f25..1569d9f540b 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -12,36 +12,31 @@ use attr::HasAttrs; use {ast, codemap}; use ext::base::ExtCtxt; use ext::build::AstBuilder; +use parse::parser::PathStyle; use symbol::Symbol; use syntax_pos::Span; -pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec<(Symbol, Span)> { +pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec { let mut result = Vec::new(); attrs.retain(|attr| { if attr.path != "derive" { return true; } - if attr.value_str().is_some() { - cx.span_err(attr.span, "unexpected value in `derive`"); - return false; - } - - let traits = attr.meta_item_list().unwrap_or_else(Vec::new); - if traits.is_empty() { - cx.span_warn(attr.span, "empty trait list in `derive`"); - return false; - } - - for titem in traits { - if titem.word().is_none() { - cx.span_err(titem.span, "malformed `derive` entry"); - return false; + match attr.parse_list(cx.parse_sess, |parser| parser.parse_path(PathStyle::Mod)) { + Ok(ref traits) if traits.is_empty() => { + cx.span_warn(attr.span, "empty trait list in `derive`"); + false + } + Ok(traits) => { + result.extend(traits); + true + } + Err(mut e) => { + e.emit(); + false } - result.push((titem.name().unwrap(), titem.span)); } - - true }); result } @@ -60,21 +55,21 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> 
Span { } } -pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[(Symbol, Span)], item: T) -> T { +pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T { let span = match traits.get(0) { - Some(&(_, span)) => span, + Some(path) => path.span, None => return item, }; item.map_attrs(|mut attrs| { - if traits.iter().any(|&(name, _)| name == "PartialEq") && - traits.iter().any(|&(name, _)| name == "Eq") { + if traits.iter().any(|path| *path == "PartialEq") && + traits.iter().any(|path| *path == "Eq") { let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); let meta = cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); } - if traits.iter().any(|&(name, _)| name == "Copy") && - traits.iter().any(|&(name, _)| name == "Clone") { + if traits.iter().any(|path| *path == "Copy") && + traits.iter().any(|path| *path == "Clone") { let span = allow_unstable(cx, span, "derive(Copy, Clone)"); let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index c1095d34456..c1816582bc6 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast::{self, Block, Ident, PatKind}; -use ast::{Name, MacStmtStyle, StmtKind, ItemKind}; +use ast::{self, Block, Ident, PatKind, Path}; +use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use config::{is_test_or_bench, StripUnconfigured}; @@ -27,7 +27,7 @@ use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; -use syntax_pos::{self, Span, ExpnId}; +use syntax_pos::{Span, ExpnId, DUMMY_SP}; use tokenstream::TokenStream; use util::small_vector::SmallVector; use visit::Visitor; @@ -165,12 +165,11 @@ pub enum InvocationKind { }, Attr { attr: Option, - traits: Vec<(Symbol, Span)>, + traits: Vec, item: Annotatable, }, Derive { - name: Symbol, - span: Span, + path: Path, item: Annotatable, }, } @@ -180,8 +179,8 @@ impl Invocation { match self.kind { InvocationKind::Bang { span, .. } => span, InvocationKind::Attr { attr: Some(ref attr), .. } => attr.span, - InvocationKind::Attr { attr: None, .. } => syntax_pos::DUMMY_SP, - InvocationKind::Derive { span, .. } => span, + InvocationKind::Attr { attr: None, .. } => DUMMY_SP, + InvocationKind::Derive { ref path, .. } => path.span, } } } @@ -277,12 +276,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { add_derived_markers(&mut self.cx, &traits, item.clone()); let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new); - for &(name, span) in &traits { + for path in &traits { let mark = Mark::fresh(); derives.push(mark); - let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); let item = match self.cx.resolver.resolve_macro( - Mark::root(), &path, MacroKind::Derive, false) { + Mark::root(), path, MacroKind::Derive, false) { Ok(ext) => match *ext { SyntaxExtension::BuiltinDerive(..) 
=> item_with_markers.clone(), _ => item.clone(), @@ -290,7 +288,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { _ => item.clone(), }; invocations.push(Invocation { - kind: InvocationKind::Derive { name: name, span: span, item: item }, + kind: InvocationKind::Derive { path: path.clone(), item: item }, expansion_kind: invoc.expansion_kind, expansion_data: ExpansionData { mark: mark, @@ -380,11 +378,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - let name = attr.path.segments[0].identifier.name; self.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - format: MacroAttribute(name), + format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), span: Some(attr.span), allow_internal_unstable: false, } @@ -419,14 +416,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks); - self.parse_expansion(tok_result, kind, name, span) + self.parse_expansion(tok_result, kind, &attr.path, span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => { - self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name)); + self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path)); kind.dummy(attr.span) } _ => { - let msg = &format!("macro `{}` may not be used in attributes", name); + let msg = &format!("macro `{}` may not be used in attributes", attr.path); self.cx.span_err(attr.span, &msg); kind.dummy(attr.span) } @@ -442,7 +439,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; let path = &mac.node.path; - let extname = path.segments.last().unwrap().identifier.name; let ident = ident.unwrap_or(keywords::Invalid.ident()); let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None }); @@ -450,7 +446,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { if ident.name != keywords::Invalid.name() { let msg = - format!("macro {}! 
expects no ident argument, given '{}'", extname, ident); + format!("macro {}! expects no ident argument, given '{}'", path, ident); self.cx.span_err(path.span, &msg); return kind.dummy(span); } @@ -458,7 +454,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - format: MacroBang(extname), + format: MacroBang(Symbol::intern(&format!("{}", path))), span: exp_span, allow_internal_unstable: allow_internal_unstable, }, @@ -470,14 +466,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { IdentTT(ref expander, tt_span, allow_internal_unstable) => { if ident.name == keywords::Invalid.name() { self.cx.span_err(path.span, - &format!("macro {}! expects an ident argument", extname)); + &format!("macro {}! expects an ident argument", path)); return kind.dummy(span); }; self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - format: MacroBang(extname), + format: MacroBang(Symbol::intern(&format!("{}", path))), span: tt_span, allow_internal_unstable: allow_internal_unstable, } @@ -489,19 +485,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> { MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => { self.cx.span_err(path.span, - &format!("`{}` can only be used in attributes", extname)); + &format!("`{}` can only be used in attributes", path)); return kind.dummy(span); } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => { - self.cx.span_err(path.span, &format!("`{}` is a derive mode", extname)); + self.cx.span_err(path.span, &format!("`{}` is a derive mode", path)); return kind.dummy(span); } SyntaxExtension::ProcMacro(ref expandfun) => { if ident.name != keywords::Invalid.name() { let msg = - format!("macro {}! expects no ident argument, given '{}'", extname, ident); + format!("macro {}! 
expects no ident argument, given '{}'", path, ident); self.cx.span_err(path.span, &msg); return kind.dummy(span); } @@ -509,7 +505,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - format: MacroBang(extname), + format: MacroBang(Symbol::intern(&format!("{}", path))), // FIXME procedural macros do not have proper span info // yet, when they do, we should use it here. span: None, @@ -519,7 +515,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }); let tok_result = expandfun.expand(self.cx, span, marked_tts); - Some(self.parse_expansion(tok_result, kind, extname, span)) + Some(self.parse_expansion(tok_result, kind, path, span)) } }; @@ -541,19 +537,24 @@ impl<'a, 'b> MacroExpander<'a, 'b> { /// Expand a derive invocation. Returns the result of expansion. fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { let Invocation { expansion_kind: kind, .. } = invoc; - let (name, span, item) = match invoc.kind { - InvocationKind::Derive { name, span, item } => (name, span, item), + let (path, item) = match invoc.kind { + InvocationKind::Derive { path, item } => (path, item), _ => unreachable!(), }; - let mitem = ast::MetaItem { name: name, span: span, node: ast::MetaItemKind::Word }; - let pretty_name = Symbol::intern(&format!("derive({})", name)); + let pretty_name = Symbol::intern(&format!("derive({})", path)); + let span = path.span; + let attr = ast::Attribute { + path: path, tokens: TokenStream::empty(), span: span, + // irrelevant: + id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false, + }; self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroAttribute(pretty_name), - span: Some(span), + span: None, allow_internal_unstable: false, } }); @@ -571,7 +572,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }), ..span }; - return kind.expect_from_annotatables(ext.expand(self.cx, span, &mitem, item)); + let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this + 
name: keywords::Invalid.name(), + span: DUMMY_SP, + node: ast::MetaItemKind::Word, + }; + return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)); } SyntaxExtension::BuiltinDerive(func) => { let span = Span { @@ -586,20 +592,18 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ..span }; let mut items = Vec::new(); - func(self.cx, span, &mitem, &item, &mut |a| { - items.push(a) - }); + func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a)); return kind.expect_from_annotatables(items); } _ => { - let msg = &format!("macro `{}` may not be used for derive attributes", name); + let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); self.cx.span_err(span, &msg); kind.dummy(span) } } } - fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span) + fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, path: &Path, span: Span) -> Expansion { let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::>()); let expansion = match parser.parse_expansion(kind, false) { @@ -609,7 +613,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } }; - parser.ensure_complete_parse(name, kind.name(), span); + parser.ensure_complete_parse(path, kind.name(), span); // FIXME better span info expansion.fold_with(&mut ChangeSpan { span: span }) } @@ -658,14 +662,14 @@ impl<'a> Parser<'a> { }) } - pub fn ensure_complete_parse(&mut self, macro_name: ast::Name, kind_name: &str, span: Span) { + pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span: Span) { if self.token != token::Eof { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); let mut err = self.diagnostic().struct_span_err(self.span, &msg); let msg = format!("caused by the macro expansion here; the usage \ of `{}!` is likely invalid in {} context", - macro_name, kind_name); + macro_path, kind_name); err.span_note(span, 
&msg).emit(); } } @@ -708,20 +712,20 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { fn collect_attr(&mut self, attr: Option, - traits: Vec<(Symbol, Span)>, + traits: Vec, item: Annotatable, kind: ExpansionKind) -> Expansion { if !traits.is_empty() && (kind == ExpansionKind::TraitItems || kind == ExpansionKind::ImplItems) { - self.cx.span_err(traits[0].1, "`derive` can be only be applied to items"); + self.cx.span_err(traits[0].span, "`derive` can be only be applied to items"); return kind.expect_from_annotatables(::std::iter::once(item)); } self.collect(kind, InvocationKind::Attr { attr: attr, traits: traits, item: item }) } // If `item` is an attr invocation, remove and return the macro attribute. - fn classify_item(&mut self, mut item: T) -> (Option, Vec<(Symbol, Span)>, T) + fn classify_item(&mut self, mut item: T) -> (Option, Vec, T) where T: HasAttrs, { let (mut attr, mut traits) = (None, Vec::new()); @@ -900,7 +904,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`). // In the non-inline case, `inner` is never the dummy span (c.f. `parse_item_mod`). // Thus, if `inner` is the dummy span, we know the module is inline. - let inline_module = item.span.contains(inner) || inner == syntax_pos::DUMMY_SP; + let inline_module = item.span.contains(inner) || inner == DUMMY_SP; if inline_module { if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 7aa1230f9ae..021c5398a42 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -51,7 +51,8 @@ impl<'a> ParserAnyMacro<'a> { } // Make sure we don't have any tokens left to parse so we don't silently drop anything. 
- parser.ensure_complete_parse(macro_ident.name, kind.name(), site_span); + let path = ast::Path::from_ident(site_span, macro_ident); + parser.ensure_complete_parse(&path, kind.name(), site_span); expansion } } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 2c3ad98a6be..05e7b0f9aa4 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1096,6 +1096,10 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { self.context.check_attribute(attr, false); } + if self.context.features.proc_macro && attr::is_known(attr) { + return + } + let meta = panictry!(attr.parse_meta(&self.context.parse_sess)); if contains_novel_literal(&meta) { gate_feature_post!(&self, attr_literals, attr.span, diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 272cff7ad34..53106214fa3 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -14,8 +14,9 @@ use syntax_pos::{mk_sp, Span}; use codemap::spanned; use parse::common::SeqSep; use parse::PResult; -use parse::token; -use parse::parser::{Parser, TokenType}; +use parse::token::{self, Nonterminal}; +use parse::parser::{Parser, TokenType, PathStyle}; +use tokenstream::TokenStream; #[derive(PartialEq, Eq, Debug)] enum InnerAttributeParsePolicy<'a> { @@ -91,7 +92,7 @@ impl<'a> Parser<'a> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, value, mut style) = match self.token { + let (span, path, tokens, mut style) = match self.token { token::Pound => { let lo = self.span.lo; self.bump(); @@ -119,11 +120,11 @@ impl<'a> Parser<'a> { }; self.expect(&token::OpenDelim(token::Bracket))?; - let meta_item = self.parse_meta_item()?; + let (path, tokens) = self.parse_path_and_tokens()?; self.expect(&token::CloseDelim(token::Bracket))?; let hi = self.prev_span.hi; - (mk_sp(lo, hi), meta_item, style) + (mk_sp(lo, hi), path, tokens, style) } _ => { let token_str = 
self.this_token_to_string(); @@ -143,13 +144,30 @@ impl<'a> Parser<'a> { Ok(ast::Attribute { id: attr::mk_attr_id(), style: style, - path: ast::Path::from_ident(value.span, ast::Ident::with_empty_ctxt(value.name)), - tokens: value.node.tokens(value.span), + path: path, + tokens: tokens, is_sugared_doc: false, span: span, }) } + pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { + let meta = match self.token { + token::Interpolated(ref nt) => match **nt { + Nonterminal::NtMeta(ref meta) => Some(meta.clone()), + _ => None, + }, + _ => None, + }; + Ok(if let Some(meta) = meta { + self.bump(); + (ast::Path::from_ident(meta.span, ast::Ident::with_empty_ctxt(meta.name)), + meta.node.tokens(meta.span)) + } else { + (self.parse_path(PathStyle::Mod)?, self.parse_tokens()) + }) + } + /// Parse attributes that appear after the opening of an item. These should /// be preceded by an exclamation mark, but we accept and warn about one /// terminated by a semicolon. diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ed512b89987..308876fed56 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2644,6 +2644,17 @@ impl<'a> Parser<'a> { Ok(tts) } + pub fn parse_tokens(&mut self) -> TokenStream { + let mut result = Vec::new(); + loop { + match self.token { + token::Eof | token::CloseDelim(..) => break, + _ => result.push(self.parse_token_tree().into()), + } + } + TokenStream::concat(result) + } + /// Parse a prefix-unary-operator expr pub fn parse_prefix_expr(&mut self, already_parsed_attrs: Option>) diff --git a/src/test/compile-fail/macro-attribute.rs b/src/test/compile-fail/macro-attribute.rs new file mode 100644 index 00000000000..52f867fe913 --- /dev/null +++ b/src/test/compile-fail/macro-attribute.rs @@ -0,0 +1,12 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[doc = $not_there] //~ error: unexpected token: `$` +fn main() { } diff --git a/src/test/compile-fail/malformed-derive-entry.rs b/src/test/compile-fail/malformed-derive-entry.rs index 62dbc21495a..ac000628f2b 100644 --- a/src/test/compile-fail/malformed-derive-entry.rs +++ b/src/test/compile-fail/malformed-derive-entry.rs @@ -9,11 +9,11 @@ // except according to those terms. #[derive(Copy(Bad))] -//~^ ERROR malformed `derive` entry +//~^ ERROR expected one of `)`, `,`, or `::`, found `(` struct Test1; #[derive(Copy="bad")] -//~^ ERROR malformed `derive` entry +//~^ ERROR expected one of `)`, `,`, or `::`, found `=` struct Test2; #[derive()] diff --git a/src/test/compile-fail/suffixed-literal-meta.rs b/src/test/compile-fail/suffixed-literal-meta.rs new file mode 100644 index 00000000000..bf55b7bdcb1 --- /dev/null +++ b/src/test/compile-fail/suffixed-literal-meta.rs @@ -0,0 +1,25 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(attr_literals)] + +#[path = 1usize] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u8] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u16] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u32] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u64] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1isize] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i8] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i16] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i32] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i64] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes +fn main() { } diff --git a/src/test/parse-fail/attr-bad-meta.rs b/src/test/parse-fail/attr-bad-meta.rs index 092adbf29e3..d57a813311b 100644 --- a/src/test/parse-fail/attr-bad-meta.rs +++ b/src/test/parse-fail/attr-bad-meta.rs @@ -8,10 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z parse-only - -// error-pattern:expected one of `=` or `]` - // asterisk is bogus -#[attr*] +#[path*] //~ ERROR expected one of `(` or `=` mod m {} diff --git a/src/test/parse-fail/macro-attribute.rs b/src/test/parse-fail/macro-attribute.rs deleted file mode 100644 index 18add7d011c..00000000000 --- a/src/test/parse-fail/macro-attribute.rs +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. 
This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags: -Z parse-only - -#[doc = $not_there] //~ error: unexpected token: `$` -fn main() { } diff --git a/src/test/parse-fail/suffixed-literal-meta.rs b/src/test/parse-fail/suffixed-literal-meta.rs deleted file mode 100644 index 0e2840c69d3..00000000000 --- a/src/test/parse-fail/suffixed-literal-meta.rs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags: -Z parse-only - -#[foo = 1usize] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u8] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u16] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u32] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u64] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1isize] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i8] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i16] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i32] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i64] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes -fn main() { } diff --git a/src/test/ui/span/E0536.stderr b/src/test/ui/span/E0536.stderr index c33b89953e2..b2da0c6a296 100644 --- a/src/test/ui/span/E0536.stderr +++ b/src/test/ui/span/E0536.stderr @@ -2,7 +2,7 @@ error[E0536]: expected 1 cfg-pattern --> 
$DIR/E0536.rs:11:7 | 11 | #[cfg(not())] //~ ERROR E0536 - | ^^^^^ + | ^^^ error: aborting due to previous error diff --git a/src/test/ui/span/E0537.stderr b/src/test/ui/span/E0537.stderr index 9d66ddbaae3..29873943f44 100644 --- a/src/test/ui/span/E0537.stderr +++ b/src/test/ui/span/E0537.stderr @@ -2,7 +2,7 @@ error[E0537]: invalid predicate `unknown` --> $DIR/E0537.rs:11:7 | 11 | #[cfg(unknown())] //~ ERROR E0537 - | ^^^^^^^^^ + | ^^^^^^^ error: aborting due to previous error -- cgit 1.4.1-3-g733a5 From e3b8550a601ca920284f91ceff30a29340832fe7 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Mon, 13 Mar 2017 19:07:47 -0700 Subject: Point out correct turbofish usage on `Foo>` Whenever we parse a chain of binary operations, as long as the first operation is `<` and the subsequent operations are either `>` or `<`, present the following diagnostic help: use `::<...>` instead of `<...>` if you meant to specify type arguments This will lead to spurious recommendations on situations like `2 < 3 < 4` but should be clear from context that the help doesn't apply in that case. 
--- src/libsyntax/parse/parser.rs | 5 ++++- src/test/ui/did_you_mean/issue-40396.rs | 23 +++++++++++++++++++ src/test/ui/did_you_mean/issue-40396.stderr | 34 +++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 src/test/ui/did_you_mean/issue-40396.rs create mode 100644 src/test/ui/did_you_mean/issue-40396.stderr (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 8f66c1a2b8c..208db839144 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2919,7 +2919,10 @@ impl<'a> Parser<'a> { let op_span = mk_sp(op.span.lo, self.span.hi); let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); - if op.node == BinOpKind::Lt && *outer_op == AssocOp::Greater { + if op.node == BinOpKind::Lt && + *outer_op == AssocOp::Less || // Include `<` to provide this recommendation + *outer_op == AssocOp::Greater // even in a case like the following: + { // Foo>> err.help( "use `::<...>` instead of `<...>` if you meant to specify type arguments"); } diff --git a/src/test/ui/did_you_mean/issue-40396.rs b/src/test/ui/did_you_mean/issue-40396.rs new file mode 100644 index 00000000000..1eae180976a --- /dev/null +++ b/src/test/ui/did_you_mean/issue-40396.rs @@ -0,0 +1,23 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +fn foo() { + println!("{:?}", (0..13).collect>()); +} + +fn bar() { + println!("{:?}", Vec::new()); +} + +fn qux() { + println!("{:?}", (0..13).collect()); +} + +fn main() {} diff --git a/src/test/ui/did_you_mean/issue-40396.stderr b/src/test/ui/did_you_mean/issue-40396.stderr new file mode 100644 index 00000000000..1a0c74dc01a --- /dev/null +++ b/src/test/ui/did_you_mean/issue-40396.stderr @@ -0,0 +1,34 @@ +error: chained comparison operators require parentheses + --> $DIR/issue-40396.rs:12:37 + | +12 | println!("{:?}", (0..13).collect>()); + | ^^^^^^^^ + | + = help: use `::<...>` instead of `<...>` if you meant to specify type arguments + +error: chained comparison operators require parentheses + --> $DIR/issue-40396.rs:16:25 + | +16 | println!("{:?}", Vec::new()); + | ^^^^^^^ + | + = help: use `::<...>` instead of `<...>` if you meant to specify type arguments + +error: chained comparison operators require parentheses + --> $DIR/issue-40396.rs:20:37 + | +20 | println!("{:?}", (0..13).collect()); + | ^^^^^^^^ + | + = help: use `::<...>` instead of `<...>` if you meant to specify type arguments + +error: chained comparison operators require parentheses + --> $DIR/issue-40396.rs:20:41 + | +20 | println!("{:?}", (0..13).collect()); + | ^^^^^^ + | + = help: use `::<...>` instead of `<...>` if you meant to specify type arguments + +error: aborting due to 4 previous errors + -- cgit 1.4.1-3-g733a5 From 8eaac0843eb206c37b52db9d96094503487cd076 Mon Sep 17 00:00:00 2001 From: topecongiro Date: Fri, 17 Mar 2017 09:03:52 +0900 Subject: Parse 0e+10 as a valid floating-point literal Fixes issue #40408. 
--- src/libsyntax/parse/lexer/mod.rs | 2 +- src/test/run-pass/issue-40408.rs | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 src/test/run-pass/issue-40408.rs (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index de8a87e3a2b..d48cf6911ed 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -725,7 +725,7 @@ impl<'a> StringReader<'a> { base = 16; num_digits = self.scan_digits(16, 16); } - '0'...'9' | '_' | '.' => { + '0'...'9' | '_' | '.' | 'e' | 'E' => { num_digits = self.scan_digits(10, 10) + 1; } _ => { diff --git a/src/test/run-pass/issue-40408.rs b/src/test/run-pass/issue-40408.rs new file mode 100644 index 00000000000..a73dc1966b4 --- /dev/null +++ b/src/test/run-pass/issue-40408.rs @@ -0,0 +1,16 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +fn main() { + println!("{}", 0E+10); + println!("{}", 0e+10); + println!("{}", 00e+10); + println!("{}", 00E+10); +} -- cgit 1.4.1-3-g733a5 From b5e889791a5ec8cb06224cf07273be8c84192698 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Fri, 17 Mar 2017 00:47:32 +0300 Subject: Refactor parsing of trait object types --- src/libsyntax/ast.rs | 12 +- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/quote.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 2 +- src/libsyntax/parse/parser.rs | 404 +++++++++------------ src/libsyntax/parse/token.rs | 44 ++- src/test/compile-fail/E0178.rs | 3 - src/test/compile-fail/issue-34334.rs | 2 +- .../privacy/restricted/tuple-struct-fields/test.rs | 4 +- .../restricted/tuple-struct-fields/test2.rs | 4 +- .../restricted/tuple-struct-fields/test3.rs | 4 +- .../compile-fail/trait-object-macro-matcher.rs | 19 + ...t-object-reference-without-parens-suggestion.rs | 3 +- src/test/parse-fail/bounds-obj-parens.rs | 2 +- src/test/parse-fail/issue-17904.rs | 2 +- src/test/parse-fail/removed-syntax-ptr-lifetime.rs | 2 +- src/test/parse-fail/removed-syntax-uniq-mut-ty.rs | 2 +- src/test/parse-fail/trailing-plus-in-bounds.rs | 6 +- src/test/parse-fail/trait-object-macro-matcher.rs | 20 + .../parse-fail/trait-object-polytrait-priority.rs | 19 + src/test/run-pass/issue-28279.rs | 3 +- 21 files changed, 294 insertions(+), 267 deletions(-) create mode 100644 src/test/compile-fail/trait-object-macro-matcher.rs create mode 100644 src/test/parse-fail/trait-object-macro-matcher.rs create mode 100644 src/test/parse-fail/trait-object-polytrait-priority.rs (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 84fb69a7f10..4347046b6b8 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -17,7 +17,7 @@ pub use self::PathParameters::*; pub use symbol::Symbol as Name; pub use util::ThinVec; -use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId}; +use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP, ExpnId}; 
use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::SyntaxContext; @@ -1716,6 +1716,16 @@ pub struct PolyTraitRef { pub span: Span, } +impl PolyTraitRef { + pub fn new(lifetimes: Vec, path: Path, lo: BytePos, hi: BytePos) -> Self { + PolyTraitRef { + bound_lifetimes: lifetimes, + trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID }, + span: mk_sp(lo, hi), + } + } +} + #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Visibility { Public, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index c1816582bc6..6abeb4b0b28 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -657,7 +657,7 @@ impl<'a> Parser<'a> { } ExpansionKind::Expr => Expansion::Expr(self.parse_expr()?), ExpansionKind::OptExpr => Expansion::OptExpr(Some(self.parse_expr()?)), - ExpansionKind::Ty => Expansion::Ty(self.parse_ty_no_plus()?), + ExpansionKind::Ty => Expansion::Ty(self.parse_ty()?), ExpansionKind::Pat => Expansion::Pat(self.parse_pat()?), }) } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 10b7249743b..d7a85baa3ff 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -414,7 +414,7 @@ pub fn parse_arm_panic(parser: &mut Parser) -> Arm { } pub fn parse_ty_panic(parser: &mut Parser) -> P { - panictry!(parser.parse_ty_no_plus()) + panictry!(parser.parse_ty()) } pub fn parse_stmt_panic(parser: &mut Parser) -> Option { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 6385d206a0c..ed17f0f956c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -512,7 +512,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { }, "pat" => token::NtPat(panictry!(p.parse_pat())), "expr" => token::NtExpr(panictry!(p.parse_expr())), - "ty" => token::NtTy(panictry!(p.parse_ty_no_plus())), + "ty" => token::NtTy(panictry!(p.parse_ty())), // this could be handled 
like a token, since it is one "ident" => match p.token { token::Ident(sn) => { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 0a97accead6..df4ccc94c04 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -41,7 +41,7 @@ use ast::{BinOpKind, UnOp}; use ast::RangeEnd; use {ast, attr}; use codemap::{self, CodeMap, Spanned, spanned, respan}; -use syntax_pos::{self, Span, Pos, BytePos, mk_sp}; +use syntax_pos::{self, Span, BytePos, mk_sp}; use errors::{self, DiagnosticBuilder}; use parse::{self, classify, token}; use parse::common::SeqSep; @@ -1116,57 +1116,13 @@ impl<'a> Parser<'a> { self.check_keyword(keywords::Extern) } - pub fn get_lifetime(&mut self) -> ast::Ident { + fn get_label(&mut self) -> ast::Ident { match self.token { token::Lifetime(ref ident) => *ident, _ => self.bug("not a lifetime"), } } - pub fn parse_for_in_type(&mut self) -> PResult<'a, TyKind> { - /* - Parses whatever can come after a `for` keyword in a type. - The `for` hasn't been consumed. - - - for <'lt> [unsafe] [extern "ABI"] fn (S) -> T - - for <'lt> path::foo(a, b) + Trait + 'a - */ - - let lo = self.span.lo; - let lifetime_defs = self.parse_late_bound_lifetime_defs()?; - - // examine next token to decide to do - if self.token_is_bare_fn_keyword() { - self.parse_ty_bare_fn(lifetime_defs) - } else { - let hi = self.span.hi; - let trait_ref = self.parse_trait_ref()?; - let poly_trait_ref = PolyTraitRef { bound_lifetimes: lifetime_defs, - trait_ref: trait_ref, - span: mk_sp(lo, hi)}; - let other_bounds = if self.eat(&token::BinOp(token::Plus)) { - self.parse_ty_param_bounds()? - } else { - Vec::new() - }; - let all_bounds = - Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter() - .chain(other_bounds) - .collect(); - Ok(ast::TyKind::TraitObject(all_bounds)) - } - } - - pub fn parse_impl_trait_type(&mut self) -> PResult<'a, TyKind> { - // Parses whatever can come after a `impl` keyword in a type. 
- // The `impl` has already been consumed. - Ok(ast::TyKind::ImplTrait(self.parse_ty_param_bounds()?)) - } - - pub fn parse_ty_path(&mut self) -> PResult<'a, TyKind> { - Ok(TyKind::Path(None, self.parse_path(PathStyle::Type)?)) - } - /// parse a TyKind::BareFn type: pub fn parse_ty_bare_fn(&mut self, lifetime_defs: Vec) -> PResult<'a, TyKind> { @@ -1347,84 +1303,9 @@ impl<'a> Parser<'a> { } } - /// Parse a type. + // Parse a type pub fn parse_ty(&mut self) -> PResult<'a, P> { - let lo = self.span.lo; - let lhs = self.parse_ty_no_plus()?; - - if !self.eat(&token::BinOp(token::Plus)) { - return Ok(lhs); - } - - let mut bounds = self.parse_ty_param_bounds()?; - - // In type grammar, `+` is treated like a binary operator, - // and hence both L and R side are required. - if bounds.is_empty() { - let prev_span = self.prev_span; - self.span_err(prev_span, - "at least one type parameter bound \ - must be specified"); - } - - let mut lhs = lhs.unwrap(); - if let TyKind::Paren(ty) = lhs.node { - // We have to accept the first bound in parens for backward compatibility. 
- // Example: `(Bound) + Bound + Bound` - lhs = ty.unwrap(); - } - if let TyKind::Path(None, path) = lhs.node { - let poly_trait_ref = PolyTraitRef { - bound_lifetimes: Vec::new(), - trait_ref: TraitRef { path: path, ref_id: lhs.id }, - span: lhs.span, - }; - let poly_trait_ref = TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None); - bounds.insert(0, poly_trait_ref); - } else { - let mut err = struct_span_err!(self.sess.span_diagnostic, lhs.span, E0178, - "expected a path on the left-hand side \ - of `+`, not `{}`", - pprust::ty_to_string(&lhs)); - err.span_label(lhs.span, &format!("expected a path")); - let hi = bounds.iter().map(|x| match *x { - TraitTyParamBound(ref tr, _) => tr.span.hi, - RegionTyParamBound(ref r) => r.span.hi, - }).max_by_key(|x| x.to_usize()); - let full_span = hi.map(|hi| Span { - lo: lhs.span.lo, - hi: hi, - expn_id: lhs.span.expn_id, - }); - match (&lhs.node, full_span) { - (&TyKind::Rptr(ref lifetime, ref mut_ty), Some(full_span)) => { - let ty_str = pprust::to_string(|s| { - use print::pp::word; - use print::pprust::PrintState; - - word(&mut s.s, "&")?; - s.print_opt_lifetime(lifetime)?; - s.print_mutability(mut_ty.mutbl)?; - s.popen()?; - s.print_type(&mut_ty.ty)?; - s.print_bounds(" +", &bounds)?; - s.pclose() - }); - err.span_suggestion(full_span, "try adding parentheses (per RFC 438):", - ty_str); - } - - _ => { - help!(&mut err, - "perhaps you forgot parentheses? (per RFC 438)"); - } - } - err.emit(); - } - - let sp = mk_sp(lo, self.prev_span.hi); - let sum = TyKind::TraitObject(bounds); - Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: sum, span: sp})) + self.parse_ty_common(true) } /// Parse a type in restricted contexts where `+` is not permitted. @@ -1432,15 +1313,17 @@ impl<'a> Parser<'a> { /// `+` is prohibited to maintain operator priority (P(+) < P(&)). /// Example 2: `value1 as TYPE + value2` /// `+` is prohibited to avoid interactions with expression grammar. 
- pub fn parse_ty_no_plus(&mut self) -> PResult<'a, P> { + fn parse_ty_no_plus(&mut self) -> PResult<'a, P> { + self.parse_ty_common(false) + } + + fn parse_ty_common(&mut self, allow_plus: bool) -> PResult<'a, P> { maybe_whole!(self, NtTy, |x| x); let lo = self.span.lo; - - let t = if self.eat(&token::OpenDelim(token::Paren)) { - // (t) is a parenthesized ty - // (t,) is the type of a tuple with only one field, - // of type t + let node = if self.eat(&token::OpenDelim(token::Paren)) { + // `(TYPE)` is a parenthesized type. + // `(TYPE,)` is a tuple with a single field of type TYPE. let mut ts = vec![]; let mut last_comma = false; while self.token != token::CloseDelim(token::Paren) { @@ -1452,81 +1335,162 @@ impl<'a> Parser<'a> { break; } } - self.expect(&token::CloseDelim(token::Paren))?; + if ts.len() == 1 && !last_comma { - TyKind::Paren(ts.into_iter().nth(0).unwrap()) + let ty = ts.into_iter().nth(0).unwrap().unwrap(); + match ty.node { + // Accept `(Trait1) + Trait2 + 'a` for backward compatibility (#39318). + TyKind::Path(None, ref path) + if allow_plus && self.token == token::BinOp(token::Plus) => { + self.bump(); // `+` + let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo, self.prev_span.hi); + let mut bounds = vec![TraitTyParamBound(pt, TraitBoundModifier::None)]; + bounds.append(&mut self.parse_ty_param_bounds()?); + TyKind::TraitObject(bounds) + } + _ => TyKind::Paren(P(ty)) + } } else { TyKind::Tup(ts) } } else if self.eat(&token::Not) { + // Never type `!` TyKind::Never } else if self.eat(&token::BinOp(token::Star)) { - // STAR POINTER (bare pointer?) + // Raw pointer TyKind::Ptr(self.parse_ptr()?) } else if self.eat(&token::OpenDelim(token::Bracket)) { - // VECTOR + // Array or slice let t = self.parse_ty()?; - - // Parse the `; e` in `[ i32; e ]` - // where `e` is a const expression + // Parse optional `; EXPR` in `[TYPE; EXPR]` let t = match self.maybe_parse_fixed_length_of_vec()? 
{ None => TyKind::Slice(t), - Some(suffix) => TyKind::Array(t, suffix) + Some(suffix) => TyKind::Array(t, suffix), }; self.expect(&token::CloseDelim(token::Bracket))?; t - } else if self.check(&token::BinOp(token::And)) || - self.check(&token::AndAnd) { - // BORROWED POINTER + } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) { + // Reference self.expect_and()?; self.parse_borrowed_pointee()? - } else if self.check_keyword(keywords::For) { - // FIXME `+` has incorrect priority in trait object types starting with `for` (#39317). - self.parse_for_in_type()? - } else if self.eat_keyword(keywords::Impl) { - // FIXME figure out priority of `+` in `impl Trait1 + Trait2` (#34511). - self.parse_impl_trait_type()? - } else if self.token_is_bare_fn_keyword() { - // BARE FUNCTION - self.parse_ty_bare_fn(Vec::new())? } else if self.eat_keyword_noexpect(keywords::Typeof) { - // TYPEOF + // `typeof(EXPR)` // In order to not be ambiguous, the type must be surrounded by parens. self.expect(&token::OpenDelim(token::Paren))?; let e = self.parse_expr()?; self.expect(&token::CloseDelim(token::Paren))?; TyKind::Typeof(e) + } else if self.eat(&token::Underscore) { + // A type to be inferred `_` + TyKind::Infer } else if self.eat_lt() { + // Qualified path let (qself, path) = self.parse_qualified_path(PathStyle::Type)?; TyKind::Path(Some(qself), path) } else if self.token.is_path_start() { + // Simple path let path = self.parse_path(PathStyle::Type)?; if self.eat(&token::Not) { - // MACRO INVOCATION + // Macro invocation in type position let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.span.hi; - TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts })) + TyKind::Mac(spanned(lo, self.span.hi, Mac_ { path: path, tts: tts })) } else { - // NAMED TYPE - TyKind::Path(None, path) + // Just a type path or bound list (trait object type) starting with a trait. 
+ // `Type` + // `Trait1 + Trait2 + 'a` + if allow_plus && self.eat(&token::BinOp(token::Plus)) { + let poly_trait = PolyTraitRef::new(Vec::new(), path, lo, self.prev_span.hi); + let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; + bounds.append(&mut self.parse_ty_param_bounds()?); + TyKind::TraitObject(bounds) + } else { + TyKind::Path(None, path) + } } - } else if self.eat(&token::Underscore) { - // TYPE TO BE INFERRED - TyKind::Infer + } else if self.token_is_bare_fn_keyword() { + // Function pointer type + self.parse_ty_bare_fn(Vec::new())? + } else if self.check_keyword(keywords::For) { + // Function pointer type or bound list (trait object type) starting with a poly-trait. + // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` + // `for<'lt> Trait1<'lt> + Trait2 + 'a` + let lo = self.span.lo; + let lifetime_defs = self.parse_late_bound_lifetime_defs()?; + if self.token_is_bare_fn_keyword() { + self.parse_ty_bare_fn(lifetime_defs)? + } else { + let path = self.parse_path(PathStyle::Type)?; + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; + if allow_plus && self.eat(&token::BinOp(token::Plus)) { + bounds.append(&mut self.parse_ty_param_bounds()?) + } + TyKind::TraitObject(bounds) + } + } else if self.eat_keyword(keywords::Impl) { + // FIXME: figure out priority of `+` in `impl Trait1 + Trait2` (#34511). + TyKind::ImplTrait(self.parse_ty_param_bounds()?) + } else if self.check(&token::Question) { + // Bound list (trait object type) + // Bound lists starting with `'lt` are not currently supported (#40043) + TyKind::TraitObject(self.parse_ty_param_bounds_common(allow_plus)?) 
} else { let msg = format!("expected type, found {}", self.this_token_descr()); return Err(self.fatal(&msg)); }; - let sp = mk_sp(lo, self.prev_span.hi); - Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: t, span: sp})) + let span = mk_sp(lo, self.prev_span.hi); + let ty = Ty { node: node, span: span, id: ast::DUMMY_NODE_ID }; + + // Try to recover from use of `+` with incorrect priority. + self.maybe_recover_from_bad_type_plus(allow_plus, &ty)?; + + Ok(P(ty)) } - pub fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> { - // look for `&'lt` or `&'foo ` and interpret `foo` as the region name: - let opt_lifetime = self.eat_lifetime(); - let mutbl = self.parse_mutability()?; + fn maybe_recover_from_bad_type_plus(&mut self, allow_plus: bool, ty: &Ty) -> PResult<'a, ()> { + // Do not add `+` to expected tokens. + if !allow_plus || self.token != token::BinOp(token::Plus) { + return Ok(()) + } + + self.bump(); // `+` + let bounds = self.parse_ty_param_bounds()?; + let sum_span = mk_sp(ty.span.lo, self.prev_span.hi); + + let mut err = struct_span_err!(self.sess.span_diagnostic, ty.span, E0178, + "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty)); + err.span_label(ty.span, &format!("expected a path")); + + match ty.node { + TyKind::Rptr(ref lifetime, ref mut_ty) => { + let sum_with_parens = pprust::to_string(|s| { + use print::pp::word; + use print::pprust::PrintState; + + word(&mut s.s, "&")?; + s.print_opt_lifetime(lifetime)?; + s.print_mutability(mut_ty.mutbl)?; + s.popen()?; + s.print_type(&mut_ty.ty)?; + s.print_bounds(" +", &bounds)?; + s.pclose() + }); + err.span_suggestion(sum_span, "try adding parentheses:", sum_with_parens); + } + TyKind::Ptr(..) | TyKind::BareFn(..) 
=> { + help!(&mut err, "perhaps you forgot parentheses?"); + } + _ => {} + } + err.emit(); + Ok(()) + } + + fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> { + let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None }; + let mutbl = self.parse_mutability(); let ty = self.parse_ty_no_plus()?; return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl })); } @@ -1927,30 +1891,28 @@ impl<'a> Parser<'a> { } } - /// Parse single lifetime 'a or nothing. - pub fn eat_lifetime(&mut self) -> Option { + fn check_lifetime(&mut self) -> bool { + self.expected_tokens.push(TokenType::Lifetime); + self.token.is_lifetime() + } + + /// Parse single lifetime 'a or panic. + fn expect_lifetime(&mut self) -> Lifetime { match self.token { token::Lifetime(ident) => { self.bump(); - Some(Lifetime { - id: ast::DUMMY_NODE_ID, - span: self.prev_span, - name: ident.name - }) - } - _ => { - self.expected_tokens.push(TokenType::Lifetime); - None + Lifetime { name: ident.name, span: self.prev_span, id: ast::DUMMY_NODE_ID } } + _ => self.span_bug(self.span, "not a lifetime") } } /// Parse mutability (`mut` or nothing). 
- pub fn parse_mutability(&mut self) -> PResult<'a, Mutability> { + fn parse_mutability(&mut self) -> Mutability { if self.eat_keyword(keywords::Mut) { - Ok(Mutability::Mutable) + Mutability::Mutable } else { - Ok(Mutability::Immutable) + Mutability::Immutable } } @@ -2207,7 +2169,7 @@ impl<'a> Parser<'a> { return self.parse_while_expr(None, lo, attrs); } if self.token.is_lifetime() { - let label = Spanned { node: self.get_lifetime(), + let label = Spanned { node: self.get_label(), span: self.span }; let lo = self.span.lo; self.bump(); @@ -2230,7 +2192,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Continue) { let ex = if self.token.is_lifetime() { let ex = ExprKind::Continue(Some(Spanned{ - node: self.get_lifetime(), + node: self.get_label(), span: self.span })); self.bump(); @@ -2267,7 +2229,7 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(keywords::Break) { let lt = if self.token.is_lifetime() { let spanned_lt = Spanned { - node: self.get_lifetime(), + node: self.get_label(), span: self.span }; self.bump(); @@ -2700,7 +2662,7 @@ impl<'a> Parser<'a> { } token::BinOp(token::And) | token::AndAnd => { self.expect_and()?; - let m = self.parse_mutability()?; + let m = self.parse_mutability(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; hi = span.hi; @@ -3422,7 +3384,7 @@ impl<'a> Parser<'a> { token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat self.expect_and()?; - let mutbl = self.parse_mutability()?; + let mutbl = self.parse_mutability(); if let token::Lifetime(ident) = self.token { return Err(self.fatal(&format!("unexpected lifetime `{}` in pattern", ident))); } @@ -3449,7 +3411,7 @@ impl<'a> Parser<'a> { pat = self.parse_pat_ident(BindingMode::ByValue(Mutability::Mutable))?; } else if self.eat_keyword(keywords::Ref) { // Parse ref ident @ pat / ref mut ident @ pat - let mutbl = self.parse_mutability()?; + let mutbl = self.parse_mutability(); pat = 
self.parse_pat_ident(BindingMode::ByRef(mutbl))?; } else if self.eat_keyword(keywords::Box) { // Parse box pat @@ -4069,30 +4031,32 @@ impl<'a> Parser<'a> { // BOUND = TY_BOUND | LT_BOUND // LT_BOUND = LIFETIME (e.g. `'a`) // TY_BOUND = [?] [for] SIMPLE_PATH (e.g. `?for<'a: 'b> m::Trait<'a>`) - fn parse_ty_param_bounds(&mut self) -> PResult<'a, TyParamBounds> - { + fn parse_ty_param_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, TyParamBounds> { let mut bounds = Vec::new(); loop { let question = if self.eat(&token::Question) { Some(self.prev_span) } else { None }; - if let Some(lifetime) = self.eat_lifetime() { + if self.check_lifetime() { if let Some(question_span) = question { self.span_err(question_span, "`?` may only modify trait bounds, not lifetime bounds"); } - bounds.push(RegionTyParamBound(lifetime)); - } else {if self.check_keyword(keywords::For) || self.check_path() { - let poly_trait_ref = self.parse_poly_trait_ref()?; + bounds.push(RegionTyParamBound(self.expect_lifetime())); + } else if self.check_keyword(keywords::For) || self.check_path() { + let lo = self.span.lo; + let lifetime_defs = self.parse_late_bound_lifetime_defs()?; + let path = self.parse_path(PathStyle::Type)?; + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); let modifier = if question.is_some() { TraitBoundModifier::Maybe } else { TraitBoundModifier::None }; - bounds.push(TraitTyParamBound(poly_trait_ref, modifier)); + bounds.push(TraitTyParamBound(poly_trait, modifier)); } else { break - }} + } - if !self.eat(&token::BinOp(token::Plus)) { + if !allow_plus || !self.eat(&token::BinOp(token::Plus)) { break } } @@ -4100,12 +4064,16 @@ impl<'a> Parser<'a> { return Ok(bounds); } + fn parse_ty_param_bounds(&mut self) -> PResult<'a, TyParamBounds> { + self.parse_ty_param_bounds_common(true) + } + // Parse bounds of a type parameter `BOUND + BOUND + BOUND` without trailing `+`. // BOUND = LT_BOUND (e.g. 
`'a`) fn parse_lt_param_bounds(&mut self) -> Vec { let mut lifetimes = Vec::new(); - while let Some(lifetime) = self.eat_lifetime() { - lifetimes.push(lifetime); + while self.check_lifetime() { + lifetimes.push(self.expect_lifetime()); if !self.eat(&token::BinOp(token::Plus)) { break @@ -4150,7 +4118,8 @@ impl<'a> Parser<'a> { let mut seen_ty_param = false; loop { let attrs = self.parse_outer_attributes()?; - if let Some(lifetime) = self.eat_lifetime() { + if self.check_lifetime() { + let lifetime = self.expect_lifetime(); // Parse lifetime parameter. let bounds = if self.eat(&token::Colon) { self.parse_lt_param_bounds() @@ -4166,7 +4135,7 @@ impl<'a> Parser<'a> { self.span_err(self.prev_span, "lifetime parameters must be declared prior to type parameters"); } - } else {if self.check_ident() { + } else if self.check_ident() { // Parse type parameter. ty_params.push(self.parse_ty_param(attrs)?); seen_ty_param = true; @@ -4178,7 +4147,7 @@ impl<'a> Parser<'a> { &format!("trailing attribute after {} parameters", param_kind)); } break - }} + } if !self.eat(&token::Comma) { break @@ -4224,14 +4193,14 @@ impl<'a> Parser<'a> { let mut seen_type = false; let mut seen_binding = false; loop { - if let Some(lifetime) = self.eat_lifetime() { + if self.check_lifetime() && self.look_ahead(1, |t| t != &token::BinOp(token::Plus)) { // Parse lifetime argument. - lifetimes.push(lifetime); + lifetimes.push(self.expect_lifetime()); if seen_type || seen_binding { self.span_err(self.prev_span, "lifetime parameters must be declared prior to type parameters"); } - } else {if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) { + } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) { // Parse associated type binding. 
let lo = self.span.lo; let ident = self.parse_ident()?; @@ -4254,7 +4223,7 @@ impl<'a> Parser<'a> { seen_type = true; } else { break - }} + } if !self.eat(&token::Comma) { break @@ -4299,7 +4268,8 @@ impl<'a> Parser<'a> { loop { let lo = self.span.lo; - if let Some(lifetime) = self.eat_lifetime() { + if self.check_lifetime() && self.look_ahead(1, |t| t != &token::BinOp(token::Plus)) { + let lifetime = self.expect_lifetime(); // Bounds starting with a colon are mandatory, but possibly empty. self.expect(&token::Colon)?; let bounds = self.parse_lt_param_bounds(); @@ -4310,7 +4280,7 @@ impl<'a> Parser<'a> { bounds: bounds, } )); - } else {if self.check_type() { + } else if self.check_type() { // Parse optional `for<'a, 'b>`. // This `for` is parsed greedily and applies to the whole predicate, // the bounded type can have its own `for` applying only to it. @@ -4348,7 +4318,7 @@ impl<'a> Parser<'a> { } } else { break - }} + } if !self.eat(&token::Comma) { break @@ -4453,13 +4423,13 @@ impl<'a> Parser<'a> { } else if self.look_ahead(1, |t| t.is_lifetime()) && isolated_self(self, 2) { self.bump(); - let lt = self.eat_lifetime().expect("not a lifetime"); + let lt = self.expect_lifetime(); (SelfKind::Region(Some(lt), Mutability::Immutable), expect_ident(self)) } else if self.look_ahead(1, |t| t.is_lifetime()) && self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) && isolated_self(self, 3) { self.bump(); - let lt = self.eat_lifetime().expect("not a lifetime"); + let lt = self.expect_lifetime(); self.bump(); (SelfKind::Region(Some(lt), Mutability::Mutable), expect_ident(self)) } else { @@ -4852,14 +4822,6 @@ impl<'a> Parser<'a> { } } - /// Parse a::B - fn parse_trait_ref(&mut self) -> PResult<'a, TraitRef> { - Ok(TraitRef { - path: self.parse_path(PathStyle::Type)?, - ref_id: ast::DUMMY_NODE_ID, - }) - } - fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec> { if self.eat_keyword(keywords::For) { self.expect_lt()?; @@ -4875,18 +4837,6 @@ impl<'a> Parser<'a> { } } 
- /// Parse for<'l> a::B - fn parse_poly_trait_ref(&mut self) -> PResult<'a, PolyTraitRef> { - let lo = self.span.lo; - let lifetime_defs = self.parse_late_bound_lifetime_defs()?; - - Ok(PolyTraitRef { - bound_lifetimes: lifetime_defs, - trait_ref: self.parse_trait_ref()?, - span: mk_sp(lo, self.prev_span.hi), - }) - } - /// Parse struct Foo { ... } fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> { let class_name = self.parse_ident()?; diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 75852629ce1..519d5bd98e4 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -103,6 +103,21 @@ fn ident_can_begin_expr(ident: ast::Ident) -> bool { ].contains(&ident.name) } +fn ident_can_begin_type(ident: ast::Ident) -> bool { + let ident_token: Token = Ident(ident); + + !ident_token.is_any_keyword() || + ident_token.is_path_segment_keyword() || + [ + keywords::For.name(), + keywords::Impl.name(), + keywords::Fn.name(), + keywords::Unsafe.name(), + keywords::Extern.name(), + keywords::Typeof.name(), + ].contains(&ident.name) +} + #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)] pub enum Token { /* Expression-operator symbols. */ @@ -182,23 +197,21 @@ impl Token { /// Returns `true` if the token can appear at the start of an expression. pub fn can_begin_expr(&self) -> bool { match *self { - OpenDelim(..) => true, - Ident(ident) => ident_can_begin_expr(ident), - Literal(..) => true, - Not => true, - BinOp(Minus) => true, - BinOp(Star) => true, - BinOp(And) => true, - BinOp(Or) => true, // in lambda syntax - OrOr => true, // in lambda syntax - AndAnd => true, // double borrow + Ident(ident) => ident_can_begin_expr(ident), // value name or keyword + OpenDelim(..) => true, // tuple, array or block + Literal(..) 
=> true, // literal + Not => true, // operator not + BinOp(Minus) => true, // unary minus + BinOp(Star) => true, // dereference + BinOp(Or) | OrOr => true, // closure + BinOp(And) => true, // reference + AndAnd => true, // double reference DotDot | DotDotDot => true, // range notation Lt | BinOp(Shl) => true, // associated path - ModSep => true, - Pound => true, // for expression attributes + ModSep => true, // global path + Pound => true, // expression attributes Interpolated(ref nt) => match **nt { NtExpr(..) => true, - NtIdent(..) => true, NtBlock(..) => true, NtPath(..) => true, _ => false, @@ -210,19 +223,20 @@ impl Token { /// Returns `true` if the token can appear at the start of a type. pub fn can_begin_type(&self) -> bool { match *self { + Ident(ident) => ident_can_begin_type(ident), // type name or keyword OpenDelim(Paren) => true, // tuple OpenDelim(Bracket) => true, // array - Ident(..) => true, // type name or keyword Underscore => true, // placeholder Not => true, // never BinOp(Star) => true, // raw pointer BinOp(And) => true, // reference AndAnd => true, // double reference + Question => true, // maybe bound in trait object + Lifetime(..) => true, // lifetime bound in trait object Lt | BinOp(Shl) => true, // associated path ModSep => true, // global path Interpolated(ref nt) => match **nt { NtTy(..) => true, - NtIdent(..) => true, NtPath(..) 
=> true, _ => false, }, diff --git a/src/test/compile-fail/E0178.rs b/src/test/compile-fail/E0178.rs index ffc5940c95c..6527465e0b7 100644 --- a/src/test/compile-fail/E0178.rs +++ b/src/test/compile-fail/E0178.rs @@ -17,15 +17,12 @@ struct Bar<'a> { x: &'a Foo + 'a, //~^ ERROR E0178 //~| NOTE expected a path - //~| ERROR at least one non-builtin trait is required for an object type y: &'a mut Foo + 'a, //~^ ERROR E0178 //~| NOTE expected a path - //~| ERROR at least one non-builtin trait is required for an object type z: fn() -> Foo + 'a, //~^ ERROR E0178 //~| NOTE expected a path - //~| ERROR at least one non-builtin trait is required for an object type } fn main() { diff --git a/src/test/compile-fail/issue-34334.rs b/src/test/compile-fail/issue-34334.rs index aff908e5815..95b5fabc81e 100644 --- a/src/test/compile-fail/issue-34334.rs +++ b/src/test/compile-fail/issue-34334.rs @@ -9,7 +9,7 @@ // except according to those terms. fn main () { - let sr: Vec<(u32, _, _) = vec![]; //~ ERROR expected one of `+`, `,`, or `>`, found `=` + let sr: Vec<(u32, _, _) = vec![]; //~ ERROR expected one of `,` or `>`, found `=` let sr2: Vec<(u32, _, _)> = sr.iter().map(|(faction, th_sender, th_receiver)| {}).collect(); //~^ ERROR cannot find value `sr` in this scope } diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs index bb212b3114d..208f1a0e2ee 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs @@ -11,6 +11,6 @@ mod foo { type T = (); struct S1(pub(foo) (), pub(T), pub(crate) (), pub(((), T))); - struct S2(pub((foo)) ()); //~ ERROR expected one of `+` or `,`, found `(` - //~| ERROR expected one of `+`, `;`, or `where`, found `(` + struct S2(pub((foo)) ()); //~ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } diff --git 
a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs index 2c6e71d7c55..57769646e3b 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs @@ -12,8 +12,8 @@ macro_rules! define_struct { ($t:ty) => { struct S1(pub $t); struct S2(pub (foo) ()); - struct S3(pub $t ()); //~ ERROR expected one of `+` or `,`, found `(` - //~| ERROR expected one of `+`, `;`, or `where`, found `(` + struct S3(pub $t ()); //~ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } } diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs index e15eeae8159..db3358f7d50 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs @@ -12,8 +12,8 @@ macro_rules! define_struct { ($t:ty) => { struct S1(pub($t)); struct S2(pub (foo) ()); - struct S3(pub($t) ()); //~ ERROR expected one of `+` or `,`, found `(` - //~| ERROR expected one of `+`, `;`, or `where`, found `(` + struct S3(pub($t) ()); //~ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } } diff --git a/src/test/compile-fail/trait-object-macro-matcher.rs b/src/test/compile-fail/trait-object-macro-matcher.rs new file mode 100644 index 00000000000..de80b04b865 --- /dev/null +++ b/src/test/compile-fail/trait-object-macro-matcher.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +// `ty` matcher accepts trait object types + +macro_rules! m { + ($t: ty) => ( let _: $t; ) +} + +fn main() { + m!(Copy + Send + 'static); //~ ERROR the trait `std::marker::Copy` cannot be made into an object +} diff --git a/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs b/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs index c009644c561..f9f887b78b0 100644 --- a/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs +++ b/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs @@ -13,10 +13,9 @@ fn main() { //~^ ERROR expected a path //~| HELP try adding parentheses //~| SUGGESTION let _: &(Copy + 'static); - //~| ERROR at least one non-builtin trait is required for an object type + //~| ERROR the trait `std::marker::Copy` cannot be made into an object let _: &'static Copy + 'static; //~^ ERROR expected a path //~| HELP try adding parentheses //~| SUGGESTION let _: &'static (Copy + 'static); - //~| ERROR at least one non-builtin trait is required for an object type } diff --git a/src/test/parse-fail/bounds-obj-parens.rs b/src/test/parse-fail/bounds-obj-parens.rs index cbdffb4a255..ad59d4a52d7 100644 --- a/src/test/parse-fail/bounds-obj-parens.rs +++ b/src/test/parse-fail/bounds-obj-parens.rs @@ -10,6 +10,6 @@ // compile-flags: -Z parse-only -type A = Box<(Fn(D::Error) -> E) + 'static + Send + Sync>; // OK +type A = Box<(Fn(D::Error) -> E) + 'static + Send + Sync>; // OK (but see #39318) FAIL //~ ERROR diff --git a/src/test/parse-fail/issue-17904.rs b/src/test/parse-fail/issue-17904.rs index ae28ac76acb..a54d89f48c3 100644 --- a/src/test/parse-fail/issue-17904.rs +++ b/src/test/parse-fail/issue-17904.rs @@ -13,6 +13,6 @@ struct Baz where U: Eq(U); //This is parsed as the new Fn* style parenthesis syntax. struct Baz where U: Eq(U) -> R; // Notice this parses as well. 
struct Baz(U) where U: Eq; // This rightfully signals no error as well. -struct Foo where T: Copy, (T); //~ ERROR expected one of `+`, `:`, `==`, or `=`, found `;` +struct Foo where T: Copy, (T); //~ ERROR expected one of `:`, `==`, or `=`, found `;` fn main() {} diff --git a/src/test/parse-fail/removed-syntax-ptr-lifetime.rs b/src/test/parse-fail/removed-syntax-ptr-lifetime.rs index ebef0e56e3e..b91ab8730b3 100644 --- a/src/test/parse-fail/removed-syntax-ptr-lifetime.rs +++ b/src/test/parse-fail/removed-syntax-ptr-lifetime.rs @@ -10,4 +10,4 @@ // compile-flags: -Z parse-only -type bptr = &lifetime/isize; //~ ERROR expected one of `!`, `(`, `+`, `::`, `;`, or `<`, found `/` +type bptr = &lifetime/isize; //~ ERROR expected one of `!`, `(`, `::`, `;`, or `<`, found `/` diff --git a/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs b/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs index 9bd8dc9b11b..8a47376179d 100644 --- a/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs +++ b/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs @@ -10,4 +10,4 @@ // compile-flags: -Z parse-only -type mut_box = Box; //~ ERROR expected type, found keyword `mut` +type mut_box = Box; //~ ERROR expected one of `>`, lifetime, or type, found `mut` diff --git a/src/test/parse-fail/trailing-plus-in-bounds.rs b/src/test/parse-fail/trailing-plus-in-bounds.rs index 44bb1f930c7..4a2e6d5bdcd 100644 --- a/src/test/parse-fail/trailing-plus-in-bounds.rs +++ b/src/test/parse-fail/trailing-plus-in-bounds.rs @@ -13,7 +13,7 @@ use std::fmt::Debug; fn main() { - let x: Box = box 3 as Box; - //~^ ERROR at least one type parameter bound must be specified - //~^^ ERROR at least one type parameter bound must be specified + let x: Box = box 3 as Box; // Trailing `+` is OK } + +FAIL //~ ERROR diff --git a/src/test/parse-fail/trait-object-macro-matcher.rs b/src/test/parse-fail/trait-object-macro-matcher.rs new file mode 100644 index 00000000000..3a5bce509f1 --- /dev/null +++ 
b/src/test/parse-fail/trait-object-macro-matcher.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// A single lifetime is not parsed as a type. +// `ty` matcher in particular doesn't accept a single lifetime + +macro_rules! m { + ($t: ty) => ( let _: $t; ) +} + +fn main() { + m!('static); //~ ERROR expected type, found `'static` +} diff --git a/src/test/parse-fail/trait-object-polytrait-priority.rs b/src/test/parse-fail/trait-object-polytrait-priority.rs new file mode 100644 index 00000000000..f0abc678c21 --- /dev/null +++ b/src/test/parse-fail/trait-object-polytrait-priority.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +trait Trait<'a> {} + +fn main() { + let _: &for<'a> Trait<'a> + 'static; + //~^ ERROR expected a path on the left-hand side of `+`, not `& for<'a>Trait<'a>` + //~| NOTE expected a path + //~| HELP try adding parentheses + //~| SUGGESTION &( for<'a>Trait<'a> + 'static) +} diff --git a/src/test/run-pass/issue-28279.rs b/src/test/run-pass/issue-28279.rs index ae40ce44d17..3165084b9e9 100644 --- a/src/test/run-pass/issue-28279.rs +++ b/src/test/run-pass/issue-28279.rs @@ -18,7 +18,7 @@ fn test1() -> Rc Fn(&'a usize) + 'static> { } } -fn test2() -> *mut for<'a> Fn(&'a usize) + 'static { +fn test2() -> *mut (for<'a> Fn(&'a usize) + 'static) { if let Some(_) = Some(1) { loop{} } else { @@ -27,4 +27,3 @@ fn test2() -> *mut for<'a> Fn(&'a usize) + 'static { } fn main() {} - -- cgit 1.4.1-3-g733a5 From 769b95dc9f92edb51146727813ea7eae00b5b651 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Fri, 17 Mar 2017 21:13:00 -0700 Subject: Add diagnostic for incorrect `pub (restriction)` Given the following statement ```rust pub (a) fn afn() {} ``` Provide the following diagnostic: ```rust error: incorrect restriction in `pub` --> file.rs:15:1 | 15 | pub (a) fn afn() {} | ^^^^^^^ | = help: some valid visibility restrictions are: `pub(crate)`: visible only on the current crate `pub(super)`: visible only in the current module's parent `pub(in path::to::module)`: visible only on the specified path help: to make this visible only to module `a`, add `in` before the path: | pub (in a) fn afn() {} ``` Remove cruft from old `pub(path)` syntax. 
--- src/libsyntax/parse/parser.rs | 64 +++++++++++++--------- .../privacy/restricted/tuple-struct-fields/test.rs | 7 ++- .../restricted/tuple-struct-fields/test2.rs | 7 ++- .../restricted/tuple-struct-fields/test3.rs | 7 ++- src/test/ui/pub/pub-restricted-error-fn.rs | 13 +++++ src/test/ui/pub/pub-restricted-error-fn.stderr | 8 +++ src/test/ui/pub/pub-restricted-error.rs | 19 +++++++ src/test/ui/pub/pub-restricted-error.stderr | 8 +++ src/test/ui/pub/pub-restricted-non-path.rs | 15 +++++ src/test/ui/pub/pub-restricted-non-path.stderr | 8 +++ src/test/ui/pub/pub-restricted.rs | 37 +++++++++++++ src/test/ui/pub/pub-restricted.stderr | 47 ++++++++++++++++ 12 files changed, 205 insertions(+), 35 deletions(-) create mode 100644 src/test/ui/pub/pub-restricted-error-fn.rs create mode 100644 src/test/ui/pub/pub-restricted-error-fn.stderr create mode 100644 src/test/ui/pub/pub-restricted-error.rs create mode 100644 src/test/ui/pub/pub-restricted-error.stderr create mode 100644 src/test/ui/pub/pub-restricted-non-path.rs create mode 100644 src/test/ui/pub/pub-restricted-non-path.stderr create mode 100644 src/test/ui/pub/pub-restricted.rs create mode 100644 src/test/ui/pub/pub-restricted.stderr (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index df4ccc94c04..649e9059934 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -4626,7 +4626,7 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let vis = self.parse_visibility()?; + let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness()?; let (name, node) = if self.eat_keyword(keywords::Type) { let name = self.parse_ident()?; @@ -4939,25 +4939,8 @@ impl<'a> Parser<'a> { |p| { let attrs = p.parse_outer_attributes()?; let lo = p.span.lo; - let mut vis = p.parse_visibility()?; - let ty_is_interpolated = - p.token.is_interpolated() || p.look_ahead(1, |t| 
t.is_interpolated()); - let mut ty = p.parse_ty()?; - - // Handle `pub(path) type`, in which `vis` will be `pub` and `ty` will be `(path)`. - if vis == Visibility::Public && !ty_is_interpolated && - p.token != token::Comma && p.token != token::CloseDelim(token::Paren) { - ty = if let TyKind::Paren(ref path_ty) = ty.node { - if let TyKind::Path(None, ref path) = path_ty.node { - vis = Visibility::Restricted { path: P(path.clone()), id: path_ty.id }; - Some(p.parse_ty()?) - } else { - None - } - } else { - None - }.unwrap_or(ty); - } + let vis = p.parse_visibility(true)?; + let ty = p.parse_ty()?; Ok(StructField { span: mk_sp(lo, p.span.hi), vis: vis, @@ -4996,18 +4979,25 @@ impl<'a> Parser<'a> { fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let vis = self.parse_visibility()?; + let vis = self.parse_visibility(false)?; self.parse_single_struct_field(lo, vis, attrs) } - // Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts - // `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. - fn parse_visibility(&mut self) -> PResult<'a, Visibility> { + /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `pub(self)` for `pub(in self)` + /// and `pub(super)` for `pub(in super)`. If the following element can't be a tuple (i.e. it's + /// a function definition, it's not a tuple struct field) and the contents within the parens + /// isn't valid, emit a proper diagnostic. + fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> { if !self.eat_keyword(keywords::Pub) { return Ok(Visibility::Inherited) } if self.check(&token::OpenDelim(token::Paren)) { + let start_span = self.span; + // We don't `self.bump()` the `(` yet because this might be a struct definition where + // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. 
+ // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so + // by the following tokens. if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) { // `pub(crate)` self.bump(); // `(` @@ -5032,6 +5022,28 @@ impl<'a> Parser<'a> { let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; self.expect(&token::CloseDelim(token::Paren))?; // `)` return Ok(vis) + } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct + // `pub(something) fn ...` or `struct X { pub(something) y: Z }` + self.bump(); // `(` + let msg = "incorrect visibility restriction"; + let suggestion = r##"some possible visibility restrictions are: +`pub(crate)`: visible only on the current crate +`pub(super)`: visible only in the current module's parent +`pub(in path::to::module)`: visible only on the specified path"##; + let path = self.parse_path(PathStyle::Mod)?; + let path_span = self.prev_span; + let help_msg = format!("to make this visible only to module `{}`, add `in` before \ + the path:", + path); + self.expect(&token::CloseDelim(token::Paren))?; // `)` + let sp = Span { + lo: start_span.lo, + hi: self.prev_span.hi, + expn_id: start_span.expn_id, + }; + let mut err = self.span_fatal_help(sp, &msg, &suggestion); + err.span_suggestion(path_span, &help_msg, format!("in {}", path)); + err.emit(); // emit diagnostic, but continue with public visibility } } @@ -5508,7 +5520,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; - let visibility = self.parse_visibility()?; + let visibility = self.parse_visibility(false)?; if self.eat_keyword(keywords::Use) { // USE ITEM @@ -5787,7 +5799,7 @@ impl<'a> Parser<'a> { fn parse_foreign_item(&mut self) -> PResult<'a, Option> { let attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let visibility = self.parse_visibility()?; + let visibility = self.parse_visibility(false)?; if self.check_keyword(keywords::Static) { // FOREIGN STATIC ITEM diff --git 
a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs index 208f1a0e2ee..d17b604717e 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs @@ -10,7 +10,8 @@ mod foo { type T = (); - struct S1(pub(foo) (), pub(T), pub(crate) (), pub(((), T))); - struct S2(pub((foo)) ()); //~ ERROR expected `,`, found `(` - //~| ERROR expected one of `;` or `where`, found `(` + struct S1(pub(in foo) (), pub(T), pub(crate) (), pub(((), T))); + struct S2(pub((foo)) ()); + //~^ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs index 57769646e3b..166d5e27e8d 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs @@ -11,9 +11,10 @@ macro_rules! define_struct { ($t:ty) => { struct S1(pub $t); - struct S2(pub (foo) ()); - struct S3(pub $t ()); //~ ERROR expected `,`, found `(` - //~| ERROR expected one of `;` or `where`, found `(` + struct S2(pub (in foo) ()); + struct S3(pub $t ()); + //~^ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } } diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs index db3358f7d50..edab175f4cd 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs @@ -11,9 +11,10 @@ macro_rules! 
define_struct { ($t:ty) => { struct S1(pub($t)); - struct S2(pub (foo) ()); - struct S3(pub($t) ()); //~ ERROR expected `,`, found `(` - //~| ERROR expected one of `;` or `where`, found `(` + struct S2(pub (in foo) ()); + struct S3(pub($t) ()); + //~^ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } } diff --git a/src/test/ui/pub/pub-restricted-error-fn.rs b/src/test/ui/pub/pub-restricted-error-fn.rs new file mode 100644 index 00000000000..13514310371 --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error-fn.rs @@ -0,0 +1,13 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +pub(crate) () fn foo() {} diff --git a/src/test/ui/pub/pub-restricted-error-fn.stderr b/src/test/ui/pub/pub-restricted-error-fn.stderr new file mode 100644 index 00000000000..470e8331247 --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error-fn.stderr @@ -0,0 +1,8 @@ +error: unmatched visibility `pub` + --> $DIR/pub-restricted-error-fn.rs:13:10 + | +13 | pub(crate) () fn foo() {} + | ^ + +error: aborting due to previous error + diff --git a/src/test/ui/pub/pub-restricted-error.rs b/src/test/ui/pub/pub-restricted-error.rs new file mode 100644 index 00000000000..99af031899a --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(pub_restricted)] + +struct Bar(pub(())); + +struct Foo { + pub(crate) () foo: usize, +} + + diff --git a/src/test/ui/pub/pub-restricted-error.stderr b/src/test/ui/pub/pub-restricted-error.stderr new file mode 100644 index 00000000000..b8b4c80778d --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error.stderr @@ -0,0 +1,8 @@ +error: expected identifier, found `(` + --> $DIR/pub-restricted-error.rs:16:16 + | +16 | pub(crate) () foo: usize, + | ^ + +error: aborting due to previous error + diff --git a/src/test/ui/pub/pub-restricted-non-path.rs b/src/test/ui/pub/pub-restricted-non-path.rs new file mode 100644 index 00000000000..3f74285717a --- /dev/null +++ b/src/test/ui/pub/pub-restricted-non-path.rs @@ -0,0 +1,15 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +pub (.) fn afn() {} + +fn main() {} diff --git a/src/test/ui/pub/pub-restricted-non-path.stderr b/src/test/ui/pub/pub-restricted-non-path.stderr new file mode 100644 index 00000000000..ebfccc4d720 --- /dev/null +++ b/src/test/ui/pub/pub-restricted-non-path.stderr @@ -0,0 +1,8 @@ +error: expected identifier, found `.` + --> $DIR/pub-restricted-non-path.rs:13:6 + | +13 | pub (.) fn afn() {} + | ^ + +error: aborting due to previous error + diff --git a/src/test/ui/pub/pub-restricted.rs b/src/test/ui/pub/pub-restricted.rs new file mode 100644 index 00000000000..48e487f71a7 --- /dev/null +++ b/src/test/ui/pub/pub-restricted.rs @@ -0,0 +1,37 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +mod a {} + +pub (a) fn afn() {} +pub (b) fn bfn() {} +pub fn privfn() {} +mod x { + mod y { + pub (in x) fn foo() {} + pub (super) fn bar() {} + pub (crate) fn qux() {} + } +} + +mod y { + struct Foo { + pub (crate) c: usize, + pub (super) s: usize, + valid_private: usize, + pub (in y) valid_in_x: usize, + pub (a) invalid: usize, + pub (in x) non_parent_invalid: usize, + } +} + +fn main() {} \ No newline at end of file diff --git a/src/test/ui/pub/pub-restricted.stderr b/src/test/ui/pub/pub-restricted.stderr new file mode 100644 index 00000000000..5bc230e8da3 --- /dev/null +++ b/src/test/ui/pub/pub-restricted.stderr @@ -0,0 +1,47 @@ +error: incorrect visibility restriction + --> $DIR/pub-restricted.rs:15:5 + | +15 | pub (a) fn afn() {} + | ^^^ + | + = help: some possible visibility restrictions are: + `pub(crate)`: visible only on the current crate + `pub(super)`: visible only in the current module's parent + `pub(in path::to::module)`: visible only on the specified path +help: to make this visible only to module `a`, add `in` before the path: + | pub (in a) fn afn() {} + +error: incorrect visibility restriction + --> $DIR/pub-restricted.rs:16:5 + | +16 | pub (b) fn bfn() {} + | ^^^ + | + = help: some possible visibility restrictions are: + `pub(crate)`: visible only on the current crate + `pub(super)`: visible only in the current module's parent + `pub(in path::to::module)`: visible only on the specified path +help: to make this visible only to module `b`, add `in` before the path: + | pub (in b) fn bfn() {} + +error: incorrect visibility restriction + --> $DIR/pub-restricted.rs:32:13 + | +32 | pub (a) invalid: usize, + | ^^^ + | + = help: some possible visibility restrictions are: + `pub(crate)`: visible only on the current crate + 
`pub(super)`: visible only in the current module's parent + `pub(in path::to::module)`: visible only on the specified path +help: to make this visible only to module `a`, add `in` before the path: + | pub (in a) invalid: usize, + +error: visibilities can only be restricted to ancestor modules + --> $DIR/pub-restricted.rs:33:17 + | +33 | pub (in x) non_parent_invalid: usize, + | ^ + +error: aborting due to 4 previous errors + -- cgit 1.4.1-3-g733a5 From 57009caabd2a45a6efa4d36149feec39ab0a0658 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Fri, 24 Mar 2017 23:00:21 -0700 Subject: Identify missing item category in `impl`s ```rust struct S; impl S { pub hello_method(&self) { println!("Hello"); } } fn main() { S.hello_method(); } ``` ```rust error: can't qualify macro invocation with `pub` --> file.rs:3:4 | 3 | pub hello_method(&self) { | ^^^- - expected `!` here for a macro invocation | | | did you mean to write `fn` here for a method declaration? | = help: try adjusting the macro to put `pub` inside the invocation ``` --- src/libsyntax/parse/parser.rs | 62 ++++++++++++++++++++++------- src/test/ui/did_you_mean/issue-40006.rs | 21 ++++++++++ src/test/ui/did_you_mean/issue-40006.stderr | 12 ++++++ 3 files changed, 80 insertions(+), 15 deletions(-) create mode 100644 src/test/ui/did_you_mean/issue-40006.rs create mode 100644 src/test/ui/did_you_mean/issue-40006.stderr (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index df4ccc94c04..a19339f8cc1 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -4660,25 +4660,30 @@ impl<'a> Parser<'a> { }) } - fn complain_if_pub_macro(&mut self, visa: &Visibility, span: Span) { - match *visa { - Visibility::Inherited => (), + fn complain_if_pub_macro(&mut self, vis: &Visibility, sp: Span) { + if let Err(mut err) = self.complain_if_pub_macro_diag(vis, sp) { + err.emit(); + } + } + + fn complain_if_pub_macro_diag(&mut self, vis: &Visibility, 
sp: Span) -> PResult<'a, ()> { + match *vis { + Visibility::Inherited => Ok(()), _ => { let is_macro_rules: bool = match self.token { token::Ident(sid) => sid.name == Symbol::intern("macro_rules"), _ => false, }; if is_macro_rules { - self.diagnostic().struct_span_err(span, "can't qualify macro_rules \ - invocation with `pub`") - .help("did you mean #[macro_export]?") - .emit(); + let mut err = self.diagnostic() + .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`"); + err.help("did you mean #[macro_export]?"); + Err(err) } else { - self.diagnostic().struct_span_err(span, "can't qualify macro \ - invocation with `pub`") - .help("try adjusting the macro to put `pub` \ - inside the invocation") - .emit(); + let mut err = self.diagnostic() + .struct_span_err(sp, "can't qualify macro invocation with `pub`"); + err.help("try adjusting the macro to put `pub` inside the invocation"); + Err(err) } } } @@ -4689,14 +4694,41 @@ impl<'a> Parser<'a> { -> PResult<'a, (Ident, Vec, ast::ImplItemKind)> { // code copied from parse_macro_use_or_failure... abstraction! if self.token.is_path_start() { - // method macro. + // Method macro. let prev_span = self.prev_span; - self.complain_if_pub_macro(&vis, prev_span); + // Before complaining about trying to set a macro as `pub`, + // check if `!` comes after the path. + let err = self.complain_if_pub_macro_diag(&vis, prev_span); let lo = self.span.lo; let pth = self.parse_path(PathStyle::Mod)?; - self.expect(&token::Not)?; + let bang_err = self.expect(&token::Not); + if let Err(mut err) = err { + if let Err(mut bang_err) = bang_err { + // Given this code `pub path(`, it seems like this is not setting the + // visibility of a macro invocation, but rather a mistyped method declaration. + // Keep the macro diagnostic, but also provide a hint that `fn` might be + // missing. Don't complain about the missing `!` as a separate diagnostic, add + // label in the appropriate place as part of one unified diagnostic. 
+ // + // x | pub path(&self) { + // | ^^^- - expected `!` here for a macro invocation + // | | + // | did you mean to write `fn` here for a method declaration? + + bang_err.cancel(); + err.span_label(self.span, &"expected `!` here for a macro invocation"); + // pub path( + // ^^ `sp` below will point to this + let sp = mk_sp(prev_span.hi, self.prev_span.lo); + err.span_label(sp, + &"did you mean to write `fn` here for a method declaration?"); + } + return Err(err); + } else if let Err(bang_err) = bang_err { + return Err(bang_err); + } // eat a matched-delimiter token tree: let (delim, tts) = self.expect_delimited_token_tree()?; diff --git a/src/test/ui/did_you_mean/issue-40006.rs b/src/test/ui/did_you_mean/issue-40006.rs new file mode 100644 index 00000000000..cf75929bae2 --- /dev/null +++ b/src/test/ui/did_you_mean/issue-40006.rs @@ -0,0 +1,21 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct S; + +impl S { + pub hello_method(&self) { + println!("Hello"); + } +} + +fn main() { + S.hello_method(); +} diff --git a/src/test/ui/did_you_mean/issue-40006.stderr b/src/test/ui/did_you_mean/issue-40006.stderr new file mode 100644 index 00000000000..93a0c58f91a --- /dev/null +++ b/src/test/ui/did_you_mean/issue-40006.stderr @@ -0,0 +1,12 @@ +error: can't qualify macro invocation with `pub` + --> $DIR/issue-40006.rs:14:5 + | +14 | pub hello_method(&self) { + | ^^^- - expected `!` here for a macro invocation + | | + | did you mean to write `fn` here for a method declaration? 
+ | + = help: try adjusting the macro to put `pub` inside the invocation + +error: aborting due to previous error + -- cgit 1.4.1-3-g733a5 From 03eca713816ee00ecacde27cc655dc199c6bff40 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Fri, 24 Mar 2017 19:14:58 -0700 Subject: Point at last valid token on failed `expect_one_of` ```rust error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` --> $DIR/token-error-correct-3.rs:29:9 | 25 | foo() | - expected one of `.`, `;`, `?`, `}`, or an operator after this ... 29 | } else { | ^ unexpected token ``` --- src/libsyntax/parse/parser.rs | 28 ++++++++++++------------ src/test/compile-fail/issue-10636-2.rs | 2 ++ src/test/compile-fail/macro-incomplete-parse.rs | 2 ++ src/test/parse-fail/bounds-obj-parens.rs | 4 +++- src/test/parse-fail/match-refactor-to-expr.rs | 4 +++- src/test/parse-fail/trailing-plus-in-bounds.rs | 4 +++- src/test/ui/resolve/token-error-correct-3.stderr | 9 ++++++-- src/test/ui/resolve/token-error-correct.stderr | 4 +++- 8 files changed, 37 insertions(+), 20 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index df4ccc94c04..6379015055b 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -548,20 +548,20 @@ impl<'a> Parser<'a> { expected.dedup(); let expect = tokens_to_string(&expected[..]); let actual = self.this_token_to_string(); - Err(self.fatal( - &(if expected.len() > 1 { - (format!("expected one of {}, found `{}`", - expect, - actual)) - } else if expected.is_empty() { - (format!("unexpected token: `{}`", - actual)) - } else { - (format!("expected {}, found `{}`", - expect, - actual)) - })[..] 
- )) + let (msg_exp, label_exp) = if expected.len() > 1 { + (format!("expected one of {}, found `{}`", expect, actual), + format!("expected one of {} after this", expect)) + } else if expected.is_empty() { + (format!("unexpected token: `{}`", actual), + "unexpected token after this".to_string()) + } else { + (format!("expected {}, found `{}`", expect, actual), + format!("expected {} after this", expect)) + }; + let mut err = self.fatal(&msg_exp); + err.span_label(self.prev_span, &label_exp); + err.span_label(self.span, &"unexpected token"); + Err(err) } } diff --git a/src/test/compile-fail/issue-10636-2.rs b/src/test/compile-fail/issue-10636-2.rs index beaf9e5059f..93759123618 100644 --- a/src/test/compile-fail/issue-10636-2.rs +++ b/src/test/compile-fail/issue-10636-2.rs @@ -14,5 +14,7 @@ pub fn trace_option(option: Option) { option.map(|some| 42; //~ NOTE: unclosed delimiter //~^ ERROR: expected one of + //~| NOTE: expected one of + //~| NOTE: unexpected token } //~ ERROR: incorrect close delimiter //~^ ERROR: expected expression, found `)` diff --git a/src/test/compile-fail/macro-incomplete-parse.rs b/src/test/compile-fail/macro-incomplete-parse.rs index c2ac99d1f6a..682664df981 100644 --- a/src/test/compile-fail/macro-incomplete-parse.rs +++ b/src/test/compile-fail/macro-incomplete-parse.rs @@ -20,6 +20,8 @@ macro_rules! ignored_item { macro_rules! 
ignored_expr { () => ( 1, //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `,` + //~^ NOTE expected one of `.`, `;`, `?`, `}`, or an operator after this + //~| NOTE unexpected token 2 ) } diff --git a/src/test/parse-fail/bounds-obj-parens.rs b/src/test/parse-fail/bounds-obj-parens.rs index ad59d4a52d7..02c119cf727 100644 --- a/src/test/parse-fail/bounds-obj-parens.rs +++ b/src/test/parse-fail/bounds-obj-parens.rs @@ -12,4 +12,6 @@ type A = Box<(Fn(D::Error) -> E) + 'static + Send + Sync>; // OK (but see #39318) -FAIL //~ ERROR +FAIL +//~^ ERROR +//~| ERROR diff --git a/src/test/parse-fail/match-refactor-to-expr.rs b/src/test/parse-fail/match-refactor-to-expr.rs index 37b66601e70..7bb1c40118a 100644 --- a/src/test/parse-fail/match-refactor-to-expr.rs +++ b/src/test/parse-fail/match-refactor-to-expr.rs @@ -14,7 +14,9 @@ fn main() { let foo = match //~ NOTE did you mean to remove this `match` keyword? Some(4).unwrap_or_else(5) - ; //~ ERROR expected one of `.`, `?`, `{`, or an operator, found `;` + //~^ NOTE expected one of `.`, `?`, `{`, or an operator after this + ; //~ NOTE unexpected token + //~^ ERROR expected one of `.`, `?`, `{`, or an operator, found `;` println!("{}", foo) } diff --git a/src/test/parse-fail/trailing-plus-in-bounds.rs b/src/test/parse-fail/trailing-plus-in-bounds.rs index 4a2e6d5bdcd..2bb2c97790c 100644 --- a/src/test/parse-fail/trailing-plus-in-bounds.rs +++ b/src/test/parse-fail/trailing-plus-in-bounds.rs @@ -16,4 +16,6 @@ fn main() { let x: Box = box 3 as Box; // Trailing `+` is OK } -FAIL //~ ERROR +FAIL +//~^ ERROR +//~| ERROR diff --git a/src/test/ui/resolve/token-error-correct-3.stderr b/src/test/ui/resolve/token-error-correct-3.stderr index 56e36889575..2e0edf0c4b8 100644 --- a/src/test/ui/resolve/token-error-correct-3.stderr +++ b/src/test/ui/resolve/token-error-correct-3.stderr @@ -14,13 +14,18 @@ error: expected one of `,`, `.`, `?`, or an operator, found `;` --> $DIR/token-error-correct-3.rs:23:35 | 23 | 
callback(path.as_ref(); //~ NOTE: unclosed delimiter - | ^ + | -^ unexpected token + | | + | expected one of `,`, `.`, `?`, or an operator after this error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` --> $DIR/token-error-correct-3.rs:29:9 | +25 | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types + | - expected one of `.`, `;`, `?`, `}`, or an operator after this +... 29 | } else { //~ ERROR: incorrect close delimiter: `}` - | ^ + | ^ unexpected token error[E0425]: cannot find function `is_directory` in this scope --> $DIR/token-error-correct-3.rs:21:13 diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr index 248a923efaf..36f298a456a 100644 --- a/src/test/ui/resolve/token-error-correct.stderr +++ b/src/test/ui/resolve/token-error-correct.stderr @@ -32,7 +32,9 @@ error: expected one of `)`, `,`, `.`, `<`, `?`, `break`, `continue`, `false`, `f --> $DIR/token-error-correct.rs:14:13 | 14 | foo(bar(; - | ^ + | -^ unexpected token + | | + | expected one of `)`, `,`, `.`, `<`, `?`, `break`, `continue`, `false`, `for`, `if`, `loop`, `match`, `move`, `return`, `true`, `unsafe`, `while`, or an operator after this error: expected expression, found `)` --> $DIR/token-error-correct.rs:23:1 -- cgit 1.4.1-3-g733a5 From 78ae8feebbf9a2c70d42780d0c646cbbc1f2cdbc Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Sat, 25 Mar 2017 15:36:59 -0700 Subject: Improve wording and spans for unexpected token * Point at where the token was expected instead of the last token successfuly parsed. * Only show `unexpected token` if the next char and the unexpected token don't have the same span. * Change some cfail and pfail tests to ui test. * Don't show all possible tokens in span label if they are more than 6. 
--- src/libsyntax/parse/parser.rs | 25 +++++++++++---- src/libsyntax_pos/lib.rs | 6 ++++ src/test/compile-fail/issue-10636-2.rs | 20 ------------ src/test/compile-fail/macro-incomplete-parse.rs | 40 ------------------------ src/test/parse-fail/bounds-obj-parens.rs | 17 ---------- src/test/parse-fail/match-refactor-to-expr.rs | 2 +- src/test/parse-fail/trailing-plus-in-bounds.rs | 21 ------------- src/test/ui/resolve/token-error-correct-3.stderr | 6 ++-- src/test/ui/resolve/token-error-correct.stderr | 4 +-- src/test/ui/token/bounds-obj-parens.rs | 17 ++++++++++ src/test/ui/token/bounds-obj-parens.stderr | 7 +++++ src/test/ui/token/issue-10636-2.rs | 20 ++++++++++++ src/test/ui/token/issue-10636-2.stderr | 27 ++++++++++++++++ src/test/ui/token/macro-incomplete-parse.rs | 40 ++++++++++++++++++++++++ src/test/ui/token/macro-incomplete-parse.stderr | 31 ++++++++++++++++++ src/test/ui/token/trailing-plus-in-bounds.rs | 21 +++++++++++++ src/test/ui/token/trailing-plus-in-bounds.stderr | 7 +++++ 17 files changed, 199 insertions(+), 112 deletions(-) delete mode 100644 src/test/compile-fail/issue-10636-2.rs delete mode 100644 src/test/compile-fail/macro-incomplete-parse.rs delete mode 100644 src/test/parse-fail/bounds-obj-parens.rs delete mode 100644 src/test/parse-fail/trailing-plus-in-bounds.rs create mode 100644 src/test/ui/token/bounds-obj-parens.rs create mode 100644 src/test/ui/token/bounds-obj-parens.stderr create mode 100644 src/test/ui/token/issue-10636-2.rs create mode 100644 src/test/ui/token/issue-10636-2.stderr create mode 100644 src/test/ui/token/macro-incomplete-parse.rs create mode 100644 src/test/ui/token/macro-incomplete-parse.stderr create mode 100644 src/test/ui/token/trailing-plus-in-bounds.rs create mode 100644 src/test/ui/token/trailing-plus-in-bounds.stderr (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 6379015055b..4076368c180 100644 --- a/src/libsyntax/parse/parser.rs +++ 
b/src/libsyntax/parse/parser.rs @@ -548,19 +548,32 @@ impl<'a> Parser<'a> { expected.dedup(); let expect = tokens_to_string(&expected[..]); let actual = self.this_token_to_string(); - let (msg_exp, label_exp) = if expected.len() > 1 { + let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { + let short_expect = if expected.len() > 6 { + format!("{} possible tokens", expected.len()) + } else { + expect.clone() + }; (format!("expected one of {}, found `{}`", expect, actual), - format!("expected one of {} after this", expect)) + (self.prev_span.next_point(), format!("expected one of {} here", short_expect))) } else if expected.is_empty() { (format!("unexpected token: `{}`", actual), - "unexpected token after this".to_string()) + (self.prev_span, "unexpected token after this".to_string())) } else { (format!("expected {}, found `{}`", expect, actual), - format!("expected {} after this", expect)) + (self.prev_span.next_point(), format!("expected {} here", expect))) }; let mut err = self.fatal(&msg_exp); - err.span_label(self.prev_span, &label_exp); - err.span_label(self.span, &"unexpected token"); + let sp = if self.token == token::Token::Eof { + // This is EOF, don't want to point at the following char, but rather the last token + self.prev_span + } else { + label_sp + }; + err.span_label(sp, &label_exp); + if label_sp != self.span { + err.span_label(self.span, &"unexpected token"); + } Err(err) } } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 3808923e772..07494ff904e 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -79,6 +79,12 @@ impl Span { Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} } + /// Returns a new span representing the next character after the end-point of this span + pub fn next_point(self) -> Span { + let lo = BytePos(cmp::max(self.hi.0, self.lo.0 + 1)); + Span { lo: lo, hi: lo, expn_id: self.expn_id} + } + /// Returns `self` if `self` is not the dummy span, and `other` otherwise. 
pub fn substitute_dummy(self, other: Span) -> Span { if self.source_equal(&DUMMY_SP) { other } else { self } diff --git a/src/test/compile-fail/issue-10636-2.rs b/src/test/compile-fail/issue-10636-2.rs deleted file mode 100644 index 93759123618..00000000000 --- a/src/test/compile-fail/issue-10636-2.rs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// FIXME(31528) we emit a bunch of silly errors here due to continuing past the -// first one. This would be easy-ish to address by better recovery in tokenisation. - -pub fn trace_option(option: Option) { - option.map(|some| 42; //~ NOTE: unclosed delimiter - //~^ ERROR: expected one of - //~| NOTE: expected one of - //~| NOTE: unexpected token -} //~ ERROR: incorrect close delimiter -//~^ ERROR: expected expression, found `)` diff --git a/src/test/compile-fail/macro-incomplete-parse.rs b/src/test/compile-fail/macro-incomplete-parse.rs deleted file mode 100644 index 682664df981..00000000000 --- a/src/test/compile-fail/macro-incomplete-parse.rs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags: -Z continue-parse-after-error - -macro_rules! ignored_item { - () => { - fn foo() {} - fn bar() {} - , //~ ERROR macro expansion ignores token `,` - } -} - -macro_rules! 
ignored_expr { - () => ( 1, //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `,` - //~^ NOTE expected one of `.`, `;`, `?`, `}`, or an operator after this - //~| NOTE unexpected token - 2 ) -} - -macro_rules! ignored_pat { - () => ( 1, 2 ) //~ ERROR macro expansion ignores token `,` -} - -ignored_item!(); //~ NOTE caused by the macro expansion here - -fn main() { - ignored_expr!(); - match 1 { - ignored_pat!() => (), //~ NOTE caused by the macro expansion here - _ => (), - } -} diff --git a/src/test/parse-fail/bounds-obj-parens.rs b/src/test/parse-fail/bounds-obj-parens.rs deleted file mode 100644 index 02c119cf727..00000000000 --- a/src/test/parse-fail/bounds-obj-parens.rs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags: -Z parse-only - -type A = Box<(Fn(D::Error) -> E) + 'static + Send + Sync>; // OK (but see #39318) - -FAIL -//~^ ERROR -//~| ERROR diff --git a/src/test/parse-fail/match-refactor-to-expr.rs b/src/test/parse-fail/match-refactor-to-expr.rs index 7bb1c40118a..e2fee1d1895 100644 --- a/src/test/parse-fail/match-refactor-to-expr.rs +++ b/src/test/parse-fail/match-refactor-to-expr.rs @@ -14,7 +14,7 @@ fn main() { let foo = match //~ NOTE did you mean to remove this `match` keyword? 
Some(4).unwrap_or_else(5) - //~^ NOTE expected one of `.`, `?`, `{`, or an operator after this + //~^ NOTE expected one of `.`, `?`, `{`, or an operator here ; //~ NOTE unexpected token //~^ ERROR expected one of `.`, `?`, `{`, or an operator, found `;` diff --git a/src/test/parse-fail/trailing-plus-in-bounds.rs b/src/test/parse-fail/trailing-plus-in-bounds.rs deleted file mode 100644 index 2bb2c97790c..00000000000 --- a/src/test/parse-fail/trailing-plus-in-bounds.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags: -Z parse-only -Z continue-parse-after-error - -use std::fmt::Debug; - -fn main() { - let x: Box = box 3 as Box; // Trailing `+` is OK -} - -FAIL -//~^ ERROR -//~| ERROR diff --git a/src/test/ui/resolve/token-error-correct-3.stderr b/src/test/ui/resolve/token-error-correct-3.stderr index 2e0edf0c4b8..bf7db67e728 100644 --- a/src/test/ui/resolve/token-error-correct-3.stderr +++ b/src/test/ui/resolve/token-error-correct-3.stderr @@ -14,15 +14,13 @@ error: expected one of `,`, `.`, `?`, or an operator, found `;` --> $DIR/token-error-correct-3.rs:23:35 | 23 | callback(path.as_ref(); //~ NOTE: unclosed delimiter - | -^ unexpected token - | | - | expected one of `,`, `.`, `?`, or an operator after this + | ^ expected one of `,`, `.`, `?`, or an operator here error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` --> $DIR/token-error-correct-3.rs:29:9 | 25 | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types - | - expected one of `.`, `;`, `?`, `}`, or an operator after this + | - expected one of `.`, `;`, `?`, `}`, or an operator here ... 
29 | } else { //~ ERROR: incorrect close delimiter: `}` | ^ unexpected token diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr index 36f298a456a..226fa6469bc 100644 --- a/src/test/ui/resolve/token-error-correct.stderr +++ b/src/test/ui/resolve/token-error-correct.stderr @@ -32,9 +32,7 @@ error: expected one of `)`, `,`, `.`, `<`, `?`, `break`, `continue`, `false`, `f --> $DIR/token-error-correct.rs:14:13 | 14 | foo(bar(; - | -^ unexpected token - | | - | expected one of `)`, `,`, `.`, `<`, `?`, `break`, `continue`, `false`, `for`, `if`, `loop`, `match`, `move`, `return`, `true`, `unsafe`, `while`, or an operator after this + | ^ expected one of 18 possible tokens here error: expected expression, found `)` --> $DIR/token-error-correct.rs:23:1 diff --git a/src/test/ui/token/bounds-obj-parens.rs b/src/test/ui/token/bounds-obj-parens.rs new file mode 100644 index 00000000000..02c119cf727 --- /dev/null +++ b/src/test/ui/token/bounds-obj-parens.rs @@ -0,0 +1,17 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// compile-flags: -Z parse-only + +type A = Box<(Fn(D::Error) -> E) + 'static + Send + Sync>; // OK (but see #39318) + +FAIL +//~^ ERROR +//~| ERROR diff --git a/src/test/ui/token/bounds-obj-parens.stderr b/src/test/ui/token/bounds-obj-parens.stderr new file mode 100644 index 00000000000..ebee363f278 --- /dev/null +++ b/src/test/ui/token/bounds-obj-parens.stderr @@ -0,0 +1,7 @@ +error: expected one of `!` or `::`, found `` + --> $DIR/bounds-obj-parens.rs:15:1 + | +15 | FAIL + | ^^^^ expected one of `!` or `::` here + +error: aborting due to previous error diff --git a/src/test/ui/token/issue-10636-2.rs b/src/test/ui/token/issue-10636-2.rs new file mode 100644 index 00000000000..93759123618 --- /dev/null +++ b/src/test/ui/token/issue-10636-2.rs @@ -0,0 +1,20 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// FIXME(31528) we emit a bunch of silly errors here due to continuing past the +// first one. This would be easy-ish to address by better recovery in tokenisation. 
+ +pub fn trace_option(option: Option) { + option.map(|some| 42; //~ NOTE: unclosed delimiter + //~^ ERROR: expected one of + //~| NOTE: expected one of + //~| NOTE: unexpected token +} //~ ERROR: incorrect close delimiter +//~^ ERROR: expected expression, found `)` diff --git a/src/test/ui/token/issue-10636-2.stderr b/src/test/ui/token/issue-10636-2.stderr new file mode 100644 index 00000000000..183ad30c4ef --- /dev/null +++ b/src/test/ui/token/issue-10636-2.stderr @@ -0,0 +1,27 @@ +error: incorrect close delimiter: `}` + --> $DIR/issue-10636-2.rs:19:1 + | +19 | } //~ ERROR: incorrect close delimiter + | ^ + | +note: unclosed delimiter + --> $DIR/issue-10636-2.rs:15:15 + | +15 | option.map(|some| 42; //~ NOTE: unclosed delimiter + | ^ + +error: expected one of `,`, `.`, `?`, or an operator, found `;` + --> $DIR/issue-10636-2.rs:15:25 + | +15 | option.map(|some| 42; //~ NOTE: unclosed delimiter + | ^ expected one of `,`, `.`, `?`, or an operator here + +error: expected expression, found `)` + --> $DIR/issue-10636-2.rs:19:1 + | +19 | } //~ ERROR: incorrect close delimiter + | ^ + +error: main function not found + +error: aborting due to 4 previous errors diff --git a/src/test/ui/token/macro-incomplete-parse.rs b/src/test/ui/token/macro-incomplete-parse.rs new file mode 100644 index 00000000000..47374fc3c60 --- /dev/null +++ b/src/test/ui/token/macro-incomplete-parse.rs @@ -0,0 +1,40 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z continue-parse-after-error + +macro_rules! ignored_item { + () => { + fn foo() {} + fn bar() {} + , //~ ERROR macro expansion ignores token `,` + } +} + +macro_rules! 
ignored_expr { + () => ( 1, //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `,` + //~^ NOTE expected one of `.`, `;`, `?`, `}`, or an operator here + //~| NOTE unexpected token + 2 ) +} + +macro_rules! ignored_pat { + () => ( 1, 2 ) //~ ERROR macro expansion ignores token `,` +} + +ignored_item!(); //~ NOTE caused by the macro expansion here + +fn main() { + ignored_expr!(); + match 1 { + ignored_pat!() => (), //~ NOTE caused by the macro expansion here + _ => (), + } +} diff --git a/src/test/ui/token/macro-incomplete-parse.stderr b/src/test/ui/token/macro-incomplete-parse.stderr new file mode 100644 index 00000000000..bea00a6444c --- /dev/null +++ b/src/test/ui/token/macro-incomplete-parse.stderr @@ -0,0 +1,31 @@ +error: macro expansion ignores token `,` and any following + --> $DIR/macro-incomplete-parse.rs:17:9 + | +17 | , //~ ERROR macro expansion ignores token `,` + | ^ + | +note: caused by the macro expansion here; the usage of `ignored_item!` is likely invalid in item context + --> $DIR/macro-incomplete-parse.rs:32:1 + | +32 | ignored_item!(); //~ NOTE caused by the macro expansion here + | ^^^^^^^^^^^^^^^^ + +error: expected one of `.`, `;`, `?`, `}`, or an operator, found `,` + --> $DIR/macro-incomplete-parse.rs:22:14 + | +22 | () => ( 1, //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `,` + | ^ expected one of `.`, `;`, `?`, `}`, or an operator here + +error: macro expansion ignores token `,` and any following + --> $DIR/macro-incomplete-parse.rs:29:14 + | +29 | () => ( 1, 2 ) //~ ERROR macro expansion ignores token `,` + | ^ + | +note: caused by the macro expansion here; the usage of `ignored_pat!` is likely invalid in pattern context + --> $DIR/macro-incomplete-parse.rs:37:9 + | +37 | ignored_pat!() => (), //~ NOTE caused by the macro expansion here + | ^^^^^^^^^^^^^^ + +error: aborting due to 3 previous errors diff --git a/src/test/ui/token/trailing-plus-in-bounds.rs b/src/test/ui/token/trailing-plus-in-bounds.rs 
new file mode 100644 index 00000000000..2bb2c97790c --- /dev/null +++ b/src/test/ui/token/trailing-plus-in-bounds.rs @@ -0,0 +1,21 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only -Z continue-parse-after-error + +use std::fmt::Debug; + +fn main() { + let x: Box = box 3 as Box; // Trailing `+` is OK +} + +FAIL +//~^ ERROR +//~| ERROR diff --git a/src/test/ui/token/trailing-plus-in-bounds.stderr b/src/test/ui/token/trailing-plus-in-bounds.stderr new file mode 100644 index 00000000000..74caf8f5c2b --- /dev/null +++ b/src/test/ui/token/trailing-plus-in-bounds.stderr @@ -0,0 +1,7 @@ +error: expected one of `!` or `::`, found `` + --> ../../src/test/ui/token/trailing-plus-in-bounds.rs:19:1 + | +19 | FAIL + | ^^^^ expected one of `!` or `::` here + +error: aborting due to previous error -- cgit 1.4.1-3-g733a5 From eb447f4ef436f0c6211a13de1e6150a09228a9c6 Mon Sep 17 00:00:00 2001 From: Oliver Schneider Date: Fri, 24 Mar 2017 09:31:26 +0100 Subject: Fix various useless derefs and slicings --- src/bootstrap/check.rs | 2 +- src/grammar/verify.rs | 6 +++--- src/libcollections/linked_list.rs | 2 +- src/libgraphviz/lib.rs | 6 +++--- src/librustc/ich/fingerprint.rs | 4 ++-- src/librustc/lint/context.rs | 5 ++--- src/librustc/middle/stability.rs | 2 +- src/librustc_borrowck/borrowck/fragments.rs | 24 ++++++++++++------------ src/librustc_borrowck/borrowck/mod.rs | 2 +- src/librustc_borrowck/graphviz.rs | 2 +- src/librustc_const_eval/_match.rs | 4 ++-- src/librustc_const_eval/check_match.rs | 2 +- src/librustc_data_structures/accumulate_vec.rs | 8 ++++---- src/librustc_data_structures/base_n.rs | 2 +- src/librustc_data_structures/blake2b.rs | 
2 +- src/librustc_data_structures/indexed_set.rs | 8 ++++---- src/librustc_driver/lib.rs | 10 +++++----- src/librustc_driver/pretty.rs | 6 +++--- src/librustc_driver/test.rs | 2 +- src/librustc_incremental/persist/file_format.rs | 4 ++-- src/librustc_lint/bad_style.rs | 2 +- src/librustc_lint/builtin.rs | 2 +- src/librustc_lint/unused.rs | 2 +- src/librustc_llvm/build.rs | 4 ++-- src/librustc_metadata/creader.rs | 2 +- src/librustc_metadata/encoder.rs | 4 ++-- src/librustc_metadata/locator.rs | 6 +++--- src/librustc_plugin/load.rs | 6 +++--- src/librustc_save_analysis/csv_dumper.rs | 2 +- src/librustc_trans/abi.rs | 2 +- src/librustc_trans/adt.rs | 8 ++++---- src/librustc_trans/asm.rs | 4 ++-- src/librustc_trans/back/archive.rs | 4 ++-- src/librustc_trans/back/link.rs | 14 +++++++------- src/librustc_trans/back/lto.rs | 4 ++-- src/librustc_trans/back/rpath.rs | 14 +++++++------- src/librustc_trans/back/symbol_export.rs | 4 ++-- src/librustc_trans/back/symbol_names.rs | 2 +- src/librustc_trans/back/write.rs | 12 ++++++------ src/librustc_trans/base.rs | 12 ++++++------ src/librustc_trans/builder.rs | 6 +++--- src/librustdoc/html/render.rs | 4 ++-- src/libsyntax/ext/tt/macro_rules.rs | 2 +- src/libsyntax/feature_gate.rs | 9 ++++----- src/libsyntax/parse/parser.rs | 6 +++--- src/libsyntax/test.rs | 2 +- 46 files changed, 120 insertions(+), 122 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 40cdb9242df..f8f641060c4 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -586,7 +586,7 @@ fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) { .arg(ADB_TEST_DIR)); let target_dir = format!("{}/{}", ADB_TEST_DIR, target); - build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]])); + build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir])); for f in t!(build.sysroot_libdir(compiler, target).read_dir()) { let f = t!(f); diff --git 
a/src/grammar/verify.rs b/src/grammar/verify.rs index 919fc98e438..bd28a63c5f4 100644 --- a/src/grammar/verify.rs +++ b/src/grammar/verify.rs @@ -196,7 +196,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap, surrogate_ let toknum = &s[content_end + 3 .. toknum_end]; let not_found = format!("didn't find token {:?} in the map", toknum); - let proto_tok = tokens.get(toknum).expect(¬_found[..]); + let proto_tok = tokens.get(toknum).expect(¬_found); let nm = Symbol::intern(content); @@ -304,14 +304,14 @@ fn main() { let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap(); let mut token_list = String::new(); token_file.read_to_string(&mut token_list).unwrap(); - let token_map = parse_token_list(&token_list[..]); + let token_map = parse_token_list(&token_list); let stdin = std::io::stdin(); let lock = stdin.lock(); let lines = lock.lines(); let antlr_tokens = lines.map(|l| parse_antlr_token(l.unwrap().trim(), &token_map, - &surrogate_pairs_pos[..], + &surrogate_pairs_pos, has_bom)); for antlr_tok in antlr_tokens { diff --git a/src/libcollections/linked_list.rs b/src/libcollections/linked_list.rs index f58c87b801f..8f0488f6936 100644 --- a/src/libcollections/linked_list.rs +++ b/src/libcollections/linked_list.rs @@ -1376,7 +1376,7 @@ mod tests { thread::spawn(move || { check_links(&n); let a: &[_] = &[&1, &2, &3]; - assert_eq!(a, &n.iter().collect::>()[..]); + assert_eq!(a, &*n.iter().collect::>()); }) .join() .ok() diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 8e587ad211d..1b2c7775185 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -554,7 +554,7 @@ impl<'a> LabelText<'a> { pub fn to_dot_string(&self) -> String { match self { &LabelStr(ref s) => format!("\"{}\"", s.escape_default()), - &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s[..])), + &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)), &HtmlStr(ref s) => format!("<{}>", s), } } @@ -587,7 +587,7 @@ impl<'a> LabelText<'a> { let mut 
prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(&suffix[..]); + prefix.push_str(&suffix); EscStr(prefix.into_cow()) } } @@ -878,7 +878,7 @@ mod tests { type Node = Node; type Edge = &'a Edge; fn graph_id(&'a self) -> Id<'a> { - Id::new(&self.name[..]).unwrap() + Id::new(self.name).unwrap() } fn node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) diff --git a/src/librustc/ich/fingerprint.rs b/src/librustc/ich/fingerprint.rs index d296d8293fb..e760f7efc93 100644 --- a/src/librustc/ich/fingerprint.rs +++ b/src/librustc/ich/fingerprint.rs @@ -55,7 +55,7 @@ impl Fingerprint { impl Encodable for Fingerprint { #[inline] fn encode(&self, s: &mut S) -> Result<(), S::Error> { - for &byte in &self.0[..] { + for &byte in &self.0 { s.emit_u8(byte)?; } Ok(()) @@ -66,7 +66,7 @@ impl Decodable for Fingerprint { #[inline] fn decode(d: &mut D) -> Result { let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]); - for byte in &mut result.0[..] 
{ + for byte in &mut result.0 { *byte = d.read_u8()?; } Ok(result) diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index d35f965e2ff..20bf241a999 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -40,7 +40,6 @@ use std::cmp; use std::default::Default as StdDefault; use std::mem; use std::fmt; -use std::ops::Deref; use syntax::attr; use syntax::ast; use syntax::symbol::Symbol; @@ -485,7 +484,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session, Allow => bug!("earlier conditional return should handle Allow case") }; let hyphen_case_lint_name = name.replace("_", "-"); - if lint_flag_val.as_str().deref() == name { + if lint_flag_val.as_str() == name { err.note(&format!("requested on the command line with `{} {}`", flag, hyphen_case_lint_name)); } else { @@ -496,7 +495,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session, }, Node(lint_attr_name, src) => { def = Some(src); - if lint_attr_name.as_str().deref() != name { + if lint_attr_name.as_str() != name { let level_str = level.as_str(); err.note(&format!("#[{}({})] implied by #[{}({})]", level_str, name, level_str, lint_attr_name)); diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 4115b4669f4..4354ed6817a 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -536,7 +536,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if !self.stability.borrow().active_features.contains(feature) { let msg = match *reason { Some(ref r) => format!("use of unstable library feature '{}': {}", - &feature.as_str(), &r), + feature.as_str(), &r), None => format!("use of unstable library feature '{}'", &feature) }; emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span, diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index c0f681680a9..b728d4d5345 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ 
-267,11 +267,11 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {:?}", path_lps(&moved[..])); + debug!("fragments 1 moved: {:?}", path_lps(&moved)); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..])); + debug!("fragments 1 assigned: {:?}", path_lps(&assigned)); // Second, build parents from the moved and assigned. for m in &moved { @@ -291,14 +291,14 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {:?}", path_lps(&parents[..])); + debug!("fragments 2 parents: {:?}", path_lps(&parents)); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, &parents[..])); - debug!("fragments 3 moved: {:?}", path_lps(&moved[..])); + moved.retain(|f| non_member(*f, &parents)); + debug!("fragments 3 moved: {:?}", path_lps(&moved)); - assigned.retain(|f| non_member(*f, &parents[..])); - debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..])); + assigned.retain(|f| non_member(*f, &parents)); + debug!("fragments 3 assigned: {:?}", path_lps(&assigned)); // Fourth, build the leftover from the moved, assigned, and parents. for m in &moved { @@ -316,16 +316,16 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..])); + debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved)); // Fifth, filter the leftover fragments down to its core. 
unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, &parents[..]) && - non_member(mpi, &moved[..]) && - non_member(mpi, &assigned[..]) + Just(mpi) => non_member(mpi, &parents) && + non_member(mpi, &moved) && + non_member(mpi, &assigned) }); - debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..])); + debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved)); // Swap contents back in. fragments.unmoved_fragments = unmoved; diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 20d495976b0..59c3e68aada 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -112,7 +112,7 @@ fn borrowck_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body_id: hir::BodyId) { &flowed_moves.move_data, owner_id); - check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans[..], body); + check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body); } fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>, diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index 0da9525efd8..e3a2bfa3927 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -88,7 +88,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp); - set.push_str(&loan_str[..]); + set.push_str(&loan_str); saw_some = true; true }); diff --git a/src/librustc_const_eval/_match.rs b/src/librustc_const_eval/_match.rs index 53a7e872928..c1dc5f5f7a2 100644 --- a/src/librustc_const_eval/_match.rs +++ b/src/librustc_const_eval/_match.rs @@ -680,10 +680,10 @@ fn is_useful_specialized<'p, 'a:'p, 'tcx: 'a>( }).collect(); let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect(); let matrix = Matrix(m.iter().flat_map(|r| { - specialize(cx, &r[..], &ctor, &wild_patterns) + specialize(cx, &r, &ctor, &wild_patterns) }).collect()); match 
specialize(cx, v, &ctor, &wild_patterns) { - Some(v) => match is_useful(cx, &matrix, &v[..], witness) { + Some(v) => match is_useful(cx, &matrix, &v, witness) { UsefulWithWitness(witnesses) => UsefulWithWitness( witnesses.into_iter() .map(|witness| witness.apply_constructor(cx, &ctor, lty)) diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs index e2b9f174ff0..9d55281d019 100644 --- a/src/librustc_const_eval/check_match.rs +++ b/src/librustc_const_eval/check_match.rs @@ -311,7 +311,7 @@ fn check_arms<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>, for &(pat, hir_pat) in pats { let v = vec![pat]; - match is_useful(cx, &seen, &v[..], LeaveOutWitness) { + match is_useful(cx, &seen, &v, LeaveOutWitness) { NotUseful => { match source { hir::MatchSource::IfLetDesugar { .. } => { diff --git a/src/librustc_data_structures/accumulate_vec.rs b/src/librustc_data_structures/accumulate_vec.rs index d4bd9e707fd..c03c2890ba3 100644 --- a/src/librustc_data_structures/accumulate_vec.rs +++ b/src/librustc_data_structures/accumulate_vec.rs @@ -91,8 +91,8 @@ impl Deref for AccumulateVec { type Target = [A::Element]; fn deref(&self) -> &Self::Target { match *self { - AccumulateVec::Array(ref v) => &v[..], - AccumulateVec::Heap(ref v) => &v[..], + AccumulateVec::Array(ref v) => v, + AccumulateVec::Heap(ref v) => v, } } } @@ -100,8 +100,8 @@ impl Deref for AccumulateVec { impl DerefMut for AccumulateVec { fn deref_mut(&mut self) -> &mut [A::Element] { match *self { - AccumulateVec::Array(ref mut v) => &mut v[..], - AccumulateVec::Heap(ref mut v) => &mut v[..], + AccumulateVec::Array(ref mut v) => v, + AccumulateVec::Heap(ref mut v) => v, } } } diff --git a/src/librustc_data_structures/base_n.rs b/src/librustc_data_structures/base_n.rs index 4359581a897..cf54229fa7f 100644 --- a/src/librustc_data_structures/base_n.rs +++ b/src/librustc_data_structures/base_n.rs @@ -48,7 +48,7 @@ pub fn encode(n: u64, base: u64) -> String { #[test] fn test_encode() { 
fn test(n: u64, base: u64) { - assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base)[..], base as u32)); + assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base), base as u32)); } for base in 2..37 { diff --git a/src/librustc_data_structures/blake2b.rs b/src/librustc_data_structures/blake2b.rs index 31492e26219..9d97a83f693 100644 --- a/src/librustc_data_structures/blake2b.rs +++ b/src/librustc_data_structures/blake2b.rs @@ -35,7 +35,7 @@ pub struct Blake2bCtx { impl ::std::fmt::Debug for Blake2bCtx { fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { try!(write!(fmt, "hash: ")); - for v in &self.h[..] { + for v in &self.h { try!(write!(fmt, "{:x}", v)); } Ok(()) diff --git a/src/librustc_data_structures/indexed_set.rs b/src/librustc_data_structures/indexed_set.rs index 2e9e054e97e..572ce98d3ae 100644 --- a/src/librustc_data_structures/indexed_set.rs +++ b/src/librustc_data_structures/indexed_set.rs @@ -91,13 +91,13 @@ impl IdxSet { impl Deref for IdxSetBuf { type Target = IdxSet; fn deref(&self) -> &IdxSet { - unsafe { IdxSet::from_slice(&self.bits[..]) } + unsafe { IdxSet::from_slice(&self.bits) } } } impl DerefMut for IdxSetBuf { fn deref_mut(&mut self) -> &mut IdxSet { - unsafe { IdxSet::from_slice_mut(&mut self.bits[..]) } + unsafe { IdxSet::from_slice_mut(&mut self.bits) } } } @@ -135,11 +135,11 @@ impl IdxSet { } pub fn words(&self) -> &[Word] { - &self.bits[..] + &self.bits } pub fn words_mut(&mut self) -> &mut [Word] { - &mut self.bits[..] + &mut self.bits } pub fn clone_from(&mut self, other: &IdxSet) { diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 68b9f85721a..e11118901d2 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -233,7 +233,7 @@ fn make_output(matches: &getopts::Matches) -> (Option, Option) // Extract input (string or file and optional path) from matches. 
fn make_input(free_matches: &[String]) -> Option<(Input, Option)> { if free_matches.len() == 1 { - let ifile = &free_matches[0][..]; + let ifile = &free_matches[0]; if ifile == "-" { let mut src = String::new(); io::stdin().read_to_string(&mut src).unwrap(); @@ -800,7 +800,7 @@ Available lint options: for lint in lints { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(&name[..]), + padded(&name), lint.default_level.as_str(), lint.desc); } @@ -838,7 +838,7 @@ Available lint options: .map(|x| x.to_string().replace("_", "-")) .collect::>() .join(", "); - println!(" {} {}", padded(&name[..]), desc); + println!(" {} {}", padded(&name), desc); } println!("\n"); }; @@ -945,7 +945,7 @@ pub fn handle_options(args: &[String]) -> Option { .into_iter() .map(|x| x.opt_group) .collect(); - let matches = match getopts::getopts(&args[..], &all_groups) { + let matches = match getopts::getopts(&args, &all_groups) { Ok(m) => m, Err(f) => early_error(ErrorOutputType::default(), &f.to_string()), }; @@ -1084,7 +1084,7 @@ pub fn monitor(f: F) { format!("we would appreciate a bug report: {}", BUG_REPORT_URL)]; for note in &xs { handler.emit(&MultiSpan::new(), - ¬e[..], + ¬e, errors::Level::Note); } if match env::var_os("RUST_BACKTRACE") { diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 6cd97e95598..18dc504ca8a 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -589,7 +589,7 @@ impl UserIdentifiedItem { -> NodesMatchingUII<'a, 'hir> { match *self { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), - ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])), + ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts)), } } @@ -600,7 +600,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(&message[..]) + sess.fatal(&message) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ 
-771,7 +771,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, fn expand_err_details(r: io::Result<()>) -> io::Result<()> { r.map_err(|ioerr| { io::Error::new(io::ErrorKind::Other, - &format!("graphviz::render failed: {}", ioerr)[..]) + format!("graphviz::render failed: {}", ioerr)) }) } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 9568cc3d6de..af2416f787e 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -289,7 +289,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { pub fn t_param(&self, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - self.infcx.tcx.mk_param(index, Symbol::intern(&name[..])) + self.infcx.tcx.mk_param(index, Symbol::intern(&name)) } pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region { diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs index b67caa6750a..5c20f65274f 100644 --- a/src/librustc_incremental/persist/file_format.rs +++ b/src/librustc_incremental/persist/file_format.rs @@ -99,9 +99,9 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result>> { let rustc_version_str_len = rustc_version_str_len[0] as usize; let mut buffer = Vec::with_capacity(rustc_version_str_len); buffer.resize(rustc_version_str_len, 0); - file.read_exact(&mut buffer[..])?; + file.read_exact(&mut buffer)?; - if &buffer[..] 
!= rustc_version().as_bytes() { + if buffer != rustc_version().as_bytes() { report_format_mismatch(sess, path, "Different compiler version"); return Ok(None); } diff --git a/src/librustc_lint/bad_style.rs b/src/librustc_lint/bad_style.rs index 353b86820c4..c4220e9a0d3 100644 --- a/src/librustc_lint/bad_style.rs +++ b/src/librustc_lint/bad_style.rs @@ -88,7 +88,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, name, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m); } } } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index f0276f90f27..0ee9d4a42c7 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -334,7 +334,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::list_contains_name(&l[..], "hidden"), + Some(l) => attr::list_contains_name(&l, "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index abba8afd9da..86bf209ccf8 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -146,7 +146,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { ty::TyBool => return, ty::TyAdt(def, _) => { let attrs = cx.tcx.get_attrs(def.did); - check_must_use(cx, &attrs[..], s.span) + check_must_use(cx, &attrs, s.span) } _ => false, }; diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs index 42717ec289c..2b945e0a3af 100644 --- a/src/librustc_llvm/build.rs +++ b/src/librustc_llvm/build.rs @@ -140,7 +140,7 @@ fn main() { cfg.flag(flag); } - for component in &components[..] 
{ + for component in &components { let mut flag = String::from("-DLLVM_COMPONENT_"); flag.push_str(&component.to_uppercase()); cfg.flag(&flag); @@ -173,7 +173,7 @@ fn main() { if !is_crossed { cmd.arg("--system-libs"); } - cmd.args(&components[..]); + cmd.args(&components); for lib in output(&mut cmd).split_whitespace() { let name = if lib.starts_with("-l") { diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index e1255110a83..04a8b88f8a5 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -669,7 +669,7 @@ impl<'a> CrateLoader<'a> { name, config::host_triple(), self.sess.opts.target_triple); - span_fatal!(self.sess, span, E0456, "{}", &message[..]); + span_fatal!(self.sess, span, E0456, "{}", &message); } let root = ekrate.metadata.get_root(); diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index a324c166e73..1370d69f904 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -918,14 +918,14 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { self.encode_fields(def_id); } hir::ItemImpl(..) => { - for &trait_item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] { + for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() { self.record(trait_item_def_id, EncodeContext::encode_info_for_impl_item, trait_item_def_id); } } hir::ItemTrait(..) => { - for &item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] 
{ + for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() { self.record(item_def_id, EncodeContext::encode_info_for_trait_item, item_def_id); diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index a6771083fc3..e8bc8b01652 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -477,15 +477,15 @@ impl<'a> Context<'a> { Some(file) => file, }; let (hash, found_kind) = - if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") { + if file.starts_with(&rlib_prefix) && file.ends_with(".rlib") { (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib) - } else if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rmeta") { + } else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") { (&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta) } else if file.starts_with(&dylib_prefix) && file.ends_with(&dypair.1) { (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib) } else { - if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) { + if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) { staticlibs.push(CrateMismatch { path: path.to_path_buf(), got: "static".to_string(), diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index 1bfc445fca9..efe9963cecc 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -126,19 +126,19 @@ impl<'a> PluginLoader<'a> { // inside this crate, so continue would spew "macro undefined" // errors Err(err) => { - self.sess.span_fatal(span, &err[..]) + self.sess.span_fatal(span, &err) } }; unsafe { let registrar = - match lib.symbol(&symbol[..]) { + match lib.symbol(&symbol) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros Err(err) => { - self.sess.span_fatal(span, &err[..]) + self.sess.span_fatal(span, &err) } }; diff --git 
a/src/librustc_save_analysis/csv_dumper.rs b/src/librustc_save_analysis/csv_dumper.rs index 59340ae87ee..4bab135ff12 100644 --- a/src/librustc_save_analysis/csv_dumper.rs +++ b/src/librustc_save_analysis/csv_dumper.rs @@ -423,7 +423,7 @@ fn make_values_str(pairs: &[(&'static str, &str)]) -> String { let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from(v)))); strs.fold(String::new(), |mut s, ss| { - s.push_str(&ss[..]); + s.push_str(&ss); s }) } diff --git a/src/librustc_trans/abi.rs b/src/librustc_trans/abi.rs index 27a19d211c2..1530708b4b8 100644 --- a/src/librustc_trans/abi.rs +++ b/src/librustc_trans/abi.rs @@ -369,7 +369,7 @@ impl FnType { match sig.inputs().last().unwrap().sty { ty::TyTuple(ref tupled_arguments, _) => { inputs = &sig.inputs()[0..sig.inputs().len() - 1]; - &tupled_arguments[..] + &tupled_arguments } _ => { bug!("argument to function with \"rust-call\" ABI \ diff --git a/src/librustc_trans/adt.rs b/src/librustc_trans/adt.rs index 058f37f62dd..5c1ced57340 100644 --- a/src/librustc_trans/adt.rs +++ b/src/librustc_trans/adt.rs @@ -229,11 +229,11 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, variant_fill].iter().cloned().collect(); match name { None => { - Type::struct_(cx, &fields[..], false) + Type::struct_(cx, &fields, false) } Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(&fields[..], false); + llty.set_struct_body(&fields, false); llty } } @@ -330,7 +330,7 @@ fn struct_wrapped_nullable_bitdiscr( alignment: Alignment, ) -> ValueRef { let llptrptr = bcx.gepi(scrutinee, - &discrfield.iter().map(|f| *f as usize).collect::>()[..]); + &discrfield.iter().map(|f| *f as usize).collect::>()); let llptr = bcx.load(llptrptr, alignment.to_align()); let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; bcx.icmp(cmp, llptr, C_null(val_ty(llptr))) @@ -402,7 +402,7 @@ pub fn trans_set_discr<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, val: Valu base::call_memset(bcx, llptr, fill_byte, 
size, align, false); } else { let path = discrfield.iter().map(|&i| i as usize).collect::>(); - let llptrptr = bcx.gepi(val, &path[..]); + let llptrptr = bcx.gepi(val, &path); let llptrty = val_ty(llptrptr).element_type(); bcx.store(C_null(llptrty), llptrptr, None); } diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index 12e4e57964f..b6195765b27 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -77,14 +77,14 @@ pub fn trans_inline_asm<'a, 'tcx>( .chain(arch_clobbers.iter().map(|s| s.to_string())) .collect::>().join(","); - debug!("Asm Constraints: {}", &all_constraints[..]); + debug!("Asm Constraints: {}", &all_constraints); // Depending on how many outputs we have, the return type is different let num_outputs = output_types.len(); let output_type = match num_outputs { 0 => Type::void(bcx.ccx), 1 => output_types[0], - _ => Type::struct_(bcx.ccx, &output_types[..], false) + _ => Type::struct_(bcx.ccx, &output_types, false) }; let dialect = match ia.dialect { diff --git a/src/librustc_trans/back/archive.rs b/src/librustc_trans/back/archive.rs index 11ab6dcaa87..0f908b7d069 100644 --- a/src/librustc_trans/back/archive.rs +++ b/src/librustc_trans/back/archive.rs @@ -65,10 +65,10 @@ pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session) for path in search_paths { debug!("looking for {} inside {:?}", name, path); - let test = path.join(&oslibname[..]); + let test = path.join(&oslibname); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(&unixlibname[..]); + let test = path.join(&unixlibname); if test.exists() { return test } } } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index cf1e10b317b..6d17b2f0eed 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -91,7 +91,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |s: String, span: Option| 
{ - cstore::validate_crate_name(sess, &s[..], span); + cstore::validate_crate_name(sess, &s, span); s }; @@ -109,7 +109,7 @@ pub fn find_crate_name(sess: Option<&Session>, let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, &msg[..]); + sess.span_err(attr.span, &msg); } } return validate(s.clone(), None); @@ -417,7 +417,7 @@ fn object_filenames(trans: &CrateTranslation, outputs: &OutputFilenames) -> Vec { trans.modules.iter().map(|module| { - outputs.temp_path(OutputType::Object, Some(&module.name[..])) + outputs.temp_path(OutputType::Object, Some(&module.name)) }).collect() } @@ -551,7 +551,7 @@ fn link_rlib<'a>(sess: &'a Session, e)) } - let bc_data_deflated = flate::deflate_bytes(&bc_data[..]); + let bc_data_deflated = flate::deflate_bytes(&bc_data); let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, @@ -819,12 +819,12 @@ fn link_natively(sess: &Session, pname, prog.status)) .note(&format!("{:?}", &cmd)) - .note(&escape_string(&output[..])) + .note(&escape_string(&output)) .emit(); sess.abort_if_errors(); } - info!("linker stderr:\n{}", escape_string(&prog.stderr[..])); - info!("linker stdout:\n{}", escape_string(&prog.stdout[..])); + info!("linker stderr:\n{}", escape_string(&prog.stderr)); + info!("linker stdout:\n{}", escape_string(&prog.stdout)); }, Err(e) => { sess.struct_err(&format!("could not exec the linker `{}`: {}", pname, e)) diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index 0ef3f351a2a..e23ddd2542a 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -61,7 +61,7 @@ pub fn run(sess: &session::Session, } let export_threshold = - symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]); + symbol_export::crates_export_threshold(&sess.crate_types.borrow()); let symbol_filter = &|&(ref name, level): &(String, _)| { if symbol_export::is_below_threshold(level, 
export_threshold) { @@ -147,7 +147,7 @@ pub fn run(sess: &session::Session, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic(), format!("failed to load bc of `{}`", - &name[..])); + name)); } }); } diff --git a/src/librustc_trans/back/rpath.rs b/src/librustc_trans/back/rpath.rs index 9c982be3fa0..104e7bc6a52 100644 --- a/src/librustc_trans/back/rpath.rs +++ b/src/librustc_trans/back/rpath.rs @@ -37,8 +37,8 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec { let libs = config.used_crates.clone(); let libs = libs.into_iter().filter_map(|(_, l)| l.option()).collect::>(); - let rpaths = get_rpaths(config, &libs[..]); - flags.extend_from_slice(&rpaths_to_flags(&rpaths[..])); + let rpaths = get_rpaths(config, &libs); + flags.extend_from_slice(&rpaths_to_flags(&rpaths)); // Use DT_RUNPATH instead of DT_RPATH if available if config.linker_is_gnu { @@ -84,14 +84,14 @@ fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec { } } - log_rpaths("relative", &rel_rpaths[..]); - log_rpaths("fallback", &fallback_rpaths[..]); + log_rpaths("relative", &rel_rpaths); + log_rpaths("fallback", &fallback_rpaths); let mut rpaths = rel_rpaths; - rpaths.extend_from_slice(&fallback_rpaths[..]); + rpaths.extend_from_slice(&fallback_rpaths); // Remove duplicates - let rpaths = minimize_rpaths(&rpaths[..]); + let rpaths = minimize_rpaths(&rpaths); return rpaths; } @@ -177,7 +177,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths { - if set.insert(&rpath[..]) { + if set.insert(rpath) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_trans/back/symbol_export.rs b/src/librustc_trans/back/symbol_export.rs index 005fb3533ab..23a67ef5046 100644 --- a/src/librustc_trans/back/symbol_export.rs +++ b/src/librustc_trans/back/symbol_export.rs @@ -154,7 +154,7 @@ impl ExportedSymbols { cnum: CrateNum) -> &[(String, SymbolExportLevel)] { match self.exports.get(&cnum) { - 
Some(exports) => &exports[..], + Some(exports) => exports, None => &[] } } @@ -167,7 +167,7 @@ impl ExportedSymbols { { for &(ref name, export_level) in self.exported_symbols(cnum) { if is_below_threshold(export_level, export_threshold) { - f(&name[..], export_level) + f(&name, export_level) } } } diff --git a/src/librustc_trans/back/symbol_names.rs b/src/librustc_trans/back/symbol_names.rs index 518995dfedc..3ad04e10cb0 100644 --- a/src/librustc_trans/back/symbol_names.rs +++ b/src/librustc_trans/back/symbol_names.rs @@ -341,7 +341,7 @@ pub fn sanitize(s: &str) -> String { if !result.is_empty() && result.as_bytes()[0] != '_' as u8 && ! (result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", &result[..]); + return format!("_{}", result); } return result; diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 377ff34cb7e..5a017e4fb8a 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -105,7 +105,7 @@ impl SharedEmitter { Some(ref code) => { handler.emit_with_code(&MultiSpan::new(), &diag.msg, - &code[..], + &code, diag.lvl); }, None => { @@ -189,8 +189,8 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => &s[..], - None => &sess.target.target.options.code_model[..], + Some(ref s) => &s, + None => &sess.target.target.options.code_model, }; let code_model = match CODE_GEN_MODEL_ARGS.iter().find( @@ -397,7 +397,7 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef, let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s)) .expect("non-UTF8 SMDiagnostic"); - report_inline_asm(cgcx, &msg[..], cookie); + report_inline_asm(cgcx, &msg, cookie); } unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) { @@ -823,7 +823,7 @@ pub fn run_passes(sess: &Session, if trans.modules.len() == 1 { // 1) Only 
one codegen unit. In this case it's no difficulty // to copy `foo.0.x` to `foo.x`. - let module_name = Some(&(trans.modules[0].name)[..]); + let module_name = Some(&trans.modules[0].name[..]); let path = crate_output.temp_path(output_type, module_name); copy_gracefully(&path, &crate_output.path(output_type)); @@ -939,7 +939,7 @@ pub fn run_passes(sess: &Session, if metadata_config.emit_bc && !user_wants_bitcode { let path = crate_output.temp_path(OutputType::Bitcode, - Some(&trans.metadata_module.name[..])); + Some(&trans.metadata_module.name)); remove(sess, &path); } } diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index f7ca468fdda..ec45c559363 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -514,7 +514,7 @@ pub fn call_memcpy<'a, 'tcx>(b: &Builder<'a, 'tcx>, n_bytes: ValueRef, align: u32) { let ccx = b.ccx; - let ptr_width = &ccx.sess().target.target.target_pointer_width[..]; + let ptr_width = &ccx.sess().target.target.target_pointer_width; let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width); let memcpy = ccx.get_intrinsic(&key); let src_ptr = b.pointercast(src, Type::i8p(ccx)); @@ -550,7 +550,7 @@ pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>, size: ValueRef, align: ValueRef, volatile: bool) -> ValueRef { - let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..]; + let ptr_width = &b.ccx.sess().target.target.target_pointer_width; let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width); let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key); let volatile = C_bool(b.ccx, volatile); @@ -765,7 +765,7 @@ fn write_metadata(cx: &SharedCrateContext, let mut compressed = cstore.metadata_encoding_version().to_vec(); compressed.extend_from_slice(&flate::deflate_bytes(&metadata)); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); 
let name = cx.metadata_symbol_name(); let buf = CString::new(name).unwrap(); @@ -796,7 +796,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session, symbol_map: &SymbolMap<'tcx>, exported_symbols: &ExportedSymbols) { let export_threshold = - symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]); + symbol_export::crates_export_threshold(&sess.crate_types.borrow()); let exported_symbols = exported_symbols .exported_symbols(LOCAL_CRATE) @@ -1035,7 +1035,7 @@ pub fn find_exported_symbols(tcx: TyCtxt, reachable: NodeSet) -> NodeSet { (generics.parent_types == 0 && generics.types.is_empty()) && // Functions marked with #[inline] are only ever translated // with "internal" linkage and are never exported. - !attr::requests_inline(&attributes[..]) + !attr::requests_inline(&attributes) } _ => false @@ -1574,7 +1574,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a cgus.dedup(); for &(ref cgu_name, linkage) in cgus.iter() { output.push_str(" "); - output.push_str(&cgu_name[..]); + output.push_str(&cgu_name); let linkage_abbrev = match linkage { llvm::Linkage::ExternalLinkage => "External", diff --git a/src/librustc_trans/builder.rs b/src/librustc_trans/builder.rs index a62f07042a7..8b1010d89fd 100644 --- a/src/librustc_trans/builder.rs +++ b/src/librustc_trans/builder.rs @@ -627,7 +627,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, &v[..]) + self.inbounds_gep(base, &v) } } @@ -835,8 +835,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", &s[..]); - self.add_comment(&s[..]); + debug!("{}", s); + self.add_comment(&s); } } diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 5c94032c6b9..612e765a499 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -2611,7 +2611,7 @@ 
fn render_attribute(attr: &ast::MetaItem) -> Option { if attr.is_word() { Some(format!("{}", name)) } else if let Some(v) = attr.value_str() { - Some(format!("{} = {:?}", name, &v.as_str()[..])) + Some(format!("{} = {:?}", name, v.as_str())) } else if let Some(values) = attr.meta_item_list() { let display: Vec<_> = values.iter().filter_map(|attr| { attr.meta_item().and_then(|mi| render_attribute(mi)) @@ -2642,7 +2642,7 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result { for attr in &it.attrs.other_attrs { let name = attr.name().unwrap(); - if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) { + if !ATTRIBUTE_WHITELIST.contains(&&*name.as_str()) { continue; } if let Some(s) = render_attribute(&attr.meta().unwrap()) { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 021c5398a42..66f5520b882 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -119,7 +119,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, }; let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false); p.root_module_name = cx.current_expansion.module.mod_path.last() - .map(|id| (*id.name.as_str()).to_owned()); + .map(|id| id.name.as_str().to_string()); p.check_unknown_macro_variable(); // Let the context choose how to interpret the result. diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 7af432176cf..9d280a413e6 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -818,7 +818,7 @@ pub struct GatedCfg { impl GatedCfg { pub fn gate(cfg: &ast::MetaItem) -> Option { - let name = &*cfg.name().as_str(); + let name = cfg.name().as_str(); GATED_CFGS.iter() .position(|info| info.0 == name) .map(|idx| { @@ -865,8 +865,7 @@ macro_rules! 
gate_feature { impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); - let name = unwrap_or!(attr.name(), return); - + let name = unwrap_or!(attr.name(), return).as_str(); for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { if name == n { if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { @@ -885,12 +884,12 @@ impl<'a> Context<'a> { return; } } - if name.as_str().starts_with("rustc_") { + if name.starts_with("rustc_") { gate_feature!(self, rustc_attrs, attr.span, "unless otherwise specified, attributes \ with the prefix `rustc_` \ are reserved for internal compiler diagnostics"); - } else if name.as_str().starts_with("derive_") { + } else if name.starts_with("derive_") { gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE); } else if !attr::is_known(attr) { // Only run the custom attribute lint during regular diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 649e9059934..43a9d8c5f78 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5151,15 +5151,15 @@ impl<'a> Parser<'a> { fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) { if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") { - self.directory.path.push(&*path.as_str()); + self.directory.path.push(&path.as_str()); self.directory.ownership = DirectoryOwnership::Owned; } else { - self.directory.path.push(&*id.name.as_str()); + self.directory.path.push(&id.name.as_str()); } } pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option { - attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str())) + attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&d.as_str())) } /// Returns either a path to a module, or . 
diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index e052d2cda3a..6fb6db9ca02 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -616,7 +616,7 @@ fn mk_tests(cx: &TestCtxt) -> P { fn is_test_crate(krate: &ast::Crate) -> bool { match attr::find_crate_name(&krate.attrs) { - Some(s) if "test" == &*s.as_str() => true, + Some(s) if "test" == s.as_str() => true, _ => false } } -- cgit 1.4.1-3-g733a5 From c963d613a2275d5c9b31cd7124dda2f2af61deb6 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Mon, 27 Mar 2017 17:15:16 -0700 Subject: Simplify error output --- src/libsyntax/parse/parser.rs | 17 ++++++----------- src/test/ui/did_you_mean/issue-40006.stderr | 10 +++------- 2 files changed, 9 insertions(+), 18 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index a19339f8cc1..2603b3302c6 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -4708,26 +4708,21 @@ impl<'a> Parser<'a> { if let Err(mut bang_err) = bang_err { // Given this code `pub path(`, it seems like this is not setting the // visibility of a macro invocation, but rather a mistyped method declaration. - // Keep the macro diagnostic, but also provide a hint that `fn` might be - // missing. Don't complain about the missing `!` as a separate diagnostic, add - // label in the appropriate place as part of one unified diagnostic. + // Create a diagnostic pointing out that `fn` is missing. // // x | pub path(&self) { - // | ^^^- - expected `!` here for a macro invocation - // | | - // | did you mean to write `fn` here for a method declaration? 
+ // | ^ missing `fn` for method declaration + err.cancel(); bang_err.cancel(); - err.span_label(self.span, &"expected `!` here for a macro invocation"); // pub path( // ^^ `sp` below will point to this let sp = mk_sp(prev_span.hi, self.prev_span.lo); - err.span_label(sp, - &"did you mean to write `fn` here for a method declaration?"); + err = self.diagnostic() + .struct_span_err(sp, "missing `fn` for method declaration"); + err.span_label(sp, &"missing `fn`"); } return Err(err); - } else if let Err(bang_err) = bang_err { - return Err(bang_err); } // eat a matched-delimiter token tree: diff --git a/src/test/ui/did_you_mean/issue-40006.stderr b/src/test/ui/did_you_mean/issue-40006.stderr index 93a0c58f91a..460958027ad 100644 --- a/src/test/ui/did_you_mean/issue-40006.stderr +++ b/src/test/ui/did_you_mean/issue-40006.stderr @@ -1,12 +1,8 @@ -error: can't qualify macro invocation with `pub` - --> $DIR/issue-40006.rs:14:5 +error: missing `fn` for method declaration + --> $DIR/issue-40006.rs:14:8 | 14 | pub hello_method(&self) { - | ^^^- - expected `!` here for a macro invocation - | | - | did you mean to write `fn` here for a method declaration? 
- | - = help: try adjusting the macro to put `pub` inside the invocation + | ^ missing `fn` error: aborting due to previous error -- cgit 1.4.1-3-g733a5 From b477682dca3343eb89a467f0d3c73986a53d49d9 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Sat, 25 Mar 2017 19:06:19 -0700 Subject: Fix unittests --- src/libsyntax/parse/parser.rs | 2 +- src/libsyntax_pos/lib.rs | 4 ++-- src/test/ui/token/bounds-obj-parens.stderr | 1 + src/test/ui/token/issue-10636-2.stderr | 1 + src/test/ui/token/macro-incomplete-parse.stderr | 1 + src/test/ui/token/trailing-plus-in-bounds.stderr | 3 ++- 6 files changed, 8 insertions(+), 4 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 4076368c180..8177d738dc8 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -571,7 +571,7 @@ impl<'a> Parser<'a> { label_sp }; err.span_label(sp, &label_exp); - if label_sp != self.span { + if !sp.source_equal(&self.span) { err.span_label(self.span, &"unexpected token"); } Err(err) diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 07494ff904e..0662c1c9cfd 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -81,8 +81,8 @@ impl Span { /// Returns a new span representing the next character after the end-point of this span pub fn next_point(self) -> Span { - let lo = BytePos(cmp::max(self.hi.0, self.lo.0 + 1)); - Span { lo: lo, hi: lo, expn_id: self.expn_id} + let lo = cmp::max(self.hi.0, self.lo.0 + 1); + Span { lo: BytePos(lo), hi: BytePos(lo + 1), expn_id: self.expn_id} } /// Returns `self` if `self` is not the dummy span, and `other` otherwise. 
diff --git a/src/test/ui/token/bounds-obj-parens.stderr b/src/test/ui/token/bounds-obj-parens.stderr index ebee363f278..4d60be15eca 100644 --- a/src/test/ui/token/bounds-obj-parens.stderr +++ b/src/test/ui/token/bounds-obj-parens.stderr @@ -5,3 +5,4 @@ error: expected one of `!` or `::`, found `` | ^^^^ expected one of `!` or `::` here error: aborting due to previous error + diff --git a/src/test/ui/token/issue-10636-2.stderr b/src/test/ui/token/issue-10636-2.stderr index 183ad30c4ef..b0bae1248b9 100644 --- a/src/test/ui/token/issue-10636-2.stderr +++ b/src/test/ui/token/issue-10636-2.stderr @@ -25,3 +25,4 @@ error: expected expression, found `)` error: main function not found error: aborting due to 4 previous errors + diff --git a/src/test/ui/token/macro-incomplete-parse.stderr b/src/test/ui/token/macro-incomplete-parse.stderr index bea00a6444c..f23d97586b8 100644 --- a/src/test/ui/token/macro-incomplete-parse.stderr +++ b/src/test/ui/token/macro-incomplete-parse.stderr @@ -29,3 +29,4 @@ note: caused by the macro expansion here; the usage of `ignored_pat!` is likely | ^^^^^^^^^^^^^^ error: aborting due to 3 previous errors + diff --git a/src/test/ui/token/trailing-plus-in-bounds.stderr b/src/test/ui/token/trailing-plus-in-bounds.stderr index 74caf8f5c2b..c765a434b8a 100644 --- a/src/test/ui/token/trailing-plus-in-bounds.stderr +++ b/src/test/ui/token/trailing-plus-in-bounds.stderr @@ -1,7 +1,8 @@ error: expected one of `!` or `::`, found `` - --> ../../src/test/ui/token/trailing-plus-in-bounds.rs:19:1 + --> $DIR/trailing-plus-in-bounds.rs:19:1 | 19 | FAIL | ^^^^ expected one of `!` or `::` here error: aborting due to previous error + -- cgit 1.4.1-3-g733a5 From ec7c0aece17c9a11bc2eca15b994355a161bf878 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 17 Mar 2017 04:04:41 +0000 Subject: Merge `ExpnId` and `SyntaxContext`. 
--- src/librustc/hir/lowering.rs | 7 +- src/librustc/hir/mod.rs | 5 +- src/librustc/ich/caching_codemap_view.rs | 4 - src/librustc/middle/region.rs | 2 +- src/librustc/middle/stability.rs | 2 +- src/librustc_driver/driver.rs | 4 +- src/librustc_errors/emitter.rs | 22 +- src/librustc_errors/lib.rs | 4 +- .../calculate_svh/svh_visitor.rs | 17 +- src/librustc_mir/transform/qualify_consts.rs | 4 +- src/librustc_plugin/load.rs | 4 +- src/librustc_save_analysis/lib.rs | 7 +- src/librustc_save_analysis/span_utils.rs | 3 +- src/librustc_trans/asm.rs | 4 +- src/librustc_trans/back/write.rs | 6 +- src/librustc_trans/mir/mod.rs | 18 +- src/librustc_typeck/check/mod.rs | 5 +- src/libsyntax/ast.rs | 57 +-- src/libsyntax/codemap.rs | 291 +-------------- src/libsyntax/ext/base.rs | 74 ++-- src/libsyntax/ext/derive.rs | 50 +-- src/libsyntax/ext/expand.rs | 111 +++--- src/libsyntax/ext/source_util.rs | 2 +- src/libsyntax/ext/tt/quoted.rs | 14 +- src/libsyntax/feature_gate.rs | 20 +- src/libsyntax/json.rs | 2 +- src/libsyntax/lib.rs | 2 +- src/libsyntax/parse/parser.rs | 6 +- src/libsyntax/std_inject.rs | 21 +- src/libsyntax/symbol.rs | 342 ------------------ src/libsyntax/test.rs | 21 +- src/libsyntax/test_snippet.rs | 2 +- src/libsyntax/tokenstream.rs | 16 +- src/libsyntax_ext/asm.rs | 12 +- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/debug.rs | 4 +- src/libsyntax_ext/deriving/generic/mod.rs | 12 +- src/libsyntax_ext/deriving/mod.rs | 34 +- src/libsyntax_ext/format.rs | 3 +- src/libsyntax_ext/proc_macro_registrar.rs | 6 +- src/libsyntax_pos/hygiene.rs | 94 ++++- src/libsyntax_pos/lib.rs | 101 ++++-- src/libsyntax_pos/symbol.rs | 389 +++++++++++++++++++++ src/test/compile-fail-fulldeps/qquote.rs | 8 - src/test/run-fail-fulldeps/qquote.rs | 8 - src/test/run-pass-fulldeps/qquote.rs | 8 - 47 files changed, 793 insertions(+), 1039 deletions(-) delete mode 100644 src/libsyntax/symbol.rs create mode 100644 
src/libsyntax_pos/symbol.rs (limited to 'src/libsyntax/parse') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 6ca0c971ea4..786145f3091 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -57,6 +57,7 @@ use std::mem; use syntax::attr; use syntax::ast::*; use syntax::errors; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ptr::P; use syntax::codemap::{self, respan, Spanned}; use syntax::std_inject; @@ -392,7 +393,8 @@ impl<'a> LoweringContext<'a> { } fn allow_internal_unstable(&self, reason: &'static str, mut span: Span) -> Span { - span.expn_id = self.sess.codemap().record_expansion(codemap::ExpnInfo { + let mark = Mark::fresh(); + mark.set_expn_info(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { format: codemap::CompilerDesugaring(Symbol::intern(reason)), @@ -400,6 +402,7 @@ impl<'a> LoweringContext<'a> { allow_internal_unstable: true, }, }); + span.ctxt = SyntaxContext::empty().apply_mark(mark); span } @@ -1986,7 +1989,7 @@ impl<'a> LoweringContext<'a> { volatile: asm.volatile, alignstack: asm.alignstack, dialect: asm.dialect, - expn_id: asm.expn_id, + ctxt: asm.ctxt, }; let outputs = asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(); diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index f4f2f4cf921..da7e71ac07d 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -33,11 +33,12 @@ use hir::def::Def; use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; use util::nodemap::{NodeMap, FxHashSet}; -use syntax_pos::{Span, ExpnId, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use syntax::codemap::{self, Spanned}; use syntax::abi::Abi; use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; +use syntax::ext::hygiene::SyntaxContext; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; use syntax::tokenstream::TokenStream; @@ -1367,7 
+1368,7 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub expn_id: ExpnId, + pub ctxt: SyntaxContext, } /// represents an argument in a function header diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index a71251eedf5..1278d9f5171 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -47,10 +47,6 @@ impl<'tcx> CachingCodemapView<'tcx> { } } - pub fn codemap(&self) -> &'tcx CodeMap { - self.codemap - } - pub fn byte_pos_to_line_and_col(&mut self, pos: BytePos) -> Option<(Rc, usize, BytePos)> { diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index a19f15a9329..0676075930d 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -236,7 +236,7 @@ impl CodeExtent { // (This is the special case aluded to in the // doc-comment for this method) let stmt_span = blk.stmts[r.first_statement_index as usize].span; - Some(Span { lo: stmt_span.hi, hi: blk.span.hi, expn_id: stmt_span.expn_id }) + Some(Span { lo: stmt_span.hi, hi: blk.span.hi, ctxt: stmt_span.ctxt }) } } } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 4354ed6817a..2b5ea61d4e8 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -467,7 +467,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn check_stability(self, def_id: DefId, id: NodeId, span: Span) { - if self.sess.codemap().span_allows_unstable(span) { + if span.allows_unstable() { debug!("stability: \ skipping span={:?} since it is internal", span); return; diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 4873b21c548..977382b33ad 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -580,7 +580,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, krate = time(time_passes, "crate injection", || { let alt_std_name = 
sess.opts.alt_std_name.clone(); - syntax::std_inject::maybe_inject_crates_ref(&sess.parse_sess, krate, alt_std_name) + syntax::std_inject::maybe_inject_crates_ref(krate, alt_std_name) }); let mut addl_plugins = Some(addl_plugins); @@ -798,7 +798,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, // Discard hygiene data, which isn't required after lowering to HIR. if !keep_hygiene_data(sess) { - syntax::ext::hygiene::reset_hygiene_data(); + syntax::ext::hygiene::clear_markings(); } Ok(ExpansionResult { diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 431edb3c9bc..367b85ac726 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -10,7 +10,7 @@ use self::Destination::*; -use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, CharPos}; +use syntax_pos::{DUMMY_SP, FileMap, Span, MultiSpan, CharPos}; use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, CodeMapper}; use RenderSpan::*; @@ -151,7 +151,7 @@ impl EmitterWriter { if let Some(ref cm) = self.cm { for span_label in msp.span_labels() { - if span_label.span == DUMMY_SP || span_label.span == COMMAND_LINE_SP { + if span_label.span == DUMMY_SP { continue; } let lo = cm.lookup_char_pos(span_label.span.lo); @@ -615,7 +615,7 @@ impl EmitterWriter { let mut max = 0; if let Some(ref cm) = self.cm { for primary_span in msp.primary_spans() { - if primary_span != &DUMMY_SP && primary_span != &COMMAND_LINE_SP { + if primary_span != &DUMMY_SP { let hi = cm.lookup_char_pos(primary_span.hi); if hi.line > max { max = hi.line; @@ -623,7 +623,7 @@ impl EmitterWriter { } } for span_label in msp.span_labels() { - if span_label.span != DUMMY_SP && span_label.span != COMMAND_LINE_SP { + if span_label.span != DUMMY_SP { let hi = cm.lookup_char_pos(span_label.span.hi); if hi.line > max { max = hi.line; @@ -659,20 +659,20 @@ impl EmitterWriter { // First, find all the spans in <*macros> and point instead at their use site for sp in 
span.primary_spans() { - if (*sp == COMMAND_LINE_SP) || (*sp == DUMMY_SP) { + if *sp == DUMMY_SP { continue; } if cm.span_to_filename(sp.clone()).contains("macros>") { - let v = cm.macro_backtrace(sp.clone()); + let v = sp.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp.clone(), use_site.call_site.clone())); } } - for trace in cm.macro_backtrace(sp.clone()).iter().rev() { + for trace in sp.macro_backtrace().iter().rev() { // Only show macro locations that are local // and display them like a span_note if let Some(def_site) = trace.def_site_span { - if (def_site == COMMAND_LINE_SP) || (def_site == DUMMY_SP) { + if def_site == DUMMY_SP { continue; } // Check to make sure we're not in any <*macros> @@ -689,11 +689,11 @@ impl EmitterWriter { span.push_span_label(label_span, label_text); } for sp_label in span.span_labels() { - if (sp_label.span == COMMAND_LINE_SP) || (sp_label.span == DUMMY_SP) { + if sp_label.span == DUMMY_SP { continue; } if cm.span_to_filename(sp_label.span.clone()).contains("macros>") { - let v = cm.macro_backtrace(sp_label.span.clone()); + let v = sp_label.span.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp_label.span.clone(), use_site.call_site.clone())); } @@ -848,7 +848,7 @@ impl EmitterWriter { // Make sure our primary file comes first let primary_lo = if let (Some(ref cm), Some(ref primary_span)) = (self.cm.as_ref(), msp.primary_span().as_ref()) { - if primary_span != &&DUMMY_SP && primary_span != &&COMMAND_LINE_SP { + if primary_span != &&DUMMY_SP { cm.lookup_char_pos(primary_span.lo) } else { emit_to_destination(&buffer.render(), level, &mut self.dst)?; diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 4c889dad8ca..2efdaa57fba 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -48,7 +48,6 @@ pub mod styled_buffer; mod lock; use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION}; -use 
syntax_pos::MacroBacktrace; #[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)] pub enum RenderSpan { @@ -75,7 +74,6 @@ pub trait CodeMapper { fn span_to_lines(&self, sp: Span) -> FileLinesResult; fn span_to_string(&self, sp: Span) -> String; fn span_to_filename(&self, sp: Span) -> FileName; - fn macro_backtrace(&self, span: Span) -> Vec; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option; } @@ -120,7 +118,7 @@ impl CodeSuggestion { let bounding_span = Span { lo: lo, hi: hi, - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }; let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index 210803c3f32..5401b371888 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -17,9 +17,10 @@ use self::SawTraitOrImplItemComponent::*; use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; +use syntax::ext::hygiene::SyntaxContext; use syntax::parse::token; use syntax::symbol::InternedString; -use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; +use syntax_pos::{Span, BytePos}; use syntax::tokenstream; use rustc::hir; use rustc::hir::*; @@ -92,10 +93,10 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { span.hi }; - let expn_kind = match span.expn_id { - NO_EXPANSION => SawSpanExpnKind::NoExpansion, - COMMAND_LINE_EXPN => SawSpanExpnKind::CommandLine, - _ => SawSpanExpnKind::SomeExpansion, + let expn_kind = if span.ctxt == SyntaxContext::empty() { + SawSpanExpnKind::NoExpansion + } else { + SawSpanExpnKind::SomeExpansion }; let loc1 = self.codemap.byte_pos_to_line_and_col(span.lo); @@ -121,8 +122,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { saw.hash(self.st); if expn_kind == SawSpanExpnKind::SomeExpansion { - let call_site = 
self.codemap.codemap().source_callsite(span); - self.hash_span(call_site); + self.hash_span(span.source_callsite()); } } @@ -483,7 +483,6 @@ fn saw_impl_item(ii: &ImplItemKind) -> SawTraitOrImplItemComponent { #[derive(Clone, Copy, Hash, Eq, PartialEq)] enum SawSpanExpnKind { NoExpansion, - CommandLine, SomeExpansion, } @@ -501,7 +500,7 @@ impl<'a> Hash for StableInlineAsm<'a> { volatile, alignstack, dialect, - expn_id: _, // This is used for error reporting + ctxt: _, // This is used for error reporting } = *self.0; asm.as_str().hash(state); diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index ba42804c926..9d236bd013c 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -223,7 +223,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } // This comes from a macro that has #[allow_internal_unstable]. - if self.tcx.sess.codemap().span_allows_unstable(self.span) { + if self.span.allows_unstable() { return; } @@ -805,7 +805,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { self.def_id.is_local() && // this doesn't come from a macro that has #[allow_internal_unstable] - !self.tcx.sess.codemap().span_allows_unstable(self.span) + !self.span.allows_unstable() { let mut err = self.tcx.sess.struct_span_err(self.span, "const fns are an unstable feature"); diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index efe9963cecc..e884f3bdbb1 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -20,7 +20,7 @@ use std::env; use std::mem; use std::path::PathBuf; use syntax::ast; -use syntax_pos::{Span, COMMAND_LINE_SP}; +use syntax_pos::{Span, DUMMY_SP}; /// Pointer to a registrar function. 
pub type PluginRegistrarFun = @@ -81,7 +81,7 @@ pub fn load_plugins(sess: &Session, if let Some(plugins) = addl_plugins { for plugin in plugins { - loader.load_plugin(COMMAND_LINE_SP, &plugin, vec![]); + loader.load_plugin(DUMMY_SP, &plugin, vec![]); } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index e5c04f6b61e..fd6803e087a 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -690,9 +690,8 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { // Note we take care to use the source callsite/callee, to handle // nested expansions and ensure we only generate data for source-visible // macro uses. - let callsite = self.tcx.sess.codemap().source_callsite(span); - let callee = self.tcx.sess.codemap().source_callee(span); - let callee = option_try!(callee); + let callsite = span.source_callsite(); + let callee = option_try!(span.source_callee()); let callee_span = option_try!(callee.span); // Ignore attribute macros, their spans are usually mangled @@ -1013,5 +1012,5 @@ fn escape(s: String) -> String { // Helper function to determine if a span came from a // macro expansion or syntax extension. pub fn generated_code(span: Span) -> bool { - span.expn_id != NO_EXPANSION || span == DUMMY_SP + span.ctxt != NO_EXPANSION || span == DUMMY_SP } diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 34402742e6c..c19f805a285 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -462,8 +462,7 @@ impl<'a> SpanUtils<'a> { // Otherwise, a generated span is deemed invalid if it is not a sub-span of the root // callsite. This filters out macro internal variables and most malformed spans. 
- let span = self.sess.codemap().source_callsite(parent); - !(span.contains(parent)) + !parent.source_callsite().contains(parent) } } diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index b6195765b27..3e270b7928e 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -111,14 +111,14 @@ pub fn trans_inline_asm<'a, 'tcx>( bcx.store(v, val, None); } - // Store expn_id in a metadata node so we can map LLVM errors + // Store mark in a metadata node so we can map LLVM errors // back to source locations. See #17552. unsafe { let key = "srcloc"; let kind = llvm::LLVMGetMDKindIDInContext(bcx.ccx.llcx(), key.as_ptr() as *const c_char, key.len() as c_uint); - let val: llvm::ValueRef = C_i32(bcx.ccx, ia.expn_id.into_u32() as i32); + let val: llvm::ValueRef = C_i32(bcx.ccx, ia.ctxt.outer().as_u32() as i32); llvm::LLVMSetMetadata(r, kind, llvm::LLVMMDNodeInContext(bcx.ccx.llcx(), &val, 1)); diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 5a017e4fb8a..ccb3f7ac882 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -371,14 +371,14 @@ struct HandlerFreeVars<'a> { unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>, msg: &'b str, cookie: c_uint) { - use syntax_pos::ExpnId; + use syntax::ext::hygiene::Mark; match cgcx.lto_ctxt { Some((sess, _)) => { - sess.codemap().with_expn_info(ExpnId::from_u32(cookie), |info| match info { + match Mark::from_u32(cookie).expn_info() { Some(ei) => sess.span_err(ei.call_site, msg), None => sess.err(msg), - }); + }; } None => { diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 6419f41f86b..21bbbea77d4 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -26,7 +26,7 @@ use monomorphize::{self, Instance}; use abi::FnType; use type_of; -use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos, Span}; +use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, 
Span}; use syntax::symbol::keywords; use std::iter; @@ -124,24 +124,18 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { // In order to have a good line stepping behavior in debugger, we overwrite debug // locations of macro expansions with that of the outermost expansion site // (unless the crate is being compiled with `-Z debug-macros`). - if source_info.span.expn_id == NO_EXPANSION || - source_info.span.expn_id == COMMAND_LINE_EXPN || - self.ccx.sess().opts.debugging_opts.debug_macros { - + if source_info.span.ctxt == NO_EXPANSION || + self.ccx.sess().opts.debugging_opts.debug_macros { let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo); (scope, source_info.span) } else { - let cm = self.ccx.sess().codemap(); // Walk up the macro expansion chain until we reach a non-expanded span. // We also stop at the function body level because no line stepping can occurr // at the level above that. let mut span = source_info.span; - while span.expn_id != NO_EXPANSION && - span.expn_id != COMMAND_LINE_EXPN && - span.expn_id != self.mir.span.expn_id { - if let Some(callsite_span) = cm.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - span = callsite_span; + while span.ctxt != NO_EXPANSION && span.ctxt != self.mir.span.ctxt { + if let Some(info) = span.ctxt.outer().expn_info() { + span = info.call_site; } else { break; } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 9c62fd486d4..b95e01f4ff6 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -4161,12 +4161,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } if let Some(last_stmt) = extra_semi { - let original_span = original_sp(self.tcx.sess.codemap(), - last_stmt.span, blk.span); + let original_span = original_sp(last_stmt.span, blk.span); let span_semi = Span { lo: original_span.hi - BytePos(1), hi: original_span.hi, - expn_id: original_span.expn_id + ctxt: original_span.ctxt, }; 
err.span_help(span_semi, "consider removing this semicolon:"); } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 7e2b225193f..a4bebd311de 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,10 +14,10 @@ pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; -pub use symbol::Symbol as Name; +pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; -use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP, ExpnId}; +use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; @@ -27,61 +27,12 @@ use rustc_data_structures::indexed_vec; use symbol::{Symbol, keywords}; use tokenstream::{ThinTokenStream, TokenStream}; +use serialize::{self, Encoder, Decoder}; use std::collections::HashSet; use std::fmt; use std::rc::Rc; use std::u32; -use serialize::{self, Encodable, Decodable, Encoder, Decoder}; - -/// An identifier contains a Name (index into the interner -/// table) and a SyntaxContext to track renaming and -/// macro expansion per Flatt et al., "Macros That Work Together" -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct Ident { - pub name: Symbol, - pub ctxt: SyntaxContext -} - -impl Ident { - pub const fn with_empty_ctxt(name: Name) -> Ident { - Ident { name: name, ctxt: SyntaxContext::empty() } - } - - /// Maps a string to an identifier with an empty syntax context. 
- pub fn from_str(s: &str) -> Ident { - Ident::with_empty_ctxt(Symbol::intern(s)) - } - - pub fn unhygienize(&self) -> Ident { - Ident { name: self.name, ctxt: SyntaxContext::empty() } - } -} - -impl fmt::Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}{:?}", self.name, self.ctxt) - } -} - -impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.name, f) - } -} - -impl Encodable for Ident { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - self.name.encode(s) - } -} - -impl Decodable for Ident { - fn decode(d: &mut D) -> Result { - Ok(Ident::with_empty_ctxt(Name::decode(d)?)) - } -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, @@ -1445,7 +1396,7 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub expn_id: ExpnId, + pub ctxt: SyntaxContext, } /// An argument in a function header. diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 388f3cb7323..ba199eacb62 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -17,6 +17,8 @@ //! within the CodeMap, which upon request can be converted to line and column //! information, source code snippets, etc. +pub use syntax_pos::*; +pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo, NameAndSpan}; pub use self::ExpnFormat::*; use std::cell::RefCell; @@ -26,35 +28,21 @@ use std::rc::Rc; use std::env; use std::fs; use std::io::{self, Read}; -pub use syntax_pos::*; use errors::CodeMapper; -use ast::Name; - /// Return the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. 
-pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span { - let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site)); - let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site)); +pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { + let call_site1 = sp.ctxt.outer().expn_info().map(|ei| ei.call_site); + let call_site2 = enclosing_sp.ctxt.outer().expn_info().map(|ei| ei.call_site); match (call_site1, call_site2) { (None, _) => sp, (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, - (Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp), + (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), } } -/// The source of expansion. -#[derive(Clone, Hash, Debug, PartialEq, Eq)] -pub enum ExpnFormat { - /// e.g. #[derive(...)] - MacroAttribute(Name), - /// e.g. `format!()` - MacroBang(Name), - /// Desugaring done by the compiler during HIR lowering. - CompilerDesugaring(Name) -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct Spanned { pub node: T, @@ -73,47 +61,6 @@ pub fn dummy_spanned(t: T) -> Spanned { respan(DUMMY_SP, t) } -#[derive(Clone, Hash, Debug)] -pub struct NameAndSpan { - /// The format with which the macro was invoked. - pub format: ExpnFormat, - /// Whether the macro is allowed to use #[unstable]/feature-gated - /// features internally without forcing the whole crate to opt-in - /// to them. - pub allow_internal_unstable: bool, - /// The span of the macro definition itself. The macro may not - /// have a sensible definition span (e.g. something defined - /// completely inside libsyntax) in which case this is None. 
- pub span: Option -} - -impl NameAndSpan { - pub fn name(&self) -> Name { - match self.format { - ExpnFormat::MacroAttribute(s) | - ExpnFormat::MacroBang(s) | - ExpnFormat::CompilerDesugaring(s) => s, - } - } -} - -/// Extra information for tracking spans of macro and syntax sugar expansion -#[derive(Hash, Debug)] -pub struct ExpnInfo { - /// The location of the actual macro invocation or syntax sugar , e.g. - /// `let x = foo!();` or `if let Some(y) = x {}` - /// - /// This may recursively refer to other macro invocations, e.g. if - /// `foo!()` invoked `bar!()` internally, and there was an - /// expression inside `bar!`; the call_site of the expression in - /// the expansion would point to the `bar!` invocation; that - /// call_site span would have its own ExpnInfo, with the call_site - /// pointing to the `foo!` invocation. - pub call_site: Span, - /// Information about the expansion. - pub callee: NameAndSpan -} - // _____________________________________________________________________________ // FileMap, MultiByteChar, FileName, FileLines // @@ -161,7 +108,6 @@ impl FileLoader for RealFileLoader { pub struct CodeMap { pub files: RefCell>>, - expansions: RefCell>, file_loader: Box } @@ -169,7 +115,6 @@ impl CodeMap { pub fn new() -> CodeMap { CodeMap { files: RefCell::new(Vec::new()), - expansions: RefCell::new(Vec::new()), file_loader: Box::new(RealFileLoader) } } @@ -177,7 +122,6 @@ impl CodeMap { pub fn with_file_loader(file_loader: Box) -> CodeMap { CodeMap { files: RefCell::new(Vec::new()), - expansions: RefCell::new(Vec::new()), file_loader: file_loader } } @@ -353,14 +297,14 @@ impl CodeMap { /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If /// there are gaps between lhs and rhs, the resulting union will cross these gaps. 
/// For this to work, the spans have to be: - /// * the expn_id of both spans much match + /// * the ctxt of both spans much match /// * the lhs span needs to end on the same line the rhs span begins /// * the lhs span must start at or before the rhs span pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { use std::cmp; // make sure we're at the same expansion id - if sp_lhs.expn_id != sp_rhs.expn_id { + if sp_lhs.ctxt != sp_rhs.ctxt { return None; } @@ -383,7 +327,7 @@ impl CodeMap { Some(Span { lo: cmp::min(sp_lhs.lo, sp_rhs.lo), hi: cmp::max(sp_lhs.hi, sp_rhs.hi), - expn_id: sp_lhs.expn_id, + ctxt: sp_lhs.ctxt, }) } else { None @@ -391,10 +335,6 @@ impl CodeMap { } pub fn span_to_string(&self, sp: Span) -> String { - if sp == COMMAND_LINE_SP { - return "".to_string(); - } - if self.files.borrow().is_empty() && sp.source_equal(&DUMMY_SP) { return "no-location".to_string(); } @@ -409,62 +349,6 @@ impl CodeMap { hi.col.to_usize() + 1)).to_string() } - /// Return the source span - this is either the supplied span, or the span for - /// the macro callsite that expanded to it. - pub fn source_callsite(&self, sp: Span) -> Span { - let mut span = sp; - // Special case - if a macro is parsed as an argument to another macro, the source - // callsite is the first callsite, which is also source-equivalent to the span. - let mut first = true; - while span.expn_id != NO_EXPANSION && span.expn_id != COMMAND_LINE_EXPN { - if let Some(callsite) = self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - if first && span.source_equal(&callsite) { - if self.lookup_char_pos(span.lo).file.is_real_file() { - return Span { expn_id: NO_EXPANSION, .. span }; - } - } - first = false; - span = callsite; - } - else { - break; - } - } - span - } - - /// Return the source callee. - /// - /// Returns None if the supplied span has no expansion trace, - /// else returns the NameAndSpan for the macro definition - /// corresponding to the source callsite. 
- pub fn source_callee(&self, sp: Span) -> Option { - let mut span = sp; - // Special case - if a macro is parsed as an argument to another macro, the source - // callsite is source-equivalent to the span, and the source callee is the first callee. - let mut first = true; - while let Some(callsite) = self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - if first && span.source_equal(&callsite) { - if self.lookup_char_pos(span.lo).file.is_real_file() { - return self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.callee.clone())); - } - } - first = false; - if let Some(_) = self.with_expn_info(callsite.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - span = callsite; - } - else { - return self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.callee.clone())); - } - } - None - } - pub fn span_to_filename(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo).file.name.to_string() } @@ -628,111 +512,9 @@ impl CodeMap { return a; } - pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId { - let mut expansions = self.expansions.borrow_mut(); - expansions.push(expn_info); - let len = expansions.len(); - if len > u32::max_value() as usize { - panic!("too many ExpnInfo's!"); - } - ExpnId(len as u32 - 1) - } - - pub fn with_expn_info(&self, id: ExpnId, f: F) -> T where - F: FnOnce(Option<&ExpnInfo>) -> T, - { - match id { - NO_EXPANSION | COMMAND_LINE_EXPN => f(None), - ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize])) - } - } - - /// Check if a span is "internal" to a macro in which #[unstable] - /// items can be used (that is, a macro marked with - /// `#[allow_internal_unstable]`). 
- pub fn span_allows_unstable(&self, span: Span) -> bool { - debug!("span_allows_unstable(span = {:?})", span); - let mut allows_unstable = false; - let mut expn_id = span.expn_id; - loop { - let quit = self.with_expn_info(expn_id, |expninfo| { - debug!("span_allows_unstable: expninfo = {:?}", expninfo); - expninfo.map_or(/* hit the top level */ true, |info| { - - let span_comes_from_this_expansion = - info.callee.span.map_or(span.source_equal(&info.call_site), |mac_span| { - mac_span.contains(span) - }); - - debug!("span_allows_unstable: span: {:?} call_site: {:?} callee: {:?}", - (span.lo, span.hi), - (info.call_site.lo, info.call_site.hi), - info.callee.span.map(|x| (x.lo, x.hi))); - debug!("span_allows_unstable: from this expansion? {}, allows unstable? {}", - span_comes_from_this_expansion, - info.callee.allow_internal_unstable); - if span_comes_from_this_expansion { - allows_unstable = info.callee.allow_internal_unstable; - // we've found the right place, stop looking - true - } else { - // not the right place, keep looking - expn_id = info.call_site.expn_id; - false - } - }) - }); - if quit { - break - } - } - debug!("span_allows_unstable? {}", allows_unstable); - allows_unstable - } - pub fn count_lines(&self) -> usize { self.files.borrow().iter().fold(0, |a, f| a + f.count_lines()) } - - pub fn macro_backtrace(&self, span: Span) -> Vec { - let mut prev_span = DUMMY_SP; - let mut span = span; - let mut result = vec![]; - loop { - let span_name_span = self.with_expn_info(span.expn_id, |expn_info| { - expn_info.map(|ei| { - let (pre, post) = match ei.callee.format { - MacroAttribute(..) => ("#[", "]"), - MacroBang(..) => ("", "!"), - CompilerDesugaring(..) 
=> ("desugaring of `", "`"), - }; - let macro_decl_name = format!("{}{}{}", - pre, - ei.callee.name(), - post); - let def_site_span = ei.callee.span; - (ei.call_site, macro_decl_name, def_site_span) - }) - }); - - match span_name_span { - None => break, - Some((call_site, macro_decl_name, def_site_span)) => { - // Don't print recursive invocations - if !call_site.source_equal(&prev_span) { - result.push(MacroBacktrace { - call_site: call_site, - macro_decl_name: macro_decl_name, - def_site_span: def_site_span, - }); - } - prev_span = span; - span = call_site; - } - } - } - result - } } impl CodeMapper for CodeMap { @@ -748,9 +530,6 @@ impl CodeMapper for CodeMap { fn span_to_filename(&self, sp: Span) -> FileName { self.span_to_filename(sp) } - fn macro_backtrace(&self, span: Span) -> Vec { - self.macro_backtrace(span) - } fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { self.merge_spans(sp_lhs, sp_rhs) } @@ -763,7 +542,6 @@ impl CodeMapper for CodeMap { #[cfg(test)] mod tests { use super::*; - use symbol::keywords; use std::rc::Rc; #[test] @@ -912,7 +690,7 @@ mod tests { fn t7() { // Test span_to_lines for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let file_lines = cm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, "blork.rs"); @@ -928,7 +706,7 @@ mod tests { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } + Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), ctxt: NO_EXPANSION } } /// Test span_to_snippet and span_to_lines for a span coverting 3 @@ -958,7 +736,7 @@ mod tests { fn t8() { // Test span_to_snippet for a span ending at the end of filemap 
let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let snippet = cm.span_to_snippet(span); assert_eq!(snippet, Ok("second line".to_string())); @@ -968,7 +746,7 @@ mod tests { fn t9() { // Test span_to_str for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let sstr = cm.span_to_string(span); assert_eq!(sstr, "blork.rs:2:1: 2:12"); @@ -1022,7 +800,7 @@ mod tests { let span = Span { lo: BytePos(lo as u32 + file.start_pos.0), hi: BytePos(hi as u32 + file.start_pos.0), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }; assert_eq!(&self.span_to_snippet(span).unwrap()[..], substring); @@ -1032,45 +810,4 @@ mod tests { } } } - - fn init_expansion_chain(cm: &CodeMap) -> Span { - // Creates an expansion chain containing two recursive calls - // root -> expA -> expA -> expB -> expB -> end - let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - - let format_root = ExpnFormat::MacroBang(keywords::Invalid.name()); - let callee_root = NameAndSpan { format: format_root, - allow_internal_unstable: false, - span: Some(root) }; - - let info_a1 = ExpnInfo { call_site: root, callee: callee_root }; - let id_a1 = cm.record_expansion(info_a1); - let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 }; - - let format_a = ExpnFormat::MacroBang(keywords::As.name()); - let callee_a = NameAndSpan { format: format_a, - allow_internal_unstable: false, - span: Some(span_a1) }; - - let info_a2 = ExpnInfo { call_site: span_a1, callee: callee_a.clone() }; - let id_a2 = cm.record_expansion(info_a2); - let span_a2 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a2 }; - - let info_b1 = ExpnInfo { call_site: span_a2, callee: callee_a }; - let id_b1 = 
cm.record_expansion(info_b1); - let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 }; - - let format_b = ExpnFormat::MacroBang(keywords::Box.name()); - let callee_b = NameAndSpan { format: format_b, - allow_internal_unstable: false, - span: None }; - - let info_b2 = ExpnInfo { call_site: span_b1, callee: callee_b.clone() }; - let id_b2 = cm.record_expansion(info_b2); - let span_b2 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b2 }; - - let info_end = ExpnInfo { call_site: span_b2, callee: callee_b }; - let id_end = cm.record_expansion(info_end); - Span { lo: BytePos(37), hi: BytePos(48), expn_id: id_end } - } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index dc7e7673eb0..a2d54b62ec6 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -12,11 +12,11 @@ pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT use ast::{self, Attribute, Name, PatKind, MetaItem}; use attr::HasAttrs; -use codemap::{self, CodeMap, ExpnInfo, Spanned, respan}; -use syntax_pos::{Span, ExpnId, NO_EXPANSION}; -use errors::{DiagnosticBuilder, FatalError}; +use codemap::{self, CodeMap, Spanned, respan}; +use syntax_pos::{Span, DUMMY_SP}; +use errors::DiagnosticBuilder; use ext::expand::{self, Expansion, Invocation}; -use ext::hygiene::Mark; +use ext::hygiene::{Mark, SyntaxContext}; use fold::{self, Folder}; use parse::{self, parser, DirectoryOwnership}; use parse::token; @@ -56,6 +56,14 @@ impl HasAttrs for Annotatable { } impl Annotatable { + pub fn span(&self) -> Span { + match *self { + Annotatable::Item(ref item) => item.span, + Annotatable::TraitItem(ref trait_item) => trait_item.span, + Annotatable::ImplItem(ref impl_item) => impl_item.span, + } + } + pub fn expect_item(self) -> P { match self { Annotatable::Item(i) => i, @@ -602,7 +610,6 @@ pub struct ModuleData { pub struct ExpansionData { pub mark: Mark, pub depth: usize, - pub backtrace: ExpnId, pub module: Rc, pub directory_ownership: 
DirectoryOwnership, } @@ -633,7 +640,6 @@ impl<'a> ExtCtxt<'a> { current_expansion: ExpansionData { mark: Mark::root(), depth: 0, - backtrace: NO_EXPANSION, module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }), directory_ownership: DirectoryOwnership::Owned, }, @@ -658,30 +664,30 @@ impl<'a> ExtCtxt<'a> { pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { - self.codemap().with_expn_info(self.backtrace(), |ei| match ei { + match self.current_expansion.mark.expn_info() { Some(expn_info) => expn_info.call_site, - None => self.bug("missing top span") - }) + None => DUMMY_SP, + } + } + pub fn backtrace(&self) -> SyntaxContext { + SyntaxContext::empty().apply_mark(self.current_expansion.mark) } - pub fn backtrace(&self) -> ExpnId { self.current_expansion.backtrace } /// Returns span for the macro which originally caused the current expansion to happen. /// /// Stops backtracing at include! boundary. pub fn expansion_cause(&self) -> Span { - let mut expn_id = self.backtrace(); + let mut ctxt = self.backtrace(); let mut last_macro = None; loop { - if self.codemap().with_expn_info(expn_id, |info| { - info.map_or(None, |i| { - if i.callee.name() == "include" { - // Stop going up the backtrace once include! is encountered - return None; - } - expn_id = i.call_site.expn_id; - last_macro = Some(i.call_site); - return Some(()); - }) + if ctxt.outer().expn_info().map_or(None, |info| { + if info.callee.name() == "include" { + // Stop going up the backtrace once include! 
is encountered + return None; + } + ctxt = info.call_site.ctxt; + last_macro = Some(info.call_site); + return Some(()); }).is_none() { break } @@ -689,28 +695,6 @@ impl<'a> ExtCtxt<'a> { last_macro.expect("missing expansion backtrace") } - pub fn bt_push(&mut self, ei: ExpnInfo) { - if self.current_expansion.depth > self.ecfg.recursion_limit { - let suggested_limit = self.ecfg.recursion_limit * 2; - let mut err = self.struct_span_fatal(ei.call_site, - &format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name())); - err.help(&format!( - "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)); - err.emit(); - panic!(FatalError); - } - - let mut call_site = ei.call_site; - call_site.expn_id = self.backtrace(); - self.current_expansion.backtrace = self.codemap().record_expansion(ExpnInfo { - call_site: call_site, - callee: ei.callee - }); - } - pub fn bt_pop(&mut self) {} - pub fn struct_span_warn(&self, sp: Span, msg: &str) @@ -792,9 +776,9 @@ impl<'a> ExtCtxt<'a> { /// compilation on error, merely emits a non-fatal error and returns None. pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) -> Option> { - // Update `expr.span`'s expn_id now in case expr is an `include!` macro invocation. + // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. let expr = expr.map(|mut expr| { - expr.span.expn_id = cx.backtrace(); + expr.span.ctxt = expr.span.ctxt.apply_mark(cx.current_expansion.mark); expr }); diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 1569d9f540b..c79040424f6 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -9,13 +9,16 @@ // except according to those terms. 
use attr::HasAttrs; -use {ast, codemap}; +use ast; +use codemap::{ExpnInfo, NameAndSpan, ExpnFormat}; use ext::base::ExtCtxt; use ext::build::AstBuilder; use parse::parser::PathStyle; use symbol::Symbol; use syntax_pos::Span; +use std::collections::HashSet; + pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec { let mut result = Vec::new(); attrs.retain(|attr| { @@ -41,36 +44,35 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec result } -fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { - Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern(attr_name)), - span: Some(span), - allow_internal_unstable: true, - }, - }), - ..span +pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T + where T: HasAttrs, +{ + let (mut names, mut pretty_name) = (HashSet::new(), "derive(".to_owned()); + for (i, path) in traits.iter().enumerate() { + if i > 0 { + pretty_name.push_str(", "); + } + pretty_name.push_str(&path.to_string()); + names.insert(unwrap_or!(path.segments.get(0), continue).identifier.name); } -} + pretty_name.push(')'); -pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T { - let span = match traits.get(0) { - Some(path) => path.span, - None => return item, - }; + cx.current_expansion.mark.set_expn_info(ExpnInfo { + call_site: span, + callee: NameAndSpan { + format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)), + span: None, + allow_internal_unstable: true, + }, + }); + let span = Span { ctxt: cx.backtrace(), ..span }; item.map_attrs(|mut attrs| { - if traits.iter().any(|path| *path == "PartialEq") && - traits.iter().any(|path| *path == "Eq") { - let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); + if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) { let meta = 
cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); } - if traits.iter().any(|path| *path == "Copy") && - traits.iter().any(|path| *path == "Clone") { - let span = allow_unstable(cx, span, "derive(Copy, Clone)"); + if names.contains(&Symbol::intern("Copy")) && names.contains(&Symbol::intern("Clone")) { let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index e258c51a329..1b3352f73ad 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -13,6 +13,7 @@ use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use config::{is_test_or_bench, StripUnconfigured}; +use errors::FatalError; use ext::base::*; use ext::derive::{add_derived_markers, collect_derives}; use ext::hygiene::Mark; @@ -27,7 +28,7 @@ use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; -use syntax_pos::{Span, ExpnId, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use tokenstream::TokenStream; use util::small_vector::SmallVector; use visit::Visitor; @@ -273,7 +274,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let item = item .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs }); let item_with_markers = - add_derived_markers(&mut self.cx, &traits, item.clone()); + add_derived_markers(&mut self.cx, item.span(), &traits, item.clone()); let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new); for path in &traits { @@ -363,11 +364,26 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } fn expand_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { - match invoc.kind { + let result = match invoc.kind { InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext), InvocationKind::Attr { .. } => self.expand_attr_invoc(invoc, ext), InvocationKind::Derive { .. 
} => self.expand_derive_invoc(invoc, ext), + }; + + if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { + let info = self.cx.current_expansion.mark.expn_info().unwrap(); + let suggested_limit = self.cx.ecfg.recursion_limit * 2; + let mut err = self.cx.struct_span_fatal(info.call_site, + &format!("recursion limit reached while expanding the macro `{}`", + info.callee.name())); + err.help(&format!( + "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", + suggested_limit)); + err.emit(); + panic!(FatalError); } + + result } fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { @@ -378,11 +394,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: attr.span, callee: NameAndSpan { format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), - span: Some(attr.span), + span: None, allow_internal_unstable: false, } }); @@ -403,19 +419,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtension::AttrProcMacro(ref mac) => { let item_toks = stream_for_item(&item, &self.cx.parse_sess); - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: attr.span, - callee: NameAndSpan { - format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), - span: None, - allow_internal_unstable: false, - }, - }), - ..attr.span - }; - - let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks); + let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; + let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks); self.parse_expansion(tok_result, kind, &attr.path, span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) 
=> { @@ -440,8 +445,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let path = &mac.node.path; let ident = ident.unwrap_or(keywords::Invalid.ident()); - let marked_tts = - noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None }); + let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { if ident.name != keywords::Invalid.name() { @@ -451,7 +455,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -470,7 +474,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); }; - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -502,7 +506,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -528,10 +532,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); }; - expanded.fold_with(&mut Marker { - mark: mark, - expn_id: Some(self.cx.backtrace()), - }) + expanded.fold_with(&mut Marker(mark)) } /// Expand a derive invocation. Returns the result of expansion. 
@@ -550,50 +551,33 @@ impl<'a, 'b> MacroExpander<'a, 'b> { id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false, }; - self.cx.bt_push(ExpnInfo { + let mut expn_info = ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroAttribute(pretty_name), span: None, allow_internal_unstable: false, } - }); + }; match *ext { SyntaxExtension::ProcMacroDerive(ref ext, _) => { - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: span, - callee: NameAndSpan { - format: MacroAttribute(pretty_name), - span: None, - allow_internal_unstable: false, - }, - }), - ..span - }; + invoc.expansion_data.mark.set_expn_info(expn_info); + let span = Span { ctxt: self.cx.backtrace(), ..span }; let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this name: keywords::Invalid.name(), span: DUMMY_SP, node: ast::MetaItemKind::Word, }; - return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)); + kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)) } SyntaxExtension::BuiltinDerive(func) => { - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: span, - callee: NameAndSpan { - format: MacroAttribute(pretty_name), - span: None, - allow_internal_unstable: true, - }, - }), - ..span - }; + expn_info.callee.allow_internal_unstable = true; + invoc.expansion_data.mark.set_expn_info(expn_info); + let span = Span { ctxt: self.cx.backtrace(), ..span }; let mut items = Vec::new(); func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a)); - return kind.expect_from_annotatables(items); + kind.expect_from_annotatables(items) } _ => { let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); @@ -753,10 +737,9 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { // Detect use of feature-gated or invalid attributes on macro invocations // since they will not be detected after macro expansion. 
fn check_attributes(&mut self, attrs: &[ast::Attribute]) { - let codemap = &self.cx.parse_sess.codemap(); let features = self.cx.ecfg.features.unwrap(); for attr in attrs.iter() { - feature_gate::check_attribute(&attr, &self.cx.parse_sess, codemap, features); + feature_gate::check_attribute(&attr, &self.cx.parse_sess, features); } } } @@ -1065,23 +1048,21 @@ impl<'feat> ExpansionConfig<'feat> { } } -// A Marker adds the given mark to the syntax context and -// sets spans' `expn_id` to the given expn_id (unless it is `None`). -struct Marker { mark: Mark, expn_id: Option } +// A Marker adds the given mark to the syntax context. +struct Marker(Mark); impl Folder for Marker { fn fold_ident(&mut self, mut ident: Ident) -> Ident { - ident.ctxt = ident.ctxt.apply_mark(self.mark); + ident.ctxt = ident.ctxt.apply_mark(self.0); ident } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - noop_fold_mac(mac, self) - } fn new_span(&mut self, mut span: Span) -> Span { - if let Some(expn_id) = self.expn_id { - span.expn_id = expn_id; - } + span.ctxt = span.ctxt.apply_mark(self.0); span } + + fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + noop_fold_mac(mac, self) + } } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 39b92c7d007..0103d6ea959 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -185,7 +185,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: &Path) -> PathBuf { // NB: relative paths are resolved relative to the compilation unit if !arg.is_absolute() { - let callsite = cx.codemap().source_callsite(sp); + let callsite = sp.source_callsite(); let mut cu = PathBuf::from(&cx.codemap().span_to_filename(callsite)); cu.pop(); cu.push(arg); diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index d56859d805c..12e746e024d 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ 
b/src/libsyntax/ext/tt/quoted.rs @@ -34,17 +34,19 @@ impl Delimited { } pub fn open_tt(&self, span: Span) -> TokenTree { - let open_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + let open_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(open_span, self.open_token()) } pub fn close_tt(&self, span: Span) -> TokenTree { - let close_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + let close_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(close_span, self.close_token()) } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 9d280a413e6..12d25ca4274 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -28,7 +28,7 @@ use self::AttributeGate::*; use abi::Abi; use ast::{self, NodeId, PatKind, RangeEnd}; use attr; -use codemap::{CodeMap, Spanned}; +use codemap::Spanned; use syntax_pos::Span; use errors::{DiagnosticBuilder, Handler, FatalError}; use visit::{self, FnKind, Visitor}; @@ -831,7 +831,7 @@ impl GatedCfg { pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) { let (cfg, feature, has_feature) = GATED_CFGS[self.index]; - if !has_feature(features) && !sess.codemap().span_allows_unstable(self.span) { + if !has_feature(features) && !self.span.allows_unstable() { let explain = format!("`cfg({})` is experimental and subject to change", cfg); emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain); } @@ -841,7 +841,6 @@ impl GatedCfg { struct Context<'a> { features: &'a Features, parse_sess: &'a ParseSess, - cm: &'a CodeMap, plugin_attributes: &'a [(String, AttributeType)], } @@ -850,7 +849,7 @@ macro_rules! 
gate_feature_fn { let (cx, has_feature, span, name, explain) = ($cx, $has_feature, $span, $name, $explain); let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); - if !has_feature && !cx.cm.span_allows_unstable(span) { + if !has_feature && !span.allows_unstable() { emit_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain); } }} @@ -908,12 +907,8 @@ impl<'a> Context<'a> { } } -pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, - cm: &CodeMap, features: &Features) { - let cx = Context { - features: features, parse_sess: parse_sess, - cm: cm, plugin_attributes: &[] - }; +pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { + let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] }; cx.check_attribute(attr, true); } @@ -1016,7 +1011,7 @@ struct PostExpansionVisitor<'a> { macro_rules! gate_feature_post { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{ let (cx, span) = ($cx, $span); - if !cx.context.cm.span_allows_unstable(span) { + if !span.allows_unstable() { gate_feature!(cx.context, $feature, span, $explain) } }} @@ -1096,7 +1091,7 @@ fn starts_with_digit(s: &str) -> bool { impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { - if !self.context.cm.span_allows_unstable(attr.span) { + if !attr.span.allows_unstable() { // check for gated attributes self.context.check_attribute(attr, false); } @@ -1530,7 +1525,6 @@ pub fn check_crate(krate: &ast::Crate, let ctx = Context { features: features, parse_sess: sess, - cm: sess.codemap(), plugin_attributes: plugin_attributes, }; visit::walk_crate(&mut PostExpansionVisitor { context: &ctx }, krate); diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index fd762552248..dec1b7d1d87 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -202,7 +202,7 @@ 
impl DiagnosticSpan { // backtrace ourselves, but the `macro_backtrace` helper makes // some decision, such as dropping some frames, and I don't // want to duplicate that logic here. - let backtrace = je.cm.macro_backtrace(span).into_iter(); + let backtrace = span.macro_backtrace().into_iter(); DiagnosticSpan::from_span_full(span, is_primary, label, diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 6c975f3fc40..86ee1c5336d 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -125,7 +125,7 @@ pub mod ptr; pub mod show_span; pub mod std_inject; pub mod str; -pub mod symbol; +pub use syntax_pos::symbol; pub mod test; pub mod tokenstream; pub mod visit; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 43a9d8c5f78..e9eb4fbcc91 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5036,11 +5036,7 @@ impl<'a> Parser<'a> { the path:", path); self.expect(&token::CloseDelim(token::Paren))?; // `)` - let sp = Span { - lo: start_span.lo, - hi: self.prev_span.hi, - expn_id: start_span.expn_id, - }; + let sp = start_span.to(self.prev_span); let mut err = self.span_fatal_help(sp, &msg, &suggestion); err.span_suggestion(path_span, &help_msg, format!("in {}", path)); err.emit(); // emit diagnostic, but continue with public visibility diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index c541df9230a..c7820a15fb3 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -10,29 +10,27 @@ use ast; use attr; +use ext::hygiene::{Mark, SyntaxContext}; use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; -use parse::ParseSess; use ptr::P; use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's /// call to codemap's is_internal check. /// The expanded code uses the unstable `#[prelude_import]` attribute. 
-fn ignored_span(sess: &ParseSess, sp: Span) -> Span { - let info = ExpnInfo { +fn ignored_span(sp: Span) -> Span { + let mark = Mark::fresh(); + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("std_inject")), span: None, allow_internal_unstable: true, } - }; - let expn_id = sess.codemap().record_expansion(info); - let mut sp = sp; - sp.expn_id = expn_id; - return sp; + }); + Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp } } pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { @@ -45,10 +43,7 @@ pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { } } -pub fn maybe_inject_crates_ref(sess: &ParseSess, - mut krate: ast::Crate, - alt_std_name: Option) - -> ast::Crate { +pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option) -> ast::Crate { let name = match injected_crate_name(&krate) { Some(name) => name, None => return krate, @@ -67,7 +62,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, span: DUMMY_SP, })); - let span = ignored_span(sess, DUMMY_SP); + let span = ignored_span(DUMMY_SP); krate.module.items.insert(0, P(ast::Item { attrs: vec![ast::Attribute { style: ast::AttrStyle::Outer, diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs deleted file mode 100644 index 2acbeee426b..00000000000 --- a/src/libsyntax/symbol.rs +++ /dev/null @@ -1,342 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! An "interner" is a data structure that associates values with usize tags and -//! allows bidirectional lookup; i.e. given a value, one can easily find the -//! type, and vice versa. 
- -use serialize::{Decodable, Decoder, Encodable, Encoder}; -use std::cell::RefCell; -use std::collections::HashMap; -use std::fmt; - -/// A symbol is an interned or gensymed string. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Symbol(u32); - -// The interner in thread-local, so `Symbol` shouldn't move between threads. -impl !Send for Symbol { } - -impl Symbol { - /// Maps a string to its interned representation. - pub fn intern(string: &str) -> Self { - with_interner(|interner| interner.intern(string)) - } - - /// gensym's a new usize, using the current interner. - pub fn gensym(string: &str) -> Self { - with_interner(|interner| interner.gensym(string)) - } - - pub fn as_str(self) -> InternedString { - with_interner(|interner| unsafe { - InternedString { - string: ::std::mem::transmute::<&str, &str>(interner.get(self)) - } - }) - } - - pub fn as_u32(self) -> u32 { - self.0 - } -} - -impl fmt::Debug for Symbol { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}({})", self, self.0) - } -} - -impl fmt::Display for Symbol { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.as_str(), f) - } -} - -impl Encodable for Symbol { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self.as_str()) - } -} - -impl Decodable for Symbol { - fn decode(d: &mut D) -> Result { - Ok(Symbol::intern(&d.read_str()?)) - } -} - -impl> PartialEq for Symbol { - fn eq(&self, other: &T) -> bool { - self.as_str() == other.deref() - } -} - -#[derive(Default)] -pub struct Interner { - names: HashMap, Symbol>, - strings: Vec>, -} - -impl Interner { - pub fn new() -> Self { - Interner::default() - } - - fn prefill(init: &[&str]) -> Self { - let mut this = Interner::new(); - for &string in init { - this.intern(string); - } - this - } - - pub fn intern(&mut self, string: &str) -> Symbol { - if let Some(&name) = self.names.get(string) { - return name; - } - - let name = Symbol(self.strings.len() as 
u32); - let string = string.to_string().into_boxed_str(); - self.strings.push(string.clone()); - self.names.insert(string, name); - name - } - - fn gensym(&mut self, string: &str) -> Symbol { - let gensym = Symbol(self.strings.len() as u32); - // leave out of `names` to avoid colliding - self.strings.push(string.to_string().into_boxed_str()); - gensym - } - - pub fn get(&self, name: Symbol) -> &str { - &self.strings[name.0 as usize] - } -} - -// In this macro, there is the requirement that the name (the number) must be monotonically -// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero. -macro_rules! declare_keywords {( - $( ($index: expr, $konst: ident, $string: expr) )* -) => { - pub mod keywords { - use ast; - #[derive(Clone, Copy, PartialEq, Eq)] - pub struct Keyword { - ident: ast::Ident, - } - impl Keyword { - #[inline] pub fn ident(self) -> ast::Ident { self.ident } - #[inline] pub fn name(self) -> ast::Name { self.ident.name } - } - $( - #[allow(non_upper_case_globals)] - pub const $konst: Keyword = Keyword { - ident: ast::Ident::with_empty_ctxt(super::Symbol($index)) - }; - )* - } - - impl Interner { - fn fresh() -> Self { - Interner::prefill(&[$($string,)*]) - } - } -}} - -// NB: leaving holes in the ident table is bad! a different ident will get -// interned with the id from the hole, but it will be between the min and max -// of the reserved words, and thus tagged as "reserved". -// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, -// this should be rarely necessary though if the keywords are kept in alphabetic order. -declare_keywords! { - // Invalid identifier - (0, Invalid, "") - - // Strict keywords used in the language. 
- (1, As, "as") - (2, Box, "box") - (3, Break, "break") - (4, Const, "const") - (5, Continue, "continue") - (6, Crate, "crate") - (7, Else, "else") - (8, Enum, "enum") - (9, Extern, "extern") - (10, False, "false") - (11, Fn, "fn") - (12, For, "for") - (13, If, "if") - (14, Impl, "impl") - (15, In, "in") - (16, Let, "let") - (17, Loop, "loop") - (18, Match, "match") - (19, Mod, "mod") - (20, Move, "move") - (21, Mut, "mut") - (22, Pub, "pub") - (23, Ref, "ref") - (24, Return, "return") - (25, SelfValue, "self") - (26, SelfType, "Self") - (27, Static, "static") - (28, Struct, "struct") - (29, Super, "super") - (30, Trait, "trait") - (31, True, "true") - (32, Type, "type") - (33, Unsafe, "unsafe") - (34, Use, "use") - (35, Where, "where") - (36, While, "while") - - // Keywords reserved for future use. - (37, Abstract, "abstract") - (38, Alignof, "alignof") - (39, Become, "become") - (40, Do, "do") - (41, Final, "final") - (42, Macro, "macro") - (43, Offsetof, "offsetof") - (44, Override, "override") - (45, Priv, "priv") - (46, Proc, "proc") - (47, Pure, "pure") - (48, Sizeof, "sizeof") - (49, Typeof, "typeof") - (50, Unsized, "unsized") - (51, Virtual, "virtual") - (52, Yield, "yield") - - // Weak keywords, have special meaning only in specific contexts. - (53, Default, "default") - (54, StaticLifetime, "'static") - (55, Union, "union") - (56, Catch, "catch") - - // A virtual keyword that resolves to the crate root when used in a lexical scope. - (57, CrateRoot, "{{root}}") -} - -// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. -fn with_interner T>(f: F) -> T { - thread_local!(static INTERNER: RefCell = { - RefCell::new(Interner::fresh()) - }); - INTERNER.with(|interner| f(&mut *interner.borrow_mut())) -} - -/// Represents a string stored in the thread-local interner. Because the -/// interner lives for the life of the thread, this can be safely treated as an -/// immortal string, as long as it never crosses between threads. 
-/// -/// FIXME(pcwalton): You must be careful about what you do in the destructors -/// of objects stored in TLS, because they may run after the interner is -/// destroyed. In particular, they must not access string contents. This can -/// be fixed in the future by just leaking all strings until thread death -/// somehow. -#[derive(Clone, Hash, PartialOrd, Eq, Ord)] -pub struct InternedString { - string: &'static str, -} - -impl ::std::convert::AsRef for InternedString where str: ::std::convert::AsRef { - fn as_ref(&self) -> &U { - self.string.as_ref() - } -} - -impl> ::std::cmp::PartialEq for InternedString { - fn eq(&self, other: &T) -> bool { - self.string == other.deref() - } -} - -impl ::std::cmp::PartialEq for str { - fn eq(&self, other: &InternedString) -> bool { - self == other.string - } -} - -impl<'a> ::std::cmp::PartialEq for &'a str { - fn eq(&self, other: &InternedString) -> bool { - *self == other.string - } -} - -impl ::std::cmp::PartialEq for String { - fn eq(&self, other: &InternedString) -> bool { - self == other.string - } -} - -impl<'a> ::std::cmp::PartialEq for &'a String { - fn eq(&self, other: &InternedString) -> bool { - *self == other.string - } -} - -impl !Send for InternedString { } - -impl ::std::ops::Deref for InternedString { - type Target = str; - fn deref(&self) -> &str { self.string } -} - -impl fmt::Debug for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(self.string, f) - } -} - -impl fmt::Display for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(self.string, f) - } -} - -impl Decodable for InternedString { - fn decode(d: &mut D) -> Result { - Ok(Symbol::intern(&d.read_str()?).as_str()) - } -} - -impl Encodable for InternedString { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self.string) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn interner_tests() { - let mut i: Interner = Interner::new(); 
- // first one is zero: - assert_eq!(i.intern("dog"), Symbol(0)); - // re-use gets the same entry: - assert_eq!(i.intern ("dog"), Symbol(0)); - // different string gets a different #: - assert_eq!(i.intern("cat"), Symbol(1)); - assert_eq!(i.intern("cat"), Symbol(1)); - // dog is still at zero - assert_eq!(i.intern("dog"), Symbol(0)); - // gensym gets 3 - assert_eq!(i.gensym("zebra"), Symbol(2)); - // gensym of same string gets new number : - assert_eq!(i.gensym("zebra"), Symbol(3)); - // gensym of *existing* string gets new number: - assert_eq!(i.gensym("dog"), Symbol(4)); - } -} diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 6fb6db9ca02..50380626d7f 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -31,6 +31,7 @@ use entry::{self, EntryPointType}; use ext::base::{ExtCtxt, Resolver}; use ext::build::AstBuilder; use ext::expand::ExpansionConfig; +use ext::hygiene::{Mark, SyntaxContext}; use fold::Folder; use util::move_map::MoveMap; use fold; @@ -62,6 +63,7 @@ struct TestCtxt<'a> { testfns: Vec, reexport_test_harness_main: Option, is_test_crate: bool, + ctxt: SyntaxContext, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option, @@ -275,6 +277,7 @@ fn generate_test_harness(sess: &ParseSess, let mut cleaner = EntryPointCleaner { depth: 0 }; let krate = cleaner.fold_crate(krate); + let mark = Mark::fresh(); let mut cx: TestCtxt = TestCtxt { sess: sess, span_diagnostic: sd, @@ -284,15 +287,16 @@ fn generate_test_harness(sess: &ParseSess, reexport_test_harness_main: reexport_test_harness_main, is_test_crate: is_test_crate(&krate), toplevel_reexport: None, + ctxt: SyntaxContext::empty().apply_mark(mark), }; cx.ext_cx.crate_root = Some("std"); - cx.ext_cx.bt_push(ExpnInfo { + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("test")), span: None, - allow_internal_unstable: false, + allow_internal_unstable: true, } }); @@ -307,18 +311,7 
@@ fn generate_test_harness(sess: &ParseSess, /// call to codemap's is_internal check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { - let info = ExpnInfo { - call_site: sp, - callee: NameAndSpan { - format: MacroAttribute(Symbol::intern("test")), - span: None, - allow_internal_unstable: true, - } - }; - let expn_id = cx.sess.codemap().record_expansion(info); - let mut sp = sp; - sp.expn_id = expn_id; - return sp; + Span { ctxt: cx.ctxt, ..sp } } #[derive(PartialEq)] diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index c6d6e6237f2..c537a0ee166 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -83,7 +83,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { Span { lo: BytePos(start as u32), hi: BytePos(end as u32), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, } } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index b75b3efda36..86bfdebe42b 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -56,18 +56,20 @@ impl Delimited { /// Returns the opening delimiter as a token tree. pub fn open_tt(&self, span: Span) -> TokenTree { - let open_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + let open_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. 
pub fn close_tt(&self, span: Span) -> TokenTree { - let close_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + let close_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(close_span, self.close_token()) } @@ -425,7 +427,7 @@ mod tests { Span { lo: BytePos(a), hi: BytePos(b), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, } } diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 767ec94a0ce..923e8072f43 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -13,7 +13,6 @@ use self::State::*; use syntax::ast; -use syntax::codemap; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; @@ -240,15 +239,6 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, } } - let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: sp, - callee: codemap::NameAndSpan { - format: codemap::MacroBang(Symbol::intern("asm")), - span: None, - allow_internal_unstable: false, - }, - }); - MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(ast::InlineAsm { @@ -260,7 +250,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, volatile: volatile, alignstack: alignstack, dialect: dialect, - expn_id: expn_id, + ctxt: cx.backtrace(), })), span: sp, attrs: ast::ThinVec::new(), diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index d14b59d6c70..1993d6ebe5b 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -111,7 +111,7 @@ fn cs_clone_shallow(name: &str, ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. 
- let span = super::allow_unstable(cx, span, "derive(Clone)"); + let span = Span { ctxt: cx.backtrace(), ..span}; let assert_path = cx.path_all(span, true, cx.std_path(&["clone", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 6ab5987a159..eef21492deb 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -58,7 +58,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. - let span = super::allow_unstable(cx, span, "derive(Eq)"); + let span = Span { ctxt: cx.backtrace(), ..span }; let assert_path = cx.path_all(span, true, cx.std_path(&["cmp", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index a767716466c..ec4cb815960 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -66,8 +66,8 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`"), }; - // We want to make sure we have the expn_id set so that we can use unstable methods - let span = Span { expn_id: cx.backtrace(), ..span }; + // We want to make sure we have the ctxt set so that we can use unstable methods + let span = Span { ctxt: cx.backtrace(), ..span }; let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); let builder = Ident::from_str("builder"); let builder_expr = cx.expr_ident(span, builder.clone()); diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 48e7ff0d243..1ff0fec1c96 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -375,7 +375,7 @@ fn 
find_type_parameters(ty: &ast::Ty, } fn visit_mac(&mut self, mac: &ast::Mac) { - let span = Span { expn_id: self.span.expn_id, ..mac.span }; + let span = Span { ctxt: self.span.ctxt, ..mac.span }; self.cx.span_err(span, "`derive` cannot be used on items with type macros"); } } @@ -1458,7 +1458,7 @@ impl<'a> MethodDef<'a> { .iter() .map(|v| { let ident = v.node.name; - let sp = Span { expn_id: trait_.span.expn_id, ..v.span }; + let sp = Span { ctxt: trait_.span.ctxt, ..v.span }; let summary = trait_.summarise_struct(cx, &v.node.data); (ident, sp, summary) }) @@ -1478,7 +1478,7 @@ impl<'a> TraitDef<'a> { let mut named_idents = Vec::new(); let mut just_spans = Vec::new(); for field in struct_def.fields() { - let sp = Span { expn_id: self.span.expn_id, ..field.span }; + let sp = Span { ctxt: self.span.ctxt, ..field.span }; match field.ident { Some(ident) => named_idents.push((ident, sp)), _ => just_spans.push(sp), @@ -1523,7 +1523,7 @@ impl<'a> TraitDef<'a> { let mut paths = Vec::new(); let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = Span { expn_id: self.span.expn_id, ..struct_field.span }; + let sp = Span { ctxt: self.span.ctxt, ..struct_field.span }; let ident = cx.ident_of(&format!("{}_{}", prefix, i)); paths.push(codemap::Spanned { span: sp, @@ -1544,7 +1544,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } codemap::Spanned { - span: Span { expn_id: self.span.expn_id, ..pat.span }, + span: Span { ctxt: self.span.ctxt, ..pat.span }, node: ast::FieldPat { ident: ident.unwrap(), pat: pat, @@ -1576,7 +1576,7 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let variant_ident = variant.node.name; - let sp = Span { expn_id: self.span.expn_id, ..variant.span }; + let sp = Span { ctxt: self.span.ctxt, ..variant.span }; let variant_path = cx.path(sp, vec![enum_ident, variant_ident]); 
self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl) } diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index b51591bf89d..b2bb43e41ed 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -12,9 +12,9 @@ use std::rc::Rc; use syntax::ast; -use syntax::codemap; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension, Resolver}; use syntax::ext::build::AstBuilder; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; @@ -74,20 +74,6 @@ pub mod ord; pub mod generic; -fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { - Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern(attr_name)), - span: Some(span), - allow_internal_unstable: true, - }, - }), - ..span - } -} - macro_rules! derive_traits { ($( $name:expr => $func:path, )+) => { pub fn is_builtin_trait(name: ast::Name) -> bool { @@ -177,15 +163,15 @@ fn call_intrinsic(cx: &ExtCtxt, intrinsic: &str, args: Vec>) -> P { - span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern("derive")), - span: Some(span), - allow_internal_unstable: true, - }, - }); - + if cx.current_expansion.mark.expn_info().unwrap().callee.allow_internal_unstable { + span.ctxt = cx.backtrace(); + } else { // Avoid instability errors with user defined custom derives, cc #36316 + let mut info = cx.current_expansion.mark.expn_info().unwrap(); + info.callee.allow_internal_unstable = true; + let mark = Mark::fresh(); + mark.set_expn_info(info); + span.ctxt = SyntaxContext::empty().apply_mark(mark); + } let path = cx.std_path(&["intrinsics", intrinsic]); let call = cx.expr_call_global(span, path, args); diff --git 
a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index d2afa08cada..aeb5b1e0a53 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -641,10 +641,11 @@ impl<'a, 'b> Context<'a, 'b> { fn format_arg(ecx: &ExtCtxt, macsp: Span, - sp: Span, + mut sp: Span, ty: &ArgumentType, arg: P) -> P { + sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); let trait_ = match *ty { Placeholder(ref tyname) => { match &tyname[..] { diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 2d815b3f1bb..bb89caab709 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -17,6 +17,7 @@ use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::fold::Folder; use syntax::parse::ParseSess; use syntax::ptr::P; @@ -360,7 +361,8 @@ fn mk_registrar(cx: &mut ExtCtxt, custom_derives: &[ProcMacroDerive], custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef]) -> P { - let eid = cx.codemap().record_expansion(ExpnInfo { + let mark = Mark::fresh(); + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("proc_macro")), @@ -368,7 +370,7 @@ fn mk_registrar(cx: &mut ExtCtxt, allow_internal_unstable: true, } }); - let span = Span { expn_id: eid, ..DUMMY_SP }; + let span = Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..DUMMY_SP }; let proc_macro = Ident::from_str("proc_macro"); let krate = cx.item(span, diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index feebbcd6f03..8a9ff647b3e 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -15,12 +15,16 @@ //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! 
DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 +use Span; +use symbol::Symbol; + +use serialize::{Encodable, Decodable, Encoder, Decoder}; use std::cell::RefCell; use std::collections::HashMap; use std::fmt; /// A SyntaxContext represents a chain of macro expansions (represented by marks). -#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Default)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContext(u32); #[derive(Copy, Clone)] @@ -36,8 +40,8 @@ pub struct Mark(u32); impl Mark { pub fn fresh() -> Self { HygieneData::with(|data| { - let next_mark = Mark(data.next_mark.0 + 1); - ::std::mem::replace(&mut data.next_mark, next_mark) + data.marks.push(None); + Mark(data.marks.len() as u32 - 1) }) } @@ -53,23 +57,31 @@ impl Mark { pub fn from_u32(raw: u32) -> Mark { Mark(raw) } + + pub fn expn_info(self) -> Option { + HygieneData::with(|data| data.marks[self.0 as usize].clone()) + } + + pub fn set_expn_info(self, info: ExpnInfo) { + HygieneData::with(|data| data.marks[self.0 as usize] = Some(info)) + } } struct HygieneData { + marks: Vec>, syntax_contexts: Vec, markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, - next_mark: Mark, } impl HygieneData { fn new() -> Self { HygieneData { + marks: vec![None], syntax_contexts: vec![SyntaxContextData { outer_mark: Mark::root(), prev_ctxt: SyntaxContext::empty(), }], markings: HashMap::new(), - next_mark: Mark(1), } } @@ -81,8 +93,8 @@ impl HygieneData { } } -pub fn reset_hygiene_data() { - HygieneData::with(|data| *data = HygieneData::new()) +pub fn clear_markings() { + HygieneData::with(|data| data.markings = HashMap::new()); } impl SyntaxContext { @@ -113,6 +125,10 @@ impl SyntaxContext { }) }) } + + pub fn outer(self) -> Mark { + HygieneData::with(|data| data.syntax_contexts[self.0 as usize].outer_mark) + } } impl fmt::Debug for SyntaxContext { @@ -120,3 +136,67 @@ impl fmt::Debug for SyntaxContext { write!(f, "#{}", self.0) } } + 
+/// Extra information for tracking spans of macro and syntax sugar expansion +#[derive(Clone, Hash, Debug)] +pub struct ExpnInfo { + /// The location of the actual macro invocation or syntax sugar , e.g. + /// `let x = foo!();` or `if let Some(y) = x {}` + /// + /// This may recursively refer to other macro invocations, e.g. if + /// `foo!()` invoked `bar!()` internally, and there was an + /// expression inside `bar!`; the call_site of the expression in + /// the expansion would point to the `bar!` invocation; that + /// call_site span would have its own ExpnInfo, with the call_site + /// pointing to the `foo!` invocation. + pub call_site: Span, + /// Information about the expansion. + pub callee: NameAndSpan +} + +#[derive(Clone, Hash, Debug)] +pub struct NameAndSpan { + /// The format with which the macro was invoked. + pub format: ExpnFormat, + /// Whether the macro is allowed to use #[unstable]/feature-gated + /// features internally without forcing the whole crate to opt-in + /// to them. + pub allow_internal_unstable: bool, + /// The span of the macro definition itself. The macro may not + /// have a sensible definition span (e.g. something defined + /// completely inside libsyntax) in which case this is None. + pub span: Option +} + +impl NameAndSpan { + pub fn name(&self) -> Symbol { + match self.format { + ExpnFormat::MacroAttribute(s) | + ExpnFormat::MacroBang(s) | + ExpnFormat::CompilerDesugaring(s) => s, + } + } +} + +/// The source of expansion. +#[derive(Clone, Hash, Debug, PartialEq, Eq)] +pub enum ExpnFormat { + /// e.g. #[derive(...)] + MacroAttribute(Symbol), + /// e.g. `format!()` + MacroBang(Symbol), + /// Desugaring done by the compiler during HIR lowering. 
+ CompilerDesugaring(Symbol) +} + +impl Encodable for SyntaxContext { + fn encode(&self, _: &mut E) -> Result<(), E::Error> { + Ok(()) // FIXME(jseyfried) intercrate hygiene + } +} + +impl Decodable for SyntaxContext { + fn decode(_: &mut D) -> Result { + Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene + } +} diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 1b62d62348b..9b45e364ecf 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -25,6 +25,7 @@ #![feature(const_fn)] #![feature(custom_attribute)] +#![feature(optin_builtin_traits)] #![allow(unused_attributes)] #![feature(rustc_private)] #![feature(staged_api)] @@ -43,6 +44,9 @@ extern crate serialize; extern crate serialize as rustc_serialize; // used by deriving pub mod hygiene; +pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan}; + +pub mod symbol; pub type FileName = String; @@ -60,7 +64,7 @@ pub struct Span { pub hi: BytePos, /// Information about where the macro came from, if this piece of /// code was created by a macro expansion. - pub expn_id: ExpnId + pub ctxt: SyntaxContext, } /// A collection of spans. Spans have two orthogonal attributes: @@ -79,7 +83,7 @@ impl Span { /// Returns a new span representing just the end-point of this span pub fn end_point(self) -> Span { let lo = cmp::max(self.hi.0 - 1, self.lo.0); - Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} + Span { lo: BytePos(lo), hi: self.hi, ctxt: self.ctxt } } /// Returns `self` if `self` is not the dummy span, and `other` otherwise. @@ -107,6 +111,69 @@ impl Span { None } } + + /// Return the source span - this is either the supplied span, or the span for + /// the macro callsite that expanded to it. + pub fn source_callsite(self) -> Span { + self.ctxt.outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) + } + + /// Return the source callee. 
+ /// + /// Returns None if the supplied span has no expansion trace, + /// else returns the NameAndSpan for the macro definition + /// corresponding to the source callsite. + pub fn source_callee(self) -> Option { + fn source_callee(info: ExpnInfo) -> NameAndSpan { + match info.call_site.ctxt.outer().expn_info() { + Some(info) => source_callee(info), + None => info.callee, + } + } + self.ctxt.outer().expn_info().map(source_callee) + } + + /// Check if a span is "internal" to a macro in which #[unstable] + /// items can be used (that is, a macro marked with + /// `#[allow_internal_unstable]`). + pub fn allows_unstable(&self) -> bool { + match self.ctxt.outer().expn_info() { + Some(info) => info.callee.allow_internal_unstable, + None => false, + } + } + + pub fn macro_backtrace(mut self) -> Vec { + let mut prev_span = DUMMY_SP; + let mut result = vec![]; + loop { + let info = match self.ctxt.outer().expn_info() { + Some(info) => info, + None => break, + }; + + let (pre, post) = match info.callee.format { + ExpnFormat::MacroAttribute(..) => ("#[", "]"), + ExpnFormat::MacroBang(..) => ("", "!"), + ExpnFormat::CompilerDesugaring(..) 
=> ("desugaring of `", "`"), + }; + let macro_decl_name = format!("{}{}{}", pre, info.callee.name(), post); + let def_site_span = info.callee.span; + + // Don't print recursive invocations + if !info.call_site.source_equal(&prev_span) { + result.push(MacroBacktrace { + call_site: info.call_site, + macro_decl_name: macro_decl_name, + def_site_span: def_site_span, + }); + } + + prev_span = self; + self = info.call_site; + } + result + } } #[derive(Clone, Debug)] @@ -147,8 +214,8 @@ impl serialize::UseSpecializedDecodable for Span { } fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}", - span.lo, span.hi, span.expn_id) + write!(f, "Span {{ lo: {:?}, hi: {:?}, ctxt: {:?} }}", + span.lo, span.hi, span.ctxt) } impl fmt::Debug for Span { @@ -157,12 +224,7 @@ impl fmt::Debug for Span { } } -pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; - -// Generic span to be used for code originating from the command line -pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0), - hi: BytePos(0), - expn_id: COMMAND_LINE_EXPN }; +pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION }; impl MultiSpan { pub fn new() -> MultiSpan { @@ -256,22 +318,7 @@ impl From for MultiSpan { } } -#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy, Ord, PartialOrd)] -pub struct ExpnId(pub u32); - -pub const NO_EXPANSION: ExpnId = ExpnId(!0); -// For code appearing from the command line -pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); - -impl ExpnId { - pub fn from_u32(id: u32) -> ExpnId { - ExpnId(id) - } - - pub fn into_u32(self) -> u32 { - self.0 - } -} +pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty(); /// Identifies an offset of a multi-byte character in a FileMap #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)] @@ -651,7 +698,7 @@ thread_local!(pub static SPAN_DEBUG: Cell fmt:: 
/* assuming that we're not in macro expansion */ pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span {lo: lo, hi: hi, expn_id: NO_EXPANSION} + Span {lo: lo, hi: hi, ctxt: NO_EXPANSION} } pub struct MacroBacktrace { diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs new file mode 100644 index 00000000000..b866652c49f --- /dev/null +++ b/src/libsyntax_pos/symbol.rs @@ -0,0 +1,389 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An "interner" is a data structure that associates values with usize tags and +//! allows bidirectional lookup; i.e. given a value, one can easily find the +//! type, and vice versa. + +use hygiene::SyntaxContext; + +use serialize::{Decodable, Decoder, Encodable, Encoder}; +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt; + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Ident { + pub name: Symbol, + pub ctxt: SyntaxContext, +} + +impl Ident { + pub const fn with_empty_ctxt(name: Symbol) -> Ident { + Ident { name: name, ctxt: SyntaxContext::empty() } + } + + /// Maps a string to an identifier with an empty syntax context. 
+ pub fn from_str(string: &str) -> Ident { + Ident::with_empty_ctxt(Symbol::intern(string)) + } + + pub fn unhygienize(self) -> Ident { + Ident { name: self.name, ctxt: SyntaxContext::empty() } + } +} + +impl fmt::Debug for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}{:?}", self.name, self.ctxt) + } +} + +impl fmt::Display for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.name, f) + } +} + +impl Encodable for Ident { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.name.encode(s) + } +} + +impl Decodable for Ident { + fn decode(d: &mut D) -> Result { + Ok(Ident::with_empty_ctxt(Symbol::decode(d)?)) + } +} + +/// A symbol is an interned or gensymed string. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Symbol(u32); + +// The interner in thread-local, so `Symbol` shouldn't move between threads. +impl !Send for Symbol { } + +impl Symbol { + /// Maps a string to its interned representation. + pub fn intern(string: &str) -> Self { + with_interner(|interner| interner.intern(string)) + } + + /// gensym's a new usize, using the current interner. 
+ pub fn gensym(string: &str) -> Self { + with_interner(|interner| interner.gensym(string)) + } + + pub fn as_str(self) -> InternedString { + with_interner(|interner| unsafe { + InternedString { + string: ::std::mem::transmute::<&str, &str>(interner.get(self)) + } + }) + } + + pub fn as_u32(self) -> u32 { + self.0 + } +} + +impl fmt::Debug for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}({})", self, self.0) + } +} + +impl fmt::Display for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.as_str(), f) + } +} + +impl Encodable for Symbol { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(&self.as_str()) + } +} + +impl Decodable for Symbol { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?)) + } +} + +impl> PartialEq for Symbol { + fn eq(&self, other: &T) -> bool { + self.as_str() == other.deref() + } +} + +#[derive(Default)] +pub struct Interner { + names: HashMap, Symbol>, + strings: Vec>, +} + +impl Interner { + pub fn new() -> Self { + Interner::default() + } + + fn prefill(init: &[&str]) -> Self { + let mut this = Interner::new(); + for &string in init { + this.intern(string); + } + this + } + + pub fn intern(&mut self, string: &str) -> Symbol { + if let Some(&name) = self.names.get(string) { + return name; + } + + let name = Symbol(self.strings.len() as u32); + let string = string.to_string().into_boxed_str(); + self.strings.push(string.clone()); + self.names.insert(string, name); + name + } + + fn gensym(&mut self, string: &str) -> Symbol { + let gensym = Symbol(self.strings.len() as u32); + // leave out of `names` to avoid colliding + self.strings.push(string.to_string().into_boxed_str()); + gensym + } + + pub fn get(&self, name: Symbol) -> &str { + &self.strings[name.0 as usize] + } +} + +// In this macro, there is the requirement that the name (the number) must be monotonically +// increasing by one in the special identifiers, starting at 
0; the same holds for the keywords, +// except starting from the next number instead of zero. +macro_rules! declare_keywords {( + $( ($index: expr, $konst: ident, $string: expr) )* +) => { + pub mod keywords { + use super::{Symbol, Ident}; + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: Ident, + } + impl Keyword { + #[inline] pub fn ident(self) -> Ident { self.ident } + #[inline] pub fn name(self) -> Symbol { self.ident.name } + } + $( + #[allow(non_upper_case_globals)] + pub const $konst: Keyword = Keyword { + ident: Ident::with_empty_ctxt(super::Symbol($index)) + }; + )* + } + + impl Interner { + fn fresh() -> Self { + Interner::prefill(&[$($string,)*]) + } + } +}} + +// NB: leaving holes in the ident table is bad! a different ident will get +// interned with the id from the hole, but it will be between the min and max +// of the reserved words, and thus tagged as "reserved". +// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, +// this should be rarely necessary though if the keywords are kept in alphabetic order. +declare_keywords! { + // Invalid identifier + (0, Invalid, "") + + // Strict keywords used in the language. 
+ (1, As, "as") + (2, Box, "box") + (3, Break, "break") + (4, Const, "const") + (5, Continue, "continue") + (6, Crate, "crate") + (7, Else, "else") + (8, Enum, "enum") + (9, Extern, "extern") + (10, False, "false") + (11, Fn, "fn") + (12, For, "for") + (13, If, "if") + (14, Impl, "impl") + (15, In, "in") + (16, Let, "let") + (17, Loop, "loop") + (18, Match, "match") + (19, Mod, "mod") + (20, Move, "move") + (21, Mut, "mut") + (22, Pub, "pub") + (23, Ref, "ref") + (24, Return, "return") + (25, SelfValue, "self") + (26, SelfType, "Self") + (27, Static, "static") + (28, Struct, "struct") + (29, Super, "super") + (30, Trait, "trait") + (31, True, "true") + (32, Type, "type") + (33, Unsafe, "unsafe") + (34, Use, "use") + (35, Where, "where") + (36, While, "while") + + // Keywords reserved for future use. + (37, Abstract, "abstract") + (38, Alignof, "alignof") + (39, Become, "become") + (40, Do, "do") + (41, Final, "final") + (42, Macro, "macro") + (43, Offsetof, "offsetof") + (44, Override, "override") + (45, Priv, "priv") + (46, Proc, "proc") + (47, Pure, "pure") + (48, Sizeof, "sizeof") + (49, Typeof, "typeof") + (50, Unsized, "unsized") + (51, Virtual, "virtual") + (52, Yield, "yield") + + // Weak keywords, have special meaning only in specific contexts. + (53, Default, "default") + (54, StaticLifetime, "'static") + (55, Union, "union") + (56, Catch, "catch") + + // A virtual keyword that resolves to the crate root when used in a lexical scope. + (57, CrateRoot, "{{root}}") +} + +// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. +fn with_interner T>(f: F) -> T { + thread_local!(static INTERNER: RefCell = { + RefCell::new(Interner::fresh()) + }); + INTERNER.with(|interner| f(&mut *interner.borrow_mut())) +} + +/// Represents a string stored in the thread-local interner. Because the +/// interner lives for the life of the thread, this can be safely treated as an +/// immortal string, as long as it never crosses between threads. 
+/// +/// FIXME(pcwalton): You must be careful about what you do in the destructors +/// of objects stored in TLS, because they may run after the interner is +/// destroyed. In particular, they must not access string contents. This can +/// be fixed in the future by just leaking all strings until thread death +/// somehow. +#[derive(Clone, Hash, PartialOrd, Eq, Ord)] +pub struct InternedString { + string: &'static str, +} + +impl ::std::convert::AsRef for InternedString where str: ::std::convert::AsRef { + fn as_ref(&self) -> &U { + self.string.as_ref() + } +} + +impl> ::std::cmp::PartialEq for InternedString { + fn eq(&self, other: &T) -> bool { + self.string == other.deref() + } +} + +impl ::std::cmp::PartialEq for str { + fn eq(&self, other: &InternedString) -> bool { + self == other.string + } +} + +impl<'a> ::std::cmp::PartialEq for &'a str { + fn eq(&self, other: &InternedString) -> bool { + *self == other.string + } +} + +impl ::std::cmp::PartialEq for String { + fn eq(&self, other: &InternedString) -> bool { + self == other.string + } +} + +impl<'a> ::std::cmp::PartialEq for &'a String { + fn eq(&self, other: &InternedString) -> bool { + *self == other.string + } +} + +impl !Send for InternedString { } + +impl ::std::ops::Deref for InternedString { + type Target = str; + fn deref(&self) -> &str { self.string } +} + +impl fmt::Debug for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(self.string, f) + } +} + +impl fmt::Display for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self.string, f) + } +} + +impl Decodable for InternedString { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?).as_str()) + } +} + +impl Encodable for InternedString { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(self.string) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn interner_tests() { + let mut i: Interner = Interner::new(); 
+ // first one is zero: + assert_eq!(i.intern("dog"), Symbol(0)); + // re-use gets the same entry: + assert_eq!(i.intern ("dog"), Symbol(0)); + // different string gets a different #: + assert_eq!(i.intern("cat"), Symbol(1)); + assert_eq!(i.intern("cat"), Symbol(1)); + // dog is still at zero + assert_eq!(i.intern("dog"), Symbol(0)); + // gensym gets 3 + assert_eq!(i.gensym("zebra"), Symbol(2)); + // gensym of same string gets new number : + assert_eq!(i.gensym("zebra"), Symbol(3)); + // gensym of *existing* string gets new number: + assert_eq!(i.gensym("dog"), Symbol(4)); + } +} diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index bd25561065b..272bf1150ca 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -27,14 +27,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23"); diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index d692bb519c1..5518ab47c2b 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -30,14 +30,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; println!("{}", pprust::expr_to_string(&*quote_expr!(&cx, 23))); diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index b4ed57192cc..4a8246ec429 
100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -26,14 +26,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; macro_rules! check { -- cgit 1.4.1-3-g733a5 From f08d5ad4c59ca5fc1c961a94c53807d70959c375 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 15 Mar 2017 00:22:48 +0000 Subject: Refactor how spans are combined in the parser. --- src/librustc/middle/resolve_lifetime.rs | 5 +- src/librustc_metadata/cstore_impl.rs | 4 +- src/librustc_metadata/decoder.rs | 6 +- src/librustc_save_analysis/lib.rs | 2 +- src/librustc_save_analysis/span_utils.rs | 4 +- src/libsyntax/ast.rs | 10 +- src/libsyntax/attr.rs | 15 +- src/libsyntax/codemap.rs | 4 - src/libsyntax/ext/tt/macro_parser.rs | 9 +- src/libsyntax/parse/attr.rs | 29 +- src/libsyntax/parse/lexer/mod.rs | 42 +- src/libsyntax/parse/lexer/unicode_chars.rs | 4 +- src/libsyntax/parse/mod.rs | 6 +- src/libsyntax/parse/parser.rs | 617 ++++++++++----------- src/libsyntax_ext/format.rs | 15 +- src/libsyntax_pos/lib.rs | 16 +- src/test/compile-fail/imports/macro-paths.rs | 2 - src/test/compile-fail/imports/macros.rs | 2 - .../compile-fail/imports/shadow_builtin_macros.rs | 1 - src/test/compile-fail/issue-25385.rs | 1 - src/test/run-pass/syntax-extension-source-utils.rs | 2 +- .../ui/macros/macro_path_as_generic_bound.stderr | 5 +- 22 files changed, 363 insertions(+), 438 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 5094e28475b..8037570d24a 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -29,7 +29,7 @@ use syntax::ast; use 
syntax::attr; use syntax::ptr::P; use syntax::symbol::keywords; -use syntax_pos::{mk_sp, Span}; +use syntax_pos::Span; use errors::DiagnosticBuilder; use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap}; use rustc_back::slice; @@ -1468,8 +1468,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { self.resolve_lifetime_ref(bound); } else { self.insert_lifetime(bound, Region::Static); - let full_span = mk_sp(lifetime_i.lifetime.span.lo, bound.span.hi); - self.sess.struct_span_warn(full_span, + self.sess.struct_span_warn(lifetime_i.lifetime.span.to(bound.span), &format!("unnecessary lifetime parameter `{}`", lifetime_i.lifetime.name)) .help(&format!("you can use the `'static` lifetime directly, in place \ of `{}`", lifetime_i.lifetime.name)) diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 3e9b6a6226a..41a2e8a8d55 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -36,7 +36,7 @@ use syntax::ast; use syntax::attr; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; -use syntax_pos::{mk_sp, Span}; +use syntax_pos::{Span, NO_EXPANSION}; use rustc::hir::svh::Svh; use rustc_back::target::Target; use rustc::hir; @@ -395,7 +395,7 @@ impl CrateStore for cstore::CStore { let source_name = format!("<{} macros>", name); let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body); - let local_span = mk_sp(filemap.start_pos, filemap.end_pos); + let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; let body = filemap_to_stream(&sess.parse_sess, filemap); // Mark the attrs as used diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3de1e3442c6..43e076e799b 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -39,7 +39,7 @@ use syntax::attr; use syntax::ast; use syntax::codemap; use syntax::ext::base::MacroKind; -use syntax_pos::{self, Span, 
BytePos, Pos, DUMMY_SP}; +use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION}; pub struct DecodeContext<'a, 'tcx: 'a> { opaque: opaque::Decoder<'a>, @@ -243,7 +243,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let sess = if let Some(sess) = self.sess { sess } else { - return Ok(syntax_pos::mk_sp(lo, hi)); + return Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }); }; let (lo, hi) = if lo > hi { @@ -290,7 +290,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos; let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; - Ok(syntax_pos::mk_sp(lo, hi)) + Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index fd6803e087a..1de9fbc8e49 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -742,7 +742,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let ident_start = text.find(&name).expect("Name not in signature?"); let ident_end = ident_start + name.len(); Signature { - span: mk_sp(item.span.lo, item.span.lo + BytePos(text.len() as u32)), + span: Span { hi: item.span.lo + BytePos(text.len() as u32), ..item.span }, text: text, ident_start: ident_start, ident_end: ident_end, diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index c19f805a285..af3efb48090 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> { continue; } if let TokenTree::Token(_, token::Semi) = tok { - return self.snippet(mk_sp(first_span.lo, prev.span().hi)); + return self.snippet(first_span.to(prev.span())); } else if let TokenTree::Delimited(_, ref d) = tok { if d.delim == token::Brace { - return self.snippet(mk_sp(first_span.lo, prev.span().hi)); + return 
self.snippet(first_span.to(prev.span())); } } prev = tok; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index a4bebd311de..9eb86aa006d 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -17,7 +17,7 @@ pub use self::PathParameters::*; pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; -use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; @@ -1433,7 +1433,7 @@ impl Arg { TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::ImplicitSelf => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } - _ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi), + _ => Some(respan(self.pat.span.to(self.ty.span), SelfKind::Explicit(self.ty.clone(), mutbl))), } } @@ -1450,7 +1450,7 @@ impl Arg { } pub fn from_self(eself: ExplicitSelf, eself_ident: SpannedIdent) -> Arg { - let span = mk_sp(eself.span.lo, eself_ident.span.hi); + let span = eself.span.to(eself_ident.span); let infer_ty = P(Ty { id: DUMMY_NODE_ID, node: TyKind::ImplicitSelf, @@ -1687,11 +1687,11 @@ pub struct PolyTraitRef { } impl PolyTraitRef { - pub fn new(lifetimes: Vec, path: Path, lo: BytePos, hi: BytePos) -> Self { + pub fn new(lifetimes: Vec, path: Path, span: Span) -> Self { PolyTraitRef { bound_lifetimes: lifetimes, trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID }, - span: mk_sp(lo, hi), + span: span, } } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 2f1efd6ad00..5dcce2572af 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -18,8 +18,8 @@ use ast; use ast::{AttrId, Attribute, Name, Ident}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind}; -use codemap::{Spanned, spanned, dummy_spanned, mk_sp}; -use syntax_pos::{Span, BytePos, DUMMY_SP}; +use codemap::{Spanned, respan, dummy_spanned}; +use 
syntax_pos::{Span, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; @@ -447,17 +447,16 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute } } -pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos) - -> Attribute { +pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute { let style = doc_comment_style(&text.as_str()); - let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked)); + let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked)); Attribute { id: id, style: style, - path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")), - tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)), + path: ast::Path::from_ident(span, ast::Ident::from_str("doc")), + tokens: MetaItemKind::NameValue(lit).tokens(span), is_sugared_doc: true, - span: mk_sp(lo, hi), + span: span, } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index ba199eacb62..4d67390d442 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -49,10 +49,6 @@ pub struct Spanned { pub span: Span, } -pub fn spanned(lo: BytePos, hi: BytePos, t: T) -> Spanned { - respan(mk_sp(lo, hi), t) -} - pub fn respan(sp: Span, t: T) -> Spanned { Spanned {node: t, span: sp} } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index ed17f0f956c..9ee427eed35 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -79,7 +79,7 @@ pub use self::ParseResult::*; use self::TokenTreeOrTokenTreeVec::*; use ast::Ident; -use syntax_pos::{self, BytePos, mk_sp, Span}; +use syntax_pos::{self, BytePos, Span}; use codemap::Spanned; use errors::FatalError; use ext::tt::quoted::{self, TokenTree}; @@ -285,7 +285,7 @@ fn inner_parse_loop(sess: &ParseSess, eof_eis: &mut SmallVector>, bb_eis: &mut SmallVector>, token: &Token, 
- span: &syntax_pos::Span) + span: syntax_pos::Span) -> ParseResult<()> { while let Some(mut ei) = cur_eis.pop() { // When unzipped trees end, remove them @@ -323,8 +323,7 @@ fn inner_parse_loop(sess: &ParseSess, for idx in ei.match_lo..ei.match_hi { let sub = ei.matches[idx].clone(); new_pos.matches[idx] - .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo, - span.hi)))); + .push(Rc::new(MatchedSeq(sub, Span { lo: ei.sp_lo, ..span }))); } new_pos.match_cur = ei.match_hi; @@ -426,7 +425,7 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op assert!(next_eis.is_empty()); match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis, - &parser.token, &parser.span) { + &parser.token, parser.span) { Success(_) => {}, Failure(sp, tok) => return Failure(sp, tok), Error(sp, msg) => return Error(sp, msg), diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 53106214fa3..92cec462ffb 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -10,8 +10,7 @@ use attr; use ast; -use syntax_pos::{mk_sp, Span}; -use codemap::spanned; +use codemap::respan; use parse::common::SeqSep; use parse::PResult; use parse::token::{self, Nonterminal}; @@ -49,8 +48,7 @@ impl<'a> Parser<'a> { just_parsed_doc_comment = false; } token::DocComment(s) => { - let Span { lo, hi, .. 
} = self.span; - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); if attr.style != ast::AttrStyle::Outer { let mut err = self.fatal("expected outer doc comment"); err.note("inner doc comments like this (starting with \ @@ -94,7 +92,7 @@ impl<'a> Parser<'a> { self.token); let (span, path, tokens, mut style) = match self.token { token::Pound => { - let lo = self.span.lo; + let lo = self.span; self.bump(); if inner_parse_policy == InnerAttributeParsePolicy::Permitted { @@ -122,9 +120,9 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Bracket))?; let (path, tokens) = self.parse_path_and_tokens()?; self.expect(&token::CloseDelim(token::Bracket))?; - let hi = self.prev_span.hi; + let hi = self.prev_span; - (mk_sp(lo, hi), path, tokens, style) + (lo.to(hi), path, tokens, style) } _ => { let token_str = self.this_token_to_string(); @@ -189,8 +187,7 @@ impl<'a> Parser<'a> { } token::DocComment(s) => { // we need to get the position of this token before we bump. - let Span { lo, hi, .. 
} = self.span; - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); if attr.style == ast::AttrStyle::Inner { attrs.push(attr); self.bump(); @@ -238,11 +235,10 @@ impl<'a> Parser<'a> { return Ok(meta); } - let lo = self.span.lo; + let lo = self.span; let ident = self.parse_ident()?; let node = self.parse_meta_item_kind()?; - let hi = self.prev_span.hi; - Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + Ok(ast::MetaItem { name: ident.name, node: node, span: lo.to(self.prev_span) }) } pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { @@ -258,26 +254,25 @@ impl<'a> Parser<'a> { /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ; fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> { - let sp = self.span; - let lo = self.span.lo; + let lo = self.span; match self.parse_unsuffixed_lit() { Ok(lit) => { - return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::Literal(lit))) + return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::Literal(lit))) } Err(ref mut err) => self.diagnostic().cancel(err) } match self.parse_meta_item() { Ok(mi) => { - return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::MetaItem(mi))) + return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::MetaItem(mi))) } Err(ref mut err) => self.diagnostic().cancel(err) } let found = self.this_token_to_string(); let msg = format!("expected unsuffixed literal or identifier, found {}", found); - Err(self.diagnostic().struct_span_err(sp, &msg)) + Err(self.diagnostic().struct_span_err(lo, &msg)) } /// matches meta_seq = ( COMMASEP(meta_item_inner) ) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d48cf6911ed..920b2c401e2 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -9,7 +9,7 @@ // except according to those terms. 
use ast::{self, Ident}; -use syntax_pos::{self, BytePos, CharPos, Pos, Span}; +use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; use codemap::CodeMap; use errors::{FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; @@ -68,6 +68,10 @@ pub struct StringReader<'a> { open_braces: Vec<(token::DelimToken, Span)>, } +fn mk_sp(lo: BytePos, hi: BytePos) -> Span { + Span { lo: lo, hi: hi, ctxt: NO_EXPANSION } +} + impl<'a> StringReader<'a> { fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); @@ -225,12 +229,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m) + self.fatal_span(mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m) + self.err_span(mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -254,7 +258,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -278,7 +282,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -302,11 +306,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, 
self.filemap.end_pos); + self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = syntax_pos::mk_sp(start_bytepos, self.pos); + self.peek_span = mk_sp(start_bytepos, self.pos); }; } } @@ -489,7 +493,7 @@ impl<'a> StringReader<'a> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; - self.sess.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), msg); + self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg); } } @@ -532,13 +536,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }; } @@ -571,7 +575,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: syntax_pos::mk_sp(start, self.pos), + sp: mk_sp(start, self.pos), }); } } @@ -599,7 +603,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -661,7 +665,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }) } @@ -858,7 +862,7 @@ impl<'a> StringReader<'a> { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = syntax_pos::mk_sp(start, self.pos); + let span = mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -896,13 +900,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(syntax_pos::mk_sp(escaped_pos, pos), + 
err.span_help(mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(syntax_pos::mk_sp(escaped_pos, pos), + err.span_help(mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } @@ -1735,7 +1739,7 @@ mod tests { sp: Span { lo: BytePos(21), hi: BytePos(23), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }, }; assert_eq!(tok1, tok2); @@ -1749,7 +1753,7 @@ mod tests { sp: Span { lo: BytePos(24), hi: BytePos(28), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }, }; assert_eq!(tok3, tok4); @@ -1908,7 +1912,7 @@ mod tests { let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); assert_eq!(comment.tok, token::Comment); - assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); + assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::DocComment(Symbol::intern("/// test"))); diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index 6da3e5de75c..4df23da3c9c 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -11,7 +11,7 @@ // Characters and their corresponding confusables were collected from // http://www.unicode.org/Public/security/revision-06/confusables.txt -use syntax_pos::mk_sp as make_span; +use syntax_pos::{Span, NO_EXPANSION}; use errors::DiagnosticBuilder; use super::StringReader; @@ -234,7 +234,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>, .iter() .find(|&&(c, _, _)| c == ch) .map(|&(_, u_name, ascii_char)| { - let span = make_span(reader.pos, reader.next_pos); + let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION }; match ASCII_ARRAY.iter().find(|&&(c, _)| c == 
ascii_char) { Some(&(ascii_char, ascii_name)) => { let msg = diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index e188bcaf105..b5d0a46de49 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -12,7 +12,7 @@ use ast::{self, CrateConfig}; use codemap::CodeMap; -use syntax_pos::{self, Span, FileMap}; +use syntax_pos::{self, Span, FileMap, NO_EXPANSION}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; @@ -178,7 +178,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, ) -> Par let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { - parser.span = syntax_pos::mk_sp(end_pos, end_pos); + parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; } parser @@ -665,7 +665,7 @@ mod tests { // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { - Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION} + Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION} } fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index e9eb4fbcc91..b0611d75290 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -40,8 +40,8 @@ use ast::{Visibility, WhereClause}; use ast::{BinOpKind, UnOp}; use ast::RangeEnd; use {ast, attr}; -use codemap::{self, CodeMap, Spanned, spanned, respan}; -use syntax_pos::{self, Span, BytePos, mk_sp}; +use codemap::{self, CodeMap, Spanned, respan}; +use syntax_pos::{self, Span, BytePos}; use errors::{self, DiagnosticBuilder}; use parse::{self, classify, token}; use parse::common::SeqSep; @@ -108,13 +108,13 @@ macro_rules! 
maybe_whole_expr { $p.bump(); let span = $p.span; let kind = ExprKind::Path(None, (*path).clone()); - return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new())); + return Ok($p.mk_expr(span, kind, ThinVec::new())); } token::NtBlock(ref block) => { $p.bump(); let span = $p.span; let kind = ExprKind::Block((*block).clone()); - return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new())); + return Ok($p.mk_expr(span, kind, ThinVec::new())); } _ => {}, }; @@ -731,7 +731,7 @@ impl<'a> Parser<'a> { token::AndAnd => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::BinOp(token::And), lo, span.hi)) + Ok(self.bump_with(token::BinOp(token::And), Span { lo: lo, ..span })) } _ => self.unexpected() } @@ -765,7 +765,7 @@ impl<'a> Parser<'a> { token::BinOp(token::Shl) => { let span = self.span; let lo = span.lo + BytePos(1); - self.bump_with(token::Lt, lo, span.hi); + self.bump_with(token::Lt, Span { lo: lo, ..span }); true } _ => false, @@ -793,17 +793,17 @@ impl<'a> Parser<'a> { token::BinOp(token::Shr) => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Gt, lo, span.hi)) + Ok(self.bump_with(token::Gt, Span { lo: lo, ..span })) } token::BinOpEq(token::Shr) => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Ge, lo, span.hi)) + Ok(self.bump_with(token::Ge, Span { lo: lo, ..span })) } token::Ge => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Eq, lo, span.hi)) + Ok(self.bump_with(token::Eq, Span { lo: lo, ..span })) } _ => self.unexpected() } @@ -997,12 +997,12 @@ impl<'a> Parser<'a> { -> PResult<'a, Spanned>> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let lo = self.span.lo; + let lo = self.span; self.expect(bra)?; let result = self.parse_seq_to_before_end(ket, sep, f); - let hi = self.span.hi; + let hi = self.span; self.bump(); - Ok(spanned(lo, hi, result)) + Ok(respan(lo.to(hi), result)) } /// Advance the parser by one token @@ 
-1033,16 +1033,13 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. - pub fn bump_with(&mut self, - next: token::Token, - lo: BytePos, - hi: BytePos) { - self.prev_span = mk_sp(self.span.lo, lo); + pub fn bump_with(&mut self, next: token::Token, span: Span) { + self.prev_span = Span { hi: span.lo, ..self.span }; // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. self.prev_token_kind = PrevTokenKind::Other; - self.span = mk_sp(lo, hi); + self.span = span; self.token = next; self.expected_tokens.clear(); } @@ -1173,7 +1170,7 @@ impl<'a> Parser<'a> { pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> { maybe_whole!(self, NtTraitItem, |x| x); let mut attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let (name, node) = if self.eat_keyword(keywords::Type) { let TyParam {ident, bounds, default, ..} = self.parse_ty_param(vec![])?; @@ -1197,7 +1194,7 @@ impl<'a> Parser<'a> { } else if self.token.is_path_start() { // trait item macro. // code copied from parse_macro_use_or_failure... abstraction! - let lo = self.span.lo; + let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; @@ -1207,7 +1204,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)? 
} - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts }); (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac)) } else { let (constness, unsafety, abi) = match self.parse_fn_front_matter() { @@ -1277,7 +1274,7 @@ impl<'a> Parser<'a> { ident: name, attrs: attrs, node: node, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), }) } @@ -1298,8 +1295,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_no_plus()?)) } else { - let pos = self.span.lo; - Ok(FunctionRetTy::Default(mk_sp(pos, pos))) + Ok(FunctionRetTy::Default(Span { hi: self.span.lo, ..self.span })) } } @@ -1320,7 +1316,7 @@ impl<'a> Parser<'a> { fn parse_ty_common(&mut self, allow_plus: bool) -> PResult<'a, P> { maybe_whole!(self, NtTy, |x| x); - let lo = self.span.lo; + let lo = self.span; let node = if self.eat(&token::OpenDelim(token::Paren)) { // `(TYPE)` is a parenthesized type. // `(TYPE,)` is a tuple with a single field of type TYPE. @@ -1344,7 +1340,7 @@ impl<'a> Parser<'a> { TyKind::Path(None, ref path) if allow_plus && self.token == token::BinOp(token::Plus) => { self.bump(); // `+` - let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo, self.prev_span.hi); + let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(pt, TraitBoundModifier::None)]; bounds.append(&mut self.parse_ty_param_bounds()?); TyKind::TraitObject(bounds) @@ -1394,13 +1390,13 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // Macro invocation in type position let (_, tts) = self.expect_delimited_token_tree()?; - TyKind::Mac(spanned(lo, self.span.hi, Mac_ { path: path, tts: tts })) + TyKind::Mac(respan(lo.to(self.span), Mac_ { path: path, tts: tts })) } else { // Just a type path or bound list (trait object type) starting with a trait. 
// `Type` // `Trait1 + Trait2 + 'a` if allow_plus && self.eat(&token::BinOp(token::Plus)) { - let poly_trait = PolyTraitRef::new(Vec::new(), path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(Vec::new(), path, lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; bounds.append(&mut self.parse_ty_param_bounds()?); TyKind::TraitObject(bounds) @@ -1415,13 +1411,13 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let lo = self.span.lo; + let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; if self.token_is_bare_fn_keyword() { self.parse_ty_bare_fn(lifetime_defs)? } else { let path = self.parse_path(PathStyle::Type)?; - let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; if allow_plus && self.eat(&token::BinOp(token::Plus)) { bounds.append(&mut self.parse_ty_param_bounds()?) @@ -1440,7 +1436,7 @@ impl<'a> Parser<'a> { return Err(self.fatal(&msg)); }; - let span = mk_sp(lo, self.prev_span.hi); + let span = lo.to(self.prev_span); let ty = Ty { node: node, span: span, id: ast::DUMMY_NODE_ID }; // Try to recover from use of `+` with incorrect priority. 
@@ -1457,7 +1453,7 @@ impl<'a> Parser<'a> { self.bump(); // `+` let bounds = self.parse_ty_param_bounds()?; - let sum_span = mk_sp(ty.span.lo, self.prev_span.hi); + let sum_span = ty.span.to(self.prev_span); let mut err = struct_span_err!(self.sess.span_diagnostic, ty.span, E0178, "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty)); @@ -1577,7 +1573,7 @@ impl<'a> Parser<'a> { P(Ty { id: ast::DUMMY_NODE_ID, node: TyKind::Infer, - span: mk_sp(self.span.lo, self.span.hi), + span: self.span, }) }; Ok(Arg { @@ -1625,7 +1621,7 @@ impl<'a> Parser<'a> { /// Matches lit = true | false | token_lit pub fn parse_lit(&mut self) -> PResult<'a, Lit> { - let lo = self.span.lo; + let lo = self.span; let lit = if self.eat_keyword(keywords::True) { LitKind::Bool(true) } else if self.eat_keyword(keywords::False) { @@ -1634,22 +1630,22 @@ impl<'a> Parser<'a> { let lit = self.parse_lit_token()?; lit }; - Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.prev_span.hi) }) + Ok(codemap::Spanned { node: lit, span: lo.to(self.prev_span) }) } /// matches '-' lit | lit pub fn parse_pat_literal_maybe_minus(&mut self) -> PResult<'a, P> { - let minus_lo = self.span.lo; + let minus_lo = self.span; let minus_present = self.eat(&token::BinOp(token::Minus)); - let lo = self.span.lo; + let lo = self.span; let literal = P(self.parse_lit()?); - let hi = self.prev_span.hi; - let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), ThinVec::new()); + let hi = self.prev_span; + let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); if minus_present { - let minus_hi = self.prev_span.hi; + let minus_hi = self.prev_span; let unary = self.mk_unary(UnOp::Neg, expr); - Ok(self.mk_expr(minus_lo, minus_hi, unary, ThinVec::new())) + Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new())) } else { Ok(expr) } @@ -1726,7 +1722,7 @@ impl<'a> Parser<'a> { pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { maybe_whole!(self, NtPath, 
|x| x); - let lo = self.span.lo; + let lo = self.span; let is_global = self.eat(&token::ModSep); // Parse any number of segments and bound sets. A segment is an @@ -1750,7 +1746,7 @@ impl<'a> Parser<'a> { // Assemble the span. // FIXME(#39450) This is bogus if part of the path is macro generated. - let span = mk_sp(lo, self.prev_span.hi); + let span = lo.to(self.prev_span); // Assemble the result. Ok(ast::Path { @@ -1791,7 +1787,7 @@ impl<'a> Parser<'a> { bindings: bindings, }.into() } else if self.eat(&token::OpenDelim(token::Paren)) { - let lo = self.prev_span.lo; + let lo = self.prev_span; let inputs = self.parse_seq_to_end( &token::CloseDelim(token::Paren), @@ -1804,10 +1800,10 @@ impl<'a> Parser<'a> { None }; - let hi = self.prev_span.hi; + let hi = self.prev_span; Some(P(ast::PathParameters::Parenthesized(ast::ParenthesizedParameterData { - span: mk_sp(lo, hi), + span: lo.to(hi), inputs: inputs, output: output_ty, }))) @@ -1928,38 +1924,37 @@ impl<'a> Parser<'a> { /// Parse ident (COLON expr)? pub fn parse_field(&mut self) -> PResult<'a, Field> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let hi; // Check if a colon exists one ahead. This means we're parsing a fieldname. let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { let fieldname = self.parse_field_name()?; self.bump(); - hi = self.prev_span.hi; + hi = self.prev_span; (fieldname, self.parse_expr()?, false) } else { let fieldname = self.parse_ident()?; - hi = self.prev_span.hi; + hi = self.prev_span; // Mimic `x: x` for the `x` field shorthand. 
- let path = ast::Path::from_ident(mk_sp(lo, hi), fieldname); - (fieldname, self.mk_expr(lo, hi, ExprKind::Path(None, path), ThinVec::new()), true) + let path = ast::Path::from_ident(lo.to(hi), fieldname); + (fieldname, self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new()), true) }; Ok(ast::Field { - ident: spanned(lo, hi, fieldname), - span: mk_sp(lo, expr.span.hi), + ident: respan(lo.to(hi), fieldname), + span: lo.to(expr.span), expr: expr, is_shorthand: is_shorthand, attrs: attrs.into(), }) } - pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: ExprKind, attrs: ThinVec) - -> P { + pub fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, node: node, - span: mk_sp(lo, hi), + span: span, attrs: attrs.into(), }) } @@ -2013,12 +2008,11 @@ impl<'a> Parser<'a> { ExprKind::AssignOp(binop, lhs, rhs) } - pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, - m: Mac_, attrs: ThinVec) -> P { + pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, - node: ExprKind::Mac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}), - span: mk_sp(lo, hi), + node: ExprKind::Mac(codemap::Spanned {node: m, span: span}), + span: span, attrs: attrs, }) } @@ -2065,8 +2059,8 @@ impl<'a> Parser<'a> { // attributes by giving them a empty "already parsed" list. 
let mut attrs = ThinVec::new(); - let lo = self.span.lo; - let mut hi = self.span.hi; + let lo = self.span; + let mut hi = self.span; let ex: ExprKind; @@ -2095,18 +2089,19 @@ impl<'a> Parser<'a> { } self.bump(); - hi = self.prev_span.hi; + hi = self.prev_span; + let span = lo.to(hi); return if es.len() == 1 && !trailing_comma { - Ok(self.mk_expr(lo, hi, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) + Ok(self.mk_expr(span, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) } else { - Ok(self.mk_expr(lo, hi, ExprKind::Tup(es), attrs)) + Ok(self.mk_expr(span, ExprKind::Tup(es), attrs)) } }, token::OpenDelim(token::Brace) => { return self.parse_block_expr(lo, BlockCheckMode::Default, attrs); }, token::BinOp(token::Or) | token::OrOr => { - let lo = self.span.lo; + let lo = self.span; return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs); }, token::OpenDelim(token::Bracket) => { @@ -2144,34 +2139,34 @@ impl<'a> Parser<'a> { ex = ExprKind::Array(vec![first_expr]); } } - hi = self.prev_span.hi; + hi = self.prev_span; } _ => { if self.eat_lt() { let (qself, path) = self.parse_qualified_path(PathStyle::Expr)?; - hi = path.span.hi; - return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs)); + hi = path.span; + return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs)); } if self.eat_keyword(keywords::Move) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_lambda_expr(lo, CaptureBy::Value, attrs); } if self.eat_keyword(keywords::If) { return self.parse_if_expr(attrs); } if self.eat_keyword(keywords::For) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_for_expr(None, lo, attrs); } if self.eat_keyword(keywords::While) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_while_expr(None, lo, attrs); } if self.token.is_lifetime() { let label = Spanned { node: self.get_label(), span: self.span }; - let lo = self.span.lo; + let lo = self.span; 
self.bump(); self.expect(&token::Colon)?; if self.eat_keyword(keywords::While) { @@ -2186,7 +2181,7 @@ impl<'a> Parser<'a> { return Err(self.fatal("expected `while`, `for`, or `loop` after a label")) } if self.eat_keyword(keywords::Loop) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_loop_expr(None, lo, attrs); } if self.eat_keyword(keywords::Continue) { @@ -2200,8 +2195,8 @@ impl<'a> Parser<'a> { } else { ExprKind::Continue(None) }; - let hi = self.prev_span.hi; - return Ok(self.mk_expr(lo, hi, ex, attrs)); + let hi = self.prev_span; + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } if self.eat_keyword(keywords::Match) { return self.parse_match_expr(attrs); @@ -2215,13 +2210,13 @@ impl<'a> Parser<'a> { if self.is_catch_expr() { assert!(self.eat_keyword(keywords::Do)); assert!(self.eat_keyword(keywords::Catch)); - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_catch_expr(lo, attrs); } if self.eat_keyword(keywords::Return) { if self.token.can_begin_expr() { let e = self.parse_expr()?; - hi = e.span.hi; + hi = e.span; ex = ExprKind::Ret(Some(e)); } else { ex = ExprKind::Ret(None); @@ -2246,7 +2241,7 @@ impl<'a> Parser<'a> { None }; ex = ExprKind::Break(lt, e); - hi = self.prev_span.hi; + hi = self.prev_span; } else if self.token.is_keyword(keywords::Let) { // Catch this syntax error here, instead of in `check_strict_keywords`, so // that we can explicitly mention that let is not to be used as an expression @@ -2260,8 +2255,8 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // MACRO INVOCATION expression let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.prev_span.hi; - return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs)); + let hi = self.prev_span; + return Ok(self.mk_mac_expr(lo.to(hi), Mac_ { path: pth, tts: tts }, attrs)); } if self.check(&token::OpenDelim(token::Brace)) { // This is a struct literal, unless we're prohibited @@ -2274,12 +2269,12 @@ impl<'a> Parser<'a> 
{ } } - hi = pth.span.hi; + hi = pth.span; ex = ExprKind::Path(None, pth); } else { match self.parse_lit() { Ok(lit) => { - hi = lit.span.hi; + hi = lit.span; ex = ExprKind::Lit(P(lit)); } Err(mut err) => { @@ -2293,10 +2288,10 @@ impl<'a> Parser<'a> { } } - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } - fn parse_struct_expr(&mut self, lo: BytePos, pth: ast::Path, mut attrs: ThinVec) + fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec) -> PResult<'a, P> { self.bump(); let mut fields = Vec::new(); @@ -2338,9 +2333,9 @@ impl<'a> Parser<'a> { } } - let hi = self.span.hi; + let span = lo.to(self.span); self.expect(&token::CloseDelim(token::Brace))?; - return Ok(self.mk_expr(lo, hi, ExprKind::Struct(pth, fields, base), attrs)); + return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs)); } fn parse_or_use_outer_attributes(&mut self, @@ -2354,7 +2349,7 @@ impl<'a> Parser<'a> { } /// Parse a block or unsafe block - pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode, + pub fn parse_block_expr(&mut self, lo: Span, blk_mode: BlockCheckMode, outer_attrs: ThinVec) -> PResult<'a, P> { @@ -2364,7 +2359,7 @@ impl<'a> Parser<'a> { attrs.extend(self.parse_inner_attributes()?); let blk = self.parse_block_tail(lo, blk_mode)?; - return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), attrs)); + return Ok(self.mk_expr(blk.span, ExprKind::Block(blk), attrs)); } /// parse a.b or a(13) or a[4] or just a @@ -2375,12 +2370,12 @@ impl<'a> Parser<'a> { let b = self.parse_bottom_expr(); let (span, b) = self.interpolated_or_expr_span(b)?; - self.parse_dot_or_call_expr_with(b, span.lo, attrs) + self.parse_dot_or_call_expr_with(b, span, attrs) } pub fn parse_dot_or_call_expr_with(&mut self, e0: P, - lo: BytePos, + lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { // Stitch the list of outer attributes onto the return value. 
@@ -2411,11 +2406,7 @@ impl<'a> Parser<'a> { // Assuming we have just parsed `.foo` (i.e., a dot and an ident), continue // parsing into an expression. - fn parse_dot_suffix(&mut self, - ident: Ident, - ident_span: Span, - self_value: P, - lo: BytePos) + fn parse_dot_suffix(&mut self, ident: Ident, ident_span: Span, self_value: P, lo: Span) -> PResult<'a, P> { let (_, tys, bindings) = if self.eat(&token::ModSep) { self.expect_lt()?; @@ -2440,12 +2431,12 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - let hi = self.prev_span.hi; + let hi = self.prev_span; es.insert(0, self_value); - let id = spanned(ident_span.lo, ident_span.hi, ident); + let id = respan(ident_span.to(ident_span), ident); let nd = self.mk_method_call(id, tys, es); - self.mk_expr(lo, hi, nd, ThinVec::new()) + self.mk_expr(lo.to(hi), nd, ThinVec::new()) } // Field access. _ => { @@ -2456,32 +2447,30 @@ impl<'a> Parser<'a> { have type parameters"); } - let id = spanned(ident_span.lo, ident_span.hi, ident); + let id = respan(ident_span.to(ident_span), ident); let field = self.mk_field(self_value, id); - self.mk_expr(lo, ident_span.hi, field, ThinVec::new()) + self.mk_expr(lo.to(ident_span), field, ThinVec::new()) } }) } - fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: BytePos) -> PResult<'a, P> { + fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, P> { let mut e = e0; let mut hi; loop { // expr? 
while self.eat(&token::Question) { - let hi = self.prev_span.hi; - e = self.mk_expr(lo, hi, ExprKind::Try(e), ThinVec::new()); + let hi = self.prev_span; + e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new()); } // expr.f if self.eat(&token::Dot) { match self.token { token::Ident(i) => { - let dot_pos = self.prev_span.hi; - hi = self.span.hi; + let ident_span = self.span; self.bump(); - - e = self.parse_dot_suffix(i, mk_sp(dot_pos, hi), e, lo)?; + e = self.parse_dot_suffix(i, ident_span, e, lo)?; } token::Literal(token::Integer(n), suf) => { let sp = self.span; @@ -2489,16 +2478,16 @@ impl<'a> Parser<'a> { // A tuple index may not have a suffix self.expect_no_suffix(sp, "tuple index", suf); - let dot = self.prev_span.hi; - hi = self.span.hi; + let dot_span = self.prev_span; + hi = self.span; self.bump(); let index = n.as_str().parse::().ok(); match index { Some(n) => { - let id = spanned(dot, hi, n); + let id = respan(dot_span.to(hi), n); let field = self.mk_tup_field(e, id); - e = self.mk_expr(lo, hi, field, ThinVec::new()); + e = self.mk_expr(lo.to(hi), field, ThinVec::new()); } None => { let prev_span = self.prev_span; @@ -2541,10 +2530,8 @@ impl<'a> Parser<'a> { let actual = self.this_token_to_string(); self.span_err(self.span, &format!("unexpected token: `{}`", actual)); - let dot_pos = self.prev_span.hi; - e = self.parse_dot_suffix(keywords::Invalid.ident(), - mk_sp(dot_pos, dot_pos), - e, lo)?; + let dot_span = self.prev_span; + e = self.parse_dot_suffix(keywords::Invalid.ident(), dot_span, e, lo)?; } } continue; @@ -2559,10 +2546,10 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - hi = self.prev_span.hi; + hi = self.prev_span; let nd = self.mk_call(e, es); - e = self.mk_expr(lo, hi, nd, ThinVec::new()); + e = self.mk_expr(lo.to(hi), nd, ThinVec::new()); } // expr[...] 
@@ -2570,10 +2557,10 @@ impl<'a> Parser<'a> { token::OpenDelim(token::Bracket) => { self.bump(); let ix = self.parse_expr()?; - hi = self.span.hi; + hi = self.span; self.expect(&token::CloseDelim(token::Bracket))?; let index = self.mk_index(e, ix); - e = self.mk_expr(lo, hi, index, ThinVec::new()) + e = self.mk_expr(lo.to(hi), index, ThinVec::new()) } _ => return Ok(e) } @@ -2635,38 +2622,33 @@ impl<'a> Parser<'a> { already_parsed_attrs: Option>) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.span.lo; - let hi; + let lo = self.span; // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() - let ex = match self.token { + let (hi, ex) = match self.token { token::Not => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Not, e) + (span, self.mk_unary(UnOp::Not, e)) } token::BinOp(token::Minus) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Neg, e) + (span, self.mk_unary(UnOp::Neg, e)) } token::BinOp(token::Star) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Deref, e) + (span, self.mk_unary(UnOp::Deref, e)) } token::BinOp(token::And) | token::AndAnd => { self.expect_and()?; let m = self.parse_mutability(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - ExprKind::AddrOf(m, e) + (span, ExprKind::AddrOf(m, e)) } token::Ident(..) 
if self.token.is_keyword(keywords::In) => { self.bump(); @@ -2676,20 +2658,18 @@ impl<'a> Parser<'a> { )?; let blk = self.parse_block()?; let span = blk.span; - hi = span.hi; - let blk_expr = self.mk_expr(span.lo, hi, ExprKind::Block(blk), ThinVec::new()); - ExprKind::InPlace(place, blk_expr) + let blk_expr = self.mk_expr(span, ExprKind::Block(blk), ThinVec::new()); + (span, ExprKind::InPlace(place, blk_expr)) } token::Ident(..) if self.token.is_keyword(keywords::Box) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - ExprKind::Box(e) + (span, ExprKind::Box(e)) } _ => return self.parse_dot_or_call_expr(Some(attrs)) }; - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } /// Parse an associative expression @@ -2750,13 +2730,11 @@ impl<'a> Parser<'a> { // Special cases: if op == AssocOp::As { let rhs = self.parse_ty_no_plus()?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); - lhs = self.mk_expr(lo, hi, ExprKind::Cast(lhs, rhs), ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs.span), ExprKind::Cast(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::Colon { let rhs = self.parse_ty_no_plus()?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); - lhs = self.mk_expr(lo, hi, ExprKind::Type(lhs, rhs), ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs.span), ExprKind::Type(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::DotDot || op == AssocOp::DotDotDot { // If we didn’t have to handle `x..`/`x...`, it would be pretty easy to @@ -2782,7 +2760,7 @@ impl<'a> Parser<'a> { }; let r = try!(self.mk_range(Some(lhs), rhs, limits)); - lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new()); break } @@ -2809,7 +2787,7 @@ impl<'a> Parser<'a> { }), }?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); + let span = lhs_span.to(rhs.span); lhs = match op { AssocOp::Add | 
AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | @@ -2818,12 +2796,12 @@ impl<'a> Parser<'a> { AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); let binary = self.mk_binary(codemap::respan(cur_op_span, ast_op), lhs, rhs); - self.mk_expr(lo, hi, binary, ThinVec::new()) + self.mk_expr(span, binary, ThinVec::new()) } AssocOp::Assign => - self.mk_expr(lo, hi, ExprKind::Assign(lhs, rhs), ThinVec::new()), + self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), AssocOp::Inplace => - self.mk_expr(lo, hi, ExprKind::InPlace(lhs, rhs), ThinVec::new()), + self.mk_expr(span, ExprKind::InPlace(lhs, rhs), ThinVec::new()), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, @@ -2838,7 +2816,7 @@ impl<'a> Parser<'a> { token::Shr => BinOpKind::Shr, }; let aopexpr = self.mk_assign_op(codemap::respan(cur_op_span, aop), lhs, rhs); - self.mk_expr(lo, hi, aopexpr, ThinVec::new()) + self.mk_expr(span, aopexpr, ThinVec::new()) } AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotDot => { self.bug("As, Colon, DotDot or DotDotDot branch reached") @@ -2858,7 +2836,7 @@ impl<'a> Parser<'a> { match lhs.node { ExprKind::Binary(op, _, _) if op.node.is_comparison() => { // respan to include both operators - let op_span = mk_sp(op.span.lo, self.span.hi); + let op_span = op.span.to(self.span); let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); if op.node == BinOpKind::Lt && @@ -2881,8 +2859,8 @@ impl<'a> Parser<'a> { debug_assert!(self.token == token::DotDot || self.token == token::DotDotDot); let tok = self.token.clone(); let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.span.lo; - let mut hi = self.span.hi; + let lo = self.span; + let mut hi = self.span; self.bump(); let opt_end = if self.is_at_start_of_range_notation_rhs() { // RHS must be 
parsed with more associativity than the dots. @@ -2890,7 +2868,7 @@ impl<'a> Parser<'a> { Some(self.parse_assoc_expr_with(next_prec, LhsExpr::NotYetParsed) .map(|x|{ - hi = x.span.hi; + hi = x.span; x })?) } else { @@ -2905,7 +2883,7 @@ impl<'a> Parser<'a> { let r = try!(self.mk_range(None, opt_end, limits)); - Ok(self.mk_expr(lo, hi, r, attrs)) + Ok(self.mk_expr(lo.to(hi), r, attrs)) } fn is_at_start_of_range_notation_rhs(&self) -> bool { @@ -2925,23 +2903,23 @@ impl<'a> Parser<'a> { if self.check_keyword(keywords::Let) { return self.parse_if_let_expr(attrs); } - let lo = self.prev_span.lo; + let lo = self.prev_span; let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let thn = self.parse_block()?; let mut els: Option> = None; - let mut hi = thn.span.hi; + let mut hi = thn.span; if self.eat_keyword(keywords::Else) { let elexpr = self.parse_else_expr()?; - hi = elexpr.span.hi; + hi = elexpr.span; els = Some(elexpr); } - Ok(self.mk_expr(lo, hi, ExprKind::If(cond, thn, els), attrs)) + Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs)) } /// Parse an 'if let' expression ('if' token already eaten) pub fn parse_if_let_expr(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.prev_span.lo; + let lo = self.prev_span; self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; self.expect(&token::Eq)?; @@ -2949,36 +2927,35 @@ impl<'a> Parser<'a> { let thn = self.parse_block()?; let (hi, els) = if self.eat_keyword(keywords::Else) { let expr = self.parse_else_expr()?; - (expr.span.hi, Some(expr)) + (expr.span, Some(expr)) } else { - (thn.span.hi, None) + (thn.span, None) }; - Ok(self.mk_expr(lo, hi, ExprKind::IfLet(pat, expr, thn, els), attrs)) + Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pat, expr, thn, els), attrs)) } // `move |args| expr` pub fn parse_lambda_expr(&mut self, - lo: BytePos, + lo: Span, capture_clause: CaptureBy, attrs: ThinVec) -> PResult<'a, P> { let decl = self.parse_fn_block_decl()?; - let 
decl_hi = self.prev_span.hi; + let decl_hi = self.prev_span; let body = match decl.output { FunctionRetTy::Default(_) => self.parse_expr()?, _ => { // If an explicit return type is given, require a // block to appear (RFC 968). - let body_lo = self.span.lo; + let body_lo = self.span; self.parse_block_expr(body_lo, BlockCheckMode::Default, ThinVec::new())? } }; Ok(self.mk_expr( - lo, - body.span.hi, - ExprKind::Closure(capture_clause, decl, body, mk_sp(lo, decl_hi)), + lo.to(body.span), + ExprKind::Closure(capture_clause, decl, body, lo.to(decl_hi)), attrs)) } @@ -2988,13 +2965,13 @@ impl<'a> Parser<'a> { return self.parse_if_expr(ThinVec::new()); } else { let blk = self.parse_block()?; - return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), ThinVec::new())); + return Ok(self.mk_expr(blk.span, ExprKind::Block(blk), ThinVec::new())); } } /// Parse a 'for' .. 'in' expression ('for' token already eaten) pub fn parse_for_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { // Parse: `for in ` @@ -3004,16 +2981,13 @@ impl<'a> Parser<'a> { let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = self.prev_span.hi; - - Ok(self.mk_expr(span_lo, hi, - ExprKind::ForLoop(pat, expr, loop_block, opt_ident), - attrs)) + let hi = self.prev_span; + Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs)) } /// Parse a 'while' or 'while let' expression ('while' token already eaten) pub fn parse_while_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { if self.token.is_keyword(keywords::Let) { return self.parse_while_let_expr(opt_ident, span_lo, attrs); @@ -3021,14 +2995,13 @@ impl<'a> Parser<'a> { let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = 
body.span.hi; - return Ok(self.mk_expr(span_lo, hi, ExprKind::While(cond, body, opt_ident), - attrs)); + let span = span_lo.to(body.span); + return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs)); } /// Parse a 'while let' expression ('while' token already eaten) pub fn parse_while_let_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; @@ -3036,34 +3009,33 @@ impl<'a> Parser<'a> { let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - return Ok(self.mk_expr(span_lo, hi, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); + let span = span_lo.to(body.span); + return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); } // parse `loop {...}`, `loop` token already eaten pub fn parse_loop_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - Ok(self.mk_expr(span_lo, hi, ExprKind::Loop(body, opt_ident), attrs)) + let span = span_lo.to(body.span); + Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs)) } /// Parse a `do catch {...}` expression (`do catch` token already eaten) - pub fn parse_catch_expr(&mut self, span_lo: BytePos, mut attrs: ThinVec) + pub fn parse_catch_expr(&mut self, span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - Ok(self.mk_expr(span_lo, hi, ExprKind::Catch(body), attrs)) + Ok(self.mk_expr(span_lo.to(body.span), ExprKind::Catch(body), attrs)) } // `match` token already eaten fn parse_match_expr(&mut self, mut attrs: ThinVec) -> PResult<'a, P> { let 
match_span = self.prev_span; - let lo = self.prev_span.lo; + let lo = self.prev_span; let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { @@ -3082,17 +3054,17 @@ impl<'a> Parser<'a> { // Recover by skipping to the end of the block. e.emit(); self.recover_stmt(); - let hi = self.span.hi; + let span = lo.to(self.span); if self.token == token::CloseDelim(token::Brace) { self.bump(); } - return Ok(self.mk_expr(lo, hi, ExprKind::Match(discriminant, arms), attrs)); + return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs)); } } } - let hi = self.span.hi; + let hi = self.span; self.bump(); - return Ok(self.mk_expr(lo, hi, ExprKind::Match(discriminant, arms), attrs)); + return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); } pub fn parse_arm(&mut self) -> PResult<'a, Arm> { @@ -3266,7 +3238,7 @@ impl<'a> Parser<'a> { } let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let hi; if self.check(&token::DotDot) { @@ -3286,16 +3258,16 @@ impl<'a> Parser<'a> { let fieldname = self.parse_field_name()?; self.bump(); let pat = self.parse_pat()?; - hi = pat.span.hi; + hi = pat.span; (pat, fieldname, false) } else { // Parsing a pattern of the form "(box) (ref) (mut) fieldname" let is_box = self.eat_keyword(keywords::Box); - let boxed_span_lo = self.span.lo; + let boxed_span = self.span; let is_ref = self.eat_keyword(keywords::Ref); let is_mut = self.eat_keyword(keywords::Mut); let fieldname = self.parse_ident()?; - hi = self.prev_span.hi; + hi = self.prev_span; let bind_type = match (is_ref, is_mut) { (true, true) => BindingMode::ByRef(Mutability::Mutable), @@ -3307,14 +3279,14 @@ impl<'a> Parser<'a> { let fieldpat = P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Ident(bind_type, fieldpath, None), - span: mk_sp(boxed_span_lo, hi), + span: boxed_span.to(hi), }); let subpat = if is_box { 
P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Box(fieldpat), - span: mk_sp(lo, hi), + span: lo.to(hi), }) } else { fieldpat @@ -3322,7 +3294,7 @@ impl<'a> Parser<'a> { (subpat, fieldname, true) }; - fields.push(codemap::Spanned { span: mk_sp(lo, hi), + fields.push(codemap::Spanned { span: lo.to(hi), node: ast::FieldPat { ident: fieldname, pat: subpat, @@ -3336,7 +3308,7 @@ impl<'a> Parser<'a> { fn parse_pat_range_end(&mut self) -> PResult<'a, P> { if self.token.is_path_start() { - let lo = self.span.lo; + let lo = self.span; let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = @@ -3346,8 +3318,8 @@ impl<'a> Parser<'a> { // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) }; - let hi = self.prev_span.hi; - Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new())) + let hi = self.prev_span; + Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new())) } else { self.parse_pat_literal_maybe_minus() } @@ -3373,7 +3345,7 @@ impl<'a> Parser<'a> { pub fn parse_pat(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtPat, |x| x); - let lo = self.span.lo; + let lo = self.span; let pat; match self.token { token::Underscore => { @@ -3439,7 +3411,7 @@ impl<'a> Parser<'a> { // Parse macro invocation self.bump(); let (_, tts) = self.expect_delimited_token_tree()?; - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: path, tts: tts }); pat = PatKind::Mac(mac); } token::DotDotDot | token::DotDot => { @@ -3449,9 +3421,8 @@ impl<'a> Parser<'a> { _ => panic!("can only parse `..` or `...` for ranges (checked above)"), }; // Parse range - let hi = self.prev_span.hi; - let begin = - self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new()); + let span = lo.to(self.prev_span); + let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); self.bump(); let end = self.parse_pat_range_end()?; pat = 
PatKind::Range(begin, end, end_kind); @@ -3505,11 +3476,10 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; Ok(P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, - span: mk_sp(lo, hi), + span: lo.to(self.prev_span), })) } @@ -3545,7 +3515,7 @@ impl<'a> Parser<'a> { /// Parse a local variable declaration fn parse_local(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.span.lo; + let lo = self.span; let pat = self.parse_pat()?; let mut ty = None; @@ -3558,14 +3528,14 @@ impl<'a> Parser<'a> { pat: pat, init: init, id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), attrs: attrs, })) } /// Parse a structure field fn parse_name_and_ty(&mut self, - lo: BytePos, + lo: Span, vis: Visibility, attrs: Vec) -> PResult<'a, StructField> { @@ -3573,7 +3543,7 @@ impl<'a> Parser<'a> { self.expect(&token::Colon)?; let ty = self.parse_ty()?; Ok(StructField { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), ident: Some(name), vis: vis, id: ast::DUMMY_NODE_ID, @@ -3683,7 +3653,7 @@ impl<'a> Parser<'a> { fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility) -> PResult<'a, Option>> { - let lo = self.span.lo; + let lo = self.span; match self.token { token::Ident(ident) if ident.name == "macro_rules" => { if self.look_ahead(1, |t| *t == token::Not) { @@ -3706,9 +3676,9 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; + let span = lo.to(self.prev_span); let kind = ItemKind::MacroDef(tts); - Ok(Some(self.mk_item(lo, hi, id, kind, Visibility::Inherited, attrs.to_owned()))) + Ok(Some(self.mk_item(span, id, kind, Visibility::Inherited, attrs.to_owned()))) } fn parse_stmt_without_recovery(&mut self, @@ -3717,19 +3687,19 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtStmt, |x| Some(x)); let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; Ok(Some(if self.eat_keyword(keywords::Let) { Stmt { id: ast::DUMMY_NODE_ID, node: 
StmtKind::Local(self.parse_local(attrs.into())?), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), } } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited)? { Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Item(macro_def), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), } // Starts like a simple path, but not a union item. } else if self.token.is_path_start() && @@ -3741,8 +3711,8 @@ impl<'a> Parser<'a> { let expr = if self.check(&token::OpenDelim(token::Brace)) { self.parse_struct_expr(lo, pth, ThinVec::new())? } else { - let hi = self.prev_span.hi; - self.mk_expr(lo, hi, ExprKind::Path(None, pth), ThinVec::new()) + let hi = self.prev_span; + self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new()) }; let expr = self.with_res(Restrictions::RESTRICTION_STMT_EXPR, |this| { @@ -3753,7 +3723,7 @@ impl<'a> Parser<'a> { return Ok(Some(Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Expr(expr), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), })); } @@ -3784,7 +3754,7 @@ impl<'a> Parser<'a> { }; let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.prev_span.hi; + let hi = self.prev_span; let style = if delim == token::Brace { MacStmtStyle::Braces @@ -3793,7 +3763,7 @@ impl<'a> Parser<'a> { }; if id.name == keywords::Invalid.name() { - let mac = spanned(lo, hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(hi), Mac_ { path: pth, tts: tts }); let node = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof { StmtKind::Mac(P((mac, style, attrs.into()))) @@ -3813,14 +3783,14 @@ impl<'a> Parser<'a> { self.warn_missing_semicolon(); StmtKind::Mac(P((mac, style, attrs.into()))) } else { - let e = self.mk_mac_expr(lo, hi, mac.node, ThinVec::new()); + let e = self.mk_mac_expr(lo.to(hi), mac.node, ThinVec::new()); let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?; let e = self.parse_assoc_expr_with(0, 
LhsExpr::AlreadyParsed(e))?; StmtKind::Expr(e) }; Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), node: node, } } else { @@ -3835,13 +3805,14 @@ impl<'a> Parser<'a> { followed by a semicolon"); } } + let span = lo.to(hi); Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: span, node: StmtKind::Item({ self.mk_item( - lo, hi, id /*id is good here*/, - ItemKind::Mac(spanned(lo, hi, Mac_ { path: pth, tts: tts })), + span, id /*id is good here*/, + ItemKind::Mac(respan(span, Mac_ { path: pth, tts: tts })), Visibility::Inherited, attrs) }), @@ -3856,7 +3827,7 @@ impl<'a> Parser<'a> { match item { Some(i) => Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, i.span.hi), + span: lo.to(i.span), node: StmtKind::Item(i), }, None => { @@ -3887,7 +3858,7 @@ impl<'a> Parser<'a> { Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into()))?; Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, e.span.hi), + span: lo.to(e.span), node: StmtKind::Expr(e), } } @@ -3905,7 +3876,7 @@ impl<'a> Parser<'a> { pub fn parse_block(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtBlock, |x| x); - let lo = self.span.lo; + let lo = self.span; if !self.eat(&token::OpenDelim(token::Brace)) { let sp = self.span; @@ -3950,7 +3921,7 @@ impl<'a> Parser<'a> { fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec, P)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); - let lo = self.span.lo; + let lo = self.span; self.expect(&token::OpenDelim(token::Brace))?; Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, BlockCheckMode::Default)?)) @@ -3958,7 +3929,7 @@ impl<'a> Parser<'a> { /// Parse the rest of a block expression or function body /// Precondition: already parsed the '{'. 
- fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P> { + fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P> { let mut stmts = vec![]; while !self.eat(&token::CloseDelim(token::Brace)) { @@ -3976,7 +3947,7 @@ impl<'a> Parser<'a> { stmts: stmts, id: ast::DUMMY_NODE_ID, rules: s, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), })) } @@ -4042,10 +4013,10 @@ impl<'a> Parser<'a> { } bounds.push(RegionTyParamBound(self.expect_lifetime())); } else if self.check_keyword(keywords::For) || self.check_path() { - let lo = self.span.lo; + let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; let path = self.parse_path(PathStyle::Type)?; - let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let modifier = if question.is_some() { TraitBoundModifier::Maybe } else { @@ -4166,7 +4137,7 @@ impl<'a> Parser<'a> { pub fn parse_generics(&mut self) -> PResult<'a, ast::Generics> { maybe_whole!(self, NtGenerics, |x| x); - let span_lo = self.span.lo; + let span_lo = self.span; if self.eat_lt() { let (lifetime_defs, ty_params) = self.parse_generic_params()?; self.expect_gt()?; @@ -4177,7 +4148,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), }, - span: mk_sp(span_lo, self.prev_span.hi), + span: span_lo.to(self.prev_span), }) } else { Ok(ast::Generics::default()) @@ -4202,7 +4173,7 @@ impl<'a> Parser<'a> { } } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) { // Parse associated type binding. 
- let lo = self.span.lo; + let lo = self.span; let ident = self.parse_ident()?; self.bump(); let ty = self.parse_ty()?; @@ -4210,7 +4181,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, ident: ident, ty: ty, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), }); seen_binding = true; } else if self.check_type() { @@ -4267,7 +4238,7 @@ impl<'a> Parser<'a> { } loop { - let lo = self.span.lo; + let lo = self.span; if self.check_lifetime() && self.look_ahead(1, |t| t != &token::BinOp(token::Plus)) { let lifetime = self.expect_lifetime(); // Bounds starting with a colon are mandatory, but possibly empty. @@ -4275,7 +4246,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_lt_param_bounds(); where_clause.predicates.push(ast::WherePredicate::RegionPredicate( ast::WhereRegionPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), lifetime: lifetime, bounds: bounds, } @@ -4296,7 +4267,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_ty_param_bounds()?; where_clause.predicates.push(ast::WherePredicate::BoundPredicate( ast::WhereBoundPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), bound_lifetimes: lifetime_defs, bounded_ty: ty, bounds: bounds, @@ -4307,7 +4278,7 @@ impl<'a> Parser<'a> { let rhs_ty = self.parse_ty()?; where_clause.predicates.push(ast::WherePredicate::EqPredicate( ast::WhereEqPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), lhs_ty: ty, rhs_ty: rhs_ty, id: ast::DUMMY_NODE_ID, @@ -4404,7 +4375,7 @@ impl<'a> Parser<'a> { // Parse optional self parameter of a method. // Only a limited set of initial token sequences is considered self parameters, anything // else is parsed as a normal function parameter list, so some lookahead is required. 
- let eself_lo = self.span.lo; + let eself_lo = self.span; let (eself, eself_ident) = match self.token { token::BinOp(token::And) => { // &self @@ -4486,7 +4457,7 @@ impl<'a> Parser<'a> { _ => return Ok(None), }; - let eself = codemap::respan(mk_sp(eself_lo, self.prev_span.hi), eself); + let eself = codemap::respan(eself_lo.to(self.prev_span), eself); Ok(Some(Arg::from_self(eself, eself_ident))) } @@ -4558,8 +4529,7 @@ impl<'a> Parser<'a> { Ok((id, generics)) } - fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident, - node: ItemKind, vis: Visibility, + fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility, attrs: Vec) -> P { P(Item { ident: ident, @@ -4567,7 +4537,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, node: node, vis: vis, - span: mk_sp(lo, hi) + span: span, }) } @@ -4625,7 +4595,7 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtImplItem, |x| x); let mut attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness()?; let (name, node) = if self.eat_keyword(keywords::Type) { @@ -4651,7 +4621,7 @@ impl<'a> Parser<'a> { Ok(ImplItem { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), ident: name, vis: vis, defaultness: defaultness, @@ -4694,7 +4664,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; self.complain_if_pub_macro(&vis, prev_span); - let lo = self.span.lo; + let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; @@ -4704,7 +4674,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)? 
} - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts }); Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(mac))) } else { let (constness, unsafety, abi) = self.parse_fn_front_matter()?; @@ -4938,11 +4908,11 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| { let attrs = p.parse_outer_attributes()?; - let lo = p.span.lo; + let lo = p.span; let vis = p.parse_visibility(true)?; let ty = p.parse_ty()?; Ok(StructField { - span: mk_sp(lo, p.span.hi), + span: lo.to(p.span), vis: vis, ident: None, id: ast::DUMMY_NODE_ID, @@ -4956,7 +4926,7 @@ impl<'a> Parser<'a> { /// Parse a structure field declaration pub fn parse_single_struct_field(&mut self, - lo: BytePos, + lo: Span, vis: Visibility, attrs: Vec ) -> PResult<'a, StructField> { @@ -4978,7 +4948,7 @@ impl<'a> Parser<'a> { /// Parse an element of a struct definition fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let vis = self.parse_visibility(false)?; self.parse_single_struct_field(lo, vis, attrs) } @@ -5056,7 +5026,7 @@ impl<'a> Parser<'a> { } /// Given a termination token, parse all of the items in a module - fn parse_mod_items(&mut self, term: &token::Token, inner_lo: BytePos) -> PResult<'a, Mod> { + fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? 
{ items.push(item); @@ -5070,11 +5040,11 @@ impl<'a> Parser<'a> { let hi = if self.span == syntax_pos::DUMMY_SP { inner_lo } else { - self.prev_span.hi + self.prev_span }; Ok(ast::Mod { - inner: mk_sp(inner_lo, hi), + inner: inner_lo.to(hi), items: items }) } @@ -5137,7 +5107,7 @@ impl<'a> Parser<'a> { let old_directory = self.directory.clone(); self.push_directory(id, &outer_attrs); self.expect(&token::OpenDelim(token::Brace))?; - let mod_inner_lo = self.span.lo; + let mod_inner_lo = self.span; let attrs = self.parse_inner_attributes()?; let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; self.directory = old_directory; @@ -5280,7 +5250,7 @@ impl<'a> Parser<'a> { let mut p0 = new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp); p0.cfg_mods = self.cfg_mods; - let mod_inner_lo = p0.span.lo; + let mod_inner_lo = p0.span; let mod_attrs = p0.parse_inner_attributes()?; let m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; self.sess.included_mod_stack.borrow_mut().pop(); @@ -5288,42 +5258,42 @@ impl<'a> Parser<'a> { } /// Parse a function declaration from a foreign module - fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: BytePos, - attrs: Vec) -> PResult<'a, ForeignItem> { + fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec) + -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Fn)?; let (ident, mut generics) = self.parse_fn_header()?; let decl = self.parse_fn_decl(true)?; generics.where_clause = self.parse_where_clause()?; - let hi = self.span.hi; + let hi = self.span; self.expect(&token::Semi)?; Ok(ast::ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Fn(decl, generics), id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), vis: vis }) } /// Parse a static item from a foreign module - fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: BytePos, - attrs: Vec) -> PResult<'a, ForeignItem> { + fn 
parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec) + -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Static)?; let mutbl = self.eat_keyword(keywords::Mut); let ident = self.parse_ident()?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; - let hi = self.span.hi; + let hi = self.span; self.expect(&token::Semi)?; Ok(ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Static(ty, mutbl), id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), vis: vis }) } @@ -5335,7 +5305,7 @@ impl<'a> Parser<'a> { /// extern crate foo; /// extern crate bar as foo; fn parse_item_extern_crate(&mut self, - lo: BytePos, + lo: Span, visibility: Visibility, attrs: Vec) -> PResult<'a, P> { @@ -5349,8 +5319,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; let prev_span = self.prev_span; - Ok(self.mk_item(lo, - prev_span.hi, + Ok(self.mk_item(lo.to(prev_span), ident, ItemKind::ExternCrate(maybe_path), visibility, @@ -5368,7 +5337,7 @@ impl<'a> Parser<'a> { /// extern "C" {} /// extern {} fn parse_item_foreign_mod(&mut self, - lo: BytePos, + lo: Span, opt_abi: Option, visibility: Visibility, mut attrs: Vec) @@ -5390,12 +5359,8 @@ impl<'a> Parser<'a> { abi: abi, items: foreign_items }; - Ok(self.mk_item(lo, - prev_span.hi, - keywords::Invalid.ident(), - ItemKind::ForeignMod(m), - visibility, - attrs)) + let invalid = keywords::Invalid.ident(); + Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs)) } /// Parse type Foo = Bar; @@ -5416,7 +5381,7 @@ impl<'a> Parser<'a> { let mut any_disr = None; while self.token != token::CloseDelim(token::Brace) { let variant_attrs = self.parse_outer_attributes()?; - let vlo = self.span.lo; + let vlo = self.span; let struct_def; let mut disr_expr = None; @@ -5444,7 +5409,7 @@ impl<'a> Parser<'a> { data: struct_def, disr_expr: disr_expr, }; - variants.push(spanned(vlo, self.prev_span.hi, vr)); + variants.push(respan(vlo.to(self.prev_span), vr)); if 
!self.eat(&token::Comma) { break; } } @@ -5514,7 +5479,7 @@ impl<'a> Parser<'a> { Some(P(item)) }); - let lo = self.span.lo; + let lo = self.span; let visibility = self.parse_visibility(false)?; @@ -5524,12 +5489,8 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, - keywords::Invalid.ident(), - item_, - visibility, - attrs); + let invalid = keywords::Invalid.ident(); + let item = self.mk_item(lo.to(prev_span), invalid, item_, visibility, attrs); return Ok(Some(item)); } @@ -5549,8 +5510,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5572,8 +5532,7 @@ impl<'a> Parser<'a> { }; let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5597,8 +5556,7 @@ impl<'a> Parser<'a> { respan(const_span, Constness::Const), Abi::Rust)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5615,8 +5573,7 @@ impl<'a> Parser<'a> { } let (ident, item_, extra_attrs) = self.parse_item_const(None)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5632,8 +5589,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Unsafe)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5648,8 +5604,7 @@ impl<'a> Parser<'a> { self.expect_keyword(keywords::Impl)?; let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe)?; let 
prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5665,8 +5620,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), Abi::Rust)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5689,8 +5643,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5702,8 +5655,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5714,8 +5666,7 @@ impl<'a> Parser<'a> { // TYPE ITEM let (ident, item_, extra_attrs) = self.parse_item_type()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5726,8 +5677,7 @@ impl<'a> Parser<'a> { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5739,8 +5689,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Normal)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5751,8 +5700,7 @@ impl<'a> Parser<'a> { // IMPL ITEM let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5763,8 +5711,7 @@ 
impl<'a> Parser<'a> { // STRUCT ITEM let (ident, item_, extra_attrs) = self.parse_item_struct()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5776,8 +5723,7 @@ impl<'a> Parser<'a> { self.bump(); let (ident, item_, extra_attrs) = self.parse_item_union()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5794,7 +5740,7 @@ impl<'a> Parser<'a> { /// Parse a foreign item. fn parse_foreign_item(&mut self) -> PResult<'a, Option> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let visibility = self.parse_visibility(false)?; if self.check_keyword(keywords::Static) { @@ -5821,7 +5767,7 @@ impl<'a> Parser<'a> { attrs: Vec , macros_allowed: bool, attributes_allowed: bool, - lo: BytePos, + lo: Span, visibility: Visibility ) -> PResult<'a, Option>> { if macros_allowed && self.token.is_path_start() { @@ -5830,7 +5776,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; self.complain_if_pub_macro(&visibility, prev_span); - let mac_lo = self.span.lo; + let mac_lo = self.span; // item macro. 
let pth = self.parse_path(PathStyle::Mod)?; @@ -5856,9 +5802,9 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; - let mac = spanned(mac_lo, hi, Mac_ { path: pth, tts: tts }); - let item = self.mk_item(lo, hi, id, ItemKind::Mac(mac), visibility, attrs); + let hi = self.prev_span; + let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts: tts }); + let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs); return Ok(Some(item)); } @@ -5886,7 +5832,7 @@ impl<'a> Parser<'a> { self.parse_unspanned_seq(&token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), SeqSep::trailing_allowed(token::Comma), |this| { - let lo = this.span.lo; + let lo = this.span; let ident = if this.eat_keyword(keywords::SelfValue) { keywords::SelfValue.ident() } else { @@ -5898,8 +5844,7 @@ impl<'a> Parser<'a> { rename: rename, id: ast::DUMMY_NODE_ID }; - let hi = this.prev_span.hi; - Ok(spanned(lo, hi, node)) + Ok(respan(lo.to(this.prev_span), node)) }) } @@ -5917,21 +5862,21 @@ impl<'a> Parser<'a> { /// MOD_SEP? non_global_path MOD_SEP LBRACE item_seq RBRACE /// MOD_SEP? LBRACE item_seq RBRACE fn parse_view_path(&mut self) -> PResult<'a, P> { - let lo = self.span.lo; + let lo = self.span; if self.check(&token::OpenDelim(token::Brace)) || self.check(&token::BinOp(token::Star)) || self.is_import_coupler() { // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. self.eat(&token::ModSep); let prefix = ast::Path { segments: vec![PathSegment::crate_root()], - span: mk_sp(lo, self.span.hi), + span: lo.to(self.span), }; let view_path_kind = if self.eat(&token::BinOp(token::Star)) { ViewPathGlob(prefix) } else { ViewPathList(prefix, self.parse_path_list_items()?) 
}; - Ok(P(spanned(lo, self.span.hi, view_path_kind))) + Ok(P(respan(lo.to(self.span), view_path_kind))) } else { let prefix = self.parse_path(PathStyle::Mod)?.default_to_global(); if self.is_import_coupler() { @@ -5939,16 +5884,16 @@ impl<'a> Parser<'a> { self.bump(); if self.check(&token::BinOp(token::Star)) { self.bump(); - Ok(P(spanned(lo, self.span.hi, ViewPathGlob(prefix)))) + Ok(P(respan(lo.to(self.span), ViewPathGlob(prefix)))) } else { let items = self.parse_path_list_items()?; - Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) + Ok(P(respan(lo.to(self.span), ViewPathList(prefix, items)))) } } else { // `foo::bar` or `foo::bar as baz` let rename = self.parse_rename()?. unwrap_or(prefix.segments.last().unwrap().identifier); - Ok(P(spanned(lo, self.prev_span.hi, ViewPathSimple(rename, prefix)))) + Ok(P(respan(lo.to(self.prev_span), ViewPathSimple(rename, prefix)))) } } } @@ -5964,11 +5909,11 @@ impl<'a> Parser<'a> { /// Parses a source module as a crate. This is the main /// entry point for the parser. 
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { - let lo = self.span.lo; + let lo = self.span; Ok(ast::Crate { attrs: self.parse_inner_attributes()?, module: self.parse_mod_items(&token::Eof, lo)?, - span: mk_sp(lo, self.span.lo), + span: lo.to(self.span), }) } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index aeb5b1e0a53..6f5ab50b2fe 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -559,11 +559,7 @@ impl<'a, 'b> Context<'a, 'b> { let name = self.ecx.ident_of(&format!("__arg{}", i)); pats.push(self.ecx.pat_ident(DUMMY_SP, name)); for ref arg_ty in self.arg_unique_types[i].iter() { - locals.push(Context::format_arg(self.ecx, - self.macsp, - e.span, - arg_ty, - self.ecx.expr_ident(e.span, name))); + locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name)); } heads.push(self.ecx.expr_addr_of(e.span, e)); } @@ -576,11 +572,7 @@ impl<'a, 'b> Context<'a, 'b> { Exact(i) => spans_pos[i], _ => panic!("should never happen"), }; - counts.push(Context::format_arg(self.ecx, - self.macsp, - span, - &Count, - self.ecx.expr_ident(span, name))); + counts.push(Context::format_arg(self.ecx, self.macsp, span, &Count, name)); } // Now create a vector containing all the arguments @@ -643,9 +635,10 @@ impl<'a, 'b> Context<'a, 'b> { macsp: Span, mut sp: Span, ty: &ArgumentType, - arg: P) + arg: ast::Ident) -> P { sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); + let arg = ecx.expr_ident(sp, arg); let trait_ = match *ty { Placeholder(ref tyname) => { match &tyname[..] { diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 9b45e364ecf..947192a0a23 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -174,6 +174,15 @@ impl Span { } result } + + pub fn to(self, end: Span) -> Span { + // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. 
issue #23480) + if end.ctxt == SyntaxContext::empty() { + Span { lo: self.lo, ..end } + } else { + Span { hi: end.hi, ..self } + } + } } #[derive(Clone, Debug)] @@ -208,7 +217,7 @@ impl serialize::UseSpecializedDecodable for Span { d.read_struct("Span", 2, |d| { let lo = d.read_struct_field("lo", 0, Decodable::decode)?; let hi = d.read_struct_field("hi", 1, Decodable::decode)?; - Ok(mk_sp(lo, hi)) + Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) }) } } @@ -696,11 +705,6 @@ pub struct FileLines { thread_local!(pub static SPAN_DEBUG: Cell fmt::Result> = Cell::new(default_span_debug)); -/* assuming that we're not in macro expansion */ -pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span {lo: lo, hi: hi, ctxt: NO_EXPANSION} -} - pub struct MacroBacktrace { /// span where macro was applied to generate this code pub call_site: Span, diff --git a/src/test/compile-fail/imports/macro-paths.rs b/src/test/compile-fail/imports/macro-paths.rs index 48e7ca0eee4..7c19917acc4 100644 --- a/src/test/compile-fail/imports/macro-paths.rs +++ b/src/test/compile-fail/imports/macro-paths.rs @@ -25,7 +25,6 @@ fn f() { bar::m! { //~ ERROR ambiguous //~| NOTE macro-expanded items do not shadow when used in a macro invocation path mod bar { pub use two_macros::m; } //~ NOTE could refer to the name defined here - //~^^^ NOTE in this expansion } } @@ -37,6 +36,5 @@ fn g() { baz::m! { //~ ERROR ambiguous //~| NOTE macro-expanded items do not shadow when used in a macro invocation path mod baz { pub use two_macros::m; } //~ NOTE could refer to the name defined here - //~^^^ NOTE in this expansion } } diff --git a/src/test/compile-fail/imports/macros.rs b/src/test/compile-fail/imports/macros.rs index cfa7681dc22..06b0964a3b1 100644 --- a/src/test/compile-fail/imports/macros.rs +++ b/src/test/compile-fail/imports/macros.rs @@ -28,7 +28,6 @@ mod m2 { m! 
{ //~ ERROR ambiguous //~| NOTE macro-expanded macro imports do not shadow use foo::m; //~ NOTE could refer to the name imported here - //~^^^ NOTE in this expansion } } @@ -43,7 +42,6 @@ mod m3 { m! { //~ ERROR ambiguous //~| NOTE macro-expanded macro imports do not shadow use two_macros::n as m; //~ NOTE could refer to the name imported here - //~^^^ NOTE in this expansion } } } diff --git a/src/test/compile-fail/imports/shadow_builtin_macros.rs b/src/test/compile-fail/imports/shadow_builtin_macros.rs index 2b3ba1b4aa7..a7f1cf3c9d3 100644 --- a/src/test/compile-fail/imports/shadow_builtin_macros.rs +++ b/src/test/compile-fail/imports/shadow_builtin_macros.rs @@ -31,7 +31,6 @@ mod m2 { mod m3 { ::two_macros::m!(use foo::panic;); //~ NOTE `panic` could refer to the name imported here - //~| NOTE in this expansion fn f() { panic!(); } //~ ERROR ambiguous //~| NOTE `panic` is also a builtin macro //~| NOTE macro-expanded macro imports do not shadow diff --git a/src/test/compile-fail/issue-25385.rs b/src/test/compile-fail/issue-25385.rs index 51d7baaf3e9..4aacb6840e9 100644 --- a/src/test/compile-fail/issue-25385.rs +++ b/src/test/compile-fail/issue-25385.rs @@ -21,5 +21,4 @@ fn main() { foo!(1i32.foo()); //~^ ERROR no method named `foo` found for type `i32` in the current scope - //~^^ NOTE in this expansion of foo! } diff --git a/src/test/run-pass/syntax-extension-source-utils.rs b/src/test/run-pass/syntax-extension-source-utils.rs index 3b5f033d07b..25c7417f7eb 100644 --- a/src/test/run-pass/syntax-extension-source-utils.rs +++ b/src/test/run-pass/syntax-extension-source-utils.rs @@ -22,7 +22,7 @@ macro_rules! 
indirect_line { () => ( line!() ) } pub fn main() { assert_eq!(line!(), 24); - assert_eq!(column!(), 4); + assert_eq!(column!(), 15); assert_eq!(indirect_line!(), 26); assert!((file!().ends_with("syntax-extension-source-utils.rs"))); assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string()); diff --git a/src/test/ui/macros/macro_path_as_generic_bound.stderr b/src/test/ui/macros/macro_path_as_generic_bound.stderr index 96635032105..e4044f5aaf2 100644 --- a/src/test/ui/macros/macro_path_as_generic_bound.stderr +++ b/src/test/ui/macros/macro_path_as_generic_bound.stderr @@ -2,10 +2,7 @@ error[E0433]: failed to resolve. Use of undeclared type or module `m` --> $DIR/macro_path_as_generic_bound.rs:17:6 | 17 | foo!(m::m2::A); - | -----^^^^^^^^-- - | | | - | | Use of undeclared type or module `m` - | in this macro invocation + | ^^^^^^^^ Use of undeclared type or module `m` error: cannot continue compilation due to previous error -- cgit 1.4.1-3-g733a5 From 8fde04b4a295792249d4a01f87a9f66143aa7c83 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 29 Mar 2017 07:17:18 +0000 Subject: Improve `Path` spans. 
--- src/libsyntax/attr.rs | 7 +++-- src/libsyntax/ext/base.rs | 21 +++++++++++++- src/libsyntax/ext/tt/macro_parser.rs | 2 +- src/libsyntax/ext/tt/macro_rules.rs | 2 +- src/libsyntax/ext/tt/quoted.rs | 13 +++++---- src/libsyntax/ext/tt/transcribe.rs | 9 +----- src/libsyntax/parse/mod.rs | 4 +-- src/libsyntax/parse/parser.rs | 56 ++++++++++++++++++++++-------------- src/libsyntax/parse/token.rs | 48 +++++++++++++++---------------- 9 files changed, 95 insertions(+), 67 deletions(-) (limited to 'src/libsyntax/parse') diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 5dcce2572af..6f5f52ff1e9 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -1015,9 +1015,10 @@ impl MetaItem { { let (mut span, name) = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), - Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt { - token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()), - _ => None, + Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt { + token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name), + token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), + _ => return None, }, _ => return None, }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index a2d54b62ec6..fda026fec64 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -209,7 +209,26 @@ impl TTMacroExpander for F { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream) -> Box { - (*self)(ecx, span, &input.trees().collect::>()) + struct AvoidInterpolatedIdents; + + impl Folder for AvoidInterpolatedIdents { + fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree { + if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt { + if let token::NtIdent(ident) = **nt { + return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node)); + } + } + fold::noop_fold_tt(tt, self) + } + 
+ fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + fold::noop_fold_mac(mac, self) + } + } + + let input: Vec<_> = + input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect(); + (*self)(ecx, span, &input) } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 9ee427eed35..6cd1fea2e75 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -492,7 +492,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { _ => {} } // check at the beginning and the parser checks after each bump - p.check_unknown_macro_variable(); + p.process_potential_macro_variable(); match name { "item" => match panictry!(p.parse_item()) { Some(i) => token::NtItem(i), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 66f5520b882..93348c8f083 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -121,7 +121,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, p.root_module_name = cx.current_expansion.module.mod_path.last() .map(|id| id.name.as_str().to_string()); - p.check_unknown_macro_variable(); + p.process_potential_macro_variable(); // Let the context choose how to interpret the result. // Weird, but useful for X-macros. 
return Box::new(ParserAnyMacro { diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 12e746e024d..d216effbd45 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -136,11 +136,14 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => { let span = match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { - Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => { - let span = Span { lo: start_sp.lo, ..end_sp }; - result.push(TokenTree::MetaVarDecl(span, ident, kind)); - continue - } + Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { + Some(kind) => { + let span = Span { lo: start_sp.lo, ..end_sp }; + result.push(TokenTree::MetaVarDecl(span, ident, kind)); + continue + } + _ => end_sp, + }, tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), }, tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 24004492be2..947089b0b9a 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -12,7 +12,7 @@ use ast::Ident; use errors::Handler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; -use parse::token::{self, SubstNt, Token, NtIdent, NtTT}; +use parse::token::{self, SubstNt, Token, NtTT}; use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree, Delimited}; use util::small_vector::SmallVector; @@ -154,13 +154,6 @@ pub fn transcribe(sp_diag: &Handler, None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()), Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { match **nt { - // sidestep the interpolation tricks for ident because - // (a) idents can be in lots of places, so 
it'd be a pain - // (b) we actually can, since it's a token. - NtIdent(ref sn) => { - let token = TokenTree::Token(sn.span, token::Ident(sn.node)); - result.push(token.into()); - } NtTT(ref tt) => result.push(tt.clone().into()), _ => { let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index b5d0a46de49..c63a6524f74 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -218,9 +218,7 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc) -> TokenStream /// Given stream and the ParseSess, produce a parser pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> { - let mut p = Parser::new(sess, stream, None, false); - p.check_unknown_macro_variable(); - p + Parser::new(sess, stream, None, false) } /// Parse a string representing a character literal into its final form. diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index b0611d75290..db2878c6b1e 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -160,6 +160,7 @@ pub struct Parser<'a> { /// the span of the current token: pub span: Span, /// the span of the previous token: + pub meta_var_span: Option, pub prev_span: Span, /// the previous token kind prev_token_kind: PrevTokenKind, @@ -417,6 +418,7 @@ impl<'a> Parser<'a> { token: token::Underscore, span: syntax_pos::DUMMY_SP, prev_span: syntax_pos::DUMMY_SP, + meta_var_span: None, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), obsolete_set: HashSet::new(), @@ -443,6 +445,7 @@ impl<'a> Parser<'a> { parser.directory.path = PathBuf::from(sess.codemap().span_to_filename(parser.span)); parser.directory.path.pop(); } + parser.process_potential_macro_variable(); parser } @@ -1012,7 +1015,7 @@ impl<'a> Parser<'a> { self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); } - self.prev_span = self.span; + self.prev_span = 
self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. self.prev_token_kind = match self.token { @@ -1028,7 +1031,7 @@ impl<'a> Parser<'a> { self.token = next.tok; self.expected_tokens.clear(); // check after each token - self.check_unknown_macro_variable(); + self.process_potential_macro_variable(); } /// Advance the parser using provided token as a next one. Use this when @@ -1722,7 +1725,7 @@ impl<'a> Parser<'a> { pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { maybe_whole!(self, NtPath, |x| x); - let lo = self.span; + let lo = self.meta_var_span.unwrap_or(self.span); let is_global = self.eat(&token::ModSep); // Parse any number of segments and bound sets. A segment is an @@ -1744,13 +1747,9 @@ impl<'a> Parser<'a> { segments.insert(0, PathSegment::crate_root()); } - // Assemble the span. - // FIXME(#39450) This is bogus if part of the path is macro generated. - let span = lo.to(self.prev_span); - // Assemble the result. Ok(ast::Path { - span: span, + span: lo.to(self.prev_span), segments: segments, }) } @@ -1763,8 +1762,8 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. + let ident_span = self.span; let identifier = self.parse_path_segment_ident()?; - let ident_span = self.prev_span; if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) { self.bump(); @@ -1831,8 +1830,8 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. + let ident_span = self.span; let identifier = self.parse_path_segment_ident()?; - let ident_span = self.prev_span; // If we do not see a `::`, stop. if !self.eat(&token::ModSep) { @@ -1873,10 +1872,11 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. + let ident_span = self.span; let identifier = self.parse_path_segment_ident()?; // Assemble and push the result. 
- segments.push(PathSegment::from_ident(identifier, self.prev_span)); + segments.push(PathSegment::from_ident(identifier, ident_span)); // If we do not see a `::` or see `::{`/`::*`, stop. if !self.check(&token::ModSep) || self.is_import_coupler() { @@ -1896,8 +1896,9 @@ impl<'a> Parser<'a> { fn expect_lifetime(&mut self) -> Lifetime { match self.token { token::Lifetime(ident) => { + let ident_span = self.span; self.bump(); - Lifetime { name: ident.name, span: self.prev_span, id: ast::DUMMY_NODE_ID } + Lifetime { name: ident.name, span: ident_span, id: ast::DUMMY_NODE_ID } } _ => self.span_bug(self.span, "not a lifetime") } @@ -2568,10 +2569,23 @@ impl<'a> Parser<'a> { return Ok(e); } - pub fn check_unknown_macro_variable(&mut self) { - if let token::SubstNt(name) = self.token { - self.fatal(&format!("unknown macro variable `{}`", name)).emit() - } + pub fn process_potential_macro_variable(&mut self) { + let ident = match self.token { + token::SubstNt(name) => { + self.fatal(&format!("unknown macro variable `{}`", name)).emit(); + return + } + token::Interpolated(ref nt) => { + self.meta_var_span = Some(self.span); + match **nt { + token::NtIdent(ident) => ident, + _ => return, + } + } + _ => return, + }; + self.token = token::Ident(ident.node); + self.span = ident.span; } /// parse a single token tree from the input. 
@@ -2589,9 +2603,9 @@ impl<'a> Parser<'a> { }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let token = mem::replace(&mut self.token, token::Underscore); + let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span); self.bump(); - TokenTree::Token(self.prev_span, token) + TokenTree::Token(span, token) } } } @@ -3489,9 +3503,9 @@ impl<'a> Parser<'a> { fn parse_pat_ident(&mut self, binding_mode: ast::BindingMode) -> PResult<'a, PatKind> { + let ident_span = self.span; let ident = self.parse_ident()?; - let prev_span = self.prev_span; - let name = codemap::Spanned{span: prev_span, node: ident}; + let name = codemap::Spanned{span: ident_span, node: ident}; let sub = if self.eat(&token::At) { Some(self.parse_pat()?) } else { @@ -4364,7 +4378,7 @@ impl<'a> Parser<'a> { fn parse_self_arg(&mut self) -> PResult<'a, Option> { let expect_ident = |this: &mut Self| match this.token { // Preserve hygienic context. - token::Ident(ident) => { this.bump(); codemap::respan(this.prev_span, ident) } + token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) } _ => unreachable!() }; let isolated_self = |this: &mut Self, n| { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 519d5bd98e4..74aa3984a9a 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -211,9 +211,7 @@ impl Token { ModSep => true, // global path Pound => true, // expression attributes Interpolated(ref nt) => match **nt { - NtExpr(..) => true, - NtBlock(..) => true, - NtPath(..) => true, + NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true, _ => false, }, _ => false, @@ -236,8 +234,7 @@ impl Token { Lt | BinOp(Shl) => true, // associated path ModSep => true, // global path Interpolated(ref nt) => match **nt { - NtTy(..) => true, - NtPath(..) => true, + NtIdent(..) | NtTy(..) | NtPath(..) 
=> true, _ => false, }, _ => false, @@ -252,14 +249,22 @@ impl Token { } } - /// Returns `true` if the token is an identifier. - pub fn is_ident(&self) -> bool { + pub fn ident(&self) -> Option { match *self { - Ident(..) => true, - _ => false, + Ident(ident) => Some(ident), + Interpolated(ref nt) => match **nt { + NtIdent(ident) => Some(ident.node), + _ => None, + }, + _ => None, } } + /// Returns `true` if the token is an identifier. + pub fn is_ident(&self) -> bool { + self.ident().is_some() + } + /// Returns `true` if the token is a documentation comment. pub fn is_doc_comment(&self) -> bool { match *self { @@ -311,18 +316,15 @@ impl Token { /// Returns `true` if the token is a given keyword, `kw`. pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { - match *self { - Ident(id) => id.name == kw.name(), - _ => false, - } + self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false) } pub fn is_path_segment_keyword(&self) -> bool { - match *self { - Ident(id) => id.name == keywords::Super.name() || - id.name == keywords::SelfValue.name() || - id.name == keywords::SelfType.name(), - _ => false, + match self.ident() { + Some(id) => id.name == keywords::Super.name() || + id.name == keywords::SelfValue.name() || + id.name == keywords::SelfType.name(), + None => false, } } @@ -333,18 +335,16 @@ impl Token { /// Returns `true` if the token is a strict keyword. pub fn is_strict_keyword(&self) -> bool { - match *self { - Ident(id) => id.name >= keywords::As.name() && - id.name <= keywords::While.name(), + match self.ident() { + Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(), _ => false, } } /// Returns `true` if the token is a keyword reserved for possible future use. 
pub fn is_reserved_keyword(&self) -> bool { - match *self { - Ident(id) => id.name >= keywords::Abstract.name() && - id.name <= keywords::Yield.name(), + match self.ident() { + Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(), _ => false, } } -- cgit 1.4.1-3-g733a5 From b3763862280946cab09cbedc4ad5626ebd95a5b2 Mon Sep 17 00:00:00 2001 From: Thomas Jespersen Date: Fri, 17 Mar 2017 23:11:27 +0100 Subject: Replace hardcoded forward slash with path::MAIN_SEPARATOR Fixes #40149 --- src/libstd/sys_common/backtrace.rs | 4 +-- src/libsyntax/parse/parser.rs | 9 ++++--- src/test/parse-fail/mod_file_not_exist.rs | 2 ++ src/test/parse-fail/mod_file_not_exist_windows.rs | 32 +++++++++++++++++++++++ 4 files changed, 41 insertions(+), 6 deletions(-) create mode 100644 src/test/parse-fail/mod_file_not_exist_windows.rs (limited to 'src/libsyntax/parse') diff --git a/src/libstd/sys_common/backtrace.rs b/src/libstd/sys_common/backtrace.rs index 99297b781e4..f5c188f7a75 100644 --- a/src/libstd/sys_common/backtrace.rs +++ b/src/libstd/sys_common/backtrace.rs @@ -19,7 +19,7 @@ use io; use libc; use str; use sync::atomic::{self, Ordering}; -use path::Path; +use path::{self, Path}; use sys::mutex::Mutex; use ptr; @@ -262,7 +262,7 @@ fn output_fileline(w: &mut Write, file: &[u8], line: libc::c_int, if let Ok(cwd) = env::current_dir() { if let Ok(stripped) = file_path.strip_prefix(&cwd) { if let Some(s) = stripped.to_str() { - write!(w, " at ./{}:{}", s, line)?; + write!(w, " at .{}{}:{}", path::MAIN_SEPARATOR, s, line)?; already_printed = true; } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index db2878c6b1e..c2c3e5a6855 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -59,7 +59,7 @@ use util::ThinVec; use std::collections::HashSet; use std::{cmp, mem, slice}; -use std::path::{Path, PathBuf}; +use std::path::{self, Path, PathBuf}; bitflags! 
{ flags Restrictions: u8 { @@ -5146,7 +5146,7 @@ impl<'a> Parser<'a> { pub fn default_submod_path(id: ast::Ident, dir_path: &Path, codemap: &CodeMap) -> ModulePath { let mod_name = id.to_string(); let default_path_str = format!("{}.rs", mod_name); - let secondary_path_str = format!("{}/mod.rs", mod_name); + let secondary_path_str = format!("{}{}mod.rs", mod_name, path::MAIN_SEPARATOR); let default_path = dir_path.join(&default_path_str); let secondary_path = dir_path.join(&secondary_path_str); let default_exists = codemap.file_exists(&default_path); @@ -5224,8 +5224,9 @@ impl<'a> Parser<'a> { }; err.span_note(id_sp, &format!("maybe move this module `{0}` to its own directory \ - via `{0}/mod.rs`", - this_module)); + via `{0}{1}mod.rs`", + this_module, + path::MAIN_SEPARATOR)); if paths.path_exists { err.span_note(id_sp, &format!("... or maybe `use` the module `{}` instead \ diff --git a/src/test/parse-fail/mod_file_not_exist.rs b/src/test/parse-fail/mod_file_not_exist.rs index 7736394a6f5..4bc6e706d42 100644 --- a/src/test/parse-fail/mod_file_not_exist.rs +++ b/src/test/parse-fail/mod_file_not_exist.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-windows + // compile-flags: -Z parse-only mod not_a_real_file; //~ ERROR file not found for module `not_a_real_file` diff --git a/src/test/parse-fail/mod_file_not_exist_windows.rs b/src/test/parse-fail/mod_file_not_exist_windows.rs new file mode 100644 index 00000000000..c58603b4398 --- /dev/null +++ b/src/test/parse-fail/mod_file_not_exist_windows.rs @@ -0,0 +1,32 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// ignore-gnu +// ignore-android +// ignore-bitrig +// ignore-macos +// ignore-dragonfly +// ignore-freebsd +// ignore-haiku +// ignore-ios +// ignore-linux +// ignore-netbsd +// ignore-openbsd +// ignore-solaris +// ignore-emscripten + +// compile-flags: -Z parse-only + +mod not_a_real_file; //~ ERROR file not found for module `not_a_real_file` +//~^ HELP name the file either not_a_real_file.rs or not_a_real_file\mod.rs inside the directory + +fn main() { + assert_eq!(mod_file_aux::bar(), 10); +} -- cgit 1.4.1-3-g733a5 From 56847af9163284f928d5632a3d0d29399716414f Mon Sep 17 00:00:00 2001 From: Niko Matsakis Date: Fri, 17 Mar 2017 09:51:31 -0400 Subject: port the match code to use `CoerceMany` `match { }` now (correctly?) indicates divergence, which results in more unreachable warnings. We also avoid fallback to `!` if there is just one arm (see new test: `match-unresolved-one-arm.rs`). --- src/librustc_typeck/check/_match.rs | 90 ++++++++++------------ src/libsyntax/parse/obsolete.rs | 1 + .../match-no-arms-unreachable-after.rs | 22 ++++++ ...h-unreachable-warning-with-diverging-discrim.rs | 16 ++++ src/test/compile-fail/match-unresolved-one-arm.rs | 17 ++++ 5 files changed, 95 insertions(+), 51 deletions(-) create mode 100644 src/test/compile-fail/match-no-arms-unreachable-after.rs create mode 100644 src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs create mode 100644 src/test/compile-fail/match-unresolved-one-arm.rs (limited to 'src/libsyntax/parse') diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index feed5752cf8..f0d2598a0fb 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -16,6 +16,7 @@ use rustc::infer::type_variable::TypeVariableOrigin; use rustc::traits::ObligationCauseCode; use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference}; use check::{FnCtxt, Expectation, Diverges}; +use check::coercion::CoerceMany; use util::nodemap::FxHashMap; use 
std::collections::hash_map::Entry::{Occupied, Vacant}; @@ -414,6 +415,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { discrim_ty = self.next_ty_var(TypeVariableOrigin::TypeInference(discrim.span)); self.check_expr_has_type(discrim, discrim_ty); }; + + // If the discriminant diverges, the match is pointless (e.g., + // `match (return) { }`). + self.warn_if_unreachable(expr.id, expr.span, "expression"); + + // If there are no arms, that is a diverging match; a special case. + if arms.is_empty() { + self.diverges.set(self.diverges.get() | Diverges::Always); + return tcx.types.never; + } + + // Otherwise, we have to union together the types that the + // arms produce and so forth. + let discrim_diverges = self.diverges.get(); self.diverges.set(Diverges::Maybe); @@ -426,6 +441,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.check_pat(&p, discrim_ty); all_pats_diverge &= self.diverges.get(); } + // As discussed with @eddyb, this is for disabling unreachable_code // warnings on patterns (they're now subsumed by unreachable_patterns // warnings). @@ -444,20 +460,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // on any empty type and is therefore unreachable; should the flow // of execution reach it, we will panic, so bottom is an appropriate // type in that case) - let expected = expected.adjust_for_branches(self); - let mut result_ty = self.next_diverging_ty_var( - TypeVariableOrigin::DivergingBlockExpr(expr.span)); let mut all_arms_diverge = Diverges::WarnedAlways; - let coerce_first = match expected { - // We don't coerce to `()` so that if the match expression is a - // statement it's branches can have any consistent type. That allows - // us to give better error messages (pointing to a usually better - // arm for inconsistent arms or to the whole match when a `()` type - // is required). 
- Expectation::ExpectHasType(ety) if ety != self.tcx.mk_nil() => { - ety - } - _ => result_ty + + let expected = expected.adjust_for_branches(self); + + let mut coercion = { + let coerce_first = match expected { + // We don't coerce to `()` so that if the match expression is a + // statement it's branches can have any consistent type. That allows + // us to give better error messages (pointing to a usually better + // arm for inconsistent arms or to the whole match when a `()` type + // is required). + Expectation::ExpectHasType(ety) if ety != self.tcx.mk_nil() => ety, + _ => self.next_ty_var(TypeVariableOrigin::MiscVariable(expr.span)), + }; + CoerceMany::new(coerce_first) }; for (i, (arm, pats_diverge)) in arms.iter().zip(all_arm_pats_diverge).enumerate() { @@ -470,11 +487,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let arm_ty = self.check_expr_with_expectation(&arm.body, expected); all_arms_diverge &= self.diverges.get(); - if result_ty.references_error() || arm_ty.references_error() { - result_ty = tcx.types.err; - continue; - } - // Handle the fallback arm of a desugared if-let like a missing else. 
let is_if_let_fallback = match match_src { hir::MatchSource::IfLetDesugar { contains_else_clause: false } => { @@ -483,47 +495,23 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { _ => false }; - let cause = if is_if_let_fallback { - self.cause(expr.span, ObligationCauseCode::IfExpressionWithNoElse) + if is_if_let_fallback { + let cause = self.cause(expr.span, ObligationCauseCode::IfExpressionWithNoElse); + assert!(arm_ty.is_nil()); + coercion.coerce_forced_unit(self, &cause); } else { - self.cause(expr.span, ObligationCauseCode::MatchExpressionArm { + let cause = self.cause(expr.span, ObligationCauseCode::MatchExpressionArm { arm_span: arm.body.span, source: match_src - }) - }; - - let result = if is_if_let_fallback { - self.eq_types(true, &cause, arm_ty, result_ty) - .map(|infer_ok| { - self.register_infer_ok_obligations(infer_ok); - arm_ty - }) - } else if i == 0 { - // Special-case the first arm, as it has no "previous expressions". - self.try_coerce(&arm.body, arm_ty, coerce_first) - } else { - let prev_arms = || arms[..i].iter().map(|arm| &*arm.body); - self.try_find_coercion_lub(&cause, prev_arms, result_ty, &arm.body, arm_ty) - }; - - result_ty = match result { - Ok(ty) => ty, - Err(e) => { - let (expected, found) = if is_if_let_fallback { - (arm_ty, result_ty) - } else { - (result_ty, arm_ty) - }; - self.report_mismatched_types(&cause, expected, found, e).emit(); - self.tcx.types.err - } - }; + }); + coercion.coerce(self, &cause, &arm.body, arm_ty); + } } // We won't diverge unless the discriminant or all arms diverge. 
self.diverges.set(discrim_diverges | all_arms_diverge); - result_ty + coercion.complete(self) } fn check_pat_struct(&self, diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index a46a788ca08..d5baec675e4 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -36,6 +36,7 @@ pub trait ParserObsoleteMethods { impl<'a> ParserObsoleteMethods for parser::Parser<'a> { /// Reports an obsolete syntax non-fatal error. #[allow(unused_variables)] + #[allow(unreachable_code)] fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax) { let (kind_str, desc, error) = match kind { // Nothing here at the moment diff --git a/src/test/compile-fail/match-no-arms-unreachable-after.rs b/src/test/compile-fail/match-no-arms-unreachable-after.rs new file mode 100644 index 00000000000..db08f5e5e66 --- /dev/null +++ b/src/test/compile-fail/match-no-arms-unreachable-after.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(warnings)] +#![deny(unreachable_code)] + +enum Void { } + +fn foo(v: Void) { + match v { } + let x = 2; //~ ERROR unreachable +} + +fn main() { +} diff --git a/src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs b/src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs new file mode 100644 index 00000000000..aae0f3135d8 --- /dev/null +++ b/src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(unused_parens)] +#![deny(unreachable_code)] + +fn main() { + match (return) { } //~ ERROR unreachable expression +} diff --git a/src/test/compile-fail/match-unresolved-one-arm.rs b/src/test/compile-fail/match-unresolved-one-arm.rs new file mode 100644 index 00000000000..ea0f8db99e8 --- /dev/null +++ b/src/test/compile-fail/match-unresolved-one-arm.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn foo() -> T { panic!("Rocks for my pillow") } + +fn main() { + let x = match () { //~ ERROR type annotations needed + () => foo() // T here should be unresolved + }; +} -- cgit 1.4.1-3-g733a5 From b83352e44c36e81db7f00eb60e78ff3828c51c9e Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Tue, 28 Mar 2017 18:56:29 -0700 Subject: Introduce `TyErr` independent from `TyInfer` Add a `TyErr` type to represent unknown types in places where parse errors have happened, while still able to build the AST. Initially only used to represent incorrectly written fn arguments and avoid "expected X parameters, found Y" errors when called with the appropriate amount of parameters. We cannot use `TyInfer` for this as `_` is not allowed as a valid argument type. Example output: ```rust error: expected one of `:` or `@`, found `,` --> file.rs:12:9 | 12 | fn bar(x, y: usize) {} | ^ error[E0061]: this function takes 2 parameters but 3 parameters were supplied --> file.rs:19:9 | 12 | fn bar(x, y) {} | --------------- defined here ... 
19 | bar(1, 2, 3); | ^^^^^^^ expected 2 parameters ``` --- src/librustc/hir/intravisit.rs | 2 +- src/librustc/hir/lowering.rs | 1 + src/librustc/hir/mod.rs | 2 + src/librustc/hir/print.rs | 3 ++ .../calculate_svh/svh_visitor.rs | 6 ++- src/librustc_typeck/astconv.rs | 3 ++ src/librustdoc/clean/mod.rs | 2 +- src/libsyntax/ast.rs | 2 + src/libsyntax/fold.rs | 2 +- src/libsyntax/parse/parser.rs | 25 ++++++++++- src/libsyntax/print/pprust.rs | 3 ++ src/libsyntax/visit.rs | 2 +- src/test/ui/span/issue-34264.rs | 20 +++++++++ src/test/ui/span/issue-34264.stderr | 49 ++++++++++++++++++++++ 14 files changed, 115 insertions(+), 7 deletions(-) create mode 100644 src/test/ui/span/issue-34264.rs create mode 100644 src/test/ui/span/issue-34264.stderr (limited to 'src/libsyntax/parse') diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index c7ad143c949..2c8b145f126 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -578,7 +578,7 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { TyTypeof(expression) => { visitor.visit_nested_body(expression) } - TyInfer => {} + TyInfer | TyErr => {} } } diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 17185a6ab69..acc6d21ddc6 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -555,6 +555,7 @@ impl<'a> LoweringContext<'a> { fn lower_ty(&mut self, t: &Ty) -> P { let kind = match t.node { TyKind::Infer => hir::TyInfer, + TyKind::Err => hir::TyErr, TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)), TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), TyKind::Rptr(ref region, ref mt) => { diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index d5000ac9c18..0da405d1821 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -1351,6 +1351,8 @@ pub enum Ty_ { /// TyInfer means the type should be inferred instead of it having been /// specified. This can appear anywhere in a type. 
TyInfer, + /// Placeholder for a type that has failed to be defined. + TyErr, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 04a65fd5e3a..4a5a35aa82c 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -450,6 +450,9 @@ impl<'a> State<'a> { hir::TyInfer => { word(&mut self.s, "_")?; } + hir::TyErr => { + word(&mut self.s, "?")?; + } } self.end() } diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index 5401b371888..4700b77be07 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -432,7 +432,8 @@ enum SawTyComponent { SawTyObjectSum, SawTyImplTrait, SawTyTypeof, - SawTyInfer + SawTyInfer, + SawTyErr, } fn saw_ty(node: &Ty_) -> SawTyComponent { @@ -448,7 +449,8 @@ fn saw_ty(node: &Ty_) -> SawTyComponent { TyTraitObject(..) => SawTyObjectSum, TyImplTrait(..) => SawTyImplTrait, TyTypeof(..) => SawTyTypeof, - TyInfer => SawTyInfer + TyInfer => SawTyInfer, + TyErr => SawTyErr, } } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 923ec05c22b..66c4a81a5c0 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -1229,6 +1229,9 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { // handled specially and will not descend into this routine. 
self.ty_infer(ast_ty.span) } + hir::TyErr => { + tcx.types.err + } }; cache.borrow_mut().insert(ast_ty.id, result_ty); diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index f3ea6c4467c..ac72d7d29a2 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -1805,7 +1805,7 @@ impl Clean for hir::Ty { } TyBareFn(ref barefn) => BareFunction(box barefn.clean(cx)), TyImplTrait(ref bounds) => ImplTrait(bounds.clean(cx)), - TyInfer => Infer, + TyInfer | TyErr => Infer, TyTypeof(..) => panic!("Unimplemented type {:?}", self.node), } } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 9eb86aa006d..c6a3e8a2ded 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -1361,6 +1361,8 @@ pub enum TyKind { ImplicitSelf, // A macro in the type position. Mac(Mac), + /// Placeholder for a kind that has failed to be defined. + Err, } /// Inline assembly dialect. diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 1a4e196ac55..92e25b00e0a 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -358,7 +358,7 @@ pub fn noop_fold_ty(t: P, fld: &mut T) -> P { t.map(|Ty {id, node, span}| Ty { id: fld.new_id(id), node: match node { - TyKind::Infer | TyKind::ImplicitSelf => node, + TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => node, TyKind::Slice(ty) => TyKind::Slice(fld.fold_ty(ty)), TyKind::Ptr(mt) => TyKind::Ptr(fld.fold_mt(mt)), TyKind::Rptr(region, mt) => { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index c2c3e5a6855..23fc1351426 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -407,6 +407,25 @@ impl From> for LhsExpr { } } +/// Create a placeholder argument. 
+fn dummy_arg(span: Span) -> Arg { + let spanned = Spanned { + span: span, + node: keywords::Invalid.ident() + }; + let pat = P(Pat { + id: ast::DUMMY_NODE_ID, + node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), spanned, None), + span: span + }); + let ty = Ty { + node: TyKind::Err, + span: span, + id: ast::DUMMY_NODE_ID + }; + Arg { ty: P(ty), pat: pat, id: ast::DUMMY_NODE_ID } +} + impl<'a> Parser<'a> { pub fn new(sess: &'a ParseSess, tokens: TokenStream, @@ -4343,8 +4362,12 @@ impl<'a> Parser<'a> { Ok(arg) => Ok(Some(arg)), Err(mut e) => { e.emit(); + let lo = p.prev_span; + // Skip every token until next possible arg or end. p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(token::Paren)]); - Ok(None) + // Create a placeholder argument for proper arg count (#34264). + let span = lo.to(p.prev_span); + Ok(Some(dummy_arg(span))) } } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index f042a18d610..e7feff2b79f 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1095,6 +1095,9 @@ impl<'a> State<'a> { ast::TyKind::Infer => { word(&mut self.s, "_")?; } + ast::TyKind::Err => { + word(&mut self.s, "?")?; + } ast::TyKind::ImplicitSelf => { word(&mut self.s, "Self")?; } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index a5333f3bb6a..b5e9a1892ac 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -350,7 +350,7 @@ pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) { TyKind::Typeof(ref expression) => { visitor.visit_expr(expression) } - TyKind::Infer | TyKind::ImplicitSelf => {} + TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => {} TyKind::Mac(ref mac) => { visitor.visit_mac(mac) } diff --git a/src/test/ui/span/issue-34264.rs b/src/test/ui/span/issue-34264.rs new file mode 100644 index 00000000000..00482f50618 --- /dev/null +++ b/src/test/ui/span/issue-34264.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn foo(Option, String) {} +fn bar(x, y: usize) {} + +fn main() { + foo(Some(42), 2); + foo(Some(42), 2, ""); + bar("", ""); + bar(1, 2); + bar(1, 2, 3); +} diff --git a/src/test/ui/span/issue-34264.stderr b/src/test/ui/span/issue-34264.stderr new file mode 100644 index 00000000000..c79db54eaef --- /dev/null +++ b/src/test/ui/span/issue-34264.stderr @@ -0,0 +1,49 @@ +error: expected one of `:` or `@`, found `<` + --> $DIR/issue-34264.rs:11:14 + | +11 | fn foo(Option, String) {} + | ^ + +error: expected one of `:` or `@`, found `)` + --> $DIR/issue-34264.rs:11:27 + | +11 | fn foo(Option, String) {} + | ^ + +error: expected one of `:` or `@`, found `,` + --> $DIR/issue-34264.rs:12:9 + | +12 | fn bar(x, y: usize) {} + | ^ + +error[E0061]: this function takes 2 parameters but 3 parameters were supplied + --> $DIR/issue-34264.rs:16:9 + | +11 | fn foo(Option, String) {} + | ------------------------------ defined here +... +16 | foo(Some(42), 2, ""); + | ^^^^^^^^^^^^^^^ expected 2 parameters + +error[E0308]: mismatched types + --> $DIR/issue-34264.rs:17:13 + | +17 | bar("", ""); + | ^^ expected usize, found reference + | + = note: expected type `usize` + found type `&'static str` + = help: here are some functions which might fulfill your needs: + - .len() + +error[E0061]: this function takes 2 parameters but 3 parameters were supplied + --> $DIR/issue-34264.rs:19:9 + | +12 | fn bar(x, y: usize) {} + | ---------------------- defined here +... 
+19 | bar(1, 2, 3); + | ^^^^^^^ expected 2 parameters + +error: aborting due to 3 previous errors + -- cgit 1.4.1-3-g733a5 From 6a9448b523b95dbc850e856508342644fc17db45 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Mon, 3 Apr 2017 22:23:32 +0000 Subject: Fix bug parsing `#[derive]` macro invocations. --- src/librustc_resolve/macros.rs | 6 ++++-- src/libsyntax/ext/derive.rs | 3 ++- src/libsyntax/parse/parser.rs | 20 ++++++++++++++++++++ src/test/run-pass/issue-40962.rs | 20 ++++++++++++++++++++ 4 files changed, 46 insertions(+), 3 deletions(-) create mode 100644 src/test/run-pass/issue-40962.rs (limited to 'src/libsyntax/parse') diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 05f30f039c8..966cb7ee8d8 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -222,8 +222,10 @@ impl<'a> base::Resolver for Resolver<'a> { let name = unwrap_or!(attrs[i].name(), continue); if name == "derive" { - let result = attrs[i].parse_list(&self.session.parse_sess, - |parser| parser.parse_path(PathStyle::Mod)); + let result = attrs[i].parse_list(&self.session.parse_sess, |parser| { + parser.parse_path_allowing_meta(PathStyle::Mod) + }); + let mut traits = match result { Ok(traits) => traits, Err(mut e) => { diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index c79040424f6..e7c5d8278d9 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -26,7 +26,8 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec return true; } - match attr.parse_list(cx.parse_sess, |parser| parser.parse_path(PathStyle::Mod)) { + match attr.parse_list(cx.parse_sess, + |parser| parser.parse_path_allowing_meta(PathStyle::Mod)) { Ok(ref traits) if traits.is_empty() => { cx.span_warn(attr.span, "empty trait list in `derive`"); false diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index c2c3e5a6855..a89811d8abb 100644 --- a/src/libsyntax/parse/parser.rs +++ 
b/src/libsyntax/parse/parser.rs @@ -1754,6 +1754,26 @@ impl<'a> Parser<'a> { }) } + /// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat. + /// This is used when parsing derive macro paths in `#[derive]` attributes. + pub fn parse_path_allowing_meta(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { + let meta_ident = match self.token { + token::Interpolated(ref nt) => match **nt { + token::NtMeta(ref meta) => match meta.node { + ast::MetaItemKind::Word => Some(ast::Ident::with_empty_ctxt(meta.name)), + _ => None, + }, + _ => None, + }, + _ => None, + }; + if let Some(ident) = meta_ident { + self.bump(); + return Ok(ast::Path::from_ident(self.prev_span, ident)); + } + self.parse_path(mode) + } + /// Examples: /// - `a::b::c` /// - `a::b::c(V) -> W` diff --git a/src/test/run-pass/issue-40962.rs b/src/test/run-pass/issue-40962.rs new file mode 100644 index 00000000000..b35cfa12eab --- /dev/null +++ b/src/test/run-pass/issue-40962.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! m { + ($i:meta) => { + #[derive($i)] + struct S; + } +} + +m!(Clone); + +fn main() {} -- cgit 1.4.1-3-g733a5