From 559127b4517229115397404f20167bc7b702d3d6 Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Mar 2017 15:50:04 +0100 Subject: Implement indexed_vec::Idx for ast::NodeId --- src/libsyntax/ast.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) (limited to 'src/libsyntax') diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 4347046b6b8..3dd4bdbd14d 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -23,6 +23,7 @@ use abi::Abi; use ext::hygiene::SyntaxContext; use print::pprust; use ptr::P; +use rustc_data_structures::indexed_vec; use symbol::{Symbol, keywords}; use tokenstream::{ThinTokenStream, TokenStream}; @@ -275,6 +276,16 @@ impl serialize::UseSpecializedDecodable for NodeId { } } +impl indexed_vec::Idx for NodeId { + fn new(idx: usize) -> Self { + NodeId::new(idx) + } + + fn index(self) -> usize { + self.as_usize() + } +} + /// Node id used to represent the root of the crate. pub const CRATE_NODE_ID: NodeId = NodeId(0); -- cgit 1.4.1-3-g733a5 From bc259ee844f608599293c83d96de353005681cca Mon Sep 17 00:00:00 2001 From: Michael Woerister Date: Tue, 14 Mar 2017 15:50:40 +0100 Subject: Introduce HirId, a replacement for NodeId after lowering to HIR. HirId has a more stable representation than NodeId, meaning that modifications to one item don't influence (part of) the IDs within other items. The other part is a DefIndex for which there already is a way of stable hashing and persistence. This commit introduces the HirId type and generates a HirId for every NodeId during HIR lowering, but the resulting values are not yet used anywhere, except in consistency checks. --- src/librustc/hir/lowering.rs | 1773 +++++++++++--------- src/librustc/hir/map/definitions.rs | 30 + src/librustc/hir/map/hir_id_validator.rs | 184 ++ src/librustc/hir/map/mod.rs | 9 +- src/librustc/hir/mod.rs | 61 +- src/libsyntax/ext/placeholders.rs | 14 +- ...region-bounds-on-objects-and-type-parameters.rs | 2 +- 7 files changed, 1286 insertions(+), 787 deletions(-) create mode 100644 src/librustc/hir/map/hir_id_validator.rs (limited to 'src/libsyntax') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 81591a5650f..22ca0e421be 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -43,12 +43,14 @@ use hir; use hir::map::{Definitions, DefKey}; use hir::map::definitions::DefPathData; -use hir::def_id::{DefIndex, DefId}; +use hir::def_id::{DefIndex, DefId, CRATE_DEF_INDEX}; use hir::def::{Def, PathResolution}; +use rustc_data_structures::indexed_vec::IndexVec; use session::Session; use util::nodemap::{DefIdMap, NodeMap}; use std::collections::BTreeMap; +use std::fmt::Debug; use std::iter; use std::mem; @@ -63,6 +65,8 @@ use syntax::util::small_vector::SmallVector; use syntax::visit::{self, Visitor}; use syntax_pos::Span; +const HIR_ID_COUNTER_LOCKED: u32 = 0xFFFFFFFF; + pub struct LoweringContext<'a> { crate_root: Option<&'static str>, // Use to assign ids to hir nodes that do not directly correspond to an ast node @@ -89,6 +93,10 @@ pub struct LoweringContext<'a> { is_in_loop_condition: bool, type_def_lifetime_params: DefIdMap, + + current_hir_id_owner: Vec<(DefIndex, u32)>, + item_local_id_counters: NodeMap, + node_id_to_hir_id: IndexVec, } pub trait Resolver { @@ -128,6 +136,9 @@ pub fn lower_crate(sess: &Session, loop_scopes: Vec::new(), is_in_loop_condition: false, type_def_lifetime_params: DefIdMap(), + current_hir_id_owner: vec![(CRATE_DEF_INDEX, 0)], + item_local_id_counters: NodeMap(), + node_id_to_hir_id: IndexVec::new(), 
}.lower_crate(krate) } @@ -152,6 +163,8 @@ impl<'a> LoweringContext<'a> { impl<'lcx, 'interner> Visitor<'lcx> for MiscCollector<'lcx, 'interner> { fn visit_item(&mut self, item: &'lcx Item) { + self.lctx.allocate_hir_id_counter(item.id, item); + match item.node { ItemKind::Struct(_, ref generics) | ItemKind::Union(_, ref generics) | @@ -166,6 +179,16 @@ impl<'a> LoweringContext<'a> { } visit::walk_item(self, item); } + + fn visit_trait_item(&mut self, item: &'lcx TraitItem) { + self.lctx.allocate_hir_id_counter(item.id, item); + visit::walk_trait_item(self, item); + } + + fn visit_impl_item(&mut self, item: &'lcx ImplItem) { + self.lctx.allocate_hir_id_counter(item.id, item); + visit::walk_impl_item(self, item); + } } struct ItemLowerer<'lcx, 'interner: 'lcx> { @@ -174,27 +197,43 @@ impl<'a> LoweringContext<'a> { impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> { fn visit_item(&mut self, item: &'lcx Item) { - if let Some(hir_item) = self.lctx.lower_item(item) { - self.lctx.items.insert(item.id, hir_item); + let mut item_lowered = true; + self.lctx.with_hir_id_owner(item.id, |lctx| { + if let Some(hir_item) = lctx.lower_item(item) { + lctx.items.insert(item.id, hir_item); + } else { + item_lowered = false; + } + }); + + if item_lowered { visit::walk_item(self, item); } } fn visit_trait_item(&mut self, item: &'lcx TraitItem) { - let id = hir::TraitItemId { node_id: item.id }; - let hir_item = self.lctx.lower_trait_item(item); - self.lctx.trait_items.insert(id, hir_item); + self.lctx.with_hir_id_owner(item.id, |lctx| { + let id = hir::TraitItemId { node_id: item.id }; + let hir_item = lctx.lower_trait_item(item); + lctx.trait_items.insert(id, hir_item); + }); + visit::walk_trait_item(self, item); } fn visit_impl_item(&mut self, item: &'lcx ImplItem) { - let id = hir::ImplItemId { node_id: item.id }; - let hir_item = self.lctx.lower_impl_item(item); - self.lctx.impl_items.insert(id, hir_item); + self.lctx.with_hir_id_owner(item.id, |lctx| { + let id = hir::ImplItemId { node_id: item.id }; + let hir_item = lctx.lower_impl_item(item); + lctx.impl_items.insert(id, hir_item); + }); visit::walk_impl_item(self, item); } } + self.lower_node_id(CRATE_NODE_ID); + debug_assert!(self.node_id_to_hir_id[CRATE_NODE_ID] == hir::CRATE_HIR_ID); + visit::walk_crate(&mut MiscCollector { lctx: &mut self }, c); visit::walk_crate(&mut ItemLowerer { lctx: &mut self }, c); @@ -202,6 +241,10 @@ impl<'a> LoweringContext<'a> { let attrs = self.lower_attrs(&c.attrs); let body_ids = body_ids(&self.bodies); + self.resolver + .definitions() + .init_node_id_to_hir_id_mapping(self.node_id_to_hir_id); + hir::Crate { module: module, attrs: attrs, @@ -217,6 +260,103 @@ impl<'a> LoweringContext<'a> { } } + fn allocate_hir_id_counter(&mut self, + owner: NodeId, + debug: &T) { + if self.item_local_id_counters.insert(owner, 0).is_some() { + bug!("Tried to allocate item_local_id_counter for {:?} twice", debug); + } + // Always allocate the first HirId for the owner itself + self.lower_node_id_with_owner(owner, owner); + } + + fn lower_node_id_generic(&mut self, + ast_node_id: NodeId, + alloc_hir_id: F) + -> NodeId + where F: FnOnce(&mut Self) -> hir::HirId + { + if ast_node_id == DUMMY_NODE_ID { + return ast_node_id; + } + + let min_size = ast_node_id.as_usize() + 1; + + if min_size > self.node_id_to_hir_id.len() { + self.node_id_to_hir_id.resize(min_size, hir::DUMMY_HIR_ID); + } + + if self.node_id_to_hir_id[ast_node_id] == hir::DUMMY_HIR_ID { + // Generate a new HirId + self.node_id_to_hir_id[ast_node_id] = 
alloc_hir_id(self); + } + + ast_node_id + } + + fn with_hir_id_owner(&mut self, owner: NodeId, f: F) + where F: FnOnce(&mut Self) + { + let counter = self.item_local_id_counters + .insert(owner, HIR_ID_COUNTER_LOCKED) + .unwrap(); + let def_index = self.resolver.definitions().opt_def_index(owner).unwrap(); + self.current_hir_id_owner.push((def_index, counter)); + f(self); + let (new_def_index, new_counter) = self.current_hir_id_owner.pop().unwrap(); + + debug_assert!(def_index == new_def_index); + debug_assert!(new_counter >= counter); + + let prev = self.item_local_id_counters.insert(owner, new_counter).unwrap(); + debug_assert!(prev == HIR_ID_COUNTER_LOCKED); + } + + /// This method allocates a new HirId for the given NodeId and stores it in + /// the LoweringContext's NodeId => HirId map. + /// Take care not to call this method if the resulting HirId is then not + /// actually used in the HIR, as that would trigger an assertion in the + /// HirIdValidator later on, which makes sure that all NodeIds got mapped + /// properly. Calling the method twice with the same NodeId is fine though. + fn lower_node_id(&mut self, ast_node_id: NodeId) -> NodeId { + self.lower_node_id_generic(ast_node_id, |this| { + let &mut (def_index, ref mut local_id_counter) = this.current_hir_id_owner + .last_mut() + .unwrap(); + let local_id = *local_id_counter; + *local_id_counter += 1; + hir::HirId { + owner: def_index, + local_id: hir::ItemLocalId(local_id), + } + }) + } + + fn lower_node_id_with_owner(&mut self, + ast_node_id: NodeId, + owner: NodeId) + -> NodeId { + self.lower_node_id_generic(ast_node_id, |this| { + let local_id_counter = this.item_local_id_counters + .get_mut(&owner) + .unwrap(); + let local_id = *local_id_counter; + + // We want to be sure not to modify the counter in the map while it + // is also on the stack. Otherwise we'll get lost updates when writing + // back from the stack to the map. 
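+        // While an owner's counter sits on the `current_hir_id_owner` stack
+        // (inside `with_hir_id_owner`), its entry in `item_local_id_counters`
+        // holds the `HIR_ID_COUNTER_LOCKED` sentinel, so the assertion below
+        // catches any attempt to allocate through the map for that owner.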
+ debug_assert!(local_id != HIR_ID_COUNTER_LOCKED); + + *local_id_counter += 1; + let def_index = this.resolver.definitions().opt_def_index(owner).unwrap(); + + hir::HirId { + owner: def_index, + local_id: hir::ItemLocalId(local_id), + } + }) + } + fn record_body(&mut self, value: hir::Expr, decl: Option<&FnDecl>) -> hir::BodyId { let body = hir::Body { @@ -230,8 +370,8 @@ impl<'a> LoweringContext<'a> { id } - fn next_id(&self) -> NodeId { - self.sess.next_node_id() + fn next_id(&mut self) -> NodeId { + self.lower_node_id(self.sess.next_node_id()) } fn expect_full_def(&mut self, id: NodeId) -> Def { @@ -362,7 +502,7 @@ impl<'a> LoweringContext<'a> { match destination { Some((id, label_ident)) => { let target = if let Def::Label(loop_id) = self.expect_full_def(id) { - hir::LoopIdResult::Ok(loop_id) + hir::LoopIdResult::Ok(self.lower_node_id(loop_id)) } else { hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel) }; @@ -371,11 +511,18 @@ impl<'a> LoweringContext<'a> { target_id: hir::ScopeTarget::Loop(target), } }, - None => hir::Destination { - ident: None, - target_id: hir::ScopeTarget::Loop( - self.loop_scopes.last().map(|innermost_loop_id| Ok(*innermost_loop_id)) - .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)).into()) + None => { + let loop_id = self.loop_scopes + .last() + .map(|innermost_loop_id| *innermost_loop_id); + + hir::Destination { + ident: None, + target_id: hir::ScopeTarget::Loop( + loop_id.map(|id| Ok(self.lower_node_id(id))) + .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)) + .into()) + } } } } @@ -395,7 +542,7 @@ impl<'a> LoweringContext<'a> { fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding { hir::TypeBinding { - id: b.id, + id: self.lower_node_id(b.id), name: b.ident.name, ty: self.lower_ty(&b.ty), span: b.span, @@ -403,82 +550,87 @@ impl<'a> LoweringContext<'a> { } fn lower_ty(&mut self, t: &Ty) -> P { - P(hir::Ty { - id: t.id, - node: match t.node { - TyKind::Infer => hir::TyInfer, - TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)), - TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), - TyKind::Rptr(ref region, ref mt) => { - let span = Span { hi: t.span.lo, ..t.span }; - let lifetime = match *region { - Some(ref lt) => self.lower_lifetime(lt), - None => self.elided_lifetime(span) - }; - hir::TyRptr(lifetime, self.lower_mt(mt)) - } - TyKind::BareFn(ref f) => { - hir::TyBareFn(P(hir::BareFnTy { - lifetimes: self.lower_lifetime_defs(&f.lifetimes), - unsafety: self.lower_unsafety(f.unsafety), - abi: f.abi, - decl: self.lower_fn_decl(&f.decl), - })) - } - TyKind::Never => hir::TyNever, - TyKind::Tup(ref tys) => { - hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect()) - } - TyKind::Paren(ref ty) => { - return self.lower_ty(ty); - } - TyKind::Path(ref qself, ref path) => { - let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit); - return self.ty_path(t.id, t.span, qpath); - } - TyKind::ImplicitSelf => { - hir::TyPath(hir::QPath::Resolved(None, P(hir::Path { - def: self.expect_full_def(t.id), - segments: hir_vec![hir::PathSegment { - name: keywords::SelfType.name(), - parameters: hir::PathParameters::none() - }], - span: t.span, - }))) - } - TyKind::Array(ref ty, ref length) => { - let length = self.lower_expr(length); - hir::TyArray(self.lower_ty(ty), - self.record_body(length, None)) - } - TyKind::Typeof(ref expr) => { - let expr = self.lower_expr(expr); - hir::TyTypeof(self.record_body(expr, None)) - } - TyKind::TraitObject(ref bounds) => { - let mut lifetime_bound = None; - let bounds = 
bounds.iter().filter_map(|bound| { - match *bound { - TraitTyParamBound(ref ty, TraitBoundModifier::None) => { - Some(self.lower_poly_trait_ref(ty)) - } - TraitTyParamBound(_, TraitBoundModifier::Maybe) => None, - RegionTyParamBound(ref lifetime) => { + let kind = match t.node { + TyKind::Infer => hir::TyInfer, + TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)), + TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), + TyKind::Rptr(ref region, ref mt) => { + let span = Span { hi: t.span.lo, ..t.span }; + let lifetime = match *region { + Some(ref lt) => self.lower_lifetime(lt), + None => self.elided_lifetime(span) + }; + hir::TyRptr(lifetime, self.lower_mt(mt)) + } + TyKind::BareFn(ref f) => { + hir::TyBareFn(P(hir::BareFnTy { + lifetimes: self.lower_lifetime_defs(&f.lifetimes), + unsafety: self.lower_unsafety(f.unsafety), + abi: f.abi, + decl: self.lower_fn_decl(&f.decl), + })) + } + TyKind::Never => hir::TyNever, + TyKind::Tup(ref tys) => { + hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect()) + } + TyKind::Paren(ref ty) => { + return self.lower_ty(ty); + } + TyKind::Path(ref qself, ref path) => { + let id = self.lower_node_id(t.id); + let qpath = self.lower_qpath(t.id, qself, path, ParamMode::Explicit); + return self.ty_path(id, t.span, qpath); + } + TyKind::ImplicitSelf => { + hir::TyPath(hir::QPath::Resolved(None, P(hir::Path { + def: self.expect_full_def(t.id), + segments: hir_vec![hir::PathSegment { + name: keywords::SelfType.name(), + parameters: hir::PathParameters::none() + }], + span: t.span, + }))) + } + TyKind::Array(ref ty, ref length) => { + let length = self.lower_expr(length); + hir::TyArray(self.lower_ty(ty), + self.record_body(length, None)) + } + TyKind::Typeof(ref expr) => { + let expr = self.lower_expr(expr); + hir::TyTypeof(self.record_body(expr, None)) + } + TyKind::TraitObject(ref bounds) => { + let mut lifetime_bound = None; + let bounds = bounds.iter().filter_map(|bound| { + match *bound { + TraitTyParamBound(ref ty, TraitBoundModifier::None) => { + Some(self.lower_poly_trait_ref(ty)) + } + TraitTyParamBound(_, TraitBoundModifier::Maybe) => None, + RegionTyParamBound(ref lifetime) => { + if lifetime_bound.is_none() { lifetime_bound = Some(self.lower_lifetime(lifetime)); - None } + None } - }).collect(); - let lifetime_bound = lifetime_bound.unwrap_or_else(|| { - self.elided_lifetime(t.span) - }); - hir::TyTraitObject(bounds, lifetime_bound) - } - TyKind::ImplTrait(ref bounds) => { - hir::TyImplTrait(self.lower_bounds(bounds)) - } - TyKind::Mac(_) => panic!("TyMac should have been expanded by now."), - }, + } + }).collect(); + let lifetime_bound = lifetime_bound.unwrap_or_else(|| { + self.elided_lifetime(t.span) + }); + hir::TyTraitObject(bounds, lifetime_bound) + } + TyKind::ImplTrait(ref bounds) => { + hir::TyImplTrait(self.lower_bounds(bounds)) + } + TyKind::Mac(_) => panic!("TyMac should have been expanded by now."), + }; + + P(hir::Ty { + id: self.lower_node_id(t.id), + node: kind, span: t.span, }) } @@ -712,7 +864,7 @@ impl<'a> LoweringContext<'a> { fn lower_local(&mut self, l: &Local) -> P { P(hir::Local { - id: l.id, + id: self.lower_node_id(l.id), ty: l.ty.as_ref().map(|t| self.lower_ty(t)), pat: self.lower_pat(&l.pat), init: l.init.as_ref().map(|e| P(self.lower_expr(e))), @@ -730,7 +882,7 @@ impl<'a> LoweringContext<'a> { fn lower_arg(&mut self, arg: &Arg) -> hir::Arg { hir::Arg { - id: arg.id, + id: self.lower_node_id(arg.id), pat: self.lower_pat(&arg.pat), } } @@ -786,7 +938,7 @@ impl<'a> LoweringContext<'a> { } hir::TyParam { - id: 
tp.id, + id: self.lower_node_id(tp.id), name: name, bounds: bounds, default: tp.default.as_ref().map(|x| self.lower_ty(x)), @@ -804,7 +956,7 @@ impl<'a> LoweringContext<'a> { fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime { hir::Lifetime { - id: l.id, + id: self.lower_node_id(l.id), name: l.name, span: l.span, } @@ -876,7 +1028,7 @@ impl<'a> LoweringContext<'a> { fn lower_where_clause(&mut self, wc: &WhereClause) -> hir::WhereClause { hir::WhereClause { - id: wc.id, + id: self.lower_node_id(wc.id), predicates: wc.predicates .iter() .map(|predicate| self.lower_where_predicate(predicate)) @@ -915,7 +1067,7 @@ impl<'a> LoweringContext<'a> { ref rhs_ty, span}) => { hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { - id: id, + id: self.lower_node_id(id), lhs_ty: self.lower_ty(lhs_ty), rhs_ty: self.lower_ty(rhs_ty), span: span, @@ -931,16 +1083,16 @@ impl<'a> LoweringContext<'a> { .enumerate() .map(|f| self.lower_struct_field(f)) .collect(), - id) + self.lower_node_id(id)) } VariantData::Tuple(ref fields, id) => { hir::VariantData::Tuple(fields.iter() .enumerate() .map(|f| self.lower_struct_field(f)) .collect(), - id) + self.lower_node_id(id)) } - VariantData::Unit(id) => hir::VariantData::Unit(id), + VariantData::Unit(id) => hir::VariantData::Unit(self.lower_node_id(id)), } } @@ -951,7 +1103,7 @@ impl<'a> LoweringContext<'a> { }; hir::TraitRef { path: path, - ref_id: p.ref_id, + ref_id: self.lower_node_id(p.ref_id), } } @@ -966,9 +1118,9 @@ impl<'a> LoweringContext<'a> { fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField { hir::StructField { span: f.span, - id: f.id, + id: self.lower_node_id(f.id), name: f.ident.map(|ident| ident.name).unwrap_or(Symbol::intern(&index.to_string())), - vis: self.lower_visibility(&f.vis), + vis: self.lower_visibility(&f.vis, None), ty: self.lower_ty(&f.ty), attrs: self.lower_attrs(&f.attrs), } @@ -997,17 +1149,22 @@ impl<'a> LoweringContext<'a> { fn lower_block(&mut self, b: &Block, break_to: Option) -> P { let mut expr = None; - let mut stmts = b.stmts.iter().flat_map(|s| self.lower_stmt(s)).collect::>(); - if let Some(last) = stmts.pop() { - if let hir::StmtExpr(e, _) = last.node { - expr = Some(e); + let mut stmts = vec![]; + + for (index, stmt) in b.stmts.iter().enumerate() { + if index == b.stmts.len() - 1 { + if let StmtKind::Expr(ref e) = stmt.node { + expr = Some(P(self.lower_expr(e))); + } else { + stmts.extend(self.lower_stmt(stmt)); + } } else { - stmts.push(last); + stmts.extend(self.lower_stmt(stmt)); } } P(hir::Block { - id: b.id, + id: self.lower_node_id(b.id), stmts: stmts.into(), expr: expr, rules: self.lower_block_check_mode(&b.rules), @@ -1046,13 +1203,30 @@ impl<'a> LoweringContext<'a> { let mut path = self.lower_path_extra(import.id, path, suffix, ParamMode::Explicit, true); path.span = span; - self.items.insert(import.id, hir::Item { - id: import.id, - name: import.rename.unwrap_or(ident).name, - attrs: attrs.clone(), - node: hir::ItemUse(P(path), hir::UseKind::Single), - vis: vis.clone(), - span: span, + + self.allocate_hir_id_counter(import.id, import); + self.with_hir_id_owner(import.id, |this| { + let vis = match *vis { + hir::Visibility::Public => hir::Visibility::Public, + hir::Visibility::Crate => hir::Visibility::Crate, + hir::Visibility::Inherited => hir::Visibility::Inherited, + hir::Visibility::Restricted { ref path, id: _ } => { + hir::Visibility::Restricted { + path: path.clone(), + // We are allocating a new NodeId here + id: this.next_id(), + } + } + }; + + 
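+            // Note that `this.next_id()` above runs within
+            // `with_hir_id_owner(import.id, ..)`, so the freshly allocated
+            // NodeId for the restricted-visibility path is mapped to a HirId
+            // whose owner is this `use` item's DefIndex.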
this.items.insert(import.id, hir::Item { + id: import.id, + name: import.rename.unwrap_or(ident).name, + attrs: attrs.clone(), + node: hir::ItemUse(P(path), hir::UseKind::Single), + vis: vis, + span: span, + }); }); } path @@ -1167,7 +1341,7 @@ impl<'a> LoweringContext<'a> { fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem { self.with_parent_def(i.id, |this| { hir::TraitItem { - id: i.id, + id: this.lower_node_id(i.id), name: i.ident.name, attrs: this.lower_attrs(&i.attrs), node: match i.node { @@ -1228,10 +1402,10 @@ impl<'a> LoweringContext<'a> { fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem { self.with_parent_def(i.id, |this| { hir::ImplItem { - id: i.id, + id: this.lower_node_id(i.id), name: i.ident.name, attrs: this.lower_attrs(&i.attrs), - vis: this.lower_visibility(&i.vis), + vis: this.lower_visibility(&i.vis, None), defaultness: this.lower_defaultness(i.defaultness, true /* [1] */), node: match i.node { ImplItemKind::Const(ref ty, ref expr) => { @@ -1260,7 +1434,7 @@ impl<'a> LoweringContext<'a> { id: hir::ImplItemId { node_id: i.id }, name: i.ident.name, span: i.span, - vis: self.lower_visibility(&i.vis), + vis: self.lower_visibility(&i.vis, Some(i.id)), defaultness: self.lower_defaultness(i.defaultness, true /* [1] */), kind: match i.node { ImplItemKind::Const(..) => hir::AssociatedItemKind::Const, @@ -1299,7 +1473,6 @@ impl<'a> LoweringContext<'a> { pub fn lower_item(&mut self, i: &Item) -> Option { let mut name = i.ident.name; let attrs = self.lower_attrs(&i.attrs); - let mut vis = self.lower_visibility(&i.vis); if let ItemKind::MacroDef(ref tts) = i.node { if i.attrs.iter().any(|attr| attr.path == "macro_export") { self.exported_macros.push(hir::MacroDef { @@ -1309,12 +1482,13 @@ impl<'a> LoweringContext<'a> { return None; } + let mut vis = self.lower_visibility(&i.vis, None); let node = self.with_parent_def(i.id, |this| { this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node) }); Some(hir::Item { - id: i.id, + id: self.lower_node_id(i.id), name: name, attrs: attrs, node: node, @@ -1326,7 +1500,7 @@ impl<'a> LoweringContext<'a> { fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem { self.with_parent_def(i.id, |this| { hir::ForeignItem { - id: i.id, + id: this.lower_node_id(i.id), name: i.ident.name, attrs: this.lower_attrs(&i.attrs), node: match i.node { @@ -1339,7 +1513,7 @@ impl<'a> LoweringContext<'a> { hir::ForeignItemStatic(this.lower_ty(t), m) } }, - vis: this.lower_visibility(&i.vis), + vis: this.lower_visibility(&i.vis, None), span: i.span, } }) @@ -1405,7 +1579,7 @@ impl<'a> LoweringContext<'a> { fn lower_pat(&mut self, p: &Pat) -> P { P(hir::Pat { - id: p.id, + id: self.lower_node_id(p.id), node: match p.node { PatKind::Wild => hir::PatKind::Wild, PatKind::Ident(ref binding_mode, pth1, ref sub) => { @@ -1491,707 +1665,746 @@ impl<'a> LoweringContext<'a> { } fn lower_expr(&mut self, e: &Expr) -> hir::Expr { - hir::Expr { - id: e.id, - node: match e.node { - // Issue #22181: - // Eventually a desugaring for `box EXPR` - // (similar to the desugaring above for `in PLACE BLOCK`) - // should go here, desugaring - // + let kind = match e.node { + // Issue #22181: + // Eventually a desugaring for `box EXPR` + // (similar to the desugaring above for `in PLACE BLOCK`) + // should go here, desugaring + // + // to: + // + // let mut place = BoxPlace::make_place(); + // let raw_place = Place::pointer(&mut place); + // let value = $value; + // unsafe { + // ::std::ptr::write(raw_place, value); + // Boxed::finalize(place) + 
// } + // + // But for now there are type-inference issues doing that. + ExprKind::Box(ref inner) => { + hir::ExprBox(P(self.lower_expr(inner))) + } + + // Desugar ExprBox: `in (PLACE) EXPR` + ExprKind::InPlace(ref placer, ref value_expr) => { // to: // - // let mut place = BoxPlace::make_place(); + // let p = PLACE; + // let mut place = Placer::make_place(p); // let raw_place = Place::pointer(&mut place); - // let value = $value; - // unsafe { - // ::std::ptr::write(raw_place, value); - // Boxed::finalize(place) - // } - // - // But for now there are type-inference issues doing that. - ExprKind::Box(ref e) => { - hir::ExprBox(P(self.lower_expr(e))) - } - - // Desugar ExprBox: `in (PLACE) EXPR` - ExprKind::InPlace(ref placer, ref value_expr) => { - // to: - // - // let p = PLACE; - // let mut place = Placer::make_place(p); - // let raw_place = Place::pointer(&mut place); - // push_unsafe!({ - // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); - // InPlace::finalize(place) - // }) - let placer_expr = P(self.lower_expr(placer)); - let value_expr = P(self.lower_expr(value_expr)); - - let placer_ident = self.str_to_ident("placer"); - let place_ident = self.str_to_ident("place"); - let p_ptr_ident = self.str_to_ident("p_ptr"); - - let make_place = ["ops", "Placer", "make_place"]; - let place_pointer = ["ops", "Place", "pointer"]; - let move_val_init = ["intrinsics", "move_val_init"]; - let inplace_finalize = ["ops", "InPlace", "finalize"]; - - let unstable_span = self.allow_internal_unstable("<-", e.span); - let make_call = |this: &mut LoweringContext, p, args| { - let path = P(this.expr_std_path(unstable_span, p, ThinVec::new())); - P(this.expr_call(e.span, path, args)) - }; + // push_unsafe!({ + // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); + // InPlace::finalize(place) + // }) + let placer_expr = P(self.lower_expr(placer)); + let value_expr = P(self.lower_expr(value_expr)); + + let placer_ident = self.str_to_ident("placer"); + let place_ident = self.str_to_ident("place"); + let p_ptr_ident = self.str_to_ident("p_ptr"); + + let make_place = ["ops", "Placer", "make_place"]; + let place_pointer = ["ops", "Place", "pointer"]; + let move_val_init = ["intrinsics", "move_val_init"]; + let inplace_finalize = ["ops", "InPlace", "finalize"]; + + let unstable_span = self.allow_internal_unstable("<-", e.span); + let make_call = |this: &mut LoweringContext, p, args| { + let path = P(this.expr_std_path(unstable_span, p, ThinVec::new())); + P(this.expr_call(e.span, path, args)) + }; - let mk_stmt_let = |this: &mut LoweringContext, bind, expr| { - this.stmt_let(e.span, false, bind, expr) - }; + let mk_stmt_let = |this: &mut LoweringContext, bind, expr| { + this.stmt_let(e.span, false, bind, expr) + }; - let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| { - this.stmt_let(e.span, true, bind, expr) - }; + let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| { + this.stmt_let(e.span, true, bind, expr) + }; - // let placer = ; - let (s1, placer_binding) = { - mk_stmt_let(self, placer_ident, placer_expr) - }; + // let placer = ; + let (s1, placer_binding) = { + mk_stmt_let(self, placer_ident, placer_expr) + }; - // let mut place = Placer::make_place(placer); - let (s2, place_binding) = { - let placer = self.expr_ident(e.span, placer_ident, placer_binding); - let call = make_call(self, &make_place, hir_vec![placer]); - mk_stmt_let_mut(self, place_ident, call) - }; + // let mut place = Placer::make_place(placer); + let (s2, place_binding) = { + let placer 
= self.expr_ident(e.span, placer_ident, placer_binding); + let call = make_call(self, &make_place, hir_vec![placer]); + mk_stmt_let_mut(self, place_ident, call) + }; - // let p_ptr = Place::pointer(&mut place); - let (s3, p_ptr_binding) = { - let agent = P(self.expr_ident(e.span, place_ident, place_binding)); - let args = hir_vec![self.expr_mut_addr_of(e.span, agent)]; - let call = make_call(self, &place_pointer, args); - mk_stmt_let(self, p_ptr_ident, call) - }; + // let p_ptr = Place::pointer(&mut place); + let (s3, p_ptr_binding) = { + let agent = P(self.expr_ident(e.span, place_ident, place_binding)); + let args = hir_vec![self.expr_mut_addr_of(e.span, agent)]; + let call = make_call(self, &place_pointer, args); + mk_stmt_let(self, p_ptr_ident, call) + }; - // pop_unsafe!(EXPR)); - let pop_unsafe_expr = { - self.signal_block_expr(hir_vec![], - value_expr, - e.span, - hir::PopUnsafeBlock(hir::CompilerGenerated), - ThinVec::new()) - }; + // pop_unsafe!(EXPR)); + let pop_unsafe_expr = { + self.signal_block_expr(hir_vec![], + value_expr, + e.span, + hir::PopUnsafeBlock(hir::CompilerGenerated), + ThinVec::new()) + }; - // push_unsafe!({ - // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); - // InPlace::finalize(place) - // }) - let expr = { - let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding); - let call_move_val_init = - hir::StmtSemi( - make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]), - self.next_id()); - let call_move_val_init = respan(e.span, call_move_val_init); - - let place = self.expr_ident(e.span, place_ident, place_binding); - let call = make_call(self, &inplace_finalize, hir_vec![place]); - P(self.signal_block_expr(hir_vec![call_move_val_init], - call, - e.span, - hir::PushUnsafeBlock(hir::CompilerGenerated), - ThinVec::new())) - }; + // push_unsafe!({ + // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); + // InPlace::finalize(place) + // }) + let expr = { + let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding); + let call_move_val_init = + hir::StmtSemi( + make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]), + self.next_id()); + let call_move_val_init = respan(e.span, call_move_val_init); + + let place = self.expr_ident(e.span, place_ident, place_binding); + let call = make_call(self, &inplace_finalize, hir_vec![place]); + P(self.signal_block_expr(hir_vec![call_move_val_init], + call, + e.span, + hir::PushUnsafeBlock(hir::CompilerGenerated), + ThinVec::new())) + }; - let block = self.block_all(e.span, hir_vec![s1, s2, s3], Some(expr)); - // add the attributes to the outer returned expr node - return self.expr_block(P(block), e.attrs.clone()); - } + let block = self.block_all(e.span, hir_vec![s1, s2, s3], Some(expr)); + hir::ExprBlock(P(block)) + } - ExprKind::Array(ref exprs) => { - hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect()) - } - ExprKind::Repeat(ref expr, ref count) => { - let expr = P(self.lower_expr(expr)); - let count = self.lower_expr(count); - hir::ExprRepeat(expr, self.record_body(count, None)) - } - ExprKind::Tup(ref elts) => { - hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect()) - } - ExprKind::Call(ref f, ref args) => { - let f = P(self.lower_expr(f)); - hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect()) - } - ExprKind::MethodCall(i, ref tps, ref args) => { - let tps = tps.iter().map(|x| self.lower_ty(x)).collect(); - let args = args.iter().map(|x| self.lower_expr(x)).collect(); - hir::ExprMethodCall(respan(i.span, i.node.name), tps, 
args) - } - ExprKind::Binary(binop, ref lhs, ref rhs) => { - let binop = self.lower_binop(binop); - let lhs = P(self.lower_expr(lhs)); - let rhs = P(self.lower_expr(rhs)); - hir::ExprBinary(binop, lhs, rhs) - } - ExprKind::Unary(op, ref ohs) => { - let op = self.lower_unop(op); - let ohs = P(self.lower_expr(ohs)); - hir::ExprUnary(op, ohs) - } - ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())), - ExprKind::Cast(ref expr, ref ty) => { - let expr = P(self.lower_expr(expr)); - hir::ExprCast(expr, self.lower_ty(ty)) - } - ExprKind::Type(ref expr, ref ty) => { - let expr = P(self.lower_expr(expr)); - hir::ExprType(expr, self.lower_ty(ty)) - } - ExprKind::AddrOf(m, ref ohs) => { - let m = self.lower_mutability(m); - let ohs = P(self.lower_expr(ohs)); - hir::ExprAddrOf(m, ohs) - } - // More complicated than you might expect because the else branch - // might be `if let`. - ExprKind::If(ref cond, ref blk, ref else_opt) => { - let else_opt = else_opt.as_ref().map(|els| { - match els.node { - ExprKind::IfLet(..) => { - // wrap the if-let expr in a block - let span = els.span; - let els = P(self.lower_expr(els)); - let id = self.next_id(); - let blk = P(hir::Block { - stmts: hir_vec![], - expr: Some(els), - id: id, - rules: hir::DefaultBlock, - span: span, - break_to_expr_id: None, - }); - P(self.expr_block(blk, ThinVec::new())) - } - _ => P(self.lower_expr(els)), + ExprKind::Array(ref exprs) => { + hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect()) + } + ExprKind::Repeat(ref expr, ref count) => { + let expr = P(self.lower_expr(expr)); + let count = self.lower_expr(count); + hir::ExprRepeat(expr, self.record_body(count, None)) + } + ExprKind::Tup(ref elts) => { + hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect()) + } + ExprKind::Call(ref f, ref args) => { + let f = P(self.lower_expr(f)); + hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect()) + } + ExprKind::MethodCall(i, ref tps, ref args) => { + let tps = tps.iter().map(|x| self.lower_ty(x)).collect(); + let args = args.iter().map(|x| self.lower_expr(x)).collect(); + hir::ExprMethodCall(respan(i.span, i.node.name), tps, args) + } + ExprKind::Binary(binop, ref lhs, ref rhs) => { + let binop = self.lower_binop(binop); + let lhs = P(self.lower_expr(lhs)); + let rhs = P(self.lower_expr(rhs)); + hir::ExprBinary(binop, lhs, rhs) + } + ExprKind::Unary(op, ref ohs) => { + let op = self.lower_unop(op); + let ohs = P(self.lower_expr(ohs)); + hir::ExprUnary(op, ohs) + } + ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())), + ExprKind::Cast(ref expr, ref ty) => { + let expr = P(self.lower_expr(expr)); + hir::ExprCast(expr, self.lower_ty(ty)) + } + ExprKind::Type(ref expr, ref ty) => { + let expr = P(self.lower_expr(expr)); + hir::ExprType(expr, self.lower_ty(ty)) + } + ExprKind::AddrOf(m, ref ohs) => { + let m = self.lower_mutability(m); + let ohs = P(self.lower_expr(ohs)); + hir::ExprAddrOf(m, ohs) + } + // More complicated than you might expect because the else branch + // might be `if let`. + ExprKind::If(ref cond, ref blk, ref else_opt) => { + let else_opt = else_opt.as_ref().map(|els| { + match els.node { + ExprKind::IfLet(..) 
=> { + // wrap the if-let expr in a block + let span = els.span; + let els = P(self.lower_expr(els)); + let id = self.next_id(); + let blk = P(hir::Block { + stmts: hir_vec![], + expr: Some(els), + id: id, + rules: hir::DefaultBlock, + span: span, + break_to_expr_id: None, + }); + P(self.expr_block(blk, ThinVec::new())) } - }); + _ => P(self.lower_expr(els)), + } + }); - hir::ExprIf(P(self.lower_expr(cond)), self.lower_block(blk, None), else_opt) - } - ExprKind::While(ref cond, ref body, opt_ident) => { - self.with_loop_scope(e.id, |this| - hir::ExprWhile( - this.with_loop_condition_scope(|this| P(this.lower_expr(cond))), - this.lower_block(body, None), - this.lower_opt_sp_ident(opt_ident))) - } - ExprKind::Loop(ref body, opt_ident) => { - self.with_loop_scope(e.id, |this| - hir::ExprLoop(this.lower_block(body, None), - this.lower_opt_sp_ident(opt_ident), - hir::LoopSource::Loop)) - } - ExprKind::Catch(ref body) => { - self.with_catch_scope(e.id, |this| - hir::ExprBlock(this.lower_block(body, Some(e.id)))) - } - ExprKind::Match(ref expr, ref arms) => { - hir::ExprMatch(P(self.lower_expr(expr)), - arms.iter().map(|x| self.lower_arm(x)).collect(), - hir::MatchSource::Normal) - } - ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => { - self.with_new_scopes(|this| { - this.with_parent_def(e.id, |this| { - let expr = this.lower_expr(body); - hir::ExprClosure(this.lower_capture_clause(capture_clause), - this.lower_fn_decl(decl), - this.record_body(expr, Some(decl)), - fn_decl_span) - }) + hir::ExprIf(P(self.lower_expr(cond)), self.lower_block(blk, None), else_opt) + } + ExprKind::While(ref cond, ref body, opt_ident) => { + self.with_loop_scope(e.id, |this| + hir::ExprWhile( + this.with_loop_condition_scope(|this| P(this.lower_expr(cond))), + this.lower_block(body, None), + this.lower_opt_sp_ident(opt_ident))) + } + ExprKind::Loop(ref body, opt_ident) => { + self.with_loop_scope(e.id, |this| + hir::ExprLoop(this.lower_block(body, None), + this.lower_opt_sp_ident(opt_ident), + hir::LoopSource::Loop)) + } + ExprKind::Catch(ref body) => { + self.with_catch_scope(e.id, |this| + hir::ExprBlock(this.lower_block(body, Some(e.id)))) + } + ExprKind::Match(ref expr, ref arms) => { + hir::ExprMatch(P(self.lower_expr(expr)), + arms.iter().map(|x| self.lower_arm(x)).collect(), + hir::MatchSource::Normal) + } + ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => { + self.with_new_scopes(|this| { + this.with_parent_def(e.id, |this| { + let expr = this.lower_expr(body); + hir::ExprClosure(this.lower_capture_clause(capture_clause), + this.lower_fn_decl(decl), + this.record_body(expr, Some(decl)), + fn_decl_span) }) - } - ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk, None)), - ExprKind::Assign(ref el, ref er) => { - hir::ExprAssign(P(self.lower_expr(el)), P(self.lower_expr(er))) - } - ExprKind::AssignOp(op, ref el, ref er) => { - hir::ExprAssignOp(self.lower_binop(op), - P(self.lower_expr(el)), - P(self.lower_expr(er))) - } - ExprKind::Field(ref el, ident) => { - hir::ExprField(P(self.lower_expr(el)), respan(ident.span, ident.node.name)) - } - ExprKind::TupField(ref el, ident) => { - hir::ExprTupField(P(self.lower_expr(el)), ident) - } - ExprKind::Index(ref el, ref er) => { - hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er))) - } - ExprKind::Range(ref e1, ref e2, lims) => { - fn make_struct(this: &mut LoweringContext, - ast_expr: &Expr, - path: &[&str], - fields: &[(&str, &P)]) -> hir::Expr { - let struct_path = 
&iter::once(&"ops").chain(path).map(|s| *s) - .collect::>(); - let unstable_span = this.allow_internal_unstable("...", ast_expr.span); - - if fields.len() == 0 { - this.expr_std_path(unstable_span, struct_path, - ast_expr.attrs.clone()) - } else { - let fields = fields.into_iter().map(|&(s, e)| { - let expr = P(this.lower_expr(&e)); - let unstable_span = this.allow_internal_unstable("...", e.span); - this.field(Symbol::intern(s), expr, unstable_span) - }).collect(); - let attrs = ast_expr.attrs.clone(); - - this.expr_std_struct(unstable_span, struct_path, fields, None, attrs) - } + }) + } + ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk, None)), + ExprKind::Assign(ref el, ref er) => { + hir::ExprAssign(P(self.lower_expr(el)), P(self.lower_expr(er))) + } + ExprKind::AssignOp(op, ref el, ref er) => { + hir::ExprAssignOp(self.lower_binop(op), + P(self.lower_expr(el)), + P(self.lower_expr(er))) + } + ExprKind::Field(ref el, ident) => { + hir::ExprField(P(self.lower_expr(el)), respan(ident.span, ident.node.name)) + } + ExprKind::TupField(ref el, ident) => { + hir::ExprTupField(P(self.lower_expr(el)), ident) + } + ExprKind::Index(ref el, ref er) => { + hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er))) + } + ExprKind::Range(ref e1, ref e2, lims) => { + fn make_struct(this: &mut LoweringContext, + ast_expr: &Expr, + path: &[&str], + fields: &[(&str, &P)]) -> hir::Expr { + let struct_path = &iter::once(&"ops").chain(path).map(|s| *s) + .collect::>(); + let unstable_span = this.allow_internal_unstable("...", ast_expr.span); + + if fields.len() == 0 { + this.expr_std_path(unstable_span, struct_path, + ast_expr.attrs.clone()) + } else { + let fields = fields.into_iter().map(|&(s, e)| { + let expr = P(this.lower_expr(&e)); + let unstable_span = this.allow_internal_unstable("...", e.span); + this.field(Symbol::intern(s), expr, unstable_span) + }).collect(); + let attrs = ast_expr.attrs.clone(); + + this.expr_std_struct(unstable_span, struct_path, fields, None, attrs) } + } - use syntax::ast::RangeLimits::*; + use syntax::ast::RangeLimits::*; - return match (e1, e2, lims) { - (&None, &None, HalfOpen) => - make_struct(self, e, &["RangeFull"], &[]), + return match (e1, e2, lims) { + (&None, &None, HalfOpen) => + make_struct(self, e, &["RangeFull"], &[]), - (&Some(ref e1), &None, HalfOpen) => - make_struct(self, e, &["RangeFrom"], - &[("start", e1)]), + (&Some(ref e1), &None, HalfOpen) => + make_struct(self, e, &["RangeFrom"], + &[("start", e1)]), - (&None, &Some(ref e2), HalfOpen) => - make_struct(self, e, &["RangeTo"], - &[("end", e2)]), + (&None, &Some(ref e2), HalfOpen) => + make_struct(self, e, &["RangeTo"], + &[("end", e2)]), - (&Some(ref e1), &Some(ref e2), HalfOpen) => - make_struct(self, e, &["Range"], - &[("start", e1), ("end", e2)]), + (&Some(ref e1), &Some(ref e2), HalfOpen) => + make_struct(self, e, &["Range"], + &[("start", e1), ("end", e2)]), - (&None, &Some(ref e2), Closed) => - make_struct(self, e, &["RangeToInclusive"], - &[("end", e2)]), + (&None, &Some(ref e2), Closed) => + make_struct(self, e, &["RangeToInclusive"], + &[("end", e2)]), - (&Some(ref e1), &Some(ref e2), Closed) => - make_struct(self, e, &["RangeInclusive", "NonEmpty"], - &[("start", e1), ("end", e2)]), + (&Some(ref e1), &Some(ref e2), Closed) => + make_struct(self, e, &["RangeInclusive", "NonEmpty"], + &[("start", e1), ("end", e2)]), - _ => panic!(self.diagnostic() - .span_fatal(e.span, "inclusive range with no end")), - }; - } - ExprKind::Path(ref qself, ref path) => { - 
hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional)) - } - ExprKind::Break(opt_ident, ref opt_expr) => { - let label_result = if self.is_in_loop_condition && opt_ident.is_none() { + _ => panic!(self.diagnostic() + .span_fatal(e.span, "inclusive range with no end")), + }; + } + ExprKind::Path(ref qself, ref path) => { + hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional)) + } + ExprKind::Break(opt_ident, ref opt_expr) => { + let label_result = if self.is_in_loop_condition && opt_ident.is_none() { + hir::Destination { + ident: opt_ident, + target_id: hir::ScopeTarget::Loop( + Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()), + } + } else { + self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident))) + }; + hir::ExprBreak( + label_result, + opt_expr.as_ref().map(|x| P(self.lower_expr(x)))) + } + ExprKind::Continue(opt_ident) => + hir::ExprAgain( + if self.is_in_loop_condition && opt_ident.is_none() { hir::Destination { ident: opt_ident, - target_id: hir::ScopeTarget::Loop( - Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()), + target_id: hir::ScopeTarget::Loop(Err( + hir::LoopIdError::UnlabeledCfInWhileCondition).into()), } } else { - self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident))) - }; - hir::ExprBreak( - label_result, - opt_expr.as_ref().map(|x| P(self.lower_expr(x)))) - } - ExprKind::Continue(opt_ident) => - hir::ExprAgain( - if self.is_in_loop_condition && opt_ident.is_none() { - hir::Destination { - ident: opt_ident, - target_id: hir::ScopeTarget::Loop(Err( - hir::LoopIdError::UnlabeledCfInWhileCondition).into()), - } + self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident))) + }), + ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))), + ExprKind::InlineAsm(ref asm) => { + let hir_asm = hir::InlineAsm { + inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(), + outputs: asm.outputs.iter().map(|out| { + hir::InlineAsmOutput { + constraint: out.constraint.clone(), + is_rw: out.is_rw, + is_indirect: out.is_indirect, + } + }).collect(), + asm: asm.asm.clone(), + asm_str_style: asm.asm_str_style, + clobbers: asm.clobbers.clone().into(), + volatile: asm.volatile, + alignstack: asm.alignstack, + dialect: asm.dialect, + expn_id: asm.expn_id, + }; + let outputs = + asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(); + let inputs = + asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect(); + hir::ExprInlineAsm(P(hir_asm), outputs, inputs) + } + ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { + hir::ExprStruct(self.lower_qpath(e.id, &None, path, ParamMode::Optional), + fields.iter().map(|x| self.lower_field(x)).collect(), + maybe_expr.as_ref().map(|x| P(self.lower_expr(x)))) + } + ExprKind::Paren(ref ex) => { + let mut ex = self.lower_expr(ex); + // include parens in span, but only if it is a super-span. + if e.span.contains(ex.span) { + ex.span = e.span; + } + // merge attributes into the inner expression. 
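                // The outer expression's attributes are placed first, followed
                // by any attributes already present on the inner expression,
                // and the merged list is carried on the returned inner expression.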
+ let mut attrs = e.attrs.clone(); + attrs.extend::>(ex.attrs.into()); + ex.attrs = attrs; + return ex; + } + + // Desugar ExprIfLet + // From: `if let = []` + ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => { + // to: + // + // match { + // => , + // [_ if => ,] + // _ => [ | ()] + // } + + let mut arms = vec![]; + + // ` => ` + { + let body = self.lower_block(body, None); + let body_expr = P(self.expr_block(body, ThinVec::new())); + let pat = self.lower_pat(pat); + arms.push(self.arm(hir_vec![pat], body_expr)); + } + + // `[_ if => ,]` + // `_ => [ | ()]` + { + let mut current: Option<&Expr> = else_opt.as_ref().map(|p| &**p); + let mut else_exprs: Vec> = vec![current]; + + // First, we traverse the AST and recursively collect all + // `else` branches into else_exprs, e.g.: + // + // if let Some(_) = x { + // ... + // } else if ... { // Expr1 + // ... + // } else if ... { // Expr2 + // ... + // } else { // Expr3 + // ... + // } + // + // ... results in else_exprs = [Some(&Expr1), + // Some(&Expr2), + // Some(&Expr3)] + // + // Because there also the case there is no `else`, these + // entries can also be `None`, as in: + // + // if let Some(_) = x { + // ... + // } else if ... { // Expr1 + // ... + // } else if ... { // Expr2 + // ... + // } + // + // ... results in else_exprs = [Some(&Expr1), + // Some(&Expr2), + // None] + // + // The last entry in this list is always translated into + // the final "unguard" wildcard arm of the `match`. In the + // case of a `None`, it becomes `_ => ()`. + loop { + if let Some(e) = current { + // There is an else branch at this level + if let ExprKind::If(_, _, ref else_opt) = e.node { + // The else branch is again an if-expr + current = else_opt.as_ref().map(|p| &**p); + else_exprs.push(current); + } else { + // The last item in the list is not an if-expr, + // stop here + break + } } else { - self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident))) - }), - ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))), - ExprKind::InlineAsm(ref asm) => { - let hir_asm = hir::InlineAsm { - inputs: asm.inputs.iter().map(|&(ref c, _)| c.clone()).collect(), - outputs: asm.outputs.iter().map(|out| { - hir::InlineAsmOutput { - constraint: out.constraint.clone(), - is_rw: out.is_rw, - is_indirect: out.is_indirect, - } - }).collect(), - asm: asm.asm.clone(), - asm_str_style: asm.asm_str_style, - clobbers: asm.clobbers.clone().into(), - volatile: asm.volatile, - alignstack: asm.alignstack, - dialect: asm.dialect, - expn_id: asm.expn_id, - }; - let outputs = - asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(); - let inputs = - asm.inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect(); - hir::ExprInlineAsm(P(hir_asm), outputs, inputs) - } - ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { - hir::ExprStruct(self.lower_qpath(e.id, &None, path, ParamMode::Optional), - fields.iter().map(|x| self.lower_field(x)).collect(), - maybe_expr.as_ref().map(|x| P(self.lower_expr(x)))) - } - ExprKind::Paren(ref ex) => { - let mut ex = self.lower_expr(ex); - // include parens in span, but only if it is a super-span. - if e.span.contains(ex.span) { - ex.span = e.span; + // We have no more else branch + break + } } - // merge attributes into the inner expression. 
- let mut attrs = e.attrs.clone(); - attrs.extend::>(ex.attrs.into()); - ex.attrs = attrs; - return ex; - } - // Desugar ExprIfLet - // From: `if let = []` - ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => { - // to: - // - // match { - // => , - // [_ if => ,] - // _ => [ | ()] - // } - - // ` => ` - let pat_arm = { - let body = self.lower_block(body, None); - let body_expr = P(self.expr_block(body, ThinVec::new())); - let pat = self.lower_pat(pat); - self.arm(hir_vec![pat], body_expr) - }; + // Now translate the list of nested else-branches into the + // arms of the match statement. + for else_expr in else_exprs { + if let Some(else_expr) = else_expr { + let (guard, body) = if let ExprKind::If(ref cond, + ref then, + _) = else_expr.node { + let then = self.lower_block(then, None); + (Some(cond), + self.expr_block(then, ThinVec::new())) + } else { + (None, + self.lower_expr(else_expr)) + }; - // `[_ if => ,]` - let mut else_opt = else_opt.as_ref().map(|e| P(self.lower_expr(e))); - let else_if_arms = { - let mut arms = vec![]; - loop { - let else_opt_continue = else_opt.and_then(|els| { - els.and_then(|els| { - match els.node { - // else if - hir::ExprIf(cond, then, else_opt) => { - let pat_under = self.pat_wild(e.span); - arms.push(hir::Arm { - attrs: hir_vec![], - pats: hir_vec![pat_under], - guard: Some(cond), - body: P(self.expr_block(then, ThinVec::new())), - }); - else_opt.map(|else_opt| (else_opt, true)) - } - _ => Some((P(els), false)), - } - }) + arms.push(hir::Arm { + attrs: hir_vec![], + pats: hir_vec![self.pat_wild(e.span)], + guard: guard.map(|e| P(self.lower_expr(e))), + body: P(body), }); - match else_opt_continue { - Some((e, true)) => { - else_opt = Some(e); - } - Some((e, false)) => { - else_opt = Some(e); - break; - } - None => { - else_opt = None; - break; - } - } + } else { + // There was no else-branch, push a noop + let pat_under = self.pat_wild(e.span); + let unit = self.expr_tuple(e.span, hir_vec![]); + arms.push(self.arm(hir_vec![pat_under], unit)); } - arms - }; + } + } - let contains_else_clause = else_opt.is_some(); + let contains_else_clause = else_opt.is_some(); - // `_ => [ | ()]` - let else_arm = { - let pat_under = self.pat_wild(e.span); - let else_expr = - else_opt.unwrap_or_else(|| self.expr_tuple(e.span, hir_vec![])); - self.arm(hir_vec![pat_under], else_expr) - }; + let sub_expr = P(self.lower_expr(sub_expr)); - let mut arms = Vec::with_capacity(else_if_arms.len() + 2); - arms.push(pat_arm); - arms.extend(else_if_arms); - arms.push(else_arm); - - let sub_expr = P(self.lower_expr(sub_expr)); - // add attributes to the outer returned expr node - return self.expr(e.span, - hir::ExprMatch(sub_expr, - arms.into(), - hir::MatchSource::IfLetDesugar { - contains_else_clause: contains_else_clause, - }), - e.attrs.clone()); - } + hir::ExprMatch( + sub_expr, + arms.into(), + hir::MatchSource::IfLetDesugar { + contains_else_clause: contains_else_clause, + }) + } - // Desugar ExprWhileLet - // From: `[opt_ident]: while let = ` - ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => { - // to: - // - // [opt_ident]: loop { - // match { - // => , - // _ => break - // } - // } - - // Note that the block AND the condition are evaluated in the loop scope. - // This is done to allow `break` from inside the condition of the loop. 
- let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| ( - this.lower_block(body, None), - this.expr_break(e.span, ThinVec::new()), - this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))), - )); - - // ` => ` - let pat_arm = { - let body_expr = P(self.expr_block(body, ThinVec::new())); - let pat = self.lower_pat(pat); - self.arm(hir_vec![pat], body_expr) - }; + // Desugar ExprWhileLet + // From: `[opt_ident]: while let = ` + ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => { + // to: + // + // [opt_ident]: loop { + // match { + // => , + // _ => break + // } + // } + + // Note that the block AND the condition are evaluated in the loop scope. + // This is done to allow `break` from inside the condition of the loop. + let (body, break_expr, sub_expr) = self.with_loop_scope(e.id, |this| ( + this.lower_block(body, None), + this.expr_break(e.span, ThinVec::new()), + this.with_loop_condition_scope(|this| P(this.lower_expr(sub_expr))), + )); + + // ` => ` + let pat_arm = { + let body_expr = P(self.expr_block(body, ThinVec::new())); + let pat = self.lower_pat(pat); + self.arm(hir_vec![pat], body_expr) + }; - // `_ => break` - let break_arm = { - let pat_under = self.pat_wild(e.span); - self.arm(hir_vec![pat_under], break_expr) - }; + // `_ => break` + let break_arm = { + let pat_under = self.pat_wild(e.span); + self.arm(hir_vec![pat_under], break_expr) + }; - // `match { ... }` - let arms = hir_vec![pat_arm, break_arm]; - let match_expr = self.expr(e.span, - hir::ExprMatch(sub_expr, - arms, - hir::MatchSource::WhileLetDesugar), - ThinVec::new()); - - // `[opt_ident]: loop { ... }` - let loop_block = P(self.block_expr(P(match_expr))); - let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident), - hir::LoopSource::WhileLet); - // add attributes to the outer returned expr node - let attrs = e.attrs.clone(); - return hir::Expr { id: e.id, node: loop_expr, span: e.span, attrs: attrs }; - } + // `match { ... }` + let arms = hir_vec![pat_arm, break_arm]; + let match_expr = self.expr(e.span, + hir::ExprMatch(sub_expr, + arms, + hir::MatchSource::WhileLetDesugar), + ThinVec::new()); + + // `[opt_ident]: loop { ... 
}` + let loop_block = P(self.block_expr(P(match_expr))); + let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident), + hir::LoopSource::WhileLet); + // add attributes to the outer returned expr node + loop_expr + } - // Desugar ExprForLoop - // From: `[opt_ident]: for in ` - ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => { - // to: - // - // { - // let result = match ::std::iter::IntoIterator::into_iter() { - // mut iter => { - // [opt_ident]: loop { - // match ::std::iter::Iterator::next(&mut iter) { - // ::std::option::Option::Some() => , - // ::std::option::Option::None => break - // } - // } - // } - // }; - // result - // } - - // expand - let head = self.lower_expr(head); - - let iter = self.str_to_ident("iter"); - - // `::std::option::Option::Some() => ` - let pat_arm = { - let body_block = self.with_loop_scope(e.id, - |this| this.lower_block(body, None)); - let body_expr = P(self.expr_block(body_block, ThinVec::new())); - let pat = self.lower_pat(pat); - let some_pat = self.pat_some(e.span, pat); - - self.arm(hir_vec![some_pat], body_expr) - }; + // Desugar ExprForLoop + // From: `[opt_ident]: for in ` + ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => { + // to: + // + // { + // let result = match ::std::iter::IntoIterator::into_iter() { + // mut iter => { + // [opt_ident]: loop { + // match ::std::iter::Iterator::next(&mut iter) { + // ::std::option::Option::Some() => , + // ::std::option::Option::None => break + // } + // } + // } + // }; + // result + // } + + // expand + let head = self.lower_expr(head); + + let iter = self.str_to_ident("iter"); + + // `::std::option::Option::Some() => ` + let pat_arm = { + let body_block = self.with_loop_scope(e.id, + |this| this.lower_block(body, None)); + let body_expr = P(self.expr_block(body_block, ThinVec::new())); + let pat = self.lower_pat(pat); + let some_pat = self.pat_some(e.span, pat); + + self.arm(hir_vec![some_pat], body_expr) + }; - // `::std::option::Option::None => break` - let break_arm = { - let break_expr = self.with_loop_scope(e.id, |this| - this.expr_break(e.span, ThinVec::new())); - let pat = self.pat_none(e.span); - self.arm(hir_vec![pat], break_expr) - }; + // `::std::option::Option::None => break` + let break_arm = { + let break_expr = self.with_loop_scope(e.id, |this| + this.expr_break(e.span, ThinVec::new())); + let pat = self.pat_none(e.span); + self.arm(hir_vec![pat], break_expr) + }; - // `mut iter` - let iter_pat = self.pat_ident_binding_mode(e.span, iter, - hir::BindByValue(hir::MutMutable)); + // `mut iter` + let iter_pat = self.pat_ident_binding_mode(e.span, iter, + hir::BindByValue(hir::MutMutable)); + + // `match ::std::iter::Iterator::next(&mut iter) { ... }` + let match_expr = { + let iter = P(self.expr_ident(e.span, iter, iter_pat.id)); + let ref_mut_iter = self.expr_mut_addr_of(e.span, iter); + let next_path = &["iter", "Iterator", "next"]; + let next_path = P(self.expr_std_path(e.span, next_path, ThinVec::new())); + let next_expr = P(self.expr_call(e.span, next_path, + hir_vec![ref_mut_iter])); + let arms = hir_vec![pat_arm, break_arm]; - // `match ::std::iter::Iterator::next(&mut iter) { ... 
}` - let match_expr = { - let iter = P(self.expr_ident(e.span, iter, iter_pat.id)); - let ref_mut_iter = self.expr_mut_addr_of(e.span, iter); - let next_path = &["iter", "Iterator", "next"]; - let next_path = P(self.expr_std_path(e.span, next_path, ThinVec::new())); - let next_expr = P(self.expr_call(e.span, next_path, - hir_vec![ref_mut_iter])); - let arms = hir_vec![pat_arm, break_arm]; + P(self.expr(e.span, + hir::ExprMatch(next_expr, arms, + hir::MatchSource::ForLoopDesugar), + ThinVec::new())) + }; - P(self.expr(e.span, - hir::ExprMatch(next_expr, arms, - hir::MatchSource::ForLoopDesugar), - ThinVec::new())) - }; + // `[opt_ident]: loop { ... }` + let loop_block = P(self.block_expr(match_expr)); + let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident), + hir::LoopSource::ForLoop); + let loop_expr = P(hir::Expr { + id: self.lower_node_id(e.id), + node: loop_expr, + span: e.span, + attrs: ThinVec::new(), + }); - // `[opt_ident]: loop { ... }` - let loop_block = P(self.block_expr(match_expr)); - let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident), - hir::LoopSource::ForLoop); - let loop_expr = P(hir::Expr { - id: e.id, - node: loop_expr, - span: e.span, - attrs: ThinVec::new(), - }); - - // `mut iter => { ... }` - let iter_arm = self.arm(hir_vec![iter_pat], loop_expr); - - // `match ::std::iter::IntoIterator::into_iter() { ... }` - let into_iter_expr = { - let into_iter_path = &["iter", "IntoIterator", "into_iter"]; - let into_iter = P(self.expr_std_path(e.span, into_iter_path, - ThinVec::new())); - P(self.expr_call(e.span, into_iter, hir_vec![head])) - }; + // `mut iter => { ... }` + let iter_arm = self.arm(hir_vec![iter_pat], loop_expr); - let match_expr = P(self.expr_match(e.span, - into_iter_expr, - hir_vec![iter_arm], - hir::MatchSource::ForLoopDesugar)); - - // `{ let _result = ...; _result }` - // underscore prevents an unused_variables lint if the head diverges - let result_ident = self.str_to_ident("_result"); - let (let_stmt, let_stmt_binding) = - self.stmt_let(e.span, false, result_ident, match_expr); - - let result = P(self.expr_ident(e.span, result_ident, let_stmt_binding)); - let block = P(self.block_all(e.span, hir_vec![let_stmt], Some(result))); - // add the attributes to the outer returned expr node - return self.expr_block(block, e.attrs.clone()); - } + // `match ::std::iter::IntoIterator::into_iter() { ... 
}` + let into_iter_expr = { + let into_iter_path = &["iter", "IntoIterator", "into_iter"]; + let into_iter = P(self.expr_std_path(e.span, into_iter_path, + ThinVec::new())); + P(self.expr_call(e.span, into_iter, hir_vec![head])) + }; - // Desugar ExprKind::Try - // From: `?` - ExprKind::Try(ref sub_expr) => { - // to: - // - // match Carrier::translate() { - // Ok(val) => #[allow(unreachable_code)] val, - // Err(err) => #[allow(unreachable_code)] - // // If there is an enclosing `catch {...}` - // break 'catch_target Carrier::from_error(From::from(err)), - // // Otherwise - // return Carrier::from_error(From::from(err)), - // } + let match_expr = P(self.expr_match(e.span, + into_iter_expr, + hir_vec![iter_arm], + hir::MatchSource::ForLoopDesugar)); + + // `{ let _result = ...; _result }` + // underscore prevents an unused_variables lint if the head diverges + let result_ident = self.str_to_ident("_result"); + let (let_stmt, let_stmt_binding) = + self.stmt_let(e.span, false, result_ident, match_expr); + + let result = P(self.expr_ident(e.span, result_ident, let_stmt_binding)); + let block = P(self.block_all(e.span, hir_vec![let_stmt], Some(result))); + // add the attributes to the outer returned expr node + return self.expr_block(block, e.attrs.clone()); + } - let unstable_span = self.allow_internal_unstable("?", e.span); + // Desugar ExprKind::Try + // From: `?` + ExprKind::Try(ref sub_expr) => { + // to: + // + // match Carrier::translate() { + // Ok(val) => #[allow(unreachable_code)] val, + // Err(err) => #[allow(unreachable_code)] + // // If there is an enclosing `catch {...}` + // break 'catch_target Carrier::from_error(From::from(err)), + // // Otherwise + // return Carrier::from_error(From::from(err)), + // } - // Carrier::translate() - let discr = { - // expand - let sub_expr = self.lower_expr(sub_expr); + let unstable_span = self.allow_internal_unstable("?", e.span); - let path = &["ops", "Carrier", "translate"]; - let path = P(self.expr_std_path(unstable_span, path, ThinVec::new())); - P(self.expr_call(e.span, path, hir_vec![sub_expr])) + // Carrier::translate() + let discr = { + // expand + let sub_expr = self.lower_expr(sub_expr); + + let path = &["ops", "Carrier", "translate"]; + let path = P(self.expr_std_path(unstable_span, path, ThinVec::new())); + P(self.expr_call(e.span, path, hir_vec![sub_expr])) + }; + + // #[allow(unreachable_code)] + let attr = { + // allow(unreachable_code) + let allow = { + let allow_ident = self.str_to_ident("allow"); + let uc_ident = self.str_to_ident("unreachable_code"); + let uc_meta_item = attr::mk_spanned_word_item(e.span, uc_ident); + let uc_nested = NestedMetaItemKind::MetaItem(uc_meta_item); + let uc_spanned = respan(e.span, uc_nested); + attr::mk_spanned_list_item(e.span, allow_ident, vec![uc_spanned]) }; + attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow) + }; + let attrs = vec![attr]; + + // Ok(val) => #[allow(unreachable_code)] val, + let ok_arm = { + let val_ident = self.str_to_ident("val"); + let val_pat = self.pat_ident(e.span, val_ident); + let val_expr = P(self.expr_ident_with_attrs(e.span, + val_ident, + val_pat.id, + ThinVec::from(attrs.clone()))); + let ok_pat = self.pat_ok(e.span, val_pat); + + self.arm(hir_vec![ok_pat], val_expr) + }; - // #[allow(unreachable_code)] - let attr = { - // allow(unreachable_code) - let allow = { - let allow_ident = self.str_to_ident("allow"); - let uc_ident = self.str_to_ident("unreachable_code"); - let uc_meta_item = attr::mk_spanned_word_item(e.span, uc_ident); - let uc_nested = 
NestedMetaItemKind::MetaItem(uc_meta_item); - let uc_spanned = respan(e.span, uc_nested); - attr::mk_spanned_list_item(e.span, allow_ident, vec![uc_spanned]) - }; - attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow) + // Err(err) => #[allow(unreachable_code)] + // return Carrier::from_error(From::from(err)), + let err_arm = { + let err_ident = self.str_to_ident("err"); + let err_local = self.pat_ident(e.span, err_ident); + let from_expr = { + let path = &["convert", "From", "from"]; + let from = P(self.expr_std_path(e.span, path, ThinVec::new())); + let err_expr = self.expr_ident(e.span, err_ident, err_local.id); + + self.expr_call(e.span, from, hir_vec![err_expr]) }; - let attrs = vec![attr]; - - // Ok(val) => #[allow(unreachable_code)] val, - let ok_arm = { - let val_ident = self.str_to_ident("val"); - let val_pat = self.pat_ident(e.span, val_ident); - let val_expr = P(self.expr_ident_with_attrs(e.span, - val_ident, - val_pat.id, - ThinVec::from(attrs.clone()))); - let ok_pat = self.pat_ok(e.span, val_pat); - - self.arm(hir_vec![ok_pat], val_expr) + let from_err_expr = { + let path = &["ops", "Carrier", "from_error"]; + let from_err = P(self.expr_std_path(unstable_span, path, + ThinVec::new())); + P(self.expr_call(e.span, from_err, hir_vec![from_expr])) }; - // Err(err) => #[allow(unreachable_code)] - // return Carrier::from_error(From::from(err)), - let err_arm = { - let err_ident = self.str_to_ident("err"); - let err_local = self.pat_ident(e.span, err_ident); - let from_expr = { - let path = &["convert", "From", "from"]; - let from = P(self.expr_std_path(e.span, path, ThinVec::new())); - let err_expr = self.expr_ident(e.span, err_ident, err_local.id); - - self.expr_call(e.span, from, hir_vec![err_expr]) - }; - let from_err_expr = { - let path = &["ops", "Carrier", "from_error"]; - let from_err = P(self.expr_std_path(unstable_span, path, - ThinVec::new())); - P(self.expr_call(e.span, from_err, hir_vec![from_expr])) - }; + let thin_attrs = ThinVec::from(attrs); + let catch_scope = self.catch_scopes.last().map(|x| *x); + let ret_expr = if let Some(catch_node) = catch_scope { + P(self.expr( + e.span, + hir::ExprBreak( + hir::Destination { + ident: None, + target_id: hir::ScopeTarget::Block(catch_node), + }, + Some(from_err_expr) + ), + thin_attrs)) + } else { + P(self.expr(e.span, + hir::Expr_::ExprRet(Some(from_err_expr)), + thin_attrs)) + }; - let thin_attrs = ThinVec::from(attrs); - let catch_scope = self.catch_scopes.last().map(|x| *x); - let ret_expr = if let Some(catch_node) = catch_scope { - P(self.expr( - e.span, - hir::ExprBreak( - hir::Destination { - ident: None, - target_id: hir::ScopeTarget::Block(catch_node), - }, - Some(from_err_expr) - ), - thin_attrs)) - } else { - P(self.expr(e.span, - hir::Expr_::ExprRet(Some(from_err_expr)), - thin_attrs)) - }; - let err_pat = self.pat_err(e.span, err_local); - self.arm(hir_vec![err_pat], ret_expr) - }; + let err_pat = self.pat_err(e.span, err_local); + self.arm(hir_vec![err_pat], ret_expr) + }; - return self.expr_match(e.span, discr, hir_vec![err_arm, ok_arm], - hir::MatchSource::TryDesugar); - } + hir::ExprMatch(discr, + hir_vec![err_arm, ok_arm], + hir::MatchSource::TryDesugar) + } - ExprKind::Mac(_) => panic!("Shouldn't exist here"), - }, + ExprKind::Mac(_) => panic!("Shouldn't exist here"), + }; + + hir::Expr { + id: self.lower_node_id(e.id), + node: kind, span: e.span, attrs: e.attrs.clone(), } @@ -2203,7 +2416,7 @@ impl<'a> LoweringContext<'a> { node: hir::StmtDecl(P(Spanned { node: 
hir::DeclLocal(self.lower_local(l)), span: s.span, - }), s.id), + }), self.lower_node_id(s.id)), span: s.span, }, StmtKind::Item(ref it) => { @@ -2213,19 +2426,23 @@ impl<'a> LoweringContext<'a> { node: hir::StmtDecl(P(Spanned { node: hir::DeclItem(item_id), span: s.span, - }), id.take().unwrap_or_else(|| self.next_id())), + }), id.take() + .map(|id| self.lower_node_id(id)) + .unwrap_or_else(|| self.next_id())), span: s.span, }).collect(); } StmtKind::Expr(ref e) => { Spanned { - node: hir::StmtExpr(P(self.lower_expr(e)), s.id), + node: hir::StmtExpr(P(self.lower_expr(e)), + self.lower_node_id(s.id)), span: s.span, } } StmtKind::Semi(ref e) => { Spanned { - node: hir::StmtSemi(P(self.lower_expr(e)), s.id), + node: hir::StmtSemi(P(self.lower_expr(e)), + self.lower_node_id(s.id)), span: s.span, } } @@ -2240,14 +2457,26 @@ impl<'a> LoweringContext<'a> { } } - fn lower_visibility(&mut self, v: &Visibility) -> hir::Visibility { + /// If an `explicit_owner` is given, this method allocates the `HirId` in + /// the address space of that item instead of the item currently being + /// lowered. This can happen during `lower_impl_item_ref()` where we need to + /// lower a `Visibility` value although we haven't lowered the owning + /// `ImplItem` in question yet. + fn lower_visibility(&mut self, + v: &Visibility, + explicit_owner: Option) + -> hir::Visibility { match *v { Visibility::Public => hir::Public, Visibility::Crate(_) => hir::Visibility::Crate, Visibility::Restricted { ref path, id } => { hir::Visibility::Restricted { path: P(self.lower_path(id, path, ParamMode::Explicit, true)), - id: id + id: if let Some(owner) = explicit_owner { + self.lower_node_id_with_owner(id, owner) + } else { + self.lower_node_id(id) + } } } Visibility::Inherited => hir::Inherited, diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index bf52a036cc8..0f7e54953b0 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -14,8 +14,10 @@ //! There are also some rather random cases (like const initializer //! expressions) that are mostly just leftovers. +use hir; use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::indexed_vec::IndexVec; use rustc_data_structures::stable_hasher::StableHasher; use serialize::{Encodable, Decodable, Encoder, Decoder}; use std::fmt::Write; @@ -121,6 +123,7 @@ pub struct Definitions { table: DefPathTable, node_to_def_index: NodeMap, def_index_to_node: Vec, + pub(super) node_to_hir_id: IndexVec, } /// A unique identifier that we can use to lookup a definition @@ -206,6 +209,23 @@ impl DefPath { s } + /// Returns a string representation of the DefPath without + /// the crate-prefix. This method is useful if you don't have + /// a TyCtxt available. + pub fn to_string_no_crate(&self) -> String { + let mut s = String::with_capacity(self.data.len() * 16); + + for component in &self.data { + write!(s, + "::{}[{}]", + component.data.as_interned_str(), + component.disambiguator) + .unwrap(); + } + + s + } + pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 { debug!("deterministic_hash({:?})", self); let mut state = StableHasher::new(); @@ -275,6 +295,7 @@ impl Definitions { }, node_to_def_index: NodeMap(), def_index_to_node: vec![], + node_to_hir_id: IndexVec::new(), } } @@ -367,6 +388,15 @@ impl Definitions { index } + + /// Initialize the ast::NodeId to HirId mapping once it has been generated during + /// AST to HIR lowering. 
+ pub fn init_node_id_to_hir_id_mapping(&mut self, + mapping: IndexVec) { + assert!(self.node_to_hir_id.is_empty(), + "Trying initialize NodeId -> HirId mapping twice"); + self.node_to_hir_id = mapping; + } } impl DefPathData { diff --git a/src/librustc/hir/map/hir_id_validator.rs b/src/librustc/hir/map/hir_id_validator.rs new file mode 100644 index 00000000000..b3cc0c542ef --- /dev/null +++ b/src/librustc/hir/map/hir_id_validator.rs @@ -0,0 +1,184 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; +use hir::{self, intravisit, HirId, ItemLocalId}; +use syntax::ast::NodeId; +use hir::itemlikevisit::ItemLikeVisitor; +use rustc_data_structures::fx::FxHashMap; + +pub fn check_crate<'hir>(hir_map: &hir::map::Map<'hir>) { + let mut outer_visitor = OuterVisitor { + hir_map: hir_map, + errors: vec![], + }; + + hir_map.dep_graph.with_ignore(|| { + hir_map.krate().visit_all_item_likes(&mut outer_visitor); + if !outer_visitor.errors.is_empty() { + let message = outer_visitor + .errors + .iter() + .fold(String::new(), |s1, s2| s1 + "\n" + s2); + bug!("{}", message); + } + }); +} + +struct HirIdValidator<'a, 'hir: 'a> { + hir_map: &'a hir::map::Map<'hir>, + owner_def_index: Option, + hir_ids_seen: FxHashMap, + errors: Vec, +} + +struct OuterVisitor<'a, 'hir: 'a> { + hir_map: &'a hir::map::Map<'hir>, + errors: Vec, +} + +impl<'a, 'hir: 'a> OuterVisitor<'a, 'hir> { + fn new_inner_visitor(&self, + hir_map: &'a hir::map::Map<'hir>) + -> HirIdValidator<'a, 'hir> { + HirIdValidator { + hir_map: hir_map, + owner_def_index: None, + hir_ids_seen: FxHashMap(), + errors: Vec::new(), + } + } +} + +impl<'a, 'hir: 'a> ItemLikeVisitor<'hir> for OuterVisitor<'a, 'hir> { + fn visit_item(&mut self, i: &'hir hir::Item) { + let mut inner_visitor = self.new_inner_visitor(self.hir_map); + inner_visitor.check(i.id, |this| intravisit::walk_item(this, i)); + self.errors.extend(inner_visitor.errors.drain(..)); + } + + fn visit_trait_item(&mut self, i: &'hir hir::TraitItem) { + let mut inner_visitor = self.new_inner_visitor(self.hir_map); + inner_visitor.check(i.id, |this| intravisit::walk_trait_item(this, i)); + self.errors.extend(inner_visitor.errors.drain(..)); + } + + fn visit_impl_item(&mut self, i: &'hir hir::ImplItem) { + let mut inner_visitor = self.new_inner_visitor(self.hir_map); + inner_visitor.check(i.id, |this| intravisit::walk_impl_item(this, i)); + self.errors.extend(inner_visitor.errors.drain(..)); + } +} + +impl<'a, 'hir: 'a> HirIdValidator<'a, 'hir> { + + fn check)>(&mut self, + node_id: NodeId, + walk: F) { + assert!(self.owner_def_index.is_none()); + let owner_def_index = self.hir_map.local_def_id(node_id).index; + self.owner_def_index = Some(owner_def_index); + walk(self); + + if owner_def_index == CRATE_DEF_INDEX { + return + } + + // There's always at least one entry for the owning item itself + let max = self.hir_ids_seen + .keys() + .map(|local_id| local_id.as_usize()) + .max() + .unwrap(); + + if max != self.hir_ids_seen.len() - 1 { + // Collect the missing ItemLocalIds + let missing: Vec<_> = (0 .. 
max + 1) + .filter(|&i| !self.hir_ids_seen.contains_key(&ItemLocalId(i as u32))) + .collect(); + + // Try to map those to something more useful + let mut missing_items = vec![]; + + for local_id in missing { + let hir_id = HirId { + owner: owner_def_index, + local_id: ItemLocalId(local_id as u32), + }; + + // We are already in ICE mode here, so doing a linear search + // should be fine. + let (node_id, _) = self.hir_map + .definitions() + .node_to_hir_id + .iter() + .enumerate() + .find(|&(_, &entry)| hir_id == entry) + .unwrap(); + let node_id = NodeId::new(node_id); + missing_items.push(format!("[local_id: {}, node:{}]", + local_id, + self.hir_map.node_to_string(node_id))); + } + + self.errors.push(format!( + "ItemLocalIds not assigned densely in {}. \ + Max ItemLocalId = {}, missing IDs = {:?}", + self.hir_map.def_path(DefId::local(owner_def_index)).to_string_no_crate(), + max, + missing_items)); + } + } +} + +impl<'a, 'hir: 'a> intravisit::Visitor<'hir> for HirIdValidator<'a, 'hir> { + + fn nested_visit_map<'this>(&'this mut self) + -> intravisit::NestedVisitorMap<'this, 'hir> { + intravisit::NestedVisitorMap::OnlyBodies(self.hir_map) + } + + fn visit_id(&mut self, node_id: NodeId) { + let owner = self.owner_def_index.unwrap(); + let stable_id = self.hir_map.definitions().node_to_hir_id[node_id]; + + if stable_id == hir::DUMMY_HIR_ID { + self.errors.push(format!("HirIdValidator: No HirId assigned for NodeId {}: {:?}", + node_id, + self.hir_map.node_to_string(node_id))); + } + + if owner != stable_id.owner { + self.errors.push(format!( + "HirIdValidator: The recorded owner of {} is {} instead of {}", + self.hir_map.node_to_string(node_id), + self.hir_map.def_path(DefId::local(stable_id.owner)).to_string_no_crate(), + self.hir_map.def_path(DefId::local(owner)).to_string_no_crate())); + } + + if let Some(prev) = self.hir_ids_seen.insert(stable_id.local_id, node_id) { + if prev != node_id { + self.errors.push(format!( + "HirIdValidator: Same HirId {}/{} assigned for nodes {} and {}", + self.hir_map.def_path(DefId::local(stable_id.owner)).to_string_no_crate(), + stable_id.local_id.as_usize(), + self.hir_map.node_to_string(prev), + self.hir_map.node_to_string(node_id))); + } + } + } + + fn visit_impl_item_ref(&mut self, _: &'hir hir::ImplItemRef) { + // Explicitly do nothing here. ImplItemRefs contain hir::Visibility + // values that actually belong to an ImplItem instead of the ItemImpl + // we are currently in. So for those it's correct that they have a + // different owner. 
+ } +} diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 5d074903b2b..3def41fd425 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -36,6 +36,7 @@ pub mod blocks; mod collector; mod def_collector; pub mod definitions; +mod hir_id_validator; #[derive(Copy, Clone, Debug)] pub enum Node<'hir> { @@ -964,13 +965,17 @@ pub fn map_crate<'hir>(forest: &'hir mut Forest, entries, vector_length, (entries as f64 / vector_length as f64) * 100.); } - Map { + let map = Map { forest: forest, dep_graph: forest.dep_graph.clone(), map: map, definitions: definitions, inlined_bodies: RefCell::new(DefIdMap()), - } + }; + + hir_id_validator::check_crate(&map); + + map } /// Identical to the `PpAnn` implementation for `hir::Crate`, diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index edcfcffaa03..1c79a02d3da 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -30,7 +30,7 @@ pub use self::Visibility::{Public, Inherited}; pub use self::PathParameters::*; use hir::def::Def; -use hir::def_id::DefId; +use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; use util::nodemap::{NodeMap, FxHashSet}; use syntax_pos::{Span, ExpnId, DUMMY_SP}; @@ -43,6 +43,8 @@ use syntax::symbol::{Symbol, keywords}; use syntax::tokenstream::TokenStream; use syntax::util::ThinVec; +use rustc_data_structures::indexed_vec; + use std::collections::BTreeMap; use std::fmt; @@ -73,6 +75,63 @@ pub mod pat_util; pub mod print; pub mod svh; +/// A HirId uniquely identifies a node in the HIR of then current crate. It is +/// composed of the `owner`, which is the DefIndex of the directly enclosing +/// hir::Item, hir::TraitItem, or hir::ImplItem (i.e. the closest "item-like"), +/// and the `local_id` which is unique within the given owner. +/// +/// This two-level structure makes for more stable values: One can move an item +/// around within the source code, or add or remove stuff before it, without +/// the local_id part of the HirId changing, which is a very useful property +/// incremental compilation where we have to persist things through changes to +/// the code base. +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, + RustcEncodable, RustcDecodable)] +pub struct HirId { + pub owner: DefIndex, + pub local_id: ItemLocalId, +} + +/// An `ItemLocalId` uniquely identifies something within a given "item-like", +/// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no +/// guarantee that the numerical value of a given `ItemLocalId` corresponds to +/// the node's position within the owning item in any way, but there is a +/// guarantee that the `LocalItemId`s within an owner occupy a dense range of +/// integers starting at zero, so a mapping that maps all or most nodes within +/// an "item-like" to something else can be implement by a `Vec` instead of a +/// tree or hash map. 
+#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug, + RustcEncodable, RustcDecodable)] +pub struct ItemLocalId(pub u32); + +impl ItemLocalId { + pub fn as_usize(&self) -> usize { + self.0 as usize + } +} + +impl indexed_vec::Idx for ItemLocalId { + fn new(idx: usize) -> Self { + debug_assert!((idx as u32) as usize == idx); + ItemLocalId(idx as u32) + } + + fn index(self) -> usize { + self.0 as usize + } +} + +/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX +pub const CRATE_HIR_ID: HirId = HirId { + owner: CRATE_DEF_INDEX, + local_id: ItemLocalId(0) +}; + +pub const DUMMY_HIR_ID: HirId = HirId { + owner: CRATE_DEF_INDEX, + local_id: ItemLocalId(!0) +}; + #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index f60b1d17a5e..f0e328a551d 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -178,17 +178,9 @@ impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> { block.stmts = block.stmts.move_flat_map(|mut stmt| { remaining_stmts -= 1; - match stmt.node { - // Avoid wasting a node id on a trailing expression statement, - // which shares a HIR node with the expression itself. - ast::StmtKind::Expr(ref expr) if remaining_stmts == 0 => stmt.id = expr.id, - - _ if self.monotonic => { - assert_eq!(stmt.id, ast::DUMMY_NODE_ID); - stmt.id = self.cx.resolver.next_node_id(); - } - - _ => {} + if self.monotonic { + assert_eq!(stmt.id, ast::DUMMY_NODE_ID); + stmt.id = self.cx.resolver.next_node_id(); } Some(stmt) diff --git a/src/test/compile-fail/region-bounds-on-objects-and-type-parameters.rs b/src/test/compile-fail/region-bounds-on-objects-and-type-parameters.rs index 503b577b1f1..fd8d5ff9e7e 100644 --- a/src/test/compile-fail/region-bounds-on-objects-and-type-parameters.rs +++ b/src/test/compile-fail/region-bounds-on-objects-and-type-parameters.rs @@ -18,7 +18,7 @@ trait SomeTrait { } // Bounds on object types: -struct Foo<'a,'b,'c> { //~ ERROR parameter `'b` is never used +struct Foo<'a,'b,'c> { //~ ERROR parameter `'c` is never used // All of these are ok, because we can derive exactly one bound: a: Box, b: Box>, -- cgit 1.4.1-3-g733a5 From 769b95dc9f92edb51146727813ea7eae00b5b651 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Fri, 17 Mar 2017 21:13:00 -0700 Subject: Add diagnostic for incorrect `pub (restriction)` Given the following statement ```rust pub (a) fn afn() {} ``` Provide the following diagnostic: ```rust error: incorrect restriction in `pub` --> file.rs:15:1 | 15 | pub (a) fn afn() {} | ^^^^^^^ | = help: some valid visibility restrictions are: `pub(crate)`: visible only on the current crate `pub(super)`: visible only in the current module's parent `pub(in path::to::module)`: visible only on the specified path help: to make this visible only to module `a`, add `in` before the path: | pub (in a) fn afn() {} ``` Remove cruft from old `pub(path)` syntax. 
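As a quick illustration of the forms the new help message points to, here is a minimal sketch in the style of the new `pub-restricted.rs` test added below; the module names are invented, and the `pub(in path)` path is written crate-relative as in that test (the 2015-edition form):

```rust
// Sketch only: `outer`/`inner` are invented names. Compilers contemporary
// with this patch additionally need `#![feature(pub_restricted)]`, as the
// new test files do; later compilers accept these forms on stable.
#![allow(dead_code)]

mod outer {
    pub mod inner {
        pub(crate) fn visible_anywhere_in_this_crate() {}
        pub(super) fn visible_only_inside_outer() {}
        pub(in outer) fn also_visible_only_inside_outer() {}
    }
}

fn main() {
    outer::inner::visible_anywhere_in_this_crate();
}
```

A bare `pub(a)` is rejected rather than accepted as a path because, as the parser comments in this patch note, a parenthesized path would be ambiguous with tuple struct field types such as `pub (usize)`; hence the suggestion to insert `in` before the path.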
--- src/libsyntax/parse/parser.rs | 64 +++++++++++++--------- .../privacy/restricted/tuple-struct-fields/test.rs | 7 ++- .../restricted/tuple-struct-fields/test2.rs | 7 ++- .../restricted/tuple-struct-fields/test3.rs | 7 ++- src/test/ui/pub/pub-restricted-error-fn.rs | 13 +++++ src/test/ui/pub/pub-restricted-error-fn.stderr | 8 +++ src/test/ui/pub/pub-restricted-error.rs | 19 +++++++ src/test/ui/pub/pub-restricted-error.stderr | 8 +++ src/test/ui/pub/pub-restricted-non-path.rs | 15 +++++ src/test/ui/pub/pub-restricted-non-path.stderr | 8 +++ src/test/ui/pub/pub-restricted.rs | 37 +++++++++++++ src/test/ui/pub/pub-restricted.stderr | 47 ++++++++++++++++ 12 files changed, 205 insertions(+), 35 deletions(-) create mode 100644 src/test/ui/pub/pub-restricted-error-fn.rs create mode 100644 src/test/ui/pub/pub-restricted-error-fn.stderr create mode 100644 src/test/ui/pub/pub-restricted-error.rs create mode 100644 src/test/ui/pub/pub-restricted-error.stderr create mode 100644 src/test/ui/pub/pub-restricted-non-path.rs create mode 100644 src/test/ui/pub/pub-restricted-non-path.stderr create mode 100644 src/test/ui/pub/pub-restricted.rs create mode 100644 src/test/ui/pub/pub-restricted.stderr (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index df4ccc94c04..649e9059934 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -4626,7 +4626,7 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let vis = self.parse_visibility()?; + let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness()?; let (name, node) = if self.eat_keyword(keywords::Type) { let name = self.parse_ident()?; @@ -4939,25 +4939,8 @@ impl<'a> Parser<'a> { |p| { let attrs = p.parse_outer_attributes()?; let lo = p.span.lo; - let mut vis = p.parse_visibility()?; - let ty_is_interpolated = - p.token.is_interpolated() || p.look_ahead(1, |t| t.is_interpolated()); - let mut ty = p.parse_ty()?; - - // Handle `pub(path) type`, in which `vis` will be `pub` and `ty` will be `(path)`. - if vis == Visibility::Public && !ty_is_interpolated && - p.token != token::Comma && p.token != token::CloseDelim(token::Paren) { - ty = if let TyKind::Paren(ref path_ty) = ty.node { - if let TyKind::Path(None, ref path) = path_ty.node { - vis = Visibility::Restricted { path: P(path.clone()), id: path_ty.id }; - Some(p.parse_ty()?) - } else { - None - } - } else { - None - }.unwrap_or(ty); - } + let vis = p.parse_visibility(true)?; + let ty = p.parse_ty()?; Ok(StructField { span: mk_sp(lo, p.span.hi), vis: vis, @@ -4996,18 +4979,25 @@ impl<'a> Parser<'a> { fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let vis = self.parse_visibility()?; + let vis = self.parse_visibility(false)?; self.parse_single_struct_field(lo, vis, attrs) } - // Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts - // `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. - fn parse_visibility(&mut self) -> PResult<'a, Visibility> { + /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `pub(self)` for `pub(in self)` + /// and `pub(super)` for `pub(in super)`. If the following element can't be a tuple (i.e. it's + /// a function definition, it's not a tuple struct field) and the contents within the parens + /// isn't valid, emit a proper diagnostic. 
+ fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> { if !self.eat_keyword(keywords::Pub) { return Ok(Visibility::Inherited) } if self.check(&token::OpenDelim(token::Paren)) { + let start_span = self.span; + // We don't `self.bump()` the `(` yet because this might be a struct definition where + // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. + // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so + // by the following tokens. if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) { // `pub(crate)` self.bump(); // `(` @@ -5032,6 +5022,28 @@ impl<'a> Parser<'a> { let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; self.expect(&token::CloseDelim(token::Paren))?; // `)` return Ok(vis) + } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct + // `pub(something) fn ...` or `struct X { pub(something) y: Z }` + self.bump(); // `(` + let msg = "incorrect visibility restriction"; + let suggestion = r##"some possible visibility restrictions are: +`pub(crate)`: visible only on the current crate +`pub(super)`: visible only in the current module's parent +`pub(in path::to::module)`: visible only on the specified path"##; + let path = self.parse_path(PathStyle::Mod)?; + let path_span = self.prev_span; + let help_msg = format!("to make this visible only to module `{}`, add `in` before \ + the path:", + path); + self.expect(&token::CloseDelim(token::Paren))?; // `)` + let sp = Span { + lo: start_span.lo, + hi: self.prev_span.hi, + expn_id: start_span.expn_id, + }; + let mut err = self.span_fatal_help(sp, &msg, &suggestion); + err.span_suggestion(path_span, &help_msg, format!("in {}", path)); + err.emit(); // emit diagnostic, but continue with public visibility } } @@ -5508,7 +5520,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; - let visibility = self.parse_visibility()?; + let visibility = self.parse_visibility(false)?; if self.eat_keyword(keywords::Use) { // USE ITEM @@ -5787,7 +5799,7 @@ impl<'a> Parser<'a> { fn parse_foreign_item(&mut self) -> PResult<'a, Option> { let attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let visibility = self.parse_visibility()?; + let visibility = self.parse_visibility(false)?; if self.check_keyword(keywords::Static) { // FOREIGN STATIC ITEM diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs index 208f1a0e2ee..d17b604717e 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs @@ -10,7 +10,8 @@ mod foo { type T = (); - struct S1(pub(foo) (), pub(T), pub(crate) (), pub(((), T))); - struct S2(pub((foo)) ()); //~ ERROR expected `,`, found `(` - //~| ERROR expected one of `;` or `where`, found `(` + struct S1(pub(in foo) (), pub(T), pub(crate) (), pub(((), T))); + struct S2(pub((foo)) ()); + //~^ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs index 57769646e3b..166d5e27e8d 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs @@ -11,9 +11,10 @@ macro_rules! 
define_struct { ($t:ty) => { struct S1(pub $t); - struct S2(pub (foo) ()); - struct S3(pub $t ()); //~ ERROR expected `,`, found `(` - //~| ERROR expected one of `;` or `where`, found `(` + struct S2(pub (in foo) ()); + struct S3(pub $t ()); + //~^ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } } diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs index db3358f7d50..edab175f4cd 100644 --- a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs @@ -11,9 +11,10 @@ macro_rules! define_struct { ($t:ty) => { struct S1(pub($t)); - struct S2(pub (foo) ()); - struct S3(pub($t) ()); //~ ERROR expected `,`, found `(` - //~| ERROR expected one of `;` or `where`, found `(` + struct S2(pub (in foo) ()); + struct S3(pub($t) ()); + //~^ ERROR expected `,`, found `(` + //~| ERROR expected one of `;` or `where`, found `(` } } diff --git a/src/test/ui/pub/pub-restricted-error-fn.rs b/src/test/ui/pub/pub-restricted-error-fn.rs new file mode 100644 index 00000000000..13514310371 --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error-fn.rs @@ -0,0 +1,13 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +pub(crate) () fn foo() {} diff --git a/src/test/ui/pub/pub-restricted-error-fn.stderr b/src/test/ui/pub/pub-restricted-error-fn.stderr new file mode 100644 index 00000000000..470e8331247 --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error-fn.stderr @@ -0,0 +1,8 @@ +error: unmatched visibility `pub` + --> $DIR/pub-restricted-error-fn.rs:13:10 + | +13 | pub(crate) () fn foo() {} + | ^ + +error: aborting due to previous error + diff --git a/src/test/ui/pub/pub-restricted-error.rs b/src/test/ui/pub/pub-restricted-error.rs new file mode 100644 index 00000000000..99af031899a --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +struct Bar(pub(())); + +struct Foo { + pub(crate) () foo: usize, +} + + diff --git a/src/test/ui/pub/pub-restricted-error.stderr b/src/test/ui/pub/pub-restricted-error.stderr new file mode 100644 index 00000000000..b8b4c80778d --- /dev/null +++ b/src/test/ui/pub/pub-restricted-error.stderr @@ -0,0 +1,8 @@ +error: expected identifier, found `(` + --> $DIR/pub-restricted-error.rs:16:16 + | +16 | pub(crate) () foo: usize, + | ^ + +error: aborting due to previous error + diff --git a/src/test/ui/pub/pub-restricted-non-path.rs b/src/test/ui/pub/pub-restricted-non-path.rs new file mode 100644 index 00000000000..3f74285717a --- /dev/null +++ b/src/test/ui/pub/pub-restricted-non-path.rs @@ -0,0 +1,15 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +pub (.) fn afn() {} + +fn main() {} diff --git a/src/test/ui/pub/pub-restricted-non-path.stderr b/src/test/ui/pub/pub-restricted-non-path.stderr new file mode 100644 index 00000000000..ebfccc4d720 --- /dev/null +++ b/src/test/ui/pub/pub-restricted-non-path.stderr @@ -0,0 +1,8 @@ +error: expected identifier, found `.` + --> $DIR/pub-restricted-non-path.rs:13:6 + | +13 | pub (.) fn afn() {} + | ^ + +error: aborting due to previous error + diff --git a/src/test/ui/pub/pub-restricted.rs b/src/test/ui/pub/pub-restricted.rs new file mode 100644 index 00000000000..48e487f71a7 --- /dev/null +++ b/src/test/ui/pub/pub-restricted.rs @@ -0,0 +1,37 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +mod a {} + +pub (a) fn afn() {} +pub (b) fn bfn() {} +pub fn privfn() {} +mod x { + mod y { + pub (in x) fn foo() {} + pub (super) fn bar() {} + pub (crate) fn qux() {} + } +} + +mod y { + struct Foo { + pub (crate) c: usize, + pub (super) s: usize, + valid_private: usize, + pub (in y) valid_in_x: usize, + pub (a) invalid: usize, + pub (in x) non_parent_invalid: usize, + } +} + +fn main() {} \ No newline at end of file diff --git a/src/test/ui/pub/pub-restricted.stderr b/src/test/ui/pub/pub-restricted.stderr new file mode 100644 index 00000000000..5bc230e8da3 --- /dev/null +++ b/src/test/ui/pub/pub-restricted.stderr @@ -0,0 +1,47 @@ +error: incorrect visibility restriction + --> $DIR/pub-restricted.rs:15:5 + | +15 | pub (a) fn afn() {} + | ^^^ + | + = help: some possible visibility restrictions are: + `pub(crate)`: visible only on the current crate + `pub(super)`: visible only in the current module's parent + `pub(in path::to::module)`: visible only on the specified path +help: to make this visible only to module `a`, add `in` before the path: + | pub (in a) fn afn() {} + +error: incorrect visibility restriction + --> $DIR/pub-restricted.rs:16:5 + | +16 | pub (b) fn bfn() {} + | ^^^ + | + = help: some possible visibility restrictions are: + `pub(crate)`: visible only on the current crate + `pub(super)`: visible only in the current module's parent + `pub(in path::to::module)`: visible only on the specified path +help: to make this visible only to module `b`, add `in` before the path: + | pub (in b) fn bfn() {} + +error: incorrect visibility restriction + --> $DIR/pub-restricted.rs:32:13 + | +32 | pub (a) invalid: usize, + | ^^^ + | + = help: some possible visibility restrictions are: + `pub(crate)`: visible only on the current crate + `pub(super)`: visible only in the current module's parent + `pub(in path::to::module)`: visible only on the specified path +help: to make this visible only to module `a`, add `in` before the path: + | pub (in a) invalid: usize, + +error: visibilities can only be restricted to ancestor modules + --> $DIR/pub-restricted.rs:33:17 + | +33 | pub (in x) non_parent_invalid: usize, + | ^ + +error: aborting due to 
4 previous errors + -- cgit 1.4.1-3-g733a5 From e341d603fe7c35ce174bd2e54e47ed6941ea4b03 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 15 Feb 2017 07:57:59 -0800 Subject: Remove internal liblog This commit deletes the internal liblog in favor of the implementation that lives on crates.io. Similarly it's also setting a convention for adding crates to the compiler. The main restriction right now is that we want compiler implementation details to be unreachable from normal Rust code (e.g. requires a feature), and by default everything in the sysroot is reachable via `extern crate`. The proposal here is to require that crates pulled in have these lines in their `src/lib.rs`: #![cfg_attr(rustbuild, feature(staged_api, rustc_private))] #![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))] This'll mean that by default they're not using these attributes but when compiled as part of the compiler they do a few things: * Mark themselves as entirely unstable via the `staged_api` feature and the `#![unstable]` attribute. * Allow usage of other unstable crates via `feature(rustc_private)` which is required if the crate relies on any other crates to compile (other than std). --- src/Cargo.lock | 53 +-- src/liblog/Cargo.toml | 9 - src/liblog/directive.rs | 193 -------- src/liblog/lib.rs | 506 --------------------- src/liblog/macros.rs | 205 --------- src/librustc/Cargo.toml | 2 +- src/librustc/hir/map/mod.rs | 2 +- src/librustc/ty/item_path.rs | 3 +- src/librustc_back/Cargo.toml | 2 +- src/librustc_borrowck/Cargo.toml | 2 +- src/librustc_const_eval/Cargo.toml | 2 +- src/librustc_data_structures/Cargo.toml | 2 +- src/librustc_driver/Cargo.toml | 3 +- src/librustc_driver/driver.rs | 4 +- src/librustc_driver/lib.rs | 2 + src/librustc_incremental/Cargo.toml | 2 +- src/librustc_lint/Cargo.toml | 2 +- src/librustc_metadata/Cargo.toml | 2 +- src/librustc_metadata/creader.rs | 2 +- src/librustc_mir/Cargo.toml | 2 +- src/librustc_passes/Cargo.toml | 4 +- src/librustc_resolve/Cargo.toml | 2 +- src/librustc_save_analysis/Cargo.toml | 2 +- src/librustc_trans/Cargo.toml | 2 +- src/librustc_typeck/Cargo.toml | 2 +- src/librustdoc/Cargo.toml | 5 +- src/librustdoc/lib.rs | 2 + src/libsyntax/Cargo.toml | 2 +- src/libsyntax_ext/Cargo.toml | 2 +- .../auxiliary/logging_right_crate.rs | 18 - .../conditional-debug-macro-off.rs | 23 - .../run-pass-fulldeps/logging-enabled-debug.rs | 24 - src/test/run-pass-fulldeps/logging-enabled.rs | 27 -- src/test/run-pass-fulldeps/logging-right-crate.rs | 31 -- .../run-pass-fulldeps/logging-separate-lines.rs | 40 -- src/test/run-pass-fulldeps/rust-log-filter.rs | 58 --- src/test/run-pass/conditional-debug-macro-on.rs | 2 - src/tools/compiletest/Cargo.toml | 2 +- 38 files changed, 54 insertions(+), 1194 deletions(-) delete mode 100644 src/liblog/Cargo.toml delete mode 100644 src/liblog/directive.rs delete mode 100644 src/liblog/lib.rs delete mode 100644 src/liblog/macros.rs delete mode 100644 src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs delete mode 100644 src/test/run-pass-fulldeps/conditional-debug-macro-off.rs delete mode 100644 src/test/run-pass-fulldeps/logging-enabled-debug.rs delete mode 100644 src/test/run-pass-fulldeps/logging-enabled.rs delete mode 100644 src/test/run-pass-fulldeps/logging-right-crate.rs delete mode 100644 src/test/run-pass-fulldeps/logging-separate-lines.rs delete mode 100644 src/test/run-pass-fulldeps/rust-log-filter.rs (limited to 'src/libsyntax') diff --git a/src/Cargo.lock b/src/Cargo.lock index 
9ae894061a6..a0b47f4f0b2 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -147,7 +147,7 @@ dependencies = [ name = "compiletest" version = "0.0.0" dependencies = [ - "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)", @@ -162,14 +162,6 @@ name = "dtoa" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "env_logger" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "env_logger" version = "0.4.2" @@ -270,10 +262,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "linkchecker" version = "0.1.0" -[[package]] -name = "log" -version = "0.0.0" - [[package]] name = "log" version = "0.3.7" @@ -439,7 +427,7 @@ dependencies = [ "arena 0.0.0", "fmt_macros 0.0.0", "graphviz 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_back 0.0.0", "rustc_bitflags 0.0.0", "rustc_const_math 0.0.0", @@ -479,7 +467,7 @@ dependencies = [ name = "rustc_back" version = "0.0.0" dependencies = [ - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "serialize 0.0.0", "syntax 0.0.0", ] @@ -493,7 +481,7 @@ name = "rustc_borrowck" version = "0.0.0" dependencies = [ "graphviz 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", @@ -508,7 +496,7 @@ version = "0.0.0" dependencies = [ "arena 0.0.0", "graphviz 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_const_math 0.0.0", @@ -530,7 +518,7 @@ dependencies = [ name = "rustc_data_structures" version = "0.0.0" dependencies = [ - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "serialize 0.0.0", ] @@ -539,8 +527,9 @@ name = "rustc_driver" version = "0.0.0" dependencies = [ "arena 0.0.0", + "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "graphviz 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "proc_macro_plugin 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", @@ -579,7 +568,7 @@ name = "rustc_incremental" version = "0.0.0" dependencies = [ "graphviz 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_data_structures 0.0.0", "serialize 0.0.0", @@ -591,7 +580,7 @@ dependencies = [ name = "rustc_lint" version = "0.0.0" dependencies = [ - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_const_eval 0.0.0", @@ -623,7 +612,7 @@ name = "rustc_metadata" version = "0.0.0" dependencies = [ "flate 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "proc_macro 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", @@ -642,7 +631,7 @@ name = "rustc_mir" version = "0.0.0" dependencies = [ "graphviz 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 
0.0.0", "rustc_bitflags 0.0.0", "rustc_const_eval 0.0.0", @@ -666,7 +655,7 @@ dependencies = [ name = "rustc_passes" version = "0.0.0" dependencies = [ - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", @@ -705,7 +694,7 @@ name = "rustc_resolve" version = "0.0.0" dependencies = [ "arena 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_errors 0.0.0", "syntax 0.0.0", @@ -716,7 +705,7 @@ dependencies = [ name = "rustc_save_analysis" version = "0.0.0" dependencies = [ - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rls-data 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "rls-span 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", @@ -730,7 +719,7 @@ name = "rustc_trans" version = "0.0.0" dependencies = [ "flate 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_bitflags 0.0.0", @@ -762,7 +751,7 @@ version = "0.0.0" dependencies = [ "arena 0.0.0", "fmt_macros 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_const_eval 0.0.0", @@ -780,8 +769,9 @@ version = "0.0.0" dependencies = [ "arena 0.0.0", "build_helper 0.1.0", + "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_const_eval 0.0.0", @@ -857,7 +847,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "syntax" version = "0.0.0" dependencies = [ - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_bitflags 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", @@ -870,7 +860,7 @@ name = "syntax_ext" version = "0.0.0" dependencies = [ "fmt_macros 0.0.0", - "log 0.0.0", + "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "proc_macro 0.0.0", "rustc_errors 0.0.0", "syntax 0.0.0", @@ -996,7 +986,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum clap 2.21.1 (registry+https://github.com/rust-lang/crates.io-index)" = "74a80f603221c9cd9aa27a28f52af452850051598537bb6b359c38a7d61e5cda" "checksum cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "d18d68987ed4c516dcc3e7913659bfa4076f5182eea4a7e0038bb060953e76ac" "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90" -"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" "checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83" "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922" "checksum gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "a32cd40070d7611ab76343dcb3204b2bb28c8a9450989a83a3d590248142f439" diff --git a/src/liblog/Cargo.toml b/src/liblog/Cargo.toml deleted file mode 100644 index 31a862478d0..00000000000 --- 
a/src/liblog/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "log" -version = "0.0.0" - -[lib] -name = "log" -path = "lib.rs" -crate-type = ["dylib", "rlib"] diff --git a/src/liblog/directive.rs b/src/liblog/directive.rs deleted file mode 100644 index eb50d6e6135..00000000000 --- a/src/liblog/directive.rs +++ /dev/null @@ -1,193 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::ascii::AsciiExt; -use std::cmp; - -#[derive(Debug, Clone)] -pub struct LogDirective { - pub name: Option, - pub level: u32, -} - -pub const LOG_LEVEL_NAMES: [&'static str; 5] = ["ERROR", "WARN", "INFO", "DEBUG", "TRACE"]; - -/// Parse an individual log level that is either a number or a symbolic log level -fn parse_log_level(level: &str) -> Option { - level.parse::() - .ok() - .or_else(|| { - let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level)); - pos.map(|p| p as u32 + 1) - }) - .map(|p| cmp::min(p, ::MAX_LOG_LEVEL)) -} - -/// Parse a logging specification string (e.g: "crate1,crate2::mod3,crate3::x=1/foo") -/// and return a vector with log directives. -/// -/// Valid log levels are 0-255, with the most likely ones being 1-4 (defined in -/// std::). Also supports string log levels of error, warn, info, and debug -pub fn parse_logging_spec(spec: &str) -> (Vec, Option) { - let mut dirs = Vec::new(); - - let mut parts = spec.split('/'); - let mods = parts.next(); - let filter = parts.next(); - if parts.next().is_some() { - println!("warning: invalid logging spec '{}', ignoring it (too many '/'s)", - spec); - return (dirs, None); - } - if let Some(m) = mods { - for s in m.split(',') { - if s.is_empty() { - continue; - } - let mut parts = s.split('='); - let (log_level, name) = - match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) { - (Some(part0), None, None) => { - // if the single argument is a log-level string or number, - // treat that as a global fallback - match parse_log_level(part0) { - Some(num) => (num, None), - None => (::MAX_LOG_LEVEL, Some(part0)), - } - } - (Some(part0), Some(""), None) => (::MAX_LOG_LEVEL, Some(part0)), - (Some(part0), Some(part1), None) => { - match parse_log_level(part1) { - Some(num) => (num, Some(part0)), - _ => { - println!("warning: invalid logging spec '{}', ignoring it", part1); - continue; - } - } - } - _ => { - println!("warning: invalid logging spec '{}', ignoring it", s); - continue; - } - }; - dirs.push(LogDirective { - name: name.map(str::to_owned), - level: log_level, - }); - } - } - - (dirs, filter.map(str::to_owned)) -} - -#[cfg(test)] -mod tests { - use super::parse_logging_spec; - - #[test] - fn parse_logging_spec_valid() { - let (dirs, filter) = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4"); - assert_eq!(dirs.len(), 3); - assert_eq!(dirs[0].name, Some("crate1::mod1".to_owned())); - assert_eq!(dirs[0].level, 1); - - assert_eq!(dirs[1].name, Some("crate1::mod2".to_owned())); - assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL); - - assert_eq!(dirs[2].name, Some("crate2".to_owned())); - assert_eq!(dirs[2].level, 4); - assert!(filter.is_none()); - } - - #[test] - fn parse_logging_spec_invalid_crate() { - // test parse_logging_spec with multiple = 
in specification - let (dirs, filter) = parse_logging_spec("crate1::mod1=1=2,crate2=4"); - assert_eq!(dirs.len(), 1); - assert_eq!(dirs[0].name, Some("crate2".to_owned())); - assert_eq!(dirs[0].level, 4); - assert!(filter.is_none()); - } - - #[test] - fn parse_logging_spec_invalid_log_level() { - // test parse_logging_spec with 'noNumber' as log level - let (dirs, filter) = parse_logging_spec("crate1::mod1=noNumber,crate2=4"); - assert_eq!(dirs.len(), 1); - assert_eq!(dirs[0].name, Some("crate2".to_owned())); - assert_eq!(dirs[0].level, 4); - assert!(filter.is_none()); - } - - #[test] - fn parse_logging_spec_string_log_level() { - // test parse_logging_spec with 'warn' as log level - let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=warn"); - assert_eq!(dirs.len(), 1); - assert_eq!(dirs[0].name, Some("crate2".to_owned())); - assert_eq!(dirs[0].level, ::WARN); - assert!(filter.is_none()); - } - - #[test] - fn parse_logging_spec_empty_log_level() { - // test parse_logging_spec with '' as log level - let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2="); - assert_eq!(dirs.len(), 1); - assert_eq!(dirs[0].name, Some("crate2".to_owned())); - assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL); - assert!(filter.is_none()); - } - - #[test] - fn parse_logging_spec_global() { - // test parse_logging_spec with no crate - let (dirs, filter) = parse_logging_spec("warn,crate2=4"); - assert_eq!(dirs.len(), 2); - assert_eq!(dirs[0].name, None); - assert_eq!(dirs[0].level, 2); - assert_eq!(dirs[1].name, Some("crate2".to_owned())); - assert_eq!(dirs[1].level, 4); - assert!(filter.is_none()); - } - - #[test] - fn parse_logging_spec_valid_filter() { - let (dirs, filter) = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4/abc"); - assert_eq!(dirs.len(), 3); - assert_eq!(dirs[0].name, Some("crate1::mod1".to_owned())); - assert_eq!(dirs[0].level, 1); - - assert_eq!(dirs[1].name, Some("crate1::mod2".to_owned())); - assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL); - - assert_eq!(dirs[2].name, Some("crate2".to_owned())); - assert_eq!(dirs[2].level, 4); - assert!(filter.is_some() && filter.unwrap().to_owned() == "abc"); - } - - #[test] - fn parse_logging_spec_invalid_crate_filter() { - let (dirs, filter) = parse_logging_spec("crate1::mod1=1=2,crate2=4/a.c"); - assert_eq!(dirs.len(), 1); - assert_eq!(dirs[0].name, Some("crate2".to_owned())); - assert_eq!(dirs[0].level, 4); - assert!(filter.is_some() && filter.unwrap().to_owned() == "a.c"); - } - - #[test] - fn parse_logging_spec_empty_with_filter() { - let (dirs, filter) = parse_logging_spec("crate1/a*c"); - assert_eq!(dirs.len(), 1); - assert_eq!(dirs[0].name, Some("crate1".to_owned())); - assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL); - assert!(filter.is_some() && filter.unwrap().to_owned() == "a*c"); - } -} diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs deleted file mode 100644 index 057df647c72..00000000000 --- a/src/liblog/lib.rs +++ /dev/null @@ -1,506 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Utilities for program-wide and customizable logging -//! -//! # Examples -//! -//! ``` -//! # #![feature(rustc_private)] -//! #[macro_use] extern crate log; -//! -//! fn main() { -//! 
debug!("this is a debug {:?}", "message"); -//! error!("this is printed by default"); -//! -//! if log_enabled!(log::INFO) { -//! let x = 3 * 4; // expensive computation -//! info!("the answer was: {:?}", x); -//! } -//! } -//! ``` -//! -//! Assumes the binary is `main`: -//! -//! ```{.bash} -//! $ RUST_LOG=error ./main -//! ERROR:main: this is printed by default -//! ``` -//! -//! ```{.bash} -//! $ RUST_LOG=info ./main -//! ERROR:main: this is printed by default -//! INFO:main: the answer was: 12 -//! ``` -//! -//! ```{.bash} -//! $ RUST_LOG=debug ./main -//! DEBUG:main: this is a debug message -//! ERROR:main: this is printed by default -//! INFO:main: the answer was: 12 -//! ``` -//! -//! You can also set the log level on a per module basis: -//! -//! ```{.bash} -//! $ RUST_LOG=main=info ./main -//! ERROR:main: this is printed by default -//! INFO:main: the answer was: 12 -//! ``` -//! -//! And enable all logging: -//! -//! ```{.bash} -//! $ RUST_LOG=main ./main -//! DEBUG:main: this is a debug message -//! ERROR:main: this is printed by default -//! INFO:main: the answer was: 12 -//! ``` -//! -//! # Logging Macros -//! -//! There are five macros that the logging subsystem uses: -//! -//! * `log!(level, ...)` - the generic logging macro, takes a level as a u32 and any -//! related `format!` arguments -//! * `debug!(...)` - a macro hard-wired to the log level of `DEBUG` -//! * `info!(...)` - a macro hard-wired to the log level of `INFO` -//! * `warn!(...)` - a macro hard-wired to the log level of `WARN` -//! * `error!(...)` - a macro hard-wired to the log level of `ERROR` -//! -//! All of these macros use the same style of syntax as the `format!` syntax -//! extension. Details about the syntax can be found in the documentation of -//! `std::fmt` along with the Rust tutorial/manual. -//! -//! If you want to check at runtime if a given logging level is enabled (e.g. if the -//! information you would want to log is expensive to produce), you can use the -//! following macro: -//! -//! * `log_enabled!(level)` - returns true if logging of the given level is enabled -//! -//! # Enabling logging -//! -//! Log levels are controlled on a per-module basis, and by default all logging is -//! disabled except for `error!` (a log level of 1). Logging is controlled via the -//! `RUST_LOG` environment variable. The value of this environment variable is a -//! comma-separated list of logging directives. A logging directive is of the form: -//! -//! ```text -//! path::to::module=log_level -//! ``` -//! -//! The path to the module is rooted in the name of the crate it was compiled for, -//! so if your program is contained in a file `hello.rs`, for example, to turn on -//! logging for this file you would use a value of `RUST_LOG=hello`. -//! Furthermore, this path is a prefix-search, so all modules nested in the -//! specified module will also have logging enabled. -//! -//! The actual `log_level` is optional to specify. If omitted, all logging will be -//! enabled. If specified, the it must be either a numeric in the range of 1-255, or -//! it must be one of the strings `debug`, `error`, `info`, or `warn`. If a numeric -//! is specified, then all logging less than or equal to that numeral is enabled. -//! For example, if logging level 3 is active, error, warn, and info logs will be -//! printed, but debug will be omitted. -//! -//! As the log level for a module is optional, the module to enable logging for is -//! also optional. If only a `log_level` is provided, then the global log level for -//! 
all modules is set to this value. -//! -//! Some examples of valid values of `RUST_LOG` are: -//! -//! * `hello` turns on all logging for the 'hello' module -//! * `info` turns on all info logging -//! * `hello=debug` turns on debug logging for 'hello' -//! * `hello=3` turns on info logging for 'hello' -//! * `hello,std::option` turns on hello, and std's option logging -//! * `error,hello=warn` turn on global error logging and also warn for hello -//! -//! # Filtering results -//! -//! A RUST_LOG directive may include a string filter. The syntax is to append -//! `/` followed by a string. Each message is checked against the string and is -//! only logged if it contains the string. Note that the matching is done after -//! formatting the log string but before adding any logging meta-data. There is -//! a single filter for all modules. -//! -//! Some examples: -//! -//! * `hello/foo` turns on all logging for the 'hello' module where the log message -//! includes 'foo'. -//! * `info/f.o` turns on all info logging where the log message includes 'foo', -//! 'f1o', 'fao', etc. -//! * `hello=debug/foo*foo` turns on debug logging for 'hello' where the log -//! message includes 'foofoo' or 'fofoo' or 'fooooooofoo', etc. -//! * `error,hello=warn/[0-9] scopes` turn on global error logging and also warn for -//! hello. In both cases the log message must include a single digit number -//! followed by 'scopes' -//! -//! # Performance and Side Effects -//! -//! Each of these macros will expand to code similar to: -//! -//! ```rust,ignore -//! if log_level <= my_module_log_level() { -//! ::log::log(log_level, format!(...)); -//! } -//! ``` -//! -//! What this means is that each of these macros are very cheap at runtime if -//! they're turned off (just a load and an integer comparison). This also means that -//! if logging is disabled, none of the components of the log will be executed. - -#![crate_name = "log"] -#![unstable(feature = "rustc_private", - reason = "use the crates.io `log` library instead", - issue = "27812")] -#![crate_type = "rlib"] -#![crate_type = "dylib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - html_playground_url = "https://play.rust-lang.org/", - test(attr(deny(warnings))))] -#![deny(missing_docs)] -#![deny(warnings)] - -#![feature(staged_api)] - -use std::cell::RefCell; -use std::fmt; -use std::io::{self, Stderr}; -use std::io::prelude::*; -use std::mem; -use std::env; -use std::slice; -use std::sync::{Mutex, ONCE_INIT, Once}; - -use directive::LOG_LEVEL_NAMES; - -#[macro_use] -pub mod macros; - -mod directive; - -/// Maximum logging level of a module that can be specified. Common logging -/// levels are found in the DEBUG/INFO/WARN/ERROR constants. -pub const MAX_LOG_LEVEL: u32 = 255; - -/// The default logging level of a crate if no other is specified. -const DEFAULT_LOG_LEVEL: u32 = 1; - -static mut LOCK: *mut Mutex<(Vec, Option)> = 0 as *mut _; - -/// An unsafe constant that is the maximum logging level of any module -/// specified. This is the first line of defense to determining whether a -/// logging statement should be run. -static mut LOG_LEVEL: u32 = MAX_LOG_LEVEL; - -/// Debug log level -pub const DEBUG: u32 = 4; -/// Info log level -pub const INFO: u32 = 3; -/// Warn log level -pub const WARN: u32 = 2; -/// Error log level -pub const ERROR: u32 = 1; - -thread_local! 
{ - static LOCAL_LOGGER: RefCell>> = { - RefCell::new(None) - } -} - -/// A trait used to represent an interface to a thread-local logger. Each thread -/// can have its own custom logger which can respond to logging messages -/// however it likes. -pub trait Logger { - /// Logs a single message described by the `record`. - fn log(&mut self, record: &LogRecord); -} - -struct DefaultLogger { - handle: Stderr, -} - -/// Wraps the log level with fmt implementations. -#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] -pub struct LogLevel(pub u32); - -impl fmt::Display for LogLevel { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let LogLevel(level) = *self; - match LOG_LEVEL_NAMES.get(level as usize - 1) { - Some(ref name) => fmt::Display::fmt(name, fmt), - None => fmt::Display::fmt(&level, fmt), - } - } -} - -impl Logger for DefaultLogger { - fn log(&mut self, record: &LogRecord) { - match writeln!(&mut self.handle, - "{}:{}: {}", - record.level, - record.module_path, - record.args) { - Err(e) => panic!("failed to log: {:?}", e), - Ok(()) => {} - } - } -} - -impl Drop for DefaultLogger { - fn drop(&mut self) { - // FIXME(#12628): is panicking the right thing to do? - match self.handle.flush() { - Err(e) => panic!("failed to flush a logger: {:?}", e), - Ok(()) => {} - } - } -} - -/// This function is called directly by the compiler when using the logging -/// macros. This function does not take into account whether the log level -/// specified is active or not, it will always log something if this method is -/// called. -/// -/// It is not recommended to call this function directly, rather it should be -/// invoked through the logging family of macros. -#[doc(hidden)] -pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { - // Test the literal string from args against the current filter, if there - // is one. - unsafe { - let filter = (*LOCK).lock().unwrap(); - if let Some(ref filter) = filter.1 { - if !args.to_string().contains(filter) { - return; - } - } - } - - // Completely remove the local logger from TLS in case anyone attempts to - // frob the slot while we're doing the logging. This will destroy any logger - // set during logging. - let logger = LOCAL_LOGGER.with(|s| s.borrow_mut().take()); - let mut logger = logger.unwrap_or_else(|| Box::new(DefaultLogger { handle: io::stderr() })); - logger.log(&LogRecord { - level: LogLevel(level), - args: args, - file: loc.file, - module_path: loc.module_path, - line: loc.line, - }); - set_logger(logger); -} - -/// Getter for the global log level. This is a function so that it can be called -/// safely -#[doc(hidden)] -#[inline(always)] -pub fn log_level() -> u32 { - unsafe { LOG_LEVEL } -} - -/// Replaces the thread-local logger with the specified logger, returning the old -/// logger. -pub fn set_logger(logger: Box) -> Option> { - LOCAL_LOGGER.with(|slot| mem::replace(&mut *slot.borrow_mut(), Some(logger))) -} - -/// A LogRecord is created by the logging macros, and passed as the only -/// argument to Loggers. -#[derive(Debug)] -pub struct LogRecord<'a> { - /// The module path of where the LogRecord originated. - pub module_path: &'a str, - - /// The LogLevel of this record. - pub level: LogLevel, - - /// The arguments from the log line. - pub args: fmt::Arguments<'a>, - - /// The file of where the LogRecord originated. - pub file: &'a str, - - /// The line number of where the LogRecord originated. 
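The `Logger` trait, `set_logger`, and `LogRecord` shown in this deleted module were the extension point for routing output somewhere other than stderr; the `rust-log-filter` test removed further down does exactly this with a channel-backed logger. A minimal sketch of that pattern against the liblog API above (it assumes a nightly compiler with `rustc_private`, and the `PrefixLogger` type is made up for illustration):

```rust
#![feature(rustc_private)]

#[macro_use]
extern crate log;

// A made-up logger that prepends a tag; anything implementing `log::Logger`
// can be installed per thread via `log::set_logger`.
struct PrefixLogger;

impl log::Logger for PrefixLogger {
    fn log(&mut self, record: &log::LogRecord) {
        println!("[custom] {}:{}: {}", record.level, record.module_path, record.args);
    }
}

fn main() {
    log::set_logger(Box::new(PrefixLogger));
    // error! is enabled by default (level 1), so this goes through PrefixLogger.
    error!("routed through the thread-local logger");
}
```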
- pub line: u32, -} - -#[doc(hidden)] -#[derive(Copy, Clone)] -pub struct LogLocation { - pub module_path: &'static str, - pub file: &'static str, - pub line: u32, -} - -/// Tests whether a given module's name is enabled for a particular level of -/// logging. This is the second layer of defense about determining whether a -/// module's log statement should be emitted or not. -#[doc(hidden)] -pub fn mod_enabled(level: u32, module: &str) -> bool { - static INIT: Once = ONCE_INIT; - INIT.call_once(init); - - // It's possible for many threads are in this function, only one of them - // will perform the global initialization, but all of them will need to check - // again to whether they should really be here or not. Hence, despite this - // check being expanded manually in the logging macro, this function checks - // the log level again. - if level > unsafe { LOG_LEVEL } { - return false; - } - - // This assertion should never get tripped unless we're in an at_exit - // handler after logging has been torn down and a logging attempt was made. - - unsafe { - let directives = (*LOCK).lock().unwrap(); - enabled(level, module, directives.0.iter()) - } -} - -fn enabled(level: u32, module: &str, iter: slice::Iter) -> bool { - // Search for the longest match, the vector is assumed to be pre-sorted. - for directive in iter.rev() { - match directive.name { - Some(ref name) if !module.starts_with(&name[..]) => {} - Some(..) | None => return level <= directive.level, - } - } - level <= DEFAULT_LOG_LEVEL -} - -/// Initialize logging for the current process. -/// -/// This is not threadsafe at all, so initialization is performed through a -/// `Once` primitive (and this function is called from that primitive). -fn init() { - let (mut directives, filter) = match env::var("RUST_LOG") { - Ok(spec) => directive::parse_logging_spec(&spec[..]), - Err(..) => (Vec::new(), None), - }; - - // Sort the provided directives by length of their name, this allows a - // little more efficient lookup at runtime. 
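`enabled` above resolves a module's level by scanning the directive list backwards, and the sort that follows orders directives by name length so that the backward scan hits the most specific prefix first. Below is a condensed, self-contained re-creation of that behaviour; `parse_spec` and the local `LogDirective` are stand-ins rather than the real `directive` module, and case-insensitive level names are omitted. It matches the semantics exercised by the `parse_logging_spec_*` and `match_beginning_longest_match` tests in this diff:

```rust
const MAX_LOG_LEVEL: u32 = 255;

struct LogDirective {
    name: Option<String>,
    level: u32,
}

fn parse_level(s: &str) -> Option<u32> {
    match s {
        "error" => Some(1),
        "warn" => Some(2),
        "info" => Some(3),
        "debug" => Some(4),
        _ => s.parse().ok(),
    }
}

// `spec` looks like "path::to::module=level,...[/filter]"; malformed
// directives are dropped, omitted levels mean "log everything".
fn parse_spec(spec: &str) -> (Vec<LogDirective>, Option<String>) {
    let mut halves = spec.splitn(2, '/');
    let dirs_part = halves.next().unwrap();
    let filter = halves.next().map(|s| s.to_string());

    let mut dirs = Vec::new();
    for token in dirs_part.split(',').filter(|s| !s.is_empty()) {
        let mut fields = token.split('=');
        let (name, level) = match (fields.next(), fields.next(), fields.next()) {
            // bare "info" is a global level; bare "crate1::mod2" means log it all
            (Some(tok), None, None) => match parse_level(tok) {
                Some(level) => (None, level),
                None => (Some(tok.to_string()), MAX_LOG_LEVEL),
            },
            // "crate2=" also means "log everything in crate2"
            (Some(tok), Some(""), None) => (Some(tok.to_string()), MAX_LOG_LEVEL),
            (Some(tok), Some(lvl), None) => match parse_level(lvl) {
                Some(level) => (Some(tok.to_string()), level),
                None => continue, // e.g. "crate1::mod1=noNumber"
            },
            _ => continue, // e.g. "crate1::mod1=1=2"
        };
        dirs.push(LogDirective { name: name, level: level });
    }
    (dirs, filter)
}

// Backward scan over directives sorted by name length: longest prefix wins.
fn enabled(level: u32, module: &str, dirs: &[LogDirective]) -> bool {
    for d in dirs.iter().rev() {
        match d.name {
            Some(ref name) if !module.starts_with(&name[..]) => {}
            _ => return level <= d.level,
        }
    }
    level <= 1 // default: only error!
}

fn main() {
    let (mut dirs, filter) = parse_spec("error,crate2=warn,crate2::mod=debug/foo");
    dirs.sort_by(|a, b| {
        let len = |d: &LogDirective| d.name.as_ref().map(|n| n.len()).unwrap_or(0);
        len(a).cmp(&len(b))
    });
    assert_eq!(filter, Some("foo".to_string()));
    assert!(enabled(4, "crate2::mod::sub", &dirs)); // most specific directive wins
    assert!(!enabled(3, "crate2::other", &dirs));   // falls back to crate2=warn
    assert!(enabled(1, "crate3", &dirs));           // global "error" directive
    assert!(!enabled(2, "crate3", &dirs));
}
```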
- directives.sort_by(|a, b| { - let alen = a.name.as_ref().map(|a| a.len()).unwrap_or(0); - let blen = b.name.as_ref().map(|b| b.len()).unwrap_or(0); - alen.cmp(&blen) - }); - - let max_level = { - let max = directives.iter().max_by_key(|d| d.level); - max.map(|d| d.level).unwrap_or(DEFAULT_LOG_LEVEL) - }; - - unsafe { - LOG_LEVEL = max_level; - - assert!(LOCK.is_null()); - LOCK = Box::into_raw(Box::new(Mutex::new((directives, filter)))); - } -} - -#[cfg(test)] -mod tests { - use super::enabled; - use directive::LogDirective; - - #[test] - fn match_full_path() { - let dirs = [LogDirective { - name: Some("crate2".to_string()), - level: 3, - }, - LogDirective { - name: Some("crate1::mod1".to_string()), - level: 2, - }]; - assert!(enabled(2, "crate1::mod1", dirs.iter())); - assert!(!enabled(3, "crate1::mod1", dirs.iter())); - assert!(enabled(3, "crate2", dirs.iter())); - assert!(!enabled(4, "crate2", dirs.iter())); - } - - #[test] - fn no_match() { - let dirs = [LogDirective { - name: Some("crate2".to_string()), - level: 3, - }, - LogDirective { - name: Some("crate1::mod1".to_string()), - level: 2, - }]; - assert!(!enabled(2, "crate3", dirs.iter())); - } - - #[test] - fn match_beginning() { - let dirs = [LogDirective { - name: Some("crate2".to_string()), - level: 3, - }, - LogDirective { - name: Some("crate1::mod1".to_string()), - level: 2, - }]; - assert!(enabled(3, "crate2::mod1", dirs.iter())); - } - - #[test] - fn match_beginning_longest_match() { - let dirs = [LogDirective { - name: Some("crate2".to_string()), - level: 3, - }, - LogDirective { - name: Some("crate2::mod".to_string()), - level: 4, - }, - LogDirective { - name: Some("crate1::mod1".to_string()), - level: 2, - }]; - assert!(enabled(4, "crate2::mod1", dirs.iter())); - assert!(!enabled(4, "crate2", dirs.iter())); - } - - #[test] - fn match_default() { - let dirs = [LogDirective { - name: None, - level: 3, - }, - LogDirective { - name: Some("crate1::mod1".to_string()), - level: 2, - }]; - assert!(enabled(2, "crate1::mod1", dirs.iter())); - assert!(enabled(3, "crate2::mod2", dirs.iter())); - } - - #[test] - fn zero_level() { - let dirs = [LogDirective { - name: None, - level: 3, - }, - LogDirective { - name: Some("crate1::mod1".to_string()), - level: 0, - }]; - assert!(!enabled(1, "crate1::mod1", dirs.iter())); - assert!(enabled(3, "crate2::mod2", dirs.iter())); - } -} diff --git a/src/liblog/macros.rs b/src/liblog/macros.rs deleted file mode 100644 index 803a2df9ccc..00000000000 --- a/src/liblog/macros.rs +++ /dev/null @@ -1,205 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Logging macros - -/// The standard logging macro -/// -/// This macro will generically log over a provided level (of type u32) with a -/// format!-based argument list. See documentation in `std::fmt` for details on -/// how to use the syntax. 
-/// -/// # Examples -/// -/// ``` -/// # #![feature(rustc_private)] -/// #[macro_use] extern crate log; -/// -/// fn main() { -/// log!(log::WARN, "this is a warning {}", "message"); -/// log!(log::DEBUG, "this is a debug message"); -/// log!(6, "this is a custom logging level: {level}", level=6); -/// } -/// ``` -/// -/// Assumes the binary is `main`: -/// -/// ```{.bash} -/// $ RUST_LOG=warn ./main -/// WARN:main: this is a warning message -/// ``` -/// -/// ```{.bash} -/// $ RUST_LOG=debug ./main -/// DEBUG:main: this is a debug message -/// WARN:main: this is a warning message -/// ``` -/// -/// ```{.bash} -/// $ RUST_LOG=6 ./main -/// DEBUG:main: this is a debug message -/// WARN:main: this is a warning message -/// 6:main: this is a custom logging level: 6 -/// ``` -#[macro_export] -macro_rules! log { - ($lvl:expr, $($arg:tt)+) => ({ - static LOC: ::log::LogLocation = ::log::LogLocation { - line: line!(), - file: file!(), - module_path: module_path!(), - }; - let lvl = $lvl; - if log_enabled!(lvl) { - ::log::log(lvl, &LOC, format_args!($($arg)+)) - } - }) -} - -/// A convenience macro for logging at the error log level. -/// -/// # Examples -/// -/// ``` -/// # #![feature(rustc_private)] -/// #[macro_use] extern crate log; -/// -/// fn main() { -/// let error = 3; -/// error!("the build has failed with error code: {}", error); -/// } -/// ``` -/// -/// Assumes the binary is `main`: -/// -/// ```{.bash} -/// $ RUST_LOG=error ./main -/// ERROR:main: the build has failed with error code: 3 -/// ``` -/// -#[macro_export] -macro_rules! error { - ($($arg:tt)*) => (log!(::log::ERROR, $($arg)*)) -} - -/// A convenience macro for logging at the warning log level. -/// -/// # Examples -/// -/// ``` -/// # #![feature(rustc_private)] -/// #[macro_use] extern crate log; -/// -/// fn main() { -/// let code = 3; -/// warn!("you may like to know that a process exited with: {}", code); -/// } -/// ``` -/// -/// Assumes the binary is `main`: -/// -/// ```{.bash} -/// $ RUST_LOG=warn ./main -/// WARN:main: you may like to know that a process exited with: 3 -/// ``` -#[macro_export] -macro_rules! warn { - ($($arg:tt)*) => (log!(::log::WARN, $($arg)*)) -} - -/// A convenience macro for logging at the info log level. -/// -/// # Examples -/// -/// ``` -/// # #![feature(rustc_private)] -/// #[macro_use] extern crate log; -/// -/// fn main() { -/// let ret = 3; -/// info!("this function is about to return: {}", ret); -/// } -/// ``` -/// -/// Assumes the binary is `main`: -/// -/// ```{.bash} -/// $ RUST_LOG=info ./main -/// INFO:main: this function is about to return: 3 -/// ``` -#[macro_export] -macro_rules! info { - ($($arg:tt)*) => (log!(::log::INFO, $($arg)*)) -} - -/// A convenience macro for logging at the debug log level. This macro will -/// be omitted at compile time in an optimized build unless `-C debug-assertions` -/// is passed to the compiler. -/// -/// # Examples -/// -/// ``` -/// # #![feature(rustc_private)] -/// #[macro_use] extern crate log; -/// -/// fn main() { -/// debug!("x = {x}, y = {y}", x=10, y=20); -/// } -/// ``` -/// -/// Assumes the binary is `main`: -/// -/// ```{.bash} -/// $ RUST_LOG=debug ./main -/// DEBUG:main: x = 10, y = 20 -/// ``` -#[macro_export] -macro_rules! debug { - ($($arg:tt)*) => (if cfg!(debug_assertions) { log!(::log::DEBUG, $($arg)*) }) -} - -/// A macro to test whether a log level is enabled for the current module. 
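Per the `log!` definition just above, an invocation such as `error!("exit code {}", code)` reduces to a static call-site record plus a guarded call into `log::log`, which is the cheap load-and-compare the crate docs promise when a level is disabled. A rough hand-expansion against the deleted liblog API (nightly `rustc_private` again); the guard is simplified relative to what `log_enabled!` really emits:

```rust
#![feature(rustc_private)]

extern crate log;

fn log_by_hand(code: i32) {
    // What the macro captures once per call site via line!/file!/module_path!().
    static LOC: log::LogLocation = log::LogLocation {
        line: line!(),
        file: file!(),
        module_path: module_path!(),
    };
    let lvl = log::ERROR;
    // Simplified guard: the real log_enabled! additionally compiles DEBUG
    // out unless debug-assertions are enabled.
    if lvl <= log::log_level() && log::mod_enabled(lvl, LOC.module_path) {
        log::log(lvl, &LOC, format_args!("exit code {}", code));
    }
}

fn main() {
    log_by_hand(3);
}
```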
-/// -/// # Examples -/// -/// ``` -/// # #![feature(rustc_private)] -/// #[macro_use] extern crate log; -/// -/// struct Point { x: i32, y: i32 } -/// fn some_expensive_computation() -> Point { Point { x: 1, y: 2 } } -/// -/// fn main() { -/// if log_enabled!(log::DEBUG) { -/// let x = some_expensive_computation(); -/// debug!("x.x = {}, x.y = {}", x.x, x.y); -/// } -/// } -/// ``` -/// -/// Assumes the binary is `main`: -/// -/// ```{.bash} -/// $ RUST_LOG=error ./main -/// ``` -/// -/// ```{.bash} -/// $ RUST_LOG=debug ./main -/// DEBUG:main: x.x = 1, x.y = 2 -/// ``` -#[macro_export] -macro_rules! log_enabled { - ($lvl:expr) => ({ - let lvl = $lvl; - (lvl != ::log::DEBUG || cfg!(debug_assertions)) && - lvl <= ::log::log_level() && - ::log::mod_enabled(lvl, module_path!()) - }) -} diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index 5d53c60ad7f..fa217acd9f9 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -12,7 +12,7 @@ crate-type = ["dylib"] arena = { path = "../libarena" } fmt_macros = { path = "../libfmt_macros" } graphviz = { path = "../libgraphviz" } -log = { path = "../liblog" } +log = "0.3" rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 583b3b848f3..d7aa36b24f9 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -948,7 +948,7 @@ pub fn map_crate<'hir>(forest: &'hir mut Forest, intravisit::walk_crate(&mut collector, &forest.krate); let map = collector.map; - if log_enabled!(::log::DEBUG) { + if log_enabled!(::log::LogLevel::Debug) { // This only makes sense for ordered stores; note the // enumerate to count the number of entries. 
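The Cargo.toml hunks in this commit swap the in-tree `log = { path = "../liblog" }` for crates.io `log = "0.3"`, and the call sites change with it: the u32 constants become the `LogLevel` enum (`log_enabled!(::log::LogLevel::Debug)` above), and initialization becomes an explicit `env_logger::init()` in the driver and rustdoc entry points further down instead of liblog's lazy `RUST_LOG` parsing. A minimal consumer of the new pair, using only the APIs the diff itself calls:

```rust
#[macro_use]
extern crate log;
extern crate env_logger;

fn main() {
    // One-time setup; with liblog this happened implicitly on first use.
    env_logger::init().unwrap();

    info!("still driven by RUST_LOG, e.g. RUST_LOG=info");
    if log_enabled!(log::LogLevel::Debug) {
        debug!("guarded, as in hir::map::map_crate above");
    }
}
```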
let (entries_less_1, _) = map.iter().filter(|&x| { diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 874e032bc46..38699105290 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -202,7 +202,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } else { // for local crates, check whether type info is // available; typeck might not have completed yet - self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id) + self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id) && + self.maps.ty.borrow().contains_key(&impl_def_id) }; if !use_types { diff --git a/src/librustc_back/Cargo.toml b/src/librustc_back/Cargo.toml index 85e861b405a..730abc54568 100644 --- a/src/librustc_back/Cargo.toml +++ b/src/librustc_back/Cargo.toml @@ -11,7 +11,7 @@ crate-type = ["dylib"] [dependencies] syntax = { path = "../libsyntax" } serialize = { path = "../libserialize" } -log = { path = "../liblog" } +log = "0.3" [features] jemalloc = [] diff --git a/src/librustc_borrowck/Cargo.toml b/src/librustc_borrowck/Cargo.toml index d53318f1768..af99c0e9387 100644 --- a/src/librustc_borrowck/Cargo.toml +++ b/src/librustc_borrowck/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] test = false [dependencies] -log = { path = "../liblog" } +log = "0.3" syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } graphviz = { path = "../libgraphviz" } diff --git a/src/librustc_const_eval/Cargo.toml b/src/librustc_const_eval/Cargo.toml index 780b2c16a32..907410f74dc 100644 --- a/src/librustc_const_eval/Cargo.toml +++ b/src/librustc_const_eval/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] [dependencies] arena = { path = "../libarena" } -log = { path = "../liblog" } +log = "0.3" rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_math = { path = "../librustc_const_math" } diff --git a/src/librustc_data_structures/Cargo.toml b/src/librustc_data_structures/Cargo.toml index e2e16059d98..343b1ed68b8 100644 --- a/src/librustc_data_structures/Cargo.toml +++ b/src/librustc_data_structures/Cargo.toml @@ -9,5 +9,5 @@ path = "lib.rs" crate-type = ["dylib"] [dependencies] -log = { path = "../liblog" } +log = "0.3" serialize = { path = "../libserialize" } diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index caa5c8b7e00..5b5113caa8e 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -11,7 +11,8 @@ crate-type = ["dylib"] [dependencies] arena = { path = "../libarena" } graphviz = { path = "../libgraphviz" } -log = { path = "../liblog" } +log = { version = "0.3", features = ["release_max_level_info"] } +env_logger = { version = "0.4", default-features = false } proc_macro_plugin = { path = "../libproc_macro_plugin" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 5d600270626..91f9a803f4a 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -198,13 +198,13 @@ pub fn compile_input(sess: &Session, result?; - if log_enabled!(::log::INFO) { + if log_enabled!(::log::LogLevel::Info) { println!("Pre-trans"); tcx.print_debug_stats(); } let trans = phase_4_translate_to_llvm(tcx, analysis, &incremental_hashes_map); - if log_enabled!(::log::INFO) { + if log_enabled!(::log::LogLevel::Info) { println!("Post-trans"); tcx.print_debug_stats(); } diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 62d75126557..68b9f85721a 
100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -35,6 +35,7 @@ extern crate arena; extern crate getopts; extern crate graphviz; +extern crate env_logger; extern crate libc; extern crate rustc; extern crate rustc_back; @@ -1127,6 +1128,7 @@ pub fn diagnostics_registry() -> errors::registry::Registry { } pub fn main() { + env_logger::init().unwrap(); let result = run(|| run_compiler(&env::args().collect::>(), &mut RustcDefaultCalls, None, diff --git a/src/librustc_incremental/Cargo.toml b/src/librustc_incremental/Cargo.toml index e3ee7527545..7bf2efa4b88 100644 --- a/src/librustc_incremental/Cargo.toml +++ b/src/librustc_incremental/Cargo.toml @@ -13,6 +13,6 @@ graphviz = { path = "../libgraphviz" } rustc = { path = "../librustc" } rustc_data_structures = { path = "../librustc_data_structures" } serialize = { path = "../libserialize" } -log = { path = "../liblog" } +log = "0.3" syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_lint/Cargo.toml b/src/librustc_lint/Cargo.toml index 4d5c0d7ba0a..c3c5461ff7c 100644 --- a/src/librustc_lint/Cargo.toml +++ b/src/librustc_lint/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] test = false [dependencies] -log = { path = "../liblog" } +log = "0.3" rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index 6f7f03ca216..e8b90609273 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] [dependencies] flate = { path = "../libflate" } -log = { path = "../liblog" } +log = "0.3" proc_macro = { path = "../libproc_macro" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index fcdb968dc06..e1255110a83 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -1058,7 +1058,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { self.inject_allocator_crate(); self.inject_panic_runtime(krate); - if log_enabled!(log::INFO) { + if log_enabled!(log::LogLevel::Info) { dump_crates(&self.cstore); } diff --git a/src/librustc_mir/Cargo.toml b/src/librustc_mir/Cargo.toml index 531be0b6ae9..6e42e02d510 100644 --- a/src/librustc_mir/Cargo.toml +++ b/src/librustc_mir/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] [dependencies] graphviz = { path = "../libgraphviz" } -log = { path = "../liblog" } +log = "0.3" rustc = { path = "../librustc" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } diff --git a/src/librustc_passes/Cargo.toml b/src/librustc_passes/Cargo.toml index cc710e0ac35..d2560c2f820 100644 --- a/src/librustc_passes/Cargo.toml +++ b/src/librustc_passes/Cargo.toml @@ -9,10 +9,10 @@ path = "lib.rs" crate-type = ["dylib"] [dependencies] -log = { path = "../liblog" } +log = "0.3" rustc = { path = "../librustc" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } -rustc_errors = { path = "../librustc_errors" } \ No newline at end of file +rustc_errors = { path = "../librustc_errors" } diff --git a/src/librustc_resolve/Cargo.toml b/src/librustc_resolve/Cargo.toml index 5ce4c74e735..0968ea31b75 100644 --- 
a/src/librustc_resolve/Cargo.toml +++ b/src/librustc_resolve/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] test = false [dependencies] -log = { path = "../liblog" } +log = "0.3" syntax = { path = "../libsyntax" } rustc = { path = "../librustc" } arena = { path = "../libarena" } diff --git a/src/librustc_save_analysis/Cargo.toml b/src/librustc_save_analysis/Cargo.toml index 06c5150fd13..07a5c266fc0 100644 --- a/src/librustc_save_analysis/Cargo.toml +++ b/src/librustc_save_analysis/Cargo.toml @@ -9,7 +9,7 @@ path = "lib.rs" crate-type = ["dylib"] [dependencies] -log = { path = "../liblog" } +log = "0.3" rustc = { path = "../librustc" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_trans/Cargo.toml b/src/librustc_trans/Cargo.toml index b5c67ad998b..07dcb2fc29d 100644 --- a/src/librustc_trans/Cargo.toml +++ b/src/librustc_trans/Cargo.toml @@ -11,7 +11,7 @@ test = false [dependencies] flate = { path = "../libflate" } -log = { path = "../liblog" } +log = "0.3" rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } diff --git a/src/librustc_typeck/Cargo.toml b/src/librustc_typeck/Cargo.toml index f08d26373e5..07998aa4a30 100644 --- a/src/librustc_typeck/Cargo.toml +++ b/src/librustc_typeck/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] test = false [dependencies] -log = { path = "../liblog" } +log = "0.3" syntax = { path = "../libsyntax" } arena = { path = "../libarena" } fmt_macros = { path = "../libfmt_macros" } diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index 93c0bd6d6d8..1c479ce1d01 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -11,11 +11,13 @@ crate-type = ["dylib"] [dependencies] arena = { path = "../libarena" } +env_logger = { version = "0.4", default-features = false } +log = "0.3" rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } -rustc_driver = { path = "../librustc_driver" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_driver = { path = "../librustc_driver" } rustc_errors = { path = "../librustc_errors" } rustc_lint = { path = "../librustc_lint" } rustc_metadata = { path = "../librustc_metadata" } @@ -24,7 +26,6 @@ rustc_trans = { path = "../librustc_trans" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } -log = { path = "../liblog" } [build-dependencies] build_helper = { path = "../build_helper" } diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 84f69cd3504..8dd03f6edc4 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -30,6 +30,7 @@ extern crate arena; extern crate getopts; +extern crate env_logger; extern crate libc; extern crate rustc; extern crate rustc_const_eval; @@ -99,6 +100,7 @@ struct Output { pub fn main() { const STACK_SIZE: usize = 32_000_000; // 32MB + env_logger::init().unwrap(); let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || { let s = env::args().collect::>(); main_args(&s) diff --git a/src/libsyntax/Cargo.toml b/src/libsyntax/Cargo.toml index 0b38f5450b6..97d37266130 100644 --- a/src/libsyntax/Cargo.toml +++ b/src/libsyntax/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] [dependencies] serialize = { path = "../libserialize" } -log = { path = "../liblog" } +log = "0.3" rustc_bitflags = { path = "../librustc_bitflags" } syntax_pos = { path = 
"../libsyntax_pos" } rustc_errors = { path = "../librustc_errors" } diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml index 960db792a62..bdcec26cb83 100644 --- a/src/libsyntax_ext/Cargo.toml +++ b/src/libsyntax_ext/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["dylib"] [dependencies] fmt_macros = { path = "../libfmt_macros" } -log = { path = "../liblog" } +log = "0.3" proc_macro = { path = "../libproc_macro" } rustc_errors = { path = "../librustc_errors" } syntax = { path = "../libsyntax" } diff --git a/src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs b/src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs deleted file mode 100644 index db26b10fc67..00000000000 --- a/src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#![feature(rustc_private)] - -#[macro_use] extern crate log; - -pub fn foo() { - fn death() -> isize { panic!() } - debug!("{}", (||{ death() })()); -} diff --git a/src/test/run-pass-fulldeps/conditional-debug-macro-off.rs b/src/test/run-pass-fulldeps/conditional-debug-macro-off.rs deleted file mode 100644 index c6beb5ba358..00000000000 --- a/src/test/run-pass-fulldeps/conditional-debug-macro-off.rs +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags: -C debug-assertions=no -// exec-env:RUST_LOG=conditional-debug-macro-off=4 - - -#![feature(rustc_private)] - -#[macro_use] -extern crate log; - -pub fn main() { - // only panics if println! evaluates its argument. - debug!("{:?}", { if true { panic!() } }); -} diff --git a/src/test/run-pass-fulldeps/logging-enabled-debug.rs b/src/test/run-pass-fulldeps/logging-enabled-debug.rs deleted file mode 100644 index 3ae4884ce47..00000000000 --- a/src/test/run-pass-fulldeps/logging-enabled-debug.rs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// compile-flags:-C debug-assertions=no -// exec-env:RUST_LOG=logging-enabled-debug=debug - - -#![feature(rustc_private)] - -#[macro_use] -extern crate log; - -pub fn main() { - if log_enabled!(log::DEBUG) { - panic!("what?! debugging?"); - } -} diff --git a/src/test/run-pass-fulldeps/logging-enabled.rs b/src/test/run-pass-fulldeps/logging-enabled.rs deleted file mode 100644 index 26261348020..00000000000 --- a/src/test/run-pass-fulldeps/logging-enabled.rs +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. 
-// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// exec-env:RUST_LOG=logging_enabled=info -// ignore-emscripten: FIXME(#31622) - - -#![feature(rustc_private)] - -#[macro_use] -extern crate log; - -pub fn main() { - if log_enabled!(log::DEBUG) { - panic!("what?! debugging?"); - } - if !log_enabled!(log::INFO) { - panic!("what?! no info?"); - } -} diff --git a/src/test/run-pass-fulldeps/logging-right-crate.rs b/src/test/run-pass-fulldeps/logging-right-crate.rs deleted file mode 100644 index 7caeeb40124..00000000000 --- a/src/test/run-pass-fulldeps/logging-right-crate.rs +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// aux-build:logging_right_crate.rs -// exec-env:RUST_LOG=logging-right-crate=debug - -// This is a test for issue #3046 to make sure that when we monomorphize a -// function from one crate to another the right top-level logging name is -// preserved. -// -// It used to be the case that if logging were turned on for this crate, all -// monomorphized functions from other crates had logging turned on (their -// logging module names were all incorrect). This test ensures that this no -// longer happens by enabling logging for *this* crate and then invoking a -// function in an external crate which will panic when logging is enabled. - -// pretty-expanded FIXME #23616 - -extern crate logging_right_crate; - -pub fn main() { - // this function panicks if logging is turned on - logging_right_crate::foo::(); -} diff --git a/src/test/run-pass-fulldeps/logging-separate-lines.rs b/src/test/run-pass-fulldeps/logging-separate-lines.rs deleted file mode 100644 index 183a522bba7..00000000000 --- a/src/test/run-pass-fulldeps/logging-separate-lines.rs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// ignore-windows -// exec-env:RUST_LOG=debug -// compile-flags:-C debug-assertions=y -// ignore-emscripten: FIXME(#31622) - -#![feature(rustc_private)] - -#[macro_use] -extern crate log; - -use std::process::Command; -use std::env; -use std::str; - -fn main() { - let args: Vec = env::args().collect(); - if args.len() > 1 && args[1] == "child" { - debug!("foo"); - debug!("bar"); - return - } - - let p = Command::new(&args[0]) - .arg("child") - .output().unwrap(); - assert!(p.status.success()); - let mut lines = str::from_utf8(&p.stderr).unwrap().lines(); - assert!(lines.next().unwrap().contains("foo")); - assert!(lines.next().unwrap().contains("bar")); -} diff --git a/src/test/run-pass-fulldeps/rust-log-filter.rs b/src/test/run-pass-fulldeps/rust-log-filter.rs deleted file mode 100644 index 306d24e3177..00000000000 --- a/src/test/run-pass-fulldeps/rust-log-filter.rs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// exec-env:RUST_LOG=rust_log_filter/foo -// ignore-emscripten no threads support - -#![allow(unknown_features)] -#![feature(box_syntax, std_misc, rustc_private)] - -#[macro_use] -extern crate log; - -use std::sync::mpsc::{channel, Sender, Receiver}; -use std::thread; - -pub struct ChannelLogger { - tx: Sender -} - -impl ChannelLogger { - pub fn new() -> (Box, Receiver) { - let (tx, rx) = channel(); - (box ChannelLogger { tx: tx }, rx) - } -} - -impl log::Logger for ChannelLogger { - fn log(&mut self, record: &log::LogRecord) { - self.tx.send(format!("{}", record.args)).unwrap(); - } -} - -pub fn main() { - let (logger, rx) = ChannelLogger::new(); - - let t = thread::spawn(move|| { - log::set_logger(logger); - - info!("foo"); - info!("bar"); - info!("foo bar"); - info!("bar foo"); - }); - - assert_eq!(rx.recv().unwrap(), "foo"); - assert_eq!(rx.recv().unwrap(), "foo bar"); - assert_eq!(rx.recv().unwrap(), "bar foo"); - assert!(rx.recv().is_err()); - - t.join(); -} diff --git a/src/test/run-pass/conditional-debug-macro-on.rs b/src/test/run-pass/conditional-debug-macro-on.rs index b335e20f91d..7da33be7a57 100644 --- a/src/test/run-pass/conditional-debug-macro-on.rs +++ b/src/test/run-pass/conditional-debug-macro-on.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// exec-env:RUST_LOG=conditional-debug-macro-on=4 - pub fn main() { // exits early if println! evaluates its arguments, otherwise it // will hit the panic. diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index 1fc98a78a7c..7530b65a9b7 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -5,6 +5,6 @@ version = "0.0.0" [dependencies] log = "0.3" -env_logger = { version = "0.3.5", default-features = false } +env_logger = { version = "0.4", default-features = false } rustc-serialize = "0.3" filetime = "0.1" -- cgit 1.4.1-3-g733a5 From 29a052d2d807dcb9f1d45878a3083af7a993263d Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Sat, 25 Mar 2017 04:04:13 +0000 Subject: Fix ICE with nested macros in certain situations. --- src/libsyntax/ext/placeholders.rs | 2 +- src/test/run-pass/issue-40770.rs | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 src/test/run-pass/issue-40770.rs (limited to 'src/libsyntax') diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index f0e328a551d..2d0994a7b78 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -106,8 +106,8 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> { fn fold_item(&mut self, item: P) -> SmallVector> { match item.node { - ast::ItemKind::Mac(ref mac) if !mac.node.path.segments.is_empty() => {} ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(), + ast::ItemKind::MacroDef(_) => return SmallVector::one(item), _ => {} } diff --git a/src/test/run-pass/issue-40770.rs b/src/test/run-pass/issue-40770.rs new file mode 100644 index 00000000000..599d0b273e3 --- /dev/null +++ b/src/test/run-pass/issue-40770.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! m { + ($e:expr) => { + macro_rules! n { () => { $e } } + } +} + +fn main() { + m!(foo!()); +} -- cgit 1.4.1-3-g733a5 From 79feb9476d9275cb6abac88affdbfd3c922a2805 Mon Sep 17 00:00:00 2001 From: Oliver Schneider Date: Thu, 9 Mar 2017 10:10:18 +0100 Subject: allow `InternedString` to be compared to `str` directly --- src/libsyntax/symbol.rs | 44 ++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 40 insertions(+), 4 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs index 6642c60d256..2acbeee426b 100644 --- a/src/libsyntax/symbol.rs +++ b/src/libsyntax/symbol.rs @@ -72,9 +72,9 @@ impl Decodable for Symbol { } } -impl<'a> PartialEq<&'a str> for Symbol { - fn eq(&self, other: &&str) -> bool { - *self.as_str() == **other +impl> PartialEq for Symbol { + fn eq(&self, other: &T) -> bool { + self.as_str() == other.deref() } } @@ -244,11 +244,47 @@ fn with_interner T>(f: F) -> T { /// destroyed. In particular, they must not access string contents. This can /// be fixed in the future by just leaking all strings until thread death /// somehow. -#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] +#[derive(Clone, Hash, PartialOrd, Eq, Ord)] pub struct InternedString { string: &'static str, } +impl ::std::convert::AsRef for InternedString where str: ::std::convert::AsRef { + fn as_ref(&self) -> &U { + self.string.as_ref() + } +} + +impl> ::std::cmp::PartialEq for InternedString { + fn eq(&self, other: &T) -> bool { + self.string == other.deref() + } +} + +impl ::std::cmp::PartialEq for str { + fn eq(&self, other: &InternedString) -> bool { + self == other.string + } +} + +impl<'a> ::std::cmp::PartialEq for &'a str { + fn eq(&self, other: &InternedString) -> bool { + *self == other.string + } +} + +impl ::std::cmp::PartialEq for String { + fn eq(&self, other: &InternedString) -> bool { + self == other.string + } +} + +impl<'a> ::std::cmp::PartialEq for &'a String { + fn eq(&self, other: &InternedString) -> bool { + *self == other.string + } +} + impl !Send for InternedString { } impl ::std::ops::Deref for InternedString { -- cgit 1.4.1-3-g733a5 From eb447f4ef436f0c6211a13de1e6150a09228a9c6 Mon Sep 17 00:00:00 2001 From: Oliver Schneider Date: Fri, 24 Mar 2017 09:31:26 +0100 Subject: Fix various useless derefs and slicings --- src/bootstrap/check.rs | 2 +- src/grammar/verify.rs | 6 +++--- src/libcollections/linked_list.rs | 2 +- src/libgraphviz/lib.rs | 6 +++--- src/librustc/ich/fingerprint.rs | 4 ++-- src/librustc/lint/context.rs | 5 ++--- src/librustc/middle/stability.rs | 2 +- src/librustc_borrowck/borrowck/fragments.rs | 24 ++++++++++++------------ src/librustc_borrowck/borrowck/mod.rs | 2 +- src/librustc_borrowck/graphviz.rs | 2 +- src/librustc_const_eval/_match.rs | 4 ++-- src/librustc_const_eval/check_match.rs | 2 +- src/librustc_data_structures/accumulate_vec.rs | 8 ++++---- src/librustc_data_structures/base_n.rs | 2 +- src/librustc_data_structures/blake2b.rs | 2 +- src/librustc_data_structures/indexed_set.rs | 8 ++++---- src/librustc_driver/lib.rs | 10 +++++----- src/librustc_driver/pretty.rs | 6 +++--- src/librustc_driver/test.rs | 2 +- src/librustc_incremental/persist/file_format.rs | 4 ++-- 
src/librustc_lint/bad_style.rs | 2 +- src/librustc_lint/builtin.rs | 2 +- src/librustc_lint/unused.rs | 2 +- src/librustc_llvm/build.rs | 4 ++-- src/librustc_metadata/creader.rs | 2 +- src/librustc_metadata/encoder.rs | 4 ++-- src/librustc_metadata/locator.rs | 6 +++--- src/librustc_plugin/load.rs | 6 +++--- src/librustc_save_analysis/csv_dumper.rs | 2 +- src/librustc_trans/abi.rs | 2 +- src/librustc_trans/adt.rs | 8 ++++---- src/librustc_trans/asm.rs | 4 ++-- src/librustc_trans/back/archive.rs | 4 ++-- src/librustc_trans/back/link.rs | 14 +++++++------- src/librustc_trans/back/lto.rs | 4 ++-- src/librustc_trans/back/rpath.rs | 14 +++++++------- src/librustc_trans/back/symbol_export.rs | 4 ++-- src/librustc_trans/back/symbol_names.rs | 2 +- src/librustc_trans/back/write.rs | 12 ++++++------ src/librustc_trans/base.rs | 12 ++++++------ src/librustc_trans/builder.rs | 6 +++--- src/librustdoc/html/render.rs | 4 ++-- src/libsyntax/ext/tt/macro_rules.rs | 2 +- src/libsyntax/feature_gate.rs | 9 ++++----- src/libsyntax/parse/parser.rs | 6 +++--- src/libsyntax/test.rs | 2 +- 46 files changed, 120 insertions(+), 122 deletions(-) (limited to 'src/libsyntax') diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 40cdb9242df..f8f641060c4 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -586,7 +586,7 @@ fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) { .arg(ADB_TEST_DIR)); let target_dir = format!("{}/{}", ADB_TEST_DIR, target); - build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]])); + build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir])); for f in t!(build.sysroot_libdir(compiler, target).read_dir()) { let f = t!(f); diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs index 919fc98e438..bd28a63c5f4 100644 --- a/src/grammar/verify.rs +++ b/src/grammar/verify.rs @@ -196,7 +196,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap, surrogate_ let toknum = &s[content_end + 3 .. 
toknum_end]; let not_found = format!("didn't find token {:?} in the map", toknum); - let proto_tok = tokens.get(toknum).expect(¬_found[..]); + let proto_tok = tokens.get(toknum).expect(¬_found); let nm = Symbol::intern(content); @@ -304,14 +304,14 @@ fn main() { let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap(); let mut token_list = String::new(); token_file.read_to_string(&mut token_list).unwrap(); - let token_map = parse_token_list(&token_list[..]); + let token_map = parse_token_list(&token_list); let stdin = std::io::stdin(); let lock = stdin.lock(); let lines = lock.lines(); let antlr_tokens = lines.map(|l| parse_antlr_token(l.unwrap().trim(), &token_map, - &surrogate_pairs_pos[..], + &surrogate_pairs_pos, has_bom)); for antlr_tok in antlr_tokens { diff --git a/src/libcollections/linked_list.rs b/src/libcollections/linked_list.rs index f58c87b801f..8f0488f6936 100644 --- a/src/libcollections/linked_list.rs +++ b/src/libcollections/linked_list.rs @@ -1376,7 +1376,7 @@ mod tests { thread::spawn(move || { check_links(&n); let a: &[_] = &[&1, &2, &3]; - assert_eq!(a, &n.iter().collect::>()[..]); + assert_eq!(a, &*n.iter().collect::>()); }) .join() .ok() diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 8e587ad211d..1b2c7775185 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -554,7 +554,7 @@ impl<'a> LabelText<'a> { pub fn to_dot_string(&self) -> String { match self { &LabelStr(ref s) => format!("\"{}\"", s.escape_default()), - &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s[..])), + &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)), &HtmlStr(ref s) => format!("<{}>", s), } } @@ -587,7 +587,7 @@ impl<'a> LabelText<'a> { let mut prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(&suffix[..]); + prefix.push_str(&suffix); EscStr(prefix.into_cow()) } } @@ -878,7 +878,7 @@ mod tests { type Node = Node; type Edge = &'a Edge; fn graph_id(&'a self) -> Id<'a> { - Id::new(&self.name[..]).unwrap() + Id::new(self.name).unwrap() } fn node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) diff --git a/src/librustc/ich/fingerprint.rs b/src/librustc/ich/fingerprint.rs index d296d8293fb..e760f7efc93 100644 --- a/src/librustc/ich/fingerprint.rs +++ b/src/librustc/ich/fingerprint.rs @@ -55,7 +55,7 @@ impl Fingerprint { impl Encodable for Fingerprint { #[inline] fn encode(&self, s: &mut S) -> Result<(), S::Error> { - for &byte in &self.0[..] { + for &byte in &self.0 { s.emit_u8(byte)?; } Ok(()) @@ -66,7 +66,7 @@ impl Decodable for Fingerprint { #[inline] fn decode(d: &mut D) -> Result { let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]); - for byte in &mut result.0[..] 
{ + for byte in &mut result.0 { *byte = d.read_u8()?; } Ok(result) diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index d35f965e2ff..20bf241a999 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -40,7 +40,6 @@ use std::cmp; use std::default::Default as StdDefault; use std::mem; use std::fmt; -use std::ops::Deref; use syntax::attr; use syntax::ast; use syntax::symbol::Symbol; @@ -485,7 +484,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session, Allow => bug!("earlier conditional return should handle Allow case") }; let hyphen_case_lint_name = name.replace("_", "-"); - if lint_flag_val.as_str().deref() == name { + if lint_flag_val.as_str() == name { err.note(&format!("requested on the command line with `{} {}`", flag, hyphen_case_lint_name)); } else { @@ -496,7 +495,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session, }, Node(lint_attr_name, src) => { def = Some(src); - if lint_attr_name.as_str().deref() != name { + if lint_attr_name.as_str() != name { let level_str = level.as_str(); err.note(&format!("#[{}({})] implied by #[{}({})]", level_str, name, level_str, lint_attr_name)); diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 4115b4669f4..4354ed6817a 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -536,7 +536,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if !self.stability.borrow().active_features.contains(feature) { let msg = match *reason { Some(ref r) => format!("use of unstable library feature '{}': {}", - &feature.as_str(), &r), + feature.as_str(), &r), None => format!("use of unstable library feature '{}'", &feature) }; emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span, diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index c0f681680a9..b728d4d5345 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -267,11 +267,11 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {:?}", path_lps(&moved[..])); + debug!("fragments 1 moved: {:?}", path_lps(&moved)); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..])); + debug!("fragments 1 assigned: {:?}", path_lps(&assigned)); // Second, build parents from the moved and assigned. for m in &moved { @@ -291,14 +291,14 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {:?}", path_lps(&parents[..])); + debug!("fragments 2 parents: {:?}", path_lps(&parents)); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, &parents[..])); - debug!("fragments 3 moved: {:?}", path_lps(&moved[..])); + moved.retain(|f| non_member(*f, &parents)); + debug!("fragments 3 moved: {:?}", path_lps(&moved)); - assigned.retain(|f| non_member(*f, &parents[..])); - debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..])); + assigned.retain(|f| non_member(*f, &parents)); + debug!("fragments 3 assigned: {:?}", path_lps(&assigned)); // Fourth, build the leftover from the moved, assigned, and parents. 
for m in &moved { @@ -316,16 +316,16 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..])); + debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved)); // Fifth, filter the leftover fragments down to its core. unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, &parents[..]) && - non_member(mpi, &moved[..]) && - non_member(mpi, &assigned[..]) + Just(mpi) => non_member(mpi, &parents) && + non_member(mpi, &moved) && + non_member(mpi, &assigned) }); - debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..])); + debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved)); // Swap contents back in. fragments.unmoved_fragments = unmoved; diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 20d495976b0..59c3e68aada 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -112,7 +112,7 @@ fn borrowck_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body_id: hir::BodyId) { &flowed_moves.move_data, owner_id); - check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans[..], body); + check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body); } fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>, diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index 0da9525efd8..e3a2bfa3927 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -88,7 +88,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp); - set.push_str(&loan_str[..]); + set.push_str(&loan_str); saw_some = true; true }); diff --git a/src/librustc_const_eval/_match.rs b/src/librustc_const_eval/_match.rs index 53a7e872928..c1dc5f5f7a2 100644 --- a/src/librustc_const_eval/_match.rs +++ b/src/librustc_const_eval/_match.rs @@ -680,10 +680,10 @@ fn is_useful_specialized<'p, 'a:'p, 'tcx: 'a>( }).collect(); let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect(); let matrix = Matrix(m.iter().flat_map(|r| { - specialize(cx, &r[..], &ctor, &wild_patterns) + specialize(cx, &r, &ctor, &wild_patterns) }).collect()); match specialize(cx, v, &ctor, &wild_patterns) { - Some(v) => match is_useful(cx, &matrix, &v[..], witness) { + Some(v) => match is_useful(cx, &matrix, &v, witness) { UsefulWithWitness(witnesses) => UsefulWithWitness( witnesses.into_iter() .map(|witness| witness.apply_constructor(cx, &ctor, lty)) diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs index e2b9f174ff0..9d55281d019 100644 --- a/src/librustc_const_eval/check_match.rs +++ b/src/librustc_const_eval/check_match.rs @@ -311,7 +311,7 @@ fn check_arms<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>, for &(pat, hir_pat) in pats { let v = vec![pat]; - match is_useful(cx, &seen, &v[..], LeaveOutWitness) { + match is_useful(cx, &seen, &v, LeaveOutWitness) { NotUseful => { match source { hir::MatchSource::IfLetDesugar { .. 
} => { diff --git a/src/librustc_data_structures/accumulate_vec.rs b/src/librustc_data_structures/accumulate_vec.rs index d4bd9e707fd..c03c2890ba3 100644 --- a/src/librustc_data_structures/accumulate_vec.rs +++ b/src/librustc_data_structures/accumulate_vec.rs @@ -91,8 +91,8 @@ impl Deref for AccumulateVec { type Target = [A::Element]; fn deref(&self) -> &Self::Target { match *self { - AccumulateVec::Array(ref v) => &v[..], - AccumulateVec::Heap(ref v) => &v[..], + AccumulateVec::Array(ref v) => v, + AccumulateVec::Heap(ref v) => v, } } } @@ -100,8 +100,8 @@ impl Deref for AccumulateVec { impl DerefMut for AccumulateVec { fn deref_mut(&mut self) -> &mut [A::Element] { match *self { - AccumulateVec::Array(ref mut v) => &mut v[..], - AccumulateVec::Heap(ref mut v) => &mut v[..], + AccumulateVec::Array(ref mut v) => v, + AccumulateVec::Heap(ref mut v) => v, } } } diff --git a/src/librustc_data_structures/base_n.rs b/src/librustc_data_structures/base_n.rs index 4359581a897..cf54229fa7f 100644 --- a/src/librustc_data_structures/base_n.rs +++ b/src/librustc_data_structures/base_n.rs @@ -48,7 +48,7 @@ pub fn encode(n: u64, base: u64) -> String { #[test] fn test_encode() { fn test(n: u64, base: u64) { - assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base)[..], base as u32)); + assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base), base as u32)); } for base in 2..37 { diff --git a/src/librustc_data_structures/blake2b.rs b/src/librustc_data_structures/blake2b.rs index 31492e26219..9d97a83f693 100644 --- a/src/librustc_data_structures/blake2b.rs +++ b/src/librustc_data_structures/blake2b.rs @@ -35,7 +35,7 @@ pub struct Blake2bCtx { impl ::std::fmt::Debug for Blake2bCtx { fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { try!(write!(fmt, "hash: ")); - for v in &self.h[..] { + for v in &self.h { try!(write!(fmt, "{:x}", v)); } Ok(()) diff --git a/src/librustc_data_structures/indexed_set.rs b/src/librustc_data_structures/indexed_set.rs index 2e9e054e97e..572ce98d3ae 100644 --- a/src/librustc_data_structures/indexed_set.rs +++ b/src/librustc_data_structures/indexed_set.rs @@ -91,13 +91,13 @@ impl IdxSet { impl Deref for IdxSetBuf { type Target = IdxSet; fn deref(&self) -> &IdxSet { - unsafe { IdxSet::from_slice(&self.bits[..]) } + unsafe { IdxSet::from_slice(&self.bits) } } } impl DerefMut for IdxSetBuf { fn deref_mut(&mut self) -> &mut IdxSet { - unsafe { IdxSet::from_slice_mut(&mut self.bits[..]) } + unsafe { IdxSet::from_slice_mut(&mut self.bits) } } } @@ -135,11 +135,11 @@ impl IdxSet { } pub fn words(&self) -> &[Word] { - &self.bits[..] + &self.bits } pub fn words_mut(&mut self) -> &mut [Word] { - &mut self.bits[..] + &mut self.bits } pub fn clone_from(&mut self, other: &IdxSet) { diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 68b9f85721a..e11118901d2 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -233,7 +233,7 @@ fn make_output(matches: &getopts::Matches) -> (Option, Option) // Extract input (string or file and optional path) from matches. 
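These hunks, together with the `symbol.rs` change in the previous commit (which adds, roughly, `impl<T: Deref<Target = str>> PartialEq<T>` for `Symbol` and `InternedString`), are all the same cleanup: where a deref or unsize coercion already yields the `&str` or `&[T]` the callee wants, explicit `&x[..]` slicing and `.deref()` calls are redundant. A small self-contained illustration, with a made-up `Interned` type standing in for `InternedString`:

```rust
use std::ops::Deref;

// Made-up stand-in for InternedString: a newtype that derefs to str.
struct Interned(&'static str);

impl Deref for Interned {
    type Target = str;
    fn deref(&self) -> &str {
        self.0
    }
}

// Roughly the shape of the new blanket impl: one impl covers comparisons
// against &str, String, and any other Deref<Target = str> type.
impl<T: Deref<Target = str>> PartialEq<T> for Interned {
    fn eq(&self, other: &T) -> bool {
        self.0 == other.deref()
    }
}

fn takes_slice(_: &[String]) {}
fn takes_str(_: &str) {}

fn main() {
    let name = Interned("crate2");
    // No `.deref()` or `[..]` needed at comparison sites any more.
    assert!(name == "crate2");
    assert!(name == String::from("crate2"));

    // Same idea behind replacing `&v[..]` / `&s[..]` with plain `&v` / `&s`:
    // the coercion to the expected slice type happens automatically.
    let v = vec![String::from("a")];
    takes_slice(&v);  // &Vec<String> coerces to &[String]
    takes_str(&v[0]); // &String coerces to &str
}
```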
fn make_input(free_matches: &[String]) -> Option<(Input, Option)> { if free_matches.len() == 1 { - let ifile = &free_matches[0][..]; + let ifile = &free_matches[0]; if ifile == "-" { let mut src = String::new(); io::stdin().read_to_string(&mut src).unwrap(); @@ -800,7 +800,7 @@ Available lint options: for lint in lints { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(&name[..]), + padded(&name), lint.default_level.as_str(), lint.desc); } @@ -838,7 +838,7 @@ Available lint options: .map(|x| x.to_string().replace("_", "-")) .collect::>() .join(", "); - println!(" {} {}", padded(&name[..]), desc); + println!(" {} {}", padded(&name), desc); } println!("\n"); }; @@ -945,7 +945,7 @@ pub fn handle_options(args: &[String]) -> Option { .into_iter() .map(|x| x.opt_group) .collect(); - let matches = match getopts::getopts(&args[..], &all_groups) { + let matches = match getopts::getopts(&args, &all_groups) { Ok(m) => m, Err(f) => early_error(ErrorOutputType::default(), &f.to_string()), }; @@ -1084,7 +1084,7 @@ pub fn monitor(f: F) { format!("we would appreciate a bug report: {}", BUG_REPORT_URL)]; for note in &xs { handler.emit(&MultiSpan::new(), - ¬e[..], + ¬e, errors::Level::Note); } if match env::var_os("RUST_BACKTRACE") { diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 6cd97e95598..18dc504ca8a 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -589,7 +589,7 @@ impl UserIdentifiedItem { -> NodesMatchingUII<'a, 'hir> { match *self { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), - ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])), + ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts)), } } @@ -600,7 +600,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(&message[..]) + sess.fatal(&message) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -771,7 +771,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, fn expand_err_details(r: io::Result<()>) -> io::Result<()> { r.map_err(|ioerr| { io::Error::new(io::ErrorKind::Other, - &format!("graphviz::render failed: {}", ioerr)[..]) + format!("graphviz::render failed: {}", ioerr)) }) } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 9568cc3d6de..af2416f787e 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -289,7 +289,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { pub fn t_param(&self, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - self.infcx.tcx.mk_param(index, Symbol::intern(&name[..])) + self.infcx.tcx.mk_param(index, Symbol::intern(&name)) } pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region { diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs index b67caa6750a..5c20f65274f 100644 --- a/src/librustc_incremental/persist/file_format.rs +++ b/src/librustc_incremental/persist/file_format.rs @@ -99,9 +99,9 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result>> { let rustc_version_str_len = rustc_version_str_len[0] as usize; let mut buffer = Vec::with_capacity(rustc_version_str_len); buffer.resize(rustc_version_str_len, 0); - file.read_exact(&mut buffer[..])?; + file.read_exact(&mut buffer)?; - if &buffer[..] 
!= rustc_version().as_bytes() { + if buffer != rustc_version().as_bytes() { report_format_mismatch(sess, path, "Different compiler version"); return Ok(None); } diff --git a/src/librustc_lint/bad_style.rs b/src/librustc_lint/bad_style.rs index 353b86820c4..c4220e9a0d3 100644 --- a/src/librustc_lint/bad_style.rs +++ b/src/librustc_lint/bad_style.rs @@ -88,7 +88,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, name, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m); } } } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index f0276f90f27..0ee9d4a42c7 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -334,7 +334,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::list_contains_name(&l[..], "hidden"), + Some(l) => attr::list_contains_name(&l, "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index abba8afd9da..86bf209ccf8 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -146,7 +146,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedResults { ty::TyBool => return, ty::TyAdt(def, _) => { let attrs = cx.tcx.get_attrs(def.did); - check_must_use(cx, &attrs[..], s.span) + check_must_use(cx, &attrs, s.span) } _ => false, }; diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs index 42717ec289c..2b945e0a3af 100644 --- a/src/librustc_llvm/build.rs +++ b/src/librustc_llvm/build.rs @@ -140,7 +140,7 @@ fn main() { cfg.flag(flag); } - for component in &components[..] { + for component in &components { let mut flag = String::from("-DLLVM_COMPONENT_"); flag.push_str(&component.to_uppercase()); cfg.flag(&flag); @@ -173,7 +173,7 @@ fn main() { if !is_crossed { cmd.arg("--system-libs"); } - cmd.args(&components[..]); + cmd.args(&components); for lib in output(&mut cmd).split_whitespace() { let name = if lib.starts_with("-l") { diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index e1255110a83..04a8b88f8a5 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -669,7 +669,7 @@ impl<'a> CrateLoader<'a> { name, config::host_triple(), self.sess.opts.target_triple); - span_fatal!(self.sess, span, E0456, "{}", &message[..]); + span_fatal!(self.sess, span, E0456, "{}", &message); } let root = ekrate.metadata.get_root(); diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index a324c166e73..1370d69f904 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -918,14 +918,14 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { self.encode_fields(def_id); } hir::ItemImpl(..) => { - for &trait_item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] { + for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() { self.record(trait_item_def_id, EncodeContext::encode_info_for_impl_item, trait_item_def_id); } } hir::ItemTrait(..) => { - for &item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] 
{ + for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() { self.record(item_def_id, EncodeContext::encode_info_for_trait_item, item_def_id); diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index a6771083fc3..e8bc8b01652 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -477,15 +477,15 @@ impl<'a> Context<'a> { Some(file) => file, }; let (hash, found_kind) = - if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") { + if file.starts_with(&rlib_prefix) && file.ends_with(".rlib") { (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib) - } else if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rmeta") { + } else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") { (&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta) } else if file.starts_with(&dylib_prefix) && file.ends_with(&dypair.1) { (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib) } else { - if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) { + if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) { staticlibs.push(CrateMismatch { path: path.to_path_buf(), got: "static".to_string(), diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index 1bfc445fca9..efe9963cecc 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -126,19 +126,19 @@ impl<'a> PluginLoader<'a> { // inside this crate, so continue would spew "macro undefined" // errors Err(err) => { - self.sess.span_fatal(span, &err[..]) + self.sess.span_fatal(span, &err) } }; unsafe { let registrar = - match lib.symbol(&symbol[..]) { + match lib.symbol(&symbol) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros Err(err) => { - self.sess.span_fatal(span, &err[..]) + self.sess.span_fatal(span, &err) } }; diff --git a/src/librustc_save_analysis/csv_dumper.rs b/src/librustc_save_analysis/csv_dumper.rs index 59340ae87ee..4bab135ff12 100644 --- a/src/librustc_save_analysis/csv_dumper.rs +++ b/src/librustc_save_analysis/csv_dumper.rs @@ -423,7 +423,7 @@ fn make_values_str(pairs: &[(&'static str, &str)]) -> String { let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from(v)))); strs.fold(String::new(), |mut s, ss| { - s.push_str(&ss[..]); + s.push_str(&ss); s }) } diff --git a/src/librustc_trans/abi.rs b/src/librustc_trans/abi.rs index 27a19d211c2..1530708b4b8 100644 --- a/src/librustc_trans/abi.rs +++ b/src/librustc_trans/abi.rs @@ -369,7 +369,7 @@ impl FnType { match sig.inputs().last().unwrap().sty { ty::TyTuple(ref tupled_arguments, _) => { inputs = &sig.inputs()[0..sig.inputs().len() - 1]; - &tupled_arguments[..] 
+ &tupled_arguments } _ => { bug!("argument to function with \"rust-call\" ABI \ diff --git a/src/librustc_trans/adt.rs b/src/librustc_trans/adt.rs index 058f37f62dd..5c1ced57340 100644 --- a/src/librustc_trans/adt.rs +++ b/src/librustc_trans/adt.rs @@ -229,11 +229,11 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, variant_fill].iter().cloned().collect(); match name { None => { - Type::struct_(cx, &fields[..], false) + Type::struct_(cx, &fields, false) } Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(&fields[..], false); + llty.set_struct_body(&fields, false); llty } } @@ -330,7 +330,7 @@ fn struct_wrapped_nullable_bitdiscr( alignment: Alignment, ) -> ValueRef { let llptrptr = bcx.gepi(scrutinee, - &discrfield.iter().map(|f| *f as usize).collect::>()[..]); + &discrfield.iter().map(|f| *f as usize).collect::>()); let llptr = bcx.load(llptrptr, alignment.to_align()); let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; bcx.icmp(cmp, llptr, C_null(val_ty(llptr))) @@ -402,7 +402,7 @@ pub fn trans_set_discr<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, val: Valu base::call_memset(bcx, llptr, fill_byte, size, align, false); } else { let path = discrfield.iter().map(|&i| i as usize).collect::>(); - let llptrptr = bcx.gepi(val, &path[..]); + let llptrptr = bcx.gepi(val, &path); let llptrty = val_ty(llptrptr).element_type(); bcx.store(C_null(llptrty), llptrptr, None); } diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index 12e4e57964f..b6195765b27 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -77,14 +77,14 @@ pub fn trans_inline_asm<'a, 'tcx>( .chain(arch_clobbers.iter().map(|s| s.to_string())) .collect::>().join(","); - debug!("Asm Constraints: {}", &all_constraints[..]); + debug!("Asm Constraints: {}", &all_constraints); // Depending on how many outputs we have, the return type is different let num_outputs = output_types.len(); let output_type = match num_outputs { 0 => Type::void(bcx.ccx), 1 => output_types[0], - _ => Type::struct_(bcx.ccx, &output_types[..], false) + _ => Type::struct_(bcx.ccx, &output_types, false) }; let dialect = match ia.dialect { diff --git a/src/librustc_trans/back/archive.rs b/src/librustc_trans/back/archive.rs index 11ab6dcaa87..0f908b7d069 100644 --- a/src/librustc_trans/back/archive.rs +++ b/src/librustc_trans/back/archive.rs @@ -65,10 +65,10 @@ pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session) for path in search_paths { debug!("looking for {} inside {:?}", name, path); - let test = path.join(&oslibname[..]); + let test = path.join(&oslibname); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(&unixlibname[..]); + let test = path.join(&unixlibname); if test.exists() { return test } } } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index cf1e10b317b..6d17b2f0eed 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -91,7 +91,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |s: String, span: Option| { - cstore::validate_crate_name(sess, &s[..], span); + cstore::validate_crate_name(sess, &s, span); s }; @@ -109,7 +109,7 @@ pub fn find_crate_name(sess: Option<&Session>, let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, &msg[..]); + sess.span_err(attr.span, &msg); } } return validate(s.clone(), None); @@ 
-417,7 +417,7 @@ fn object_filenames(trans: &CrateTranslation, outputs: &OutputFilenames) -> Vec { trans.modules.iter().map(|module| { - outputs.temp_path(OutputType::Object, Some(&module.name[..])) + outputs.temp_path(OutputType::Object, Some(&module.name)) }).collect() } @@ -551,7 +551,7 @@ fn link_rlib<'a>(sess: &'a Session, e)) } - let bc_data_deflated = flate::deflate_bytes(&bc_data[..]); + let bc_data_deflated = flate::deflate_bytes(&bc_data); let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, @@ -819,12 +819,12 @@ fn link_natively(sess: &Session, pname, prog.status)) .note(&format!("{:?}", &cmd)) - .note(&escape_string(&output[..])) + .note(&escape_string(&output)) .emit(); sess.abort_if_errors(); } - info!("linker stderr:\n{}", escape_string(&prog.stderr[..])); - info!("linker stdout:\n{}", escape_string(&prog.stdout[..])); + info!("linker stderr:\n{}", escape_string(&prog.stderr)); + info!("linker stdout:\n{}", escape_string(&prog.stdout)); }, Err(e) => { sess.struct_err(&format!("could not exec the linker `{}`: {}", pname, e)) diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index 0ef3f351a2a..e23ddd2542a 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -61,7 +61,7 @@ pub fn run(sess: &session::Session, } let export_threshold = - symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]); + symbol_export::crates_export_threshold(&sess.crate_types.borrow()); let symbol_filter = &|&(ref name, level): &(String, _)| { if symbol_export::is_below_threshold(level, export_threshold) { @@ -147,7 +147,7 @@ pub fn run(sess: &session::Session, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic(), format!("failed to load bc of `{}`", - &name[..])); + name)); } }); } diff --git a/src/librustc_trans/back/rpath.rs b/src/librustc_trans/back/rpath.rs index 9c982be3fa0..104e7bc6a52 100644 --- a/src/librustc_trans/back/rpath.rs +++ b/src/librustc_trans/back/rpath.rs @@ -37,8 +37,8 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec { let libs = config.used_crates.clone(); let libs = libs.into_iter().filter_map(|(_, l)| l.option()).collect::>(); - let rpaths = get_rpaths(config, &libs[..]); - flags.extend_from_slice(&rpaths_to_flags(&rpaths[..])); + let rpaths = get_rpaths(config, &libs); + flags.extend_from_slice(&rpaths_to_flags(&rpaths)); // Use DT_RUNPATH instead of DT_RPATH if available if config.linker_is_gnu { @@ -84,14 +84,14 @@ fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec { } } - log_rpaths("relative", &rel_rpaths[..]); - log_rpaths("fallback", &fallback_rpaths[..]); + log_rpaths("relative", &rel_rpaths); + log_rpaths("fallback", &fallback_rpaths); let mut rpaths = rel_rpaths; - rpaths.extend_from_slice(&fallback_rpaths[..]); + rpaths.extend_from_slice(&fallback_rpaths); // Remove duplicates - let rpaths = minimize_rpaths(&rpaths[..]); + let rpaths = minimize_rpaths(&rpaths); return rpaths; } @@ -177,7 +177,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths { - if set.insert(&rpath[..]) { + if set.insert(rpath) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_trans/back/symbol_export.rs b/src/librustc_trans/back/symbol_export.rs index 005fb3533ab..23a67ef5046 100644 --- a/src/librustc_trans/back/symbol_export.rs +++ b/src/librustc_trans/back/symbol_export.rs @@ -154,7 +154,7 @@ impl ExportedSymbols { cnum: CrateNum) -> &[(String, 
SymbolExportLevel)] { match self.exports.get(&cnum) { - Some(exports) => &exports[..], + Some(exports) => exports, None => &[] } } @@ -167,7 +167,7 @@ impl ExportedSymbols { { for &(ref name, export_level) in self.exported_symbols(cnum) { if is_below_threshold(export_level, export_threshold) { - f(&name[..], export_level) + f(&name, export_level) } } } diff --git a/src/librustc_trans/back/symbol_names.rs b/src/librustc_trans/back/symbol_names.rs index 518995dfedc..3ad04e10cb0 100644 --- a/src/librustc_trans/back/symbol_names.rs +++ b/src/librustc_trans/back/symbol_names.rs @@ -341,7 +341,7 @@ pub fn sanitize(s: &str) -> String { if !result.is_empty() && result.as_bytes()[0] != '_' as u8 && ! (result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", &result[..]); + return format!("_{}", result); } return result; diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 377ff34cb7e..5a017e4fb8a 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -105,7 +105,7 @@ impl SharedEmitter { Some(ref code) => { handler.emit_with_code(&MultiSpan::new(), &diag.msg, - &code[..], + &code, diag.lvl); }, None => { @@ -189,8 +189,8 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => &s[..], - None => &sess.target.target.options.code_model[..], + Some(ref s) => &s, + None => &sess.target.target.options.code_model, }; let code_model = match CODE_GEN_MODEL_ARGS.iter().find( @@ -397,7 +397,7 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef, let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s)) .expect("non-UTF8 SMDiagnostic"); - report_inline_asm(cgcx, &msg[..], cookie); + report_inline_asm(cgcx, &msg, cookie); } unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) { @@ -823,7 +823,7 @@ pub fn run_passes(sess: &Session, if trans.modules.len() == 1 { // 1) Only one codegen unit. In this case it's no difficulty // to copy `foo.0.x` to `foo.x`. 
- let module_name = Some(&(trans.modules[0].name)[..]); + let module_name = Some(&trans.modules[0].name[..]); let path = crate_output.temp_path(output_type, module_name); copy_gracefully(&path, &crate_output.path(output_type)); @@ -939,7 +939,7 @@ pub fn run_passes(sess: &Session, if metadata_config.emit_bc && !user_wants_bitcode { let path = crate_output.temp_path(OutputType::Bitcode, - Some(&trans.metadata_module.name[..])); + Some(&trans.metadata_module.name)); remove(sess, &path); } } diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index f7ca468fdda..ec45c559363 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -514,7 +514,7 @@ pub fn call_memcpy<'a, 'tcx>(b: &Builder<'a, 'tcx>, n_bytes: ValueRef, align: u32) { let ccx = b.ccx; - let ptr_width = &ccx.sess().target.target.target_pointer_width[..]; + let ptr_width = &ccx.sess().target.target.target_pointer_width; let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width); let memcpy = ccx.get_intrinsic(&key); let src_ptr = b.pointercast(src, Type::i8p(ccx)); @@ -550,7 +550,7 @@ pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>, size: ValueRef, align: ValueRef, volatile: bool) -> ValueRef { - let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..]; + let ptr_width = &b.ccx.sess().target.target.target_pointer_width; let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width); let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key); let volatile = C_bool(b.ccx, volatile); @@ -765,7 +765,7 @@ fn write_metadata(cx: &SharedCrateContext, let mut compressed = cstore.metadata_encoding_version().to_vec(); compressed.extend_from_slice(&flate::deflate_bytes(&metadata)); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); let name = cx.metadata_symbol_name(); let buf = CString::new(name).unwrap(); @@ -796,7 +796,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session, symbol_map: &SymbolMap<'tcx>, exported_symbols: &ExportedSymbols) { let export_threshold = - symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]); + symbol_export::crates_export_threshold(&sess.crate_types.borrow()); let exported_symbols = exported_symbols .exported_symbols(LOCAL_CRATE) @@ -1035,7 +1035,7 @@ pub fn find_exported_symbols(tcx: TyCtxt, reachable: NodeSet) -> NodeSet { (generics.parent_types == 0 && generics.types.is_empty()) && // Functions marked with #[inline] are only ever translated // with "internal" linkage and are never exported. 
- !attr::requests_inline(&attributes[..]) + !attr::requests_inline(&attributes) } _ => false @@ -1574,7 +1574,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a cgus.dedup(); for &(ref cgu_name, linkage) in cgus.iter() { output.push_str(" "); - output.push_str(&cgu_name[..]); + output.push_str(&cgu_name); let linkage_abbrev = match linkage { llvm::Linkage::ExternalLinkage => "External", diff --git a/src/librustc_trans/builder.rs b/src/librustc_trans/builder.rs index a62f07042a7..8b1010d89fd 100644 --- a/src/librustc_trans/builder.rs +++ b/src/librustc_trans/builder.rs @@ -627,7 +627,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, &v[..]) + self.inbounds_gep(base, &v) } } @@ -835,8 +835,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", &s[..]); - self.add_comment(&s[..]); + debug!("{}", s); + self.add_comment(&s); } } diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 5c94032c6b9..612e765a499 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -2611,7 +2611,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option { if attr.is_word() { Some(format!("{}", name)) } else if let Some(v) = attr.value_str() { - Some(format!("{} = {:?}", name, &v.as_str()[..])) + Some(format!("{} = {:?}", name, v.as_str())) } else if let Some(values) = attr.meta_item_list() { let display: Vec<_> = values.iter().filter_map(|attr| { attr.meta_item().and_then(|mi| render_attribute(mi)) @@ -2642,7 +2642,7 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result { for attr in &it.attrs.other_attrs { let name = attr.name().unwrap(); - if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) { + if !ATTRIBUTE_WHITELIST.contains(&&*name.as_str()) { continue; } if let Some(s) = render_attribute(&attr.meta().unwrap()) { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 021c5398a42..66f5520b882 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -119,7 +119,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, }; let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false); p.root_module_name = cx.current_expansion.module.mod_path.last() - .map(|id| (*id.name.as_str()).to_owned()); + .map(|id| id.name.as_str().to_string()); p.check_unknown_macro_variable(); // Let the context choose how to interpret the result. diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 7af432176cf..9d280a413e6 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -818,7 +818,7 @@ pub struct GatedCfg { impl GatedCfg { pub fn gate(cfg: &ast::MetaItem) -> Option { - let name = &*cfg.name().as_str(); + let name = cfg.name().as_str(); GATED_CFGS.iter() .position(|info| info.0 == name) .map(|idx| { @@ -865,8 +865,7 @@ macro_rules! 
gate_feature { impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); - let name = unwrap_or!(attr.name(), return); - + let name = unwrap_or!(attr.name(), return).as_str(); for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { if name == n { if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { @@ -885,12 +884,12 @@ impl<'a> Context<'a> { return; } } - if name.as_str().starts_with("rustc_") { + if name.starts_with("rustc_") { gate_feature!(self, rustc_attrs, attr.span, "unless otherwise specified, attributes \ with the prefix `rustc_` \ are reserved for internal compiler diagnostics"); - } else if name.as_str().starts_with("derive_") { + } else if name.starts_with("derive_") { gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE); } else if !attr::is_known(attr) { // Only run the custom attribute lint during regular diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 649e9059934..43a9d8c5f78 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5151,15 +5151,15 @@ impl<'a> Parser<'a> { fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) { if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") { - self.directory.path.push(&*path.as_str()); + self.directory.path.push(&path.as_str()); self.directory.ownership = DirectoryOwnership::Owned; } else { - self.directory.path.push(&*id.name.as_str()); + self.directory.path.push(&id.name.as_str()); } } pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option { - attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str())) + attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&d.as_str())) } /// Returns either a path to a module, or . diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index e052d2cda3a..6fb6db9ca02 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -616,7 +616,7 @@ fn mk_tests(cx: &TestCtxt) -> P { fn is_test_crate(krate: &ast::Crate) -> bool { match attr::find_crate_name(&krate.attrs) { - Some(s) if "test" == &*s.as_str() => true, + Some(s) if "test" == s.as_str() => true, _ => false } } -- cgit 1.4.1-3-g733a5 From 1979f96549fc41b544d2bf05eb868f26941f2b25 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Thu, 16 Mar 2017 10:23:33 +0000 Subject: Move `syntax::ext::hygiene` to `syntax_pos::hygiene`. 
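
This turns the placeholder-id conversion around: `Mark::from_placeholder_id` and
`Mark::as_placeholder_id` forced the hygiene module to know about `ast::NodeId`,
which is not available from `syntax_pos`, so the constructors now live on
`NodeId` itself (`placeholder_from_mark` / `placeholder_to_mark`) while `Mark`
only keeps raw `from_u32` / `as_u32` accessors. A minimal sketch of the new call
shape, using only the methods introduced below (the helper name is illustrative;
assumes the in-tree `syntax` and `syntax_pos` crates):

    use syntax::ast::NodeId;
    use syntax_pos::hygiene::Mark; // re-exported as syntax::ext::hygiene

    // Placeholder NodeIds and Marks share the same u32 space, so the two
    // NodeId constructors are inverses of each other.
    fn placeholder_roundtrip(mark: Mark) -> Mark {
        let id = NodeId::placeholder_from_mark(mark);
        id.placeholder_to_mark()
    }
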
--- src/librustc/hir/map/def_collector.rs | 2 +- src/librustc_resolve/build_reduced_graph.rs | 2 +- src/libsyntax/ast.rs | 10 ++- src/libsyntax/ext/expand.rs | 6 +- src/libsyntax/ext/hygiene.rs | 127 ---------------------------- src/libsyntax/ext/placeholders.rs | 4 +- src/libsyntax/lib.rs | 2 +- src/libsyntax_pos/hygiene.rs | 122 ++++++++++++++++++++++++++ src/libsyntax_pos/lib.rs | 3 + 9 files changed, 142 insertions(+), 136 deletions(-) delete mode 100644 src/libsyntax/ext/hygiene.rs create mode 100644 src/libsyntax_pos/hygiene.rs (limited to 'src/libsyntax') diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index cae358a303e..afdb9059ea7 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -92,7 +92,7 @@ impl<'a> DefCollector<'a> { fn visit_macro_invoc(&mut self, id: NodeId, const_expr: bool) { if let Some(ref mut visit) = self.visit_macro_invoc { visit(MacroInvocationData { - mark: Mark::from_placeholder_id(id), + mark: id.placeholder_to_mark(), const_expr: const_expr, def_index: self.parent_def.unwrap(), }) diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 86e0d0039d1..a15431afc16 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -680,7 +680,7 @@ pub struct BuildReducedGraphVisitor<'a, 'b: 'a> { impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { fn visit_invoc(&mut self, id: ast::NodeId) -> &'b InvocationData<'b> { - let mark = Mark::from_placeholder_id(id); + let mark = id.placeholder_to_mark(); self.resolver.current_module.unresolved_invocations.borrow_mut().insert(mark); let invocation = self.resolver.invocations[&mark]; invocation.module.set(self.resolver.current_module); diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3dd4bdbd14d..7e2b225193f 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -20,7 +20,7 @@ pub use util::ThinVec; use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP, ExpnId}; use codemap::{respan, Spanned}; use abi::Abi; -use ext::hygiene::SyntaxContext; +use ext::hygiene::{Mark, SyntaxContext}; use print::pprust; use ptr::P; use rustc_data_structures::indexed_vec; @@ -256,6 +256,14 @@ impl NodeId { pub fn as_u32(&self) -> u32 { self.0 } + + pub fn placeholder_from_mark(mark: Mark) -> Self { + NodeId(mark.as_u32()) + } + + pub fn placeholder_to_mark(self) -> Mark { + Mark::from_u32(self.0) + } } impl fmt::Display for NodeId { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6abeb4b0b28..e258c51a329 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast::{self, Block, Ident, PatKind, Path}; +use ast::{self, Block, Ident, NodeId, PatKind, Path}; use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; @@ -321,7 +321,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { while let Some(expansions) = expansions.pop() { for (mark, expansion) in expansions.into_iter().rev() { let derives = derives.remove(&mark).unwrap_or_else(Vec::new); - placeholder_expander.add(mark.as_placeholder_id(), expansion, derives); + placeholder_expander.add(NodeId::placeholder_from_mark(mark), expansion, derives); } } @@ -703,7 +703,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { ..self.cx.current_expansion.clone() }, }); - placeholder(expansion_kind, mark.as_placeholder_id()) + placeholder(expansion_kind, NodeId::placeholder_from_mark(mark)) } fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: ExpansionKind) -> Expansion { diff --git a/src/libsyntax/ext/hygiene.rs b/src/libsyntax/ext/hygiene.rs deleted file mode 100644 index 57f5ab73d37..00000000000 --- a/src/libsyntax/ext/hygiene.rs +++ /dev/null @@ -1,127 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Machinery for hygienic macros, inspired by the MTWT[1] paper. -//! -//! [1] Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. -//! 2012. *Macros that work together: Compile-time bindings, partial expansion, -//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. -//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 - -use ast::NodeId; -use std::cell::RefCell; -use std::collections::HashMap; -use std::fmt; - -/// A SyntaxContext represents a chain of macro expansions (represented by marks). -#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Default)] -pub struct SyntaxContext(u32); - -#[derive(Copy, Clone)] -pub struct SyntaxContextData { - pub outer_mark: Mark, - pub prev_ctxt: SyntaxContext, -} - -/// A mark is a unique id associated with a macro expansion. -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default, RustcEncodable, RustcDecodable)] -pub struct Mark(u32); - -impl Mark { - pub fn fresh() -> Self { - HygieneData::with(|data| { - let next_mark = Mark(data.next_mark.0 + 1); - ::std::mem::replace(&mut data.next_mark, next_mark) - }) - } - - /// The mark of the theoretical expansion that generates freshly parsed, unexpanded AST. - pub fn root() -> Self { - Mark(0) - } - - pub fn from_placeholder_id(id: NodeId) -> Self { - Mark(id.as_u32()) - } - - pub fn as_placeholder_id(self) -> NodeId { - NodeId::from_u32(self.0) - } - - pub fn as_u32(self) -> u32 { - self.0 - } -} - -struct HygieneData { - syntax_contexts: Vec, - markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, - next_mark: Mark, -} - -impl HygieneData { - fn new() -> Self { - HygieneData { - syntax_contexts: vec![SyntaxContextData { - outer_mark: Mark::root(), - prev_ctxt: SyntaxContext::empty(), - }], - markings: HashMap::new(), - next_mark: Mark(1), - } - } - - fn with T>(f: F) -> T { - thread_local! 
{ - static HYGIENE_DATA: RefCell = RefCell::new(HygieneData::new()); - } - HYGIENE_DATA.with(|data| f(&mut *data.borrow_mut())) - } -} - -pub fn reset_hygiene_data() { - HygieneData::with(|data| *data = HygieneData::new()) -} - -impl SyntaxContext { - pub const fn empty() -> Self { - SyntaxContext(0) - } - - pub fn data(self) -> SyntaxContextData { - HygieneData::with(|data| data.syntax_contexts[self.0 as usize]) - } - - /// Extend a syntax context with a given mark - pub fn apply_mark(self, mark: Mark) -> SyntaxContext { - // Applying the same mark twice is a no-op - let ctxt_data = self.data(); - if mark == ctxt_data.outer_mark { - return ctxt_data.prev_ctxt; - } - - HygieneData::with(|data| { - let syntax_contexts = &mut data.syntax_contexts; - *data.markings.entry((self, mark)).or_insert_with(|| { - syntax_contexts.push(SyntaxContextData { - outer_mark: mark, - prev_ctxt: self, - }); - SyntaxContext(syntax_contexts.len() as u32 - 1) - }) - }) - } -} - -impl fmt::Debug for SyntaxContext { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "#{}", self.0) - } -} diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 2d0994a7b78..4fb138d506a 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, NodeId}; use codemap::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{Expansion, ExpansionKind}; @@ -88,7 +88,7 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { let mut expansion = expansion.fold_with(self); if let Expansion::Items(mut items) = expansion { for derive in derives { - match self.remove(derive.as_placeholder_id()) { + match self.remove(NodeId::placeholder_from_mark(derive)) { Expansion::Items(derived_items) => items.extend(derived_items), _ => unreachable!(), } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 4c9a5d512af..6c975f3fc40 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -136,12 +136,12 @@ pub mod print { } pub mod ext { + pub use syntax_pos::hygiene; pub mod base; pub mod build; pub mod derive; pub mod expand; pub mod placeholders; - pub mod hygiene; pub mod quote; pub mod source_util; diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs new file mode 100644 index 00000000000..feebbcd6f03 --- /dev/null +++ b/src/libsyntax_pos/hygiene.rs @@ -0,0 +1,122 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Machinery for hygienic macros, inspired by the MTWT[1] paper. +//! +//! [1] Matthew Flatt, Ryan Culpepper, David Darais, and Robert Bruce Findler. +//! 2012. *Macros that work together: Compile-time bindings, partial expansion, +//! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. +//! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 + +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt; + +/// A SyntaxContext represents a chain of macro expansions (represented by marks). 
+#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Default)] +pub struct SyntaxContext(u32); + +#[derive(Copy, Clone)] +pub struct SyntaxContextData { + pub outer_mark: Mark, + pub prev_ctxt: SyntaxContext, +} + +/// A mark is a unique id associated with a macro expansion. +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default, RustcEncodable, RustcDecodable)] +pub struct Mark(u32); + +impl Mark { + pub fn fresh() -> Self { + HygieneData::with(|data| { + let next_mark = Mark(data.next_mark.0 + 1); + ::std::mem::replace(&mut data.next_mark, next_mark) + }) + } + + /// The mark of the theoretical expansion that generates freshly parsed, unexpanded AST. + pub fn root() -> Self { + Mark(0) + } + + pub fn as_u32(self) -> u32 { + self.0 + } + + pub fn from_u32(raw: u32) -> Mark { + Mark(raw) + } +} + +struct HygieneData { + syntax_contexts: Vec, + markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, + next_mark: Mark, +} + +impl HygieneData { + fn new() -> Self { + HygieneData { + syntax_contexts: vec![SyntaxContextData { + outer_mark: Mark::root(), + prev_ctxt: SyntaxContext::empty(), + }], + markings: HashMap::new(), + next_mark: Mark(1), + } + } + + fn with T>(f: F) -> T { + thread_local! { + static HYGIENE_DATA: RefCell = RefCell::new(HygieneData::new()); + } + HYGIENE_DATA.with(|data| f(&mut *data.borrow_mut())) + } +} + +pub fn reset_hygiene_data() { + HygieneData::with(|data| *data = HygieneData::new()) +} + +impl SyntaxContext { + pub const fn empty() -> Self { + SyntaxContext(0) + } + + pub fn data(self) -> SyntaxContextData { + HygieneData::with(|data| data.syntax_contexts[self.0 as usize]) + } + + /// Extend a syntax context with a given mark + pub fn apply_mark(self, mark: Mark) -> SyntaxContext { + // Applying the same mark twice is a no-op + let ctxt_data = self.data(); + if mark == ctxt_data.outer_mark { + return ctxt_data.prev_ctxt; + } + + HygieneData::with(|data| { + let syntax_contexts = &mut data.syntax_contexts; + *data.markings.entry((self, mark)).or_insert_with(|| { + syntax_contexts.push(SyntaxContextData { + outer_mark: mark, + prev_ctxt: self, + }); + SyntaxContext(syntax_contexts.len() as u32 - 1) + }) + }) + } +} + +impl fmt::Debug for SyntaxContext { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "#{}", self.0) + } +} diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 3808923e772..1c9a05dadd1 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -23,6 +23,7 @@ html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(warnings)] +#![feature(const_fn)] #![feature(custom_attribute)] #![allow(unused_attributes)] #![feature(rustc_private)] @@ -41,6 +42,8 @@ use serialize::{Encodable, Decodable, Encoder, Decoder}; extern crate serialize; extern crate serialize as rustc_serialize; // used by deriving +pub mod hygiene; + pub type FileName = String; /// Spans represent a region of code, used for error reporting. Positions in spans -- cgit 1.4.1-3-g733a5 From 496996c2af6174cb83a65756249d289f315dff80 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Thu, 16 Mar 2017 10:31:36 +0000 Subject: Remove code in `syntax::codemap`. 
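
The removed `span_to_expanded_string` machinery (and its tests) pretty-printed a
span's macro expansion chain. Callers that still need to walk that chain can do
so through `CodeMap::with_expn_info`, which other code in the tree keeps using
at this point. A minimal sketch, illustration only (the helper name is made up
for this example):

    use syntax::codemap::CodeMap;
    use syntax_pos::{Span, NO_EXPANSION};

    // Follow each expansion's call site outwards until we reach a span that
    // does not come from a macro expansion.
    fn expansion_chain(cm: &CodeMap, mut sp: Span) -> Vec<Span> {
        let mut chain = vec![sp];
        while sp.expn_id != NO_EXPANSION {
            match cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site.clone())) {
                Some(call_site) => {
                    sp = call_site;
                    chain.push(sp);
                }
                None => break,
            }
        }
        chain
    }
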
--- src/libsyntax/codemap.rs | 185 ----------------------------------------------- src/libsyntax_pos/lib.rs | 4 - 2 files changed, 189 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 0f4b844b0ea..388f3cb7323 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -409,101 +409,6 @@ impl CodeMap { hi.col.to_usize() + 1)).to_string() } - // Returns true if two spans have the same callee - // (Assumes the same ExpnFormat implies same callee) - fn match_callees(&self, sp_a: &Span, sp_b: &Span) -> bool { - let fmt_a = self - .with_expn_info(sp_a.expn_id, - |ei| ei.map(|ei| ei.callee.format.clone())); - - let fmt_b = self - .with_expn_info(sp_b.expn_id, - |ei| ei.map(|ei| ei.callee.format.clone())); - fmt_a == fmt_b - } - - /// Returns a formatted string showing the expansion chain of a span - /// - /// Spans are printed in the following format: - /// - /// filename:start_line:col: end_line:col - /// snippet - /// Callee: - /// Callee span - /// Callsite: - /// Callsite span - /// - /// Callees and callsites are printed recursively (if available, otherwise header - /// and span is omitted), expanding into their own callee/callsite spans. - /// Each layer of recursion has an increased indent, and snippets are truncated - /// to at most 50 characters. Finally, recursive calls to the same macro are squashed, - /// with '...' used to represent any number of recursive calls. - pub fn span_to_expanded_string(&self, sp: Span) -> String { - self.span_to_expanded_string_internal(sp, "") - } - - fn span_to_expanded_string_internal(&self, sp:Span, indent: &str) -> String { - let mut indent = indent.to_owned(); - let mut output = "".to_owned(); - let span_str = self.span_to_string(sp); - let mut span_snip = self.span_to_snippet(sp) - .unwrap_or("Snippet unavailable".to_owned()); - - // Truncate by code points - in worst case this will be more than 50 characters, - // but ensures at least 50 characters and respects byte boundaries. 
- let char_vec: Vec<(usize, char)> = span_snip.char_indices().collect(); - if char_vec.len() > 50 { - span_snip.truncate(char_vec[49].0); - span_snip.push_str("..."); - } - - output.push_str(&format!("{}{}\n{}`{}`\n", indent, span_str, indent, span_snip)); - - if sp.expn_id == NO_EXPANSION || sp.expn_id == COMMAND_LINE_EXPN { - return output; - } - - let mut callee = self.with_expn_info(sp.expn_id, - |ei| ei.and_then(|ei| ei.callee.span.clone())); - let mut callsite = self.with_expn_info(sp.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())); - - indent.push_str(" "); - let mut is_recursive = false; - - while callee.is_some() && self.match_callees(&sp, &callee.unwrap()) { - callee = self.with_expn_info(callee.unwrap().expn_id, - |ei| ei.and_then(|ei| ei.callee.span.clone())); - is_recursive = true; - } - if let Some(span) = callee { - output.push_str(&indent); - output.push_str("Callee:\n"); - if is_recursive { - output.push_str(&indent); - output.push_str("...\n"); - } - output.push_str(&(self.span_to_expanded_string_internal(span, &indent))); - } - - is_recursive = false; - while callsite.is_some() && self.match_callees(&sp, &callsite.unwrap()) { - callsite = self.with_expn_info(callsite.unwrap().expn_id, - |ei| ei.map(|ei| ei.call_site.clone())); - is_recursive = true; - } - if let Some(span) = callsite { - output.push_str(&indent); - output.push_str("Callsite:\n"); - if is_recursive { - output.push_str(&indent); - output.push_str("...\n"); - } - output.push_str(&(self.span_to_expanded_string_internal(span, &indent))); - } - output - } - /// Return the source span - this is either the supplied span, or the span for /// the macro callsite that expanded to it. pub fn source_callsite(&self, sp: Span) -> Span { @@ -1069,59 +974,6 @@ mod tests { assert_eq!(sstr, "blork.rs:2:1: 2:12"); } - #[test] - fn t10() { - // Test span_to_expanded_string works in base case (no expansion) - let cm = init_code_map(); - let span = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - let sstr = cm.span_to_expanded_string(span); - assert_eq!(sstr, "blork.rs:1:1: 1:12\n`first line.`\n"); - - let span = Span { lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION }; - let sstr = cm.span_to_expanded_string(span); - assert_eq!(sstr, "blork.rs:2:1: 2:12\n`second line`\n"); - } - - #[test] - fn t11() { - // Test span_to_expanded_string works with expansion - let cm = init_code_map(); - let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - let format = ExpnFormat::MacroBang(keywords::Invalid.name()); - let callee = NameAndSpan { format: format, - allow_internal_unstable: false, - span: None }; - - let info = ExpnInfo { call_site: root, callee: callee }; - let id = cm.record_expansion(info); - let sp = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id }; - - let sstr = cm.span_to_expanded_string(sp); - assert_eq!(sstr, - "blork.rs:2:1: 2:12\n`second line`\n Callsite:\n \ - blork.rs:1:1: 1:12\n `first line.`\n"); - } - - /// Test merging two spans on the same line - #[test] - fn span_merging() { - let cm = CodeMap::new(); - let inputtext = "bbbb BB bb CCC\n"; - let selection1 = " ~~ \n"; - let selection2 = " ~~~\n"; - cm.new_filemap_and_lines("blork.rs", None, inputtext); - let span1 = span_from_selection(inputtext, selection1); - let span2 = span_from_selection(inputtext, selection2); - - if let Some(sp) = cm.merge_spans(span1, span2) { - let sstr = cm.span_to_expanded_string(sp); - assert_eq!(sstr, "blork.rs:1:6: 1:15\n`BB bb CCC`\n"); - } - else { - assert!(false); - } - } 
- /// Test failing to merge two spans on different lines #[test] fn span_merging_fail() { @@ -1221,41 +1073,4 @@ mod tests { let id_end = cm.record_expansion(info_end); Span { lo: BytePos(37), hi: BytePos(48), expn_id: id_end } } - - #[test] - fn t12() { - // Test span_to_expanded_string collapses recursive macros and handles - // recursive callsite and callee expansions - let cm = init_code_map(); - let end = init_expansion_chain(&cm); - let sstr = cm.span_to_expanded_string(end); - let res_str = -r"blork2.rs:2:1: 2:12 -`second line` - Callsite: - ... - blork2.rs:1:1: 1:12 - `first line.` - Callee: - blork.rs:2:1: 2:12 - `second line` - Callee: - blork.rs:1:1: 1:12 - `first line.` - Callsite: - blork.rs:1:1: 1:12 - `first line.` - Callsite: - ... - blork.rs:2:1: 2:12 - `second line` - Callee: - blork.rs:1:1: 1:12 - `first line.` - Callsite: - blork.rs:1:1: 1:12 - `first line.` -"; - assert_eq!(sstr, res_str); - } } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 1c9a05dadd1..1b62d62348b 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -263,10 +263,6 @@ pub const NO_EXPANSION: ExpnId = ExpnId(!0); // For code appearing from the command line pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); -// For code generated by a procedural macro, without knowing which -// Used in `qquote!` -pub const PROC_EXPN: ExpnId = ExpnId(!2); - impl ExpnId { pub fn from_u32(id: u32) -> ExpnId { ExpnId(id) -- cgit 1.4.1-3-g733a5 From ec7c0aece17c9a11bc2eca15b994355a161bf878 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 17 Mar 2017 04:04:41 +0000 Subject: Merge `ExpnId` and `SyntaxContext`. --- src/librustc/hir/lowering.rs | 7 +- src/librustc/hir/mod.rs | 5 +- src/librustc/ich/caching_codemap_view.rs | 4 - src/librustc/middle/region.rs | 2 +- src/librustc/middle/stability.rs | 2 +- src/librustc_driver/driver.rs | 4 +- src/librustc_errors/emitter.rs | 22 +- src/librustc_errors/lib.rs | 4 +- .../calculate_svh/svh_visitor.rs | 17 +- src/librustc_mir/transform/qualify_consts.rs | 4 +- src/librustc_plugin/load.rs | 4 +- src/librustc_save_analysis/lib.rs | 7 +- src/librustc_save_analysis/span_utils.rs | 3 +- src/librustc_trans/asm.rs | 4 +- src/librustc_trans/back/write.rs | 6 +- src/librustc_trans/mir/mod.rs | 18 +- src/librustc_typeck/check/mod.rs | 5 +- src/libsyntax/ast.rs | 57 +-- src/libsyntax/codemap.rs | 291 +-------------- src/libsyntax/ext/base.rs | 74 ++-- src/libsyntax/ext/derive.rs | 50 +-- src/libsyntax/ext/expand.rs | 111 +++--- src/libsyntax/ext/source_util.rs | 2 +- src/libsyntax/ext/tt/quoted.rs | 14 +- src/libsyntax/feature_gate.rs | 20 +- src/libsyntax/json.rs | 2 +- src/libsyntax/lib.rs | 2 +- src/libsyntax/parse/parser.rs | 6 +- src/libsyntax/std_inject.rs | 21 +- src/libsyntax/symbol.rs | 342 ------------------ src/libsyntax/test.rs | 21 +- src/libsyntax/test_snippet.rs | 2 +- src/libsyntax/tokenstream.rs | 16 +- src/libsyntax_ext/asm.rs | 12 +- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/debug.rs | 4 +- src/libsyntax_ext/deriving/generic/mod.rs | 12 +- src/libsyntax_ext/deriving/mod.rs | 34 +- src/libsyntax_ext/format.rs | 3 +- src/libsyntax_ext/proc_macro_registrar.rs | 6 +- src/libsyntax_pos/hygiene.rs | 94 ++++- src/libsyntax_pos/lib.rs | 101 ++++-- src/libsyntax_pos/symbol.rs | 389 +++++++++++++++++++++ src/test/compile-fail-fulldeps/qquote.rs | 8 - src/test/run-fail-fulldeps/qquote.rs | 8 - src/test/run-pass-fulldeps/qquote.rs | 8 - 47 files changed, 793 
insertions(+), 1039 deletions(-) delete mode 100644 src/libsyntax/symbol.rs create mode 100644 src/libsyntax_pos/symbol.rs (limited to 'src/libsyntax') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 6ca0c971ea4..786145f3091 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -57,6 +57,7 @@ use std::mem; use syntax::attr; use syntax::ast::*; use syntax::errors; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ptr::P; use syntax::codemap::{self, respan, Spanned}; use syntax::std_inject; @@ -392,7 +393,8 @@ impl<'a> LoweringContext<'a> { } fn allow_internal_unstable(&self, reason: &'static str, mut span: Span) -> Span { - span.expn_id = self.sess.codemap().record_expansion(codemap::ExpnInfo { + let mark = Mark::fresh(); + mark.set_expn_info(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { format: codemap::CompilerDesugaring(Symbol::intern(reason)), @@ -400,6 +402,7 @@ impl<'a> LoweringContext<'a> { allow_internal_unstable: true, }, }); + span.ctxt = SyntaxContext::empty().apply_mark(mark); span } @@ -1986,7 +1989,7 @@ impl<'a> LoweringContext<'a> { volatile: asm.volatile, alignstack: asm.alignstack, dialect: asm.dialect, - expn_id: asm.expn_id, + ctxt: asm.ctxt, }; let outputs = asm.outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(); diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index f4f2f4cf921..da7e71ac07d 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -33,11 +33,12 @@ use hir::def::Def; use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; use util::nodemap::{NodeMap, FxHashSet}; -use syntax_pos::{Span, ExpnId, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use syntax::codemap::{self, Spanned}; use syntax::abi::Abi; use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; +use syntax::ext::hygiene::SyntaxContext; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; use syntax::tokenstream::TokenStream; @@ -1367,7 +1368,7 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub expn_id: ExpnId, + pub ctxt: SyntaxContext, } /// represents an argument in a function header diff --git a/src/librustc/ich/caching_codemap_view.rs b/src/librustc/ich/caching_codemap_view.rs index a71251eedf5..1278d9f5171 100644 --- a/src/librustc/ich/caching_codemap_view.rs +++ b/src/librustc/ich/caching_codemap_view.rs @@ -47,10 +47,6 @@ impl<'tcx> CachingCodemapView<'tcx> { } } - pub fn codemap(&self) -> &'tcx CodeMap { - self.codemap - } - pub fn byte_pos_to_line_and_col(&mut self, pos: BytePos) -> Option<(Rc, usize, BytePos)> { diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index a19f15a9329..0676075930d 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -236,7 +236,7 @@ impl CodeExtent { // (This is the special case aluded to in the // doc-comment for this method) let stmt_span = blk.stmts[r.first_statement_index as usize].span; - Some(Span { lo: stmt_span.hi, hi: blk.span.hi, expn_id: stmt_span.expn_id }) + Some(Span { lo: stmt_span.hi, hi: blk.span.hi, ctxt: stmt_span.ctxt }) } } } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 4354ed6817a..2b5ea61d4e8 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -467,7 +467,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn check_stability(self, 
def_id: DefId, id: NodeId, span: Span) { - if self.sess.codemap().span_allows_unstable(span) { + if span.allows_unstable() { debug!("stability: \ skipping span={:?} since it is internal", span); return; diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 4873b21c548..977382b33ad 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -580,7 +580,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, krate = time(time_passes, "crate injection", || { let alt_std_name = sess.opts.alt_std_name.clone(); - syntax::std_inject::maybe_inject_crates_ref(&sess.parse_sess, krate, alt_std_name) + syntax::std_inject::maybe_inject_crates_ref(krate, alt_std_name) }); let mut addl_plugins = Some(addl_plugins); @@ -798,7 +798,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, // Discard hygiene data, which isn't required after lowering to HIR. if !keep_hygiene_data(sess) { - syntax::ext::hygiene::reset_hygiene_data(); + syntax::ext::hygiene::clear_markings(); } Ok(ExpansionResult { diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 431edb3c9bc..367b85ac726 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -10,7 +10,7 @@ use self::Destination::*; -use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, FileMap, Span, MultiSpan, CharPos}; +use syntax_pos::{DUMMY_SP, FileMap, Span, MultiSpan, CharPos}; use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, CodeMapper}; use RenderSpan::*; @@ -151,7 +151,7 @@ impl EmitterWriter { if let Some(ref cm) = self.cm { for span_label in msp.span_labels() { - if span_label.span == DUMMY_SP || span_label.span == COMMAND_LINE_SP { + if span_label.span == DUMMY_SP { continue; } let lo = cm.lookup_char_pos(span_label.span.lo); @@ -615,7 +615,7 @@ impl EmitterWriter { let mut max = 0; if let Some(ref cm) = self.cm { for primary_span in msp.primary_spans() { - if primary_span != &DUMMY_SP && primary_span != &COMMAND_LINE_SP { + if primary_span != &DUMMY_SP { let hi = cm.lookup_char_pos(primary_span.hi); if hi.line > max { max = hi.line; @@ -623,7 +623,7 @@ impl EmitterWriter { } } for span_label in msp.span_labels() { - if span_label.span != DUMMY_SP && span_label.span != COMMAND_LINE_SP { + if span_label.span != DUMMY_SP { let hi = cm.lookup_char_pos(span_label.span.hi); if hi.line > max { max = hi.line; @@ -659,20 +659,20 @@ impl EmitterWriter { // First, find all the spans in <*macros> and point instead at their use site for sp in span.primary_spans() { - if (*sp == COMMAND_LINE_SP) || (*sp == DUMMY_SP) { + if *sp == DUMMY_SP { continue; } if cm.span_to_filename(sp.clone()).contains("macros>") { - let v = cm.macro_backtrace(sp.clone()); + let v = sp.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp.clone(), use_site.call_site.clone())); } } - for trace in cm.macro_backtrace(sp.clone()).iter().rev() { + for trace in sp.macro_backtrace().iter().rev() { // Only show macro locations that are local // and display them like a span_note if let Some(def_site) = trace.def_site_span { - if (def_site == COMMAND_LINE_SP) || (def_site == DUMMY_SP) { + if def_site == DUMMY_SP { continue; } // Check to make sure we're not in any <*macros> @@ -689,11 +689,11 @@ impl EmitterWriter { span.push_span_label(label_span, label_text); } for sp_label in span.span_labels() { - if (sp_label.span == COMMAND_LINE_SP) || (sp_label.span == DUMMY_SP) { + if sp_label.span == DUMMY_SP { continue; } if 
cm.span_to_filename(sp_label.span.clone()).contains("macros>") { - let v = cm.macro_backtrace(sp_label.span.clone()); + let v = sp_label.span.macro_backtrace(); if let Some(use_site) = v.last() { before_after.push((sp_label.span.clone(), use_site.call_site.clone())); } @@ -848,7 +848,7 @@ impl EmitterWriter { // Make sure our primary file comes first let primary_lo = if let (Some(ref cm), Some(ref primary_span)) = (self.cm.as_ref(), msp.primary_span().as_ref()) { - if primary_span != &&DUMMY_SP && primary_span != &&COMMAND_LINE_SP { + if primary_span != &&DUMMY_SP { cm.lookup_char_pos(primary_span.lo) } else { emit_to_destination(&buffer.render(), level, &mut self.dst)?; diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 4c889dad8ca..2efdaa57fba 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -48,7 +48,6 @@ pub mod styled_buffer; mod lock; use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION}; -use syntax_pos::MacroBacktrace; #[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)] pub enum RenderSpan { @@ -75,7 +74,6 @@ pub trait CodeMapper { fn span_to_lines(&self, sp: Span) -> FileLinesResult; fn span_to_string(&self, sp: Span) -> String; fn span_to_filename(&self, sp: Span) -> FileName; - fn macro_backtrace(&self, span: Span) -> Vec; fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option; } @@ -120,7 +118,7 @@ impl CodeSuggestion { let bounding_span = Span { lo: lo, hi: hi, - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }; let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index 210803c3f32..5401b371888 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -17,9 +17,10 @@ use self::SawTraitOrImplItemComponent::*; use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; +use syntax::ext::hygiene::SyntaxContext; use syntax::parse::token; use syntax::symbol::InternedString; -use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; +use syntax_pos::{Span, BytePos}; use syntax::tokenstream; use rustc::hir; use rustc::hir::*; @@ -92,10 +93,10 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { span.hi }; - let expn_kind = match span.expn_id { - NO_EXPANSION => SawSpanExpnKind::NoExpansion, - COMMAND_LINE_EXPN => SawSpanExpnKind::CommandLine, - _ => SawSpanExpnKind::SomeExpansion, + let expn_kind = if span.ctxt == SyntaxContext::empty() { + SawSpanExpnKind::NoExpansion + } else { + SawSpanExpnKind::SomeExpansion }; let loc1 = self.codemap.byte_pos_to_line_and_col(span.lo); @@ -121,8 +122,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { saw.hash(self.st); if expn_kind == SawSpanExpnKind::SomeExpansion { - let call_site = self.codemap.codemap().source_callsite(span); - self.hash_span(call_site); + self.hash_span(span.source_callsite()); } } @@ -483,7 +483,6 @@ fn saw_impl_item(ii: &ImplItemKind) -> SawTraitOrImplItemComponent { #[derive(Clone, Copy, Hash, Eq, PartialEq)] enum SawSpanExpnKind { NoExpansion, - CommandLine, SomeExpansion, } @@ -501,7 +500,7 @@ impl<'a> Hash for StableInlineAsm<'a> { volatile, alignstack, dialect, - expn_id: _, // This is used for error reporting + ctxt: _, // This is used for error reporting } = *self.0; asm.as_str().hash(state); diff --git 
a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index ba42804c926..9d236bd013c 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -223,7 +223,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } // This comes from a macro that has #[allow_internal_unstable]. - if self.tcx.sess.codemap().span_allows_unstable(self.span) { + if self.span.allows_unstable() { return; } @@ -805,7 +805,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { self.def_id.is_local() && // this doesn't come from a macro that has #[allow_internal_unstable] - !self.tcx.sess.codemap().span_allows_unstable(self.span) + !self.span.allows_unstable() { let mut err = self.tcx.sess.struct_span_err(self.span, "const fns are an unstable feature"); diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index efe9963cecc..e884f3bdbb1 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -20,7 +20,7 @@ use std::env; use std::mem; use std::path::PathBuf; use syntax::ast; -use syntax_pos::{Span, COMMAND_LINE_SP}; +use syntax_pos::{Span, DUMMY_SP}; /// Pointer to a registrar function. pub type PluginRegistrarFun = @@ -81,7 +81,7 @@ pub fn load_plugins(sess: &Session, if let Some(plugins) = addl_plugins { for plugin in plugins { - loader.load_plugin(COMMAND_LINE_SP, &plugin, vec![]); + loader.load_plugin(DUMMY_SP, &plugin, vec![]); } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index e5c04f6b61e..fd6803e087a 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -690,9 +690,8 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { // Note we take care to use the source callsite/callee, to handle // nested expansions and ensure we only generate data for source-visible // macro uses. - let callsite = self.tcx.sess.codemap().source_callsite(span); - let callee = self.tcx.sess.codemap().source_callee(span); - let callee = option_try!(callee); + let callsite = span.source_callsite(); + let callee = option_try!(span.source_callee()); let callee_span = option_try!(callee.span); // Ignore attribute macros, their spans are usually mangled @@ -1013,5 +1012,5 @@ fn escape(s: String) -> String { // Helper function to determine if a span came from a // macro expansion or syntax extension. pub fn generated_code(span: Span) -> bool { - span.expn_id != NO_EXPANSION || span == DUMMY_SP + span.ctxt != NO_EXPANSION || span == DUMMY_SP } diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 34402742e6c..c19f805a285 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -462,8 +462,7 @@ impl<'a> SpanUtils<'a> { // Otherwise, a generated span is deemed invalid if it is not a sub-span of the root // callsite. This filters out macro internal variables and most malformed spans. - let span = self.sess.codemap().source_callsite(parent); - !(span.contains(parent)) + !parent.source_callsite().contains(parent) } } diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index b6195765b27..3e270b7928e 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -111,14 +111,14 @@ pub fn trans_inline_asm<'a, 'tcx>( bcx.store(v, val, None); } - // Store expn_id in a metadata node so we can map LLVM errors + // Store mark in a metadata node so we can map LLVM errors // back to source locations. See #17552. 
unsafe { let key = "srcloc"; let kind = llvm::LLVMGetMDKindIDInContext(bcx.ccx.llcx(), key.as_ptr() as *const c_char, key.len() as c_uint); - let val: llvm::ValueRef = C_i32(bcx.ccx, ia.expn_id.into_u32() as i32); + let val: llvm::ValueRef = C_i32(bcx.ccx, ia.ctxt.outer().as_u32() as i32); llvm::LLVMSetMetadata(r, kind, llvm::LLVMMDNodeInContext(bcx.ccx.llcx(), &val, 1)); diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 5a017e4fb8a..ccb3f7ac882 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -371,14 +371,14 @@ struct HandlerFreeVars<'a> { unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>, msg: &'b str, cookie: c_uint) { - use syntax_pos::ExpnId; + use syntax::ext::hygiene::Mark; match cgcx.lto_ctxt { Some((sess, _)) => { - sess.codemap().with_expn_info(ExpnId::from_u32(cookie), |info| match info { + match Mark::from_u32(cookie).expn_info() { Some(ei) => sess.span_err(ei.call_site, msg), None => sess.err(msg), - }); + }; } None => { diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 6419f41f86b..21bbbea77d4 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -26,7 +26,7 @@ use monomorphize::{self, Instance}; use abi::FnType; use type_of; -use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos, Span}; +use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span}; use syntax::symbol::keywords; use std::iter; @@ -124,24 +124,18 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> { // In order to have a good line stepping behavior in debugger, we overwrite debug // locations of macro expansions with that of the outermost expansion site // (unless the crate is being compiled with `-Z debug-macros`). - if source_info.span.expn_id == NO_EXPANSION || - source_info.span.expn_id == COMMAND_LINE_EXPN || - self.ccx.sess().opts.debugging_opts.debug_macros { - + if source_info.span.ctxt == NO_EXPANSION || + self.ccx.sess().opts.debugging_opts.debug_macros { let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo); (scope, source_info.span) } else { - let cm = self.ccx.sess().codemap(); // Walk up the macro expansion chain until we reach a non-expanded span. // We also stop at the function body level because no line stepping can occurr // at the level above that. 
let mut span = source_info.span; - while span.expn_id != NO_EXPANSION && - span.expn_id != COMMAND_LINE_EXPN && - span.expn_id != self.mir.span.expn_id { - if let Some(callsite_span) = cm.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - span = callsite_span; + while span.ctxt != NO_EXPANSION && span.ctxt != self.mir.span.ctxt { + if let Some(info) = span.ctxt.outer().expn_info() { + span = info.call_site; } else { break; } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 9c62fd486d4..b95e01f4ff6 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -4161,12 +4161,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } if let Some(last_stmt) = extra_semi { - let original_span = original_sp(self.tcx.sess.codemap(), - last_stmt.span, blk.span); + let original_span = original_sp(last_stmt.span, blk.span); let span_semi = Span { lo: original_span.hi - BytePos(1), hi: original_span.hi, - expn_id: original_span.expn_id + ctxt: original_span.ctxt, }; err.span_help(span_semi, "consider removing this semicolon:"); } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 7e2b225193f..a4bebd311de 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,10 +14,10 @@ pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; -pub use symbol::Symbol as Name; +pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; -use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP, ExpnId}; +use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; @@ -27,61 +27,12 @@ use rustc_data_structures::indexed_vec; use symbol::{Symbol, keywords}; use tokenstream::{ThinTokenStream, TokenStream}; +use serialize::{self, Encoder, Decoder}; use std::collections::HashSet; use std::fmt; use std::rc::Rc; use std::u32; -use serialize::{self, Encodable, Decodable, Encoder, Decoder}; - -/// An identifier contains a Name (index into the interner -/// table) and a SyntaxContext to track renaming and -/// macro expansion per Flatt et al., "Macros That Work Together" -#[derive(Clone, Copy, PartialEq, Eq, Hash)] -pub struct Ident { - pub name: Symbol, - pub ctxt: SyntaxContext -} - -impl Ident { - pub const fn with_empty_ctxt(name: Name) -> Ident { - Ident { name: name, ctxt: SyntaxContext::empty() } - } - - /// Maps a string to an identifier with an empty syntax context. - pub fn from_str(s: &str) -> Ident { - Ident::with_empty_ctxt(Symbol::intern(s)) - } - - pub fn unhygienize(&self) -> Ident { - Ident { name: self.name, ctxt: SyntaxContext::empty() } - } -} - -impl fmt::Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}{:?}", self.name, self.ctxt) - } -} - -impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.name, f) - } -} - -impl Encodable for Ident { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - self.name.encode(s) - } -} - -impl Decodable for Ident { - fn decode(d: &mut D) -> Result { - Ok(Ident::with_empty_ctxt(Name::decode(d)?)) - } -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, @@ -1445,7 +1396,7 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub expn_id: ExpnId, + pub ctxt: SyntaxContext, } /// An argument in a function header. 
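The ast.rs hunk above swaps `InlineAsm::expn_id` for a `SyntaxContext`, matching the new `Span { lo, hi, ctxt }` layout this series introduces: expansion information now hangs off a `Mark` reachable from the span itself rather than a side table in the `CodeMap`. As a rough sketch only (not part of these diffs), and assuming the `syntax_pos::hygiene` API exercised in the hunks (`Mark::fresh`, `set_expn_info`, `apply_mark`, `outer`, `expn_info`) keeps the signatures shown there, the post-patch pattern looks like this:

    use syntax::codemap::{ExpnFormat, ExpnInfo, NameAndSpan};
    use syntax::ext::hygiene::{Mark, SyntaxContext};
    use syntax::symbol::Symbol;
    use syntax_pos::Span;

    /// Attach expansion info to a span by minting a fresh Mark.
    fn mark_span(sp: Span) -> Span {
        let mark = Mark::fresh();
        mark.set_expn_info(ExpnInfo {
            call_site: sp,
            callee: NameAndSpan {
                format: ExpnFormat::MacroBang(Symbol::intern("example")),
                span: None,
                allow_internal_unstable: false,
            },
        });
        // The span now carries the hygiene context directly.
        Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp }
    }

    /// Read the call site back without consulting the CodeMap.
    fn call_site_of(sp: Span) -> Option<Span> {
        sp.ctxt.outer().expn_info().map(|info| info.call_site)
    }

The std_inject.rs and test.rs hunks later in this patch follow the same shape when crafting their `#[allow_internal_unstable]` spans.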
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 388f3cb7323..ba199eacb62 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -17,6 +17,8 @@ //! within the CodeMap, which upon request can be converted to line and column //! information, source code snippets, etc. +pub use syntax_pos::*; +pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo, NameAndSpan}; pub use self::ExpnFormat::*; use std::cell::RefCell; @@ -26,35 +28,21 @@ use std::rc::Rc; use std::env; use std::fs; use std::io::{self, Read}; -pub use syntax_pos::*; use errors::CodeMapper; -use ast::Name; - /// Return the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. -pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span { - let call_site1 = cm.with_expn_info(sp.expn_id, |ei| ei.map(|ei| ei.call_site)); - let call_site2 = cm.with_expn_info(enclosing_sp.expn_id, |ei| ei.map(|ei| ei.call_site)); +pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { + let call_site1 = sp.ctxt.outer().expn_info().map(|ei| ei.call_site); + let call_site2 = enclosing_sp.ctxt.outer().expn_info().map(|ei| ei.call_site); match (call_site1, call_site2) { (None, _) => sp, (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, - (Some(call_site1), _) => original_sp(cm, call_site1, enclosing_sp), + (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), } } -/// The source of expansion. -#[derive(Clone, Hash, Debug, PartialEq, Eq)] -pub enum ExpnFormat { - /// e.g. #[derive(...)] - MacroAttribute(Name), - /// e.g. `format!()` - MacroBang(Name), - /// Desugaring done by the compiler during HIR lowering. - CompilerDesugaring(Name) -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct Spanned { pub node: T, @@ -73,47 +61,6 @@ pub fn dummy_spanned(t: T) -> Spanned { respan(DUMMY_SP, t) } -#[derive(Clone, Hash, Debug)] -pub struct NameAndSpan { - /// The format with which the macro was invoked. - pub format: ExpnFormat, - /// Whether the macro is allowed to use #[unstable]/feature-gated - /// features internally without forcing the whole crate to opt-in - /// to them. - pub allow_internal_unstable: bool, - /// The span of the macro definition itself. The macro may not - /// have a sensible definition span (e.g. something defined - /// completely inside libsyntax) in which case this is None. - pub span: Option -} - -impl NameAndSpan { - pub fn name(&self) -> Name { - match self.format { - ExpnFormat::MacroAttribute(s) | - ExpnFormat::MacroBang(s) | - ExpnFormat::CompilerDesugaring(s) => s, - } - } -} - -/// Extra information for tracking spans of macro and syntax sugar expansion -#[derive(Hash, Debug)] -pub struct ExpnInfo { - /// The location of the actual macro invocation or syntax sugar , e.g. - /// `let x = foo!();` or `if let Some(y) = x {}` - /// - /// This may recursively refer to other macro invocations, e.g. if - /// `foo!()` invoked `bar!()` internally, and there was an - /// expression inside `bar!`; the call_site of the expression in - /// the expansion would point to the `bar!` invocation; that - /// call_site span would have its own ExpnInfo, with the call_site - /// pointing to the `foo!` invocation. - pub call_site: Span, - /// Information about the expansion. 
- pub callee: NameAndSpan -} - // _____________________________________________________________________________ // FileMap, MultiByteChar, FileName, FileLines // @@ -161,7 +108,6 @@ impl FileLoader for RealFileLoader { pub struct CodeMap { pub files: RefCell>>, - expansions: RefCell>, file_loader: Box } @@ -169,7 +115,6 @@ impl CodeMap { pub fn new() -> CodeMap { CodeMap { files: RefCell::new(Vec::new()), - expansions: RefCell::new(Vec::new()), file_loader: Box::new(RealFileLoader) } } @@ -177,7 +122,6 @@ impl CodeMap { pub fn with_file_loader(file_loader: Box) -> CodeMap { CodeMap { files: RefCell::new(Vec::new()), - expansions: RefCell::new(Vec::new()), file_loader: file_loader } } @@ -353,14 +297,14 @@ impl CodeMap { /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If /// there are gaps between lhs and rhs, the resulting union will cross these gaps. /// For this to work, the spans have to be: - /// * the expn_id of both spans much match + /// * the ctxt of both spans much match /// * the lhs span needs to end on the same line the rhs span begins /// * the lhs span must start at or before the rhs span pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { use std::cmp; // make sure we're at the same expansion id - if sp_lhs.expn_id != sp_rhs.expn_id { + if sp_lhs.ctxt != sp_rhs.ctxt { return None; } @@ -383,7 +327,7 @@ impl CodeMap { Some(Span { lo: cmp::min(sp_lhs.lo, sp_rhs.lo), hi: cmp::max(sp_lhs.hi, sp_rhs.hi), - expn_id: sp_lhs.expn_id, + ctxt: sp_lhs.ctxt, }) } else { None @@ -391,10 +335,6 @@ impl CodeMap { } pub fn span_to_string(&self, sp: Span) -> String { - if sp == COMMAND_LINE_SP { - return "".to_string(); - } - if self.files.borrow().is_empty() && sp.source_equal(&DUMMY_SP) { return "no-location".to_string(); } @@ -409,62 +349,6 @@ impl CodeMap { hi.col.to_usize() + 1)).to_string() } - /// Return the source span - this is either the supplied span, or the span for - /// the macro callsite that expanded to it. - pub fn source_callsite(&self, sp: Span) -> Span { - let mut span = sp; - // Special case - if a macro is parsed as an argument to another macro, the source - // callsite is the first callsite, which is also source-equivalent to the span. - let mut first = true; - while span.expn_id != NO_EXPANSION && span.expn_id != COMMAND_LINE_EXPN { - if let Some(callsite) = self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - if first && span.source_equal(&callsite) { - if self.lookup_char_pos(span.lo).file.is_real_file() { - return Span { expn_id: NO_EXPANSION, .. span }; - } - } - first = false; - span = callsite; - } - else { - break; - } - } - span - } - - /// Return the source callee. - /// - /// Returns None if the supplied span has no expansion trace, - /// else returns the NameAndSpan for the macro definition - /// corresponding to the source callsite. - pub fn source_callee(&self, sp: Span) -> Option { - let mut span = sp; - // Special case - if a macro is parsed as an argument to another macro, the source - // callsite is source-equivalent to the span, and the source callee is the first callee. 
- let mut first = true; - while let Some(callsite) = self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - if first && span.source_equal(&callsite) { - if self.lookup_char_pos(span.lo).file.is_real_file() { - return self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.callee.clone())); - } - } - first = false; - if let Some(_) = self.with_expn_info(callsite.expn_id, - |ei| ei.map(|ei| ei.call_site.clone())) { - span = callsite; - } - else { - return self.with_expn_info(span.expn_id, - |ei| ei.map(|ei| ei.callee.clone())); - } - } - None - } - pub fn span_to_filename(&self, sp: Span) -> FileName { self.lookup_char_pos(sp.lo).file.name.to_string() } @@ -628,111 +512,9 @@ impl CodeMap { return a; } - pub fn record_expansion(&self, expn_info: ExpnInfo) -> ExpnId { - let mut expansions = self.expansions.borrow_mut(); - expansions.push(expn_info); - let len = expansions.len(); - if len > u32::max_value() as usize { - panic!("too many ExpnInfo's!"); - } - ExpnId(len as u32 - 1) - } - - pub fn with_expn_info(&self, id: ExpnId, f: F) -> T where - F: FnOnce(Option<&ExpnInfo>) -> T, - { - match id { - NO_EXPANSION | COMMAND_LINE_EXPN => f(None), - ExpnId(i) => f(Some(&(*self.expansions.borrow())[i as usize])) - } - } - - /// Check if a span is "internal" to a macro in which #[unstable] - /// items can be used (that is, a macro marked with - /// `#[allow_internal_unstable]`). - pub fn span_allows_unstable(&self, span: Span) -> bool { - debug!("span_allows_unstable(span = {:?})", span); - let mut allows_unstable = false; - let mut expn_id = span.expn_id; - loop { - let quit = self.with_expn_info(expn_id, |expninfo| { - debug!("span_allows_unstable: expninfo = {:?}", expninfo); - expninfo.map_or(/* hit the top level */ true, |info| { - - let span_comes_from_this_expansion = - info.callee.span.map_or(span.source_equal(&info.call_site), |mac_span| { - mac_span.contains(span) - }); - - debug!("span_allows_unstable: span: {:?} call_site: {:?} callee: {:?}", - (span.lo, span.hi), - (info.call_site.lo, info.call_site.hi), - info.callee.span.map(|x| (x.lo, x.hi))); - debug!("span_allows_unstable: from this expansion? {}, allows unstable? {}", - span_comes_from_this_expansion, - info.callee.allow_internal_unstable); - if span_comes_from_this_expansion { - allows_unstable = info.callee.allow_internal_unstable; - // we've found the right place, stop looking - true - } else { - // not the right place, keep looking - expn_id = info.call_site.expn_id; - false - } - }) - }); - if quit { - break - } - } - debug!("span_allows_unstable? {}", allows_unstable); - allows_unstable - } - pub fn count_lines(&self) -> usize { self.files.borrow().iter().fold(0, |a, f| a + f.count_lines()) } - - pub fn macro_backtrace(&self, span: Span) -> Vec { - let mut prev_span = DUMMY_SP; - let mut span = span; - let mut result = vec![]; - loop { - let span_name_span = self.with_expn_info(span.expn_id, |expn_info| { - expn_info.map(|ei| { - let (pre, post) = match ei.callee.format { - MacroAttribute(..) => ("#[", "]"), - MacroBang(..) => ("", "!"), - CompilerDesugaring(..) 
=> ("desugaring of `", "`"), - }; - let macro_decl_name = format!("{}{}{}", - pre, - ei.callee.name(), - post); - let def_site_span = ei.callee.span; - (ei.call_site, macro_decl_name, def_site_span) - }) - }); - - match span_name_span { - None => break, - Some((call_site, macro_decl_name, def_site_span)) => { - // Don't print recursive invocations - if !call_site.source_equal(&prev_span) { - result.push(MacroBacktrace { - call_site: call_site, - macro_decl_name: macro_decl_name, - def_site_span: def_site_span, - }); - } - prev_span = span; - span = call_site; - } - } - } - result - } } impl CodeMapper for CodeMap { @@ -748,9 +530,6 @@ impl CodeMapper for CodeMap { fn span_to_filename(&self, sp: Span) -> FileName { self.span_to_filename(sp) } - fn macro_backtrace(&self, span: Span) -> Vec { - self.macro_backtrace(span) - } fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { self.merge_spans(sp_lhs, sp_rhs) } @@ -763,7 +542,6 @@ impl CodeMapper for CodeMap { #[cfg(test)] mod tests { use super::*; - use symbol::keywords; use std::rc::Rc; #[test] @@ -912,7 +690,7 @@ mod tests { fn t7() { // Test span_to_lines for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let file_lines = cm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, "blork.rs"); @@ -928,7 +706,7 @@ mod tests { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } + Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), ctxt: NO_EXPANSION } } /// Test span_to_snippet and span_to_lines for a span coverting 3 @@ -958,7 +736,7 @@ mod tests { fn t8() { // Test span_to_snippet for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let snippet = cm.span_to_snippet(span); assert_eq!(snippet, Ok("second line".to_string())); @@ -968,7 +746,7 @@ mod tests { fn t9() { // Test span_to_str for a span ending at the end of filemap let cm = init_code_map(); - let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION}; + let span = Span {lo: BytePos(12), hi: BytePos(23), ctxt: NO_EXPANSION}; let sstr = cm.span_to_string(span); assert_eq!(sstr, "blork.rs:2:1: 2:12"); @@ -1022,7 +800,7 @@ mod tests { let span = Span { lo: BytePos(lo as u32 + file.start_pos.0), hi: BytePos(hi as u32 + file.start_pos.0), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }; assert_eq!(&self.span_to_snippet(span).unwrap()[..], substring); @@ -1032,45 +810,4 @@ mod tests { } } } - - fn init_expansion_chain(cm: &CodeMap) -> Span { - // Creates an expansion chain containing two recursive calls - // root -> expA -> expA -> expB -> expB -> end - let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - - let format_root = ExpnFormat::MacroBang(keywords::Invalid.name()); - let callee_root = NameAndSpan { format: format_root, - allow_internal_unstable: false, - span: Some(root) }; - - let info_a1 = ExpnInfo { call_site: root, callee: callee_root }; - let id_a1 = cm.record_expansion(info_a1); - let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 }; - - let format_a = 
ExpnFormat::MacroBang(keywords::As.name()); - let callee_a = NameAndSpan { format: format_a, - allow_internal_unstable: false, - span: Some(span_a1) }; - - let info_a2 = ExpnInfo { call_site: span_a1, callee: callee_a.clone() }; - let id_a2 = cm.record_expansion(info_a2); - let span_a2 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a2 }; - - let info_b1 = ExpnInfo { call_site: span_a2, callee: callee_a }; - let id_b1 = cm.record_expansion(info_b1); - let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 }; - - let format_b = ExpnFormat::MacroBang(keywords::Box.name()); - let callee_b = NameAndSpan { format: format_b, - allow_internal_unstable: false, - span: None }; - - let info_b2 = ExpnInfo { call_site: span_b1, callee: callee_b.clone() }; - let id_b2 = cm.record_expansion(info_b2); - let span_b2 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b2 }; - - let info_end = ExpnInfo { call_site: span_b2, callee: callee_b }; - let id_end = cm.record_expansion(info_end); - Span { lo: BytePos(37), hi: BytePos(48), expn_id: id_end } - } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index dc7e7673eb0..a2d54b62ec6 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -12,11 +12,11 @@ pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT use ast::{self, Attribute, Name, PatKind, MetaItem}; use attr::HasAttrs; -use codemap::{self, CodeMap, ExpnInfo, Spanned, respan}; -use syntax_pos::{Span, ExpnId, NO_EXPANSION}; -use errors::{DiagnosticBuilder, FatalError}; +use codemap::{self, CodeMap, Spanned, respan}; +use syntax_pos::{Span, DUMMY_SP}; +use errors::DiagnosticBuilder; use ext::expand::{self, Expansion, Invocation}; -use ext::hygiene::Mark; +use ext::hygiene::{Mark, SyntaxContext}; use fold::{self, Folder}; use parse::{self, parser, DirectoryOwnership}; use parse::token; @@ -56,6 +56,14 @@ impl HasAttrs for Annotatable { } impl Annotatable { + pub fn span(&self) -> Span { + match *self { + Annotatable::Item(ref item) => item.span, + Annotatable::TraitItem(ref trait_item) => trait_item.span, + Annotatable::ImplItem(ref impl_item) => impl_item.span, + } + } + pub fn expect_item(self) -> P { match self { Annotatable::Item(i) => i, @@ -602,7 +610,6 @@ pub struct ModuleData { pub struct ExpansionData { pub mark: Mark, pub depth: usize, - pub backtrace: ExpnId, pub module: Rc, pub directory_ownership: DirectoryOwnership, } @@ -633,7 +640,6 @@ impl<'a> ExtCtxt<'a> { current_expansion: ExpansionData { mark: Mark::root(), depth: 0, - backtrace: NO_EXPANSION, module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }), directory_ownership: DirectoryOwnership::Owned, }, @@ -658,30 +664,30 @@ impl<'a> ExtCtxt<'a> { pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { - self.codemap().with_expn_info(self.backtrace(), |ei| match ei { + match self.current_expansion.mark.expn_info() { Some(expn_info) => expn_info.call_site, - None => self.bug("missing top span") - }) + None => DUMMY_SP, + } + } + pub fn backtrace(&self) -> SyntaxContext { + SyntaxContext::empty().apply_mark(self.current_expansion.mark) } - pub fn backtrace(&self) -> ExpnId { self.current_expansion.backtrace } /// Returns span for the macro which originally caused the current expansion to happen. /// /// Stops backtracing at include! boundary. 
pub fn expansion_cause(&self) -> Span { - let mut expn_id = self.backtrace(); + let mut ctxt = self.backtrace(); let mut last_macro = None; loop { - if self.codemap().with_expn_info(expn_id, |info| { - info.map_or(None, |i| { - if i.callee.name() == "include" { - // Stop going up the backtrace once include! is encountered - return None; - } - expn_id = i.call_site.expn_id; - last_macro = Some(i.call_site); - return Some(()); - }) + if ctxt.outer().expn_info().map_or(None, |info| { + if info.callee.name() == "include" { + // Stop going up the backtrace once include! is encountered + return None; + } + ctxt = info.call_site.ctxt; + last_macro = Some(info.call_site); + return Some(()); }).is_none() { break } @@ -689,28 +695,6 @@ impl<'a> ExtCtxt<'a> { last_macro.expect("missing expansion backtrace") } - pub fn bt_push(&mut self, ei: ExpnInfo) { - if self.current_expansion.depth > self.ecfg.recursion_limit { - let suggested_limit = self.ecfg.recursion_limit * 2; - let mut err = self.struct_span_fatal(ei.call_site, - &format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name())); - err.help(&format!( - "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)); - err.emit(); - panic!(FatalError); - } - - let mut call_site = ei.call_site; - call_site.expn_id = self.backtrace(); - self.current_expansion.backtrace = self.codemap().record_expansion(ExpnInfo { - call_site: call_site, - callee: ei.callee - }); - } - pub fn bt_pop(&mut self) {} - pub fn struct_span_warn(&self, sp: Span, msg: &str) @@ -792,9 +776,9 @@ impl<'a> ExtCtxt<'a> { /// compilation on error, merely emits a non-fatal error and returns None. pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) -> Option> { - // Update `expr.span`'s expn_id now in case expr is an `include!` macro invocation. + // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. let expr = expr.map(|mut expr| { - expr.span.expn_id = cx.backtrace(); + expr.span.ctxt = expr.span.ctxt.apply_mark(cx.current_expansion.mark); expr }); diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 1569d9f540b..c79040424f6 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -9,13 +9,16 @@ // except according to those terms. 
use attr::HasAttrs; -use {ast, codemap}; +use ast; +use codemap::{ExpnInfo, NameAndSpan, ExpnFormat}; use ext::base::ExtCtxt; use ext::build::AstBuilder; use parse::parser::PathStyle; use symbol::Symbol; use syntax_pos::Span; +use std::collections::HashSet; + pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec { let mut result = Vec::new(); attrs.retain(|attr| { @@ -41,36 +44,35 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec result } -fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { - Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern(attr_name)), - span: Some(span), - allow_internal_unstable: true, - }, - }), - ..span +pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T + where T: HasAttrs, +{ + let (mut names, mut pretty_name) = (HashSet::new(), "derive(".to_owned()); + for (i, path) in traits.iter().enumerate() { + if i > 0 { + pretty_name.push_str(", "); + } + pretty_name.push_str(&path.to_string()); + names.insert(unwrap_or!(path.segments.get(0), continue).identifier.name); } -} + pretty_name.push(')'); -pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T { - let span = match traits.get(0) { - Some(path) => path.span, - None => return item, - }; + cx.current_expansion.mark.set_expn_info(ExpnInfo { + call_site: span, + callee: NameAndSpan { + format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)), + span: None, + allow_internal_unstable: true, + }, + }); + let span = Span { ctxt: cx.backtrace(), ..span }; item.map_attrs(|mut attrs| { - if traits.iter().any(|path| *path == "PartialEq") && - traits.iter().any(|path| *path == "Eq") { - let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); + if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) { let meta = cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); } - if traits.iter().any(|path| *path == "Copy") && - traits.iter().any(|path| *path == "Clone") { - let span = allow_unstable(cx, span, "derive(Copy, Clone)"); + if names.contains(&Symbol::intern("Copy")) && names.contains(&Symbol::intern("Clone")) { let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index e258c51a329..1b3352f73ad 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -13,6 +13,7 @@ use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use config::{is_test_or_bench, StripUnconfigured}; +use errors::FatalError; use ext::base::*; use ext::derive::{add_derived_markers, collect_derives}; use ext::hygiene::Mark; @@ -27,7 +28,7 @@ use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; -use syntax_pos::{Span, ExpnId, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use tokenstream::TokenStream; use util::small_vector::SmallVector; use visit::Visitor; @@ -273,7 +274,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let item = item .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs }); let item_with_markers = - add_derived_markers(&mut self.cx, &traits, item.clone()); + add_derived_markers(&mut self.cx, item.span(), &traits, item.clone()); let derives = 
derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new); for path in &traits { @@ -363,11 +364,26 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } fn expand_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { - match invoc.kind { + let result = match invoc.kind { InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext), InvocationKind::Attr { .. } => self.expand_attr_invoc(invoc, ext), InvocationKind::Derive { .. } => self.expand_derive_invoc(invoc, ext), + }; + + if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { + let info = self.cx.current_expansion.mark.expn_info().unwrap(); + let suggested_limit = self.cx.ecfg.recursion_limit * 2; + let mut err = self.cx.struct_span_fatal(info.call_site, + &format!("recursion limit reached while expanding the macro `{}`", + info.callee.name())); + err.help(&format!( + "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", + suggested_limit)); + err.emit(); + panic!(FatalError); } + + result } fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { @@ -378,11 +394,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: attr.span, callee: NameAndSpan { format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), - span: Some(attr.span), + span: None, allow_internal_unstable: false, } }); @@ -403,19 +419,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { SyntaxExtension::AttrProcMacro(ref mac) => { let item_toks = stream_for_item(&item, &self.cx.parse_sess); - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: attr.span, - callee: NameAndSpan { - format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), - span: None, - allow_internal_unstable: false, - }, - }), - ..attr.span - }; - - let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks); + let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; + let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks); self.parse_expansion(tok_result, kind, &attr.path, span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) 
=> { @@ -440,8 +445,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let path = &mac.node.path; let ident = ident.unwrap_or(keywords::Invalid.ident()); - let marked_tts = - noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None }); + let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { if ident.name != keywords::Invalid.name() { @@ -451,7 +455,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -470,7 +474,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); }; - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -502,7 +506,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } - self.cx.bt_push(ExpnInfo { + invoc.expansion_data.mark.set_expn_info(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(Symbol::intern(&format!("{}", path))), @@ -528,10 +532,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); }; - expanded.fold_with(&mut Marker { - mark: mark, - expn_id: Some(self.cx.backtrace()), - }) + expanded.fold_with(&mut Marker(mark)) } /// Expand a derive invocation. Returns the result of expansion. @@ -550,50 +551,33 @@ impl<'a, 'b> MacroExpander<'a, 'b> { id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false, }; - self.cx.bt_push(ExpnInfo { + let mut expn_info = ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroAttribute(pretty_name), span: None, allow_internal_unstable: false, } - }); + }; match *ext { SyntaxExtension::ProcMacroDerive(ref ext, _) => { - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: span, - callee: NameAndSpan { - format: MacroAttribute(pretty_name), - span: None, - allow_internal_unstable: false, - }, - }), - ..span - }; + invoc.expansion_data.mark.set_expn_info(expn_info); + let span = Span { ctxt: self.cx.backtrace(), ..span }; let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this name: keywords::Invalid.name(), span: DUMMY_SP, node: ast::MetaItemKind::Word, }; - return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)); + kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)) } SyntaxExtension::BuiltinDerive(func) => { - let span = Span { - expn_id: self.cx.codemap().record_expansion(ExpnInfo { - call_site: span, - callee: NameAndSpan { - format: MacroAttribute(pretty_name), - span: None, - allow_internal_unstable: true, - }, - }), - ..span - }; + expn_info.callee.allow_internal_unstable = true; + invoc.expansion_data.mark.set_expn_info(expn_info); + let span = Span { ctxt: self.cx.backtrace(), ..span }; let mut items = Vec::new(); func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a)); - return kind.expect_from_annotatables(items); + kind.expect_from_annotatables(items) } _ => { let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); @@ -753,10 +737,9 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { // Detect use of feature-gated or invalid attributes on macro invocations // since they will not be detected after macro expansion. 
fn check_attributes(&mut self, attrs: &[ast::Attribute]) { - let codemap = &self.cx.parse_sess.codemap(); let features = self.cx.ecfg.features.unwrap(); for attr in attrs.iter() { - feature_gate::check_attribute(&attr, &self.cx.parse_sess, codemap, features); + feature_gate::check_attribute(&attr, &self.cx.parse_sess, features); } } } @@ -1065,23 +1048,21 @@ impl<'feat> ExpansionConfig<'feat> { } } -// A Marker adds the given mark to the syntax context and -// sets spans' `expn_id` to the given expn_id (unless it is `None`). -struct Marker { mark: Mark, expn_id: Option } +// A Marker adds the given mark to the syntax context. +struct Marker(Mark); impl Folder for Marker { fn fold_ident(&mut self, mut ident: Ident) -> Ident { - ident.ctxt = ident.ctxt.apply_mark(self.mark); + ident.ctxt = ident.ctxt.apply_mark(self.0); ident } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - noop_fold_mac(mac, self) - } fn new_span(&mut self, mut span: Span) -> Span { - if let Some(expn_id) = self.expn_id { - span.expn_id = expn_id; - } + span.ctxt = span.ctxt.apply_mark(self.0); span } + + fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + noop_fold_mac(mac, self) + } } diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 39b92c7d007..0103d6ea959 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -185,7 +185,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: &Path) -> PathBuf { // NB: relative paths are resolved relative to the compilation unit if !arg.is_absolute() { - let callsite = cx.codemap().source_callsite(sp); + let callsite = sp.source_callsite(); let mut cu = PathBuf::from(&cx.codemap().span_to_filename(callsite)); cu.pop(); cu.push(arg); diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index d56859d805c..12e746e024d 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -34,17 +34,19 @@ impl Delimited { } pub fn open_tt(&self, span: Span) -> TokenTree { - let open_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + let open_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(open_span, self.open_token()) } pub fn close_tt(&self, span: Span) -> TokenTree { - let close_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + let close_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(close_span, self.close_token()) } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 9d280a413e6..12d25ca4274 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -28,7 +28,7 @@ use self::AttributeGate::*; use abi::Abi; use ast::{self, NodeId, PatKind, RangeEnd}; use attr; -use codemap::{CodeMap, Spanned}; +use codemap::Spanned; use syntax_pos::Span; use errors::{DiagnosticBuilder, Handler, FatalError}; use visit::{self, FnKind, Visitor}; @@ -831,7 +831,7 @@ impl GatedCfg { pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) { let (cfg, feature, has_feature) = GATED_CFGS[self.index]; - if !has_feature(features) && !sess.codemap().span_allows_unstable(self.span) { + if !has_feature(features) && !self.span.allows_unstable() 
{ let explain = format!("`cfg({})` is experimental and subject to change", cfg); emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain); } @@ -841,7 +841,6 @@ impl GatedCfg { struct Context<'a> { features: &'a Features, parse_sess: &'a ParseSess, - cm: &'a CodeMap, plugin_attributes: &'a [(String, AttributeType)], } @@ -850,7 +849,7 @@ macro_rules! gate_feature_fn { let (cx, has_feature, span, name, explain) = ($cx, $has_feature, $span, $name, $explain); let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); - if !has_feature && !cx.cm.span_allows_unstable(span) { + if !has_feature && !span.allows_unstable() { emit_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain); } }} @@ -908,12 +907,8 @@ impl<'a> Context<'a> { } } -pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, - cm: &CodeMap, features: &Features) { - let cx = Context { - features: features, parse_sess: parse_sess, - cm: cm, plugin_attributes: &[] - }; +pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { + let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] }; cx.check_attribute(attr, true); } @@ -1016,7 +1011,7 @@ struct PostExpansionVisitor<'a> { macro_rules! gate_feature_post { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{ let (cx, span) = ($cx, $span); - if !cx.context.cm.span_allows_unstable(span) { + if !span.allows_unstable() { gate_feature!(cx.context, $feature, span, $explain) } }} @@ -1096,7 +1091,7 @@ fn starts_with_digit(s: &str) -> bool { impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { - if !self.context.cm.span_allows_unstable(attr.span) { + if !attr.span.allows_unstable() { // check for gated attributes self.context.check_attribute(attr, false); } @@ -1530,7 +1525,6 @@ pub fn check_crate(krate: &ast::Crate, let ctx = Context { features: features, parse_sess: sess, - cm: sess.codemap(), plugin_attributes: plugin_attributes, }; visit::walk_crate(&mut PostExpansionVisitor { context: &ctx }, krate); diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index fd762552248..dec1b7d1d87 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -202,7 +202,7 @@ impl DiagnosticSpan { // backtrace ourselves, but the `macro_backtrace` helper makes // some decision, such as dropping some frames, and I don't // want to duplicate that logic here. 
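// Not part of the patch: macro_backtrace is now a method on Span, so the JSON
// emitter below no longer needs a CodeMap handle. A minimal usage sketch,
// assuming the method still yields the Vec<MacroBacktrace> frames (call_site,
// macro_decl_name, def_site_span) that the removed CodeMap helper produced:
//
//     fn outermost_expansion_name(sp: syntax_pos::Span) -> Option<String> {
//         sp.macro_backtrace()
//             .into_iter()
//             .last()
//             .map(|frame| frame.macro_decl_name)
//     }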
- let backtrace = je.cm.macro_backtrace(span).into_iter(); + let backtrace = span.macro_backtrace().into_iter(); DiagnosticSpan::from_span_full(span, is_primary, label, diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 6c975f3fc40..86ee1c5336d 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -125,7 +125,7 @@ pub mod ptr; pub mod show_span; pub mod std_inject; pub mod str; -pub mod symbol; +pub use syntax_pos::symbol; pub mod test; pub mod tokenstream; pub mod visit; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 43a9d8c5f78..e9eb4fbcc91 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -5036,11 +5036,7 @@ impl<'a> Parser<'a> { the path:", path); self.expect(&token::CloseDelim(token::Paren))?; // `)` - let sp = Span { - lo: start_span.lo, - hi: self.prev_span.hi, - expn_id: start_span.expn_id, - }; + let sp = start_span.to(self.prev_span); let mut err = self.span_fatal_help(sp, &msg, &suggestion); err.span_suggestion(path_span, &help_msg, format!("in {}", path)); err.emit(); // emit diagnostic, but continue with public visibility diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index c541df9230a..c7820a15fb3 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -10,29 +10,27 @@ use ast; use attr; +use ext::hygiene::{Mark, SyntaxContext}; use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; -use parse::ParseSess; use ptr::P; use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's /// call to codemap's is_internal check. /// The expanded code uses the unstable `#[prelude_import]` attribute. -fn ignored_span(sess: &ParseSess, sp: Span) -> Span { - let info = ExpnInfo { +fn ignored_span(sp: Span) -> Span { + let mark = Mark::fresh(); + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("std_inject")), span: None, allow_internal_unstable: true, } - }; - let expn_id = sess.codemap().record_expansion(info); - let mut sp = sp; - sp.expn_id = expn_id; - return sp; + }); + Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp } } pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { @@ -45,10 +43,7 @@ pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { } } -pub fn maybe_inject_crates_ref(sess: &ParseSess, - mut krate: ast::Crate, - alt_std_name: Option) - -> ast::Crate { +pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option) -> ast::Crate { let name = match injected_crate_name(&krate) { Some(name) => name, None => return krate, @@ -67,7 +62,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, span: DUMMY_SP, })); - let span = ignored_span(sess, DUMMY_SP); + let span = ignored_span(DUMMY_SP); krate.module.items.insert(0, P(ast::Item { attrs: vec![ast::Attribute { style: ast::AttrStyle::Outer, diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs deleted file mode 100644 index 2acbeee426b..00000000000 --- a/src/libsyntax/symbol.rs +++ /dev/null @@ -1,342 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! 
An "interner" is a data structure that associates values with usize tags and -//! allows bidirectional lookup; i.e. given a value, one can easily find the -//! type, and vice versa. - -use serialize::{Decodable, Decoder, Encodable, Encoder}; -use std::cell::RefCell; -use std::collections::HashMap; -use std::fmt; - -/// A symbol is an interned or gensymed string. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Symbol(u32); - -// The interner in thread-local, so `Symbol` shouldn't move between threads. -impl !Send for Symbol { } - -impl Symbol { - /// Maps a string to its interned representation. - pub fn intern(string: &str) -> Self { - with_interner(|interner| interner.intern(string)) - } - - /// gensym's a new usize, using the current interner. - pub fn gensym(string: &str) -> Self { - with_interner(|interner| interner.gensym(string)) - } - - pub fn as_str(self) -> InternedString { - with_interner(|interner| unsafe { - InternedString { - string: ::std::mem::transmute::<&str, &str>(interner.get(self)) - } - }) - } - - pub fn as_u32(self) -> u32 { - self.0 - } -} - -impl fmt::Debug for Symbol { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}({})", self, self.0) - } -} - -impl fmt::Display for Symbol { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.as_str(), f) - } -} - -impl Encodable for Symbol { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self.as_str()) - } -} - -impl Decodable for Symbol { - fn decode(d: &mut D) -> Result { - Ok(Symbol::intern(&d.read_str()?)) - } -} - -impl> PartialEq for Symbol { - fn eq(&self, other: &T) -> bool { - self.as_str() == other.deref() - } -} - -#[derive(Default)] -pub struct Interner { - names: HashMap, Symbol>, - strings: Vec>, -} - -impl Interner { - pub fn new() -> Self { - Interner::default() - } - - fn prefill(init: &[&str]) -> Self { - let mut this = Interner::new(); - for &string in init { - this.intern(string); - } - this - } - - pub fn intern(&mut self, string: &str) -> Symbol { - if let Some(&name) = self.names.get(string) { - return name; - } - - let name = Symbol(self.strings.len() as u32); - let string = string.to_string().into_boxed_str(); - self.strings.push(string.clone()); - self.names.insert(string, name); - name - } - - fn gensym(&mut self, string: &str) -> Symbol { - let gensym = Symbol(self.strings.len() as u32); - // leave out of `names` to avoid colliding - self.strings.push(string.to_string().into_boxed_str()); - gensym - } - - pub fn get(&self, name: Symbol) -> &str { - &self.strings[name.0 as usize] - } -} - -// In this macro, there is the requirement that the name (the number) must be monotonically -// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero. -macro_rules! declare_keywords {( - $( ($index: expr, $konst: ident, $string: expr) )* -) => { - pub mod keywords { - use ast; - #[derive(Clone, Copy, PartialEq, Eq)] - pub struct Keyword { - ident: ast::Ident, - } - impl Keyword { - #[inline] pub fn ident(self) -> ast::Ident { self.ident } - #[inline] pub fn name(self) -> ast::Name { self.ident.name } - } - $( - #[allow(non_upper_case_globals)] - pub const $konst: Keyword = Keyword { - ident: ast::Ident::with_empty_ctxt(super::Symbol($index)) - }; - )* - } - - impl Interner { - fn fresh() -> Self { - Interner::prefill(&[$($string,)*]) - } - } -}} - -// NB: leaving holes in the ident table is bad! 
a different ident will get -// interned with the id from the hole, but it will be between the min and max -// of the reserved words, and thus tagged as "reserved". -// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, -// this should be rarely necessary though if the keywords are kept in alphabetic order. -declare_keywords! { - // Invalid identifier - (0, Invalid, "") - - // Strict keywords used in the language. - (1, As, "as") - (2, Box, "box") - (3, Break, "break") - (4, Const, "const") - (5, Continue, "continue") - (6, Crate, "crate") - (7, Else, "else") - (8, Enum, "enum") - (9, Extern, "extern") - (10, False, "false") - (11, Fn, "fn") - (12, For, "for") - (13, If, "if") - (14, Impl, "impl") - (15, In, "in") - (16, Let, "let") - (17, Loop, "loop") - (18, Match, "match") - (19, Mod, "mod") - (20, Move, "move") - (21, Mut, "mut") - (22, Pub, "pub") - (23, Ref, "ref") - (24, Return, "return") - (25, SelfValue, "self") - (26, SelfType, "Self") - (27, Static, "static") - (28, Struct, "struct") - (29, Super, "super") - (30, Trait, "trait") - (31, True, "true") - (32, Type, "type") - (33, Unsafe, "unsafe") - (34, Use, "use") - (35, Where, "where") - (36, While, "while") - - // Keywords reserved for future use. - (37, Abstract, "abstract") - (38, Alignof, "alignof") - (39, Become, "become") - (40, Do, "do") - (41, Final, "final") - (42, Macro, "macro") - (43, Offsetof, "offsetof") - (44, Override, "override") - (45, Priv, "priv") - (46, Proc, "proc") - (47, Pure, "pure") - (48, Sizeof, "sizeof") - (49, Typeof, "typeof") - (50, Unsized, "unsized") - (51, Virtual, "virtual") - (52, Yield, "yield") - - // Weak keywords, have special meaning only in specific contexts. - (53, Default, "default") - (54, StaticLifetime, "'static") - (55, Union, "union") - (56, Catch, "catch") - - // A virtual keyword that resolves to the crate root when used in a lexical scope. - (57, CrateRoot, "{{root}}") -} - -// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. -fn with_interner T>(f: F) -> T { - thread_local!(static INTERNER: RefCell = { - RefCell::new(Interner::fresh()) - }); - INTERNER.with(|interner| f(&mut *interner.borrow_mut())) -} - -/// Represents a string stored in the thread-local interner. Because the -/// interner lives for the life of the thread, this can be safely treated as an -/// immortal string, as long as it never crosses between threads. -/// -/// FIXME(pcwalton): You must be careful about what you do in the destructors -/// of objects stored in TLS, because they may run after the interner is -/// destroyed. In particular, they must not access string contents. This can -/// be fixed in the future by just leaking all strings until thread death -/// somehow. 
-#[derive(Clone, Hash, PartialOrd, Eq, Ord)] -pub struct InternedString { - string: &'static str, -} - -impl ::std::convert::AsRef for InternedString where str: ::std::convert::AsRef { - fn as_ref(&self) -> &U { - self.string.as_ref() - } -} - -impl> ::std::cmp::PartialEq for InternedString { - fn eq(&self, other: &T) -> bool { - self.string == other.deref() - } -} - -impl ::std::cmp::PartialEq for str { - fn eq(&self, other: &InternedString) -> bool { - self == other.string - } -} - -impl<'a> ::std::cmp::PartialEq for &'a str { - fn eq(&self, other: &InternedString) -> bool { - *self == other.string - } -} - -impl ::std::cmp::PartialEq for String { - fn eq(&self, other: &InternedString) -> bool { - self == other.string - } -} - -impl<'a> ::std::cmp::PartialEq for &'a String { - fn eq(&self, other: &InternedString) -> bool { - *self == other.string - } -} - -impl !Send for InternedString { } - -impl ::std::ops::Deref for InternedString { - type Target = str; - fn deref(&self) -> &str { self.string } -} - -impl fmt::Debug for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(self.string, f) - } -} - -impl fmt::Display for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(self.string, f) - } -} - -impl Decodable for InternedString { - fn decode(d: &mut D) -> Result { - Ok(Symbol::intern(&d.read_str()?).as_str()) - } -} - -impl Encodable for InternedString { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self.string) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn interner_tests() { - let mut i: Interner = Interner::new(); - // first one is zero: - assert_eq!(i.intern("dog"), Symbol(0)); - // re-use gets the same entry: - assert_eq!(i.intern ("dog"), Symbol(0)); - // different string gets a different #: - assert_eq!(i.intern("cat"), Symbol(1)); - assert_eq!(i.intern("cat"), Symbol(1)); - // dog is still at zero - assert_eq!(i.intern("dog"), Symbol(0)); - // gensym gets 3 - assert_eq!(i.gensym("zebra"), Symbol(2)); - // gensym of same string gets new number : - assert_eq!(i.gensym("zebra"), Symbol(3)); - // gensym of *existing* string gets new number: - assert_eq!(i.gensym("dog"), Symbol(4)); - } -} diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 6fb6db9ca02..50380626d7f 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -31,6 +31,7 @@ use entry::{self, EntryPointType}; use ext::base::{ExtCtxt, Resolver}; use ext::build::AstBuilder; use ext::expand::ExpansionConfig; +use ext::hygiene::{Mark, SyntaxContext}; use fold::Folder; use util::move_map::MoveMap; use fold; @@ -62,6 +63,7 @@ struct TestCtxt<'a> { testfns: Vec, reexport_test_harness_main: Option, is_test_crate: bool, + ctxt: SyntaxContext, // top-level re-export submodule, filled out after folding is finished toplevel_reexport: Option, @@ -275,6 +277,7 @@ fn generate_test_harness(sess: &ParseSess, let mut cleaner = EntryPointCleaner { depth: 0 }; let krate = cleaner.fold_crate(krate); + let mark = Mark::fresh(); let mut cx: TestCtxt = TestCtxt { sess: sess, span_diagnostic: sd, @@ -284,15 +287,16 @@ fn generate_test_harness(sess: &ParseSess, reexport_test_harness_main: reexport_test_harness_main, is_test_crate: is_test_crate(&krate), toplevel_reexport: None, + ctxt: SyntaxContext::empty().apply_mark(mark), }; cx.ext_cx.crate_root = Some("std"); - cx.ext_cx.bt_push(ExpnInfo { + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: 
MacroAttribute(Symbol::intern("test")), span: None, - allow_internal_unstable: false, + allow_internal_unstable: true, } }); @@ -307,18 +311,7 @@ fn generate_test_harness(sess: &ParseSess, /// call to codemap's is_internal check. /// The expanded code calls some unstable functions in the test crate. fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { - let info = ExpnInfo { - call_site: sp, - callee: NameAndSpan { - format: MacroAttribute(Symbol::intern("test")), - span: None, - allow_internal_unstable: true, - } - }; - let expn_id = cx.sess.codemap().record_expansion(info); - let mut sp = sp; - sp.expn_id = expn_id; - return sp; + Span { ctxt: cx.ctxt, ..sp } } #[derive(PartialEq)] diff --git a/src/libsyntax/test_snippet.rs b/src/libsyntax/test_snippet.rs index c6d6e6237f2..c537a0ee166 100644 --- a/src/libsyntax/test_snippet.rs +++ b/src/libsyntax/test_snippet.rs @@ -83,7 +83,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { Span { lo: BytePos(start as u32), hi: BytePos(end as u32), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, } } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index b75b3efda36..86bfdebe42b 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -56,18 +56,20 @@ impl Delimited { /// Returns the opening delimiter as a token tree. pub fn open_tt(&self, span: Span) -> TokenTree { - let open_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + let open_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(open_span, self.open_token()) } /// Returns the closing delimiter as a token tree. pub fn close_tt(&self, span: Span) -> TokenTree { - let close_span = match span { - DUMMY_SP => DUMMY_SP, - _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + let close_span = if span == DUMMY_SP { + DUMMY_SP + } else { + Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span } }; TokenTree::Token(close_span, self.close_token()) } @@ -425,7 +427,7 @@ mod tests { Span { lo: BytePos(a), hi: BytePos(b), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, } } diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 767ec94a0ce..923e8072f43 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -13,7 +13,6 @@ use self::State::*; use syntax::ast; -use syntax::codemap; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; @@ -240,15 +239,6 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, } } - let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: sp, - callee: codemap::NameAndSpan { - format: codemap::MacroBang(Symbol::intern("asm")), - span: None, - allow_internal_unstable: false, - }, - }); - MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(ast::InlineAsm { @@ -260,7 +250,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, volatile: volatile, alignstack: alignstack, dialect: dialect, - expn_id: expn_id, + ctxt: cx.backtrace(), })), span: sp, attrs: ast::ThinVec::new(), diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index d14b59d6c70..1993d6ebe5b 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -111,7 +111,7 @@ fn cs_clone_shallow(name: &str, ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the 
unstable struct. - let span = super::allow_unstable(cx, span, "derive(Clone)"); + let span = Span { ctxt: cx.backtrace(), ..span}; let assert_path = cx.path_all(span, true, cx.std_path(&["clone", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 6ab5987a159..eef21492deb 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -58,7 +58,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. - let span = super::allow_unstable(cx, span, "derive(Eq)"); + let span = Span { ctxt: cx.backtrace(), ..span }; let assert_path = cx.path_all(span, true, cx.std_path(&["cmp", helper_name]), vec![], vec![ty], vec![]); diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index a767716466c..ec4cb815960 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -66,8 +66,8 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`"), }; - // We want to make sure we have the expn_id set so that we can use unstable methods - let span = Span { expn_id: cx.backtrace(), ..span }; + // We want to make sure we have the ctxt set so that we can use unstable methods + let span = Span { ctxt: cx.backtrace(), ..span }; let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); let builder = Ident::from_str("builder"); let builder_expr = cx.expr_ident(span, builder.clone()); diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 48e7ff0d243..1ff0fec1c96 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -375,7 +375,7 @@ fn find_type_parameters(ty: &ast::Ty, } fn visit_mac(&mut self, mac: &ast::Mac) { - let span = Span { expn_id: self.span.expn_id, ..mac.span }; + let span = Span { ctxt: self.span.ctxt, ..mac.span }; self.cx.span_err(span, "`derive` cannot be used on items with type macros"); } } @@ -1458,7 +1458,7 @@ impl<'a> MethodDef<'a> { .iter() .map(|v| { let ident = v.node.name; - let sp = Span { expn_id: trait_.span.expn_id, ..v.span }; + let sp = Span { ctxt: trait_.span.ctxt, ..v.span }; let summary = trait_.summarise_struct(cx, &v.node.data); (ident, sp, summary) }) @@ -1478,7 +1478,7 @@ impl<'a> TraitDef<'a> { let mut named_idents = Vec::new(); let mut just_spans = Vec::new(); for field in struct_def.fields() { - let sp = Span { expn_id: self.span.expn_id, ..field.span }; + let sp = Span { ctxt: self.span.ctxt, ..field.span }; match field.ident { Some(ident) => named_idents.push((ident, sp)), _ => just_spans.push(sp), @@ -1523,7 +1523,7 @@ impl<'a> TraitDef<'a> { let mut paths = Vec::new(); let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { - let sp = Span { expn_id: self.span.expn_id, ..struct_field.span }; + let sp = Span { ctxt: self.span.ctxt, ..struct_field.span }; let ident = cx.ident_of(&format!("{}_{}", prefix, i)); paths.push(codemap::Spanned { span: sp, @@ -1544,7 +1544,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } codemap::Spanned { - span: Span { expn_id: self.span.expn_id, ..pat.span }, + span: Span { ctxt: self.span.ctxt, 
..pat.span }, node: ast::FieldPat { ident: ident.unwrap(), pat: pat, @@ -1576,7 +1576,7 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let variant_ident = variant.node.name; - let sp = Span { expn_id: self.span.expn_id, ..variant.span }; + let sp = Span { ctxt: self.span.ctxt, ..variant.span }; let variant_path = cx.path(sp, vec![enum_ident, variant_ident]); self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl) } diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index b51591bf89d..b2bb43e41ed 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -12,9 +12,9 @@ use std::rc::Rc; use syntax::ast; -use syntax::codemap; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension, Resolver}; use syntax::ext::build::AstBuilder; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; @@ -74,20 +74,6 @@ pub mod ord; pub mod generic; -fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { - Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern(attr_name)), - span: Some(span), - allow_internal_unstable: true, - }, - }), - ..span - } -} - macro_rules! derive_traits { ($( $name:expr => $func:path, )+) => { pub fn is_builtin_trait(name: ast::Name) -> bool { @@ -177,15 +163,15 @@ fn call_intrinsic(cx: &ExtCtxt, intrinsic: &str, args: Vec>) -> P { - span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(Symbol::intern("derive")), - span: Some(span), - allow_internal_unstable: true, - }, - }); - + if cx.current_expansion.mark.expn_info().unwrap().callee.allow_internal_unstable { + span.ctxt = cx.backtrace(); + } else { // Avoid instability errors with user defined curstom derives, cc #36316 + let mut info = cx.current_expansion.mark.expn_info().unwrap(); + info.callee.allow_internal_unstable = true; + let mark = Mark::fresh(); + mark.set_expn_info(info); + span.ctxt = SyntaxContext::empty().apply_mark(mark); + } let path = cx.std_path(&["intrinsics", intrinsic]); let call = cx.expr_call_global(span, path, args); diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index d2afa08cada..aeb5b1e0a53 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -641,10 +641,11 @@ impl<'a, 'b> Context<'a, 'b> { fn format_arg(ecx: &ExtCtxt, macsp: Span, - sp: Span, + mut sp: Span, ty: &ArgumentType, arg: P) -> P { + sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); let trait_ = match *ty { Placeholder(ref tyname) => { match &tyname[..] 
{ diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 2d815b3f1bb..bb89caab709 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -17,6 +17,7 @@ use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::fold::Folder; use syntax::parse::ParseSess; use syntax::ptr::P; @@ -360,7 +361,8 @@ fn mk_registrar(cx: &mut ExtCtxt, custom_derives: &[ProcMacroDerive], custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef]) -> P { - let eid = cx.codemap().record_expansion(ExpnInfo { + let mark = Mark::fresh(); + mark.set_expn_info(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { format: MacroAttribute(Symbol::intern("proc_macro")), @@ -368,7 +370,7 @@ fn mk_registrar(cx: &mut ExtCtxt, allow_internal_unstable: true, } }); - let span = Span { expn_id: eid, ..DUMMY_SP }; + let span = Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..DUMMY_SP }; let proc_macro = Ident::from_str("proc_macro"); let krate = cx.item(span, diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index feebbcd6f03..8a9ff647b3e 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -15,12 +15,16 @@ //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 +use Span; +use symbol::Symbol; + +use serialize::{Encodable, Decodable, Encoder, Decoder}; use std::cell::RefCell; use std::collections::HashMap; use std::fmt; /// A SyntaxContext represents a chain of macro expansions (represented by marks). 
-#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Default)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct SyntaxContext(u32); #[derive(Copy, Clone)] @@ -36,8 +40,8 @@ pub struct Mark(u32); impl Mark { pub fn fresh() -> Self { HygieneData::with(|data| { - let next_mark = Mark(data.next_mark.0 + 1); - ::std::mem::replace(&mut data.next_mark, next_mark) + data.marks.push(None); + Mark(data.marks.len() as u32 - 1) }) } @@ -53,23 +57,31 @@ impl Mark { pub fn from_u32(raw: u32) -> Mark { Mark(raw) } + + pub fn expn_info(self) -> Option { + HygieneData::with(|data| data.marks[self.0 as usize].clone()) + } + + pub fn set_expn_info(self, info: ExpnInfo) { + HygieneData::with(|data| data.marks[self.0 as usize] = Some(info)) + } } struct HygieneData { + marks: Vec>, syntax_contexts: Vec, markings: HashMap<(SyntaxContext, Mark), SyntaxContext>, - next_mark: Mark, } impl HygieneData { fn new() -> Self { HygieneData { + marks: vec![None], syntax_contexts: vec![SyntaxContextData { outer_mark: Mark::root(), prev_ctxt: SyntaxContext::empty(), }], markings: HashMap::new(), - next_mark: Mark(1), } } @@ -81,8 +93,8 @@ impl HygieneData { } } -pub fn reset_hygiene_data() { - HygieneData::with(|data| *data = HygieneData::new()) +pub fn clear_markings() { + HygieneData::with(|data| data.markings = HashMap::new()); } impl SyntaxContext { @@ -113,6 +125,10 @@ impl SyntaxContext { }) }) } + + pub fn outer(self) -> Mark { + HygieneData::with(|data| data.syntax_contexts[self.0 as usize].outer_mark) + } } impl fmt::Debug for SyntaxContext { @@ -120,3 +136,67 @@ impl fmt::Debug for SyntaxContext { write!(f, "#{}", self.0) } } + +/// Extra information for tracking spans of macro and syntax sugar expansion +#[derive(Clone, Hash, Debug)] +pub struct ExpnInfo { + /// The location of the actual macro invocation or syntax sugar , e.g. + /// `let x = foo!();` or `if let Some(y) = x {}` + /// + /// This may recursively refer to other macro invocations, e.g. if + /// `foo!()` invoked `bar!()` internally, and there was an + /// expression inside `bar!`; the call_site of the expression in + /// the expansion would point to the `bar!` invocation; that + /// call_site span would have its own ExpnInfo, with the call_site + /// pointing to the `foo!` invocation. + pub call_site: Span, + /// Information about the expansion. + pub callee: NameAndSpan +} + +#[derive(Clone, Hash, Debug)] +pub struct NameAndSpan { + /// The format with which the macro was invoked. + pub format: ExpnFormat, + /// Whether the macro is allowed to use #[unstable]/feature-gated + /// features internally without forcing the whole crate to opt-in + /// to them. + pub allow_internal_unstable: bool, + /// The span of the macro definition itself. The macro may not + /// have a sensible definition span (e.g. something defined + /// completely inside libsyntax) in which case this is None. + pub span: Option +} + +impl NameAndSpan { + pub fn name(&self) -> Symbol { + match self.format { + ExpnFormat::MacroAttribute(s) | + ExpnFormat::MacroBang(s) | + ExpnFormat::CompilerDesugaring(s) => s, + } + } +} + +/// The source of expansion. +#[derive(Clone, Hash, Debug, PartialEq, Eq)] +pub enum ExpnFormat { + /// e.g. #[derive(...)] + MacroAttribute(Symbol), + /// e.g. `format!()` + MacroBang(Symbol), + /// Desugaring done by the compiler during HIR lowering. 
+ CompilerDesugaring(Symbol) +} + +impl Encodable for SyntaxContext { + fn encode(&self, _: &mut E) -> Result<(), E::Error> { + Ok(()) // FIXME(jseyfried) intercrate hygiene + } +} + +impl Decodable for SyntaxContext { + fn decode(_: &mut D) -> Result { + Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene + } +} diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 1b62d62348b..9b45e364ecf 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -25,6 +25,7 @@ #![feature(const_fn)] #![feature(custom_attribute)] +#![feature(optin_builtin_traits)] #![allow(unused_attributes)] #![feature(rustc_private)] #![feature(staged_api)] @@ -43,6 +44,9 @@ extern crate serialize; extern crate serialize as rustc_serialize; // used by deriving pub mod hygiene; +pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan}; + +pub mod symbol; pub type FileName = String; @@ -60,7 +64,7 @@ pub struct Span { pub hi: BytePos, /// Information about where the macro came from, if this piece of /// code was created by a macro expansion. - pub expn_id: ExpnId + pub ctxt: SyntaxContext, } /// A collection of spans. Spans have two orthogonal attributes: @@ -79,7 +83,7 @@ impl Span { /// Returns a new span representing just the end-point of this span pub fn end_point(self) -> Span { let lo = cmp::max(self.hi.0 - 1, self.lo.0); - Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} + Span { lo: BytePos(lo), hi: self.hi, ctxt: self.ctxt } } /// Returns `self` if `self` is not the dummy span, and `other` otherwise. @@ -107,6 +111,69 @@ impl Span { None } } + + /// Return the source span - this is either the supplied span, or the span for + /// the macro callsite that expanded to it. + pub fn source_callsite(self) -> Span { + self.ctxt.outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) + } + + /// Return the source callee. + /// + /// Returns None if the supplied span has no expansion trace, + /// else returns the NameAndSpan for the macro definition + /// corresponding to the source callsite. + pub fn source_callee(self) -> Option { + fn source_callee(info: ExpnInfo) -> NameAndSpan { + match info.call_site.ctxt.outer().expn_info() { + Some(info) => source_callee(info), + None => info.callee, + } + } + self.ctxt.outer().expn_info().map(source_callee) + } + + /// Check if a span is "internal" to a macro in which #[unstable] + /// items can be used (that is, a macro marked with + /// `#[allow_internal_unstable]`). + pub fn allows_unstable(&self) -> bool { + match self.ctxt.outer().expn_info() { + Some(info) => info.callee.allow_internal_unstable, + None => false, + } + } + + pub fn macro_backtrace(mut self) -> Vec { + let mut prev_span = DUMMY_SP; + let mut result = vec![]; + loop { + let info = match self.ctxt.outer().expn_info() { + Some(info) => info, + None => break, + }; + + let (pre, post) = match info.callee.format { + ExpnFormat::MacroAttribute(..) => ("#[", "]"), + ExpnFormat::MacroBang(..) => ("", "!"), + ExpnFormat::CompilerDesugaring(..) 
=> ("desugaring of `", "`"), + }; + let macro_decl_name = format!("{}{}{}", pre, info.callee.name(), post); + let def_site_span = info.callee.span; + + // Don't print recursive invocations + if !info.call_site.source_equal(&prev_span) { + result.push(MacroBacktrace { + call_site: info.call_site, + macro_decl_name: macro_decl_name, + def_site_span: def_site_span, + }); + } + + prev_span = self; + self = info.call_site; + } + result + } } #[derive(Clone, Debug)] @@ -147,8 +214,8 @@ impl serialize::UseSpecializedDecodable for Span { } fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}", - span.lo, span.hi, span.expn_id) + write!(f, "Span {{ lo: {:?}, hi: {:?}, ctxt: {:?} }}", + span.lo, span.hi, span.ctxt) } impl fmt::Debug for Span { @@ -157,12 +224,7 @@ impl fmt::Debug for Span { } } -pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; - -// Generic span to be used for code originating from the command line -pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0), - hi: BytePos(0), - expn_id: COMMAND_LINE_EXPN }; +pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), ctxt: NO_EXPANSION }; impl MultiSpan { pub fn new() -> MultiSpan { @@ -256,22 +318,7 @@ impl From for MultiSpan { } } -#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy, Ord, PartialOrd)] -pub struct ExpnId(pub u32); - -pub const NO_EXPANSION: ExpnId = ExpnId(!0); -// For code appearing from the command line -pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); - -impl ExpnId { - pub fn from_u32(id: u32) -> ExpnId { - ExpnId(id) - } - - pub fn into_u32(self) -> u32 { - self.0 - } -} +pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty(); /// Identifies an offset of a multi-byte character in a FileMap #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)] @@ -651,7 +698,7 @@ thread_local!(pub static SPAN_DEBUG: Cell fmt:: /* assuming that we're not in macro expansion */ pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span {lo: lo, hi: hi, expn_id: NO_EXPANSION} + Span {lo: lo, hi: hi, ctxt: NO_EXPANSION} } pub struct MacroBacktrace { diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs new file mode 100644 index 00000000000..b866652c49f --- /dev/null +++ b/src/libsyntax_pos/symbol.rs @@ -0,0 +1,389 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An "interner" is a data structure that associates values with usize tags and +//! allows bidirectional lookup; i.e. given a value, one can easily find the +//! type, and vice versa. + +use hygiene::SyntaxContext; + +use serialize::{Decodable, Decoder, Encodable, Encoder}; +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt; + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct Ident { + pub name: Symbol, + pub ctxt: SyntaxContext, +} + +impl Ident { + pub const fn with_empty_ctxt(name: Symbol) -> Ident { + Ident { name: name, ctxt: SyntaxContext::empty() } + } + + /// Maps a string to an identifier with an empty syntax context. 
+ pub fn from_str(string: &str) -> Ident { + Ident::with_empty_ctxt(Symbol::intern(string)) + } + + pub fn unhygienize(self) -> Ident { + Ident { name: self.name, ctxt: SyntaxContext::empty() } + } +} + +impl fmt::Debug for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}{:?}", self.name, self.ctxt) + } +} + +impl fmt::Display for Ident { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.name, f) + } +} + +impl Encodable for Ident { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.name.encode(s) + } +} + +impl Decodable for Ident { + fn decode(d: &mut D) -> Result { + Ok(Ident::with_empty_ctxt(Symbol::decode(d)?)) + } +} + +/// A symbol is an interned or gensymed string. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Symbol(u32); + +// The interner in thread-local, so `Symbol` shouldn't move between threads. +impl !Send for Symbol { } + +impl Symbol { + /// Maps a string to its interned representation. + pub fn intern(string: &str) -> Self { + with_interner(|interner| interner.intern(string)) + } + + /// gensym's a new usize, using the current interner. + pub fn gensym(string: &str) -> Self { + with_interner(|interner| interner.gensym(string)) + } + + pub fn as_str(self) -> InternedString { + with_interner(|interner| unsafe { + InternedString { + string: ::std::mem::transmute::<&str, &str>(interner.get(self)) + } + }) + } + + pub fn as_u32(self) -> u32 { + self.0 + } +} + +impl fmt::Debug for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}({})", self, self.0) + } +} + +impl fmt::Display for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.as_str(), f) + } +} + +impl Encodable for Symbol { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(&self.as_str()) + } +} + +impl Decodable for Symbol { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?)) + } +} + +impl> PartialEq for Symbol { + fn eq(&self, other: &T) -> bool { + self.as_str() == other.deref() + } +} + +#[derive(Default)] +pub struct Interner { + names: HashMap, Symbol>, + strings: Vec>, +} + +impl Interner { + pub fn new() -> Self { + Interner::default() + } + + fn prefill(init: &[&str]) -> Self { + let mut this = Interner::new(); + for &string in init { + this.intern(string); + } + this + } + + pub fn intern(&mut self, string: &str) -> Symbol { + if let Some(&name) = self.names.get(string) { + return name; + } + + let name = Symbol(self.strings.len() as u32); + let string = string.to_string().into_boxed_str(); + self.strings.push(string.clone()); + self.names.insert(string, name); + name + } + + fn gensym(&mut self, string: &str) -> Symbol { + let gensym = Symbol(self.strings.len() as u32); + // leave out of `names` to avoid colliding + self.strings.push(string.to_string().into_boxed_str()); + gensym + } + + pub fn get(&self, name: Symbol) -> &str { + &self.strings[name.0 as usize] + } +} + +// In this macro, there is the requirement that the name (the number) must be monotonically +// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, +// except starting from the next number instead of zero. +macro_rules! 
declare_keywords {( + $( ($index: expr, $konst: ident, $string: expr) )* +) => { + pub mod keywords { + use super::{Symbol, Ident}; + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: Ident, + } + impl Keyword { + #[inline] pub fn ident(self) -> Ident { self.ident } + #[inline] pub fn name(self) -> Symbol { self.ident.name } + } + $( + #[allow(non_upper_case_globals)] + pub const $konst: Keyword = Keyword { + ident: Ident::with_empty_ctxt(super::Symbol($index)) + }; + )* + } + + impl Interner { + fn fresh() -> Self { + Interner::prefill(&[$($string,)*]) + } + } +}} + +// NB: leaving holes in the ident table is bad! a different ident will get +// interned with the id from the hole, but it will be between the min and max +// of the reserved words, and thus tagged as "reserved". +// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, +// this should be rarely necessary though if the keywords are kept in alphabetic order. +declare_keywords! { + // Invalid identifier + (0, Invalid, "") + + // Strict keywords used in the language. + (1, As, "as") + (2, Box, "box") + (3, Break, "break") + (4, Const, "const") + (5, Continue, "continue") + (6, Crate, "crate") + (7, Else, "else") + (8, Enum, "enum") + (9, Extern, "extern") + (10, False, "false") + (11, Fn, "fn") + (12, For, "for") + (13, If, "if") + (14, Impl, "impl") + (15, In, "in") + (16, Let, "let") + (17, Loop, "loop") + (18, Match, "match") + (19, Mod, "mod") + (20, Move, "move") + (21, Mut, "mut") + (22, Pub, "pub") + (23, Ref, "ref") + (24, Return, "return") + (25, SelfValue, "self") + (26, SelfType, "Self") + (27, Static, "static") + (28, Struct, "struct") + (29, Super, "super") + (30, Trait, "trait") + (31, True, "true") + (32, Type, "type") + (33, Unsafe, "unsafe") + (34, Use, "use") + (35, Where, "where") + (36, While, "while") + + // Keywords reserved for future use. + (37, Abstract, "abstract") + (38, Alignof, "alignof") + (39, Become, "become") + (40, Do, "do") + (41, Final, "final") + (42, Macro, "macro") + (43, Offsetof, "offsetof") + (44, Override, "override") + (45, Priv, "priv") + (46, Proc, "proc") + (47, Pure, "pure") + (48, Sizeof, "sizeof") + (49, Typeof, "typeof") + (50, Unsized, "unsized") + (51, Virtual, "virtual") + (52, Yield, "yield") + + // Weak keywords, have special meaning only in specific contexts. + (53, Default, "default") + (54, StaticLifetime, "'static") + (55, Union, "union") + (56, Catch, "catch") + + // A virtual keyword that resolves to the crate root when used in a lexical scope. + (57, CrateRoot, "{{root}}") +} + +// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. +fn with_interner T>(f: F) -> T { + thread_local!(static INTERNER: RefCell = { + RefCell::new(Interner::fresh()) + }); + INTERNER.with(|interner| f(&mut *interner.borrow_mut())) +} + +/// Represents a string stored in the thread-local interner. Because the +/// interner lives for the life of the thread, this can be safely treated as an +/// immortal string, as long as it never crosses between threads. +/// +/// FIXME(pcwalton): You must be careful about what you do in the destructors +/// of objects stored in TLS, because they may run after the interner is +/// destroyed. In particular, they must not access string contents. This can +/// be fixed in the future by just leaking all strings until thread death +/// somehow. 
+#[derive(Clone, Hash, PartialOrd, Eq, Ord)] +pub struct InternedString { + string: &'static str, +} + +impl ::std::convert::AsRef for InternedString where str: ::std::convert::AsRef { + fn as_ref(&self) -> &U { + self.string.as_ref() + } +} + +impl> ::std::cmp::PartialEq for InternedString { + fn eq(&self, other: &T) -> bool { + self.string == other.deref() + } +} + +impl ::std::cmp::PartialEq for str { + fn eq(&self, other: &InternedString) -> bool { + self == other.string + } +} + +impl<'a> ::std::cmp::PartialEq for &'a str { + fn eq(&self, other: &InternedString) -> bool { + *self == other.string + } +} + +impl ::std::cmp::PartialEq for String { + fn eq(&self, other: &InternedString) -> bool { + self == other.string + } +} + +impl<'a> ::std::cmp::PartialEq for &'a String { + fn eq(&self, other: &InternedString) -> bool { + *self == other.string + } +} + +impl !Send for InternedString { } + +impl ::std::ops::Deref for InternedString { + type Target = str; + fn deref(&self) -> &str { self.string } +} + +impl fmt::Debug for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(self.string, f) + } +} + +impl fmt::Display for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self.string, f) + } +} + +impl Decodable for InternedString { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?).as_str()) + } +} + +impl Encodable for InternedString { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(self.string) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn interner_tests() { + let mut i: Interner = Interner::new(); + // first one is zero: + assert_eq!(i.intern("dog"), Symbol(0)); + // re-use gets the same entry: + assert_eq!(i.intern ("dog"), Symbol(0)); + // different string gets a different #: + assert_eq!(i.intern("cat"), Symbol(1)); + assert_eq!(i.intern("cat"), Symbol(1)); + // dog is still at zero + assert_eq!(i.intern("dog"), Symbol(0)); + // gensym gets 3 + assert_eq!(i.gensym("zebra"), Symbol(2)); + // gensym of same string gets new number : + assert_eq!(i.gensym("zebra"), Symbol(3)); + // gensym of *existing* string gets new number: + assert_eq!(i.gensym("dog"), Symbol(4)); + } +} diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index bd25561065b..272bf1150ca 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -27,14 +27,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23"); diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index d692bb519c1..5518ab47c2b 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -30,14 +30,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; println!("{}", pprust::expr_to_string(&*quote_expr!(&cx, 23))); 
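
The qquote tests touched in this commit drop their `bt_push(ExpnInfo { .. })` setup because expansion information no longer lives in a codemap-side `ExpnId` table: it now hangs off a hygiene `Mark`, and a span reaches it through `span.ctxt.outer().expn_info()`. Below is a minimal sketch of the new idiom, mirroring the `proc_macro_registrar` and `deriving` hunks above; it assumes the in-tree `syntax_pos` crate as modified by this patch, and the helper names are illustrative only, not part of the series.

    use syntax_pos::{Span, ExpnInfo, NameAndSpan, ExpnFormat};
    use syntax_pos::hygiene::{Mark, SyntaxContext};
    use syntax_pos::symbol::Symbol;

    // Illustrative helper: tag `sp` as coming from a `#[test]`-style expansion.
    fn mark_as_expansion(sp: Span) -> Span {
        // Allocate a fresh mark and hang the expansion info off it, where
        // `codemap().record_expansion(..)` / `bt_push(..)` used to be called.
        let mark = Mark::fresh();
        mark.set_expn_info(ExpnInfo {
            call_site: sp,
            callee: NameAndSpan {
                format: ExpnFormat::MacroAttribute(Symbol::intern("test")),
                span: None,
                allow_internal_unstable: true,
            },
        });
        // The span now carries the mark in its SyntaxContext instead of an ExpnId.
        Span { ctxt: SyntaxContext::empty().apply_mark(mark), ..sp }
    }

    // Reading the information back walks the mark chain on the context,
    // which is what the new `Span::allows_unstable` does.
    fn allows_unstable(sp: Span) -> bool {
        sp.ctxt.outer().expn_info()
            .map(|info| info.callee.allow_internal_unstable)
            .unwrap_or(false)
    }
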
diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index b4ed57192cc..4a8246ec429 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -26,14 +26,6 @@ fn main() { &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); - cx.bt_push(syntax::codemap::ExpnInfo { - call_site: DUMMY_SP, - callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(Symbol::intern("")), - allow_internal_unstable: false, - span: None, - } - }); let cx = &mut cx; macro_rules! check { -- cgit 1.4.1-3-g733a5 From f08d5ad4c59ca5fc1c961a94c53807d70959c375 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Wed, 15 Mar 2017 00:22:48 +0000 Subject: Refactor how spans are combined in the parser. --- src/librustc/middle/resolve_lifetime.rs | 5 +- src/librustc_metadata/cstore_impl.rs | 4 +- src/librustc_metadata/decoder.rs | 6 +- src/librustc_save_analysis/lib.rs | 2 +- src/librustc_save_analysis/span_utils.rs | 4 +- src/libsyntax/ast.rs | 10 +- src/libsyntax/attr.rs | 15 +- src/libsyntax/codemap.rs | 4 - src/libsyntax/ext/tt/macro_parser.rs | 9 +- src/libsyntax/parse/attr.rs | 29 +- src/libsyntax/parse/lexer/mod.rs | 42 +- src/libsyntax/parse/lexer/unicode_chars.rs | 4 +- src/libsyntax/parse/mod.rs | 6 +- src/libsyntax/parse/parser.rs | 617 ++++++++++----------- src/libsyntax_ext/format.rs | 15 +- src/libsyntax_pos/lib.rs | 16 +- src/test/compile-fail/imports/macro-paths.rs | 2 - src/test/compile-fail/imports/macros.rs | 2 - .../compile-fail/imports/shadow_builtin_macros.rs | 1 - src/test/compile-fail/issue-25385.rs | 1 - src/test/run-pass/syntax-extension-source-utils.rs | 2 +- .../ui/macros/macro_path_as_generic_bound.stderr | 5 +- 22 files changed, 363 insertions(+), 438 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 5094e28475b..8037570d24a 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -29,7 +29,7 @@ use syntax::ast; use syntax::attr; use syntax::ptr::P; use syntax::symbol::keywords; -use syntax_pos::{mk_sp, Span}; +use syntax_pos::Span; use errors::DiagnosticBuilder; use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap}; use rustc_back::slice; @@ -1468,8 +1468,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { self.resolve_lifetime_ref(bound); } else { self.insert_lifetime(bound, Region::Static); - let full_span = mk_sp(lifetime_i.lifetime.span.lo, bound.span.hi); - self.sess.struct_span_warn(full_span, + self.sess.struct_span_warn(lifetime_i.lifetime.span.to(bound.span), &format!("unnecessary lifetime parameter `{}`", lifetime_i.lifetime.name)) .help(&format!("you can use the `'static` lifetime directly, in place \ of `{}`", lifetime_i.lifetime.name)) diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 3e9b6a6226a..41a2e8a8d55 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -36,7 +36,7 @@ use syntax::ast; use syntax::attr; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; -use syntax_pos::{mk_sp, Span}; +use syntax_pos::{Span, NO_EXPANSION}; use rustc::hir::svh::Svh; use rustc_back::target::Target; use rustc::hir; @@ -395,7 +395,7 @@ impl CrateStore for cstore::CStore { let source_name = format!("<{} macros>", name); let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body); - let local_span = 
mk_sp(filemap.start_pos, filemap.end_pos); + let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; let body = filemap_to_stream(&sess.parse_sess, filemap); // Mark the attrs as used diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3de1e3442c6..43e076e799b 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -39,7 +39,7 @@ use syntax::attr; use syntax::ast; use syntax::codemap; use syntax::ext::base::MacroKind; -use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP}; +use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION}; pub struct DecodeContext<'a, 'tcx: 'a> { opaque: opaque::Decoder<'a>, @@ -243,7 +243,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let sess = if let Some(sess) = self.sess { sess } else { - return Ok(syntax_pos::mk_sp(lo, hi)); + return Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }); }; let (lo, hi) = if lo > hi { @@ -290,7 +290,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos; let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; - Ok(syntax_pos::mk_sp(lo, hi)) + Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }) } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index fd6803e087a..1de9fbc8e49 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -742,7 +742,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let ident_start = text.find(&name).expect("Name not in signature?"); let ident_end = ident_start + name.len(); Signature { - span: mk_sp(item.span.lo, item.span.lo + BytePos(text.len() as u32)), + span: Span { hi: item.span.lo + BytePos(text.len() as u32), ..item.span }, text: text, ident_start: ident_start, ident_end: ident_end, diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index c19f805a285..af3efb48090 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -305,10 +305,10 @@ impl<'a> SpanUtils<'a> { continue; } if let TokenTree::Token(_, token::Semi) = tok { - return self.snippet(mk_sp(first_span.lo, prev.span().hi)); + return self.snippet(first_span.to(prev.span())); } else if let TokenTree::Delimited(_, ref d) = tok { if d.delim == token::Brace { - return self.snippet(mk_sp(first_span.lo, prev.span().hi)); + return self.snippet(first_span.to(prev.span())); } } prev = tok; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index a4bebd311de..9eb86aa006d 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -17,7 +17,7 @@ pub use self::PathParameters::*; pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; -use syntax_pos::{mk_sp, BytePos, Span, DUMMY_SP}; +use syntax_pos::{Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; @@ -1433,7 +1433,7 @@ impl Arg { TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::ImplicitSelf => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } - _ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi), + _ => Some(respan(self.pat.span.to(self.ty.span), SelfKind::Explicit(self.ty.clone(), mutbl))), } } @@ -1450,7 +1450,7 @@ impl Arg { } pub fn from_self(eself: ExplicitSelf, eself_ident: SpannedIdent) -> Arg { - let span = mk_sp(eself.span.lo, eself_ident.span.hi); + let span = 
eself.span.to(eself_ident.span); let infer_ty = P(Ty { id: DUMMY_NODE_ID, node: TyKind::ImplicitSelf, @@ -1687,11 +1687,11 @@ pub struct PolyTraitRef { } impl PolyTraitRef { - pub fn new(lifetimes: Vec, path: Path, lo: BytePos, hi: BytePos) -> Self { + pub fn new(lifetimes: Vec, path: Path, span: Span) -> Self { PolyTraitRef { bound_lifetimes: lifetimes, trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID }, - span: mk_sp(lo, hi), + span: span, } } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 2f1efd6ad00..5dcce2572af 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -18,8 +18,8 @@ use ast; use ast::{AttrId, Attribute, Name, Ident}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind}; -use codemap::{Spanned, spanned, dummy_spanned, mk_sp}; -use syntax_pos::{Span, BytePos, DUMMY_SP}; +use codemap::{Spanned, respan, dummy_spanned}; +use syntax_pos::{Span, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; @@ -447,17 +447,16 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute } } -pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos) - -> Attribute { +pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, span: Span) -> Attribute { let style = doc_comment_style(&text.as_str()); - let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked)); + let lit = respan(span, LitKind::Str(text, ast::StrStyle::Cooked)); Attribute { id: id, style: style, - path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")), - tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)), + path: ast::Path::from_ident(span, ast::Ident::from_str("doc")), + tokens: MetaItemKind::NameValue(lit).tokens(span), is_sugared_doc: true, - span: mk_sp(lo, hi), + span: span, } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index ba199eacb62..4d67390d442 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -49,10 +49,6 @@ pub struct Spanned { pub span: Span, } -pub fn spanned(lo: BytePos, hi: BytePos, t: T) -> Spanned { - respan(mk_sp(lo, hi), t) -} - pub fn respan(sp: Span, t: T) -> Spanned { Spanned {node: t, span: sp} } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index ed17f0f956c..9ee427eed35 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -79,7 +79,7 @@ pub use self::ParseResult::*; use self::TokenTreeOrTokenTreeVec::*; use ast::Ident; -use syntax_pos::{self, BytePos, mk_sp, Span}; +use syntax_pos::{self, BytePos, Span}; use codemap::Spanned; use errors::FatalError; use ext::tt::quoted::{self, TokenTree}; @@ -285,7 +285,7 @@ fn inner_parse_loop(sess: &ParseSess, eof_eis: &mut SmallVector>, bb_eis: &mut SmallVector>, token: &Token, - span: &syntax_pos::Span) + span: syntax_pos::Span) -> ParseResult<()> { while let Some(mut ei) = cur_eis.pop() { // When unzipped trees end, remove them @@ -323,8 +323,7 @@ fn inner_parse_loop(sess: &ParseSess, for idx in ei.match_lo..ei.match_hi { let sub = ei.matches[idx].clone(); new_pos.matches[idx] - .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo, - span.hi)))); + .push(Rc::new(MatchedSeq(sub, Span { lo: ei.sp_lo, ..span }))); } new_pos.match_cur = ei.match_hi; @@ -426,7 +425,7 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op 
assert!(next_eis.is_empty()); match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis, - &parser.token, &parser.span) { + &parser.token, parser.span) { Success(_) => {}, Failure(sp, tok) => return Failure(sp, tok), Error(sp, msg) => return Error(sp, msg), diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 53106214fa3..92cec462ffb 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -10,8 +10,7 @@ use attr; use ast; -use syntax_pos::{mk_sp, Span}; -use codemap::spanned; +use codemap::respan; use parse::common::SeqSep; use parse::PResult; use parse::token::{self, Nonterminal}; @@ -49,8 +48,7 @@ impl<'a> Parser<'a> { just_parsed_doc_comment = false; } token::DocComment(s) => { - let Span { lo, hi, .. } = self.span; - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); if attr.style != ast::AttrStyle::Outer { let mut err = self.fatal("expected outer doc comment"); err.note("inner doc comments like this (starting with \ @@ -94,7 +92,7 @@ impl<'a> Parser<'a> { self.token); let (span, path, tokens, mut style) = match self.token { token::Pound => { - let lo = self.span.lo; + let lo = self.span; self.bump(); if inner_parse_policy == InnerAttributeParsePolicy::Permitted { @@ -122,9 +120,9 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Bracket))?; let (path, tokens) = self.parse_path_and_tokens()?; self.expect(&token::CloseDelim(token::Bracket))?; - let hi = self.prev_span.hi; + let hi = self.prev_span; - (mk_sp(lo, hi), path, tokens, style) + (lo.to(hi), path, tokens, style) } _ => { let token_str = self.this_token_to_string(); @@ -189,8 +187,7 @@ impl<'a> Parser<'a> { } token::DocComment(s) => { // we need to get the position of this token before we bump. - let Span { lo, hi, .. 
} = self.span; - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); if attr.style == ast::AttrStyle::Inner { attrs.push(attr); self.bump(); @@ -238,11 +235,10 @@ impl<'a> Parser<'a> { return Ok(meta); } - let lo = self.span.lo; + let lo = self.span; let ident = self.parse_ident()?; let node = self.parse_meta_item_kind()?; - let hi = self.prev_span.hi; - Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + Ok(ast::MetaItem { name: ident.name, node: node, span: lo.to(self.prev_span) }) } pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { @@ -258,26 +254,25 @@ impl<'a> Parser<'a> { /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ; fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> { - let sp = self.span; - let lo = self.span.lo; + let lo = self.span; match self.parse_unsuffixed_lit() { Ok(lit) => { - return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::Literal(lit))) + return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::Literal(lit))) } Err(ref mut err) => self.diagnostic().cancel(err) } match self.parse_meta_item() { Ok(mi) => { - return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::MetaItem(mi))) + return Ok(respan(lo.to(self.prev_span), ast::NestedMetaItemKind::MetaItem(mi))) } Err(ref mut err) => self.diagnostic().cancel(err) } let found = self.this_token_to_string(); let msg = format!("expected unsuffixed literal or identifier, found {}", found); - Err(self.diagnostic().struct_span_err(sp, &msg)) + Err(self.diagnostic().struct_span_err(lo, &msg)) } /// matches meta_seq = ( COMMASEP(meta_item_inner) ) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d48cf6911ed..920b2c401e2 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -9,7 +9,7 @@ // except according to those terms. use ast::{self, Ident}; -use syntax_pos::{self, BytePos, CharPos, Pos, Span}; +use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; use codemap::CodeMap; use errors::{FatalError, DiagnosticBuilder}; use parse::{token, ParseSess}; @@ -68,6 +68,10 @@ pub struct StringReader<'a> { open_braces: Vec<(token::DelimToken, Span)>, } +fn mk_sp(lo: BytePos, hi: BytePos) -> Span { + Span { lo: lo, hi: hi, ctxt: NO_EXPANSION } +} + impl<'a> StringReader<'a> { fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); @@ -225,12 +229,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m) + self.fatal_span(mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). 
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m) + self.err_span(mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -254,7 +258,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -278,7 +282,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -302,11 +306,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos); + self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = syntax_pos::mk_sp(start_bytepos, self.pos); + self.peek_span = mk_sp(start_bytepos, self.pos); }; } } @@ -489,7 +493,7 @@ impl<'a> StringReader<'a> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; - self.sess.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), msg); + self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg); } } @@ -532,13 +536,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }; } @@ -571,7 +575,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: syntax_pos::mk_sp(start, self.pos), + sp: mk_sp(start, self.pos), }); } } @@ -599,7 +603,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -661,7 +665,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.pos), + sp: mk_sp(start_bpos, self.pos), }) }) } @@ -858,7 +862,7 @@ impl<'a> StringReader<'a> { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = syntax_pos::mk_sp(start, self.pos); + let span = mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -896,13 +900,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(syntax_pos::mk_sp(escaped_pos, pos), + err.span_help(mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(syntax_pos::mk_sp(escaped_pos, pos), + err.span_help(mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } @@ -1735,7 +1739,7 @@ mod tests { sp: Span { lo: BytePos(21), hi: BytePos(23), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }, }; 
assert_eq!(tok1, tok2); @@ -1749,7 +1753,7 @@ mod tests { sp: Span { lo: BytePos(24), hi: BytePos(28), - expn_id: NO_EXPANSION, + ctxt: NO_EXPANSION, }, }; assert_eq!(tok3, tok4); @@ -1908,7 +1912,7 @@ mod tests { let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let comment = lexer.next_token(); assert_eq!(comment.tok, token::Comment); - assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); + assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, token::DocComment(Symbol::intern("/// test"))); diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index 6da3e5de75c..4df23da3c9c 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -11,7 +11,7 @@ // Characters and their corresponding confusables were collected from // http://www.unicode.org/Public/security/revision-06/confusables.txt -use syntax_pos::mk_sp as make_span; +use syntax_pos::{Span, NO_EXPANSION}; use errors::DiagnosticBuilder; use super::StringReader; @@ -234,7 +234,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>, .iter() .find(|&&(c, _, _)| c == ch) .map(|&(_, u_name, ascii_char)| { - let span = make_span(reader.pos, reader.next_pos); + let span = Span { lo: reader.pos, hi: reader.next_pos, ctxt: NO_EXPANSION }; match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) { Some(&(ascii_char, ascii_name)) => { let msg = diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index e188bcaf105..b5d0a46de49 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -12,7 +12,7 @@ use ast::{self, CrateConfig}; use codemap::CodeMap; -use syntax_pos::{self, Span, FileMap}; +use syntax_pos::{self, Span, FileMap, NO_EXPANSION}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; @@ -178,7 +178,7 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, ) -> Par let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { - parser.span = syntax_pos::mk_sp(end_pos, end_pos); + parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; } parser @@ -665,7 +665,7 @@ mod tests { // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { - Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION} + Span {lo: BytePos(a), hi: BytePos(b), ctxt: NO_EXPANSION} } fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index e9eb4fbcc91..b0611d75290 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -40,8 +40,8 @@ use ast::{Visibility, WhereClause}; use ast::{BinOpKind, UnOp}; use ast::RangeEnd; use {ast, attr}; -use codemap::{self, CodeMap, Spanned, spanned, respan}; -use syntax_pos::{self, Span, BytePos, mk_sp}; +use codemap::{self, CodeMap, Spanned, respan}; +use syntax_pos::{self, Span, BytePos}; use errors::{self, DiagnosticBuilder}; use parse::{self, classify, token}; use parse::common::SeqSep; @@ -108,13 +108,13 @@ macro_rules! 
maybe_whole_expr { $p.bump(); let span = $p.span; let kind = ExprKind::Path(None, (*path).clone()); - return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new())); + return Ok($p.mk_expr(span, kind, ThinVec::new())); } token::NtBlock(ref block) => { $p.bump(); let span = $p.span; let kind = ExprKind::Block((*block).clone()); - return Ok($p.mk_expr(span.lo, span.hi, kind, ThinVec::new())); + return Ok($p.mk_expr(span, kind, ThinVec::new())); } _ => {}, }; @@ -731,7 +731,7 @@ impl<'a> Parser<'a> { token::AndAnd => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::BinOp(token::And), lo, span.hi)) + Ok(self.bump_with(token::BinOp(token::And), Span { lo: lo, ..span })) } _ => self.unexpected() } @@ -765,7 +765,7 @@ impl<'a> Parser<'a> { token::BinOp(token::Shl) => { let span = self.span; let lo = span.lo + BytePos(1); - self.bump_with(token::Lt, lo, span.hi); + self.bump_with(token::Lt, Span { lo: lo, ..span }); true } _ => false, @@ -793,17 +793,17 @@ impl<'a> Parser<'a> { token::BinOp(token::Shr) => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Gt, lo, span.hi)) + Ok(self.bump_with(token::Gt, Span { lo: lo, ..span })) } token::BinOpEq(token::Shr) => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Ge, lo, span.hi)) + Ok(self.bump_with(token::Ge, Span { lo: lo, ..span })) } token::Ge => { let span = self.span; let lo = span.lo + BytePos(1); - Ok(self.bump_with(token::Eq, lo, span.hi)) + Ok(self.bump_with(token::Eq, Span { lo: lo, ..span })) } _ => self.unexpected() } @@ -997,12 +997,12 @@ impl<'a> Parser<'a> { -> PResult<'a, Spanned>> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let lo = self.span.lo; + let lo = self.span; self.expect(bra)?; let result = self.parse_seq_to_before_end(ket, sep, f); - let hi = self.span.hi; + let hi = self.span; self.bump(); - Ok(spanned(lo, hi, result)) + Ok(respan(lo.to(hi), result)) } /// Advance the parser by one token @@ -1033,16 +1033,13 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. - pub fn bump_with(&mut self, - next: token::Token, - lo: BytePos, - hi: BytePos) { - self.prev_span = mk_sp(self.span.lo, lo); + pub fn bump_with(&mut self, next: token::Token, span: Span) { + self.prev_span = Span { hi: span.lo, ..self.span }; // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. self.prev_token_kind = PrevTokenKind::Other; - self.span = mk_sp(lo, hi); + self.span = span; self.token = next; self.expected_tokens.clear(); } @@ -1173,7 +1170,7 @@ impl<'a> Parser<'a> { pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> { maybe_whole!(self, NtTraitItem, |x| x); let mut attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let (name, node) = if self.eat_keyword(keywords::Type) { let TyParam {ident, bounds, default, ..} = self.parse_ty_param(vec![])?; @@ -1197,7 +1194,7 @@ impl<'a> Parser<'a> { } else if self.token.is_path_start() { // trait item macro. // code copied from parse_macro_use_or_failure... abstraction! - let lo = self.span.lo; + let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; @@ -1207,7 +1204,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)? 
} - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts }); (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac)) } else { let (constness, unsafety, abi) = match self.parse_fn_front_matter() { @@ -1277,7 +1274,7 @@ impl<'a> Parser<'a> { ident: name, attrs: attrs, node: node, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), }) } @@ -1298,8 +1295,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_no_plus()?)) } else { - let pos = self.span.lo; - Ok(FunctionRetTy::Default(mk_sp(pos, pos))) + Ok(FunctionRetTy::Default(Span { hi: self.span.lo, ..self.span })) } } @@ -1320,7 +1316,7 @@ impl<'a> Parser<'a> { fn parse_ty_common(&mut self, allow_plus: bool) -> PResult<'a, P> { maybe_whole!(self, NtTy, |x| x); - let lo = self.span.lo; + let lo = self.span; let node = if self.eat(&token::OpenDelim(token::Paren)) { // `(TYPE)` is a parenthesized type. // `(TYPE,)` is a tuple with a single field of type TYPE. @@ -1344,7 +1340,7 @@ impl<'a> Parser<'a> { TyKind::Path(None, ref path) if allow_plus && self.token == token::BinOp(token::Plus) => { self.bump(); // `+` - let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo, self.prev_span.hi); + let pt = PolyTraitRef::new(Vec::new(), path.clone(), lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(pt, TraitBoundModifier::None)]; bounds.append(&mut self.parse_ty_param_bounds()?); TyKind::TraitObject(bounds) @@ -1394,13 +1390,13 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // Macro invocation in type position let (_, tts) = self.expect_delimited_token_tree()?; - TyKind::Mac(spanned(lo, self.span.hi, Mac_ { path: path, tts: tts })) + TyKind::Mac(respan(lo.to(self.span), Mac_ { path: path, tts: tts })) } else { // Just a type path or bound list (trait object type) starting with a trait. // `Type` // `Trait1 + Trait2 + 'a` if allow_plus && self.eat(&token::BinOp(token::Plus)) { - let poly_trait = PolyTraitRef::new(Vec::new(), path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(Vec::new(), path, lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; bounds.append(&mut self.parse_ty_param_bounds()?); TyKind::TraitObject(bounds) @@ -1415,13 +1411,13 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. // `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let lo = self.span.lo; + let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; if self.token_is_bare_fn_keyword() { self.parse_ty_bare_fn(lifetime_defs)? } else { let path = self.parse_path(PathStyle::Type)?; - let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let mut bounds = vec![TraitTyParamBound(poly_trait, TraitBoundModifier::None)]; if allow_plus && self.eat(&token::BinOp(token::Plus)) { bounds.append(&mut self.parse_ty_param_bounds()?) @@ -1440,7 +1436,7 @@ impl<'a> Parser<'a> { return Err(self.fatal(&msg)); }; - let span = mk_sp(lo, self.prev_span.hi); + let span = lo.to(self.prev_span); let ty = Ty { node: node, span: span, id: ast::DUMMY_NODE_ID }; // Try to recover from use of `+` with incorrect priority. 
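
The parser hunks in this commit consistently trade raw `BytePos` pairs, the `mk_sp(lo, hi)` calls, and the removed `codemap::spanned` helper for whole `Span` values merged with `Span::to`, so the resulting span also keeps a `SyntaxContext`. A minimal sketch of that bookkeeping pattern follows, assuming the in-tree `syntax_pos` types as modified by this series; the function and test names are illustrative, not part of the patch.

    use syntax_pos::{Span, BytePos, NO_EXPANSION};

    // Illustrative only: merge the span recorded at the start of a construct
    // with the span of its last token, as the parser's `lo.to(self.prev_span)`
    // calls above do.
    fn covering_span(lo: Span, prev: Span) -> Span {
        // Written as `mk_sp(lo.lo, prev.hi)` before this commit.
        lo.to(prev)
    }

    #[test]
    fn covering_span_spans_both_inputs() {
        let lo = Span { lo: BytePos(0), hi: BytePos(2), ctxt: NO_EXPANSION };
        let prev = Span { lo: BytePos(7), hi: BytePos(9), ctxt: NO_EXPANSION };
        let merged = covering_span(lo, prev);
        assert_eq!((merged.lo, merged.hi), (BytePos(0), BytePos(9)));
    }
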
@@ -1457,7 +1453,7 @@ impl<'a> Parser<'a> { self.bump(); // `+` let bounds = self.parse_ty_param_bounds()?; - let sum_span = mk_sp(ty.span.lo, self.prev_span.hi); + let sum_span = ty.span.to(self.prev_span); let mut err = struct_span_err!(self.sess.span_diagnostic, ty.span, E0178, "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty)); @@ -1577,7 +1573,7 @@ impl<'a> Parser<'a> { P(Ty { id: ast::DUMMY_NODE_ID, node: TyKind::Infer, - span: mk_sp(self.span.lo, self.span.hi), + span: self.span, }) }; Ok(Arg { @@ -1625,7 +1621,7 @@ impl<'a> Parser<'a> { /// Matches lit = true | false | token_lit pub fn parse_lit(&mut self) -> PResult<'a, Lit> { - let lo = self.span.lo; + let lo = self.span; let lit = if self.eat_keyword(keywords::True) { LitKind::Bool(true) } else if self.eat_keyword(keywords::False) { @@ -1634,22 +1630,22 @@ impl<'a> Parser<'a> { let lit = self.parse_lit_token()?; lit }; - Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.prev_span.hi) }) + Ok(codemap::Spanned { node: lit, span: lo.to(self.prev_span) }) } /// matches '-' lit | lit pub fn parse_pat_literal_maybe_minus(&mut self) -> PResult<'a, P> { - let minus_lo = self.span.lo; + let minus_lo = self.span; let minus_present = self.eat(&token::BinOp(token::Minus)); - let lo = self.span.lo; + let lo = self.span; let literal = P(self.parse_lit()?); - let hi = self.prev_span.hi; - let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), ThinVec::new()); + let hi = self.prev_span; + let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); if minus_present { - let minus_hi = self.prev_span.hi; + let minus_hi = self.prev_span; let unary = self.mk_unary(UnOp::Neg, expr); - Ok(self.mk_expr(minus_lo, minus_hi, unary, ThinVec::new())) + Ok(self.mk_expr(minus_lo.to(minus_hi), unary, ThinVec::new())) } else { Ok(expr) } @@ -1726,7 +1722,7 @@ impl<'a> Parser<'a> { pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { maybe_whole!(self, NtPath, |x| x); - let lo = self.span.lo; + let lo = self.span; let is_global = self.eat(&token::ModSep); // Parse any number of segments and bound sets. A segment is an @@ -1750,7 +1746,7 @@ impl<'a> Parser<'a> { // Assemble the span. // FIXME(#39450) This is bogus if part of the path is macro generated. - let span = mk_sp(lo, self.prev_span.hi); + let span = lo.to(self.prev_span); // Assemble the result. Ok(ast::Path { @@ -1791,7 +1787,7 @@ impl<'a> Parser<'a> { bindings: bindings, }.into() } else if self.eat(&token::OpenDelim(token::Paren)) { - let lo = self.prev_span.lo; + let lo = self.prev_span; let inputs = self.parse_seq_to_end( &token::CloseDelim(token::Paren), @@ -1804,10 +1800,10 @@ impl<'a> Parser<'a> { None }; - let hi = self.prev_span.hi; + let hi = self.prev_span; Some(P(ast::PathParameters::Parenthesized(ast::ParenthesizedParameterData { - span: mk_sp(lo, hi), + span: lo.to(hi), inputs: inputs, output: output_ty, }))) @@ -1928,38 +1924,37 @@ impl<'a> Parser<'a> { /// Parse ident (COLON expr)? pub fn parse_field(&mut self) -> PResult<'a, Field> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let hi; // Check if a colon exists one ahead. This means we're parsing a fieldname. 
let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { let fieldname = self.parse_field_name()?; self.bump(); - hi = self.prev_span.hi; + hi = self.prev_span; (fieldname, self.parse_expr()?, false) } else { let fieldname = self.parse_ident()?; - hi = self.prev_span.hi; + hi = self.prev_span; // Mimic `x: x` for the `x` field shorthand. - let path = ast::Path::from_ident(mk_sp(lo, hi), fieldname); - (fieldname, self.mk_expr(lo, hi, ExprKind::Path(None, path), ThinVec::new()), true) + let path = ast::Path::from_ident(lo.to(hi), fieldname); + (fieldname, self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new()), true) }; Ok(ast::Field { - ident: spanned(lo, hi, fieldname), - span: mk_sp(lo, expr.span.hi), + ident: respan(lo.to(hi), fieldname), + span: lo.to(expr.span), expr: expr, is_shorthand: is_shorthand, attrs: attrs.into(), }) } - pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: ExprKind, attrs: ThinVec) - -> P { + pub fn mk_expr(&mut self, span: Span, node: ExprKind, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, node: node, - span: mk_sp(lo, hi), + span: span, attrs: attrs.into(), }) } @@ -2013,12 +2008,11 @@ impl<'a> Parser<'a> { ExprKind::AssignOp(binop, lhs, rhs) } - pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, - m: Mac_, attrs: ThinVec) -> P { + pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec) -> P { P(Expr { id: ast::DUMMY_NODE_ID, - node: ExprKind::Mac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}), - span: mk_sp(lo, hi), + node: ExprKind::Mac(codemap::Spanned {node: m, span: span}), + span: span, attrs: attrs, }) } @@ -2065,8 +2059,8 @@ impl<'a> Parser<'a> { // attributes by giving them a empty "already parsed" list. let mut attrs = ThinVec::new(); - let lo = self.span.lo; - let mut hi = self.span.hi; + let lo = self.span; + let mut hi = self.span; let ex: ExprKind; @@ -2095,18 +2089,19 @@ impl<'a> Parser<'a> { } self.bump(); - hi = self.prev_span.hi; + hi = self.prev_span; + let span = lo.to(hi); return if es.len() == 1 && !trailing_comma { - Ok(self.mk_expr(lo, hi, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) + Ok(self.mk_expr(span, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) } else { - Ok(self.mk_expr(lo, hi, ExprKind::Tup(es), attrs)) + Ok(self.mk_expr(span, ExprKind::Tup(es), attrs)) } }, token::OpenDelim(token::Brace) => { return self.parse_block_expr(lo, BlockCheckMode::Default, attrs); }, token::BinOp(token::Or) | token::OrOr => { - let lo = self.span.lo; + let lo = self.span; return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs); }, token::OpenDelim(token::Bracket) => { @@ -2144,34 +2139,34 @@ impl<'a> Parser<'a> { ex = ExprKind::Array(vec![first_expr]); } } - hi = self.prev_span.hi; + hi = self.prev_span; } _ => { if self.eat_lt() { let (qself, path) = self.parse_qualified_path(PathStyle::Expr)?; - hi = path.span.hi; - return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs)); + hi = path.span; + return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs)); } if self.eat_keyword(keywords::Move) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_lambda_expr(lo, CaptureBy::Value, attrs); } if self.eat_keyword(keywords::If) { return self.parse_if_expr(attrs); } if self.eat_keyword(keywords::For) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_for_expr(None, lo, attrs); } if self.eat_keyword(keywords::While) { - let lo = self.prev_span.lo; + let lo = self.prev_span; 
return self.parse_while_expr(None, lo, attrs); } if self.token.is_lifetime() { let label = Spanned { node: self.get_label(), span: self.span }; - let lo = self.span.lo; + let lo = self.span; self.bump(); self.expect(&token::Colon)?; if self.eat_keyword(keywords::While) { @@ -2186,7 +2181,7 @@ impl<'a> Parser<'a> { return Err(self.fatal("expected `while`, `for`, or `loop` after a label")) } if self.eat_keyword(keywords::Loop) { - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_loop_expr(None, lo, attrs); } if self.eat_keyword(keywords::Continue) { @@ -2200,8 +2195,8 @@ impl<'a> Parser<'a> { } else { ExprKind::Continue(None) }; - let hi = self.prev_span.hi; - return Ok(self.mk_expr(lo, hi, ex, attrs)); + let hi = self.prev_span; + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } if self.eat_keyword(keywords::Match) { return self.parse_match_expr(attrs); @@ -2215,13 +2210,13 @@ impl<'a> Parser<'a> { if self.is_catch_expr() { assert!(self.eat_keyword(keywords::Do)); assert!(self.eat_keyword(keywords::Catch)); - let lo = self.prev_span.lo; + let lo = self.prev_span; return self.parse_catch_expr(lo, attrs); } if self.eat_keyword(keywords::Return) { if self.token.can_begin_expr() { let e = self.parse_expr()?; - hi = e.span.hi; + hi = e.span; ex = ExprKind::Ret(Some(e)); } else { ex = ExprKind::Ret(None); @@ -2246,7 +2241,7 @@ impl<'a> Parser<'a> { None }; ex = ExprKind::Break(lt, e); - hi = self.prev_span.hi; + hi = self.prev_span; } else if self.token.is_keyword(keywords::Let) { // Catch this syntax error here, instead of in `check_strict_keywords`, so // that we can explicitly mention that let is not to be used as an expression @@ -2260,8 +2255,8 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // MACRO INVOCATION expression let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.prev_span.hi; - return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs)); + let hi = self.prev_span; + return Ok(self.mk_mac_expr(lo.to(hi), Mac_ { path: pth, tts: tts }, attrs)); } if self.check(&token::OpenDelim(token::Brace)) { // This is a struct literal, unless we're prohibited @@ -2274,12 +2269,12 @@ impl<'a> Parser<'a> { } } - hi = pth.span.hi; + hi = pth.span; ex = ExprKind::Path(None, pth); } else { match self.parse_lit() { Ok(lit) => { - hi = lit.span.hi; + hi = lit.span; ex = ExprKind::Lit(P(lit)); } Err(mut err) => { @@ -2293,10 +2288,10 @@ impl<'a> Parser<'a> { } } - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } - fn parse_struct_expr(&mut self, lo: BytePos, pth: ast::Path, mut attrs: ThinVec) + fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec) -> PResult<'a, P> { self.bump(); let mut fields = Vec::new(); @@ -2338,9 +2333,9 @@ impl<'a> Parser<'a> { } } - let hi = self.span.hi; + let span = lo.to(self.span); self.expect(&token::CloseDelim(token::Brace))?; - return Ok(self.mk_expr(lo, hi, ExprKind::Struct(pth, fields, base), attrs)); + return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs)); } fn parse_or_use_outer_attributes(&mut self, @@ -2354,7 +2349,7 @@ impl<'a> Parser<'a> { } /// Parse a block or unsafe block - pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode, + pub fn parse_block_expr(&mut self, lo: Span, blk_mode: BlockCheckMode, outer_attrs: ThinVec) -> PResult<'a, P> { @@ -2364,7 +2359,7 @@ impl<'a> Parser<'a> { attrs.extend(self.parse_inner_attributes()?); let blk = self.parse_block_tail(lo, blk_mode)?; - return 
Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), attrs)); + return Ok(self.mk_expr(blk.span, ExprKind::Block(blk), attrs)); } /// parse a.b or a(13) or a[4] or just a @@ -2375,12 +2370,12 @@ impl<'a> Parser<'a> { let b = self.parse_bottom_expr(); let (span, b) = self.interpolated_or_expr_span(b)?; - self.parse_dot_or_call_expr_with(b, span.lo, attrs) + self.parse_dot_or_call_expr_with(b, span, attrs) } pub fn parse_dot_or_call_expr_with(&mut self, e0: P, - lo: BytePos, + lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { // Stitch the list of outer attributes onto the return value. @@ -2411,11 +2406,7 @@ impl<'a> Parser<'a> { // Assuming we have just parsed `.foo` (i.e., a dot and an ident), continue // parsing into an expression. - fn parse_dot_suffix(&mut self, - ident: Ident, - ident_span: Span, - self_value: P, - lo: BytePos) + fn parse_dot_suffix(&mut self, ident: Ident, ident_span: Span, self_value: P, lo: Span) -> PResult<'a, P> { let (_, tys, bindings) = if self.eat(&token::ModSep) { self.expect_lt()?; @@ -2440,12 +2431,12 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - let hi = self.prev_span.hi; + let hi = self.prev_span; es.insert(0, self_value); - let id = spanned(ident_span.lo, ident_span.hi, ident); + let id = respan(ident_span.to(ident_span), ident); let nd = self.mk_method_call(id, tys, es); - self.mk_expr(lo, hi, nd, ThinVec::new()) + self.mk_expr(lo.to(hi), nd, ThinVec::new()) } // Field access. _ => { @@ -2456,32 +2447,30 @@ impl<'a> Parser<'a> { have type parameters"); } - let id = spanned(ident_span.lo, ident_span.hi, ident); + let id = respan(ident_span.to(ident_span), ident); let field = self.mk_field(self_value, id); - self.mk_expr(lo, ident_span.hi, field, ThinVec::new()) + self.mk_expr(lo.to(ident_span), field, ThinVec::new()) } }) } - fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: BytePos) -> PResult<'a, P> { + fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, P> { let mut e = e0; let mut hi; loop { // expr? 
while self.eat(&token::Question) { - let hi = self.prev_span.hi; - e = self.mk_expr(lo, hi, ExprKind::Try(e), ThinVec::new()); + let hi = self.prev_span; + e = self.mk_expr(lo.to(hi), ExprKind::Try(e), ThinVec::new()); } // expr.f if self.eat(&token::Dot) { match self.token { token::Ident(i) => { - let dot_pos = self.prev_span.hi; - hi = self.span.hi; + let ident_span = self.span; self.bump(); - - e = self.parse_dot_suffix(i, mk_sp(dot_pos, hi), e, lo)?; + e = self.parse_dot_suffix(i, ident_span, e, lo)?; } token::Literal(token::Integer(n), suf) => { let sp = self.span; @@ -2489,16 +2478,16 @@ impl<'a> Parser<'a> { // A tuple index may not have a suffix self.expect_no_suffix(sp, "tuple index", suf); - let dot = self.prev_span.hi; - hi = self.span.hi; + let dot_span = self.prev_span; + hi = self.span; self.bump(); let index = n.as_str().parse::().ok(); match index { Some(n) => { - let id = spanned(dot, hi, n); + let id = respan(dot_span.to(hi), n); let field = self.mk_tup_field(e, id); - e = self.mk_expr(lo, hi, field, ThinVec::new()); + e = self.mk_expr(lo.to(hi), field, ThinVec::new()); } None => { let prev_span = self.prev_span; @@ -2541,10 +2530,8 @@ impl<'a> Parser<'a> { let actual = self.this_token_to_string(); self.span_err(self.span, &format!("unexpected token: `{}`", actual)); - let dot_pos = self.prev_span.hi; - e = self.parse_dot_suffix(keywords::Invalid.ident(), - mk_sp(dot_pos, dot_pos), - e, lo)?; + let dot_span = self.prev_span; + e = self.parse_dot_suffix(keywords::Invalid.ident(), dot_span, e, lo)?; } } continue; @@ -2559,10 +2546,10 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - hi = self.prev_span.hi; + hi = self.prev_span; let nd = self.mk_call(e, es); - e = self.mk_expr(lo, hi, nd, ThinVec::new()); + e = self.mk_expr(lo.to(hi), nd, ThinVec::new()); } // expr[...] @@ -2570,10 +2557,10 @@ impl<'a> Parser<'a> { token::OpenDelim(token::Bracket) => { self.bump(); let ix = self.parse_expr()?; - hi = self.span.hi; + hi = self.span; self.expect(&token::CloseDelim(token::Bracket))?; let index = self.mk_index(e, ix); - e = self.mk_expr(lo, hi, index, ThinVec::new()) + e = self.mk_expr(lo.to(hi), index, ThinVec::new()) } _ => return Ok(e) } @@ -2635,38 +2622,33 @@ impl<'a> Parser<'a> { already_parsed_attrs: Option>) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.span.lo; - let hi; + let lo = self.span; // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() - let ex = match self.token { + let (hi, ex) = match self.token { token::Not => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Not, e) + (span, self.mk_unary(UnOp::Not, e)) } token::BinOp(token::Minus) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Neg, e) + (span, self.mk_unary(UnOp::Neg, e)) } token::BinOp(token::Star) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - self.mk_unary(UnOp::Deref, e) + (span, self.mk_unary(UnOp::Deref, e)) } token::BinOp(token::And) | token::AndAnd => { self.expect_and()?; let m = self.parse_mutability(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - ExprKind::AddrOf(m, e) + (span, ExprKind::AddrOf(m, e)) } token::Ident(..) 
if self.token.is_keyword(keywords::In) => { self.bump(); @@ -2676,20 +2658,18 @@ impl<'a> Parser<'a> { )?; let blk = self.parse_block()?; let span = blk.span; - hi = span.hi; - let blk_expr = self.mk_expr(span.lo, hi, ExprKind::Block(blk), ThinVec::new()); - ExprKind::InPlace(place, blk_expr) + let blk_expr = self.mk_expr(span, ExprKind::Block(blk), ThinVec::new()); + (span, ExprKind::InPlace(place, blk_expr)) } token::Ident(..) if self.token.is_keyword(keywords::Box) => { self.bump(); let e = self.parse_prefix_expr(None); let (span, e) = self.interpolated_or_expr_span(e)?; - hi = span.hi; - ExprKind::Box(e) + (span, ExprKind::Box(e)) } _ => return self.parse_dot_or_call_expr(Some(attrs)) }; - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } /// Parse an associative expression @@ -2750,13 +2730,11 @@ impl<'a> Parser<'a> { // Special cases: if op == AssocOp::As { let rhs = self.parse_ty_no_plus()?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); - lhs = self.mk_expr(lo, hi, ExprKind::Cast(lhs, rhs), ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs.span), ExprKind::Cast(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::Colon { let rhs = self.parse_ty_no_plus()?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); - lhs = self.mk_expr(lo, hi, ExprKind::Type(lhs, rhs), ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs.span), ExprKind::Type(lhs, rhs), ThinVec::new()); continue } else if op == AssocOp::DotDot || op == AssocOp::DotDotDot { // If we didn’t have to handle `x..`/`x...`, it would be pretty easy to @@ -2782,7 +2760,7 @@ impl<'a> Parser<'a> { }; let r = try!(self.mk_range(Some(lhs), rhs, limits)); - lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, ThinVec::new()); + lhs = self.mk_expr(lhs_span.to(rhs_span), r, ThinVec::new()); break } @@ -2809,7 +2787,7 @@ impl<'a> Parser<'a> { }), }?; - let (lo, hi) = (lhs_span.lo, rhs.span.hi); + let span = lhs_span.to(rhs.span); lhs = match op { AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor | @@ -2818,12 +2796,12 @@ impl<'a> Parser<'a> { AssocOp::Greater | AssocOp::GreaterEqual => { let ast_op = op.to_ast_binop().unwrap(); let binary = self.mk_binary(codemap::respan(cur_op_span, ast_op), lhs, rhs); - self.mk_expr(lo, hi, binary, ThinVec::new()) + self.mk_expr(span, binary, ThinVec::new()) } AssocOp::Assign => - self.mk_expr(lo, hi, ExprKind::Assign(lhs, rhs), ThinVec::new()), + self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()), AssocOp::Inplace => - self.mk_expr(lo, hi, ExprKind::InPlace(lhs, rhs), ThinVec::new()), + self.mk_expr(span, ExprKind::InPlace(lhs, rhs), ThinVec::new()), AssocOp::AssignOp(k) => { let aop = match k { token::Plus => BinOpKind::Add, @@ -2838,7 +2816,7 @@ impl<'a> Parser<'a> { token::Shr => BinOpKind::Shr, }; let aopexpr = self.mk_assign_op(codemap::respan(cur_op_span, aop), lhs, rhs); - self.mk_expr(lo, hi, aopexpr, ThinVec::new()) + self.mk_expr(span, aopexpr, ThinVec::new()) } AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotDot => { self.bug("As, Colon, DotDot or DotDotDot branch reached") @@ -2858,7 +2836,7 @@ impl<'a> Parser<'a> { match lhs.node { ExprKind::Binary(op, _, _) if op.node.is_comparison() => { // respan to include both operators - let op_span = mk_sp(op.span.lo, self.span.hi); + let op_span = op.span.to(self.span); let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); if op.node 
== BinOpKind::Lt && @@ -2881,8 +2859,8 @@ impl<'a> Parser<'a> { debug_assert!(self.token == token::DotDot || self.token == token::DotDotDot); let tok = self.token.clone(); let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; - let lo = self.span.lo; - let mut hi = self.span.hi; + let lo = self.span; + let mut hi = self.span; self.bump(); let opt_end = if self.is_at_start_of_range_notation_rhs() { // RHS must be parsed with more associativity than the dots. @@ -2890,7 +2868,7 @@ impl<'a> Parser<'a> { Some(self.parse_assoc_expr_with(next_prec, LhsExpr::NotYetParsed) .map(|x|{ - hi = x.span.hi; + hi = x.span; x })?) } else { @@ -2905,7 +2883,7 @@ impl<'a> Parser<'a> { let r = try!(self.mk_range(None, opt_end, limits)); - Ok(self.mk_expr(lo, hi, r, attrs)) + Ok(self.mk_expr(lo.to(hi), r, attrs)) } fn is_at_start_of_range_notation_rhs(&self) -> bool { @@ -2925,23 +2903,23 @@ impl<'a> Parser<'a> { if self.check_keyword(keywords::Let) { return self.parse_if_let_expr(attrs); } - let lo = self.prev_span.lo; + let lo = self.prev_span; let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let thn = self.parse_block()?; let mut els: Option> = None; - let mut hi = thn.span.hi; + let mut hi = thn.span; if self.eat_keyword(keywords::Else) { let elexpr = self.parse_else_expr()?; - hi = elexpr.span.hi; + hi = elexpr.span; els = Some(elexpr); } - Ok(self.mk_expr(lo, hi, ExprKind::If(cond, thn, els), attrs)) + Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs)) } /// Parse an 'if let' expression ('if' token already eaten) pub fn parse_if_let_expr(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.prev_span.lo; + let lo = self.prev_span; self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; self.expect(&token::Eq)?; @@ -2949,36 +2927,35 @@ impl<'a> Parser<'a> { let thn = self.parse_block()?; let (hi, els) = if self.eat_keyword(keywords::Else) { let expr = self.parse_else_expr()?; - (expr.span.hi, Some(expr)) + (expr.span, Some(expr)) } else { - (thn.span.hi, None) + (thn.span, None) }; - Ok(self.mk_expr(lo, hi, ExprKind::IfLet(pat, expr, thn, els), attrs)) + Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pat, expr, thn, els), attrs)) } // `move |args| expr` pub fn parse_lambda_expr(&mut self, - lo: BytePos, + lo: Span, capture_clause: CaptureBy, attrs: ThinVec) -> PResult<'a, P> { let decl = self.parse_fn_block_decl()?; - let decl_hi = self.prev_span.hi; + let decl_hi = self.prev_span; let body = match decl.output { FunctionRetTy::Default(_) => self.parse_expr()?, _ => { // If an explicit return type is given, require a // block to appear (RFC 968). - let body_lo = self.span.lo; + let body_lo = self.span; self.parse_block_expr(body_lo, BlockCheckMode::Default, ThinVec::new())? } }; Ok(self.mk_expr( - lo, - body.span.hi, - ExprKind::Closure(capture_clause, decl, body, mk_sp(lo, decl_hi)), + lo.to(body.span), + ExprKind::Closure(capture_clause, decl, body, lo.to(decl_hi)), attrs)) } @@ -2988,13 +2965,13 @@ impl<'a> Parser<'a> { return self.parse_if_expr(ThinVec::new()); } else { let blk = self.parse_block()?; - return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), ThinVec::new())); + return Ok(self.mk_expr(blk.span, ExprKind::Block(blk), ThinVec::new())); } } /// Parse a 'for' .. 
'in' expression ('for' token already eaten) pub fn parse_for_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { // Parse: `for in ` @@ -3004,16 +2981,13 @@ impl<'a> Parser<'a> { let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = self.prev_span.hi; - - Ok(self.mk_expr(span_lo, hi, - ExprKind::ForLoop(pat, expr, loop_block, opt_ident), - attrs)) + let hi = self.prev_span; + Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs)) } /// Parse a 'while' or 'while let' expression ('while' token already eaten) pub fn parse_while_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { if self.token.is_keyword(keywords::Let) { return self.parse_while_let_expr(opt_ident, span_lo, attrs); @@ -3021,14 +2995,13 @@ impl<'a> Parser<'a> { let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - return Ok(self.mk_expr(span_lo, hi, ExprKind::While(cond, body, opt_ident), - attrs)); + let span = span_lo.to(body.span); + return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs)); } /// Parse a 'while let' expression ('while' token already eaten) pub fn parse_while_let_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; @@ -3036,34 +3009,33 @@ impl<'a> Parser<'a> { let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - return Ok(self.mk_expr(span_lo, hi, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); + let span = span_lo.to(body.span); + return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs)); } // parse `loop {...}`, `loop` token already eaten pub fn parse_loop_expr(&mut self, opt_ident: Option, - span_lo: BytePos, + span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - Ok(self.mk_expr(span_lo, hi, ExprKind::Loop(body, opt_ident), attrs)) + let span = span_lo.to(body.span); + Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs)) } /// Parse a `do catch {...}` expression (`do catch` token already eaten) - pub fn parse_catch_expr(&mut self, span_lo: BytePos, mut attrs: ThinVec) + pub fn parse_catch_expr(&mut self, span_lo: Span, mut attrs: ThinVec) -> PResult<'a, P> { let (iattrs, body) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = body.span.hi; - Ok(self.mk_expr(span_lo, hi, ExprKind::Catch(body), attrs)) + Ok(self.mk_expr(span_lo.to(body.span), ExprKind::Catch(body), attrs)) } // `match` token already eaten fn parse_match_expr(&mut self, mut attrs: ThinVec) -> PResult<'a, P> { let match_span = self.prev_span; - let lo = self.prev_span.lo; + let lo = self.prev_span; let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { @@ -3082,17 +3054,17 @@ impl<'a> Parser<'a> { // Recover by skipping to the end of the block. 
e.emit(); self.recover_stmt(); - let hi = self.span.hi; + let span = lo.to(self.span); if self.token == token::CloseDelim(token::Brace) { self.bump(); } - return Ok(self.mk_expr(lo, hi, ExprKind::Match(discriminant, arms), attrs)); + return Ok(self.mk_expr(span, ExprKind::Match(discriminant, arms), attrs)); } } } - let hi = self.span.hi; + let hi = self.span; self.bump(); - return Ok(self.mk_expr(lo, hi, ExprKind::Match(discriminant, arms), attrs)); + return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); } pub fn parse_arm(&mut self) -> PResult<'a, Arm> { @@ -3266,7 +3238,7 @@ impl<'a> Parser<'a> { } let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let hi; if self.check(&token::DotDot) { @@ -3286,16 +3258,16 @@ impl<'a> Parser<'a> { let fieldname = self.parse_field_name()?; self.bump(); let pat = self.parse_pat()?; - hi = pat.span.hi; + hi = pat.span; (pat, fieldname, false) } else { // Parsing a pattern of the form "(box) (ref) (mut) fieldname" let is_box = self.eat_keyword(keywords::Box); - let boxed_span_lo = self.span.lo; + let boxed_span = self.span; let is_ref = self.eat_keyword(keywords::Ref); let is_mut = self.eat_keyword(keywords::Mut); let fieldname = self.parse_ident()?; - hi = self.prev_span.hi; + hi = self.prev_span; let bind_type = match (is_ref, is_mut) { (true, true) => BindingMode::ByRef(Mutability::Mutable), @@ -3307,14 +3279,14 @@ impl<'a> Parser<'a> { let fieldpat = P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Ident(bind_type, fieldpath, None), - span: mk_sp(boxed_span_lo, hi), + span: boxed_span.to(hi), }); let subpat = if is_box { P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Box(fieldpat), - span: mk_sp(lo, hi), + span: lo.to(hi), }) } else { fieldpat @@ -3322,7 +3294,7 @@ impl<'a> Parser<'a> { (subpat, fieldname, true) }; - fields.push(codemap::Spanned { span: mk_sp(lo, hi), + fields.push(codemap::Spanned { span: lo.to(hi), node: ast::FieldPat { ident: fieldname, pat: subpat, @@ -3336,7 +3308,7 @@ impl<'a> Parser<'a> { fn parse_pat_range_end(&mut self) -> PResult<'a, P> { if self.token.is_path_start() { - let lo = self.span.lo; + let lo = self.span; let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = @@ -3346,8 +3318,8 @@ impl<'a> Parser<'a> { // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) 
}; - let hi = self.prev_span.hi; - Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new())) + let hi = self.prev_span; + Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), ThinVec::new())) } else { self.parse_pat_literal_maybe_minus() } @@ -3373,7 +3345,7 @@ impl<'a> Parser<'a> { pub fn parse_pat(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtPat, |x| x); - let lo = self.span.lo; + let lo = self.span; let pat; match self.token { token::Underscore => { @@ -3439,7 +3411,7 @@ impl<'a> Parser<'a> { // Parse macro invocation self.bump(); let (_, tts) = self.expect_delimited_token_tree()?; - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: path, tts: tts }); pat = PatKind::Mac(mac); } token::DotDotDot | token::DotDot => { @@ -3449,9 +3421,8 @@ impl<'a> Parser<'a> { _ => panic!("can only parse `..` or `...` for ranges (checked above)"), }; // Parse range - let hi = self.prev_span.hi; - let begin = - self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new()); + let span = lo.to(self.prev_span); + let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); self.bump(); let end = self.parse_pat_range_end()?; pat = PatKind::Range(begin, end, end_kind); @@ -3505,11 +3476,10 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; Ok(P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, - span: mk_sp(lo, hi), + span: lo.to(self.prev_span), })) } @@ -3545,7 +3515,7 @@ impl<'a> Parser<'a> { /// Parse a local variable declaration fn parse_local(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.span.lo; + let lo = self.span; let pat = self.parse_pat()?; let mut ty = None; @@ -3558,14 +3528,14 @@ impl<'a> Parser<'a> { pat: pat, init: init, id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), attrs: attrs, })) } /// Parse a structure field fn parse_name_and_ty(&mut self, - lo: BytePos, + lo: Span, vis: Visibility, attrs: Vec) -> PResult<'a, StructField> { @@ -3573,7 +3543,7 @@ impl<'a> Parser<'a> { self.expect(&token::Colon)?; let ty = self.parse_ty()?; Ok(StructField { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), ident: Some(name), vis: vis, id: ast::DUMMY_NODE_ID, @@ -3683,7 +3653,7 @@ impl<'a> Parser<'a> { fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility) -> PResult<'a, Option>> { - let lo = self.span.lo; + let lo = self.span; match self.token { token::Ident(ident) if ident.name == "macro_rules" => { if self.look_ahead(1, |t| *t == token::Not) { @@ -3706,9 +3676,9 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; + let span = lo.to(self.prev_span); let kind = ItemKind::MacroDef(tts); - Ok(Some(self.mk_item(lo, hi, id, kind, Visibility::Inherited, attrs.to_owned()))) + Ok(Some(self.mk_item(span, id, kind, Visibility::Inherited, attrs.to_owned()))) } fn parse_stmt_without_recovery(&mut self, @@ -3717,19 +3687,19 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtStmt, |x| Some(x)); let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; Ok(Some(if self.eat_keyword(keywords::Let) { Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Local(self.parse_local(attrs.into())?), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), } } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited)? 
{ Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Item(macro_def), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), } // Starts like a simple path, but not a union item. } else if self.token.is_path_start() && @@ -3741,8 +3711,8 @@ impl<'a> Parser<'a> { let expr = if self.check(&token::OpenDelim(token::Brace)) { self.parse_struct_expr(lo, pth, ThinVec::new())? } else { - let hi = self.prev_span.hi; - self.mk_expr(lo, hi, ExprKind::Path(None, pth), ThinVec::new()) + let hi = self.prev_span; + self.mk_expr(lo.to(hi), ExprKind::Path(None, pth), ThinVec::new()) }; let expr = self.with_res(Restrictions::RESTRICTION_STMT_EXPR, |this| { @@ -3753,7 +3723,7 @@ impl<'a> Parser<'a> { return Ok(Some(Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Expr(expr), - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), })); } @@ -3784,7 +3754,7 @@ impl<'a> Parser<'a> { }; let (_, tts) = self.expect_delimited_token_tree()?; - let hi = self.prev_span.hi; + let hi = self.prev_span; let style = if delim == token::Brace { MacStmtStyle::Braces @@ -3793,7 +3763,7 @@ impl<'a> Parser<'a> { }; if id.name == keywords::Invalid.name() { - let mac = spanned(lo, hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(hi), Mac_ { path: pth, tts: tts }); let node = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof { StmtKind::Mac(P((mac, style, attrs.into()))) @@ -3813,14 +3783,14 @@ impl<'a> Parser<'a> { self.warn_missing_semicolon(); StmtKind::Mac(P((mac, style, attrs.into()))) } else { - let e = self.mk_mac_expr(lo, hi, mac.node, ThinVec::new()); + let e = self.mk_mac_expr(lo.to(hi), mac.node, ThinVec::new()); let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?; let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?; StmtKind::Expr(e) }; Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), node: node, } } else { @@ -3835,13 +3805,14 @@ impl<'a> Parser<'a> { followed by a semicolon"); } } + let span = lo.to(hi); Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: span, node: StmtKind::Item({ self.mk_item( - lo, hi, id /*id is good here*/, - ItemKind::Mac(spanned(lo, hi, Mac_ { path: pth, tts: tts })), + span, id /*id is good here*/, + ItemKind::Mac(respan(span, Mac_ { path: pth, tts: tts })), Visibility::Inherited, attrs) }), @@ -3856,7 +3827,7 @@ impl<'a> Parser<'a> { match item { Some(i) => Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, i.span.hi), + span: lo.to(i.span), node: StmtKind::Item(i), }, None => { @@ -3887,7 +3858,7 @@ impl<'a> Parser<'a> { Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into()))?; Stmt { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, e.span.hi), + span: lo.to(e.span), node: StmtKind::Expr(e), } } @@ -3905,7 +3876,7 @@ impl<'a> Parser<'a> { pub fn parse_block(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtBlock, |x| x); - let lo = self.span.lo; + let lo = self.span; if !self.eat(&token::OpenDelim(token::Brace)) { let sp = self.span; @@ -3950,7 +3921,7 @@ impl<'a> Parser<'a> { fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec, P)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); - let lo = self.span.lo; + let lo = self.span; self.expect(&token::OpenDelim(token::Brace))?; Ok((self.parse_inner_attributes()?, self.parse_block_tail(lo, BlockCheckMode::Default)?)) @@ -3958,7 +3929,7 @@ impl<'a> Parser<'a> { /// Parse the rest of a block expression or function body /// Precondition: already parsed the '{'. 
- fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P> { + fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P> { let mut stmts = vec![]; while !self.eat(&token::CloseDelim(token::Brace)) { @@ -3976,7 +3947,7 @@ impl<'a> Parser<'a> { stmts: stmts, id: ast::DUMMY_NODE_ID, rules: s, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), })) } @@ -4042,10 +4013,10 @@ impl<'a> Parser<'a> { } bounds.push(RegionTyParamBound(self.expect_lifetime())); } else if self.check_keyword(keywords::For) || self.check_path() { - let lo = self.span.lo; + let lo = self.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; let path = self.parse_path(PathStyle::Type)?; - let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo, self.prev_span.hi); + let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_span)); let modifier = if question.is_some() { TraitBoundModifier::Maybe } else { @@ -4166,7 +4137,7 @@ impl<'a> Parser<'a> { pub fn parse_generics(&mut self) -> PResult<'a, ast::Generics> { maybe_whole!(self, NtGenerics, |x| x); - let span_lo = self.span.lo; + let span_lo = self.span; if self.eat_lt() { let (lifetime_defs, ty_params) = self.parse_generic_params()?; self.expect_gt()?; @@ -4177,7 +4148,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), }, - span: mk_sp(span_lo, self.prev_span.hi), + span: span_lo.to(self.prev_span), }) } else { Ok(ast::Generics::default()) @@ -4202,7 +4173,7 @@ impl<'a> Parser<'a> { } } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) { // Parse associated type binding. - let lo = self.span.lo; + let lo = self.span; let ident = self.parse_ident()?; self.bump(); let ty = self.parse_ty()?; @@ -4210,7 +4181,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, ident: ident, ty: ty, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), }); seen_binding = true; } else if self.check_type() { @@ -4267,7 +4238,7 @@ impl<'a> Parser<'a> { } loop { - let lo = self.span.lo; + let lo = self.span; if self.check_lifetime() && self.look_ahead(1, |t| t != &token::BinOp(token::Plus)) { let lifetime = self.expect_lifetime(); // Bounds starting with a colon are mandatory, but possibly empty. @@ -4275,7 +4246,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_lt_param_bounds(); where_clause.predicates.push(ast::WherePredicate::RegionPredicate( ast::WhereRegionPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), lifetime: lifetime, bounds: bounds, } @@ -4296,7 +4267,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_ty_param_bounds()?; where_clause.predicates.push(ast::WherePredicate::BoundPredicate( ast::WhereBoundPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), bound_lifetimes: lifetime_defs, bounded_ty: ty, bounds: bounds, @@ -4307,7 +4278,7 @@ impl<'a> Parser<'a> { let rhs_ty = self.parse_ty()?; where_clause.predicates.push(ast::WherePredicate::EqPredicate( ast::WhereEqPredicate { - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), lhs_ty: ty, rhs_ty: rhs_ty, id: ast::DUMMY_NODE_ID, @@ -4404,7 +4375,7 @@ impl<'a> Parser<'a> { // Parse optional self parameter of a method. // Only a limited set of initial token sequences is considered self parameters, anything // else is parsed as a normal function parameter list, so some lookahead is required. 
- let eself_lo = self.span.lo; + let eself_lo = self.span; let (eself, eself_ident) = match self.token { token::BinOp(token::And) => { // &self @@ -4486,7 +4457,7 @@ impl<'a> Parser<'a> { _ => return Ok(None), }; - let eself = codemap::respan(mk_sp(eself_lo, self.prev_span.hi), eself); + let eself = codemap::respan(eself_lo.to(self.prev_span), eself); Ok(Some(Arg::from_self(eself, eself_ident))) } @@ -4558,8 +4529,7 @@ impl<'a> Parser<'a> { Ok((id, generics)) } - fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident, - node: ItemKind, vis: Visibility, + fn mk_item(&mut self, span: Span, ident: Ident, node: ItemKind, vis: Visibility, attrs: Vec) -> P { P(Item { ident: ident, @@ -4567,7 +4537,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, node: node, vis: vis, - span: mk_sp(lo, hi) + span: span, }) } @@ -4625,7 +4595,7 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtImplItem, |x| x); let mut attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let vis = self.parse_visibility(false)?; let defaultness = self.parse_defaultness()?; let (name, node) = if self.eat_keyword(keywords::Type) { @@ -4651,7 +4621,7 @@ impl<'a> Parser<'a> { Ok(ImplItem { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.prev_span.hi), + span: lo.to(self.prev_span), ident: name, vis: vis, defaultness: defaultness, @@ -4694,7 +4664,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; self.complain_if_pub_macro(&vis, prev_span); - let lo = self.span.lo; + let lo = self.span; let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; @@ -4704,7 +4674,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)? } - let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts }); Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(mac))) } else { let (constness, unsafety, abi) = self.parse_fn_front_matter()?; @@ -4938,11 +4908,11 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| { let attrs = p.parse_outer_attributes()?; - let lo = p.span.lo; + let lo = p.span; let vis = p.parse_visibility(true)?; let ty = p.parse_ty()?; Ok(StructField { - span: mk_sp(lo, p.span.hi), + span: lo.to(p.span), vis: vis, ident: None, id: ast::DUMMY_NODE_ID, @@ -4956,7 +4926,7 @@ impl<'a> Parser<'a> { /// Parse a structure field declaration pub fn parse_single_struct_field(&mut self, - lo: BytePos, + lo: Span, vis: Visibility, attrs: Vec ) -> PResult<'a, StructField> { @@ -4978,7 +4948,7 @@ impl<'a> Parser<'a> { /// Parse an element of a struct definition fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let vis = self.parse_visibility(false)?; self.parse_single_struct_field(lo, vis, attrs) } @@ -5056,7 +5026,7 @@ impl<'a> Parser<'a> { } /// Given a termination token, parse all of the items in a module - fn parse_mod_items(&mut self, term: &token::Token, inner_lo: BytePos) -> PResult<'a, Mod> { + fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? 
{ items.push(item); @@ -5070,11 +5040,11 @@ impl<'a> Parser<'a> { let hi = if self.span == syntax_pos::DUMMY_SP { inner_lo } else { - self.prev_span.hi + self.prev_span }; Ok(ast::Mod { - inner: mk_sp(inner_lo, hi), + inner: inner_lo.to(hi), items: items }) } @@ -5137,7 +5107,7 @@ impl<'a> Parser<'a> { let old_directory = self.directory.clone(); self.push_directory(id, &outer_attrs); self.expect(&token::OpenDelim(token::Brace))?; - let mod_inner_lo = self.span.lo; + let mod_inner_lo = self.span; let attrs = self.parse_inner_attributes()?; let module = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; self.directory = old_directory; @@ -5280,7 +5250,7 @@ impl<'a> Parser<'a> { let mut p0 = new_sub_parser_from_file(self.sess, &path, directory_ownership, Some(name), id_sp); p0.cfg_mods = self.cfg_mods; - let mod_inner_lo = p0.span.lo; + let mod_inner_lo = p0.span; let mod_attrs = p0.parse_inner_attributes()?; let m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; self.sess.included_mod_stack.borrow_mut().pop(); @@ -5288,42 +5258,42 @@ impl<'a> Parser<'a> { } /// Parse a function declaration from a foreign module - fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: BytePos, - attrs: Vec) -> PResult<'a, ForeignItem> { + fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec) + -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Fn)?; let (ident, mut generics) = self.parse_fn_header()?; let decl = self.parse_fn_decl(true)?; generics.where_clause = self.parse_where_clause()?; - let hi = self.span.hi; + let hi = self.span; self.expect(&token::Semi)?; Ok(ast::ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Fn(decl, generics), id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), vis: vis }) } /// Parse a static item from a foreign module - fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: BytePos, - attrs: Vec) -> PResult<'a, ForeignItem> { + fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec) + -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Static)?; let mutbl = self.eat_keyword(keywords::Mut); let ident = self.parse_ident()?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; - let hi = self.span.hi; + let hi = self.span; self.expect(&token::Semi)?; Ok(ForeignItem { ident: ident, attrs: attrs, node: ForeignItemKind::Static(ty, mutbl), id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), + span: lo.to(hi), vis: vis }) } @@ -5335,7 +5305,7 @@ impl<'a> Parser<'a> { /// extern crate foo; /// extern crate bar as foo; fn parse_item_extern_crate(&mut self, - lo: BytePos, + lo: Span, visibility: Visibility, attrs: Vec) -> PResult<'a, P> { @@ -5349,8 +5319,7 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; let prev_span = self.prev_span; - Ok(self.mk_item(lo, - prev_span.hi, + Ok(self.mk_item(lo.to(prev_span), ident, ItemKind::ExternCrate(maybe_path), visibility, @@ -5368,7 +5337,7 @@ impl<'a> Parser<'a> { /// extern "C" {} /// extern {} fn parse_item_foreign_mod(&mut self, - lo: BytePos, + lo: Span, opt_abi: Option, visibility: Visibility, mut attrs: Vec) @@ -5390,12 +5359,8 @@ impl<'a> Parser<'a> { abi: abi, items: foreign_items }; - Ok(self.mk_item(lo, - prev_span.hi, - keywords::Invalid.ident(), - ItemKind::ForeignMod(m), - visibility, - attrs)) + let invalid = keywords::Invalid.ident(); + Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs)) } /// Parse type Foo = Bar; @@ -5416,7 +5381,7 @@ impl<'a> Parser<'a> { 
let mut any_disr = None; while self.token != token::CloseDelim(token::Brace) { let variant_attrs = self.parse_outer_attributes()?; - let vlo = self.span.lo; + let vlo = self.span; let struct_def; let mut disr_expr = None; @@ -5444,7 +5409,7 @@ impl<'a> Parser<'a> { data: struct_def, disr_expr: disr_expr, }; - variants.push(spanned(vlo, self.prev_span.hi, vr)); + variants.push(respan(vlo.to(self.prev_span), vr)); if !self.eat(&token::Comma) { break; } } @@ -5514,7 +5479,7 @@ impl<'a> Parser<'a> { Some(P(item)) }); - let lo = self.span.lo; + let lo = self.span; let visibility = self.parse_visibility(false)?; @@ -5524,12 +5489,8 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, - keywords::Invalid.ident(), - item_, - visibility, - attrs); + let invalid = keywords::Invalid.ident(); + let item = self.mk_item(lo.to(prev_span), invalid, item_, visibility, attrs); return Ok(Some(item)); } @@ -5549,8 +5510,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5572,8 +5532,7 @@ impl<'a> Parser<'a> { }; let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5597,8 +5556,7 @@ impl<'a> Parser<'a> { respan(const_span, Constness::Const), Abi::Rust)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5615,8 +5573,7 @@ impl<'a> Parser<'a> { } let (ident, item_, extra_attrs) = self.parse_item_const(None)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5632,8 +5589,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Unsafe)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5648,8 +5604,7 @@ impl<'a> Parser<'a> { self.expect_keyword(keywords::Impl)?; let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5665,8 +5620,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), Abi::Rust)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5689,8 +5643,7 @@ impl<'a> Parser<'a> { respan(fn_span, Constness::NotConst), abi)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5702,8 +5655,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5714,8 +5666,7 @@ impl<'a> Parser<'a> { // TYPE ITEM let (ident, item_, extra_attrs) = self.parse_item_type()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, 
visibility, @@ -5726,8 +5677,7 @@ impl<'a> Parser<'a> { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5739,8 +5689,7 @@ impl<'a> Parser<'a> { let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Normal)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5751,8 +5700,7 @@ impl<'a> Parser<'a> { // IMPL ITEM let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal)?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5763,8 +5711,7 @@ impl<'a> Parser<'a> { // STRUCT ITEM let (ident, item_, extra_attrs) = self.parse_item_struct()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5776,8 +5723,7 @@ impl<'a> Parser<'a> { self.bump(); let (ident, item_, extra_attrs) = self.parse_item_union()?; let prev_span = self.prev_span; - let item = self.mk_item(lo, - prev_span.hi, + let item = self.mk_item(lo.to(prev_span), ident, item_, visibility, @@ -5794,7 +5740,7 @@ impl<'a> Parser<'a> { /// Parse a foreign item. fn parse_foreign_item(&mut self) -> PResult<'a, Option> { let attrs = self.parse_outer_attributes()?; - let lo = self.span.lo; + let lo = self.span; let visibility = self.parse_visibility(false)?; if self.check_keyword(keywords::Static) { @@ -5821,7 +5767,7 @@ impl<'a> Parser<'a> { attrs: Vec , macros_allowed: bool, attributes_allowed: bool, - lo: BytePos, + lo: Span, visibility: Visibility ) -> PResult<'a, Option>> { if macros_allowed && self.token.is_path_start() { @@ -5830,7 +5776,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; self.complain_if_pub_macro(&visibility, prev_span); - let mac_lo = self.span.lo; + let mac_lo = self.span; // item macro. let pth = self.parse_path(PathStyle::Mod)?; @@ -5856,9 +5802,9 @@ impl<'a> Parser<'a> { } } - let hi = self.prev_span.hi; - let mac = spanned(mac_lo, hi, Mac_ { path: pth, tts: tts }); - let item = self.mk_item(lo, hi, id, ItemKind::Mac(mac), visibility, attrs); + let hi = self.prev_span; + let mac = respan(mac_lo.to(hi), Mac_ { path: pth, tts: tts }); + let item = self.mk_item(lo.to(hi), id, ItemKind::Mac(mac), visibility, attrs); return Ok(Some(item)); } @@ -5886,7 +5832,7 @@ impl<'a> Parser<'a> { self.parse_unspanned_seq(&token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), SeqSep::trailing_allowed(token::Comma), |this| { - let lo = this.span.lo; + let lo = this.span; let ident = if this.eat_keyword(keywords::SelfValue) { keywords::SelfValue.ident() } else { @@ -5898,8 +5844,7 @@ impl<'a> Parser<'a> { rename: rename, id: ast::DUMMY_NODE_ID }; - let hi = this.prev_span.hi; - Ok(spanned(lo, hi, node)) + Ok(respan(lo.to(this.prev_span), node)) }) } @@ -5917,21 +5862,21 @@ impl<'a> Parser<'a> { /// MOD_SEP? non_global_path MOD_SEP LBRACE item_seq RBRACE /// MOD_SEP? LBRACE item_seq RBRACE fn parse_view_path(&mut self) -> PResult<'a, P> { - let lo = self.span.lo; + let lo = self.span; if self.check(&token::OpenDelim(token::Brace)) || self.check(&token::BinOp(token::Star)) || self.is_import_coupler() { // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. 
             self.eat(&token::ModSep);
             let prefix = ast::Path {
                 segments: vec![PathSegment::crate_root()],
-                span: mk_sp(lo, self.span.hi),
+                span: lo.to(self.span),
             };
             let view_path_kind = if self.eat(&token::BinOp(token::Star)) {
                 ViewPathGlob(prefix)
             } else {
                 ViewPathList(prefix, self.parse_path_list_items()?)
             };
-            Ok(P(spanned(lo, self.span.hi, view_path_kind)))
+            Ok(P(respan(lo.to(self.span), view_path_kind)))
         } else {
             let prefix = self.parse_path(PathStyle::Mod)?.default_to_global();
             if self.is_import_coupler() {
@@ -5939,16 +5884,16 @@ impl<'a> Parser<'a> {
                 self.bump();
                 if self.check(&token::BinOp(token::Star)) {
                     self.bump();
-                    Ok(P(spanned(lo, self.span.hi, ViewPathGlob(prefix))))
+                    Ok(P(respan(lo.to(self.span), ViewPathGlob(prefix))))
                 } else {
                     let items = self.parse_path_list_items()?;
-                    Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items))))
+                    Ok(P(respan(lo.to(self.span), ViewPathList(prefix, items))))
                 }
             } else {
                 // `foo::bar` or `foo::bar as baz`
                 let rename = self.parse_rename()?.
                                   unwrap_or(prefix.segments.last().unwrap().identifier);
-                Ok(P(spanned(lo, self.prev_span.hi, ViewPathSimple(rename, prefix))))
+                Ok(P(respan(lo.to(self.prev_span), ViewPathSimple(rename, prefix))))
             }
         }
     }
@@ -5964,11 +5909,11 @@ impl<'a> Parser<'a> {
     /// Parses a source module as a crate. This is the main
     /// entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
-        let lo = self.span.lo;
+        let lo = self.span;
         Ok(ast::Crate {
             attrs: self.parse_inner_attributes()?,
             module: self.parse_mod_items(&token::Eof, lo)?,
-            span: mk_sp(lo, self.span.lo),
+            span: lo.to(self.span),
         })
     }

diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index aeb5b1e0a53..6f5ab50b2fe 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -559,11 +559,7 @@ impl<'a, 'b> Context<'a, 'b> {
             let name = self.ecx.ident_of(&format!("__arg{}", i));
             pats.push(self.ecx.pat_ident(DUMMY_SP, name));
             for ref arg_ty in self.arg_unique_types[i].iter() {
-                locals.push(Context::format_arg(self.ecx,
-                                                self.macsp,
-                                                e.span,
-                                                arg_ty,
-                                                self.ecx.expr_ident(e.span, name)));
+                locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
             }
             heads.push(self.ecx.expr_addr_of(e.span, e));
         }
@@ -576,11 +572,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 Exact(i) => spans_pos[i],
                 _ => panic!("should never happen"),
             };
-            counts.push(Context::format_arg(self.ecx,
-                                            self.macsp,
-                                            span,
-                                            &Count,
-                                            self.ecx.expr_ident(span, name)));
+            counts.push(Context::format_arg(self.ecx, self.macsp, span, &Count, name));
         }

         // Now create a vector containing all the arguments
@@ -643,9 +635,10 @@ impl<'a, 'b> Context<'a, 'b> {
                   macsp: Span,
                   mut sp: Span,
                   ty: &ArgumentType,
-                  arg: P)
+                  arg: ast::Ident)
                   -> P {
         sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark);
+        let arg = ecx.expr_ident(sp, arg);
         let trait_ = match *ty {
             Placeholder(ref tyname) => {
                 match &tyname[..] {
diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs
index 9b45e364ecf..947192a0a23 100644
--- a/src/libsyntax_pos/lib.rs
+++ b/src/libsyntax_pos/lib.rs
@@ -174,6 +174,15 @@ impl Span {
         }
         result
     }
+
+    pub fn to(self, end: Span) -> Span {
+        // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480)
+        if end.ctxt == SyntaxContext::empty() {
+            Span { lo: self.lo, ..end }
+        } else {
+            Span { hi: end.hi, ..self }
+        }
+    }
 }

 #[derive(Clone, Debug)]
@@ -208,7 +217,7 @@ impl serialize::UseSpecializedDecodable for Span {
         d.read_struct("Span", 2, |d| {
             let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
             let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
-            Ok(mk_sp(lo, hi))
+            Ok(Span { lo: lo, hi: hi, ctxt: NO_EXPANSION })
         })
     }
 }
@@ -696,11 +705,6 @@ pub struct FileLines {
 thread_local!(pub static SPAN_DEBUG: Cell fmt::Result> =
                 Cell::new(default_span_debug));

-/* assuming that we're not in macro expansion */
-pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
-    Span {lo: lo, hi: hi, ctxt: NO_EXPANSION}
-}
-
 pub struct MacroBacktrace {
     /// span where macro was applied to generate this code
     pub call_site: Span,
diff --git a/src/test/compile-fail/imports/macro-paths.rs b/src/test/compile-fail/imports/macro-paths.rs
index 48e7ca0eee4..7c19917acc4 100644
--- a/src/test/compile-fail/imports/macro-paths.rs
+++ b/src/test/compile-fail/imports/macro-paths.rs
@@ -25,7 +25,6 @@ fn f() {
     bar::m! { //~ ERROR ambiguous
               //~| NOTE macro-expanded items do not shadow when used in a macro invocation path
         mod bar { pub use two_macros::m; } //~ NOTE could refer to the name defined here
-                                           //~^^^ NOTE in this expansion
     }
 }

@@ -37,6 +36,5 @@ fn g() {
     baz::m! { //~ ERROR ambiguous
              //~| NOTE macro-expanded items do not shadow when used in a macro invocation path
         mod baz { pub use two_macros::m; } //~ NOTE could refer to the name defined here
-                                           //~^^^ NOTE in this expansion
     }
 }
diff --git a/src/test/compile-fail/imports/macros.rs b/src/test/compile-fail/imports/macros.rs
index cfa7681dc22..06b0964a3b1 100644
--- a/src/test/compile-fail/imports/macros.rs
+++ b/src/test/compile-fail/imports/macros.rs
@@ -28,7 +28,6 @@ mod m2 {
     m! { //~ ERROR ambiguous
          //~| NOTE macro-expanded macro imports do not shadow
         use foo::m; //~ NOTE could refer to the name imported here
-                    //~^^^ NOTE in this expansion
     }
 }

@@ -43,7 +42,6 @@ mod m3 {
         m! { //~ ERROR ambiguous
              //~| NOTE macro-expanded macro imports do not shadow
             use two_macros::n as m; //~ NOTE could refer to the name imported here
-                                    //~^^^ NOTE in this expansion
         }
     }
 }
diff --git a/src/test/compile-fail/imports/shadow_builtin_macros.rs b/src/test/compile-fail/imports/shadow_builtin_macros.rs
index 2b3ba1b4aa7..a7f1cf3c9d3 100644
--- a/src/test/compile-fail/imports/shadow_builtin_macros.rs
+++ b/src/test/compile-fail/imports/shadow_builtin_macros.rs
@@ -31,7 +31,6 @@ mod m2 {

 mod m3 {
     ::two_macros::m!(use foo::panic;); //~ NOTE `panic` could refer to the name imported here
-                                       //~| NOTE in this expansion
     fn f() { panic!(); } //~ ERROR ambiguous
                          //~| NOTE `panic` is also a builtin macro
                          //~| NOTE macro-expanded macro imports do not shadow
diff --git a/src/test/compile-fail/issue-25385.rs b/src/test/compile-fail/issue-25385.rs
index 51d7baaf3e9..4aacb6840e9 100644
--- a/src/test/compile-fail/issue-25385.rs
+++ b/src/test/compile-fail/issue-25385.rs
@@ -21,5 +21,4 @@ fn main() {

     foo!(1i32.foo());
     //~^ ERROR no method named `foo` found for type `i32` in the current scope
-    //~^^ NOTE in this expansion of foo!
 }
diff --git a/src/test/run-pass/syntax-extension-source-utils.rs b/src/test/run-pass/syntax-extension-source-utils.rs
index 3b5f033d07b..25c7417f7eb 100644
--- a/src/test/run-pass/syntax-extension-source-utils.rs
+++ b/src/test/run-pass/syntax-extension-source-utils.rs
@@ -22,7 +22,7 @@ macro_rules! indirect_line { () => ( line!() ) }

 pub fn main() {
     assert_eq!(line!(), 24);
-    assert_eq!(column!(), 4);
+    assert_eq!(column!(), 15);
     assert_eq!(indirect_line!(), 26);
     assert!((file!().ends_with("syntax-extension-source-utils.rs")));
     assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string());
diff --git a/src/test/ui/macros/macro_path_as_generic_bound.stderr b/src/test/ui/macros/macro_path_as_generic_bound.stderr
index 96635032105..e4044f5aaf2 100644
--- a/src/test/ui/macros/macro_path_as_generic_bound.stderr
+++ b/src/test/ui/macros/macro_path_as_generic_bound.stderr
@@ -2,10 +2,7 @@ error[E0433]: failed to resolve. Use of undeclared type or module `m`
   --> $DIR/macro_path_as_generic_bound.rs:17:6
    |
 17 | foo!(m::m2::A);
-   | -----^^^^^^^^--
-   | |    |
-   | |    Use of undeclared type or module `m`
-   | in this macro invocation
+   |      ^^^^^^^^ Use of undeclared type or module `m`

 error: cannot continue compilation due to previous error

-- 
cgit 1.4.1-3-g733a5

From 8fde04b4a295792249d4a01f87a9f66143aa7c83 Mon Sep 17 00:00:00 2001
From: Jeffrey Seyfried
Date: Wed, 29 Mar 2017 07:17:18 +0000
Subject: Improve `Path` spans.

---
 src/libsyntax/attr.rs                |  7 +++--
 src/libsyntax/ext/base.rs            | 21 +++++++++++++-
 src/libsyntax/ext/tt/macro_parser.rs |  2 +-
 src/libsyntax/ext/tt/macro_rules.rs  |  2 +-
 src/libsyntax/ext/tt/quoted.rs       | 13 +++++----
 src/libsyntax/ext/tt/transcribe.rs   |  9 +-----
 src/libsyntax/parse/mod.rs           |  4 +--
 src/libsyntax/parse/parser.rs        | 56 ++++++++++++++++++++++--------------
 src/libsyntax/parse/token.rs         | 48 +++++++++++++++----------------
 9 files changed, 95 insertions(+), 67 deletions(-)

(limited to 'src/libsyntax')

diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index 5dcce2572af..6f5f52ff1e9 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -1015,9 +1015,10 @@ impl MetaItem {
     {
         let (mut span, name) = match tokens.next() {
             Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
-            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt {
-                token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
-                _ => None,
+            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt {
+                token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
+                token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
+                _ => return None,
             },
             _ => return None,
         };
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index a2d54b62ec6..fda026fec64 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -209,7 +209,26 @@ impl TTMacroExpander for F
 {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    span: Span,
                    input: TokenStream)
                    -> Box {
-        (*self)(ecx, span, &input.trees().collect::>())
+        struct AvoidInterpolatedIdents;
+
+        impl Folder for AvoidInterpolatedIdents {
+            fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
+                if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
+                    if let token::NtIdent(ident) = **nt {
+                        return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
+                    }
+                }
+                fold::noop_fold_tt(tt, self)
+            }
+
+            fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+                fold::noop_fold_mac(mac, self)
+            }
+        }
+
+        let input: Vec<_> =
+            input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect();
+        (*self)(ecx, span, &input)
     }
 }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 9ee427eed35..6cd1fea2e75 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -492,7 +492,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
         _ => {}
     }
     // check at the beginning and the parser checks after each bump
-    p.check_unknown_macro_variable();
+    p.process_potential_macro_variable();
     match name {
         "item" => match panictry!(p.parse_item()) {
             Some(i) => token::NtItem(i),
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 66f5520b882..93348c8f083 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -121,7 +121,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             p.root_module_name = cx.current_expansion.module.mod_path.last()
                 .map(|id| id.name.as_str().to_string());

-            p.check_unknown_macro_variable();
+            p.process_potential_macro_variable();
             // Let the context choose how to interpret the result.
             // Weird, but useful for X-macros.
             return Box::new(ParserAnyMacro {
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 12e746e024d..d216effbd45 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -136,11 +136,14 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
             TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
                 let span = match trees.next() {
                     Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
-                            let span = Span { lo: start_sp.lo, ..end_sp };
-                            result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                            continue
-                        }
+                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+                            Some(kind) => {
+                                let span = Span { lo: start_sp.lo, ..end_sp };
+                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                continue
+                            }
+                            _ => end_sp,
+                        },
                         tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                     },
                     tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 24004492be2..947089b0b9a 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -12,7 +12,7 @@ use ast::Ident;
 use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
-use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
+use parse::token::{self, SubstNt, Token, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
@@ -154,13 +154,6 @@ pub fn transcribe(sp_diag: &Handler,
                     None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
                     Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                         match **nt {
-                            // sidestep the interpolation tricks for ident because
-                            // (a) idents can be in lots of places, so it'd be a pain
-                            // (b) we actually can, since it's a token.
-                            NtIdent(ref sn) => {
-                                let token = TokenTree::Token(sn.span, token::Ident(sn.node));
-                                result.push(token.into());
-                            }
                             NtTT(ref tt) => result.push(tt.clone().into()),
                             _ => {
                                 let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index b5d0a46de49..c63a6524f74 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -218,9 +218,7 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc) -> TokenStream

 /// Given stream and the ParseSess, produce a parser
 pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
-    let mut p = Parser::new(sess, stream, None, false);
-    p.check_unknown_macro_variable();
-    p
+    Parser::new(sess, stream, None, false)
 }

 /// Parse a string representing a character literal into its final form.
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index b0611d75290..db2878c6b1e 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -160,6 +160,7 @@ pub struct Parser<'a> {
     /// the span of the current token:
     pub span: Span,
     /// the span of the previous token:
+    pub meta_var_span: Option,
     pub prev_span: Span,
     /// the previous token kind
     prev_token_kind: PrevTokenKind,
@@ -417,6 +418,7 @@ impl<'a> Parser<'a> {
             token: token::Underscore,
             span: syntax_pos::DUMMY_SP,
             prev_span: syntax_pos::DUMMY_SP,
+            meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
             restrictions: Restrictions::empty(),
             obsolete_set: HashSet::new(),
@@ -443,6 +445,7 @@ impl<'a> Parser<'a> {
            parser.directory.path = PathBuf::from(sess.codemap().span_to_filename(parser.span));
            parser.directory.path.pop();
        }
+       parser.process_potential_macro_variable();
        parser
    }

@@ -1012,7 +1015,7 @@ impl<'a> Parser<'a> {
             self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
         }

-        self.prev_span = self.span;
+        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);

        // Record last token kind for possible error recovery.
        self.prev_token_kind = match self.token {
@@ -1028,7 +1031,7 @@ impl<'a> Parser<'a> {
         self.token = next.tok;
         self.expected_tokens.clear();
         // check after each token
-        self.check_unknown_macro_variable();
+        self.process_potential_macro_variable();
     }

     /// Advance the parser using provided token as a next one. Use this when
@@ -1722,7 +1725,7 @@ impl<'a> Parser<'a> {
     pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> {
         maybe_whole!(self, NtPath, |x| x);

-        let lo = self.span;
+        let lo = self.meta_var_span.unwrap_or(self.span);
         let is_global = self.eat(&token::ModSep);

         // Parse any number of segments and bound sets. A segment is an
@@ -1744,13 +1747,9 @@ impl<'a> Parser<'a> {
             segments.insert(0, PathSegment::crate_root());
         }

-        // Assemble the span.
-        // FIXME(#39450) This is bogus if part of the path is macro generated.
-        let span = lo.to(self.prev_span);
-
         // Assemble the result.
         Ok(ast::Path {
-            span: span,
+            span: lo.to(self.prev_span),
             segments: segments,
         })
     }
@@ -1763,8 +1762,8 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
+            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
-            let ident_span = self.prev_span;

             if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) {
                 self.bump();
@@ -1831,8 +1830,8 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
+            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
-            let ident_span = self.prev_span;

             // If we do not see a `::`, stop.
             if !self.eat(&token::ModSep) {
@@ -1873,10 +1872,11 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
+            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;

             // Assemble and push the result.
-            segments.push(PathSegment::from_ident(identifier, self.prev_span));
+            segments.push(PathSegment::from_ident(identifier, ident_span));

             // If we do not see a `::` or see `::{`/`::*`, stop.
             if !self.check(&token::ModSep) || self.is_import_coupler() {
@@ -1896,8 +1896,9 @@ impl<'a> Parser<'a> {
     fn expect_lifetime(&mut self) -> Lifetime {
         match self.token {
             token::Lifetime(ident) => {
+                let ident_span = self.span;
                 self.bump();
-                Lifetime { name: ident.name, span: self.prev_span, id: ast::DUMMY_NODE_ID }
+                Lifetime { name: ident.name, span: ident_span, id: ast::DUMMY_NODE_ID }
             }
             _ => self.span_bug(self.span, "not a lifetime")
         }
@@ -2568,10 +2569,23 @@ impl<'a> Parser<'a> {
         return Ok(e);
     }

-    pub fn check_unknown_macro_variable(&mut self) {
-        if let token::SubstNt(name) = self.token {
-            self.fatal(&format!("unknown macro variable `{}`", name)).emit()
-        }
+    pub fn process_potential_macro_variable(&mut self) {
+        let ident = match self.token {
+            token::SubstNt(name) => {
+                self.fatal(&format!("unknown macro variable `{}`", name)).emit();
+                return
+            }
+            token::Interpolated(ref nt) => {
+                self.meta_var_span = Some(self.span);
+                match **nt {
+                    token::NtIdent(ident) => ident,
+                    _ => return,
+                }
+            }
+            _ => return,
+        };
+        self.token = token::Ident(ident.node);
+        self.span = ident.span;
     }

     /// parse a single token tree from the input.
@@ -2589,9 +2603,9 @@ impl<'a> Parser<'a> {
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let token = mem::replace(&mut self.token, token::Underscore);
+                let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span);
                 self.bump();
-                TokenTree::Token(self.prev_span, token)
+                TokenTree::Token(span, token)
             }
         }
     }
@@ -3489,9 +3503,9 @@ impl<'a> Parser<'a> {
     fn parse_pat_ident(&mut self,
                        binding_mode: ast::BindingMode)
                        -> PResult<'a, PatKind> {
+        let ident_span = self.span;
         let ident = self.parse_ident()?;
-        let prev_span = self.prev_span;
-        let name = codemap::Spanned{span: prev_span, node: ident};
+        let name = codemap::Spanned{span: ident_span, node: ident};
         let sub = if self.eat(&token::At) {
             Some(self.parse_pat()?)
         } else {
@@ -4364,7 +4378,7 @@ impl<'a> Parser<'a> {
     fn parse_self_arg(&mut self) -> PResult<'a, Option> {
         let expect_ident = |this: &mut Self| match this.token {
             // Preserve hygienic context.
-            token::Ident(ident) => { this.bump(); codemap::respan(this.prev_span, ident) }
+            token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 519d5bd98e4..74aa3984a9a 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -211,9 +211,7 @@ impl Token {
             ModSep => true, // global path
             Pound => true, // expression attributes
             Interpolated(ref nt) => match **nt {
-                NtExpr(..) => true,
-                NtBlock(..) => true,
-                NtPath(..) => true,
+                NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
                 _ => false,
             },
             _ => false,
@@ -236,8 +234,7 @@ impl Token {
             Lt | BinOp(Shl) => true, // associated path
             ModSep => true, // global path
             Interpolated(ref nt) => match **nt {
-                NtTy(..) => true,
-                NtPath(..) => true,
+                NtIdent(..) | NtTy(..) | NtPath(..) => true,
                 _ => false,
             },
             _ => false,
@@ -252,14 +249,22 @@ impl Token {
         }
     }

-    /// Returns `true` if the token is an identifier.
-    pub fn is_ident(&self) -> bool {
+    pub fn ident(&self) -> Option {
         match *self {
-            Ident(..) => true,
-            _ => false,
+            Ident(ident) => Some(ident),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident) => Some(ident.node),
+                _ => None,
+            },
+            _ => None,
         }
     }

+    /// Returns `true` if the token is an identifier.
+    pub fn is_ident(&self) -> bool {
+        self.ident().is_some()
+    }
+
     /// Returns `true` if the token is a documentation comment.
     pub fn is_doc_comment(&self) -> bool {
         match *self {
@@ -311,18 +316,15 @@ impl Token {

     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
-        match *self {
-            Ident(id) => id.name == kw.name(),
-            _ => false,
-        }
+        self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
     }

     pub fn is_path_segment_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name == keywords::Super.name() ||
-                         id.name == keywords::SelfValue.name() ||
-                         id.name == keywords::SelfType.name(),
-            _ => false,
+        match self.ident() {
+            Some(id) => id.name == keywords::Super.name() ||
+                        id.name == keywords::SelfValue.name() ||
+                        id.name == keywords::SelfType.name(),
+            None => false,
         }
     }

@@ -333,18 +335,16 @@ impl Token {

     /// Returns `true` if the token is a strict keyword.
     pub fn is_strict_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name >= keywords::As.name() &&
-                         id.name <= keywords::While.name(),
+        match self.ident() {
+            Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
             _ => false,
         }
     }

     /// Returns `true` if the token is a keyword reserved for possible future use.
     pub fn is_reserved_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name >= keywords::Abstract.name() &&
-                         id.name <= keywords::Yield.name(),
+        match self.ident() {
+            Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
             _ => false,
         }
     }
-- 
cgit 1.4.1-3-g733a5

From b3763862280946cab09cbedc4ad5626ebd95a5b2 Mon Sep 17 00:00:00 2001
From: Thomas Jespersen
Date: Fri, 17 Mar 2017 23:11:27 +0100
Subject: Replace hardcoded forward slash with path::MAIN_SEPARATOR

Fixes #40149
---
 src/libstd/sys_common/backtrace.rs                |  4 +--
 src/libsyntax/parse/parser.rs                     |  9 ++++---
 src/test/parse-fail/mod_file_not_exist.rs         |  2 ++
 src/test/parse-fail/mod_file_not_exist_windows.rs | 32 +++++++++++++++++++++++
 4 files changed, 41 insertions(+), 6 deletions(-)
 create mode 100644 src/test/parse-fail/mod_file_not_exist_windows.rs

(limited to 'src/libsyntax')

diff --git a/src/libstd/sys_common/backtrace.rs b/src/libstd/sys_common/backtrace.rs
index 99297b781e4..f5c188f7a75 100644
--- a/src/libstd/sys_common/backtrace.rs
+++ b/src/libstd/sys_common/backtrace.rs
@@ -19,7 +19,7 @@ use io;
 use libc;
 use str;
 use sync::atomic::{self, Ordering};
-use path::Path;
+use path::{self, Path};
 use sys::mutex::Mutex;
 use ptr;

@@ -262,7 +262,7 @@ fn output_fileline(w: &mut Write, file: &[u8], line: libc::c_int,
     if let Ok(cwd) = env::current_dir() {
         if let Ok(stripped) = file_path.strip_prefix(&cwd) {
             if let Some(s) = stripped.to_str() {
-                write!(w, " at ./{}:{}", s, line)?;
+                write!(w, " at .{}{}:{}", path::MAIN_SEPARATOR, s, line)?;
                 already_printed = true;
             }
         }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index db2878c6b1e..c2c3e5a6855 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -59,7 +59,7 @@ use util::ThinVec;

 use std::collections::HashSet;
 use std::{cmp, mem, slice};
-use std::path::{Path, PathBuf};
+use std::path::{self, Path, PathBuf};

 bitflags! {
     flags Restrictions: u8 {
@@ -5146,7 +5146,7 @@ impl<'a> Parser<'a> {
     pub fn default_submod_path(id: ast::Ident, dir_path: &Path, codemap: &CodeMap) -> ModulePath {
         let mod_name = id.to_string();
         let default_path_str = format!("{}.rs", mod_name);
-        let secondary_path_str = format!("{}/mod.rs", mod_name);
+        let secondary_path_str = format!("{}{}mod.rs", mod_name, path::MAIN_SEPARATOR);
         let default_path = dir_path.join(&default_path_str);
         let secondary_path = dir_path.join(&secondary_path_str);
         let default_exists = codemap.file_exists(&default_path);
@@ -5224,8 +5224,9 @@ impl<'a> Parser<'a> {
         };
         err.span_note(id_sp,
                       &format!("maybe move this module `{0}` to its own directory \
-                                via `{0}/mod.rs`",
-                               this_module));
+                                via `{0}{1}mod.rs`",
+                               this_module,
+                               path::MAIN_SEPARATOR));
         if paths.path_exists {
             err.span_note(id_sp,
                           &format!("... or maybe `use` the module `{}` instead \
diff --git a/src/test/parse-fail/mod_file_not_exist.rs b/src/test/parse-fail/mod_file_not_exist.rs
index 7736394a6f5..4bc6e706d42 100644
--- a/src/test/parse-fail/mod_file_not_exist.rs
+++ b/src/test/parse-fail/mod_file_not_exist.rs
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.

+// ignore-windows
+
 // compile-flags: -Z parse-only

 mod not_a_real_file; //~ ERROR file not found for module `not_a_real_file`
diff --git a/src/test/parse-fail/mod_file_not_exist_windows.rs b/src/test/parse-fail/mod_file_not_exist_windows.rs
new file mode 100644
index 00000000000..c58603b4398
--- /dev/null
+++ b/src/test/parse-fail/mod_file_not_exist_windows.rs
@@ -0,0 +1,32 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 or the MIT license
+// , at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-gnu
+// ignore-android
+// ignore-bitrig
+// ignore-macos
+// ignore-dragonfly
+// ignore-freebsd
+// ignore-haiku
+// ignore-ios
+// ignore-linux
+// ignore-netbsd
+// ignore-openbsd
+// ignore-solaris
+// ignore-emscripten
+
+// compile-flags: -Z parse-only
+
+mod not_a_real_file; //~ ERROR file not found for module `not_a_real_file`
+//~^ HELP name the file either not_a_real_file.rs or not_a_real_file\mod.rs inside the directory
+
+fn main() {
+    assert_eq!(mod_file_aux::bar(), 10);
+}
-- 
cgit 1.4.1-3-g733a5

From 56847af9163284f928d5632a3d0d29399716414f Mon Sep 17 00:00:00 2001
From: Niko Matsakis
Date: Fri, 17 Mar 2017 09:51:31 -0400
Subject: port the match code to use `CoerceMany`

`match { }` now (correctly?) indicates divergence, which results in
more unreachable warnings. We also avoid fallback to `!` if there is
just one arm (see new test: `match-unresolved-one-arm.rs`).
---
 src/librustc_typeck/check/_match.rs                | 90 ++++++++++------------
 src/libsyntax/parse/obsolete.rs                    |  1 +
 .../match-no-arms-unreachable-after.rs             | 22 ++++++
 ...h-unreachable-warning-with-diverging-discrim.rs | 16 ++++
 src/test/compile-fail/match-unresolved-one-arm.rs  | 17 ++++
 5 files changed, 95 insertions(+), 51 deletions(-)
 create mode 100644 src/test/compile-fail/match-no-arms-unreachable-after.rs
 create mode 100644 src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs
 create mode 100644 src/test/compile-fail/match-unresolved-one-arm.rs

(limited to 'src/libsyntax')

diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs
index feed5752cf8..f0d2598a0fb 100644
--- a/src/librustc_typeck/check/_match.rs
+++ b/src/librustc_typeck/check/_match.rs
@@ -16,6 +16,7 @@ use rustc::infer::type_variable::TypeVariableOrigin;
 use rustc::traits::ObligationCauseCode;
 use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference};
 use check::{FnCtxt, Expectation, Diverges};
+use check::coercion::CoerceMany;
 use util::nodemap::FxHashMap;

 use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -414,6 +415,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
             discrim_ty = self.next_ty_var(TypeVariableOrigin::TypeInference(discrim.span));
             self.check_expr_has_type(discrim, discrim_ty);
         };
+
+        // If the discriminant diverges, the match is pointless (e.g.,
+        // `match (return) { }`).
+        self.warn_if_unreachable(expr.id, expr.span, "expression");
+
+        // If there are no arms, that is a diverging match; a special case.
+        if arms.is_empty() {
+            self.diverges.set(self.diverges.get() | Diverges::Always);
+            return tcx.types.never;
+        }
+
+        // Otherwise, we have to union together the types that the
+        // arms produce and so forth.
+
         let discrim_diverges = self.diverges.get();
         self.diverges.set(Diverges::Maybe);
@@ -426,6 +441,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                 self.check_pat(&p, discrim_ty);
                 all_pats_diverge &= self.diverges.get();
             }
+
             // As discussed with @eddyb, this is for disabling unreachable_code
             // warnings on patterns (they're now subsumed by unreachable_patterns
             // warnings).
@@ -444,20 +460,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
         // on any empty type and is therefore unreachable; should the flow
         // of execution reach it, we will panic, so bottom is an appropriate
         // type in that case)
-        let expected = expected.adjust_for_branches(self);
-        let mut result_ty = self.next_diverging_ty_var(
-            TypeVariableOrigin::DivergingBlockExpr(expr.span));
         let mut all_arms_diverge = Diverges::WarnedAlways;
-        let coerce_first = match expected {
-            // We don't coerce to `()` so that if the match expression is a
-            // statement it's branches can have any consistent type. That allows
-            // us to give better error messages (pointing to a usually better
-            // arm for inconsistent arms or to the whole match when a `()` type
-            // is required).
-            Expectation::ExpectHasType(ety) if ety != self.tcx.mk_nil() => {
-                ety
-            }
-            _ => result_ty
+
+        let expected = expected.adjust_for_branches(self);
+
+        let mut coercion = {
+            let coerce_first = match expected {
+                // We don't coerce to `()` so that if the match expression is a
+                // statement it's branches can have any consistent type. That allows
+                // us to give better error messages (pointing to a usually better
+                // arm for inconsistent arms or to the whole match when a `()` type
+                // is required).
+                Expectation::ExpectHasType(ety) if ety != self.tcx.mk_nil() => ety,
+                _ => self.next_ty_var(TypeVariableOrigin::MiscVariable(expr.span)),
+            };
+            CoerceMany::new(coerce_first)
         };

         for (i, (arm, pats_diverge)) in arms.iter().zip(all_arm_pats_diverge).enumerate() {
@@ -470,11 +487,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
             let arm_ty = self.check_expr_with_expectation(&arm.body, expected);
             all_arms_diverge &= self.diverges.get();

-            if result_ty.references_error() || arm_ty.references_error() {
-                result_ty = tcx.types.err;
-                continue;
-            }
-
             // Handle the fallback arm of a desugared if-let like a missing else.
             let is_if_let_fallback = match match_src {
                 hir::MatchSource::IfLetDesugar { contains_else_clause: false } => {
@@ -483,47 +495,23 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                 _ => false
             };

-            let cause = if is_if_let_fallback {
-                self.cause(expr.span, ObligationCauseCode::IfExpressionWithNoElse)
+            if is_if_let_fallback {
+                let cause = self.cause(expr.span, ObligationCauseCode::IfExpressionWithNoElse);
+                assert!(arm_ty.is_nil());
+                coercion.coerce_forced_unit(self, &cause);
             } else {
-                self.cause(expr.span, ObligationCauseCode::MatchExpressionArm {
+                let cause = self.cause(expr.span, ObligationCauseCode::MatchExpressionArm {
                     arm_span: arm.body.span,
                     source: match_src
-                })
-            };
-
-            let result = if is_if_let_fallback {
-                self.eq_types(true, &cause, arm_ty, result_ty)
-                    .map(|infer_ok| {
-                        self.register_infer_ok_obligations(infer_ok);
-                        arm_ty
-                    })
-            } else if i == 0 {
-                // Special-case the first arm, as it has no "previous expressions".
-                self.try_coerce(&arm.body, arm_ty, coerce_first)
-            } else {
-                let prev_arms = || arms[..i].iter().map(|arm| &*arm.body);
-                self.try_find_coercion_lub(&cause, prev_arms, result_ty, &arm.body, arm_ty)
-            };
-
-            result_ty = match result {
-                Ok(ty) => ty,
-                Err(e) => {
-                    let (expected, found) = if is_if_let_fallback {
-                        (arm_ty, result_ty)
-                    } else {
-                        (result_ty, arm_ty)
-                    };
-                    self.report_mismatched_types(&cause, expected, found, e).emit();
-                    self.tcx.types.err
-                }
-            };
+                });
+                coercion.coerce(self, &cause, &arm.body, arm_ty);
+            }
         }

         // We won't diverge unless the discriminant or all arms diverge.
         self.diverges.set(discrim_diverges | all_arms_diverge);

-        result_ty
+        coercion.complete(self)
     }

     fn check_pat_struct(&self,
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index a46a788ca08..d5baec675e4 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -36,6 +36,7 @@ pub trait ParserObsoleteMethods {
 impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
     /// Reports an obsolete syntax non-fatal error.
     #[allow(unused_variables)]
+    #[allow(unreachable_code)]
     fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax) {
         let (kind_str, desc, error) = match kind {
             // Nothing here at the moment
diff --git a/src/test/compile-fail/match-no-arms-unreachable-after.rs b/src/test/compile-fail/match-no-arms-unreachable-after.rs
new file mode 100644
index 00000000000..db08f5e5e66
--- /dev/null
+++ b/src/test/compile-fail/match-no-arms-unreachable-after.rs
@@ -0,0 +1,22 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 or the MIT license
+// , at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(warnings)]
+#![deny(unreachable_code)]
+
+enum Void { }
+
+fn foo(v: Void) {
+    match v { }
+    let x = 2; //~ ERROR unreachable
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs b/src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs
new file mode 100644
index 00000000000..aae0f3135d8
--- /dev/null
+++ b/src/test/compile-fail/match-unreachable-warning-with-diverging-discrim.rs
@@ -0,0 +1,16 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 or the MIT license
+// , at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(unused_parens)]
+#![deny(unreachable_code)]
+
+fn main() {
+    match (return) { } //~ ERROR unreachable expression
+}
diff --git a/src/test/compile-fail/match-unresolved-one-arm.rs b/src/test/compile-fail/match-unresolved-one-arm.rs
new file mode 100644
index 00000000000..ea0f8db99e8
--- /dev/null
+++ b/src/test/compile-fail/match-unresolved-one-arm.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 or the MIT license
+// , at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn foo() -> T { panic!("Rocks for my pillow") }
+
+fn main() {
+    let x = match () { //~ ERROR type annotations needed
+        () => foo() // T here should be unresolved
+    };
+}
-- 
cgit 1.4.1-3-g733a5
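A condensed illustration of the behavior change described in the last commit message, adapted from the tests that commit adds (match-no-arms-unreachable-after.rs and match-unreachable-warning-with-diverging-discrim.rs); the function names below are illustrative only and the snippet is a sketch, not part of the patch series itself. It is expected to be rejected with unreachable_code errors once the commit lands:

    // With the CoerceMany-based match checking, an arm-less match inherits
    // divergence from its discriminant or from having no arms at all, so
    // code that follows it is reported by the unreachable_code lint.
    #![deny(unreachable_code)]

    enum Void {}

    fn no_arms(v: Void) {
        match v {}          // no arms: the whole match diverges
        let _x = 2;         // reported as unreachable
    }

    fn diverging_discriminant() {
        match (return) {}   // reported as an unreachable expression
    }

    fn main() {}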