| | | |
|---|---|---|
| author | Baoshan <pangbw@gmail.com> | 2019-08-29 09:29:23 -0700 |
| committer | GitHub <noreply@github.com> | 2019-08-29 09:29:23 -0700 |
| commit | 043c19c69c0beac3696cb82d44476ba4298a6b07 (patch) | |
| tree | 1813e5dd1e4efd5806f401968b4ed34b8c9c76ac /src/libsyntax_ext | |
| parent | cae6d66d9989857e321e0963142b08b1517dc723 (diff) | |
| parent | 76f17219c71973fd4a58f2f8020eec4d8f5dcd11 (diff) | |
| download | rust-043c19c69c0beac3696cb82d44476ba4298a6b07.tar.gz | rust-043c19c69c0beac3696cb82d44476ba4298a6b07.zip |
Merge branch 'master' into bpang-runtest
Diffstat (limited to 'src/libsyntax_ext')
27 files changed, 188 insertions(+), 229 deletions(-)
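A pattern worth noting before the raw diff: much of the churn in the `deriving` modules comes from `ast::Variant` (and `ast::FieldPat`) losing their `Spanned` wrappers, so accesses like `variant.node.ident` and `variant.node.data` become `variant.ident` and `variant.data`. The following sketch uses small stand-in types, not the real `syntax::ast` definitions, and assumes only that the span moves from the wrapper onto the node itself.

```rust
// Stand-in types for illustration only; libsyntax's real `Variant`, `Ident`,
// and `Span` are richer than this.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { lo: u32, hi: u32 }

/// Old shape: the span lives on a generic wrapper, so callers reach through `.node`.
struct Spanned<T> { node: T, span: Span }
struct VariantData { ident: &'static str }
type OldVariant = Spanned<VariantData>;

/// New shape: the span is an ordinary field, so callers use the fields directly.
struct NewVariant { ident: &'static str, span: Span }

fn old_ident(v: &OldVariant) -> &'static str { v.node.ident } // variant.node.ident
fn new_ident(v: &NewVariant) -> &'static str { v.ident }      // variant.ident

fn main() {
    let sp = Span { lo: 10, hi: 14 };
    let old = Spanned { node: VariantData { ident: "Leaf" }, span: sp };
    let new = NewVariant { ident: "Leaf", span: sp };
    assert_eq!(old_ident(&old), new_ident(&new));
    assert_eq!(old.span, new.span);
    println!("same variant name either way: {}", new_ident(&new));
}
```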
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index c1c2732605c..644a44f1989 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -47,10 +47,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, -> Box<dyn base::MacResult + 'cx> { let mut inline_asm = match parse_inline_asm(cx, sp, tts) { Ok(Some(inline_asm)) => inline_asm, - Ok(None) => return DummyResult::expr(sp), + Ok(None) => return DummyResult::any(sp), Err(mut err) => { err.emit(); - return DummyResult::expr(sp); + return DummyResult::any(sp); } }; @@ -63,7 +63,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(inline_asm)), - span: sp, + span: sp.with_ctxt(cx.backtrace()), attrs: ThinVec::new(), })) } @@ -277,6 +277,5 @@ fn parse_inline_asm<'a>( volatile, alignstack, dialect, - ctxt: cx.backtrace(), })) } diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index d7571f43edd..6301283460a 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -1,7 +1,6 @@ use errors::{Applicability, DiagnosticBuilder}; use syntax::ast::{self, *}; -use syntax::source_map::Spanned; use syntax::ext::base::*; use syntax::parse::token::{self, TokenKind}; use syntax::parse::parser::Parser; @@ -20,12 +19,12 @@ pub fn expand_assert<'cx>( Ok(assert) => assert, Err(mut err) => { err.emit(); - return DummyResult::expr(sp); + return DummyResult::any(sp); } }; let sp = sp.apply_mark(cx.current_expansion.id); - let panic_call = Mac_ { + let panic_call = Mac { path: Path::from_ident(Ident::new(sym::panic, sp)), tts: custom_message.unwrap_or_else(|| { TokenStream::from(TokenTree::token( @@ -37,6 +36,7 @@ pub fn expand_assert<'cx>( )) }).into(), delim: MacDelimiter::Parenthesis, + span: sp, prior_type_ascription: None, }; let if_expr = cx.expr_if( @@ -44,10 +44,7 @@ pub fn expand_assert<'cx>( cx.expr(sp, ExprKind::Unary(UnOp::Not, cond_expr)), cx.expr( sp, - ExprKind::Mac(Spanned { - span: sp, - node: panic_call, - }), + ExprKind::Mac(panic_call), ), None, ); diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs index 84830e6ddda..0e52c1af908 100644 --- a/src/libsyntax_ext/cfg.rs +++ b/src/libsyntax_ext/cfg.rs @@ -25,7 +25,7 @@ pub fn expand_cfg( } Err(mut err) => { err.emit(); - DummyResult::expr(sp) + DummyResult::any(sp) } } } diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index f1d079eb053..4cd17531a45 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -1,5 +1,5 @@ use syntax::ast; -use syntax::ext::base; +use syntax::ext::base::{self, DummyResult}; use syntax::symbol::Symbol; use syntax::tokenstream; @@ -12,7 +12,7 @@ pub fn expand_syntax_ext( ) -> Box<dyn base::MacResult + 'static> { let es = match base::get_exprs_from_tts(cx, sp, tts) { Some(e) => e, - None => return base::DummyResult::expr(sp), + None => return DummyResult::any(sp), }; let mut accumulator = String::new(); let mut missing_literal = vec![]; @@ -55,9 +55,9 @@ pub fn expand_syntax_ext( let mut err = cx.struct_span_err(missing_literal, "expected a literal"); err.note("only literals (like `\"foo\"`, `42` and `3.14`) can be passed to `concat!()`"); err.emit(); - return base::DummyResult::expr(sp); + return DummyResult::any(sp); } else if has_errors { - return base::DummyResult::expr(sp); + return DummyResult::any(sp); } let sp = sp.apply_mark(cx.current_expansion.id); base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator))) diff --git 
a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 3b1edf90d6b..d030ea4a56e 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -35,7 +35,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>, match annitem.node { ItemKind::Struct(_, Generics { ref params, .. }) | ItemKind::Enum(_, Generics { ref params, .. }) => { - let container_id = cx.current_expansion.id.parent(); + let container_id = cx.current_expansion.id.expn_data().parent; if cx.resolver.has_derives(container_id, SpecialDerives::COPY) && !params.iter().any(|param| match param.kind { ast::GenericParamKind::Type { .. } => true, @@ -129,7 +129,7 @@ fn cs_clone_shallow(name: &str, if is_union { // let _: AssertParamIsCopy<Self>; let self_ty = - cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_empty_ctxt(kw::SelfUpper))); + cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_dummy_span(kw::SelfUpper))); assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy"); } else { match *substr.fields { @@ -138,7 +138,7 @@ fn cs_clone_shallow(name: &str, } StaticEnum(enum_def, ..) => { for variant in &enum_def.variants { - process_variant(cx, &mut stmts, &variant.node.data); + process_variant(cx, &mut stmts, &variant.data); } } _ => cx.span_bug(trait_span, &format!("unexpected substructure in \ @@ -170,9 +170,9 @@ fn cs_clone(name: &str, vdata = vdata_; } EnumMatching(.., variant, ref af) => { - ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.node.ident]); + ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.ident]); all_fields = af; - vdata = &variant.node.data; + vdata = &variant.data; } EnumNonMatchingCollapsed(..) => { cx.span_bug(trait_span, diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 1909729f4a9..54027c600b4 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -13,7 +13,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::EQ); + cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::EQ); let inline = cx.meta_word(span, sym::inline); let hidden = cx.meta_list_item_word(span, sym::hidden); @@ -75,7 +75,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt<'_>, } StaticEnum(enum_def, ..) 
=> { for variant in &enum_def.variants { - process_variant(cx, &mut stmts, &variant.node.data); + process_variant(cx, &mut stmts, &variant.data); } } _ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`") diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs index 885cfee3565..55687c3175b 100644 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ b/src/libsyntax_ext/deriving/cmp/ord.rs @@ -43,17 +43,18 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt<'_>, } -pub fn ordering_collapsed(cx: &mut ExtCtxt<'_>, - span: Span, - self_arg_tags: &[ast::Ident]) - -> P<ast::Expr> { +pub fn ordering_collapsed( + cx: &mut ExtCtxt<'_>, + span: Span, + self_arg_tags: &[ast::Ident], +) -> P<ast::Expr> { let lft = cx.expr_ident(span, self_arg_tags[0]); let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); - cx.expr_method_call(span, lft, cx.ident_of("cmp"), vec![rgt]) + cx.expr_method_call(span, lft, ast::Ident::new(sym::cmp, span), vec![rgt]) } pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - let test_id = cx.ident_of("cmp").gensym(); + let test_id = ast::Ident::new(sym::cmp, span); let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]); @@ -75,34 +76,34 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P< // as the outermost one, and the last as the innermost. false, |cx, span, old, self_f, other_fs| { - // match new { - // ::std::cmp::Ordering::Equal => old, - // cmp => cmp - // } + // match new { + // ::std::cmp::Ordering::Equal => old, + // cmp => cmp + // } - let new = { - let other_f = match other_fs { - [o_f] => o_f, - _ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"), - }; + let new = { + let other_f = match other_fs { + [o_f] => o_f, + _ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"), + }; - let args = vec![ - cx.expr_addr_of(span, self_f), - cx.expr_addr_of(span, other_f.clone()), - ]; + let args = vec![ + cx.expr_addr_of(span, self_f), + cx.expr_addr_of(span, other_f.clone()), + ]; - cx.expr_call_global(span, cmp_path.clone(), args) - }; + cx.expr_call_global(span, cmp_path.clone(), args) + }; - let eq_arm = cx.arm(span, - vec![cx.pat_path(span, equals_path.clone())], - old); - let neq_arm = cx.arm(span, - vec![cx.pat_ident(span, test_id)], - cx.expr_ident(span, test_id)); + let eq_arm = cx.arm(span, + vec![cx.pat_path(span, equals_path.clone())], + old); + let neq_arm = cx.arm(span, + vec![cx.pat_ident(span, test_id)], + cx.expr_ident(span, test_id)); - cx.expr_match(span, new, vec![eq_arm, neq_arm]) - }, + cx.expr_match(span, new, vec![eq_arm, neq_arm]) + }, cx.expr_path(equals_path.clone()), Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| { if self_args.len() != 2 { diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 7d7c4ae22a8..91e1e80e4fb 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -13,7 +13,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::PARTIAL_EQ); + cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::PARTIAL_EQ); // structures are equal if all fields are equal, and non equal, if // any 
fields are not equal or if the enum variants are different diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index 0ec30f5924f..740b92a9b79 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -94,11 +94,12 @@ pub enum OrderingOp { GeOp, } -pub fn some_ordering_collapsed(cx: &mut ExtCtxt<'_>, - span: Span, - op: OrderingOp, - self_arg_tags: &[ast::Ident]) - -> P<ast::Expr> { +pub fn some_ordering_collapsed( + cx: &mut ExtCtxt<'_>, + span: Span, + op: OrderingOp, + self_arg_tags: &[ast::Ident], +) -> P<ast::Expr> { let lft = cx.expr_ident(span, self_arg_tags[0]); let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); let op_str = match op { @@ -108,11 +109,11 @@ pub fn some_ordering_collapsed(cx: &mut ExtCtxt<'_>, GtOp => "gt", GeOp => "ge", }; - cx.expr_method_call(span, lft, cx.ident_of(op_str), vec![rgt]) + cx.expr_method_call(span, lft, ast::Ident::from_str_and_span(op_str, span), vec![rgt]) } pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> { - let test_id = cx.ident_of("cmp").gensym(); + let test_id = ast::Ident::new(sym::cmp, span); let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal])); let ordering_expr = cx.expr_path(ordering.clone()); let equals_expr = cx.expr_some(span, ordering_expr); diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index 0f709630bf4..44153541048 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -53,7 +53,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> // based on the "shape". let (ident, vdata, fields) = match substr.fields { Struct(vdata, fields) => (substr.type_ident, *vdata, fields), - EnumMatching(_, _, v, fields) => (v.node.ident, &v.node.data, fields), + EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields), EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) 
=> cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`"), @@ -62,7 +62,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> // We want to make sure we have the ctxt set so that we can use unstable methods let span = span.with_ctxt(cx.backtrace()); let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked)); - let builder = Ident::from_str("debug_trait_builder").gensym(); + let builder = Ident::from_str_and_span("debug_trait_builder", span); let builder_expr = cx.expr_ident(span, builder.clone()); let fmt = substr.nonself_args[0].clone(); @@ -73,7 +73,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> // tuple struct/"normal" variant let expr = cx.expr_method_call(span, fmt, Ident::from_str("debug_tuple"), vec![name]); - stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr)); + stmts.push(cx.stmt_let(span, true, builder, expr)); for field in fields { // Use double indirection to make sure this works for unsized types @@ -82,7 +82,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> let expr = cx.expr_method_call(span, builder_expr.clone(), - Ident::with_empty_ctxt(sym::field), + Ident::new(sym::field, span), vec![field]); // Use `let _ = expr;` to avoid triggering the @@ -106,7 +106,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> let field = cx.expr_addr_of(field.span, field); let expr = cx.expr_method_call(span, builder_expr.clone(), - Ident::with_empty_ctxt(sym::field), + Ident::new(sym::field, span), vec![name, field]); stmts.push(stmt_let_undescore(cx, span, expr)); } diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index 293c5a1e7e7..9b6f8518de0 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -1,6 +1,6 @@ -//! The compiler code necessary for `#[derive(Decodable)]`. See encodable.rs for more. +//! The compiler code necessary for `#[derive(RustcDecodable)]`. See encodable.rs for more. -use crate::deriving::{self, pathvec_std}; +use crate::deriving::pathvec_std; use crate::deriving::generic::*; use crate::deriving::generic::ty::*; @@ -17,7 +17,7 @@ pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt<'_>, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { let krate = "rustc_serialize"; - let typaram = &*deriving::hygienic_type_parameter(item, "__D"); + let typaram = "__D"; let trait_def = TraitDef { span, diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index 6d0d3b96a56..8b18fb25e90 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -1,11 +1,12 @@ -//! The compiler code necessary to implement the `#[derive(Encodable)]` -//! (and `Decodable`, in `decodable.rs`) extension. The idea here is that -//! type-defining items may be tagged with `#[derive(Encodable, Decodable)]`. +//! The compiler code necessary to implement the `#[derive(RustcEncodable)]` +//! (and `RustcDecodable`, in `decodable.rs`) extension. The idea here is that +//! type-defining items may be tagged with +//! `#[derive(RustcEncodable, RustcDecodable)]`. //! //! For example, a type like: //! //! ``` -//! #[derive(Encodable, Decodable)] +//! #[derive(RustcEncodable, RustcDecodable)] //! struct Node { id: usize } //! ``` //! @@ -40,15 +41,17 @@ //! references other non-built-in types. A type definition like: //! //! ``` -//! 
# #[derive(Encodable, Decodable)] struct Span; -//! #[derive(Encodable, Decodable)] +//! # #[derive(RustcEncodable, RustcDecodable)] +//! # struct Span; +//! #[derive(RustcEncodable, RustcDecodable)] //! struct Spanned<T> { node: T, span: Span } //! ``` //! //! would yield functions like: //! //! ``` -//! # #[derive(Encodable, Decodable)] struct Span; +//! # #[derive(RustcEncodable, RustcDecodable)] +//! # struct Span; //! # struct Spanned<T> { node: T, span: Span } //! impl< //! S: Encoder<E>, @@ -82,7 +85,7 @@ //! } //! ``` -use crate::deriving::{self, pathvec_std}; +use crate::deriving::pathvec_std; use crate::deriving::generic::*; use crate::deriving::generic::ty::*; @@ -98,7 +101,7 @@ pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt<'_>, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { let krate = "rustc_serialize"; - let typaram = &*deriving::hygienic_type_parameter(item, "__S"); + let typaram = "__S"; let trait_def = TraitDef { span, @@ -238,7 +241,7 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, variant.node.ident.name); + let name = cx.expr_str(trait_span, variant.ident.name); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 7e6d9126c87..1475bac0688 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -187,7 +187,7 @@ use syntax::ast::{self, BinOpKind, EnumDef, Expr, Generics, Ident, PatKind}; use syntax::ast::{VariantData, GenericParamKind, GenericArg}; use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives}; -use syntax::source_map::{self, respan}; +use syntax::source_map::respan; use syntax::util::map_in_place::MapInPlace; use syntax::ptr::P; use syntax::symbol::{Symbol, kw, sym}; @@ -425,7 +425,7 @@ impl<'a> TraitDef<'a> { return; } }; - let container_id = cx.current_expansion.id.parent(); + let container_id = cx.current_expansion.id.expn_data().parent; let is_always_copy = cx.resolver.has_derives(container_id, SpecialDerives::COPY) && has_no_type_params; @@ -758,7 +758,7 @@ impl<'a> TraitDef<'a> { let mut field_tys = Vec::new(); for variant in &enum_def.variants { - field_tys.extend(variant.node + field_tys.extend(variant .data .fields() .iter() @@ -890,7 +890,7 @@ impl<'a> MethodDef<'a> { for (ty, name) in self.args.iter() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(name).gensym(); + let ident = ast::Ident::from_str_and_span(name, trait_.span); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident); @@ -928,7 +928,7 @@ impl<'a> MethodDef<'a> { let args = { let self_args = explicit_self.map(|explicit_self| { - let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span); + let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(trait_.span); ast::Arg::from_self(ThinVec::default(), explicit_self, ident) }); let nonself_args = arg_types.into_iter() @@ -1210,7 +1210,7 @@ impl<'a> MethodDef<'a> { let vi_idents = self_arg_names.iter() .map(|name| { let vi_suffix = format!("{}_vi", &name[..]); - cx.ident_of(&vi_suffix[..]).gensym() + ast::Ident::from_str_and_span(&vi_suffix[..], trait_.span) }) .collect::<Vec<ast::Ident>>(); @@ -1220,7 +1220,7 @@ impl<'a> MethodDef<'a> { let catch_all_substructure = EnumNonMatchingCollapsed(self_arg_idents, &variants[..], 
&vi_idents[..]); - let first_fieldless = variants.iter().find(|v| v.node.data.fields().is_empty()); + let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty()); // These arms are of the form: // (Variant1, Variant1, ...) => Body1 @@ -1229,7 +1229,7 @@ impl<'a> MethodDef<'a> { // where each tuple has length = self_args.len() let mut match_arms: Vec<ast::Arm> = variants.iter() .enumerate() - .filter(|&(_, v)| !(self.unify_fieldless_variants && v.node.data.fields().is_empty())) + .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) .map(|(index, variant)| { let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| { let (p, idents) = trait_.create_enum_variant_pattern(cx, @@ -1387,7 +1387,10 @@ impl<'a> MethodDef<'a> { let variant_value = deriving::call_intrinsic(cx, sp, "discriminant_value", vec![self_addr]); - let target_ty = cx.ty_ident(sp, cx.ident_of(target_type_name)); + let target_ty = cx.ty_ident( + sp, + ast::Ident::from_str_and_span(target_type_name, sp), + ); let variant_disr = cx.expr_cast(sp, variant_value, target_ty); let let_stmt = cx.stmt_let(sp, false, ident, variant_disr); index_let_stmts.push(let_stmt); @@ -1513,8 +1516,8 @@ impl<'a> MethodDef<'a> { .iter() .map(|v| { let sp = v.span.with_ctxt(trait_.span.ctxt()); - let summary = trait_.summarise_struct(cx, &v.node.data); - (v.node.ident, sp, summary) + let summary = trait_.summarise_struct(cx, &v.data); + (v.ident, sp, summary) }) .collect(); self.call_substructure_method(cx, @@ -1588,7 +1591,7 @@ impl<'a> TraitDef<'a> { let mut ident_exprs = Vec::new(); for (i, struct_field) in struct_def.fields().iter().enumerate() { let sp = struct_field.span.with_ctxt(self.span.ctxt()); - let ident = cx.ident_of(&format!("{}_{}", prefix, i)).gensym(); + let ident = ast::Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span); paths.push(ident.with_span_pos(sp)); let val = cx.expr_path(cx.path_ident(sp, ident)); let val = if use_temporaries { @@ -1610,14 +1613,13 @@ impl<'a> TraitDef<'a> { if ident.is_none() { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } - source_map::Spanned { + ast::FieldPat { + ident: ident.unwrap(), + is_shorthand: false, + attrs: ThinVec::new(), + id: ast::DUMMY_NODE_ID, span: pat.span.with_ctxt(self.span.ctxt()), - node: ast::FieldPat { - ident: ident.unwrap(), - pat, - is_shorthand: false, - attrs: ThinVec::new(), - }, + pat, } }) .collect(); @@ -1643,9 +1645,9 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) { let sp = variant.span.with_ctxt(self.span.ctxt()); - let variant_path = cx.path(sp, vec![enum_ident, variant.node.ident]); + let variant_path = cx.path(sp, vec![enum_ident, variant.ident]); let use_temporaries = false; // enums can't be repr(packed) - self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl, + self.create_struct_pattern(cx, variant_path, &variant.data, prefix, mutbl, use_temporaries) } } @@ -1776,7 +1778,7 @@ pub fn is_type_without_fields(item: &Annotatable) -> bool { if let Annotatable::Item(ref item) = *item { match item.node { ast::ItemKind::Enum(ref enum_def, _) => { - enum_def.variants.iter().all(|v| v.node.data.fields().is_empty()) + enum_def.variants.iter().all(|v| v.data.fields().is_empty()) } ast::ItemKind::Struct(ref variant_data, _) => variant_data.fields().is_empty(), _ => false, diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index 
399829eaefd..7fcf036fc81 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -72,7 +72,7 @@ impl<'a> Path<'a> { self_ty: Ident, self_generics: &Generics) -> ast::Path { - let mut idents = self.path.iter().map(|s| cx.ident_of(*s)).collect(); + let mut idents = self.path.iter().map(|s| Ident::from_str_and_span(*s, span)).collect(); let lt = mk_lifetimes(cx, span, &self.lifetime); let tys: Vec<P<ast::Ty>> = self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect(); @@ -209,7 +209,7 @@ fn mk_ty_param(cx: &ExtCtxt<'_>, cx.trait_bound(path) }) .collect(); - cx.typaram(span, cx.ident_of(name), attrs.to_owned(), bounds, None) + cx.typaram(span, ast::Ident::from_str_and_span(name, span), attrs.to_owned(), bounds, None) } fn mk_generics(params: Vec<ast::GenericParam>, span: Span) -> Generics { diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs index 9787722e81d..2fc594abd70 100644 --- a/src/libsyntax_ext/deriving/hash.rs +++ b/src/libsyntax_ext/deriving/hash.rs @@ -16,7 +16,7 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt<'_>, let path = Path::new_(pathvec_std!(cx, hash::Hash), None, vec![], PathKind::Std); - let typaram = &*deriving::hygienic_type_parameter(item, "__H"); + let typaram = "__H"; let arg = Path::new_local(typaram); let hash_trait_def = TraitDef { diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index 8cd2853e538..da68eea0c50 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -54,33 +54,6 @@ impl MultiItemModifier for BuiltinDerive { } } -/// Construct a name for the inner type parameter that can't collide with any type parameters of -/// the item. This is achieved by starting with a base and then concatenating the names of all -/// other type parameters. -// FIXME(aburka): use real hygiene when that becomes possible -fn hygienic_type_parameter(item: &Annotatable, base: &str) -> String { - let mut typaram = String::from(base); - if let Annotatable::Item(ref item) = *item { - match item.node { - ast::ItemKind::Struct(_, ast::Generics { ref params, .. }) | - ast::ItemKind::Enum(_, ast::Generics { ref params, .. }) => { - for param in params { - match param.kind { - ast::GenericParamKind::Type { .. } => { - typaram.push_str(¶m.ident.as_str()); - } - _ => {} - } - } - } - - _ => {} - } - } - - typaram -} - /// Constructs an expression that calls an intrinsic fn call_intrinsic(cx: &ExtCtxt<'_>, span: Span, diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 39fc90decc9..9834130fa23 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -16,20 +16,20 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>, tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> { let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") { - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some(v) => v, }; let sp = sp.apply_mark(cx.current_expansion.id); let e = match env::var(&*var.as_str()) { Err(..) 
=> { - let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime)); + let lt = cx.lifetime(sp, Ident::with_dummy_span(kw::StaticLifetime)); cx.expr_path(cx.path_all(sp, true, cx.std_path(&[sym::option, sym::Option, sym::None]), vec![GenericArg::Type(cx.ty_rptr(sp, cx.ty_ident(sp, - Ident::with_empty_ctxt(sym::str)), + Ident::with_dummy_span(sym::str)), Some(lt), ast::Mutability::Immutable))], vec![])) @@ -50,21 +50,21 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>, let mut exprs = match get_exprs_from_tts(cx, sp, tts) { Some(ref exprs) if exprs.is_empty() => { cx.span_err(sp, "env! takes 1 or 2 arguments"); - return DummyResult::expr(sp); + return DummyResult::any(sp); } - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some(exprs) => exprs.into_iter(), }; let var = match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") { - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some((v, _style)) => v, }; let msg = match exprs.next() { None => Symbol::intern(&format!("environment variable `{}` not defined", var)), Some(second) => { match expr_to_string(cx, second, "expected string literal") { - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some((s, _style)) => s, } } @@ -72,13 +72,13 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>, if exprs.next().is_some() { cx.span_err(sp, "env! takes 1 or 2 arguments"); - return DummyResult::expr(sp); + return DummyResult::any(sp); } let e = match env::var(&*var.as_str()) { Err(_) => { cx.span_err(sp, &msg.as_str()); - return DummyResult::expr(sp); + return DummyResult::any(sp); } Ok(s) => cx.expr_str(sp, Symbol::intern(&s)), }; diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 2ae13b66e28..83764205a19 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -805,7 +805,7 @@ fn expand_format_args_impl<'cx>( } Err(mut err) => { err.emit(); - DummyResult::expr(sp) + DummyResult::any(sp) } } } @@ -846,9 +846,9 @@ pub fn expand_preparsed_format_args( let msg = "format argument must be a string literal"; let fmt_sp = efmt.span; - let fmt = match expr_to_spanned_string(ecx, efmt, msg) { + let (fmt_str, fmt_style, fmt_span) = match expr_to_spanned_string(ecx, efmt, msg) { Ok(mut fmt) if append_newline => { - fmt.node.0 = Symbol::intern(&format!("{}\n", fmt.node.0)); + fmt.0 = Symbol::intern(&format!("{}\n", fmt.0)); fmt } Ok(fmt) => fmt, @@ -875,7 +875,7 @@ pub fn expand_preparsed_format_args( _ => (false, None), }; - let str_style = match fmt.node.1 { + let str_style = match fmt_style { ast::StrStyle::Cooked => None, ast::StrStyle::Raw(raw) => { Some(raw as usize) @@ -981,7 +981,7 @@ pub fn expand_preparsed_format_args( vec![] }; - let fmt_str = &*fmt.node.0.as_str(); // for the suggestions below + let fmt_str = &*fmt_str.as_str(); // for the suggestions below let mut parser = parse::Parser::new(fmt_str, str_style, skips, append_newline); let mut unverified_pieces = Vec::new(); @@ -995,7 +995,7 @@ pub fn expand_preparsed_format_args( if !parser.errors.is_empty() { let err = parser.errors.remove(0); - let sp = fmt.span.from_inner(err.span); + let sp = fmt_span.from_inner(err.span); let mut e = ecx.struct_span_err(sp, &format!("invalid format string: {}", err.description)); e.span_label(sp, err.label + " in format string"); @@ -1003,7 +1003,7 @@ pub fn expand_preparsed_format_args( e.note(¬e); } if let Some((label, span)) = err.secondary_label { - let sp = fmt.span.from_inner(span); + let sp = 
fmt_span.from_inner(span); e.span_label(sp, label); } e.emit(); @@ -1011,7 +1011,7 @@ pub fn expand_preparsed_format_args( } let arg_spans = parser.arg_places.iter() - .map(|span| fmt.span.from_inner(*span)) + .map(|span| fmt_span.from_inner(*span)) .collect(); let named_pos: FxHashSet<usize> = names.values().cloned().collect(); @@ -1034,7 +1034,7 @@ pub fn expand_preparsed_format_args( str_pieces: Vec::with_capacity(unverified_pieces.len()), all_pieces_simple: true, macsp, - fmtsp: fmt.span, + fmtsp: fmt_span, invalid_refs: Vec::new(), arg_spans, arg_with_formatting: Vec::new(), diff --git a/src/libsyntax_ext/global_allocator.rs b/src/libsyntax_ext/global_allocator.rs index f788b513804..d2121abe3b4 100644 --- a/src/libsyntax_ext/global_allocator.rs +++ b/src/libsyntax_ext/global_allocator.rs @@ -29,7 +29,7 @@ pub fn expand( }; // Generate a bunch of new items using the AllocFnFactory - let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id)); + let span = item.span.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id)); let f = AllocFnFactory { span, kind: AllocatorKind::Global, @@ -44,7 +44,7 @@ pub fn expand( let const_ty = ecx.ty(span, TyKind::Tup(Vec::new())); let const_body = ecx.expr_block(ecx.block(span, stmts)); let const_item = - ecx.item_const(span, Ident::with_empty_ctxt(kw::Underscore), const_ty, const_body); + ecx.item_const(span, Ident::with_dummy_span(kw::Underscore), const_ty, const_body); // Return the original item and the new methods. vec![Annotatable::Item(item), Annotatable::Item(const_item)] @@ -120,7 +120,7 @@ impl AllocFnFactory<'_, '_> { ) -> P<Expr> { match *ty { AllocatorTy::Layout => { - let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize)); + let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize)); let ty_usize = self.cx.ty_path(usize); let size = ident(); let align = ident(); @@ -178,12 +178,12 @@ impl AllocFnFactory<'_, '_> { } fn usize(&self) -> P<Ty> { - let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize)); + let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize)); self.cx.ty_path(usize) } fn ptr_u8(&self) -> P<Ty> { - let u8 = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::u8)); + let u8 = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::u8)); let ty_u8 = self.cx.ty_path(u8); self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable) } diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs index 112192fac5d..73ebeaec454 100644 --- a/src/libsyntax_ext/global_asm.rs +++ b/src/libsyntax_ext/global_asm.rs @@ -30,7 +30,7 @@ pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, id: ast::DUMMY_NODE_ID, node: ast::ItemKind::GlobalAsm(P(global_asm)), vis: respan(sp.shrink_to_lo(), ast::VisibilityKind::Inherited), - span: sp, + span: sp.with_ctxt(cx.backtrace()), tokens: None, })]) } @@ -61,8 +61,5 @@ fn parse_global_asm<'a>( None => return Ok(None), }; - Ok(Some(ast::GlobalAsm { - asm, - ctxt: cx.backtrace(), - })) + Ok(Some(ast::GlobalAsm { asm })) } diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 0f3f5c0cd0e..4add2261c6c 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -42,7 +42,7 @@ pub mod test_harness; pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) { let mut register = |name, kind| resolver.register_builtin_macro( - Ident::with_empty_ctxt(name), SyntaxExtension { + 
Ident::with_dummy_span(name), SyntaxExtension { is_builtin: true, ..SyntaxExtension::default(kind, edition) }, ); @@ -57,7 +57,6 @@ pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, e } register_bang! { - __rust_unstable_column: source_util::expand_column, asm: asm::expand_asm, assert: assert::expand_assert, cfg: cfg::expand_cfg, diff --git a/src/libsyntax_ext/plugin_macro_defs.rs b/src/libsyntax_ext/plugin_macro_defs.rs index a725f5e46ad..dbfd8fe98f3 100644 --- a/src/libsyntax_ext/plugin_macro_defs.rs +++ b/src/libsyntax_ext/plugin_macro_defs.rs @@ -11,7 +11,7 @@ use syntax::source_map::respan; use syntax::symbol::sym; use syntax::tokenstream::*; use syntax_pos::{Span, DUMMY_SP}; -use syntax_pos::hygiene::{ExpnId, ExpnInfo, ExpnKind, MacroKind}; +use syntax_pos::hygiene::{ExpnData, ExpnKind, MacroKind}; use std::mem; @@ -43,12 +43,12 @@ pub fn inject( ) { if !named_exts.is_empty() { let mut extra_items = Vec::new(); - let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition, [sym::rustc_attrs][..].into(), )); for (name, ext) in named_exts { - resolver.register_builtin_macro(Ident::with_empty_ctxt(name), ext); + resolver.register_builtin_macro(Ident::with_dummy_span(name), ext); extra_items.push(plugin_macro_def(name, span)); } // The `macro_rules` items must be inserted before any other items. diff --git a/src/libsyntax_ext/proc_macro_harness.rs b/src/libsyntax_ext/proc_macro_harness.rs index 7913a7442ed..e772eaf8349 100644 --- a/src/libsyntax_ext/proc_macro_harness.rs +++ b/src/libsyntax_ext/proc_macro_harness.rs @@ -1,18 +1,16 @@ use std::mem; +use smallvec::smallvec; use syntax::ast::{self, Ident}; use syntax::attr; -use syntax::source_map::{ExpnInfo, ExpnKind, respan}; +use syntax::source_map::{ExpnData, ExpnKind, respan}; use syntax::ext::base::{ExtCtxt, MacroKind}; -use syntax::ext::expand::ExpansionConfig; -use syntax::ext::hygiene::ExpnId; +use syntax::ext::expand::{AstFragment, ExpansionConfig}; use syntax::ext::proc_macro::is_proc_macro_attr; -use syntax::mut_visit::MutVisitor; use syntax::parse::ParseSess; use syntax::ptr::P; use syntax::symbol::{kw, sym}; use syntax::visit::{self, Visitor}; - use syntax_pos::{Span, DUMMY_SP}; struct ProcMacroDerive { @@ -329,7 +327,7 @@ fn mk_decls( custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef], ) -> P<ast::Item> { - let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::proc_macro), DUMMY_SP, cx.parse_sess.edition, [sym::rustc_attrs, sym::proc_macro_internals][..].into(), )); @@ -338,7 +336,7 @@ fn mk_decls( let doc = cx.meta_list(span, sym::doc, vec![hidden]); let doc_hidden = cx.attribute(doc); - let proc_macro = Ident::with_empty_ctxt(sym::proc_macro); + let proc_macro = Ident::with_dummy_span(sym::proc_macro); let krate = cx.item(span, proc_macro, Vec::new(), @@ -350,7 +348,7 @@ fn mk_decls( let custom_derive = Ident::from_str("custom_derive"); let attr = Ident::from_str("attr"); let bang = Ident::from_str("bang"); - let crate_kw = Ident::with_empty_ctxt(kw::Crate); + let crate_kw = Ident::with_dummy_span(kw::Crate); let decls = { let local_path = |sp: Span, name| { @@ -409,5 +407,7 @@ fn mk_decls( i }); - cx.monotonic_expander().flat_map_item(module).pop().unwrap() + // Integrate the new module into existing module 
structures. + let module = AstFragment::Items(smallvec![module]); + cx.monotonic_expander().fully_expand_fragment(module).make_items().pop().unwrap() } diff --git a/src/libsyntax_ext/source_util.rs b/src/libsyntax_ext/source_util.rs index 2c8d53a2315..e008ed710e4 100644 --- a/src/libsyntax_ext/source_util.rs +++ b/src/libsyntax_ext/source_util.rs @@ -9,8 +9,6 @@ use syntax::tokenstream; use smallvec::SmallVec; use syntax_pos::{self, Pos, Span}; -use std::fs; -use std::io::ErrorKind; use rustc_data_structures::sync::Lrc; // These macros all relate to the file system; they either return @@ -111,26 +109,23 @@ pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::To -> Box<dyn base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") { Some(f) => f, - None => return DummyResult::expr(sp) + None => return DummyResult::any(sp) }; let file = cx.resolve_path(file, sp); - match fs::read_to_string(&file) { - Ok(src) => { - let interned_src = Symbol::intern(&src); - - // Add this input file to the code map to make it available as - // dependency information - cx.source_map().new_source_file(file.into(), src); - - base::MacEager::expr(cx.expr_str(sp, interned_src)) + match cx.source_map().load_binary_file(&file) { + Ok(bytes) => match std::str::from_utf8(&bytes) { + Ok(src) => { + let interned_src = Symbol::intern(&src); + base::MacEager::expr(cx.expr_str(sp, interned_src)) + } + Err(_) => { + cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); + DummyResult::any(sp) + } }, - Err(ref e) if e.kind() == ErrorKind::InvalidData => { - cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); - DummyResult::expr(sp) - } Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - DummyResult::expr(sp) + DummyResult::any(sp) } } } @@ -139,26 +134,16 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream:: -> Box<dyn base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") { Some(f) => f, - None => return DummyResult::expr(sp) + None => return DummyResult::any(sp) }; let file = cx.resolve_path(file, sp); - match fs::read(&file) { + match cx.source_map().load_binary_file(&file) { Ok(bytes) => { - // Add the contents to the source map if it contains UTF-8. 
- let (contents, bytes) = match String::from_utf8(bytes) { - Ok(s) => { - let bytes = s.as_bytes().to_owned(); - (s, bytes) - }, - Err(e) => (String::new(), e.into_bytes()), - }; - cx.source_map().new_source_file(file.into(), contents); - base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))) }, Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - DummyResult::expr(sp) + DummyResult::any(sp) } } } diff --git a/src/libsyntax_ext/standard_library_imports.rs b/src/libsyntax_ext/standard_library_imports.rs index 68b13bdd171..8ca376341fc 100644 --- a/src/libsyntax_ext/standard_library_imports.rs +++ b/src/libsyntax_ext/standard_library_imports.rs @@ -1,8 +1,8 @@ use syntax::{ast, attr}; use syntax::edition::Edition; -use syntax::ext::hygiene::{ExpnId, MacroKind}; +use syntax::ext::hygiene::MacroKind; use syntax::ptr::P; -use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned, respan}; +use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned, respan}; use syntax::symbol::{Ident, Symbol, kw, sym}; use syntax_pos::DUMMY_SP; @@ -32,7 +32,7 @@ pub fn inject( // HACK(eddyb) gensym the injected crates on the Rust 2018 edition, // so they don't accidentally interfere with the new import paths. let orig_name_sym = Symbol::intern(orig_name_str); - let orig_name_ident = Ident::with_empty_ctxt(orig_name_sym); + let orig_name_ident = Ident::with_dummy_span(orig_name_sym); let (rename, orig_name) = if rust_2018 { (orig_name_ident.gensym(), Some(orig_name_sym)) } else { @@ -40,7 +40,7 @@ pub fn inject( }; krate.module.items.insert(0, P(ast::Item { attrs: vec![attr::mk_attr_outer( - attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::macro_use)) + attr::mk_word_item(ast::Ident::with_dummy_span(sym::macro_use)) )], vis: dummy_spanned(ast::VisibilityKind::Inherited), node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)), @@ -55,7 +55,7 @@ pub fn inject( // the prelude. 
let name = names[0]; - let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition, [sym::prelude_import][..].into(), )); @@ -66,7 +66,7 @@ pub fn inject( vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited), node: ast::ItemKind::Use(P(ast::UseTree { prefix: ast::Path { - segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot)) + segments: iter::once(ast::Ident::with_dummy_span(kw::PathRoot)) .chain( [name, "prelude", "v1"].iter().cloned() .map(ast::Ident::from_str) diff --git a/src/libsyntax_ext/test.rs b/src/libsyntax_ext/test.rs index 993ef257527..08582e714cc 100644 --- a/src/libsyntax_ext/test.rs +++ b/src/libsyntax_ext/test.rs @@ -29,7 +29,7 @@ pub fn expand_test_case( if !ecx.ecfg.should_test { return vec![]; } - let sp = attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id)); + let sp = attr_sp.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id)); let mut item = anno_item.expect_item(); item = item.map(|mut item| { item.vis = respan(item.vis.span, ast::VisibilityKind::Public); @@ -93,7 +93,7 @@ pub fn expand_test_or_bench( return vec![Annotatable::Item(item)]; } - let ctxt = SyntaxContext::empty().apply_mark(cx.current_expansion.id); + let ctxt = SyntaxContext::root().apply_mark(cx.current_expansion.id); let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt)); // Gensym "test" so we can extern crate without conflicting with any local names diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs index eec8a3f8023..4a6ea0ebf85 100644 --- a/src/libsyntax_ext/test_harness.rs +++ b/src/libsyntax_ext/test_harness.rs @@ -5,14 +5,13 @@ use smallvec::{smallvec, SmallVec}; use syntax::ast::{self, Ident}; use syntax::attr; use syntax::entry::{self, EntryPointType}; -use syntax::ext::base::{ExtCtxt, Resolver}; -use syntax::ext::expand::ExpansionConfig; -use syntax::ext::hygiene::{ExpnId, MacroKind}; +use syntax::ext::base::{ExtCtxt, MacroKind, Resolver}; +use syntax::ext::expand::{AstFragment, ExpansionConfig}; use syntax::feature_gate::Features; use syntax::mut_visit::{*, ExpectOne}; use syntax::parse::ParseSess; use syntax::ptr::P; -use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned}; +use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned}; use syntax::symbol::{kw, sym, Symbol}; use syntax_pos::{Span, DUMMY_SP}; @@ -74,12 +73,7 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { noop_visit_crate(c, self); // Create a main function to run our tests - let test_main = { - let unresolved = mk_main(&mut self.cx); - self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap() - }; - - c.module.items.push(test_main); + c.module.items.push(mk_main(&mut self.cx)); } fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> { @@ -155,7 +149,7 @@ impl MutVisitor for EntryPointCleaner { EntryPointType::MainAttr | EntryPointType::Start => item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { - let allow_ident = Ident::with_empty_ctxt(sym::allow); + let allow_ident = Ident::with_dummy_span(sym::allow); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]); let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item); @@ -196,7 +190,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>, tests: Vec<Ident>, 
tested_submods: Vec<(Ident, Ident)>) -> (P<ast::Item>, Ident) { - let super_ = Ident::with_empty_ctxt(kw::Super); + let super_ = Ident::with_dummy_span(kw::Super); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), @@ -216,7 +210,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>, let name = Ident::from_str("__test_reexports").gensym(); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.id = cx.ext_cx.resolver.get_module_scope(parent); - let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item { + let module = P(ast::Item { ident: name, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, @@ -224,9 +218,14 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>, vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, - })).pop().unwrap(); + }); - (it, name) + // Integrate the new module into existing module structures. + let module = AstFragment::Items(smallvec![module]); + let module = + cx.ext_cx.monotonic_expander().fully_expand_fragment(module).make_items().pop().unwrap(); + + (module, name) } /// Crawl over the crate, inserting test reexports and the test main function @@ -269,12 +268,12 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { // #![main] // test::test_main_static(&[..tests]); // } - let sp = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let sp = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition, [sym::main, sym::test, sym::rustc_attrs][..].into(), )); let ecx = &cx.ext_cx; - let test_id = Ident::with_empty_ctxt(sym::test); + let test_id = Ident::with_dummy_span(sym::test); // test::test_main_static(...) let mut test_runner = cx.test_runner.clone().unwrap_or( @@ -321,7 +320,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { None => Ident::from_str_and_span("main", sp).gensym(), }; - P(ast::Item { + let main = P(ast::Item { ident: main_id, attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, @@ -329,8 +328,11 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> { vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, - }) + }); + // Integrate the new item into existing module structures. + let main = AstFragment::Items(smallvec![main]); + cx.ext_cx.monotonic_expander().fully_expand_fragment(main).make_items().pop().unwrap() } fn path_name_i(idents: &[Ident]) -> String { |
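Another recurring substitution throughout the diff is in the identifier constructors: `Ident::with_empty_ctxt(..)` and `cx.ident_of(name).gensym()` give way to `Ident::with_dummy_span(..)`, `Ident::new(sym, span)`, and `Ident::from_str_and_span(name, span)`, so generated identifiers carry a span (and, through it, their hygiene context) rather than relying on gensym'd freshness. The sketch below is a minimal analogy with made-up types, not libsyntax's real `Ident`/`Span` API, meant only to show what carrying the span buys the generated code.

```rust
// Toy types for illustration; the real libsyntax `Ident` wraps an interned
// `Symbol` and a `Span` whose `SyntaxContext` drives hygiene.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { lo: u32, hi: u32 }

const DUMMY_SP: Span = Span { lo: 0, hi: 0 };

#[derive(Debug, Clone)]
struct Ident { name: String, span: Span }

impl Ident {
    /// Analogue of `Ident::with_dummy_span`: fine for well-known names that
    /// never need to point back at user code.
    fn with_dummy_span(name: &str) -> Ident {
        Ident { name: name.to_string(), span: DUMMY_SP }
    }

    /// Analogue of `Ident::from_str_and_span`: the generated identifier keeps
    /// the span of the expansion, so later stages can relate it to the code
    /// being expanded instead of treating it as coming from nowhere.
    fn from_str_and_span(name: &str, span: Span) -> Ident {
        Ident { name: name.to_string(), span }
    }
}

fn main() {
    let trait_span = Span { lo: 120, hi: 135 }; // span of the item being derived
    let anonymous = Ident::with_dummy_span("cmp");
    let anchored = Ident::from_str_and_span("__self_0_vi", trait_span);
    assert_eq!(anchored.span, trait_span);
    println!("{:?} vs {:?}", anonymous, anchored);
}
```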
