diff options
| author | Geoffry Song <goffrie@gmail.com> | 2015-03-05 15:06:49 -0500 |
|---|---|---|
| committer | Geoffry Song <goffrie@gmail.com> | 2015-04-25 21:42:10 -0400 |
| commit | 2d9831dea598d8a45c69e8c799503e8a397aacc0 (patch) | |
| tree | 01b440d423b022b089549022f8a5b411514360aa /src/libsyntax | |
| parent | da623844a9b3f9164723bf7ef2c4744b539af13f (diff) | |
| download | rust-2d9831dea598d8a45c69e8c799503e8a397aacc0.tar.gz rust-2d9831dea598d8a45c69e8c799503e8a397aacc0.zip | |
Interpolate AST nodes in quasiquote.
This replaces the `ToTokens` implementations for expressions, statements, etc. with almost-trivial ones that produce `Interpolated(*Nt(...))` pseudo-tokens. In this way, quasiquote now works the same way as macros do: already-parsed AST fragments are used as-is, not reparsed. The `ToSource` trait is removed. Quasiquote no longer involves pretty-printing at all, which removes the need for the `encode_with_hygiene` hack. All associated machinery is removed. A new `Nonterminal` is added, NtArm, which the parser now interpolates. This is just for quasiquote, not macros (although it could be in the future). `ToTokens` is no longer implemented for `Arg` (although this could be added again) and `Generics` (which I don't think makes sense). This breaks any compiler extensions that relied on the ability of `ToTokens` to turn AST fragments back into inspectable token trees. For this reason, this closes #16987. As such, this is a [breaking-change]. Fixes #16472. Fixes #15962. Fixes #17397. Fixes #16617.
Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/ast.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 363 | ||||
| -rw-r--r-- | src/libsyntax/fold.rs | 7 | ||||
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 107 | ||||
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 72 | ||||
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 7 | ||||
| -rw-r--r-- | src/libsyntax/parse/token.rs | 7 | ||||
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 122 |
8 files changed, 189 insertions, 502 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 94dc36b16ba..07fb6cbe5c6 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -89,12 +89,6 @@ impl Ident { pub fn as_str<'a>(&'a self) -> &'a str { self.name.as_str() } - - pub fn encode_with_hygiene(&self) -> String { - format!("\x00name_{},ctxt_{}\x00", - self.name.usize(), - self.ctxt) - } } impl fmt::Debug for Ident { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 5776fa99740..e100b7705d8 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -30,16 +30,16 @@ pub mod rt { use ext::base::ExtCtxt; use parse::token; use parse; - use print::pprust; use ptr::P; + use std::rc::Rc; - use ast::{TokenTree, Generics, Expr}; + use ast::{TokenTree, Expr}; pub use parse::new_parser_from_tts; - pub use codemap::{BytePos, Span, dummy_spanned}; + pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP}; pub trait ToTokens { - fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> ; + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>; } impl ToTokens for TokenTree { @@ -70,277 +70,189 @@ pub mod rt { } } - /* Should be (when bugs in default methods are fixed): - - trait ToSource : ToTokens { - // Takes a thing and generates a string containing rust code for it. - pub fn to_source() -> String; - - // If you can make source, you can definitely make tokens. - pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] { - cx.parse_tts(self.to_source()) + impl ToTokens for ast::Ident { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))] } } - */ - - // FIXME: Move this trait to pprust and get rid of *_to_str? - pub trait ToSource { - // Takes a thing and generates a string containing rust code for it. 
- fn to_source(&self) -> String; + impl ToTokens for ast::Path { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))] + } } - // FIXME (Issue #16472): This should go away after ToToken impls - // are revised to go directly to token-trees. - trait ToSourceWithHygiene : ToSource { - // Takes a thing and generates a string containing rust code - // for it, encoding Idents as special byte sequences to - // maintain hygiene across serialization and deserialization. - fn to_source_with_hygiene(&self) -> String; + impl ToTokens for ast::Ty { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] + } } - macro_rules! impl_to_source { - (P<$t:ty>, $pp:ident) => ( - impl ToSource for P<$t> { - fn to_source(&self) -> String { - pprust::$pp(&**self) - } - } - impl ToSourceWithHygiene for P<$t> { - fn to_source_with_hygiene(&self) -> String { - pprust::with_hygiene::$pp(&**self) - } - } - ); - ($t:ty, $pp:ident) => ( - impl ToSource for $t { - fn to_source(&self) -> String { - pprust::$pp(self) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - pprust::with_hygiene::$pp(self) - } - } - ); + impl ToTokens for ast::Block { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] + } } - fn slice_to_source<'a, T: ToSource>(sep: &'static str, xs: &'a [T]) -> String { - xs.iter() - .map(|i| i.to_source()) - .collect::<Vec<String>>() - .connect(sep) - .to_string() + impl ToTokens for P<ast::Item> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))] + } } - fn slice_to_source_with_hygiene<'a, T: ToSourceWithHygiene>( - sep: &'static str, xs: &'a [T]) -> String { - xs.iter() - .map(|i| 
i.to_source_with_hygiene()) - .collect::<Vec<String>>() - .connect(sep) - .to_string() + impl ToTokens for P<ast::ImplItem> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))] + } } - macro_rules! impl_to_source_slice { - ($t:ty, $sep:expr) => ( - impl ToSource for [$t] { - fn to_source(&self) -> String { - slice_to_source($sep, self) - } - } - - impl ToSourceWithHygiene for [$t] { - fn to_source_with_hygiene(&self) -> String { - slice_to_source_with_hygiene($sep, self) - } - } - ) + impl ToTokens for P<ast::TraitItem> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] + } } - impl ToSource for ast::Ident { - fn to_source(&self) -> String { - token::get_ident(*self).to_string() + impl ToTokens for P<ast::Stmt> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone())))] } } - impl ToSourceWithHygiene for ast::Ident { - fn to_source_with_hygiene(&self) -> String { - self.encode_with_hygiene() + impl ToTokens for P<ast::Expr> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))] } } - impl_to_source! { ast::Path, path_to_string } - impl_to_source! { ast::Ty, ty_to_string } - impl_to_source! { ast::Block, block_to_string } - impl_to_source! { ast::Arg, arg_to_string } - impl_to_source! { Generics, generics_to_string } - impl_to_source! { ast::WhereClause, where_clause_to_string } - impl_to_source! { P<ast::Item>, item_to_string } - impl_to_source! { P<ast::ImplItem>, impl_item_to_string } - impl_to_source! { P<ast::TraitItem>, trait_item_to_string } - impl_to_source! { P<ast::Stmt>, stmt_to_string } - impl_to_source! { P<ast::Expr>, expr_to_string } - impl_to_source! { P<ast::Pat>, pat_to_string } - impl_to_source! 
{ ast::Arm, arm_to_string } - impl_to_source_slice! { ast::Ty, ", " } - impl_to_source_slice! { P<ast::Item>, "\n\n" } - - impl ToSource for ast::Attribute_ { - fn to_source(&self) -> String { - pprust::attribute_to_string(&dummy_spanned(self.clone())) + impl ToTokens for P<ast::Pat> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))] } } - impl ToSourceWithHygiene for ast::Attribute_ { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for ast::Arm { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] } } - impl ToSource for str { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitStr( - token::intern_and_get_ident(self), ast::CookedStr)); - pprust::lit_to_string(&lit) - } + macro_rules! impl_to_tokens_slice { + ($t: ty, $sep: expr) => { + impl ToTokens for [$t] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { + let mut v = vec![]; + for (i, x) in self.iter().enumerate() { + if i > 0 { + v.push_all(&$sep); + } + v.extend(x.to_tokens(cx)); + } + v + } + } + }; } - impl ToSourceWithHygiene for str { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] } + impl_to_tokens_slice! 
{ P<ast::Item>, [] } + + impl ToTokens for P<ast::MetaItem> { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] } } - impl ToSource for () { - fn to_source(&self) -> String { - "()".to_string() + impl ToTokens for ast::Attribute { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { + let mut r = vec![]; + // FIXME: The spans could be better + r.push(ast::TtToken(self.span, token::Pound)); + if self.node.style == ast::AttrInner { + r.push(ast::TtToken(self.span, token::Not)); + } + r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited { + delim: token::Bracket, + open_span: self.span, + tts: self.node.value.to_tokens(cx), + close_span: self.span, + }))); + r } } - impl ToSourceWithHygiene for () { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for str { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { + let lit = ast::LitStr( + token::intern_and_get_ident(self), ast::CookedStr); + dummy_spanned(lit).to_tokens(cx) } } - impl ToSource for bool { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitBool(*self)); - pprust::lit_to_string(&lit) + impl ToTokens for () { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { + vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited { + delim: token::Paren, + open_span: DUMMY_SP, + tts: vec![], + close_span: DUMMY_SP, + }))] } } - impl ToSourceWithHygiene for bool { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for ast::Lit { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { + // FIXME: This is wrong + P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprLit(P(self.clone())), + span: DUMMY_SP, + }).to_tokens(cx) } } - impl ToSource for char { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitChar(*self)); - pprust::lit_to_string(&lit) + impl ToTokens for bool { + fn to_tokens(&self, cx: &ExtCtxt) -> 
Vec<TokenTree> { + dummy_spanned(ast::LitBool(*self)).to_tokens(cx) } } - impl ToSourceWithHygiene for char { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for char { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { + dummy_spanned(ast::LitChar(*self)).to_tokens(cx) } } - macro_rules! impl_to_source_int { + macro_rules! impl_to_tokens_int { (signed, $t:ty, $tag:expr) => ( - impl ToSource for $t { - fn to_source(&self) -> String { + impl ToTokens for $t { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let lit = ast::LitInt(*self as u64, ast::SignedIntLit($tag, ast::Sign::new(*self))); - pprust::lit_to_string(&dummy_spanned(lit)) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + dummy_spanned(lit).to_tokens(cx) } } ); (unsigned, $t:ty, $tag:expr) => ( - impl ToSource for $t { - fn to_source(&self) -> String { + impl ToTokens for $t { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { let lit = ast::LitInt(*self as u64, ast::UnsignedIntLit($tag)); - pprust::lit_to_string(&dummy_spanned(lit)) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + dummy_spanned(lit).to_tokens(cx) } } ); } - impl_to_source_int! { signed, isize, ast::TyIs } - impl_to_source_int! { signed, i8, ast::TyI8 } - impl_to_source_int! { signed, i16, ast::TyI16 } - impl_to_source_int! { signed, i32, ast::TyI32 } - impl_to_source_int! { signed, i64, ast::TyI64 } - - impl_to_source_int! { unsigned, usize, ast::TyUs } - impl_to_source_int! { unsigned, u8, ast::TyU8 } - impl_to_source_int! { unsigned, u16, ast::TyU16 } - impl_to_source_int! { unsigned, u32, ast::TyU32 } - impl_to_source_int! { unsigned, u64, ast::TyU64 } - - // Alas ... we write these out instead. All redundant. - - macro_rules! 
impl_to_tokens { - ($t:ty) => ( - impl ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { - cx.parse_tts_with_hygiene(self.to_source_with_hygiene()) - } - } - ) - } + impl_to_tokens_int! { signed, isize, ast::TyIs } + impl_to_tokens_int! { signed, i8, ast::TyI8 } + impl_to_tokens_int! { signed, i16, ast::TyI16 } + impl_to_tokens_int! { signed, i32, ast::TyI32 } + impl_to_tokens_int! { signed, i64, ast::TyI64 } - macro_rules! impl_to_tokens_lifetime { - ($t:ty) => ( - impl<'a> ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> { - cx.parse_tts_with_hygiene(self.to_source_with_hygiene()) - } - } - ) - } - - impl_to_tokens! { ast::Ident } - impl_to_tokens! { ast::Path } - impl_to_tokens! { P<ast::Item> } - impl_to_tokens! { P<ast::ImplItem> } - impl_to_tokens! { P<ast::TraitItem> } - impl_to_tokens! { P<ast::Pat> } - impl_to_tokens! { ast::Arm } - impl_to_tokens_lifetime! { &'a [P<ast::Item>] } - impl_to_tokens! { ast::Ty } - impl_to_tokens_lifetime! { &'a [ast::Ty] } - impl_to_tokens! { Generics } - impl_to_tokens! { ast::WhereClause } - impl_to_tokens! { P<ast::Stmt> } - impl_to_tokens! { P<ast::Expr> } - impl_to_tokens! { ast::Block } - impl_to_tokens! { ast::Arg } - impl_to_tokens! { ast::Attribute_ } - impl_to_tokens_lifetime! { &'a str } - impl_to_tokens! { () } - impl_to_tokens! { char } - impl_to_tokens! { bool } - impl_to_tokens! { isize } - impl_to_tokens! { i8 } - impl_to_tokens! { i16 } - impl_to_tokens! { i32 } - impl_to_tokens! { i64 } - impl_to_tokens! { usize } - impl_to_tokens! { u8 } - impl_to_tokens! { u16 } - impl_to_tokens! { u32 } - impl_to_tokens! { u64 } + impl_to_tokens_int! { unsigned, usize, ast::TyUs } + impl_to_tokens_int! { unsigned, u8, ast::TyU8 } + impl_to_tokens_int! { unsigned, u16, ast::TyU16 } + impl_to_tokens_int! { unsigned, u32, ast::TyU32 } + impl_to_tokens_int! 
{ unsigned, u64, ast::TyU64 } pub trait ExtParseUtils { fn parse_item(&self, s: String) -> P<ast::Item>; @@ -349,12 +261,6 @@ pub mod rt { fn parse_tts(&self, s: String) -> Vec<ast::TokenTree>; } - trait ExtParseUtilsWithHygiene { - // FIXME (Issue #16472): This should go away after ToToken impls - // are revised to go directly to token-trees. - fn parse_tts_with_hygiene(&self, s: String) -> Vec<ast::TokenTree>; - } - impl<'a> ExtParseUtils for ExtCtxt<'a> { fn parse_item(&self, s: String) -> P<ast::Item> { @@ -386,19 +292,6 @@ pub mod rt { self.parse_sess()) } } - - impl<'a> ExtParseUtilsWithHygiene for ExtCtxt<'a> { - - fn parse_tts_with_hygiene(&self, s: String) -> Vec<ast::TokenTree> { - use parse::with_hygiene::parse_tts_from_source_str; - parse_tts_from_source_str("<quote expansion>".to_string(), - s, - self.cfg(), - self.parse_sess()) - } - - } - } pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index c857d4403cb..5352a191b09 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -682,6 +682,13 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T) token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))), token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))), + token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), + token::NtImplItem(arm) => + token::NtImplItem(fld.fold_impl_item(arm) + .expect_one("expected fold to produce exactly one item")), + token::NtTraitItem(arm) => + token::NtTraitItem(fld.fold_trait_item(arm) + .expect_one("expected fold to produce exactly one item")), } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d13ab65d72b..6b0674c9a41 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -19,7 +19,6 @@ use str::char_at; use std::borrow::Cow; use std::char; -use std::fmt; use 
std::mem::replace; use std::rc::Rc; @@ -71,11 +70,6 @@ pub struct StringReader<'a> { pub peek_tok: token::Token, pub peek_span: Span, - // FIXME (Issue #16472): This field should go away after ToToken impls - // are revised to go directly to token-trees. - /// Is \x00<name>,<ctxt>\x00 is interpreted as encoded ast::Ident? - read_embedded_ident: bool, - // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. source_text: Rc<String> @@ -130,17 +124,6 @@ impl<'a> Reader for TtReader<'a> { } } -// FIXME (Issue #16472): This function should go away after -// ToToken impls are revised to go directly to token-trees. -pub fn make_reader_with_embedded_idents<'b>(span_diagnostic: &'b SpanHandler, - filemap: Rc<codemap::FileMap>) - -> StringReader<'b> { - let mut sr = StringReader::new_raw(span_diagnostic, filemap); - sr.read_embedded_ident = true; - sr.advance_token(); - sr -} - impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into pos and curr pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler, @@ -162,7 +145,6 @@ impl<'a> StringReader<'a> { /* dummy values; not read */ peek_tok: token::Eof, peek_span: codemap::DUMMY_SP, - read_embedded_ident: false, source_text: source_text }; sr.bump(); @@ -578,81 +560,6 @@ impl<'a> StringReader<'a> { }) } - // FIXME (Issue #16472): The scan_embedded_hygienic_ident function - // should go away after we revise the syntax::ext::quote::ToToken - // impls to go directly to token-trees instead of thing -> string - // -> token-trees. (The function is currently used to resolve - // Issues #15750 and #15962.) - // - // Since this function is only used for certain internal macros, - // and the functionality it provides is not exposed to end user - // programs, pnkfelix deliberately chose to write it in a way that - // favors rustc debugging effectiveness over runtime efficiency. 
- - /// Scan through input of form \x00name_NNNNNN,ctxt_CCCCCCC\x00 - /// whence: `NNNNNN` is a string of characters forming an integer - /// (the name) and `CCCCCCC` is a string of characters forming an - /// integer (the ctxt), separate by a comma and delimited by a - /// `\x00` marker. - #[inline(never)] - fn scan_embedded_hygienic_ident(&mut self) -> ast::Ident { - fn bump_expecting_char<'a,D:fmt::Debug>(r: &mut StringReader<'a>, - c: char, - described_c: D, - whence: &str) { - match r.curr { - Some(r_c) if r_c == c => r.bump(), - Some(r_c) => panic!("expected {:?}, hit {:?}, {}", described_c, r_c, whence), - None => panic!("expected {:?}, hit EOF, {}", described_c, whence), - } - } - - let whence = "while scanning embedded hygienic ident"; - - // skip over the leading `\x00` - bump_expecting_char(self, '\x00', "nul-byte", whence); - - // skip over the "name_" - for c in "name_".chars() { - bump_expecting_char(self, c, c, whence); - } - - let start_bpos = self.last_pos; - let base = 10; - - // find the integer representing the name - self.scan_digits(base, base); - let encoded_name : u32 = self.with_str_from(start_bpos, |s| { - u32::from_str_radix(s, 10).unwrap_or_else(|_| { - panic!("expected digits representing a name, got {:?}, {}, range [{:?},{:?}]", - s, whence, start_bpos, self.last_pos); - }) - }); - - // skip over the `,` - bump_expecting_char(self, ',', "comma", whence); - - // skip over the "ctxt_" - for c in "ctxt_".chars() { - bump_expecting_char(self, c, c, whence); - } - - // find the integer representing the ctxt - let start_bpos = self.last_pos; - self.scan_digits(base, base); - let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| { - u32::from_str_radix(s, 10).unwrap_or_else(|_| { - panic!("expected digits representing a ctxt, got {:?}, {}", s, whence); - }) - }); - - // skip over the `\x00` - bump_expecting_char(self, '\x00', "nul-byte", whence); - - ast::Ident { name: ast::Name(encoded_name), - ctxt: encoded_ctxt, } - } 
- /// Scan through any digits (base `scan_radix`) or underscores, /// and return how many digits there were. /// @@ -1020,20 +927,6 @@ impl<'a> StringReader<'a> { return token::Literal(num, suffix) } - if self.read_embedded_ident { - match (c.unwrap(), self.nextch(), self.nextnextch()) { - ('\x00', Some('n'), Some('a')) => { - let ast_ident = self.scan_embedded_hygienic_ident(); - return if self.curr_is(':') && self.nextch_is(':') { - token::Ident(ast_ident, token::ModName) - } else { - token::Ident(ast_ident, token::Plain) - }; - } - _ => {} - } - } - match c.expect("next_token_inner called at EOF") { // One-byte tokens. ';' => { self.bump(); return token::Semi; } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index dee3e5fee74..8c9ce5f78d4 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -166,9 +166,6 @@ pub fn parse_stmt_from_source_str(name: String, maybe_aborted(p.parse_stmt(), p) } -// Note: keep in sync with `with_hygiene::parse_tts_from_source_str` -// until #16472 is resolved. -// // Warning: This parses with quote_depth > 0, which is not the default. pub fn parse_tts_from_source_str(name: String, source: String, @@ -186,8 +183,6 @@ pub fn parse_tts_from_source_str(name: String, maybe_aborted(panictry!(p.parse_all_token_trees()),p) } -// Note: keep in sync with `with_hygiene::new_parser_from_source_str` -// until #16472 is resolved. // Create a new parser from a source string pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, @@ -220,8 +215,6 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, p } -// Note: keep this in sync with `with_hygiene::filemap_to_parser` until -// #16472 is resolved. 
/// Given a filemap and config, return a parser pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, @@ -277,8 +270,6 @@ pub fn string_to_filemap(sess: &ParseSess, source: String, path: String) sess.span_diagnostic.cm.new_filemap(path, source) } -// Note: keep this in sync with `with_hygiene::filemap_to_tts` (apart -// from the StringReader constructor), until #16472 is resolved. /// Given a filemap, produce a sequence of token-trees pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<ast::TokenTree> { @@ -300,69 +291,6 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess, p } -// FIXME (Issue #16472): The `with_hygiene` mod should go away after -// ToToken impls are revised to go directly to token-trees. -pub mod with_hygiene { - use ast; - use codemap::FileMap; - use parse::parser::Parser; - use std::rc::Rc; - use super::ParseSess; - use super::{maybe_aborted, string_to_filemap, tts_to_parser}; - - // Note: keep this in sync with `super::parse_tts_from_source_str` until - // #16472 is resolved. - // - // Warning: This parses with quote_depth > 0, which is not the default. - pub fn parse_tts_from_source_str(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &ParseSess) -> Vec<ast::TokenTree> { - let mut p = new_parser_from_source_str( - sess, - cfg, - name, - source - ); - p.quote_depth += 1; - // right now this is re-creating the token trees from ... token trees. - maybe_aborted(panictry!(p.parse_all_token_trees()),p) - } - - // Note: keep this in sync with `super::new_parser_from_source_str` until - // #16472 is resolved. - // Create a new parser from a source string - fn new_parser_from_source_str<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - name: String, - source: String) -> Parser<'a> { - filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) - } - - // Note: keep this in sync with `super::filemap_to_parserr` until - // #16472 is resolved. 
- /// Given a filemap and config, return a parser - fn filemap_to_parser<'a>(sess: &'a ParseSess, - filemap: Rc<FileMap>, - cfg: ast::CrateConfig) -> Parser<'a> { - tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg) - } - - // Note: keep this in sync with `super::filemap_to_tts` until - // #16472 is resolved. - /// Given a filemap, produce a sequence of token-trees - fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) - -> Vec<ast::TokenTree> { - // it appears to me that the cfg doesn't matter here... indeed, - // parsing tt's probably shouldn't require a parser at all. - use super::lexer::make_reader_with_embedded_idents as make_reader; - let cfg = Vec::new(); - let srdr = make_reader(&sess.span_diagnostic, filemap); - let mut p1 = Parser::new(sess, cfg, Box::new(srdr)); - panictry!(p1.parse_all_token_trees()) - } -} - /// Abort if necessary pub fn maybe_aborted<T>(result: T, p: Parser) -> T { p.abort_if_errors(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 47ea8d556fa..5f097256318 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1152,7 +1152,8 @@ impl<'a> Parser<'a> { &token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), seq_sep_none(), - |p| { + |p| -> PResult<P<TraitItem>> { + maybe_whole!(no_clone p, NtTraitItem); let mut attrs = p.parse_outer_attributes(); let lo = p.span.lo; @@ -2943,6 +2944,8 @@ impl<'a> Parser<'a> { } pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> { + maybe_whole!(no_clone self, NtArm); + let attrs = self.parse_outer_attributes(); let pats = try!(self.parse_pats()); let mut guard = None; @@ -4335,6 +4338,8 @@ impl<'a> Parser<'a> { /// Parse an impl item. 
pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> { + maybe_whole!(no_clone self, NtImplItem); + let mut attrs = self.parse_outer_attributes(); let lo = self.span.lo; let vis = try!(self.parse_visibility()); diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index e33b1391a10..0106de913bb 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -381,6 +381,10 @@ pub enum Nonterminal { NtMeta(P<ast::MetaItem>), NtPath(Box<ast::Path>), NtTT(P<ast::TokenTree>), // needs P'ed to break a circularity + // These is not exposed to macros, but is used by quasiquote. + NtArm(ast::Arm), + NtImplItem(P<ast::ImplItem>), + NtTraitItem(P<ast::TraitItem>), } impl fmt::Debug for Nonterminal { @@ -396,6 +400,9 @@ impl fmt::Debug for Nonterminal { NtMeta(..) => f.pad("NtMeta(..)"), NtPath(..) => f.pad("NtPath(..)"), NtTT(..) => f.pad("NtTT(..)"), + NtArm(..) => f.pad("NtArm(..)"), + NtImplItem(..) => f.pad("NtImplItem(..)"), + NtTraitItem(..) => f.pad("NtTraitItem(..)"), } } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 980ce720026..36364eb9bf3 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -66,7 +66,6 @@ pub struct State<'a> { cur_cmnt_and_lit: CurrentCommentAndLiteral, boxes: Vec<pp::Breaks>, ann: &'a (PpAnn+'a), - encode_idents_with_hygiene: bool, } pub fn rust_printer<'a>(writer: Box<Write+'a>) -> State<'a> { @@ -87,7 +86,6 @@ pub fn rust_printer_annotated<'a>(writer: Box<Write+'a>, }, boxes: Vec::new(), ann: ann, - encode_idents_with_hygiene: false, } } @@ -179,7 +177,6 @@ impl<'a> State<'a> { }, boxes: Vec::new(), ann: ann, - encode_idents_with_hygiene: false, } } } @@ -290,103 +287,99 @@ pub fn token_to_string(tok: &Token) -> String { token::SpecialVarNt(var) => format!("${}", var.as_str()), token::Interpolated(ref nt) => match *nt { - token::NtExpr(ref e) => expr_to_string(&**e), - token::NtMeta(ref e) => meta_item_to_string(&**e), - token::NtTy(ref e) 
=> ty_to_string(&**e), - token::NtPath(ref e) => path_to_string(&**e), - token::NtItem(..) => "an interpolated item".to_string(), - token::NtBlock(..) => "an interpolated block".to_string(), - token::NtStmt(..) => "an interpolated statement".to_string(), - token::NtPat(..) => "an interpolated pattern".to_string(), - token::NtIdent(..) => "an interpolated identifier".to_string(), - token::NtTT(..) => "an interpolated tt".to_string(), + token::NtExpr(ref e) => expr_to_string(&**e), + token::NtMeta(ref e) => meta_item_to_string(&**e), + token::NtTy(ref e) => ty_to_string(&**e), + token::NtPath(ref e) => path_to_string(&**e), + token::NtItem(..) => "an interpolated item".to_string(), + token::NtBlock(..) => "an interpolated block".to_string(), + token::NtStmt(..) => "an interpolated statement".to_string(), + token::NtPat(..) => "an interpolated pattern".to_string(), + token::NtIdent(..) => "an interpolated identifier".to_string(), + token::NtTT(..) => "an interpolated tt".to_string(), + token::NtArm(..) => "an interpolated arm".to_string(), + token::NtImplItem(..) => "an interpolated impl item".to_string(), + token::NtTraitItem(..) => "an interpolated trait item".to_string(), } } } -// FIXME (Issue #16472): the thing_to_string_impls macro should go away -// after we revise the syntax::ext::quote::ToToken impls to go directly -// to token-trees instead of thing -> string -> token-trees. - -macro_rules! 
thing_to_string_impls { - ($to_string:ident) => { - pub fn ty_to_string(ty: &ast::Ty) -> String { - $to_string(|s| s.print_type(ty)) + to_string(|s| s.print_type(ty)) } pub fn bounds_to_string(bounds: &[ast::TyParamBound]) -> String { - $to_string(|s| s.print_bounds("", bounds)) + to_string(|s| s.print_bounds("", bounds)) } pub fn pat_to_string(pat: &ast::Pat) -> String { - $to_string(|s| s.print_pat(pat)) + to_string(|s| s.print_pat(pat)) } pub fn arm_to_string(arm: &ast::Arm) -> String { - $to_string(|s| s.print_arm(arm)) + to_string(|s| s.print_arm(arm)) } pub fn expr_to_string(e: &ast::Expr) -> String { - $to_string(|s| s.print_expr(e)) + to_string(|s| s.print_expr(e)) } pub fn lifetime_to_string(e: &ast::Lifetime) -> String { - $to_string(|s| s.print_lifetime(e)) + to_string(|s| s.print_lifetime(e)) } pub fn tt_to_string(tt: &ast::TokenTree) -> String { - $to_string(|s| s.print_tt(tt)) + to_string(|s| s.print_tt(tt)) } pub fn tts_to_string(tts: &[ast::TokenTree]) -> String { - $to_string(|s| s.print_tts(tts)) + to_string(|s| s.print_tts(tts)) } pub fn stmt_to_string(stmt: &ast::Stmt) -> String { - $to_string(|s| s.print_stmt(stmt)) + to_string(|s| s.print_stmt(stmt)) } pub fn attr_to_string(attr: &ast::Attribute) -> String { - $to_string(|s| s.print_attribute(attr)) + to_string(|s| s.print_attribute(attr)) } pub fn item_to_string(i: &ast::Item) -> String { - $to_string(|s| s.print_item(i)) + to_string(|s| s.print_item(i)) } pub fn impl_item_to_string(i: &ast::ImplItem) -> String { - $to_string(|s| s.print_impl_item(i)) + to_string(|s| s.print_impl_item(i)) } pub fn trait_item_to_string(i: &ast::TraitItem) -> String { - $to_string(|s| s.print_trait_item(i)) + to_string(|s| s.print_trait_item(i)) } pub fn generics_to_string(generics: &ast::Generics) -> String { - $to_string(|s| s.print_generics(generics)) + to_string(|s| s.print_generics(generics)) } pub fn where_clause_to_string(i: &ast::WhereClause) -> String { - $to_string(|s| s.print_where_clause(i)) + 
to_string(|s| s.print_where_clause(i)) } pub fn fn_block_to_string(p: &ast::FnDecl) -> String { - $to_string(|s| s.print_fn_block_args(p)) + to_string(|s| s.print_fn_block_args(p)) } pub fn path_to_string(p: &ast::Path) -> String { - $to_string(|s| s.print_path(p, false, 0)) + to_string(|s| s.print_path(p, false, 0)) } pub fn ident_to_string(id: &ast::Ident) -> String { - $to_string(|s| s.print_ident(*id)) + to_string(|s| s.print_ident(*id)) } pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ident, opt_explicit_self: Option<&ast::ExplicitSelf_>, generics: &ast::Generics) -> String { - $to_string(|s| { + to_string(|s| { try!(s.head("")); try!(s.print_fn(decl, unsafety, abi::Rust, Some(name), generics, opt_explicit_self, ast::Inherited)); @@ -396,7 +389,7 @@ pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ide } pub fn block_to_string(blk: &ast::Block) -> String { - $to_string(|s| { + to_string(|s| { // containing cbox, will be closed by print-block at } try!(s.cbox(indent_unit)); // head-ibox, will be closed by print-block after { @@ -406,59 +399,31 @@ pub fn block_to_string(blk: &ast::Block) -> String { } pub fn meta_item_to_string(mi: &ast::MetaItem) -> String { - $to_string(|s| s.print_meta_item(mi)) + to_string(|s| s.print_meta_item(mi)) } pub fn attribute_to_string(attr: &ast::Attribute) -> String { - $to_string(|s| s.print_attribute(attr)) + to_string(|s| s.print_attribute(attr)) } pub fn lit_to_string(l: &ast::Lit) -> String { - $to_string(|s| s.print_literal(l)) + to_string(|s| s.print_literal(l)) } pub fn explicit_self_to_string(explicit_self: &ast::ExplicitSelf_) -> String { - $to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) + to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) } pub fn variant_to_string(var: &ast::Variant) -> String { - $to_string(|s| s.print_variant(var)) + to_string(|s| s.print_variant(var)) } pub fn 
arg_to_string(arg: &ast::Arg) -> String { - $to_string(|s| s.print_arg(arg)) + to_string(|s| s.print_arg(arg)) } pub fn mac_to_string(arg: &ast::Mac) -> String { - $to_string(|s| s.print_mac(arg, ::parse::token::Paren)) -} - -} } - -thing_to_string_impls! { to_string } - -// FIXME (Issue #16472): the whole `with_hygiene` mod should go away -// after we revise the syntax::ext::quote::ToToken impls to go directly -// to token-trees instea of thing -> string -> token-trees. - -pub mod with_hygiene { - use abi; - use ast; - use std::io; - use super::indent_unit; - - // This function is the trick that all the rest of the routines - // hang on. - pub fn to_string_hyg<F>(f: F) -> String where - F: FnOnce(&mut super::State) -> io::Result<()>, - { - super::to_string(move |s| { - s.encode_idents_with_hygiene = true; - f(s) - }) - } - - thing_to_string_impls! { to_string_hyg } + to_string(|s| s.print_mac(arg, ::parse::token::Paren)) } pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> String { @@ -2006,12 +1971,7 @@ impl<'a> State<'a> { } pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> { - if self.encode_idents_with_hygiene { - let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, &encoded[..])) - } else { - try!(word(&mut self.s, &token::get_ident(ident))) - } + try!(word(&mut self.s, &token::get_ident(ident))); self.ann.post(self, NodeIdent(&ident)) } |
