about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
authorGeoffry Song <goffrie@gmail.com>2015-03-05 15:06:49 -0500
committerGeoffry Song <goffrie@gmail.com>2015-04-25 21:42:10 -0400
commit2d9831dea598d8a45c69e8c799503e8a397aacc0 (patch)
tree01b440d423b022b089549022f8a5b411514360aa /src/libsyntax/parse
parentda623844a9b3f9164723bf7ef2c4744b539af13f (diff)
downloadrust-2d9831dea598d8a45c69e8c799503e8a397aacc0.tar.gz
rust-2d9831dea598d8a45c69e8c799503e8a397aacc0.zip
Interpolate AST nodes in quasiquote.
This replaces the `ToTokens` implementations for expressions, statements,
etc. with almost-trivial ones that produce `Interpolated(*Nt(...))`
pseudo-tokens. In this way, quasiquote now works the same way as macros
do: already-parsed AST fragments are used as-is, not reparsed.

The `ToSource` trait is removed. Quasiquote no longer involves
pretty-printing at all, which removes the need for the
`encode_with_hygiene` hack. All associated machinery is removed.

A new `Nonterminal` is added, NtArm, which the parser now interpolates.
This is just for quasiquote, not macros (although it could be in the
future).

`ToTokens` is no longer implemented for `Arg` (although this could be
added again) and `Generics` (which I don't think makes sense).

This breaks any compiler extensions that relied on the ability of
`ToTokens` to turn AST fragments back into inspectable token trees. For
this reason, this closes #16987.

As such, this is a [breaking-change].

Fixes #16472.
Fixes #15962.
Fixes #17397.
Fixes #16617.
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--src/libsyntax/parse/lexer/mod.rs107
-rw-r--r--src/libsyntax/parse/mod.rs72
-rw-r--r--src/libsyntax/parse/parser.rs7
-rw-r--r--src/libsyntax/parse/token.rs7
4 files changed, 13 insertions, 180 deletions
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index d13ab65d72b..6b0674c9a41 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -19,7 +19,6 @@ use str::char_at;
 
 use std::borrow::Cow;
 use std::char;
-use std::fmt;
 use std::mem::replace;
 use std::rc::Rc;
 
@@ -71,11 +70,6 @@ pub struct StringReader<'a> {
     pub peek_tok: token::Token,
     pub peek_span: Span,
 
-    // FIXME (Issue #16472): This field should go away after ToToken impls
-    // are revised to go directly to token-trees.
-    /// Is \x00<name>,<ctxt>\x00 is interpreted as encoded ast::Ident?
-    read_embedded_ident: bool,
-
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
     source_text: Rc<String>
@@ -130,17 +124,6 @@ impl<'a> Reader for TtReader<'a> {
     }
 }
 
-// FIXME (Issue #16472): This function should go away after
-// ToToken impls are revised to go directly to token-trees.
-pub fn make_reader_with_embedded_idents<'b>(span_diagnostic: &'b SpanHandler,
-                                            filemap: Rc<codemap::FileMap>)
-                                            -> StringReader<'b> {
-    let mut sr = StringReader::new_raw(span_diagnostic, filemap);
-    sr.read_embedded_ident = true;
-    sr.advance_token();
-    sr
-}
-
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into pos and curr
     pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler,
@@ -162,7 +145,6 @@ impl<'a> StringReader<'a> {
             /* dummy values; not read */
             peek_tok: token::Eof,
             peek_span: codemap::DUMMY_SP,
-            read_embedded_ident: false,
             source_text: source_text
         };
         sr.bump();
@@ -578,81 +560,6 @@ impl<'a> StringReader<'a> {
         })
     }
 
-    // FIXME (Issue #16472): The scan_embedded_hygienic_ident function
-    // should go away after we revise the syntax::ext::quote::ToToken
-    // impls to go directly to token-trees instead of thing -> string
-    // -> token-trees.  (The function is currently used to resolve
-    // Issues #15750 and #15962.)
-    //
-    // Since this function is only used for certain internal macros,
-    // and the functionality it provides is not exposed to end user
-    // programs, pnkfelix deliberately chose to write it in a way that
-    // favors rustc debugging effectiveness over runtime efficiency.
-
-    /// Scan through input of form \x00name_NNNNNN,ctxt_CCCCCCC\x00
-    /// whence: `NNNNNN` is a string of characters forming an integer
-    /// (the name) and `CCCCCCC` is a string of characters forming an
-    /// integer (the ctxt), separate by a comma and delimited by a
-    /// `\x00` marker.
-    #[inline(never)]
-    fn scan_embedded_hygienic_ident(&mut self) -> ast::Ident {
-        fn bump_expecting_char<'a,D:fmt::Debug>(r: &mut StringReader<'a>,
-                                                c: char,
-                                                described_c: D,
-                                                whence: &str) {
-            match r.curr {
-                Some(r_c) if r_c == c => r.bump(),
-                Some(r_c) => panic!("expected {:?}, hit {:?}, {}", described_c, r_c, whence),
-                None      => panic!("expected {:?}, hit EOF, {}", described_c, whence),
-            }
-        }
-
-        let whence = "while scanning embedded hygienic ident";
-
-        // skip over the leading `\x00`
-        bump_expecting_char(self, '\x00', "nul-byte", whence);
-
-        // skip over the "name_"
-        for c in "name_".chars() {
-            bump_expecting_char(self, c, c, whence);
-        }
-
-        let start_bpos = self.last_pos;
-        let base = 10;
-
-        // find the integer representing the name
-        self.scan_digits(base, base);
-        let encoded_name : u32 = self.with_str_from(start_bpos, |s| {
-            u32::from_str_radix(s, 10).unwrap_or_else(|_| {
-                panic!("expected digits representing a name, got {:?}, {}, range [{:?},{:?}]",
-                      s, whence, start_bpos, self.last_pos);
-            })
-        });
-
-        // skip over the `,`
-        bump_expecting_char(self, ',', "comma", whence);
-
-        // skip over the "ctxt_"
-        for c in "ctxt_".chars() {
-            bump_expecting_char(self, c, c, whence);
-        }
-
-        // find the integer representing the ctxt
-        let start_bpos = self.last_pos;
-        self.scan_digits(base, base);
-        let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| {
-            u32::from_str_radix(s, 10).unwrap_or_else(|_| {
-                panic!("expected digits representing a ctxt, got {:?}, {}", s, whence);
-            })
-        });
-
-        // skip over the `\x00`
-        bump_expecting_char(self, '\x00', "nul-byte", whence);
-
-        ast::Ident { name: ast::Name(encoded_name),
-                     ctxt: encoded_ctxt, }
-    }
-
     /// Scan through any digits (base `scan_radix`) or underscores,
     /// and return how many digits there were.
     ///
@@ -1020,20 +927,6 @@ impl<'a> StringReader<'a> {
             return token::Literal(num, suffix)
         }
 
-        if self.read_embedded_ident {
-            match (c.unwrap(), self.nextch(), self.nextnextch()) {
-                ('\x00', Some('n'), Some('a')) => {
-                    let ast_ident = self.scan_embedded_hygienic_ident();
-                    return if self.curr_is(':') && self.nextch_is(':') {
-                        token::Ident(ast_ident, token::ModName)
-                    } else {
-                        token::Ident(ast_ident, token::Plain)
-                    };
-                }
-                _ => {}
-            }
-        }
-
         match c.expect("next_token_inner called at EOF") {
           // One-byte tokens.
           ';' => { self.bump(); return token::Semi; }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index dee3e5fee74..8c9ce5f78d4 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -166,9 +166,6 @@ pub fn parse_stmt_from_source_str(name: String,
     maybe_aborted(p.parse_stmt(), p)
 }
 
-// Note: keep in sync with `with_hygiene::parse_tts_from_source_str`
-// until #16472 is resolved.
-//
 // Warning: This parses with quote_depth > 0, which is not the default.
 pub fn parse_tts_from_source_str(name: String,
                                  source: String,
@@ -186,8 +183,6 @@ pub fn parse_tts_from_source_str(name: String,
     maybe_aborted(panictry!(p.parse_all_token_trees()),p)
 }
 
-// Note: keep in sync with `with_hygiene::new_parser_from_source_str`
-// until #16472 is resolved.
 // Create a new parser from a source string
 pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
                                       cfg: ast::CrateConfig,
@@ -220,8 +215,6 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
     p
 }
 
-// Note: keep this in sync with `with_hygiene::filemap_to_parser` until
-// #16472 is resolved.
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
                              filemap: Rc<FileMap>,
@@ -277,8 +270,6 @@ pub fn string_to_filemap(sess: &ParseSess, source: String, path: String)
     sess.span_diagnostic.cm.new_filemap(path, source)
 }
 
-// Note: keep this in sync with `with_hygiene::filemap_to_tts` (apart
-// from the StringReader constructor), until #16472 is resolved.
 /// Given a filemap, produce a sequence of token-trees
 pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
     -> Vec<ast::TokenTree> {
@@ -300,69 +291,6 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess,
     p
 }
 
-// FIXME (Issue #16472): The `with_hygiene` mod should go away after
-// ToToken impls are revised to go directly to token-trees.
-pub mod with_hygiene {
-    use ast;
-    use codemap::FileMap;
-    use parse::parser::Parser;
-    use std::rc::Rc;
-    use super::ParseSess;
-    use super::{maybe_aborted, string_to_filemap, tts_to_parser};
-
-    // Note: keep this in sync with `super::parse_tts_from_source_str` until
-    // #16472 is resolved.
-    //
-    // Warning: This parses with quote_depth > 0, which is not the default.
-    pub fn parse_tts_from_source_str(name: String,
-                                     source: String,
-                                     cfg: ast::CrateConfig,
-                                     sess: &ParseSess) -> Vec<ast::TokenTree> {
-        let mut p = new_parser_from_source_str(
-            sess,
-            cfg,
-            name,
-            source
-        );
-        p.quote_depth += 1;
-        // right now this is re-creating the token trees from ... token trees.
-        maybe_aborted(panictry!(p.parse_all_token_trees()),p)
-    }
-
-    // Note: keep this in sync with `super::new_parser_from_source_str` until
-    // #16472 is resolved.
-    // Create a new parser from a source string
-    fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
-                                      cfg: ast::CrateConfig,
-                                      name: String,
-                                      source: String) -> Parser<'a> {
-        filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
-    }
-
-    // Note: keep this in sync with `super::filemap_to_parserr` until
-    // #16472 is resolved.
-    /// Given a filemap and config, return a parser
-    fn filemap_to_parser<'a>(sess: &'a ParseSess,
-                             filemap: Rc<FileMap>,
-                             cfg: ast::CrateConfig) -> Parser<'a> {
-        tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
-    }
-
-    // Note: keep this in sync with `super::filemap_to_tts` until
-    // #16472 is resolved.
-    /// Given a filemap, produce a sequence of token-trees
-    fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
-                      -> Vec<ast::TokenTree> {
-        // it appears to me that the cfg doesn't matter here... indeed,
-        // parsing tt's probably shouldn't require a parser at all.
-        use super::lexer::make_reader_with_embedded_idents as make_reader;
-        let cfg = Vec::new();
-        let srdr = make_reader(&sess.span_diagnostic, filemap);
-        let mut p1 = Parser::new(sess, cfg, Box::new(srdr));
-        panictry!(p1.parse_all_token_trees())
-    }
-}
-
 /// Abort if necessary
 pub fn maybe_aborted<T>(result: T, p: Parser) -> T {
     p.abort_if_errors();
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 47ea8d556fa..5f097256318 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -1152,7 +1152,8 @@ impl<'a> Parser<'a> {
             &token::OpenDelim(token::Brace),
             &token::CloseDelim(token::Brace),
             seq_sep_none(),
-            |p| {
+            |p| -> PResult<P<TraitItem>> {
+            maybe_whole!(no_clone p, NtTraitItem);
             let mut attrs = p.parse_outer_attributes();
             let lo = p.span.lo;
 
@@ -2943,6 +2944,8 @@ impl<'a> Parser<'a> {
     }
 
     pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> {
+        maybe_whole!(no_clone self, NtArm);
+
         let attrs = self.parse_outer_attributes();
         let pats = try!(self.parse_pats());
         let mut guard = None;
@@ -4335,6 +4338,8 @@ impl<'a> Parser<'a> {
 
     /// Parse an impl item.
     pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> {
+        maybe_whole!(no_clone self, NtImplItem);
+
         let mut attrs = self.parse_outer_attributes();
         let lo = self.span.lo;
         let vis = try!(self.parse_visibility());
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index e33b1391a10..0106de913bb 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -381,6 +381,10 @@ pub enum Nonterminal {
     NtMeta(P<ast::MetaItem>),
     NtPath(Box<ast::Path>),
     NtTT(P<ast::TokenTree>), // needs P'ed to break a circularity
+    // These are not exposed to macros, but are used by quasiquote.
+    NtArm(ast::Arm),
+    NtImplItem(P<ast::ImplItem>),
+    NtTraitItem(P<ast::TraitItem>),
 }
 
 impl fmt::Debug for Nonterminal {
@@ -396,6 +400,9 @@ impl fmt::Debug for Nonterminal {
             NtMeta(..) => f.pad("NtMeta(..)"),
             NtPath(..) => f.pad("NtPath(..)"),
             NtTT(..) => f.pad("NtTT(..)"),
+            NtArm(..) => f.pad("NtArm(..)"),
+            NtImplItem(..) => f.pad("NtImplItem(..)"),
+            NtTraitItem(..) => f.pad("NtTraitItem(..)"),
         }
     }
 }