author     Jeffrey Seyfried <jeffrey.seyfried@gmail.com>    2017-02-21 05:05:59 +0000
committer  Jeffrey Seyfried <jeffrey.seyfried@gmail.com>    2017-03-03 02:15:37 +0000
commit     f6eaaf350ea683ae8b33b4a79422ad1a10ea0987 (patch)
tree       06ec138259c84d8ec6d46b33402d6470263a8880 /src/libsyntax/parse/mod.rs
parent     8cd0c0885f841c9bfd0c330e3da21363427010e4 (diff)
Integrate `TokenStream`.
Diffstat (limited to 'src/libsyntax/parse/mod.rs')
-rw-r--r--  src/libsyntax/parse/mod.rs  29
1 file changed, 12 insertions(+), 17 deletions(-)
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f783e32d621..7207463e1b9 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -19,7 +19,7 @@ use parse::parser::Parser;
 use ptr::P;
 use str::char_at;
 use symbol::Symbol;
-use tokenstream;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::cell::RefCell;
 use std::collections::HashSet;
@@ -141,9 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                     -> Vec<tokenstream::TokenTree> {
-    filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
+pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+                                        -> TokenStream {
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
 }
 
 // Create a new parser from a source string
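Note on the new entry point: along with the rename, the return type changes from Vec<TokenTree> to TokenStream. A rough sketch of a call site, with hypothetical variable names, assuming a ParseSess named `sess` is in scope; `into_trees()` is the conversion used further down in this diff:

    // Old: let tts = parse_tts_from_source_str(name, source, &sess);
    let stream = parse_stream_from_source_str("<example>".to_string(),
                                              "fn f() {}".to_string(), &sess);
    // Callers that still need individual token trees can collect them back out.
    let tts: Vec<TokenTree> = stream.into_trees().collect();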
@@ -175,7 +175,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
     let end_pos = filemap.end_pos;
-    let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = syntax_pos::mk_sp(end_pos, end_pos);
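The same two-step composition is available to callers of these helpers directly. A sketch of a hypothetical call site, assuming `sess: &ParseSess` and `filemap: Rc<FileMap>` are already at hand:

    let stream = filemap_to_stream(sess, filemap.clone());  // lex the filemap into a TokenStream
    let mut parser = stream_to_parser(sess, stream);        // wrap the stream in a Parser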
@@ -186,13 +186,8 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>)
-                               -> Parser<'a> {
-    tts_to_parser(sess, tts)
-}
-
-pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
-    tts_to_parser(sess, ts.into_trees().collect())
+pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+    stream_to_parser(sess, tts.into_iter().collect())
 }
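The surviving constructor now collects the Vec<TokenTree> into a TokenStream, which presumes a FromIterator<TokenTree> impl on TokenStream; code that already holds a TokenStream (the old new_parser_from_ts callers) can go through stream_to_parser instead. A sketch with hypothetical call sites, where `sess`, `tts`, and `ts` stand in for values the caller already has:

    // quote!-expanded code keeps calling the old name with a Vec<TokenTree> ...
    let parser_from_trees = new_parser_from_tts(sess, tts);
    // ... while stream-holding callers can use the stream-based helper directly.
    let parser_from_stream = stream_to_parser(sess, ts);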
 
 
@@ -215,15 +210,15 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given tts and the ParseSess, produce a parser
-pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
-    let mut p = Parser::new(sess, tts, None, false);
+/// Given stream and the ParseSess, produce a parser
+pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+    let mut p = Parser::new(sess, stream, None, false);
     p.check_unknown_macro_variable();
     p
 }
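Taken together, the lexer-to-parser pipeline now carries a TokenStream end to end. A condensed sketch of the flow defined above, as a hypothetical driver assuming `sess: &ParseSess` and `filemap: Rc<FileMap>`:

    let stream = filemap_to_stream(sess, filemap);  // lexer::StringReader -> TokenStream
    let mut p = stream_to_parser(sess, stream);     // TokenStream -> Parser
    let stmt = p.parse_stmt();                      // drive the Parser as before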
@@ -660,7 +655,7 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
-        let tts: &[tokenstream::TokenTree] = &tts[..];
+        let tts: &[TokenTree] = &tts[..];
 
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (