Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs | 24
-rw-r--r--  src/libsyntax/parse/mod.rs              | 29
-rw-r--r--  src/libsyntax/parse/parser.rs           | 42
3 files changed, 46 insertions(+), 49 deletions(-)
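
The three diffs below move the parse entry points from `Vec<TokenTree>` to `TokenStream`. A minimal caller-side sketch under the new signatures (not taken verbatim from the patch; a `ParseSess` named `sess` is assumed to be in scope and error handling is elided):

    // The lexer now hands back a TokenStream directly ...
    let stream: TokenStream = parse_stream_from_source_str("<example>".to_string(),
                                                           "fn f() {}".to_string(),
                                                           &sess);
    // ... and a Parser is built straight from that stream.
    let mut parser = stream_to_parser(&sess, stream);
    let _item = parser.parse_item();    // ordinary parsing proceeds unchanged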
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index eafc3f77ab0..554a1fcfc71 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -12,32 +12,30 @@ use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
 use syntax_pos::Span;
-use tokenstream::{Delimited, TokenTree};
-
-use std::rc::Rc;
+use tokenstream::{Delimited, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
+    pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            tts.push(self.parse_token_tree()?);
+            tts.push(self.parse_token_tree()?.into());
         }
-        Ok(tts)
+        Ok(TokenStream::concat(tts))
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
-    fn parse_token_trees_until_close_delim(&mut self) -> Vec<TokenTree> {
+    fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
             if let token::CloseDelim(..) = self.token {
-                return tts;
+                return TokenStream::concat(tts);
             }
             match self.parse_token_tree() {
-                Ok(tt) => tts.push(tt),
+                Ok(tt) => tts.push(tt.into()),
                 Err(mut e) => {
                     e.emit();
-                    return tts;
+                    return TokenStream::concat(tts);
                 }
             }
         }
@@ -111,10 +109,10 @@ impl<'a> StringReader<'a> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(span, Delimited {
                     delim: delim,
-                    tts: tts,
-                })))
+                    tts: tts.into(),
+                }))
             },
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
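
In tokentrees.rs the lexer now builds the stream piecewise: each parsed tree becomes a one-element stream via `.into()`, the pieces are merged with `TokenStream::concat`, and a delimited group owns its sub-stream directly instead of behind an `Rc`. A sketch of that pattern (assuming `span`, `delim`, and a collected `Vec<TokenStream>` named `pieces` are in scope, and that the `tts` field now holds the thin stream form, hence the final `.into()`):

    let inner = TokenStream::concat(pieces);
    let tree = TokenTree::Delimited(span, Delimited {
        delim: delim,
        tts: inner.into(),   // TokenStream -> the field's thin representation
    });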
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f783e32d621..7207463e1b9 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -19,7 +19,7 @@ use parse::parser::Parser;
 use ptr::P;
 use str::char_at;
 use symbol::Symbol;
-use tokenstream;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::cell::RefCell;
 use std::collections::HashSet;
@@ -141,9 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                     -> Vec<tokenstream::TokenTree> {
-    filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
+pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+                                        -> TokenStream {
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
 }
 
 // Create a new parser from a source string
@@ -175,7 +175,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
     let end_pos = filemap.end_pos;
-    let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = syntax_pos::mk_sp(end_pos, end_pos);
@@ -186,13 +186,8 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>)
-                               -> Parser<'a> {
-    tts_to_parser(sess, tts)
-}
-
-pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
-    tts_to_parser(sess, ts.into_trees().collect())
+pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+    stream_to_parser(sess, tts.into_iter().collect())
 }
 
 
@@ -215,15 +210,15 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given tts and the ParseSess, produce a parser
-pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
-    let mut p = Parser::new(sess, tts, None, false);
+/// Given stream and the ParseSess, produce a parser
+pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+    let mut p = Parser::new(sess, stream, None, false);
     p.check_unknown_macro_variable();
     p
 }
@@ -660,7 +655,7 @@ mod tests {
     #[test]
     fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
-        let tts: &[tokenstream::TokenTree] = &tts[..];
+        let tts: &[TokenTree] = &tts[..];
 
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
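
`new_parser_from_tts` keeps its old name and `Vec<TokenTree>` signature because quote! expansions from the existing compiler still call it; internally it just collects the trees into a `TokenStream`. The same conversion works anywhere a vector of trees needs to become a stream, e.g. (a sketch, with `sess` and a span `sp` assumed in scope):

    let tts: Vec<TokenTree> = vec![TokenTree::Token(sp, token::Pound),
                                   TokenTree::Token(sp, token::Not)];
    let stream: TokenStream = tts.into_iter().collect();   // FromIterator<TokenTree>
    let parser = stream_to_parser(&sess, stream);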
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index b12b0c03267..c88b859e036 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -53,7 +53,7 @@ use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
-use tokenstream::{self, Delimited, TokenTree, TokenStream};
+use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 use util::ThinVec;
 
@@ -200,7 +200,7 @@ impl TokenCursorFrame {
             delim: delimited.delim,
             span: sp,
             open_delim: delimited.delim == token::NoDelim,
-            tree_cursor: delimited.tts.iter().cloned().collect::<TokenStream>().into_trees(),
+            tree_cursor: delimited.stream().into_trees(),
             close_delim: delimited.delim == token::NoDelim,
         }
     }
@@ -211,12 +211,14 @@ impl TokenCursor {
         loop {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
-                Delimited { delim: self.frame.delim, tts: Vec::new() }.open_tt(self.frame.span)
+                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+                    .open_tt(self.frame.span)
             } else if let Some(tree) = self.frame.tree_cursor.next() {
                 tree
             } else if !self.frame.close_delim {
                 self.frame.close_delim = true;
-                Delimited { delim: self.frame.delim, tts: Vec::new() }.close_tt(self.frame.span)
+                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+                    .close_tt(self.frame.span)
             } else if let Some(frame) = self.stack.pop() {
                 self.frame = frame;
                 continue
@@ -255,21 +257,23 @@ impl TokenCursor {
             num_of_hashes = cmp::max(num_of_hashes, count);
         }
 
-        let body = TokenTree::Delimited(sp, Rc::new(Delimited {
+        let body = TokenTree::Delimited(sp, Delimited {
             delim: token::Bracket,
-            tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
-                      TokenTree::Token(sp, token::Eq),
-                      TokenTree::Token(sp, token::Literal(
-                          token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
-        }));
+            tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
+                  TokenTree::Token(sp, token::Eq),
+                  TokenTree::Token(sp, token::Literal(
+                      token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
+                .iter().cloned().collect::<TokenStream>().into(),
+        });
 
         self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
             delim: token::NoDelim,
             tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
-                    .iter().cloned().collect()
+                    .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body].iter().cloned().collect()
+                [TokenTree::Token(sp, token::Pound), body]
+                    .iter().cloned().collect::<TokenStream>().into()
             },
         })));
 
@@ -405,7 +409,7 @@ impl From<P<Expr>> for LhsExpr {
 
 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess,
-               tokens: Vec<TokenTree>,
+               tokens: TokenStream,
                directory: Option<Directory>,
                desugar_doc_comments: bool)
                -> Self {
@@ -423,7 +427,7 @@ impl<'a> Parser<'a> {
             token_cursor: TokenCursor {
                 frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
                     delim: token::NoDelim,
-                    tts: tokens,
+                    tts: tokens.into(),
                 }),
                 stack: Vec::new(),
             },
@@ -2098,10 +2102,10 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
         match self.token {
             token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
-                TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()),
+                TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
                 _ => unreachable!(),
             }),
             _ => Err(self.fatal("expected open delimiter")),
@@ -2649,10 +2653,10 @@ impl<'a> Parser<'a> {
                                          self.token_cursor.stack.pop().unwrap());
                 self.span = frame.span;
                 self.bump();
-                return Ok(TokenTree::Delimited(frame.span, Rc::new(Delimited {
+                return Ok(TokenTree::Delimited(frame.span, Delimited {
                     delim: frame.delim,
-                    tts: frame.tree_cursor.original_stream().trees().collect(),
-                })));
+                    tts: frame.tree_cursor.original_stream().into(),
+                }));
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
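
On the parser side, `Delimited` groups and `expect_delimited_token_tree` now traffic in `ThinTokenStream` (the field type is inferred from the `.into()` conversions above): a full `TokenStream` is thinned with `.into()` when stored and recovered with `delimited.stream()` when read. A round-trip sketch, with a span `sp` assumed in scope:

    // Store: collect trees into a TokenStream and thin it into the group.
    let delimited = Delimited {
        delim: token::Bracket,
        tts: [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not)]
            .iter().cloned().collect::<TokenStream>().into(),
    };
    // Read back: recover the full stream and walk its trees.
    let trees: Vec<TokenTree> = delimited.stream().into_trees().collect();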