about summary refs log tree commit diff
path: root/src/libsyntax/ext
diff options
context:
space:
mode:
author: Jeffrey Seyfried <jeffrey.seyfried@gmail.com> 2017-02-21 05:05:59 +0000
committer: Jeffrey Seyfried <jeffrey.seyfried@gmail.com> 2017-03-03 02:15:37 +0000
commit: f6eaaf350ea683ae8b33b4a79422ad1a10ea0987 (patch)
tree: 06ec138259c84d8ec6d46b33402d6470263a8880 /src/libsyntax/ext
parent: 8cd0c0885f841c9bfd0c330e3da21363427010e4 (diff)
download: rust-f6eaaf350ea683ae8b33b4a79422ad1a10ea0987.tar.gz
rust-f6eaaf350ea683ae8b33b4a79422ad1a10ea0987.zip
Integrate `TokenStream`.
Diffstat (limited to 'src/libsyntax/ext')
-rw-r--r--src/libsyntax/ext/base.rs20
-rw-r--r--src/libsyntax/ext/expand.rs39
-rw-r--r--src/libsyntax/ext/placeholders.rs3
-rw-r--r--src/libsyntax/ext/quote.rs39
-rw-r--r--src/libsyntax/ext/tt/macro_parser.rs32
-rw-r--r--src/libsyntax/ext/tt/macro_rules.rs21
-rw-r--r--src/libsyntax/ext/tt/quoted.rs10
-rw-r--r--src/libsyntax/ext/tt/transcribe.rs36
8 files changed, 93 insertions, 107 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index c7d2f0cd31d..e242cf2777f 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -188,10 +188,7 @@ impl<F> AttrProcMacro for F
 
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx>;
 }
 
@@ -200,15 +197,11 @@ pub type MacroExpanderFn =
                 -> Box<MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
-                          -> Box<MacResult+'cx>
+    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<MacResult+'cx>
 {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, token_tree)
+        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
     }
 }
 
@@ -654,9 +647,8 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self, true)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-        -> parser::Parser<'a> {
-        parse::tts_to_parser(self.parse_sess, tts.to_vec())
+    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
     }
     pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 8107696b8b9..f1662284a88 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{self, Block, Ident, Mac_, PatKind};
+use ast::{self, Block, Ident, PatKind};
 use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
 use attr::{self, HasAttrs};
 use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
@@ -20,16 +20,15 @@ use ext::placeholders::{placeholder, PlaceholderExpander};
 use feature_gate::{self, Features, is_builtin_attr};
 use fold;
 use fold::*;
+use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token};
 use parse::parser::Parser;
-use parse::token;
-use parse::{ParseSess, DirectoryOwnership, PResult, filemap_to_tts};
 use print::pprust;
 use ptr::P;
 use std_inject;
 use symbol::Symbol;
 use symbol::keywords;
 use syntax_pos::{self, Span, ExpnId};
-use tokenstream::{TokenTree, TokenStream};
+use tokenstream::TokenStream;
 use util::small_vector::SmallVector;
 use visit::Visitor;
 
@@ -462,8 +461,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect();
-                let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect();
+                let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
+                let item_toks = stream_for_item(&item, &self.cx.parse_sess);
 
                 let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
                 self.parse_expansion(tok_result, kind, name, attr.span)
@@ -487,11 +486,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             InvocationKind::Bang { mac, ident, span } => (mac, ident, span),
             _ => unreachable!(),
         };
-        let Mac_ { path, tts, .. } = mac.node;
+        let path = &mac.node.path;
 
         let extname = path.segments.last().unwrap().identifier.name;
         let ident = ident.unwrap_or(keywords::Invalid.ident());
-        let marked_tts = mark_tts(&tts, mark);
+        let marked_tts = mark_tts(mac.node.stream(), mark);
         let opt_expanded = match *ext {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
                 if ident.name != keywords::Invalid.name() {
@@ -510,7 +509,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });
 
-                kind.make_from(expandfun.expand(self.cx, span, &marked_tts))
+                kind.make_from(expandfun.expand(self.cx, span, marked_tts))
             }
 
             IdentTT(ref expander, tt_span, allow_internal_unstable) => {
@@ -529,7 +528,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     }
                 });
 
-                kind.make_from(expander.expand(self.cx, span, ident, marked_tts))
+                let input: Vec<_> = marked_tts.into_trees().collect();
+                kind.make_from(expander.expand(self.cx, span, ident, input))
             }
 
             MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
@@ -563,8 +563,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });
 
-                let toks = marked_tts.into_iter().collect();
-                let tok_result = expandfun.expand(self.cx, span, toks);
+                let tok_result = expandfun.expand(self.cx, span, marked_tts);
                 Some(self.parse_expansion(tok_result, kind, extname, span))
             }
         };
@@ -821,23 +820,23 @@ fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
 // Therefore, we must use the pretty printer (yuck) to turn the AST node into a
 // string, which we then re-tokenise (double yuck), but first we have to patch
 // the pretty-printed string on to the end of the existing codemap (infinity-yuck).
-fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
     let text = match *item {
         Annotatable::Item(ref i) => pprust::item_to_string(i),
         Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
         Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
     };
-    string_to_tts(text, parse_sess)
+    string_to_stream(text, parse_sess)
 }
 
-fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream {
     use ast::MetaItemKind::*;
     use print::pp::Breaks;
     use print::pprust::PrintState;
 
     let token_string = match attr.value.node {
         // For `#[foo]`, an empty token
-        Word => return vec![],
+        Word => return TokenStream::empty(),
         // For `#[foo(bar, baz)]`, returns `(bar, baz)`
         List(ref items) => pprust::to_string(|s| {
             s.popen()?;
@@ -853,12 +852,12 @@ fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<Token
         }),
     };
 
-    string_to_tts(token_string, parse_sess)
+    string_to_stream(token_string, parse_sess)
 }
 
-fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
     let filename = String::from("<macro expansion>");
-    filemap_to_tts(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
+    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
 }
 
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
@@ -1162,6 +1161,6 @@ impl Folder for Marker {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
+pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index 0636a78b215..e2fb1946e90 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -13,6 +13,7 @@ use codemap::{DUMMY_SP, dummy_spanned};
 use ext::base::ExtCtxt;
 use ext::expand::{Expansion, ExpansionKind};
 use ext::hygiene::Mark;
+use tokenstream::TokenStream;
 use fold::*;
 use ptr::P;
 use symbol::keywords;
@@ -26,7 +27,7 @@ pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
     fn mac_placeholder() -> ast::Mac {
         dummy_spanned(ast::Mac_ {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: Vec::new(),
+            tts: TokenStream::empty().into(),
         })
     }
 
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index b1b69c80f4d..69ff726e719 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -16,7 +16,7 @@ use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
 use parse::token;
 use ptr::P;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
 
 
 /// Quasiquoting works via token trees.
@@ -35,7 +35,7 @@ pub mod rt {
     use std::rc::Rc;
     use symbol::Symbol;
 
-    use tokenstream::{self, TokenTree};
+    use tokenstream::{self, TokenTree, TokenStream};
 
     pub use parse::new_parser_from_tts;
     pub use syntax_pos::{BytePos, Span, DUMMY_SP};
@@ -227,10 +227,10 @@ pub mod rt {
             if self.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
                 delim: token::Bracket,
-                tts: self.value.to_tokens(cx),
-            })));
+                tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
+            }));
             r
         }
     }
@@ -244,10 +244,10 @@ pub mod rt {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
                 delim: token::Paren,
-                tts: vec![],
-            }))]
+                tts: TokenStream::empty().into(),
+            })]
         }
     }
 
@@ -355,14 +355,15 @@ pub mod rt {
         }
 
         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-            parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
+            let source_name = "<quote expansion>".to_owned();
+            parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
+                .into_trees().collect()
         }
     }
 }
 
 // Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
 pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
-    use std::rc::Rc;
     use tokenstream::Delimited;
 
     let mut results = Vec::new();
@@ -373,8 +374,10 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
                 results.push(::std::mem::replace(&mut result, Vec::new()));
             }
             TokenTree::Token(span, token::CloseDelim(delim)) => {
-                let tree =
-                    TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+                let tree = TokenTree::Delimited(span, Delimited {
+                    delim: delim,
+                    tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
+                });
                 result = results.pop().unwrap();
                 result.push(tree);
             }
@@ -747,7 +750,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         },
         TokenTree::Delimited(span, ref delimed) => {
             let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
-            stmts.extend(statements_mk_tts(cx, &delimed.tts));
+            stmts.extend(statements_mk_tts(cx, delimed.stream()));
             stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
             stmts
         }
@@ -810,14 +813,14 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
     vec![stmt_let_sp, stmt_let_tt]
 }
 
-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: TokenStream) -> Vec<ast::Stmt> {
     let mut ss = Vec::new();
     let mut quoted = false;
-    for tt in tts {
-        quoted = match *tt {
+    for tt in tts.into_trees() {
+        quoted = match tt {
             TokenTree::Token(_, token::Dollar) if !quoted => true,
             _ => {
-                ss.extend(statements_mk_tt(cx, tt, quoted));
+                ss.extend(statements_mk_tt(cx, &tt, quoted));
                 false
             }
         }
@@ -829,7 +832,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
 
     let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..]));
+    vector.extend(statements_mk_tts(cx, tts.iter().cloned().collect()));
     vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
     let block = cx.expr_block(cx.block(sp, vector));
     let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 6ab5123bc87..b9cb3d82d4f 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -82,13 +82,13 @@ use ast::Ident;
 use syntax_pos::{self, BytePos, mk_sp, Span};
 use codemap::Spanned;
 use errors::FatalError;
-use ext::tt::quoted;
+use ext::tt::quoted::{self, TokenTree};
 use parse::{Directory, ParseSess};
 use parse::parser::{PathStyle, Parser};
 use parse::token::{self, DocComment, Token, Nonterminal};
 use print::pprust;
 use symbol::keywords;
-use tokenstream::TokenTree;
+use tokenstream::TokenStream;
 use util::small_vector::SmallVector;
 
 use std::mem;
@@ -101,8 +101,8 @@ use std::collections::hash_map::Entry::{Vacant, Occupied};
 
 #[derive(Clone)]
 enum TokenTreeOrTokenTreeVec {
-    Tt(quoted::TokenTree),
-    TtSeq(Vec<quoted::TokenTree>),
+    Tt(TokenTree),
+    TtSeq(Vec<TokenTree>),
 }
 
 impl TokenTreeOrTokenTreeVec {
@@ -113,7 +113,7 @@ impl TokenTreeOrTokenTreeVec {
         }
     }
 
-    fn get_tt(&self, index: usize) -> quoted::TokenTree {
+    fn get_tt(&self, index: usize) -> TokenTree {
         match *self {
             TtSeq(ref v) => v[index].clone(),
             Tt(ref tt) => tt.get_tt(index),
@@ -144,9 +144,7 @@ struct MatcherPos {
 
 pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 
-pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
-    use self::quoted::TokenTree;
-
+pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
             TokenTree::Sequence(_, ref seq) => {
@@ -163,7 +161,7 @@ pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
     })
 }
 
-fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     let match_idx_hi = count_names(&ms[..]);
     let matches = create_matches(match_idx_hi);
     Box::new(MatcherPos {
@@ -202,10 +200,8 @@ pub enum NamedMatch {
     MatchedNonterminal(Rc<Nonterminal>)
 }
 
-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I)
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[TokenTree], mut res: I)
                                              -> NamedParseResult {
-    use self::quoted::TokenTree;
-
     fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
              ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
              -> Result<(), (syntax_pos::Span, String)> {
@@ -289,9 +285,8 @@ fn inner_parse_loop(sess: &ParseSess,
                     eof_eis: &mut SmallVector<Box<MatcherPos>>,
                     bb_eis: &mut SmallVector<Box<MatcherPos>>,
                     token: &Token,
-                    span: &syntax_pos::Span) -> ParseResult<()> {
-    use self::quoted::TokenTree;
-
+                    span: &syntax_pos::Span)
+                    -> ParseResult<()> {
     while let Some(mut ei) = cur_eis.pop() {
         // When unzipped trees end, remove them
         while ei.idx >= ei.top_elts.len() {
@@ -419,13 +414,8 @@ fn inner_parse_loop(sess: &ParseSess,
     Success(())
 }
 
-pub fn parse(sess: &ParseSess,
-             tts: Vec<TokenTree>,
-             ms: &[quoted::TokenTree],
-             directory: Option<Directory>)
+pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Option<Directory>)
              -> NamedParseResult {
-    use self::quoted::TokenTree;
-
     let mut parser = Parser::new(sess, tts, directory, true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
     let mut next_eis = Vec::new(); // or proceed normally
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 193c06707c7..1d386c1a3ac 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -22,9 +22,8 @@ use parse::{Directory, ParseSess};
 use parse::parser::Parser;
 use parse::token::{self, NtTT};
 use parse::token::Token::*;
-use print;
 use symbol::Symbol;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
@@ -68,7 +67,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
     fn expand<'cx>(&self,
                    cx: &'cx mut ExtCtxt,
                    sp: Span,
-                   arg: &[TokenTree])
+                   input: TokenStream)
                    -> Box<MacResult+'cx> {
         if !self.valid {
             return DummyResult::any(sp);
@@ -76,7 +75,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
         generic_extension(cx,
                           sp,
                           self.name,
-                          arg,
+                          input,
                           &self.lhses,
                           &self.rhses)
     }
@@ -86,14 +85,12 @@ impl TTMacroExpander for MacroRulesMacroExpander {
 fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
-                          arg: &[TokenTree],
+                          arg: TokenStream,
                           lhses: &[quoted::TokenTree],
                           rhses: &[quoted::TokenTree])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
-        println!("{}! {{ {} }}",
-                 name,
-                 print::pprust::tts_to_string(arg));
+        println!("{}! {{ {} }}", name, arg);
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -106,7 +103,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             _ => cx.span_bug(sp, "malformed macro lhs")
         };
 
-        match TokenTree::parse(cx, lhs_tt, arg) {
+        match TokenTree::parse(cx, lhs_tt, arg.clone()) {
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
@@ -186,7 +183,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     ];
 
     // Parse the macro_rules! invocation
-    let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
+    let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
         Success(m) => m,
         Failure(sp, tok) => {
             let s = parse_failure_msg(tok);
@@ -205,7 +202,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
+                        let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
                         valid &= check_lhs_nt_follows(sess, &tt);
                         return tt;
                     }
@@ -221,7 +218,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
+                        return quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
index 530824b2834..d56859d805c 100644
--- a/src/libsyntax/ext/tt/quoted.rs
+++ b/src/libsyntax/ext/tt/quoted.rs
@@ -124,10 +124,10 @@ impl TokenTree {
     }
 }
 
-pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
+pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess)
              -> Vec<TokenTree> {
     let mut result = Vec::new();
-    let mut trees = input.iter().cloned();
+    let mut trees = input.trees();
     while let Some(tree) = trees.next() {
         let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
         match tree {
@@ -161,13 +161,13 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
 {
     match tree {
         tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
-            Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
+            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
                 if delimited.delim != token::Paren {
                     let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                     let msg = format!("expected `(`, found `{}`", tok);
                     sess.span_diagnostic.span_err(span, &msg);
                 }
-                let sequence = parse(&delimited.tts, expect_matchers, sess);
+                let sequence = parse(delimited.tts.into(), expect_matchers, sess);
                 let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
                 let name_captures = macro_parser::count_names(&sequence);
                 TokenTree::Sequence(span, Rc::new(SequenceRepetition {
@@ -197,7 +197,7 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
         tokenstream::TokenTree::Delimited(span, delimited) => {
             TokenTree::Delimited(span, Rc::new(Delimited {
                 delim: delimited.delim,
-                tts: parse(&delimited.tts, expect_matchers, sess),
+                tts: parse(delimited.tts.into(), expect_matchers, sess),
             }))
         }
     }
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 90f64a5208f..24004492be2 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -14,7 +14,7 @@ use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
 use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
 
 use std::rc::Rc;
@@ -66,11 +66,11 @@ impl Iterator for Frame {
 pub fn transcribe(sp_diag: &Handler,
                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
-                  -> Vec<TokenTree> {
+                  -> TokenStream {
     let mut stack = SmallVector::one(Frame::new(src));
     let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
     let mut repeats = Vec::new();
-    let mut result = Vec::new();
+    let mut result: Vec<TokenStream> = Vec::new();
     let mut result_stack = Vec::new();
 
     loop {
@@ -84,8 +84,11 @@ pub fn transcribe(sp_diag: &Handler,
                     *idx = 0;
                     if let Some(sep) = sep.clone() {
                         // repeat same span, I guess
-                        let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP);
-                        result.push(TokenTree::Token(prev_span, sep));
+                        let prev_span = match result.last() {
+                            Some(stream) => stream.trees().next().unwrap().span(),
+                            None => DUMMY_SP,
+                        };
+                        result.push(TokenTree::Token(prev_span, sep).into());
                     }
                     continue
                 }
@@ -97,14 +100,14 @@ pub fn transcribe(sp_diag: &Handler,
                 }
                 Frame::Delimited { forest, span, .. } => {
                     if result_stack.is_empty() {
-                        return result;
+                        return TokenStream::concat(result);
                     }
-                    let tree = TokenTree::Delimited(span, Rc::new(Delimited {
+                    let tree = TokenTree::Delimited(span, Delimited {
                         delim: forest.delim,
-                        tts: result,
-                    }));
+                        tts: TokenStream::concat(result).into(),
+                    });
                     result = result_stack.pop().unwrap();
-                    result.push(tree);
+                    result.push(tree.into());
                 }
             }
             continue
@@ -148,19 +151,20 @@ pub fn transcribe(sp_diag: &Handler,
             // FIXME #2887: think about span stuff here
             quoted::TokenTree::Token(sp, SubstNt(ident)) => {
                 match lookup_cur_matched(ident, &interpolations, &repeats) {
-                    None => result.push(TokenTree::Token(sp, SubstNt(ident))),
+                    None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
                     Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                         match **nt {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
                             NtIdent(ref sn) => {
-                                result.push(TokenTree::Token(sn.span, token::Ident(sn.node)));
+                                let token = TokenTree::Token(sn.span, token::Ident(sn.node));
+                                result.push(token.into());
                             }
-                            NtTT(ref tt) => result.push(tt.clone()),
+                            NtTT(ref tt) => result.push(tt.clone().into()),
                             _ => {
-                                // FIXME(pcwalton): Bad copy
-                                result.push(TokenTree::Token(sp, token::Interpolated(nt.clone())));
+                                let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                                result.push(token.into());
                             }
                         }
                     } else {
@@ -174,7 +178,7 @@ pub fn transcribe(sp_diag: &Handler,
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                 result_stack.push(mem::replace(&mut result, Vec::new()));
             }
-            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
+            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()),
             quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
     }