Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ext/base.rs            | 227
-rw-r--r--  src/libsyntax/ext/expand.rs          |  81
-rw-r--r--  src/libsyntax/ext/proc_macro_shim.rs |   6
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs  |  11
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs     |  35
-rw-r--r--  src/libsyntax/parse/parser.rs        |  27
-rw-r--r--  src/libsyntax/tokenstream.rs         |   7
7 files changed, 362 insertions, 32 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 9d0d74138cd..91742680711 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub use self::SyntaxExtension::*;
+pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT};
 
 use ast::{self, Attribute, Name, PatKind};
 use attr::HasAttrs;
@@ -18,8 +18,9 @@ use errors::DiagnosticBuilder;
 use ext::expand::{self, Invocation, Expansion};
 use ext::hygiene::Mark;
 use ext::tt::macro_rules;
+use fold;
 use parse;
-use parse::parser;
+use parse::parser::{self, Parser};
 use parse::token;
 use parse::token::{InternedString, str_to_ident};
 use ptr::P;
@@ -31,7 +32,8 @@ use feature_gate;
 use std::collections::HashMap;
 use std::path::PathBuf;
 use std::rc::Rc;
-use tokenstream;
+use std::default::Default;
+use tokenstream::{self, TokenStream};
 
 
 #[derive(Debug,Clone)]
@@ -146,6 +148,190 @@ impl Into<Vec<Annotatable>> for Annotatable {
     }
 }
 
+pub trait ProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt,
+                   span: Span,
+                   ts: TokenStream)
+                   -> TokenStream;
+}
+
+impl<F> ProcMacro for F
+    where F: Fn(TokenStream) -> TokenStream
+{
+    fn expand<'cx>(&self,
+                   _ecx: &'cx mut ExtCtxt,
+                   _span: Span,
+                   ts: TokenStream)
+                   -> TokenStream {
+        // FIXME setup implicit context in TLS before calling self.
+        (*self)(ts)
+    }
+}
+
+pub trait AttrProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt,
+                   span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream;
+}
+
+impl<F> AttrProcMacro for F
+    where F: Fn(TokenStream, TokenStream) -> TokenStream
+{
+    fn expand<'cx>(&self,
+                   _ecx: &'cx mut ExtCtxt,
+                   _span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream {
+        // FIXME setup implicit context in TLS before calling self.
+        (*self)(annotation, annotated)
+    }
+}
+
+pub struct TokResult<'a> {
+    pub parser: Parser<'a>,
+    pub span: Span,
+}
+
+impl<'a> TokResult<'a> {
+    // There is quite a lot of overlap here with ParserAnyMacro in ext/tt/macro_rules.rs
+    // We could probably share more code.
+    // FIXME(#36641) Unify TokResult and ParserAnyMacro.
+    fn ensure_complete_parse(&mut self, allow_semi: bool) {
+        let macro_span = &self.span;
+        self.parser.ensure_complete_parse(allow_semi, |parser| {
+            let token_str = parser.this_token_to_string();
+            let msg = format!("macro expansion ignores token `{}` and any following", token_str);
+            let span = parser.span;
+            parser.diagnostic()
+                  .struct_span_err(span, &msg)
+                  .span_note(*macro_span, "caused by the macro expansion here")
+                  .emit();
+        });
+    }
+}
+
+impl<'a> MacResult for TokResult<'a> {
+    fn make_items(mut self: Box<Self>) -> Option<SmallVector<P<ast::Item>>> {
+        if self.parser.sess.span_diagnostic.has_errors() {
+            return Some(SmallVector::zero());
+        }
+
+        let mut items = SmallVector::zero();
+        loop {
+            match self.parser.parse_item() {
+                Ok(Some(item)) => items.push(item),
+                Ok(None) => {
+                    self.ensure_complete_parse(false);
+                    return Some(items);
+                }
+                Err(mut e) => {
+                    e.emit();
+                    return Some(SmallVector::zero());
+                }
+            }
+        }
+    }
+
+    fn make_impl_items(mut self: Box<Self>) -> Option<SmallVector<ast::ImplItem>> {
+        let mut items = SmallVector::zero();
+        loop {
+            if self.parser.token == token::Eof {
+                break;
+            }
+            match self.parser.parse_impl_item() {
+                Ok(item) => items.push(item),
+                Err(mut e) => {
+                    e.emit();
+                    return Some(SmallVector::zero());
+                }
+            }
+        }
+        self.ensure_complete_parse(false);
+        Some(items)
+    }
+
+    fn make_trait_items(mut self: Box<Self>) -> Option<SmallVector<ast::TraitItem>> {
+        let mut items = SmallVector::zero();
+        loop {
+            if self.parser.token == token::Eof {
+                break;
+            }
+            match self.parser.parse_trait_item() {
+                Ok(item) => items.push(item),
+                Err(mut e) => {
+                    e.emit();
+                    return Some(SmallVector::zero());
+                }
+            }
+        }
+        self.ensure_complete_parse(false);
+        Some(items)
+    }
+
+    fn make_expr(mut self: Box<Self>) -> Option<P<ast::Expr>> {
+        match self.parser.parse_expr() {
+            Ok(e) => {
+                self.ensure_complete_parse(true);
+                Some(e)
+            }
+            Err(mut e) => {
+                e.emit();
+                Some(DummyResult::raw_expr(self.span))
+            }
+        }
+    }
+
+    fn make_pat(mut self: Box<Self>) -> Option<P<ast::Pat>> {
+        match self.parser.parse_pat() {
+            Ok(e) => {
+                self.ensure_complete_parse(false);
+                Some(e)
+            }
+            Err(mut e) => {
+                e.emit();
+                Some(P(DummyResult::raw_pat(self.span)))
+            }
+        }
+    }
+
+    fn make_stmts(mut self: Box<Self>) -> Option<SmallVector<ast::Stmt>> {
+        let mut stmts = SmallVector::zero();
+        loop {
+            if self.parser.token == token::Eof {
+                break;
+            }
+            match self.parser.parse_full_stmt(false) {
+                Ok(Some(stmt)) => stmts.push(stmt),
+                Ok(None) => { /* continue */ }
+                Err(mut e) => {
+                    e.emit();
+                    return Some(SmallVector::zero());
+                }
+            }
+        }
+        self.ensure_complete_parse(false);
+        Some(stmts)
+    }
+
+    fn make_ty(mut self: Box<Self>) -> Option<P<ast::Ty>> {
+        match self.parser.parse_ty() {
+            Ok(e) => {
+                self.ensure_complete_parse(false);
+                Some(e)
+            }
+            Err(mut e) => {
+                e.emit();
+                Some(DummyResult::raw_ty(self.span))
+            }
+        }
+    }
+}
+
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
     fn expand<'cx>(&self,
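Note: the blanket impls above mean any plain function or closure with the matching signature already satisfies `ProcMacro` or `AttrProcMacro`, so it can be boxed directly into the new `SyntaxExtension` variants below. A minimal sketch (both expander names are hypothetical, not part of this patch):

    // Function-like expander: covered by the blanket `ProcMacro` impl for
    // `F: Fn(TokenStream) -> TokenStream`.
    fn identity_expand(ts: TokenStream) -> TokenStream {
        ts
    }

    // Attribute-like expander: covered by the blanket `AttrProcMacro` impl for
    // `F: Fn(TokenStream, TokenStream) -> TokenStream`.
    fn keep_item(_attr: TokenStream, item: TokenStream) -> TokenStream {
        item
    }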
@@ -439,11 +625,22 @@ pub enum SyntaxExtension {
     /// based upon it.
     ///
     /// `#[derive(...)]` is a `MultiItemDecorator`.
-    MultiDecorator(Box<MultiItemDecorator + 'static>),
+    ///
+    /// Prefer ProcMacro or MultiModifier since they are more flexible.
+    MultiDecorator(Box<MultiItemDecorator>),
 
     /// A syntax extension that is attached to an item and modifies it
-    /// in-place. More flexible version than Modifier.
-    MultiModifier(Box<MultiItemModifier + 'static>),
+    /// in-place. Also allows decoration, i.e., creating new items.
+    MultiModifier(Box<MultiItemModifier>),
+
+    /// A function-like procedural macro. TokenStream -> TokenStream.
+    ProcMacro(Box<ProcMacro>),
+
+    /// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream.
+    /// The first TokenStream is the attribute, the second is the annotated item.
+    /// Allows modification of the input items and adding new items, similar to
+    /// MultiModifier, but uses TokenStreams, rather than AST nodes.
+    AttrProcMacro(Box<AttrProcMacro>),
 
     /// A normal, function-like syntax extension.
     ///
@@ -451,12 +648,12 @@ pub enum SyntaxExtension {
     ///
     /// The `bool` dictates whether the contents of the macro can
     /// directly use `#[unstable]` things (true == yes).
-    NormalTT(Box<TTMacroExpander + 'static>, Option<Span>, bool),
+    NormalTT(Box<TTMacroExpander>, Option<Span>, bool),
 
     /// A function-like syntax extension that has an extra ident before
     /// the block.
     ///
-    IdentTT(Box<IdentMacroExpander + 'static>, Option<Span>, bool),
+    IdentTT(Box<IdentMacroExpander>, Option<Span>, bool),
 }
 
 pub type NamedSyntaxExtension = (Name, SyntaxExtension);
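The expanders sketched earlier could then be registered as named extensions; a hedged example using the `NamedSyntaxExtension` alias (the names and `token::intern` calls are illustrative, and the surrounding registration machinery is outside this diff):

    // Assuming `identity_expand` and `keep_item` from the earlier sketch.
    let bang: NamedSyntaxExtension =
        (token::intern("identity"), SyntaxExtension::ProcMacro(Box::new(identity_expand)));
    let attr: NamedSyntaxExtension =
        (token::intern("keep_item"), SyntaxExtension::AttrProcMacro(Box::new(keep_item)));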
@@ -817,3 +1014,17 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
     }
     Some(es)
 }
+
+pub struct ChangeSpan {
+    pub span: Span
+}
+
+impl Folder for ChangeSpan {
+    fn new_span(&mut self, _sp: Span) -> Span {
+        self.span
+    }
+
+    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+        fold::noop_fold_mac(mac, self)
+    }
+}
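`ChangeSpan` is a `Folder` that overwrites every span in a fragment with one fixed span; the expansion code below uses it to stamp the macro call site onto proc-macro output, since the re-parsed tokens otherwise carry spans from the synthetic filemap. A minimal usage sketch (the `expr` and `call_site` bindings are assumed):

    // Re-span an expanded expression so diagnostics point at the call site.
    let mut respan = ChangeSpan { span: call_site };
    let expr = respan.fold_expr(expr); // default Folder methods walk the node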
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 5b291fb8dac..18b32e9d0b6 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -21,9 +21,12 @@ use ext::base::*;
 use feature_gate::{self, Features};
 use fold;
 use fold::*;
+use parse::{ParseSess, lexer};
+use parse::parser::Parser;
 use parse::token::{intern, keywords};
+use print::pprust;
 use ptr::P;
-use tokenstream::TokenTree;
+use tokenstream::{TokenTree, TokenStream};
 use util::small_vector::SmallVector;
 use visit::Visitor;
 
@@ -315,6 +318,20 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 items.push(item);
                 kind.expect_from_annotatables(items)
             }
+            SyntaxExtension::AttrProcMacro(ref mac) => {
+                let attr_toks = TokenStream::from_tts(tts_for_attr(&attr, &self.cx.parse_sess));
+                let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
+
+                let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
+                let parser = self.cx.new_parser_from_tts(&tok_result.to_tts());
+                let result = Box::new(TokResult { parser: parser, span: attr.span });
+
+                kind.make_from(result).unwrap_or_else(|| {
+                    let msg = format!("macro could not be expanded into {} position", kind.name());
+                    self.cx.span_err(attr.span, &msg);
+                    kind.dummy(attr.span)
+                })
+            }
             _ => unreachable!(),
         }
     }
@@ -384,11 +401,41 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.make_from(expander.expand(self.cx, span, ident, marked_tts, attrs))
             }
 
-            MultiDecorator(..) | MultiModifier(..) => {
+            MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
                 self.cx.span_err(path.span,
                                  &format!("`{}` can only be used in attributes", extname));
                 return kind.dummy(span);
             }
+
+            SyntaxExtension::ProcMacro(ref expandfun) => {
+                if ident.name != keywords::Invalid.name() {
+                    let msg =
+                        format!("macro {}! expects no ident argument, given '{}'", extname, ident);
+                    self.cx.span_err(path.span, &msg);
+                    return kind.dummy(span);
+                }
+
+                self.cx.bt_push(ExpnInfo {
+                    call_site: span,
+                    callee: NameAndSpan {
+                        format: MacroBang(extname),
+                        // FIXME procedural macros do not have proper span info
+                        // yet, when they do, we should use it here.
+                        span: None,
+                        // FIXME probably want to follow macro_rules macros here.
+                        allow_internal_unstable: false,
+                    },
+                });
+
+
+                let tok_result = expandfun.expand(self.cx,
+                                                  span,
+                                                  TokenStream::from_tts(marked_tts));
+                let parser = self.cx.new_parser_from_tts(&tok_result.to_tts());
+                let result = Box::new(TokResult { parser: parser, span: span });
+                // FIXME better span info.
+                kind.make_from(result).map(|i| i.fold_with(&mut ChangeSpan { span: span }))
+            }
         };
 
         let expanded = if let Some(expanded) = opt_expanded {
@@ -460,6 +507,36 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
     }
 }
 
+// These are pretty nasty. Ideally, we would keep the tokens around, linked from
+// the AST. However, we don't, so we need to create new ones. Since the item might
+// have come from a macro expansion (possibly only in part), we can't use the
+// existing codemap.
+//
+// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
+// string, which we then re-tokenise (double yuck), but first we have to patch
+// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
+fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    let text = match *item {
+        Annotatable::Item(ref i) => pprust::item_to_string(i),
+        Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
+        Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
+    };
+    string_to_tts(text, parse_sess)
+}
+
+fn tts_for_attr(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    string_to_tts(pprust::attr_to_string(attr), parse_sess)
+}
+
+fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    let filemap = parse_sess.codemap()
+                            .new_filemap(String::from("<macro expansion>"), None, text);
+
+    let lexer = lexer::StringReader::new(&parse_sess.span_diagnostic, filemap);
+    let mut parser = Parser::new(parse_sess, Vec::new(), Box::new(lexer));
+    panictry!(parser.parse_all_token_trees())
+}
+
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
     fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
         let mut expr = self.cfg.configure_expr(expr).unwrap();
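For illustration, the round trip performed by the helpers above, written out linearly (the `item` binding is assumed; the real callers are in expand_attr_invoc further up in this file):

    // Pretty-print the item, re-lex the text in a fresh "<macro expansion>"
    // filemap appended to the codemap, and package the result as a TokenStream.
    let text = pprust::item_to_string(&item);
    let tts = string_to_tts(text, &self.cx.parse_sess);
    let stream = TokenStream::from_tts(tts);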
diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs
index fa37e9b54e4..dc3a01f41bc 100644
--- a/src/libsyntax/ext/proc_macro_shim.rs
+++ b/src/libsyntax/ext/proc_macro_shim.rs
@@ -24,7 +24,9 @@ use ext::base::*;
 
 /// Take an `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses
 /// the TokenStream as a block and returns it as an `Expr`.
-pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStream)
+pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt,
+                                sp: Span,
+                                output: TokenStream)
                                 -> Box<MacResult + 'cx> {
     let parser = cx.new_parser_from_tts(&output.to_tts());
 
@@ -60,7 +62,7 @@ pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStr
 }
 
 pub mod prelude {
-    pub use ext::proc_macro_shim::build_block_emitter;
+    pub use super::build_block_emitter;
     pub use ast::Ident;
     pub use codemap::{DUMMY_SP, Span};
     pub use ext::base::{ExtCtxt, MacResult};
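A hedged sketch of calling `build_block_emitter` from inside an expander, assuming `cx`, `sp`, and an `output` token stream whose tokens form a block:

    // The shim parses `output` as a block and hands it back as an expression.
    let result = build_block_emitter(cx, sp, output);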
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index da82c9ffab1..3746a51d359 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -49,22 +49,19 @@ impl<'a> ParserAnyMacro<'a> {
     /// allowed to be there.
     fn ensure_complete_parse(&self, allow_semi: bool, context: &str) {
         let mut parser = self.parser.borrow_mut();
-        if allow_semi && parser.token == token::Semi {
-            parser.bump();
-        }
-        if parser.token != token::Eof {
+        parser.ensure_complete_parse(allow_semi, |parser| {
             let token_str = parser.this_token_to_string();
             let msg = format!("macro expansion ignores token `{}` and any \
                                following",
                               token_str);
             let span = parser.span;
-            let mut err = parser.diagnostic().struct_span_err(span, &msg[..]);
+            let mut err = parser.diagnostic().struct_span_err(span, &msg);
             let msg = format!("caused by the macro expansion here; the usage \
                                of `{}!` is likely invalid in {} context",
                                self.macro_ident, context);
-            err.span_note(self.site_span, &msg[..])
+            err.span_note(self.site_span, &msg)
                .emit();
-        }
+        });
     }
 }
 
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 9e9ea096460..6c0e2425d37 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -85,6 +85,12 @@ pub struct StringReader<'a> {
     /// The last character to be read
     pub curr: Option<char>,
     pub filemap: Rc<syntax_pos::FileMap>,
+    /// If Some, stop reading the source at this position (inclusive).
+    pub terminator: Option<BytePos>,
+    /// Whether to record new-lines in filemap. This is only necessary the first
+    /// time a filemap is lexed. If part of a filemap is being re-lexed, this
+    /// should be set to false.
+    pub save_new_lines: bool,
     // cached:
     pub peek_tok: token::Token,
     pub peek_span: Span,
@@ -96,7 +102,14 @@ pub struct StringReader<'a> {
 
 impl<'a> Reader for StringReader<'a> {
     fn is_eof(&self) -> bool {
-        self.curr.is_none()
+        if self.curr.is_none() {
+            return true;
+        }
+
+        match self.terminator {
+            Some(t) => self.pos > t,
+            None => false,
+        }
     }
     /// Return the next token. EFFECT: advances the string_reader.
     fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
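A hedged sketch of how a caller might use the new fields to re-lex part of an already-registered filemap (no such caller appears in this diff; `sess`, `filemap`, and `end` are assumed bindings):

    // Stop lexing once `pos` passes `end`, and don't re-record line starts
    // that the first pass over this filemap already saved.
    let mut reader = StringReader::new(&sess.span_diagnostic, filemap.clone());
    reader.terminator = Some(end);
    reader.save_new_lines = false;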
@@ -164,6 +177,14 @@ impl<'a> StringReader<'a> {
     pub fn new_raw<'b>(span_diagnostic: &'b Handler,
                        filemap: Rc<syntax_pos::FileMap>)
                        -> StringReader<'b> {
+        let mut sr = StringReader::new_raw_internal(span_diagnostic, filemap);
+        sr.bump();
+        sr
+    }
+
+    fn new_raw_internal<'b>(span_diagnostic: &'b Handler,
+                            filemap: Rc<syntax_pos::FileMap>)
+                            -> StringReader<'b> {
         if filemap.src.is_none() {
             span_diagnostic.bug(&format!("Cannot lex filemap \
                                           without source: {}",
@@ -172,21 +193,21 @@ impl<'a> StringReader<'a> {
 
         let source_text = (*filemap.src.as_ref().unwrap()).clone();
 
-        let mut sr = StringReader {
+        StringReader {
             span_diagnostic: span_diagnostic,
             pos: filemap.start_pos,
             last_pos: filemap.start_pos,
             col: CharPos(0),
             curr: Some('\n'),
             filemap: filemap,
+            terminator: None,
+            save_new_lines: true,
             // dummy values; not read
             peek_tok: token::Eof,
             peek_span: syntax_pos::DUMMY_SP,
             source_text: source_text,
             fatal_errs: Vec::new(),
-        };
-        sr.bump();
-        sr
+        }
     }
 
     pub fn new<'b>(span_diagnostic: &'b Handler,
@@ -405,7 +426,9 @@ impl<'a> StringReader<'a> {
             self.curr = Some(ch);
             self.col = self.col + CharPos(1);
             if last_char == '\n' {
-                self.filemap.next_line(self.last_pos);
+                if self.save_new_lines {
+                    self.filemap.next_line(self.last_pos);
+                }
                 self.col = CharPos(0);
             }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 5cd4a046577..23085fadc5e 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -3872,15 +3872,17 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_stmt_(&mut self, macro_expanded: bool) -> Option<Stmt> {
-        self.parse_stmt_without_recovery(macro_expanded).unwrap_or_else(|mut e| {
+    fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
+        self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
             e.emit();
             self.recover_stmt_(SemiColonMode::Break);
             None
         })
     }
 
-    fn parse_stmt_without_recovery(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
+    fn parse_stmt_without_recovery(&mut self,
+                                   macro_legacy_warnings: bool)
+                                   -> PResult<'a, Option<Stmt>> {
         maybe_whole!(Some deref self, NtStmt);
 
         let attrs = self.parse_outer_attributes()?;
@@ -3950,7 +3952,7 @@ impl<'a> Parser<'a> {
                 // We used to incorrectly stop parsing macro-expanded statements here.
                 // If the next token will be an error anyway but could have parsed with the
                 // earlier behavior, stop parsing here and emit a warning to avoid breakage.
-                else if macro_expanded && self.token.can_begin_expr() && match self.token {
+                else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
                     // These can continue an expression, so we can't stop parsing and warn.
                     token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
                     token::BinOp(token::Minus) | token::BinOp(token::Star) |
@@ -4125,8 +4127,8 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a statement, including the trailing semicolon.
-    pub fn parse_full_stmt(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
-        let mut stmt = match self.parse_stmt_(macro_expanded) {
+    pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+        let mut stmt = match self.parse_stmt_(macro_legacy_warnings) {
             Some(stmt) => stmt,
             None => return Ok(None),
         };
@@ -4146,7 +4148,7 @@ impl<'a> Parser<'a> {
             }
             StmtKind::Local(..) => {
                 // We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
-                if macro_expanded && self.token != token::Semi {
+                if macro_legacy_warnings && self.token != token::Semi {
                     self.warn_missing_semicolon();
                 } else {
                     self.expect_one_of(&[token::Semi], &[])?;
@@ -6169,4 +6171,15 @@ impl<'a> Parser<'a> {
             _ =>  Err(self.fatal("expected string literal"))
         }
     }
+
+    pub fn ensure_complete_parse<F>(&mut self, allow_semi: bool, on_err: F)
+        where F: FnOnce(&Parser)
+    {
+        if allow_semi && self.token == token::Semi {
+            self.bump();
+        }
+        if self.token != token::Eof {
+            on_err(self);
+        }
+    }
 }
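Both `ParserAnyMacro` and the new `TokResult` now drive this helper with their own error callback; a minimal sketch of a caller (the message text is illustrative):

    // After parsing the expected fragment, report anything left over.
    parser.ensure_complete_parse(/* allow_semi */ false, |parser| {
        let msg = format!("unexpected token `{}` after expansion",
                          parser.this_token_to_string());
        parser.diagnostic().struct_span_err(parser.span, &msg).emit();
    });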
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 7b1df6f0e97..b35b4617ea1 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -33,6 +33,7 @@ use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::lexer;
 use parse;
 use parse::token::{self, Token, Lit, Nonterminal};
+use print::pprust;
 
 use std::fmt;
 use std::iter::*;
@@ -781,6 +782,12 @@ impl TokenStream {
     }
 }
 
+impl fmt::Display for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str(&pprust::tts_to_string(&self.to_tts()))
+    }
+}
+
 // FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the
 // next leaf's iterator when the current one is exhausted.
 pub struct Iter<'a> {
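With the `Display` impl above, a `TokenStream` renders back to source text via the pretty printer; a small hedged example (`tts` is an assumed `Vec<TokenTree>`):

    // `to_string()`/`{}` formatting now produce the same text as
    // `pprust::tts_to_string(&ts.to_tts())`.
    let ts = TokenStream::from_tts(tts);
    println!("expanded to: {}", ts);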