about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
author: Jeffrey Seyfried <jeffrey.seyfried@gmail.com> 2016-11-03 10:44:25 +0000
committer: Jeffrey Seyfried <jeffrey.seyfried@gmail.com> 2016-11-03 23:48:24 +0000
commit: 7ae083383d1a88f1d76e51297a88c2a423aaa3d1 (patch)
tree: 8fd511c7f7531dc1c2bf4fcdb12b33689ffb1f7e /src/libsyntax
parent: e2b3fec778453d06be6a07494eeaa66da57e4f82 (diff)
download: rust-7ae083383d1a88f1d76e51297a88c2a423aaa3d1.tar.gz
download: rust-7ae083383d1a88f1d76e51297a88c2a423aaa3d1.zip
Move doc comment desugaring into the parser.
Diffstat (limited to 'src/libsyntax')
-rw-r--r--src/libsyntax/ext/tt/macro_parser.rs2
-rw-r--r--src/libsyntax/ext/tt/transcribe.rs28
-rw-r--r--src/libsyntax/parse/lexer/mod.rs2
-rw-r--r--src/libsyntax/parse/parser.rs11
-rw-r--r--src/libsyntax/tokenstream.rs6
5 files changed, 16 insertions, 33 deletions
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 2ea01599006..fdec7a4c732 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -279,7 +279,7 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
 }
 
 pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
-    let mut parser = Parser::new(sess, Box::new(rdr));
+    let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, parser.span.lo));
 
     loop {
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 9ca3b16b74e..96972f4dc88 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -12,9 +12,7 @@ use self::LockstepIterSize::*;
 use ast::Ident;
 use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, NtIdent};
-use parse::token;
+use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
 use parse::lexer::TokenAndSpan;
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{self, TokenTree};
@@ -48,7 +46,6 @@ pub struct TtReader<'a> {
     pub cur_span: Span,
     pub next_tok: Option<TokenAndSpan>,
     /// Transform doc comments. Only useful in macro invocations
-    pub desugar_doc_comments: bool,
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
 }
 
@@ -59,20 +56,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
                      interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                      src: Vec<tokenstream::TokenTree>)
                      -> TtReader {
-    new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
-}
-
-/// The extra `desugar_doc_comments` flag enables reading doc comments
-/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
-///
-/// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
-/// (and should) be None.
-pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
-                                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                                   src: Vec<tokenstream::TokenTree>,
-                                   desugar_doc_comments: bool)
-                                   -> TtReader {
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: SmallVector::one(TtFrame {
@@ -91,7 +74,6 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
         },
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
-        desugar_doc_comments: desugar_doc_comments,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
@@ -312,14 +294,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
-                r.stack.push(TtFrame {
-                   forest: TokenTree::Token(sp, DocComment(name)),
-                   idx: 0,
-                   dotdotdoted: false,
-                   sep: None
-                });
-            }
             TokenTree::Token(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 3d5dec31d2a..200fb0de2d2 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -22,7 +22,7 @@ use std::char;
 use std::mem::replace;
 use std::rc::Rc;
 
-pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag};
+pub use ext::tt::transcribe::{TtReader, new_tt_reader};
 
 pub mod comments;
 mod unicode_chars;
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 93ab09c89ab..e5bbeb3c648 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -211,6 +211,7 @@ pub struct Parser<'a> {
     pub root_module_name: Option<String>,
     pub expected_tokens: Vec<TokenType>,
     pub tts: Vec<(TokenTree, usize)>,
+    pub desugar_doc_comments: bool,
 }
 
 #[derive(PartialEq, Eq, Clone)]
@@ -275,6 +276,11 @@ impl From<P<Expr>> for LhsExpr {
 
 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess, rdr: Box<Reader+'a>) -> Self {
+        Parser::new_with_doc_flag(sess, rdr, false)
+    }
+
+    pub fn new_with_doc_flag(sess: &'a ParseSess, rdr: Box<Reader+'a>, desugar_doc_comments: bool)
+                             -> Self {
         let mut parser = Parser {
             reader: rdr,
             sess: sess,
@@ -294,6 +300,7 @@ impl<'a> Parser<'a> {
             root_module_name: None,
             expected_tokens: Vec::new(),
             tts: Vec::new(),
+            desugar_doc_comments: desugar_doc_comments,
         };
 
         let tok = parser.next_tok();
@@ -326,6 +333,10 @@ impl<'a> Parser<'a> {
             loop {
                 let nt = match tok.tok {
                     token::Interpolated(ref nt) => nt.clone(),
+                    token::DocComment(name) if self.desugar_doc_comments => {
+                        self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
+                        continue 'outer
+                    }
                     _ => return tok,
                 };
                 match *nt {
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 204c885e361..9ef6c07e489 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -214,11 +214,9 @@ impl TokenTree {
                  mtch: &[TokenTree],
                  tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
+        let diag = &cx.parse_sess().span_diagnostic;
         // `None` is because we're not interpolating
-        let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
-                                                         None,
-                                                         tts.iter().cloned().collect(),
-                                                         true);
+        let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
         macro_parser::parse(cx.parse_sess(), arg_rdr, mtch)
     }