author    Jeffrey Seyfried <jeffrey.seyfried@gmail.com>    2017-01-13 04:49:20 +0000
committer Jeffrey Seyfried <jeffrey.seyfried@gmail.com>    2017-01-17 08:17:26 +0000
commit    debcbf0b8e8fcf6f1d44e8f79cc06c0866d8d1dd (patch)
tree      f61d8ca01c5e888b1f18e25dcb516d80a54b875d /src/libsyntax/parse
parent    de46b247585999ae70674f1fa0543d62f2889c7f (diff)
Refactor the parser to consume token trees.
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs    74
-rw-r--r--  src/libsyntax/parse/mod.rs           5
-rw-r--r--  src/libsyntax/parse/parser.rs      115
3 files changed, 20 insertions, 174 deletions
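
The title is the only description the commit carries: instead of pulling tokens from a boxed `Reader` (the `TtReader`/`StringReader` abstraction removed below), the parser now owns its input as token trees and walks them with a stack of `(TokenTree, usize)` cursors, synthesizing `Eof` once the stack runs out. What follows is a minimal, hedged sketch of that idea only; `Token`, `TokenTree`, `Parser`, and `next_tok` here are simplified stand-ins, not the libsyntax definitions touched by this patch.

// Sketch only: simplified stand-ins for token::Token and tokenstream::TokenTree.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    Ident(String),
    Eof,
}

#[derive(Clone, Debug)]
enum TokenTree {
    Token(Token),
    Delimited(Vec<TokenTree>),
}

struct Parser {
    // Mirrors `Parser::tts: Vec<(TokenTree, usize)>`: a stack of partially
    // consumed trees paired with the index of the next child to visit.
    tts: Vec<(TokenTree, usize)>,
}

impl Parser {
    // Analogue of the new `Parser::new(sess, tokens, ..)`: the top-level token
    // stream is wrapped in one delimited tree so the cursor logic stays uniform.
    fn new(tokens: Vec<TokenTree>) -> Self {
        Parser { tts: vec![(TokenTree::Delimited(tokens), 0)] }
    }

    // Analogue of `next_tok`: descend into delimited trees, advance the parent's
    // cursor, and return Eof once every tree is exhausted (there is no Reader to
    // fall back to, just like the synthesized `token::Eof` in the patch).
    fn next_tok(&mut self) -> Token {
        while let Some((tree, index)) = self.tts.pop() {
            match tree {
                TokenTree::Delimited(trees) => {
                    if index < trees.len() {
                        let child = trees[index].clone();
                        // Re-push the parent with its cursor advanced, then descend.
                        self.tts.push((TokenTree::Delimited(trees), index + 1));
                        match child {
                            TokenTree::Token(tok) => return tok,
                            delimited => self.tts.push((delimited, 0)),
                        }
                    }
                    // An exhausted tree is simply dropped; control returns to its parent.
                }
                TokenTree::Token(tok) => return tok,
            }
        }
        Token::Eof
    }
}

fn main() {
    let input = vec![
        TokenTree::Token(Token::Ident("foo".into())),
        TokenTree::Delimited(vec![TokenTree::Token(Token::Ident("bar".into()))]),
    ];
    let mut parser = Parser::new(input);
    loop {
        let tok = parser.next_tok();
        println!("{:?}", tok);
        if tok == Token::Eof {
            break;
        }
    }
}

(In the real patch the walk also produces open/close delimiter tokens and spans; the sketch drops those details to keep the cursor mechanism visible.)
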
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 6c6161998d7..12b9130c474 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -12,7 +12,6 @@ use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span};
 use codemap::CodeMap;
 use errors::{FatalError, DiagnosticBuilder};
-use ext::tt::transcribe::tt_next_token;
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -23,53 +22,10 @@ use std::char;
 use std::mem::replace;
 use std::rc::Rc;
 
-pub use ext::tt::transcribe::{TtReader, new_tt_reader};
-
 pub mod comments;
 mod tokentrees;
 mod unicode_chars;
 
-pub trait Reader {
-    fn is_eof(&self) -> bool;
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, ()>;
-    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
-        let res = self.try_next_token();
-        self.unwrap_or_abort(res)
-    }
-    /// Report a fatal error with the current span.
-    fn fatal(&self, &str) -> FatalError;
-    /// Report a non-fatal error with the current span.
-    fn err(&self, &str);
-    fn emit_fatal_errors(&mut self);
-    fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
-        match res {
-            Ok(tok) => tok,
-            Err(_) => {
-                self.emit_fatal_errors();
-                panic!(FatalError);
-            }
-        }
-    }
-    fn peek(&self) -> TokenAndSpan;
-    /// Get a token the parser cares about.
-    fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
-        let mut t = self.try_next_token()?;
-        loop {
-            match t.tok {
-                token::Whitespace | token::Comment | token::Shebang(_) => {
-                    t = self.try_next_token()?;
-                }
-                _ => break,
-            }
-        }
-        Ok(t)
-    }
-    fn real_token(&mut self) -> TokenAndSpan {
-        let res = self.try_real_token();
-        self.unwrap_or_abort(res)
-    }
-}
-
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct TokenAndSpan {
     pub tok: token::Token,
@@ -182,36 +138,6 @@ impl<'a> StringReader<'a> {
     }
 }
 
-impl<'a> Reader for TtReader<'a> {
-    fn is_eof(&self) -> bool {
-        self.peek().tok == token::Eof
-    }
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
-        assert!(self.fatal_errs.is_empty());
-        let r = tt_next_token(self);
-        debug!("TtReader: r={:?}", r);
-        Ok(r)
-    }
-    fn fatal(&self, m: &str) -> FatalError {
-        self.sp_diag.span_fatal(self.cur_span, m)
-    }
-    fn err(&self, m: &str) {
-        self.sp_diag.span_err(self.cur_span, m);
-    }
-    fn emit_fatal_errors(&mut self) {
-        for err in &mut self.fatal_errs {
-            err.emit();
-        }
-        self.fatal_errs.clear();
-    }
-    fn peek(&self) -> TokenAndSpan {
-        TokenAndSpan {
-            tok: self.cur_tok.clone(),
-            sp: self.cur_span,
-        }
-    }
-}
-
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
     pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 500e8285b4c..0937ef15b4d 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -45,7 +45,7 @@ pub mod obsolete;
 
 /// Info about a parsing session.
 pub struct ParseSess {
-    pub span_diagnostic: Handler, // better be the same as the one in the reader!
+    pub span_diagnostic: Handler,
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
     /// Used to determine and report recursive mod inclusions
@@ -227,8 +227,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream
 
 /// Given tts and the ParseSess, produce a parser
 pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
-    let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
-    let mut p = Parser::new(sess, Box::new(trdr), None, false);
+    let mut p = Parser::new(sess, tts, None, false);
     p.check_unknown_macro_variable();
     p
 }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 9ba6d4d17f7..608f8688e88 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -46,7 +46,7 @@ use ext::tt::macro_parser;
 use parse;
 use parse::classify;
 use parse::common::SeqSep;
-use parse::lexer::{Reader, TokenAndSpan};
+use parse::lexer::TokenAndSpan;
 use parse::obsolete::ObsoleteSyntax;
 use parse::token::{self, MatchNt, SubstNt};
 use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
@@ -188,14 +188,11 @@ pub struct Parser<'a> {
     pub restrictions: Restrictions,
     pub quote_depth: usize, // not (yet) related to the quasiquoter
     parsing_token_tree: bool,
-    pub reader: Box<Reader+'a>,
     /// The set of seen errors about obsolete syntax. Used to suppress
     /// extra detail when the same error is seen twice
     pub obsolete_set: HashSet<ObsoleteSyntax>,
     /// Used to determine the path to externally loaded source files
     pub directory: Directory,
-    /// Stack of open delimiters and their spans. Used for error message.
-    pub open_braces: Vec<(token::DelimToken, Span)>,
     /// Name of the root module this parser originated from. If `None`, then the
     /// name is not known. This does not change while the parser is descending
     /// into modules, and sub-parsers have new values for this name.
@@ -203,7 +200,6 @@ pub struct Parser<'a> {
     pub expected_tokens: Vec<TokenType>,
     pub tts: Vec<(TokenTree, usize)>,
     pub desugar_doc_comments: bool,
-    pub allow_interpolated_tts: bool,
 }
 
 #[derive(PartialEq, Eq, Clone)]
@@ -269,12 +265,17 @@ impl From<P<Expr>> for LhsExpr {
 
 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess,
-               rdr: Box<Reader+'a>,
+               tokens: Vec<TokenTree>,
                directory: Option<Directory>,
                desugar_doc_comments: bool)
                -> Self {
+        let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
+            delim: token::NoDelim,
+            open_span: syntax_pos::DUMMY_SP,
+            tts: tokens,
+            close_span: syntax_pos::DUMMY_SP,
+        }));
         let mut parser = Parser {
-            reader: rdr,
             sess: sess,
             token: token::Underscore,
             span: syntax_pos::DUMMY_SP,
@@ -286,12 +287,10 @@ impl<'a> Parser<'a> {
             parsing_token_tree: false,
             obsolete_set: HashSet::new(),
             directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
-            open_braces: Vec::new(),
             root_module_name: None,
             expected_tokens: Vec::new(),
-            tts: Vec::new(),
+            tts: if tt.len() > 0 { vec![(tt, 0)] } else { Vec::new() },
             desugar_doc_comments: desugar_doc_comments,
-            allow_interpolated_tts: true,
         };
 
         let tok = parser.next_tok();
@@ -320,7 +319,7 @@ impl<'a> Parser<'a> {
                     continue
                 }
             } else {
-                self.reader.real_token()
+                TokenAndSpan { tok: token::Eof, sp: self.span }
             };
 
             loop {
@@ -2688,94 +2687,28 @@ impl<'a> Parser<'a> {
         // whether something will be a nonterminal or a seq
         // yet.
         match self.token {
-            token::Eof => {
-                let mut err: DiagnosticBuilder<'a> =
-                    self.diagnostic().struct_span_err(self.span,
-                                                      "this file contains an un-closed delimiter");
-                for &(_, sp) in &self.open_braces {
-                    err.span_help(sp, "did you mean to close this delimiter?");
-                }
-
-                Err(err)
-            },
             token::OpenDelim(delim) => {
-                if self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
+                if self.quote_depth == 0 && self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
                     let tt = self.tts.pop().unwrap().0;
                     self.bump();
-                    return Ok(if self.allow_interpolated_tts {
-                        // avoid needlessly reparsing token trees in recursive macro expansions
-                        TokenTree::Token(tt.span(), token::Interpolated(Rc::new(token::NtTT(tt))))
-                    } else {
-                        tt
-                    });
+                    return Ok(tt);
                 }
 
                 let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
-                // The span for beginning of the delimited section
-                let pre_span = self.span;
-
-                // Parse the open delimiter.
-                self.open_braces.push((delim, self.span));
                 let open_span = self.span;
                 self.bump();
-
-                // Parse the token trees within the delimiters.
-                // We stop at any delimiter so we can try to recover if the user
-                // uses an incorrect delimiter.
                 let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
                                                             &token::CloseDelim(token::Paren),
                                                             &token::CloseDelim(token::Bracket)],
                                                           SeqSep::none(),
                                                           |p| p.parse_token_tree(),
                                                           |mut e| e.emit());
+                self.parsing_token_tree = parsing_token_tree;
 
                 let close_span = self.span;
-                // Expand to cover the entire delimited token tree
-                let span = Span { hi: close_span.hi, ..pre_span };
-
-                match self.token {
-                    // Correct delimiter.
-                    token::CloseDelim(d) if d == delim => {
-                        self.open_braces.pop().unwrap();
-
-                        // Parse the close delimiter.
-                        self.bump();
-                    }
-                    // Incorrect delimiter.
-                    token::CloseDelim(other) => {
-                        let token_str = self.this_token_to_string();
-                        let mut err = self.diagnostic().struct_span_err(self.span,
-                            &format!("incorrect close delimiter: `{}`", token_str));
-                        // This is a conservative error: only report the last unclosed delimiter.
-                        // The previous unclosed delimiters could actually be closed! The parser
-                        // just hasn't gotten to them yet.
-                        if let Some(&(_, sp)) = self.open_braces.last() {
-                            err.span_note(sp, "unclosed delimiter");
-                        };
-                        err.emit();
-
-                        self.open_braces.pop().unwrap();
-
-                        // If the incorrect delimiter matches an earlier opening
-                        // delimiter, then don't consume it (it can be used to
-                        // close the earlier one). Otherwise, consume it.
-                        // E.g., we try to recover from:
-                        // fn foo() {
-                        //     bar(baz(
-                        // }  // Incorrect delimiter but matches the earlier `{`
-                        if !self.open_braces.iter().any(|&(b, _)| b == other) {
-                            self.bump();
-                        }
-                    }
-                    token::Eof => {
-                        // Silently recover, the EOF token will be seen again
-                        // and an error emitted then. Thus we don't pop from
-                        // self.open_braces here.
-                    },
-                    _ => {}
-                }
+                self.bump();
 
-                self.parsing_token_tree = parsing_token_tree;
+                let span = Span { lo: open_span.lo, ..close_span };
                 Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
                     open_span: open_span,
@@ -2783,21 +2716,9 @@ impl<'a> Parser<'a> {
                     close_span: close_span,
                 })))
             },
-            token::CloseDelim(_) => {
-                // An unexpected closing delimiter (i.e., there is no
-                // matching opening delimiter).
-                let token_str = self.this_token_to_string();
-                let err = self.diagnostic().struct_span_err(self.span,
-                    &format!("unexpected close delimiter: `{}`", token_str));
-                Err(err)
-            },
-            /* we ought to allow different depths of unquotation */
-            token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => {
-                self.parse_unquoted()
-            }
-            _ => {
-                Ok(TokenTree::Token(self.span, self.bump_and_get()))
-            }
+            token::CloseDelim(_) | token::Eof => unreachable!(),
+            token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => self.parse_unquoted(),
+            _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
         }
     }