diff options
| author | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-01-13 04:49:20 +0000 |
|---|---|---|
| committer | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-01-17 08:17:26 +0000 |
| commit | debcbf0b8e8fcf6f1d44e8f79cc06c0866d8d1dd (patch) | |
| tree | f61d8ca01c5e888b1f18e25dcb516d80a54b875d /src/libsyntax/parse/lexer | |
| parent | de46b247585999ae70674f1fa0543d62f2889c7f (diff) | |
| download | rust-debcbf0b8e8fcf6f1d44e8f79cc06c0866d8d1dd.tar.gz rust-debcbf0b8e8fcf6f1d44e8f79cc06c0866d8d1dd.zip | |
Refactor the parser to consume token trees.
Diffstat (limited to 'src/libsyntax/parse/lexer')
| -rw-r--r-- | src/libsyntax/parse/lexer/mod.rs | 74 |
1 file changed, 0 insertions, 74 deletions
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 6c6161998d7..12b9130c474 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -12,7 +12,6 @@ use ast::{self, Ident}; use syntax_pos::{self, BytePos, CharPos, Pos, Span}; use codemap::CodeMap; use errors::{FatalError, DiagnosticBuilder}; -use ext::tt::transcribe::tt_next_token; use parse::{token, ParseSess}; use str::char_at; use symbol::{Symbol, keywords}; @@ -23,53 +22,10 @@ use std::char; use std::mem::replace; use std::rc::Rc; -pub use ext::tt::transcribe::{TtReader, new_tt_reader}; - pub mod comments; mod tokentrees; mod unicode_chars; -pub trait Reader { - fn is_eof(&self) -> bool; - fn try_next_token(&mut self) -> Result<TokenAndSpan, ()>; - fn next_token(&mut self) -> TokenAndSpan where Self: Sized { - let res = self.try_next_token(); - self.unwrap_or_abort(res) - } - /// Report a fatal error with the current span. - fn fatal(&self, &str) -> FatalError; - /// Report a non-fatal error with the current span. - fn err(&self, &str); - fn emit_fatal_errors(&mut self); - fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan { - match res { - Ok(tok) => tok, - Err(_) => { - self.emit_fatal_errors(); - panic!(FatalError); - } - } - } - fn peek(&self) -> TokenAndSpan; - /// Get a token the parser cares about. 
- fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> { - let mut t = self.try_next_token()?; - loop { - match t.tok { - token::Whitespace | token::Comment | token::Shebang(_) => { - t = self.try_next_token()?; - } - _ => break, - } - } - Ok(t) - } - fn real_token(&mut self) -> TokenAndSpan { - let res = self.try_real_token(); - self.unwrap_or_abort(res) - } -} - #[derive(Clone, PartialEq, Eq, Debug)] pub struct TokenAndSpan { pub tok: token::Token, @@ -182,36 +138,6 @@ impl<'a> StringReader<'a> { } } -impl<'a> Reader for TtReader<'a> { - fn is_eof(&self) -> bool { - self.peek().tok == token::Eof - } - fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> { - assert!(self.fatal_errs.is_empty()); - let r = tt_next_token(self); - debug!("TtReader: r={:?}", r); - Ok(r) - } - fn fatal(&self, m: &str) -> FatalError { - self.sp_diag.span_fatal(self.cur_span, m) - } - fn err(&self, m: &str) { - self.sp_diag.span_err(self.cur_span, m); - } - fn emit_fatal_errors(&mut self) { - for err in &mut self.fatal_errs { - err.emit(); - } - self.fatal_errs.clear(); - } - fn peek(&self) -> TokenAndSpan { - TokenAndSpan { - tok: self.cur_tok.clone(), - sp: self.cur_span, - } - } -} - impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into next_pos and ch pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self { |
