author    Aleksey Kladov <aleksey.kladov@gmail.com>    2019-07-02 17:08:11 +0300
committer Aleksey Kladov <aleksey.kladov@gmail.com>    2019-07-04 09:01:37 +0300
commit    e9dc95c86ecb296e0a2067ca5813043f380b9ea6 (patch)
tree      1925ce43ec26a56158d2a09304b7cecac06c7a7d /src/libsyntax/parse
parent    830ff4a592cf6a5adc0e5482d4294779d7a91177 (diff)
remove peek_token from StringReader
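
The diff replaces the reader-side token cache with direct returns: advance_token now yields the token it just produced instead of stashing it in peek_token, try_next_token becomes a thin wrapper around advance_token, and the fatal helper that depended on peek_token.span is replaced at its single call site by fatal_span_ at the current position. The sketch below is not the rustc sources; it is a minimal illustration of the same refactoring pattern, using invented names (ToyReader, ToyToken).

// Minimal sketch of the pattern in this commit, not the rustc code: the
// reader stops caching an upcoming token and instead returns each token
// straight from the advance step. ToyReader/ToyToken are invented names.

#[derive(Debug, Clone, PartialEq)]
enum ToyToken {
    Ident(String),
    Whitespace,
    Eof,
}

struct ToyReader<'a> {
    // Before the change, a cached field analogous to `peek_token` would live
    // here; after it, the reader only keeps its scanning state.
    chars: std::str::Chars<'a>,
}

impl<'a> ToyReader<'a> {
    fn new(src: &'a str) -> Self {
        ToyReader { chars: src.chars() }
    }

    // Mirrors the new `advance_token(&mut self) -> Result<Token, ()>`:
    // it computes and returns the token rather than storing it.
    fn advance_token(&mut self) -> Result<ToyToken, ()> {
        let mut ident = String::new();
        loop {
            match self.chars.clone().next() {
                Some(c) if c.is_alphanumeric() => {
                    ident.push(c);
                    self.chars.next();
                }
                Some(c) if c.is_whitespace() && ident.is_empty() => {
                    self.chars.next();
                    return Ok(ToyToken::Whitespace);
                }
                _ if !ident.is_empty() => return Ok(ToyToken::Ident(ident)),
                Some(_) => return Err(()), // character this toy lexer does not handle
                None => return Ok(ToyToken::Eof),
            }
        }
    }

    // Mirrors the new `try_next_token`: a thin wrapper over `advance_token`.
    fn try_next_token(&mut self) -> Result<ToyToken, ()> {
        self.advance_token()
    }
}

fn main() {
    let mut reader = ToyReader::new("fn main");
    assert_eq!(reader.try_next_token(), Ok(ToyToken::Ident("fn".into())));
    assert_eq!(reader.try_next_token(), Ok(ToyToken::Whitespace));
    assert_eq!(reader.try_next_token(), Ok(ToyToken::Ident("main".into())));
    assert_eq!(reader.try_next_token(), Ok(ToyToken::Eof));
}
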
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r-- src/libsyntax/parse/lexer/comments.rs |  2
-rw-r--r-- src/libsyntax/parse/lexer/mod.rs      | 24
2 files changed, 8 insertions(+), 18 deletions(-)
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 97d3fc002e9..2ab0bebf929 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -268,7 +268,7 @@ fn read_block_comment(rdr: &mut StringReader<'_>,
         while level > 0 {
             debug!("=== block comment level {}", level);
             if rdr.is_eof() {
-                rdr.fatal("unterminated block comment").raise();
+                rdr.fatal_span_(rdr.pos, rdr.pos, "unterminated block comment").raise();
             }
             if rdr.ch_is('\n') {
                 trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 021b623d509..a24c72ecc24 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -39,7 +39,6 @@ pub struct StringReader<'a> {
     /// Stop reading src at this index.
     crate end_src_index: usize,
     // cached:
-    peek_token: Token,
     peek_span_src_raw: Span,
     fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
@@ -78,9 +77,7 @@ impl<'a> StringReader<'a> {
     /// Returns the next token. EFFECT: advances the string_reader.
     pub fn try_next_token(&mut self) -> Result<Token, ()> {
         assert!(self.fatal_errs.is_empty());
-        let ret_val = self.peek_token.take();
-        self.advance_token()?;
-        Ok(ret_val)
+        self.advance_token()
     }
 
     fn try_real_token(&mut self) -> Result<Token, ()> {
@@ -120,10 +117,6 @@ impl<'a> StringReader<'a> {
         FatalError.raise();
     }
 
-    fn fatal(&self, m: &str) -> FatalError {
-        self.fatal_span(self.peek_token.span, m)
-    }
-
     crate fn emit_fatal_errors(&mut self) {
         for err in &mut self.fatal_errs {
             err.emit();
@@ -169,7 +162,6 @@ impl<'a> StringReader<'a> {
             ch: Some('\n'),
             source_file,
             end_src_index: src.len(),
-            peek_token: Token::dummy(),
             peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
@@ -267,11 +259,11 @@ impl<'a> StringReader<'a> {
 
     /// Advance peek_token to refer to the next token, and
     /// possibly update the interner.
-    fn advance_token(&mut self) -> Result<(), ()> {
+    fn advance_token(&mut self) -> Result<Token, ()> {
         match self.scan_whitespace_or_comment() {
             Some(comment) => {
                 self.peek_span_src_raw = comment.span;
-                self.peek_token = comment;
+                Ok(comment)
             }
             None => {
                 let (kind, start_pos, end_pos) = if self.is_eof() {
@@ -281,12 +273,10 @@ impl<'a> StringReader<'a> {
                     (self.next_token_inner()?, start_pos, self.pos)
                 };
                 let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
-                self.peek_token = Token::new(kind, real);
                 self.peek_span_src_raw = raw;
+                Ok(Token::new(kind, real))
             }
         }
-
-        Ok(())
     }
 
     #[inline]
@@ -1484,17 +1474,17 @@ mod tests {
             assert_eq!(tok1.kind, tok2.kind);
             assert_eq!(tok1.span, tok2.span);
             assert_eq!(string_reader.next_token(), token::Whitespace);
-            // the 'main' id is already read:
-            assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
             let tok3 = string_reader.next_token();
+            assert_eq!(string_reader.pos.clone(), BytePos(28));
             let tok4 = Token::new(
                 mk_ident("main"),
                 Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
             );
             assert_eq!(tok3.kind, tok4.kind);
             assert_eq!(tok3.span, tok4.span);
-            // the lparen is already read:
+
+            assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
             assert_eq!(string_reader.pos.clone(), BytePos(29))
         })
     }
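
With the cache gone, any lookahead a consumer still needs has to live on the consumer's side rather than inside StringReader. Purely as an illustration of that caller-side pattern, and not as rustc API, a one-token lookahead buffer can look like the sketch below; Peeker and its methods are invented names, and std::iter::Peekable already provides the same behaviour for iterators.

// Hedged, caller-side sketch: one-token lookahead kept by the consumer
// instead of by the reader. `Peeker` is an invented name for illustration.
struct Peeker<I: Iterator> {
    iter: I,
    buffered: Option<I::Item>,
}

impl<I: Iterator> Peeker<I> {
    fn new(iter: I) -> Self {
        Peeker { iter, buffered: None }
    }

    // Look at the upcoming item without consuming it.
    fn peek(&mut self) -> Option<&I::Item> {
        if self.buffered.is_none() {
            self.buffered = self.iter.next();
        }
        self.buffered.as_ref()
    }

    // Hand out the buffered item if there is one, otherwise pull a fresh one.
    fn next(&mut self) -> Option<I::Item> {
        self.buffered.take().or_else(|| self.iter.next())
    }
}

fn main() {
    let tokens = vec!["fn", "main", "(", ")"];
    let mut p = Peeker::new(tokens.into_iter());
    assert_eq!(p.peek(), Some(&"fn")); // lookahead does not consume
    assert_eq!(p.next(), Some("fn"));
    assert_eq!(p.next(), Some("main"));
    assert_eq!(p.peek(), Some(&"("));
    assert_eq!(p.next(), Some("("));
    assert_eq!(p.next(), Some(")"));
    assert_eq!(p.next(), None);
}
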