about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorAleksey Kladov <aleksey.kladov@gmail.com>2019-07-02 13:44:38 +0300
committerAleksey Kladov <aleksey.kladov@gmail.com>2019-07-04 09:01:37 +0300
commit830ff4a592cf6a5adc0e5482d4294779d7a91177 (patch)
tree02a63590fed1084da3fefbc87764f03d592c1bf0 /src
parentb43eb4235ac43c822d903ad26ed806f34cc1a14a (diff)
downloadrust-830ff4a592cf6a5adc0e5482d4294779d7a91177.tar.gz
rust-830ff4a592cf6a5adc0e5482d4294779d7a91177.zip
remove StringReader::peek
The reader itself doesn't need the ability to peek at tokens, so it's better
if clients implement this functionality themselves.

This hopefully becomes especially easy once we use an iterator interface
for the lexer, but it is not straightforward at the moment because of
buffered errors.
Diffstat (limited to 'src')
-rw-r--r--src/librustdoc/html/highlight.rs29
-rw-r--r--src/libsyntax/parse/lexer/mod.rs4
2 files changed, 20 insertions, 13 deletions
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 99ca8c43cfb..852c1e031de 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -79,6 +79,7 @@ pub fn render_with_highlighting(
 /// each span of text in sequence.
 struct Classifier<'a> {
     lexer: lexer::StringReader<'a>,
+    peek_token: Option<Token>,
     source_map: &'a SourceMap,
 
     // State of the classifier.
@@ -178,6 +179,7 @@ impl<'a> Classifier<'a> {
     fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<'a> {
         Classifier {
             lexer,
+            peek_token: None,
             source_map,
             in_attribute: false,
             in_macro: false,
@@ -187,10 +189,19 @@ impl<'a> Classifier<'a> {
 
     /// Gets the next token out of the lexer.
     fn try_next_token(&mut self) -> Result<Token, HighlightError> {
-        match self.lexer.try_next_token() {
-            Ok(token) => Ok(token),
-            Err(_) => Err(HighlightError::LexError),
+        if let Some(token) = self.peek_token.take() {
+            return Ok(token);
         }
+        self.lexer.try_next_token().map_err(|()| HighlightError::LexError)
+    }
+
+    fn peek(&mut self) -> Result<&Token, HighlightError> {
+        if self.peek_token.is_none() {
+            self.peek_token = Some(
+                self.lexer.try_next_token().map_err(|()| HighlightError::LexError)?
+            );
+        }
+        Ok(self.peek_token.as_ref().unwrap())
     }
 
     /// Exhausts the `lexer` writing the output into `out`.
@@ -234,7 +245,7 @@ impl<'a> Classifier<'a> {
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek() != &token::Whitespace => Class::RefKeyWord,
+                if self.peek()? != &token::Whitespace => Class::RefKeyWord,
 
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -257,7 +268,7 @@ impl<'a> Classifier<'a> {
             token::Question => Class::QuestionMark,
 
             token::Dollar => {
-                if self.lexer.peek().is_ident() {
+                if self.peek()?.is_ident() {
                     self.in_macro_nonterminal = true;
                     Class::MacroNonTerminal
                 } else {
@@ -280,9 +291,9 @@ impl<'a> Classifier<'a> {
                 // as an attribute.
 
                 // Case 1: #![inner_attribute]
-                if self.lexer.peek() == &token::Not {
+                if self.peek()? == &token::Not {
                     self.try_next_token()?; // NOTE: consumes `!` token!
-                    if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
+                    if self.peek()? == &token::OpenDelim(token::Bracket) {
                         self.in_attribute = true;
                         out.enter_span(Class::Attribute)?;
                     }
@@ -292,7 +303,7 @@ impl<'a> Classifier<'a> {
                 }
 
                 // Case 2: #[outer_attribute]
-                if self.lexer.peek() == &token::OpenDelim(token::Bracket) {
+                if self.peek()? == &token::OpenDelim(token::Bracket) {
                     self.in_attribute = true;
                     out.enter_span(Class::Attribute)?;
                 }
@@ -341,7 +352,7 @@ impl<'a> Classifier<'a> {
                         if self.in_macro_nonterminal {
                             self.in_macro_nonterminal = false;
                             Class::MacroNonTerminal
-                        } else if self.lexer.peek() == &token::Not {
+                        } else if self.peek()? == &token::Not {
                             self.in_macro = true;
                             Class::Macro
                         } else {
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 49f714e4e46..021b623d509 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -142,10 +142,6 @@ impl<'a> StringReader<'a> {
         buffer
     }
 
-    pub fn peek(&self) -> &Token {
-        &self.peek_token
-    }
-
     /// For comments.rs, which hackily pokes into next_pos and ch
     fn new_raw(sess: &'a ParseSess,
                source_file: Lrc<syntax_pos::SourceFile>,