about summary refs log tree commit diff
path: root/src/libsyntax/parse/lexer
diff options
context:
space:
mode:
author	Eduard-Mihai Burtescu <edy.burt@gmail.com>	2016-10-19 08:00:01 +0300
committer	GitHub <noreply@github.com>	2016-10-19 08:00:01 +0300
commit	094eaf025089218733305858fa262f826b6f11b8 (patch)
tree	46370228599af762a75c9b05f8d66353d6be1139 /src/libsyntax/parse/lexer
parent	45683187ea6887fd5ceab631b4534ed79e7f8397 (diff)
parent	95a9e2a724019c11c9b69a45e7953f8ba1225df9 (diff)
download	rust-094eaf025089218733305858fa262f826b6f11b8.tar.gz
download	rust-094eaf025089218733305858fa262f826b6f11b8.zip
Rollup merge of #37208 - jseyfried:fix_partially_consumed_tokens_in_macros, r=nrc
macros: fix partially consumed tokens in macro matchers

Fixes #37175.

This PR also avoids re-transcribing the tokens consumed by a matcher (and cloning the `TtReader` once per matcher), which improves expansion performance of the test case from #34630 by ~8%.

r? @nrc
Diffstat (limited to 'src/libsyntax/parse/lexer')
-rw-r--r--	src/libsyntax/parse/lexer/mod.rs	27
1 file changed, 24 insertions(+), 3 deletions(-)
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index aca41bd7b59..e62d0d925cd 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -144,7 +144,7 @@ impl<'a> Reader for StringReader<'a> {
 
 impl<'a> Reader for TtReader<'a> {
     fn is_eof(&self) -> bool {
-        self.cur_tok == token::Eof
+        self.peek().tok == token::Eof
     }
     fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
         assert!(self.fatal_errs.is_empty());
@@ -165,10 +165,31 @@ impl<'a> Reader for TtReader<'a> {
         self.fatal_errs.clear();
     }
     fn peek(&self) -> TokenAndSpan {
-        TokenAndSpan {
+        self.next_tok.clone().unwrap_or(TokenAndSpan {
             tok: self.cur_tok.clone(),
             sp: self.cur_span,
-        }
+        })
+    }
+}
+
+impl<'a, 'b> Reader for &'b mut TtReader<'a> {
+    fn is_eof(&self) -> bool {
+        (**self).is_eof()
+    }
+    fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
+        (**self).try_next_token()
+    }
+    fn fatal(&self, m: &str) -> FatalError {
+        (**self).fatal(m)
+    }
+    fn err(&self, m: &str) {
+        (**self).err(m)
+    }
+    fn emit_fatal_errors(&mut self) {
+        (**self).emit_fatal_errors()
+    }
+    fn peek(&self) -> TokenAndSpan {
+        (**self).peek()
     }
 }