about summary refs log tree commit diff
path: root/compiler/rustc_parse/src/parser
diff options
context:
space:
mode:
author: bors <bors@rust-lang.org> 2020-11-14 04:21:56 +0000
committer: bors <bors@rust-lang.org> 2020-11-14 04:21:56 +0000
commit 50d3c2a3cb96b6af2b5e3b9d08578a556ac70ede (patch)
tree de3dbe5ca3400808ddb1e44cb36afed9e83dca0c /compiler/rustc_parse/src/parser
parent b63d05a908ab667f37ea9fb66f4fab9dd89fce76 (diff)
parent 2879ab793e10a1bf5c158e3301474be96192aa7a (diff)
download rust-50d3c2a3cb96b6af2b5e3b9d08578a556ac70ede.tar.gz
rust-50d3c2a3cb96b6af2b5e3b9d08578a556ac70ede.zip
Auto merge of #78736 - petrochenkov:lazyenum, r=Aaron1011
rustc_parse: Remove optimization for 0-length streams in `collect_tokens`

The optimization conflates empty token streams with an unknown token stream, which is at least suspicious, and removing it doesn't affect performance because 0-length token streams are very rare.

r? `@Aaron1011`
Diffstat (limited to 'compiler/rustc_parse/src/parser')
-rw-r--r--compiler/rustc_parse/src/parser/mod.rs14
1 file changed, 5 insertions, 9 deletions
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index da1c54e88b5..40aa2db58c7 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -1180,8 +1180,7 @@ impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
     /// into a `LazyTokenStream`, and returned along with the result
-    /// of the callback. The returned `LazyTokenStream` will be `None`
-    /// if not tokens were captured.
+    /// of the callback.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
@@ -1203,17 +1202,14 @@ impl<'a> Parser<'a> {
 
         let ret = f(self)?;
 
-        // We didn't capture any tokens
-        let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
-        if num_calls == 0 {
-            return Ok((ret, None));
-        }
-
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
         // if it is never needed - for example, a captured `macro_rules!`
         // argument that is never passed to a proc macro.
+        // In practice token stream creation happens rarely compared to
+        // calls to `collect_tokens` (see some statistics in #78736),
+        // so we are doing as little up-front work as possible.
         //
         // This also makes `Parser` very cheap to clone, since
         // there is no intermediate collection buffer to clone.
@@ -1247,8 +1243,8 @@ impl<'a> Parser<'a> {
 
         let lazy_impl = LazyTokenStreamImpl {
             start_token,
+            num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
             cursor_snapshot,
-            num_calls,
             desugar_doc_comments: self.desugar_doc_comments,
         };
         Ok((ret, Some(LazyTokenStream::new(lazy_impl))))