about summary refs log tree commit diff
path: root/compiler/rustc_parse/src/parser
diff options
context:
space:
mode:
authorNicholas Nethercote <n.nethercote@gmail.com>2022-04-19 11:36:13 +1000
committerNicholas Nethercote <n.nethercote@gmail.com>2022-04-19 17:02:48 +1000
commitb1e6dee59666d2f85a5121730ec128934519260f (patch)
tree4c0f09d21bf4a53a8aca01e5d772558f75e6463b /compiler/rustc_parse/src/parser
parent89ec75b0e95a62a2d1ac76f7918a469c7bb228ec (diff)
downloadrust-b1e6dee59666d2f85a5121730ec128934519260f.tar.gz
rust-b1e6dee59666d2f85a5121730ec128934519260f.zip
Merge `TokenCursor::{next,next_desugared}`.
And likewise for the inlined variants.

I did this for simplicity, but interestingly it was a performance win as
well.
Diffstat (limited to 'compiler/rustc_parse/src/parser')
-rw-r--r--compiler/rustc_parse/src/parser/attr_wrapper.rs17
-rw-r--r--compiler/rustc_parse/src/parser/mod.rs139
2 files changed, 71 insertions, 85 deletions
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 5ee9c339bb7..02749088c31 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -100,21 +100,16 @@ rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
 
 impl CreateTokenStream for LazyTokenStreamImpl {
     fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
-        // The token produced by the final call to `{,inlined_}next` or
-        // `{,inlined_}next_desugared` was not actually consumed by the
-        // callback. The combination of chaining the initial token and using
-        // `take` produces the desired result - we produce an empty
-        // `TokenStream` if no calls were made, and omit the final token
-        // otherwise.
+        // The token produced by the final call to `{,inlined_}next` was not
+        // actually consumed by the callback. The combination of chaining the
+        // initial token and using `take` produces the desired result - we
+        // produce an empty `TokenStream` if no calls were made, and omit the
+        // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
                 .chain((0..self.num_calls).map(|_| {
-                    let token = if cursor_snapshot.desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    };
+                    let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
                     (FlatToken::Token(token.0), token.1)
                 }))
                 .take(self.num_calls);
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 378a533edf9..b6f4cd119e0 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -206,9 +206,7 @@ struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
     desugar_doc_comments: bool,
-    // Counts the number of calls to `{,inlined_}next` or
-    // `{,inlined_}next_desugared`, depending on whether
-    // `desugar_doc_comments` is set.
+    // Counts the number of calls to `{,inlined_}next`.
     num_next_calls: usize,
     // During parsing, we may sometimes need to 'unglue' a
     // glued token into two component tokens
@@ -256,14 +254,14 @@ impl TokenCursorFrame {
 }
 
 impl TokenCursor {
-    fn next(&mut self) -> (Token, Spacing) {
-        self.inlined_next()
+    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+        self.inlined_next(desugar_doc_comments)
     }
 
     /// This always-inlined version should only be used on hot code paths.
     #[inline(always)]
-    fn inlined_next(&mut self) -> (Token, Spacing) {
-        loop {
+    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+        let (token, spacing) = loop {
             let (tree, spacing) = if !self.frame.open_delim {
                 self.frame.open_delim = true;
                 TokenTree::token(token::OpenDelim(self.frame.delim), self.frame.span.open).into()
@@ -281,77 +279,74 @@ impl TokenCursor {
 
             match tree {
                 TokenTree::Token(token) => {
-                    return (token, spacing);
+                    break (token, spacing);
                 }
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
                 }
             }
-        }
-    }
+        };
 
-    fn next_desugared(&mut self) -> (Token, Spacing) {
-        self.inlined_next_desugared()
-    }
+        match (desugar_doc_comments, &token) {
+            (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
+                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+                // required to wrap the text.
+                let mut num_of_hashes = 0;
+                let mut count = 0;
+                for ch in data.as_str().chars() {
+                    count = match ch {
+                        '"' => 1,
+                        '#' if count > 0 => count + 1,
+                        _ => 0,
+                    };
+                    num_of_hashes = cmp::max(num_of_hashes, count);
+                }
 
-    /// This always-inlined version should only be used on hot code paths.
-    #[inline(always)]
-    fn inlined_next_desugared(&mut self) -> (Token, Spacing) {
-        let (data, attr_style, sp) = match self.inlined_next() {
-            (Token { kind: token::DocComment(_, attr_style, data), span }, _) => {
-                (data, attr_style, span)
+                let delim_span = DelimSpan::from_single(span);
+                let body = TokenTree::Delimited(
+                    delim_span,
+                    token::Bracket,
+                    [
+                        TokenTree::token(token::Ident(sym::doc, false), span),
+                        TokenTree::token(token::Eq, span),
+                        TokenTree::token(
+                            TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
+                            span,
+                        ),
+                    ]
+                    .iter()
+                    .cloned()
+                    .collect::<TokenStream>(),
+                );
+
+                self.stack.push(mem::replace(
+                    &mut self.frame,
+                    TokenCursorFrame::new(
+                        delim_span,
+                        token::NoDelim,
+                        if attr_style == AttrStyle::Inner {
+                            [
+                                TokenTree::token(token::Pound, span),
+                                TokenTree::token(token::Not, span),
+                                body,
+                            ]
+                            .iter()
+                            .cloned()
+                            .collect::<TokenStream>()
+                        } else {
+                            [TokenTree::token(token::Pound, span), body]
+                                .iter()
+                                .cloned()
+                                .collect::<TokenStream>()
+                        },
+                    ),
+                ));
+
+                self.next(/* desugar_doc_comments */ false)
             }
-            tok => return tok,
-        };
-
-        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-        // required to wrap the text.
-        let mut num_of_hashes = 0;
-        let mut count = 0;
-        for ch in data.as_str().chars() {
-            count = match ch {
-                '"' => 1,
-                '#' if count > 0 => count + 1,
-                _ => 0,
-            };
-            num_of_hashes = cmp::max(num_of_hashes, count);
+            _ => (token, spacing),
         }
-
-        let delim_span = DelimSpan::from_single(sp);
-        let body = TokenTree::Delimited(
-            delim_span,
-            token::Bracket,
-            [
-                TokenTree::token(token::Ident(sym::doc, false), sp),
-                TokenTree::token(token::Eq, sp),
-                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp),
-            ]
-            .iter()
-            .cloned()
-            .collect::<TokenStream>(),
-        );
-
-        self.stack.push(mem::replace(
-            &mut self.frame,
-            TokenCursorFrame::new(
-                delim_span,
-                token::NoDelim,
-                if attr_style == AttrStyle::Inner {
-                    [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
-                } else {
-                    [TokenTree::token(token::Pound, sp), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
-                },
-            ),
-        ));
-
-        self.next()
     }
 }
 
@@ -1010,11 +1005,7 @@ impl<'a> Parser<'a> {
     pub fn bump(&mut self) {
         let fallback_span = self.token.span;
         loop {
-            let (mut next, spacing) = if self.desugar_doc_comments {
-                self.token_cursor.inlined_next_desugared()
-            } else {
-                self.token_cursor.inlined_next()
-            };
+            let (mut next, spacing) = self.token_cursor.inlined_next(self.desugar_doc_comments);
             self.token_cursor.num_next_calls += 1;
             // We've retrieved an token from the underlying
             // cursor, so we no longer need to worry about
@@ -1063,7 +1054,7 @@ impl<'a> Parser<'a> {
         let mut i = 0;
         let mut token = Token::dummy();
         while i < dist {
-            token = cursor.next().0;
+            token = cursor.next(/* desugar_doc_comments */ false).0;
             if matches!(
                 token.kind,
                 token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)