author    bors <bors@rust-lang.org>  2024-07-03 16:31:16 +0000
committer bors <bors@rust-lang.org>  2024-07-03 16:31:16 +0000
commit    1cfd47fe0b78f48a04ac8fce792a406b638da40b (patch)
tree      fb7133fb063bcd032b9510eaac6833033b80a8c1 /compiler/rustc_parse/src
parent    1086affd98ad746e1bb02e187562b9a037e854e3 (diff)
parent    b212cd054ee567038243b8fc0eca925493b8c692 (diff)
Auto merge of #127278 - matthiaskrgr:rollup-fjexkdr, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

 - #126803 (Change `asm-comments` to `verbose-asm`, always emit user comments)
 - #127050 (Make mtime of reproducible tarballs dependent on git commit)
 - #127145 (Add `as_lang_item` to `LanguageItems`, new trait solver)
 - #127202 (Remove global error count checks from typeck)
 - #127233 (Some parser cleanups)
 - #127248 (Add parse fail test using safe trait/impl trait)
 - #127264 (Small `run-make-support` API improvements)
 - #127270 (bootstrap: pass correct struct size to winapi)

r? `@ghost`
`@rustbot` modify labels: rollup
Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs  113
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs              6
2 files changed, 55 insertions, 64 deletions
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index b5480b6b7d2..13a647adfe3 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -9,6 +9,7 @@ use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
 
 use std::ops::Range;
+use std::{iter, mem};
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
 /// When we parse outer attributes, we need to ensure that we capture tokens
@@ -29,15 +30,15 @@ pub struct AttrWrapper {
     // The start of the outer attributes in the token cursor.
     // This allows us to create a `ReplaceRange` for the entire attribute
     // target, including outer attributes.
-    start_pos: usize,
+    start_pos: u32,
 }
 
 impl AttrWrapper {
-    pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
+    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
         AttrWrapper { attrs, start_pos }
     }
     pub fn empty() -> AttrWrapper {
-        AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
+        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
     }
 
     pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
@@ -53,7 +54,7 @@ impl AttrWrapper {
     // FIXME: require passing an NT to prevent misuse of this method
     pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
         let mut self_attrs = self.attrs;
-        std::mem::swap(attrs, &mut self_attrs);
+        mem::swap(attrs, &mut self_attrs);
         attrs.extend(self_attrs);
     }
 
@@ -91,7 +92,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
-    num_calls: usize,
+    num_calls: u32,
     break_last_token: bool,
     replace_ranges: Box<[ReplaceRange]>,
 }
@@ -104,15 +105,16 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens =
-            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain(std::iter::repeat_with(|| {
-                    let token = cursor_snapshot.next();
-                    (FlatToken::Token(token.0), token.1)
-                }))
-                .take(self.num_calls);
-
-        if !self.replace_ranges.is_empty() {
+        let tokens = iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
+            .chain(iter::repeat_with(|| {
+                let token = cursor_snapshot.next();
+                (FlatToken::Token(token.0), token.1)
+            }))
+            .take(self.num_calls as usize);
+
+        if self.replace_ranges.is_empty() {
+            make_attr_token_stream(tokens, self.break_last_token)
+        } else {
             let mut tokens: Vec<_> = tokens.collect();
             let mut replace_ranges = self.replace_ranges.to_vec();
             replace_ranges.sort_by_key(|(range, _)| range.start);
@@ -156,7 +158,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                 // This keeps the total length of `tokens` constant throughout the
                 // replacement process, allowing us to use all of the `ReplaceRanges` entries
                 // without adjusting indices.
-                let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
+                let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
                     .take(range.len() - new_tokens.len());
 
                 tokens.splice(
@@ -164,9 +166,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                     new_tokens.into_iter().chain(filler),
                 );
             }
-            make_token_stream(tokens.into_iter(), self.break_last_token)
-        } else {
-            make_token_stream(tokens, self.break_last_token)
+            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
@@ -218,24 +218,23 @@ impl<'a> Parser<'a> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
         let start_pos = self.num_bump_calls;
-
         let has_outer_attrs = !attrs.attrs.is_empty();
-        let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
         let replace_ranges_start = self.capture_state.replace_ranges.len();
 
-        let ret = f(self, attrs.attrs);
-
-        self.capture_state.capturing = prev_capturing;
-
-        let (mut ret, trailing) = ret?;
+        let (mut ret, trailing) = {
+            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
+            let ret_and_trailing = f(self, attrs.attrs);
+            self.capture_state.capturing = prev_capturing;
+            ret_and_trailing?
+        };
 
         // When we're not in `capture-cfg` mode, then bail out early if:
         // 1. Our target doesn't support tokens at all (e.g. we're parsing an `NtIdent`)
         //    so there's nothing for us to do.
         // 2. Our target already has tokens set (e.g. we've parsed something
-        // like `#[my_attr] $item`. The actual parsing code takes care of prepending
-        // any attributes to the nonterminal, so we don't need to modify the
-        // already captured tokens.
+        //    like `#[my_attr] $item`). The actual parsing code takes care of
+        //    prepending any attributes to the nonterminal, so we don't need to
+        //    modify the already captured tokens.
         // Note that this check is independent of `force_collect` - if we already
         // have tokens, or can't even store them, then there's never a need to
         // force collection of new tokens.
@@ -276,37 +275,32 @@ impl<'a> Parser<'a> {
 
         let replace_ranges_end = self.capture_state.replace_ranges.len();
 
-        let mut end_pos = self.num_bump_calls;
-
-        let mut captured_trailing = false;
-
         // Capture a trailing token if requested by the callback 'f'
-        match trailing {
-            TrailingToken::None => {}
+        let captured_trailing = match trailing {
+            TrailingToken::None => false,
             TrailingToken::Gt => {
                 assert_eq!(self.token.kind, token::Gt);
+                false
             }
             TrailingToken::Semi => {
                 assert_eq!(self.token.kind, token::Semi);
-                end_pos += 1;
-                captured_trailing = true;
+                true
             }
-            TrailingToken::MaybeComma => {
-                if self.token.kind == token::Comma {
-                    end_pos += 1;
-                    captured_trailing = true;
-                }
-            }
-        }
+            TrailingToken::MaybeComma => self.token.kind == token::Comma,
+        };
 
-        // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
-        // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
-        // into an `AttrTokenStream`, we will create the proper token.
-        if self.break_last_token {
-            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
-            end_pos += 1;
-        }
+        assert!(
+            !(self.break_last_token && captured_trailing),
+            "Cannot set break_last_token and have trailing token"
+        );
+
+        let end_pos = self.num_bump_calls
+            + captured_trailing as u32
+            // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
+            // extend the range of captured tokens to include it, since the parser was not actually
+            // bumped past it. When the `LazyAttrTokenStream` gets converted into an
+            // `AttrTokenStream`, we will create the proper token.
+            + self.break_last_token as u32;
 
         let num_calls = end_pos - start_pos;
 
@@ -318,14 +312,11 @@ impl<'a> Parser<'a> {
             // Grab any replace ranges that occur *inside* the current AST node.
             // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
             // to an `AttrTokenStream`.
-            let start_calls: u32 = start_pos.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
-                .map(|(range, tokens)| {
-                    ((range.start - start_calls)..(range.end - start_calls), tokens)
-                })
+                .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
                 .collect()
         };
 
@@ -340,7 +331,7 @@ impl<'a> Parser<'a> {
         // If we support tokens at all
         if let Some(target_tokens) = ret.tokens_mut() {
             if target_tokens.is_none() {
-                // Store se our newly captured tokens into the AST node
+                // Store our newly captured tokens into the AST node.
                 *target_tokens = Some(tokens.clone());
             }
         }
@@ -382,10 +373,10 @@ impl<'a> Parser<'a> {
     }
 }
 
-/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
-/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
-/// of open and close delims.
-fn make_token_stream(
+/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
+/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
+/// close delims.
+fn make_attr_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
 ) -> AttrTokenStream {
@@ -464,6 +455,6 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 104);
+    static_assert_size!(LazyAttrTokenStreamImpl, 96);
     // tidy-alphabetical-end
 }
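
The subtle piece of the attr_wrapper.rs change is the splice loop in `to_attr_token_stream` above: each `ReplaceRange` replacement is padded with `FlatToken::Empty` filler so the total token count never changes, which is exactly why the sorted ranges can be applied one after another without re-adjusting later indices. A minimal standalone sketch of that invariant, with plain strings standing in for `FlatToken` and `None` for `FlatToken::Empty` (simplified types, not the rustc implementation):

// Illustrative only; rustc's real code works on (FlatToken, Spacing) pairs.
use std::ops::Range;

/// Apply `(range, new_tokens)` replacements while keeping `tokens.len()`
/// constant: shortfalls are padded with `None` placeholders, so the indices
/// of every later range stay valid with no offset bookkeeping.
fn apply_replace_ranges(
    tokens: &mut Vec<Option<String>>,
    mut replace_ranges: Vec<(Range<usize>, Vec<String>)>,
) {
    replace_ranges.sort_by_key(|(range, _)| range.start);
    for (range, new_tokens) in replace_ranges {
        assert!(new_tokens.len() <= range.len());
        // Pad the replacement so the overall length never changes.
        let filler = std::iter::repeat(None).take(range.len() - new_tokens.len());
        tokens.splice(range, new_tokens.into_iter().map(Some).chain(filler));
    }
    // A real implementation drops the placeholders when building the final
    // stream, as `make_attr_token_stream` does with `FlatToken::Empty`.
}

fn main() {
    let mut tokens: Vec<Option<String>> =
        ["a", "b", "c", "d", "e"].iter().map(|s| Some(s.to_string())).collect();
    // Replace tokens 1..4 with a single token; the length stays 5.
    apply_replace_ranges(&mut tokens, vec![(1..4, vec!["X".to_string()])]);
    assert_eq!(tokens.len(), 5);
    assert_eq!(tokens[1].as_deref(), Some("X"));
    assert_eq!(tokens[2], None);
}

Sorting by `range.start` is all the bookkeeping needed here, precisely because every replacement preserves the length of the token buffer.
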
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index cfd0a72c056..5f16a3e1f37 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -153,7 +153,7 @@ pub struct Parser<'a> {
     expected_tokens: Vec<TokenType>,
     token_cursor: TokenCursor,
     // The number of calls to `bump`, i.e. the position in the token stream.
-    num_bump_calls: usize,
+    num_bump_calls: u32,
     // During parsing we may sometimes need to 'unglue' a glued token into two
     // component tokens (e.g. '>>' into '>' and '>'), so the parser can consume
     // them one at a time. This process bypasses the normal capturing mechanism
@@ -192,7 +192,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
-rustc_data_structures::static_assert_size!(Parser<'_>, 264);
+rustc_data_structures::static_assert_size!(Parser<'_>, 256);
 
 /// Stores span information about a closure.
 #[derive(Clone, Debug)]
@@ -1572,7 +1572,7 @@ impl<'a> Parser<'a> {
         self.expected_tokens.clear();
     }
 
-    pub fn approx_token_stream_pos(&self) -> usize {
+    pub fn approx_token_stream_pos(&self) -> u32 {
         self.num_bump_calls
     }
 }
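
Both files narrow token-position counters from `usize` to `u32`, and the size assertions pin down the payoff: `LazyAttrTokenStreamImpl` shrinks from 104 to 96 bytes and `Parser` from 264 to 256 on 64-bit targets, one word each, which matters for a type that, as the comment above notes, is cloned when matching many declarative macro rules. A minimal sketch of the same guard pattern without rustc's `static_assert_size!` macro, using a hypothetical `Cursor` type with illustrative fields:

// Hypothetical type for illustration; not from rustc.
use std::mem::size_of;

#[allow(dead_code)]
struct Cursor {
    buf: Vec<u8>,           // 24 bytes on 64-bit targets
    num_bump_calls: u32,    // was `usize`: narrowing saves a word...
    break_last_token: bool, // ...once padding is accounted for
}

// The moral equivalent of `static_assert_size!(Cursor, 32)`: compilation
// fails if a field change grows the type. With `num_bump_calls: usize`,
// the size would be 40 (24 + 8 + 1, rounded up to 8-byte alignment)
// instead of 32 (24 + 4 + 1, rounded up).
#[cfg(target_pointer_width = "64")]
const _: () = assert!(size_of::<Cursor>() == 32);

fn main() {
    println!("Cursor is {} bytes", size_of::<Cursor>());
}
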