Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs          |  6
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs  | 53
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs           | 17
3 files changed, 34 insertions(+), 42 deletions(-)
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 58fef9b6c45..a8fe35f45b3 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -282,7 +282,7 @@ impl<'a> Parser<'a> {
     pub fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
         let mut attrs = ast::AttrVec::new();
         loop {
-            let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
+            let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
             let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@@ -303,7 +303,7 @@ impl<'a> Parser<'a> {
                 None
             };
             if let Some(attr) = attr {
-                let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
+                let end_pos = self.num_bump_calls;
                 // If we are currently capturing tokens, mark the location of this inner attribute.
                 // If capturing ends up creating a `LazyAttrTokenStream`, we will include
                 // this replace range with it, removing the inner attribute from the final
@@ -313,7 +313,7 @@ impl<'a> Parser<'a> {
                 // corresponding macro).
                 let range = start_pos..end_pos;
                 if let Capturing::Yes = self.capture_state.capturing {
-                    self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
+                    self.capture_state.inner_attr_ranges.insert(attr.id, (range, None));
                 }
                 attrs.push(attr);
             } else {
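
Note on the new payload: `ReplaceRange` now carries an `Option<AttrsTarget>` (see the `mod.rs` hunk below), so an inner attribute is recorded with `None`, i.e. "delete these tokens from the captured stream, insert nothing". A minimal sketch of that shape, using simplified stand-in types rather than the real rustc definitions:

use std::collections::HashMap;
use std::ops::Range;

// Illustrative stand-ins, not the real rustc definitions.
type AttrId = u32;
struct AttrsTarget; // carries the attributes plus captured tokens in the real compiler
type ReplaceRange = (Range<u32>, Option<AttrsTarget>);

// While capturing, an inner attribute records its token range with a `None`
// payload: strip those tokens from the captured stream, insert nothing.
fn record_inner_attr(
    ranges: &mut HashMap<AttrId, ReplaceRange>,
    id: AttrId,
    start_pos: u32,
    end_pos: u32,
) {
    ranges.insert(id, (start_pos..end_pos, None));
}

fn main() {
    let mut ranges: HashMap<AttrId, ReplaceRange> = HashMap::new();
    record_inner_attr(&mut ranges, 0, 3, 9);
    assert!(ranges[&0].1.is_none());
}
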
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 13a647adfe3..38f18022e3c 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,6 +1,6 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing};
 use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
@@ -8,7 +8,6 @@ use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
 use rustc_span::{sym, Span, DUMMY_SP};
 
-use std::ops::Range;
 use std::{iter, mem};
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
@@ -88,7 +87,6 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 //
 // This also makes `Parser` very cheap to clone, since
 // there is no intermediate collection buffer to clone.
-#[derive(Clone)]
 struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
@@ -146,24 +144,23 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
             // start position, we ensure that any replace range which encloses
             // another replace range will capture the *replaced* tokens for the inner
             // range, not the original tokens.
-            for (range, new_tokens) in replace_ranges.into_iter().rev() {
+            for (range, target) in replace_ranges.into_iter().rev() {
                 assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-                // Replace ranges are only allowed to decrease the number of tokens.
-                assert!(
-                    range.len() >= new_tokens.len(),
-                    "Range {range:?} has greater len than {new_tokens:?}"
-                );
-
-                // Replace any removed tokens with `FlatToken::Empty`.
-                // This keeps the total length of `tokens` constant throughout the
-                // replacement process, allowing us to use all of the `ReplaceRanges` entries
-                // without adjusting indices.
-                let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
-                    .take(range.len() - new_tokens.len());
 
+                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
+                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
+                // total length of `tokens` constant throughout the replacement process, allowing
+                // us to use all of the `ReplaceRanges` entries without adjusting indices.
+                let target_len = target.is_some() as usize;
                 tokens.splice(
                     (range.start as usize)..(range.end as usize),
-                    new_tokens.into_iter().chain(filler),
+                    target
+                        .into_iter()
+                        .map(|target| (FlatToken::AttrsTarget(target), Spacing::Alone))
+                        .chain(
+                            iter::repeat((FlatToken::Empty, Spacing::Alone))
+                                .take(range.len() - target_len),
+                        ),
                 );
             }
             make_attr_token_stream(tokens.into_iter(), self.break_last_token)
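
For reference, the splice above keeps `tokens` at a constant length: each range is replaced by at most one `FlatToken::AttrsTarget` entry plus enough `FlatToken::Empty` fillers, so the indices recorded in later replace ranges stay valid. A standalone sketch of the same pattern, with plain strings standing in for `FlatToken`s (names are illustrative, not rustc's):

use std::iter;
use std::ops::Range;

fn splice_range(tokens: &mut Vec<&'static str>, range: Range<usize>, target: Option<&'static str>) {
    // At most one replacement entry; the rest of the span becomes fillers so
    // that `tokens.len()` (and every later range) is unchanged.
    let target_len = target.is_some() as usize;
    let filler = iter::repeat("<empty>").take(range.len() - target_len);
    tokens.splice(range, target.into_iter().chain(filler));
}

fn main() {
    let mut tokens = vec!["#", "[", "cfg", "]", "struct", "S", ";"];
    splice_range(&mut tokens, 0..7, Some("<attrs-target>"));
    assert_eq!(tokens.len(), 7);
    println!("{tokens:?}"); // ["<attrs-target>", "<empty>", ..., "<empty>"]
}
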
@@ -316,7 +313,7 @@ impl<'a> Parser<'a> {
                 .iter()
                 .cloned()
                 .chain(inner_attr_replace_ranges.iter().cloned())
-                .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
+                .map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data))
                 .collect()
         };
 
@@ -346,18 +343,14 @@ impl<'a> Parser<'a> {
             && matches!(self.capture_state.capturing, Capturing::Yes)
             && has_cfg_or_cfg_attr(final_attrs)
         {
-            let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
+            assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
 
-            // Replace the entire AST node that we just parsed, including attributes,
-            // with a `FlatToken::AttrTarget`. If this AST node is inside an item
-            // that has `#[derive]`, then this will allow us to cfg-expand this
-            // AST node.
+            // Replace the entire AST node that we just parsed, including attributes, with
+            // `target`. If this AST node is inside an item that has `#[derive]`, then this will
+            // allow us to cfg-expand this AST node.
             let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
-            let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
-
-            assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
-            let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
-            self.capture_state.replace_ranges.push((range, new_tokens));
+            let target = AttrsTarget { attrs: final_attrs.iter().cloned().collect(), tokens };
+            self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
             self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
         }
 
@@ -419,11 +412,11 @@ fn make_attr_token_stream(
                 .expect("Bottom token frame is missing!")
                 .inner
                 .push(AttrTokenTree::Token(token, spacing)),
-            FlatToken::AttrTarget(data) => stack
+            FlatToken::AttrsTarget(target) => stack
                 .last_mut()
                 .expect("Bottom token frame is missing!")
                 .inner
-                .push(AttrTokenTree::Attributes(data)),
+                .push(AttrTokenTree::AttrsTarget(target)),
             FlatToken::Empty => {}
         }
         token_and_spacing = iter.next();
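
The `@@ -346,18 +343,14 @@` hunk above is the `Some` half of the new `ReplaceRange` payload: the span of the just-parsed node, widened to include its outer attributes when present, is scheduled for replacement with a single target. A minimal sketch under simplified stand-in types (not the actual rustc definitions):

use std::ops::Range;

#[derive(Debug)]
struct AttrsTarget {
    attrs: Vec<String>,  // the attributes that still need cfg-expansion
    tokens: Vec<String>, // stand-in for the node's captured token stream
}
type ReplaceRange = (Range<u32>, Option<AttrsTarget>);

// The cfg-expansion case: the whole annotated node is swapped for one target.
fn push_target_range(
    replace_ranges: &mut Vec<ReplaceRange>,
    has_outer_attrs: bool,
    attrs_start_pos: u32,
    node_start_pos: u32,
    end_pos: u32,
    target: AttrsTarget,
) {
    // Start at the first outer attribute if there is one, so the attributes
    // are included in the replaced span.
    let start_pos = if has_outer_attrs { attrs_start_pos } else { node_start_pos };
    replace_ranges.push((start_pos..end_pos, Some(target)));
}

fn main() {
    let mut ranges = Vec::new();
    let target = AttrsTarget {
        attrs: vec!["#[cfg(test)]".into()],
        tokens: vec!["struct".into(), "S".into(), ";".into()],
    };
    push_target_range(&mut ranges, true, 0, 2, 7, target);
    println!("{ranges:?}");
}
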
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 5f16a3e1f37..45ca267fe5d 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -20,7 +20,7 @@ use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttrsTarget, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
@@ -203,13 +203,13 @@ struct ClosureSpans {
 }
 
 /// Indicates a range of tokens that should be replaced by
-/// the tokens in the provided vector. This is used in two
+/// the tokens in the provided `AttrsTarget`. This is used in two
 /// places during token collection:
 ///
 /// 1. During the parsing of an AST node that may have a `#[derive]`
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`.
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
-/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
+/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion
 /// on an `AttrTokenStream`.
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
@@ -219,7 +219,7 @@ struct ClosureSpans {
 /// the first macro inner attribute to invoke a proc-macro).
 /// When we create a `TokenStream`, the inner attributes get inserted
 /// into the proper place in the token stream.
-type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
+type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
 
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
@@ -1608,11 +1608,10 @@ enum FlatToken {
     /// A token - this holds both delimiter (e.g. '{' and '}')
     /// and non-delimiter tokens
     Token(Token),
-    /// Holds the `AttributesData` for an AST node. The
-    /// `AttributesData` is inserted directly into the
-    /// constructed `AttrTokenStream` as
-    /// an `AttrTokenTree::Attributes`.
-    AttrTarget(AttributesData),
+    /// Holds the `AttrsTarget` for an AST node. The `AttrsTarget` is inserted
+    /// directly into the constructed `AttrTokenStream` as an
+    /// `AttrTokenTree::AttrsTarget`.
+    AttrsTarget(AttrsTarget),
     /// A special 'empty' token that is ignored during the conversion
     /// to an `AttrTokenStream`. This is used to simplify the
     /// handling of replace ranges.
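
Downstream, the three `FlatToken` variants are consumed when the flat buffer is rebuilt into an `AttrTokenStream`: tokens and targets are kept, `Empty` fillers are dropped. A simplified sketch of that consumption step, with illustrative stand-in types and without the delimiter-stack handling of the real `make_attr_token_stream`:

// Illustrative stand-ins, not the real rustc types.
enum FlatToken {
    Token(String),
    AttrsTarget(String), // stands in for the real `AttrsTarget` payload
    Empty,
}

#[derive(Debug)]
enum Tree {
    Token(String),
    AttrsTarget(String),
}

// Tokens and targets survive the rebuild; `Empty` fillers left behind by the
// range replacement are simply dropped.
fn make_stream(flat: Vec<FlatToken>) -> Vec<Tree> {
    flat.into_iter()
        .filter_map(|t| match t {
            FlatToken::Token(tok) => Some(Tree::Token(tok)),
            FlatToken::AttrsTarget(target) => Some(Tree::AttrsTarget(target)),
            FlatToken::Empty => None,
        })
        .collect()
}

fn main() {
    let flat = vec![
        FlatToken::AttrsTarget("annotated item".into()),
        FlatToken::Empty,
        FlatToken::Empty,
        FlatToken::Token(";".into()),
    ];
    println!("{:?}", make_stream(flat)); // [AttrsTarget("annotated item"), Token(";")]
}
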