From 28236ab703d21483dc818108f157fbb2da5a2802 Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote <n.nethercote@gmail.com>
Date: Tue, 29 Apr 2025 11:18:08 +1000
Subject: Move various token stream things from `rustc_parse` to `rustc_ast`.

Specifically: `TokenCursor`, `TokenTreeCursor`, `LazyAttrTokenStreamImpl`,
`FlatToken`, `make_attr_token_stream`, `ParserRange`, `NodeRange`,
`ParserReplacement`, and `NodeReplacement`. These are all related to token
streams, rather than actual parsing.

This will facilitate the simplifications in the next commit.
---
 compiler/rustc_parse/src/parser/attr_wrapper.rs | 169 +-----------------------
 1 file changed, 6 insertions(+), 163 deletions(-)

(limited to 'compiler/rustc_parse/src/parser/attr_wrapper.rs')

diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 6061c9cb485..912045d8835 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,21 +1,18 @@
 use std::borrow::Cow;
-use std::{iter, mem};
+use std::mem;
 
-use rustc_ast::token::{Delimiter, Token};
+use rustc_ast::token::Token;
 use rustc_ast::tokenstream::{
-    AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream,
-    Spacing, ToAttrTokenStream,
+    AttrsTarget, LazyAttrTokenStream, LazyAttrTokenStreamImpl, NodeRange, ParserRange, Spacing,
+    TokenCursor,
 };
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
-use rustc_span::{DUMMY_SP, Span, sym};
+use rustc_span::{DUMMY_SP, sym};
 
-use super::{
-    Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange,
-    TokenCursor, Trailing,
-};
+use super::{Capturing, ForceCollect, Parser, Trailing};
 
 // When collecting tokens, this fully captures the start point. Usually its
 // just after outer attributes, but occasionally it's before.
@@ -94,95 +91,6 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
     })
 }
 
-// From a value of this type we can reconstruct the `TokenStream` seen by the
-// `f` callback passed to a call to `Parser::collect_tokens`, by
-// replaying the getting of the tokens. This saves us producing a `TokenStream`
-// if it is never needed, e.g. a captured `macro_rules!` argument that is never
-// passed to a proc macro. In practice, token stream creation happens rarely
-// compared to calls to `collect_tokens` (see some statistics in #78736) so we
-// are doing as little up-front work as possible.
-//
-// This also makes `Parser` very cheap to clone, since
-// there is no intermediate collection buffer to clone.
-struct LazyAttrTokenStreamImpl {
-    start_token: (Token, Spacing),
-    cursor_snapshot: TokenCursor,
-    num_calls: u32,
-    break_last_token: u32,
-    node_replacements: Box<[NodeReplacement]>,
-}
-
-impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
-    fn to_attr_token_stream(&self) -> AttrTokenStream {
-        // The token produced by the final call to `{,inlined_}next` was not
-        // actually consumed by the callback. The combination of chaining the
-        // initial token and using `take` produces the desired result - we
-        // produce an empty `TokenStream` if no calls were made, and omit the
-        // final token otherwise.
-        let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token))
-            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
-            .take(self.num_calls as usize);
-
-        if self.node_replacements.is_empty() {
-            make_attr_token_stream(tokens, self.break_last_token)
-        } else {
-            let mut tokens: Vec<_> = tokens.collect();
-            let mut node_replacements = self.node_replacements.to_vec();
-            node_replacements.sort_by_key(|(range, _)| range.0.start);
-
-            #[cfg(debug_assertions)]
-            for [(node_range, tokens), (next_node_range, next_tokens)] in
-                node_replacements.array_windows()
-            {
-                assert!(
-                    node_range.0.end <= next_node_range.0.start
-                        || node_range.0.end >= next_node_range.0.end,
-                    "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
-                    node_range,
-                    tokens,
-                    next_node_range,
-                    next_tokens,
-                );
-            }
-
-            // Process the replace ranges, starting from the highest start
-            // position and working our way back. If have tokens like:
-            //
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // Then we will generate replace ranges for both
-            // the `#[cfg(FALSE)] field: bool` and the entire
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // By starting processing from the replace range with the greatest
-            // start position, we ensure that any (outer) replace range which
-            // encloses another (inner) replace range will fully overwrite the
-            // inner range's replacement.
-            for (node_range, target) in node_replacements.into_iter().rev() {
-                assert!(
-                    !node_range.0.is_empty(),
-                    "Cannot replace an empty node range: {:?}",
-                    node_range.0
-                );
-
-                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
-                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
-                // total length of `tokens` constant throughout the replacement process, allowing
-                // us to do all replacements without adjusting indices.
-                let target_len = target.is_some() as usize;
-                tokens.splice(
-                    (node_range.0.start as usize)..(node_range.0.end as usize),
-                    target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
-                        iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len),
-                    ),
-                );
-            }
-            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
-        }
-    }
-}
-
 impl<'a> Parser<'a> {
     pub(super) fn collect_pos(&self) -> CollectPos {
         CollectPos {
@@ -483,71 +391,6 @@ impl<'a> Parser<'a> {
     }
 }
 
-/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
-/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
-/// close delims.
-fn make_attr_token_stream(
-    iter: impl Iterator<Item = FlatToken>,
-    break_last_token: u32,
-) -> AttrTokenStream {
-    #[derive(Debug)]
-    struct FrameData {
-        // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
-        inner: Vec<AttrTokenTree>,
-    }
-    // The stack always has at least one element. Storing it separately makes for shorter code.
-    let mut stack_top = FrameData { open_delim_sp: None, inner: vec![] };
-    let mut stack_rest = vec![];
-    for flat_token in iter {
-        match flat_token {
-            FlatToken::Token((token @ Token { kind, span }, spacing)) => {
-                if let Some(delim) = kind.open_delim() {
-                    stack_rest.push(mem::replace(
-                        &mut stack_top,
-                        FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
-                    ));
-                } else if let Some(delim) = kind.close_delim() {
-                    let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
-                    let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
-                    assert!(
-                        open_delim.eq_ignoring_invisible_origin(&delim),
-                        "Mismatched open/close delims: open={open_delim:?} close={span:?}"
-                    );
-                    let dspan = DelimSpan::from_pair(open_sp, span);
-                    let dspacing = DelimSpacing::new(open_spacing, spacing);
-                    let stream = AttrTokenStream::new(frame_data.inner);
-                    let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
-                    stack_top.inner.push(delimited);
-                } else {
-                    stack_top.inner.push(AttrTokenTree::Token(token, spacing))
-                }
-            }
-            FlatToken::AttrsTarget(target) => {
-                stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
-            }
-            FlatToken::Empty => {}
-        }
-    }
-
-    if break_last_token > 0 {
-        let last_token = stack_top.inner.pop().unwrap();
-        if let AttrTokenTree::Token(last_token, spacing) = last_token {
-            let (unglued, _) = last_token.kind.break_two_token_op(break_last_token).unwrap();
-
-            // Tokens are always ASCII chars, so we can use byte arithmetic here.
-            let mut first_span = last_token.span.shrink_to_lo();
-            first_span =
-                first_span.with_hi(first_span.lo() + rustc_span::BytePos(break_last_token));
-
-            stack_top.inner.push(AttrTokenTree::Token(Token::new(unglued, first_span), spacing));
-        } else {
-            panic!("Unexpected last token {last_token:?}")
-        }
-    }
-    AttrTokenStream::new(stack_top.inner)
-}
-
 /// Tokens are needed if:
 /// - any non-single-segment attributes (other than doc comments) are present,
 ///   e.g. `rustfmt::skip`; or
-- 
cgit 1.4.1-3-g733a5
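The `make_attr_token_stream` function moved by this patch folds a flat token sequence (with explicit open/close delimiter tokens) back into a tree, keeping the current frame (`stack_top`) separate from the stack of enclosing frames (`stack_rest`). A minimal, self-contained sketch of the same pattern follows; the types here are simplified stand-ins, not the compiler's `FlatToken`/`AttrTokenTree`, and the real function additionally tracks spans, spacing, and the `break_last_token` fixup seen above.

#[derive(Debug, Clone)]
enum Flat {
    Open(char),
    Close(char),
    Tok(&'static str),
}

#[derive(Debug)]
enum Tree {
    Delimited(char, Vec<Tree>),
    Tok(&'static str),
}

// Fold a flat token sequence back into a tree, one stack frame per open
// delimiter. The top frame is stored outside the stack proper, mirroring
// the `stack_top`/`stack_rest` split in `make_attr_token_stream`.
fn build_tree(flat: impl IntoIterator<Item = Flat>) -> Vec<Tree> {
    let mut stack_top: (Option<char>, Vec<Tree>) = (None, Vec::new());
    let mut stack_rest: Vec<(Option<char>, Vec<Tree>)> = Vec::new();

    for t in flat {
        match t {
            Flat::Open(d) => {
                // Start a new frame; the old top becomes part of the rest.
                stack_rest.push(std::mem::replace(&mut stack_top, (Some(d), Vec::new())));
            }
            Flat::Close(_) => {
                // Finish the current frame and push it into its parent.
                let (delim, inner) = std::mem::replace(
                    &mut stack_top,
                    stack_rest.pop().expect("unbalanced delimiters"),
                );
                stack_top.1.push(Tree::Delimited(delim.expect("close without open"), inner));
            }
            Flat::Tok(s) => stack_top.1.push(Tree::Tok(s)),
        }
    }
    // Everything accumulated in the first frame is the top-level stream.
    stack_top.1
}

fn main() {
    // `a ( b c ) d` => [Tok("a"), Delimited('(', [Tok("b"), Tok("c")]), Tok("d")]
    let flat = vec![
        Flat::Tok("a"),
        Flat::Open('('),
        Flat::Tok("b"),
        Flat::Tok("c"),
        Flat::Close(')'),
        Flat::Tok("d"),
    ];
    println!("{:?}", build_tree(flat));
}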
From 298c56f4ba604d3c7025a44fe7bfe1134d6b56d6 Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote <n.nethercote@gmail.com>
Date: Tue, 29 Apr 2025 11:57:27 +1000
Subject: Simplify `LazyAttrTokenStream`.

This commit does the following.
- Changes it from `Lrc<Box<dyn ToAttrTokenStream>>` to
  `Lrc<LazyAttrTokenStreamInner>`.
- Reworks `LazyAttrTokenStreamImpl` as `LazyAttrTokenStreamInner`, which
  is a two-variant enum.
- Removes the `ToAttrTokenStream` trait and the two impls of it.

The recursion limit must be increased in some crates otherwise rustdoc
aborts.
---
 compiler/rustc_ast/src/lib.rs                   |   1 +
 compiler/rustc_ast/src/mut_visit.rs             |   2 +-
 compiler/rustc_ast/src/tokenstream.rs           | 214 +++++++++++++-----------
 compiler/rustc_attr_parsing/src/lib.rs          |   1 +
 compiler/rustc_builtin_macros/src/lib.rs        |   1 +
 compiler/rustc_codegen_ssa/src/lib.rs           |   1 +
 compiler/rustc_expand/src/config.rs             |   8 +-
 compiler/rustc_hir/src/lib.rs                   |   1 +
 compiler/rustc_middle/src/lib.rs                |   1 +
 compiler/rustc_parse/src/lib.rs                 |   1 +
 compiler/rustc_parse/src/parser/attr_wrapper.rs |  24 +--
 compiler/rustc_resolve/src/lib.rs               |   1 +
 12 files changed, 138 insertions(+), 118 deletions(-)

(limited to 'compiler/rustc_parse/src/parser/attr_wrapper.rs')

diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs
index bb16b48a641..e572ec99dab 100644
--- a/compiler/rustc_ast/src/lib.rs
+++ b/compiler/rustc_ast/src/lib.rs
@@ -20,6 +20,7 @@
 #![feature(never_type)]
 #![feature(rustdoc_internals)]
 #![feature(stmt_expr_attributes)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 pub mod util {
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index 6aae2e481a5..28808716700 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -835,7 +835,7 @@ fn visit_lazy_tts_opt_mut<T: MutVisitor>(vis: &mut T, lazy_tts: Option<&mut Lazy
         if let Some(lazy_tts) = lazy_tts {
             let mut tts = lazy_tts.to_attr_token_stream();
             visit_attr_tts(vis, &mut tts);
-            *lazy_tts = LazyAttrTokenStream::new(tts);
+            *lazy_tts = LazyAttrTokenStream::new_direct(tts);
         }
     }
 }
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 0f63a248fef..4f352e20da2 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -107,25 +107,30 @@ where
     }
 }
 
-pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
-    fn to_attr_token_stream(&self) -> AttrTokenStream;
-}
-
-impl ToAttrTokenStream for AttrTokenStream {
-    fn to_attr_token_stream(&self) -> AttrTokenStream {
-        self.clone()
-    }
-}
-
-/// A lazy version of [`TokenStream`], which defers creation
-/// of an actual `TokenStream` until it is needed.
-/// `Box` is here only to reduce the structure size.
+/// A lazy version of [`AttrTokenStream`], which defers creation of an actual
+/// `AttrTokenStream` until it is needed.
 #[derive(Clone)]
-pub struct LazyAttrTokenStream(Arc<Box<dyn ToAttrTokenStream>>);
+pub struct LazyAttrTokenStream(Arc<LazyAttrTokenStreamInner>);
 
 impl LazyAttrTokenStream {
-    pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
-        LazyAttrTokenStream(Arc::new(Box::new(inner)))
+    pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream)))
+    }
+
+    pub fn new_pending(
+        start_token: (Token, Spacing),
+        cursor_snapshot: TokenCursor,
+        num_calls: u32,
+        break_last_token: u32,
+        node_replacements: Box<[NodeReplacement]>,
+    ) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            break_last_token,
+            node_replacements,
+        }))
     }
 
     pub fn to_attr_token_stream(&self) -> AttrTokenStream {
@@ -208,91 +213,109 @@ impl NodeRange {
     }
 }
 
-// From a value of this type we can reconstruct the `TokenStream` seen by the
-// `f` callback passed to a call to `Parser::collect_tokens`, by
-// replaying the getting of the tokens. This saves us producing a `TokenStream`
-// if it is never needed, e.g. a captured `macro_rules!` argument that is never
-// passed to a proc macro. In practice, token stream creation happens rarely
-// compared to calls to `collect_tokens` (see some statistics in #78736) so we
-// are doing as little up-front work as possible.
-//
-// This also makes `Parser` very cheap to clone, since
-// there is no intermediate collection buffer to clone.
-pub struct LazyAttrTokenStreamImpl {
-    pub start_token: (Token, Spacing),
-    pub cursor_snapshot: TokenCursor,
-    pub num_calls: u32,
-    pub break_last_token: u32,
-    pub node_replacements: Box<[NodeReplacement]>,
+enum LazyAttrTokenStreamInner {
+    // The token stream has already been produced.
+    Direct(AttrTokenStream),
+
+    // From a value of this type we can reconstruct the `TokenStream` seen by
+    // the `f` callback passed to a call to `Parser::collect_tokens`, by
+    // replaying the getting of the tokens. This saves us producing a
+    // `TokenStream` if it is never needed, e.g. a captured `macro_rules!`
+    // argument that is never passed to a proc macro. In practice, token stream
+    // creation happens rarely compared to calls to `collect_tokens` (see some
+    // statistics in #78736) so we are doing as little up-front work as
+    // possible.
+    //
+    // This also makes `Parser` very cheap to clone, since there is no
+    // intermediate collection buffer to clone.
+    Pending {
+        start_token: (Token, Spacing),
+        cursor_snapshot: TokenCursor,
+        num_calls: u32,
+        break_last_token: u32,
+        node_replacements: Box<[NodeReplacement]>,
+    },
 }
 
-impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
+impl LazyAttrTokenStreamInner {
     fn to_attr_token_stream(&self) -> AttrTokenStream {
-        // The token produced by the final call to `{,inlined_}next` was not
-        // actually consumed by the callback. The combination of chaining the
-        // initial token and using `take` produces the desired result - we
-        // produce an empty `TokenStream` if no calls were made, and omit the
-        // final token otherwise.
-        let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token))
-            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
-            .take(self.num_calls as usize);
-
-        if self.node_replacements.is_empty() {
-            make_attr_token_stream(tokens, self.break_last_token)
-        } else {
-            let mut tokens: Vec<_> = tokens.collect();
-            let mut node_replacements = self.node_replacements.to_vec();
-            node_replacements.sort_by_key(|(range, _)| range.0.start);
+        match self {
+            LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
+            LazyAttrTokenStreamInner::Pending {
+                start_token,
+                cursor_snapshot,
+                num_calls,
+                break_last_token,
+                node_replacements,
+            } => {
+                // The token produced by the final call to `{,inlined_}next` was not
+                // actually consumed by the callback. The combination of chaining the
+                // initial token and using `take` produces the desired result - we
+                // produce an empty `TokenStream` if no calls were made, and omit the
+                // final token otherwise.
+                let mut cursor_snapshot = cursor_snapshot.clone();
+                let tokens = iter::once(FlatToken::Token(*start_token))
+                    .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
+                    .take(*num_calls as usize);
+
+                if node_replacements.is_empty() {
+                    make_attr_token_stream(tokens, *break_last_token)
+                } else {
+                    let mut tokens: Vec<_> = tokens.collect();
+                    let mut node_replacements = node_replacements.to_vec();
+                    node_replacements.sort_by_key(|(range, _)| range.0.start);
 
-            #[cfg(debug_assertions)]
-            for [(node_range, tokens), (next_node_range, next_tokens)] in
-                node_replacements.array_windows()
-            {
-                assert!(
-                    node_range.0.end <= next_node_range.0.start
-                        || node_range.0.end >= next_node_range.0.end,
-                    "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
-                    node_range,
-                    tokens,
-                    next_node_range,
-                    next_tokens,
-                );
-            }
+                    #[cfg(debug_assertions)]
+                    for [(node_range, tokens), (next_node_range, next_tokens)] in
+                        node_replacements.array_windows()
+                    {
+                        assert!(
+                            node_range.0.end <= next_node_range.0.start
+                                || node_range.0.end >= next_node_range.0.end,
+                            "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
+                            node_range,
+                            tokens,
+                            next_node_range,
+                            next_tokens,
+                        );
+                    }
 
-            // Process the replace ranges, starting from the highest start
-            // position and working our way back. If have tokens like:
-            //
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // Then we will generate replace ranges for both
-            // the `#[cfg(FALSE)] field: bool` and the entire
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // By starting processing from the replace range with the greatest
-            // start position, we ensure that any (outer) replace range which
-            // encloses another (inner) replace range will fully overwrite the
-            // inner range's replacement.
-            for (node_range, target) in node_replacements.into_iter().rev() {
-                assert!(
-                    !node_range.0.is_empty(),
-                    "Cannot replace an empty node range: {:?}",
-                    node_range.0
-                );
-
-                // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
-                // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
-                // total length of `tokens` constant throughout the replacement process, allowing
-                // us to do all replacements without adjusting indices.
-                let target_len = target.is_some() as usize;
-                tokens.splice(
-                    (node_range.0.start as usize)..(node_range.0.end as usize),
-                    target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
-                        iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len),
-                    ),
-                );
+                    // Process the replace ranges, starting from the highest start
+                    // position and working our way back. If have tokens like:
+                    //
+                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+                    //
+                    // Then we will generate replace ranges for both
+                    // the `#[cfg(FALSE)] field: bool` and the entire
+                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+                    //
+                    // By starting processing from the replace range with the greatest
+                    // start position, we ensure that any (outer) replace range which
+                    // encloses another (inner) replace range will fully overwrite the
+                    // inner range's replacement.
+                    for (node_range, target) in node_replacements.into_iter().rev() {
+                        assert!(
+                            !node_range.0.is_empty(),
+                            "Cannot replace an empty node range: {:?}",
+                            node_range.0
+                        );
+
+                        // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s,
+                        // plus enough `FlatToken::Empty`s to fill up the rest of the range. This
+                        // keeps the total length of `tokens` constant throughout the replacement
+                        // process, allowing us to do all replacements without adjusting indices.
+                        let target_len = target.is_some() as usize;
+                        tokens.splice(
+                            (node_range.0.start as usize)..(node_range.0.end as usize),
+                            target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
+                                iter::repeat(FlatToken::Empty)
+                                    .take(node_range.0.len() - target_len),
+                            ),
+                        );
+                    }
+                    make_attr_token_stream(tokens.into_iter(), *break_last_token)
+                }
             }
-            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
@@ -1011,6 +1034,7 @@ mod size_asserts {
     static_assert_size!(AttrTokenStream, 8);
     static_assert_size!(AttrTokenTree, 32);
     static_assert_size!(LazyAttrTokenStream, 8);
+    static_assert_size!(LazyAttrTokenStreamInner, 96);
     static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
     static_assert_size!(TokenStream, 8);
     static_assert_size!(TokenTree, 32);
diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs
index b9692c01e2c..874fccf7ff6 100644
--- a/compiler/rustc_attr_parsing/src/lib.rs
+++ b/compiler/rustc_attr_parsing/src/lib.rs
@@ -80,6 +80,7 @@
 #![cfg_attr(bootstrap, feature(let_chains))]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 #[macro_use]
diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs
index 70e817db2a6..c2f5bf0f457 100644
--- a/compiler/rustc_builtin_macros/src/lib.rs
+++ b/compiler/rustc_builtin_macros/src/lib.rs
@@ -18,6 +18,7 @@
 #![feature(rustdoc_internals)]
 #![feature(string_from_utf8_lossy_owned)]
 #![feature(try_blocks)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 extern crate proc_macro;
diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs
index c927aae2c4c..d799bb25c01 100644
--- a/compiler/rustc_codegen_ssa/src/lib.rs
+++ b/compiler/rustc_codegen_ssa/src/lib.rs
@@ -14,6 +14,7 @@
 #![feature(string_from_utf8_lossy_owned)]
 #![feature(trait_alias)]
 #![feature(try_blocks)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 //! This crate contains codegen code that is used by all codegen backends (LLVM and others).
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index d2e45d717d9..2df3281568b 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -162,7 +162,7 @@ pub(crate) fn attr_into_trace(mut attr: Attribute, trace_name: Symbol) -> Attrib
             let NormalAttr { item, tokens } = &mut **normal;
             item.path.segments[0].ident.name = trace_name;
             // This makes the trace attributes unobservable to token-based proc macros.
-            *tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::default()));
+            *tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::default()));
         }
         AttrKind::DocComment(..) => unreachable!(),
     }
 }
@@ -192,7 +192,7 @@ impl<'a> StripUnconfigured<'a> {
         if self.config_tokens {
             if let Some(Some(tokens)) = node.tokens_mut() {
                 let attr_stream = tokens.to_attr_token_stream();
-                *tokens = LazyAttrTokenStream::new(self.configure_tokens(&attr_stream));
+                *tokens = LazyAttrTokenStream::new_direct(self.configure_tokens(&attr_stream));
             }
         }
     }
@@ -223,7 +223,7 @@ impl<'a> StripUnconfigured<'a> {
                     target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
 
                     if self.in_cfg(&target.attrs) {
-                        target.tokens = LazyAttrTokenStream::new(
+                        target.tokens = LazyAttrTokenStream::new_direct(
                             self.configure_tokens(&target.tokens.to_attr_token_stream()),
                         );
                         Some(AttrTokenTree::AttrsTarget(target))
@@ -361,7 +361,7 @@ impl<'a> StripUnconfigured<'a> {
                 .to_attr_token_stream(),
         ));
 
-        let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
+        let tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::new(trees)));
         let attr = ast::attr::mk_attr_from_item(
             &self.sess.psess.attr_id_generator,
             item,
diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs
index 32064f96dd6..5533920aee4 100644
--- a/compiler/rustc_hir/src/lib.rs
+++ b/compiler/rustc_hir/src/lib.rs
@@ -14,6 +14,7 @@
 #![feature(never_type)]
 #![feature(rustc_attrs)]
 #![feature(variant_count)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 extern crate self as rustc_hir;
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index df025aeebf0..711036865bb 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -61,6 +61,7 @@
 #![feature(try_trait_v2_yeet)]
 #![feature(type_alias_impl_trait)]
 #![feature(yeet_expr)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 #[cfg(test)]
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 37204702bcb..d06922f1e04 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -11,6 +11,7 @@
 #![feature(if_let_guard)]
 #![feature(iter_intersperse)]
 #![feature(string_from_utf8_lossy_owned)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 use std::path::{Path, PathBuf};
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 912045d8835..835226a64af 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -3,8 +3,7 @@ use std::mem;
 
 use rustc_ast::token::Token;
 use rustc_ast::tokenstream::{
-    AttrsTarget, LazyAttrTokenStream, LazyAttrTokenStreamImpl, NodeRange, ParserRange, Spacing,
-    TokenCursor,
+    AttrsTarget, LazyAttrTokenStream, NodeRange, ParserRange, Spacing, TokenCursor,
 };
 use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_data_structures::fx::FxHashSet;
@@ -337,13 +336,13 @@ impl<'a> Parser<'a> {
         //   - `attrs`: includes the outer and the inner attr.
         //   - `tokens`: lazy tokens for `g` (with its inner attr deleted).
 
-        let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
-            start_token: collect_pos.start_token,
-            cursor_snapshot: collect_pos.cursor_snapshot,
+        let tokens = LazyAttrTokenStream::new_pending(
+            collect_pos.start_token,
+            collect_pos.cursor_snapshot,
             num_calls,
-            break_last_token: self.break_last_token,
+            self.break_last_token,
             node_replacements,
-        });
+        );
         let mut tokens_used = false;
 
         // If in "definite capture mode" we need to register a replace range
@@ -405,14 +404,3 @@ fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
         }
     })
 }
-
-// Some types are used a lot. Make sure they don't unintentionally get bigger.
-#[cfg(target_pointer_width = "64")]
-mod size_asserts {
-    use rustc_data_structures::static_assert_size;
-
-    use super::*;
-    // tidy-alphabetical-start
-    static_assert_size!(LazyAttrTokenStreamImpl, 96);
-    // tidy-alphabetical-end
-}
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 4a252a7b528..d2da3ac7d86 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -19,6 +19,7 @@
 #![feature(iter_intersperse)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
+#![recursion_limit = "256"]
 // tidy-alphabetical-end
 
 use std::cell::{Cell, RefCell};
-- 
cgit 1.4.1-3-g733a5
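The shape this patch leaves behind is a general one: a cheaply cloneable handle wrapping an `Arc` of a two-variant enum, where one variant is an already-built value and the other is the state needed to rebuild it on demand. A minimal sketch with simplified, non-compiler types (a `Vec<String>` stands in for `AttrTokenStream`, and a trivial counter replay stands in for the `TokenCursor` snapshot; all names here are illustrative only):

use std::sync::Arc;

// The handle: cloning only bumps the reference count.
#[derive(Clone)]
struct LazyStream(Arc<Inner>);

enum Inner {
    // The stream has already been produced.
    Direct(Vec<String>),
    // Enough state to replay production of the stream on demand.
    Pending { start: usize, num_calls: usize },
}

impl LazyStream {
    fn new_direct(stream: Vec<String>) -> Self {
        LazyStream(Arc::new(Inner::Direct(stream)))
    }

    fn new_pending(start: usize, num_calls: usize) -> Self {
        LazyStream(Arc::new(Inner::Pending { start, num_calls }))
    }

    // Materializing is a cheap clone for `Direct` and a replay for `Pending`.
    fn to_stream(&self) -> Vec<String> {
        match &*self.0 {
            Inner::Direct(stream) => stream.clone(),
            Inner::Pending { start, num_calls } => {
                (*start..*start + *num_calls).map(|i| format!("tok{i}")).collect()
            }
        }
    }
}

fn main() {
    let direct = LazyStream::new_direct(vec!["a".into(), "b".into()]);
    let pending = LazyStream::new_pending(3, 2);
    assert_eq!(direct.to_stream(), ["a", "b"]);
    assert_eq!(pending.to_stream(), ["tok3", "tok4"]);
}

Relative to the old `Arc<Box<dyn ToAttrTokenStream>>`, this removes one heap indirection plus the vtable, and the two constructors make both production paths explicit at call sites.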
From 880e6f716d741b4ef827d48e66c45c7887f82aa2 Mon Sep 17 00:00:00 2001
From: Nicholas Nethercote <n.nethercote@gmail.com>
Date: Tue, 29 Apr 2025 12:05:38 +1000
Subject: Use `ThinVec` to shrink `LazyAttrTokenStreamInner`.

---
 compiler/rustc_ast/src/tokenstream.rs           | 7 ++++---
 compiler/rustc_parse/src/parser/attr_wrapper.rs | 5 +++--
 2 files changed, 7 insertions(+), 5 deletions(-)

(limited to 'compiler/rustc_parse/src/parser/attr_wrapper.rs')

diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 4f352e20da2..636c26bcde0 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -23,6 +23,7 @@ use rustc_data_structures::sync;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_serialize::{Decodable, Encodable};
 use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
+use thin_vec::ThinVec;
 
 use crate::ast::AttrStyle;
 use crate::ast_traits::{HasAttrs, HasTokens};
@@ -122,7 +123,7 @@ impl LazyAttrTokenStream {
         cursor_snapshot: TokenCursor,
         num_calls: u32,
         break_last_token: u32,
-        node_replacements: Box<[NodeReplacement]>,
+        node_replacements: ThinVec<NodeReplacement>,
     ) -> LazyAttrTokenStream {
         LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
             start_token,
@@ -233,7 +234,7 @@ enum LazyAttrTokenStreamInner {
         cursor_snapshot: TokenCursor,
         num_calls: u32,
         break_last_token: u32,
-        node_replacements: Box<[NodeReplacement]>,
+        node_replacements: ThinVec<NodeReplacement>,
     },
 }
 
@@ -1034,7 +1035,7 @@ mod size_asserts {
     static_assert_size!(AttrTokenStream, 8);
     static_assert_size!(AttrTokenTree, 32);
     static_assert_size!(LazyAttrTokenStream, 8);
-    static_assert_size!(LazyAttrTokenStreamInner, 96);
+    static_assert_size!(LazyAttrTokenStreamInner, 88);
     static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
     static_assert_size!(TokenStream, 8);
     static_assert_size!(TokenTree, 32);
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 835226a64af..44fdf146f9c 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -10,6 +10,7 @@ use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::PResult;
 use rustc_session::parse::ParseSess;
 use rustc_span::{DUMMY_SP, sym};
+use thin_vec::ThinVec;
 
 use super::{Capturing, ForceCollect, Parser, Trailing};
 
@@ -294,10 +295,10 @@ impl<'a> Parser<'a> {
 
         // This is hot enough for `deep-vector` that checking the conditions for an empty iterator
        // is measurably faster than actually executing the iterator.
-        let node_replacements: Box<[_]> = if parser_replacements_start == parser_replacements_end
+        let node_replacements = if parser_replacements_start == parser_replacements_end
             && inner_attr_parser_replacements.is_empty()
         {
-            Box::new([])
+            ThinVec::new()
         } else {
             // Grab any replace ranges that occur *inside* the current AST node. Convert them
             // from `ParserRange` form to `NodeRange` form. We will perform the actual
-- 
cgit 1.4.1-3-g733a5
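Why swapping `Box<[NodeReplacement]>` for `ThinVec<NodeReplacement>` shrinks `LazyAttrTokenStreamInner` by exactly 8 bytes (96 down to 88 in the size assertion): `Box<[T]>` is a fat pointer (data pointer plus length, 16 bytes on 64-bit targets), while `ThinVec<T>` stores length and capacity inside its heap allocation, leaving a single-pointer handle. A small demonstration against the `thin-vec` crate (the crate rustc uses here); the exact sizes assume a 64-bit target:

use std::mem::size_of;

use thin_vec::{ThinVec, thin_vec};

fn main() {
    // Fat pointer: data pointer + element count.
    assert_eq!(size_of::<Box<[u32]>>(), 2 * size_of::<usize>());
    // Thin pointer: length and capacity live in the heap allocation.
    assert_eq!(size_of::<ThinVec<u32>>(), size_of::<usize>());

    // The empty case neither allocates nor stores a length inline.
    let empty: ThinVec<u32> = ThinVec::new();
    assert!(empty.is_empty());

    let v: ThinVec<u32> = thin_vec![1, 2, 3];
    println!("{v:?} in a {}-byte handle", size_of::<ThinVec<u32>>());
}

An empty `ThinVec` points at a shared static header and performs no allocation, which is what keeps the `ThinVec::new()` fast path in the patched `collect_tokens` code cheap.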