diff options
| author | Nika Layzell <nika@thelayzells.com> | 2022-05-15 13:46:33 -0400 |
|---|---|---|
| committer | Nika Layzell <nika@thelayzells.com> | 2022-06-17 00:42:26 -0400 |
| commit | 4d45af9e734ed0e2350290b4705d7931f70349d4 (patch) | |
| tree | 982e7588463ee54a8acb9a10fced1e3db70fdd37 | |
| parent | 0a049fd30d564d1cbc2d60398de848612a6c8125 (diff) | |
| download | rust-4d45af9e734ed0e2350290b4705d7931f70349d4.tar.gz rust-4d45af9e734ed0e2350290b4705d7931f70349d4.zip | |
Try to reduce codegen complexity of TokenStream's FromIterator and Extend impls
This is an experimental patch to try to reduce the codegen complexity of TokenStream's FromIterator and Extend implementations for downstream crates, by moving the core logic into a helper type. This might help improve build performance of crates which depend on proc_macro, as iterators are used less, and the compiler may take less time to do things like attempt specializations or other iterator optimizations. The change intentionally sacrifices some optimization opportunities, such as using the specializations for collecting iterators derived from Vec::into_iter() into Vec. This is one of the simpler potential approaches to reducing the amount of code generated in crates depending on proc_macro, so it seems worth trying before other more-involved changes.
| -rw-r--r-- | compiler/rustc_expand/src/proc_macro_server.rs | 4 | ||||
| -rw-r--r-- | library/proc_macro/src/lib.rs | 108 |
2 files changed, 94 insertions(+), 18 deletions(-)
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 8b6d5bcd935..cc66eefac3e 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -502,8 +502,8 @@ impl server::TokenStream for Rustc<'_, '_> { &mut self, stream: Self::TokenStream, ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> { - // XXX: This is a raw port of the previous approach, and can probably be - // optimized. + // FIXME: This is a raw port of the previous approach, and can probably + // be optimized. let mut cursor = stream.into_trees(); let mut stack = Vec::new(); let mut tts = Vec::new(); diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index c21f365391c..6e645216c8d 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -233,14 +233,90 @@ impl From<TokenTree> for TokenStream { } } +/// Non-generic helper for implementing `FromIterator<TokenTree>` and +/// `Extend<TokenTree>` with less monomorphization in calling crates. 
+struct ExtendStreamWithTreesHelper { + trees: Vec< + bridge::TokenTree< + bridge::client::Group, + bridge::client::Punct, + bridge::client::Ident, + bridge::client::Literal, + >, + >, +} + +impl ExtendStreamWithTreesHelper { + fn new(capacity: usize) -> Self { + ExtendStreamWithTreesHelper { trees: Vec::with_capacity(capacity) } + } + + fn push(&mut self, tree: TokenTree) { + self.trees.push(tree_to_bridge_tree(tree)); + } + + fn build(self) -> TokenStream { + if self.trees.is_empty() { + TokenStream(None) + } else { + TokenStream(Some(bridge::client::TokenStream::concat_trees(None, self.trees))) + } + } + + fn extend(self, stream: &mut TokenStream) { + if self.trees.is_empty() { + return; + } + stream.0 = Some(bridge::client::TokenStream::concat_trees(stream.0.take(), self.trees)) + } +} + +/// Non-generic helper for implementing `FromIterator<TokenStream>` and +/// `Extend<TokenStream>` with less monomorphization in calling crates. +struct ExtendStreamWithStreamsHelper { + streams: Vec<bridge::client::TokenStream>, +} + +impl ExtendStreamWithStreamsHelper { + fn new(capacity: usize) -> Self { + ExtendStreamWithStreamsHelper { streams: Vec::with_capacity(capacity) } + } + + fn push(&mut self, stream: TokenStream) { + if let Some(stream) = stream.0 { + self.streams.push(stream); + } + } + + fn build(mut self) -> TokenStream { + if self.streams.len() <= 1 { + TokenStream(self.streams.pop()) + } else { + TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams))) + } + } + + fn extend(mut self, stream: &mut TokenStream) { + if self.streams.is_empty() { + return; + } + let base = stream.0.take(); + if base.is_none() && self.streams.len() == 1 { + stream.0 = self.streams.pop(); + } else { + stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams)); + } + } +} + /// Collects a number of token trees into a single stream. 
#[stable(feature = "proc_macro_lib2", since = "1.29.0")] impl iter::FromIterator<TokenTree> for TokenStream { fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self { - TokenStream(Some(bridge::client::TokenStream::concat_trees( - None, - trees.into_iter().map(tree_to_bridge_tree).collect(), - ))) + let iter = trees.into_iter(); + let mut builder = ExtendStreamWithTreesHelper::new(iter.size_hint().0); + iter.for_each(|tree| builder.push(tree)); + builder.build() } } @@ -249,30 +325,30 @@ impl iter::FromIterator<TokenTree> for TokenStream { #[stable(feature = "proc_macro_lib", since = "1.15.0")] impl iter::FromIterator<TokenStream> for TokenStream { fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self { - TokenStream(Some(bridge::client::TokenStream::concat_streams( - None, - streams.into_iter().filter_map(|stream| stream.0).collect(), - ))) + let iter = streams.into_iter(); + let mut builder = ExtendStreamWithStreamsHelper::new(iter.size_hint().0); + iter.for_each(|stream| builder.push(stream)); + builder.build() } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend<TokenTree> for TokenStream { fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) { - *self = TokenStream(Some(bridge::client::TokenStream::concat_trees( - self.0.take(), - trees.into_iter().map(|tree| tree_to_bridge_tree(tree)).collect(), - ))); + let iter = trees.into_iter(); + let mut builder = ExtendStreamWithTreesHelper::new(iter.size_hint().0); + iter.for_each(|tree| builder.push(tree)); + builder.extend(self); } } #[stable(feature = "token_stream_extend", since = "1.30.0")] impl Extend<TokenStream> for TokenStream { fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { - *self = TokenStream(Some(bridge::client::TokenStream::concat_streams( - self.0.take(), - streams.into_iter().filter_map(|stream| stream.0).collect(), - ))); + let iter = streams.into_iter(); + let mut builder = 
ExtendStreamWithStreamsHelper::new(iter.size_hint().0); + iter.for_each(|stream| builder.push(stream)); + builder.extend(self); } } |
