| author | Nicholas Nethercote <n.nethercote@gmail.com> | 2024-06-27 10:42:46 +1000 |
|---|---|---|
| committer | Nicholas Nethercote <n.nethercote@gmail.com> | 2024-07-02 10:46:44 +1000 |
| commit | f852568fa601171f20f924a50478c33fd2661fba (patch) | |
| tree | 3bb7e4906cea418ac2b22ca627199e1e2bd4289c /compiler/rustc_ast/src | |
| parent | d6c0b8117e4ccccd83c4a6e70eee8b12c51d1a18 (diff) | |
| download | rust-f852568fa601171f20f924a50478c33fd2661fba.tar.gz rust-f852568fa601171f20f924a50478c33fd2661fba.zip | |
Change `AttrTokenStream::to_tokenstream` to `to_token_trees`.
I.e. change the return type from `TokenStream` to `Vec<TokenTree>`. Most call sites require a `TokenStream`, but the recursive call used to create `target_tokens` requires a `Vec<TokenTree>`. It's easy to convert a `Vec<TokenTree>` to a `TokenStream` (just call `TokenStream::new`), but harder to convert a `TokenStream` to a `Vec<TokenTree>` (either iterate/clone/collect, or use `Lrc::into_inner` if appropriate). So this commit changes the return type to simplify that `target_tokens` call site.
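
To make the asymmetry concrete, here is a minimal sketch using simplified stand-in types rather than the real `rustc_ast` definitions (the real `TokenStream` wraps its trees in `Lrc`, approximated below with `std::sync::Arc`):

```rust
use std::sync::Arc;

// Simplified stand-ins, not the real rustc_ast types: the real `TokenStream`
// wraps its token trees in `Lrc` (an `Arc`/`Rc` alias), which is what makes
// the reverse conversion awkward.
#[derive(Clone, Debug)]
struct TokenTree;

#[derive(Clone, Debug)]
struct TokenStream(Arc<Vec<TokenTree>>);

impl TokenStream {
    fn new(trees: Vec<TokenTree>) -> TokenStream {
        TokenStream(Arc::new(trees))
    }
}

fn main() {
    // Vec<TokenTree> -> TokenStream: trivial, just wrap it.
    let stream = TokenStream::new(vec![TokenTree, TokenTree]);

    // TokenStream -> Vec<TokenTree>, option 1: iterate/clone/collect.
    let cloned: Vec<TokenTree> = stream.0.iter().cloned().collect();

    // Option 2: take the inner Vec back out, which only succeeds when this
    // is the last reference to it (`Arc::into_inner` returns `None` otherwise).
    let owned: Option<Vec<TokenTree>> = Arc::into_inner(stream.0);

    println!("cloned {} trees; owned recovered: {}", cloned.len(), owned.is_some());
}
```

Since the cheap direction is wrapping with `new`, returning `Vec<TokenTree>` lets the one call site that needs trees use them directly, while the others just wrap the result.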
Diffstat (limited to 'compiler/rustc_ast/src')
| -rw-r--r-- | compiler/rustc_ast/src/attr/mod.rs | 14 |
| -rw-r--r-- | compiler/rustc_ast/src/tokenstream.rs | 23 |
2 files changed, 15 insertions, 22 deletions
```diff
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 593c78df3cd..65f1b5dbaf5 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -204,12 +204,14 @@ impl Attribute {
 
     pub fn tokens(&self) -> TokenStream {
         match &self.kind {
-            AttrKind::Normal(normal) => normal
-                .tokens
-                .as_ref()
-                .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
-                .to_attr_token_stream()
-                .to_tokenstream(),
+            AttrKind::Normal(normal) => TokenStream::new(
+                normal
+                    .tokens
+                    .as_ref()
+                    .unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
+                    .to_attr_token_stream()
+                    .to_token_trees(),
+            ),
             &AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
                 token::DocComment(comment_kind, self.style, data),
                 self.span,
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index b4ddbe20689..5b2d673316a 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -180,14 +180,13 @@ impl AttrTokenStream {
         AttrTokenStream(Lrc::new(tokens))
     }
 
-    /// Converts this `AttrTokenStream` to a plain `TokenStream`.
+    /// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
     /// During conversion, `AttrTokenTree::Attributes` get 'flattened'
     /// back to a `TokenStream` of the form `outer_attr attr_target`.
     /// If there are inner attributes, they are inserted into the proper
     /// place in the attribute target tokens.
-    pub fn to_tokenstream(&self) -> TokenStream {
-        let trees: Vec<_> = self
-            .0
+    pub fn to_token_trees(&self) -> Vec<TokenTree> {
+        self.0
             .iter()
             .flat_map(|tree| match &tree {
                 AttrTokenTree::Token(inner, spacing) => {
@@ -198,7 +197,7 @@ impl AttrTokenStream {
                         *span,
                         *spacing,
                         *delim,
-                        stream.to_tokenstream()
+                        TokenStream::new(stream.to_token_trees())
                     ),]
                     .into_iter()
                 }
@@ -208,14 +207,7 @@ impl AttrTokenStream {
                         .partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
                     let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);
 
-                    let mut target_tokens: Vec<_> = data
-                        .tokens
-                        .to_attr_token_stream()
-                        .to_tokenstream()
-                        .0
-                        .iter()
-                        .cloned()
-                        .collect();
+                    let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
                     if !inner_attrs.is_empty() {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
@@ -260,8 +252,7 @@ impl AttrTokenStream {
                     flat.into_iter()
                 }
             })
-            .collect();
-        TokenStream::new(trees)
+            .collect()
     }
 }
 
@@ -461,7 +452,7 @@ impl TokenStream {
                 AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
            AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
        };
-        attr_stream.to_tokenstream()
+        TokenStream::new(attr_stream.to_token_trees())
     }
 
     pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
```
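
As a reading aid for the doc comment changed above, the "flattened" form `outer_attr attr_target` can be pictured on a concrete item: outer attributes are emitted in front of the target's tokens, and inner attributes are spliced back inside the target's delimiters. The item below is purely illustrative, not taken from the commit:

```rust
// After flattening, the token trees for this item come out as the outer
// attribute's tokens first, then the `mod m { ... }` tokens, with the inner
// attribute's tokens inserted just inside the braces (the "proper place"
// the doc comment mentions).
#[allow(unused)]
mod m {
    #![allow(dead_code)]
    fn f() {}
}

fn main() {}
```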
