diff options
| author | Nicholas Nethercote <n.nethercote@gmail.com> | 2022-04-11 10:55:49 +1000 |
|---|---|---|
| committer | Nicholas Nethercote <n.nethercote@gmail.com> | 2022-04-14 09:01:23 +1000 |
| commit | 75fd391aaa3e21c7a1e9403f20eacf23c32aa7c7 (patch) | |
| tree | 7a73e84c3cba078c905047943b491b731638570e | |
| parent | 2657d8f7b3bbdebb7c6428b8d08279504fbfbc3f (diff) | |
| download | rust-75fd391aaa3e21c7a1e9403f20eacf23c32aa7c7.tar.gz rust-75fd391aaa3e21c7a1e9403f20eacf23c32aa7c7.zip | |
Introduce `TtHandle` and use it in `TokenSet`.
This removes the last use of `<mbe::TokenTree as Clone>`. It also removes two trivial methods on `Delimited`.
| -rw-r--r-- | compiler/rustc_expand/src/mbe.rs | 12 | ||||
| -rw-r--r-- | compiler/rustc_expand/src/mbe/macro_parser.rs | 7 | ||||
| -rw-r--r-- | compiler/rustc_expand/src/mbe/macro_rules.rs | 145 |
3 files changed, 111 insertions(+), 53 deletions(-)
diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs index f78f8460dcf..845d1ff5b51 100644 --- a/compiler/rustc_expand/src/mbe.rs +++ b/compiler/rustc_expand/src/mbe.rs @@ -26,18 +26,6 @@ struct Delimited { tts: Vec<TokenTree>, } -impl Delimited { - /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter. - fn open_tt(&self, span: DelimSpan) -> TokenTree { - TokenTree::token(token::OpenDelim(self.delim), span.open) - } - - /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. - fn close_tt(&self, span: DelimSpan) -> TokenTree { - TokenTree::token(token::CloseDelim(self.delim), span.close) - } -} - #[derive(PartialEq, Encodable, Decodable, Debug)] struct SequenceRepetition { /// The sequence of token trees diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs index b5f56d7d6dc..74b8450f756 100644 --- a/compiler/rustc_expand/src/mbe/macro_parser.rs +++ b/compiler/rustc_expand/src/mbe/macro_parser.rs @@ -142,10 +142,13 @@ pub(super) fn compute_locs(sess: &ParseSess, matcher: &[TokenTree]) -> Vec<Match locs.push(MatcherLoc::Token { token: token.clone() }); } TokenTree::Delimited(span, delimited) => { + let open_token = Token::new(token::OpenDelim(delimited.delim), span.open); + let close_token = Token::new(token::CloseDelim(delimited.delim), span.close); + locs.push(MatcherLoc::Delimited); - inner(sess, &[delimited.open_tt(*span)], locs, next_metavar, seq_depth); + locs.push(MatcherLoc::Token { token: open_token }); inner(sess, &delimited.tts, locs, next_metavar, seq_depth); - inner(sess, &[delimited.close_tt(*span)], locs, next_metavar, seq_depth); + locs.push(MatcherLoc::Token { token: close_token }); } TokenTree::Sequence(_, seq) => { // We can't determine `idx_first_after` and construct the final diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 
c9a06728856..f9d24eeadd0 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -8,7 +8,7 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc}; use crate::mbe::transcribe::transcribe; use rustc_ast as ast; -use rustc_ast::token::{self, NonterminalKind, Token, TokenKind::*}; +use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{NodeId, DUMMY_NODE_ID}; use rustc_ast_pretty::pprust; @@ -658,18 +658,18 @@ fn check_matcher( // that do not try to inject artificial span information. My plan is // to try to catch such cases ahead of time and not include them in // the precomputed mapping.) -struct FirstSets { +struct FirstSets<'tt> { // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its // span in the original matcher to the First set for the inner sequence `tt ...`. // // If two sequences have the same span in a matcher, then map that // span to None (invalidating the mapping here and forcing the code to // use a slow path). - first: FxHashMap<Span, Option<TokenSet>>, + first: FxHashMap<Span, Option<TokenSet<'tt>>>, } -impl FirstSets { - fn new(tts: &[mbe::TokenTree]) -> FirstSets { +impl<'tt> FirstSets<'tt> { + fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> { use mbe::TokenTree; let mut sets = FirstSets { first: FxHashMap::default() }; @@ -679,7 +679,7 @@ impl FirstSets { // walks backward over `tts`, returning the FIRST for `tts` // and updating `sets` at the same time for all sequence // substructure we find within `tts`. - fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet { + fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> { let mut first = TokenSet::empty(); for tt in tts.iter().rev() { match *tt { @@ -687,11 +687,14 @@ impl FirstSets { | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) 
| TokenTree::MetaVarExpr(..) => { - first.replace_with(tt.clone()); + first.replace_with(TtHandle::TtRef(tt)); } TokenTree::Delimited(span, ref delimited) => { build_recur(sets, &delimited.tts); - first.replace_with(delimited.open_tt(span)); + first.replace_with(TtHandle::from_token_kind( + token::OpenDelim(delimited.delim), + span.open, + )); } TokenTree::Sequence(sp, ref seq_rep) => { let subfirst = build_recur(sets, &seq_rep.tts); @@ -715,7 +718,7 @@ impl FirstSets { // token could be the separator token itself. if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sep.clone())); + first.add_one_maybe(TtHandle::from_token(sep.clone())); } // Reverse scan: Sequence comes before `first`. @@ -741,7 +744,7 @@ impl FirstSets { // walks forward over `tts` until all potential FIRST tokens are // identified. - fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet { + fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> { use mbe::TokenTree; let mut first = TokenSet::empty(); @@ -752,11 +755,14 @@ impl FirstSets { | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) | TokenTree::MetaVarExpr(..) => { - first.add_one(tt.clone()); + first.add_one(TtHandle::TtRef(tt)); return first; } TokenTree::Delimited(span, ref delimited) => { - first.add_one(delimited.open_tt(span)); + first.add_one(TtHandle::from_token_kind( + token::OpenDelim(delimited.delim), + span.open, + )); return first; } TokenTree::Sequence(sp, ref seq_rep) => { @@ -775,7 +781,7 @@ impl FirstSets { // If the sequence contents can be empty, then the first // token could be the separator token itself. 
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sep.clone())); + first.add_one_maybe(TtHandle::from_token(sep.clone())); } assert!(first.maybe_empty); @@ -803,6 +809,62 @@ impl FirstSets { } } +// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined +// implicitly, such as opening/closing delimiters and sequence repetition ops. +// This type encapsulates both kinds. It implements `Clone` while avoiding the +// need for `mbe::TokenTree` to implement `Clone`. +#[derive(Debug)] +enum TtHandle<'tt> { + /// This is used in most cases. + TtRef(&'tt mbe::TokenTree), + + /// This is only used for implicit token trees. The `mbe::TokenTree` *must* + /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an + /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a + /// `&mbe::TokenTree`. + Token(mbe::TokenTree), +} + +impl<'tt> TtHandle<'tt> { + fn from_token(tok: Token) -> Self { + TtHandle::Token(mbe::TokenTree::Token(tok)) + } + + fn from_token_kind(kind: TokenKind, span: Span) -> Self { + TtHandle::from_token(Token::new(kind, span)) + } + + // Get a reference to a token tree. + fn get(&'tt self) -> &'tt mbe::TokenTree { + match self { + TtHandle::TtRef(tt) => tt, + TtHandle::Token(token_tt) => &token_tt, + } + } +} + +impl<'tt> PartialEq for TtHandle<'tt> { + fn eq(&self, other: &TtHandle<'tt>) -> bool { + self.get() == other.get() + } +} + +impl<'tt> Clone for TtHandle<'tt> { + fn clone(&self) -> Self { + match self { + TtHandle::TtRef(tt) => TtHandle::TtRef(tt), + + // This variant *must* contain a `mbe::TokenTree::Token`, and not + // any other variant of `mbe::TokenTree`. + TtHandle::Token(mbe::TokenTree::Token(tok)) => { + TtHandle::Token(mbe::TokenTree::Token(tok.clone())) + } + + _ => unreachable!(), + } + } +} + // A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s // (for macro-by-example syntactic variables). 
It also carries the // `maybe_empty` flag; that is true if and only if the matcher can @@ -814,12 +876,12 @@ impl FirstSets { // // (Notably, we must allow for *-op to occur zero times.) #[derive(Clone, Debug)] -struct TokenSet { - tokens: Vec<mbe::TokenTree>, +struct TokenSet<'tt> { + tokens: Vec<TtHandle<'tt>>, maybe_empty: bool, } -impl TokenSet { +impl<'tt> TokenSet<'tt> { // Returns a set for the empty sequence. fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } @@ -827,15 +889,15 @@ impl TokenSet { // Returns the set `{ tok }` for the single-token (and thus // non-empty) sequence [tok]. - fn singleton(tok: mbe::TokenTree) -> Self { - TokenSet { tokens: vec![tok], maybe_empty: false } + fn singleton(tt: TtHandle<'tt>) -> Self { + TokenSet { tokens: vec![tt], maybe_empty: false } } // Changes self to be the set `{ tok }`. // Since `tok` is always present, marks self as non-empty. - fn replace_with(&mut self, tok: mbe::TokenTree) { + fn replace_with(&mut self, tt: TtHandle<'tt>) { self.tokens.clear(); - self.tokens.push(tok); + self.tokens.push(tt); self.maybe_empty = false; } @@ -848,17 +910,17 @@ impl TokenSet { } // Adds `tok` to the set for `self`, marking sequence as non-empy. - fn add_one(&mut self, tok: mbe::TokenTree) { - if !self.tokens.contains(&tok) { - self.tokens.push(tok); + fn add_one(&mut self, tt: TtHandle<'tt>) { + if !self.tokens.contains(&tt) { + self.tokens.push(tt); } self.maybe_empty = false; } // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.) - fn add_one_maybe(&mut self, tok: mbe::TokenTree) { - if !self.tokens.contains(&tok) { - self.tokens.push(tok); + fn add_one_maybe(&mut self, tt: TtHandle<'tt>) { + if !self.tokens.contains(&tt) { + self.tokens.push(tt); } } @@ -870,9 +932,9 @@ impl TokenSet { // setting of the empty flag of `self`. If `other` is guaranteed // non-empty, then `self` is marked non-empty. 
fn add_all(&mut self, other: &Self) { - for tok in &other.tokens { - if !self.tokens.contains(tok) { - self.tokens.push(tok.clone()); + for tt in &other.tokens { + if !self.tokens.contains(tt) { + self.tokens.push(tt.clone()); } } if !other.maybe_empty { @@ -892,14 +954,14 @@ impl TokenSet { // // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. -fn check_matcher_core( +fn check_matcher_core<'tt>( sess: &ParseSess, features: &Features, def: &ast::Item, - first_sets: &FirstSets, - matcher: &[mbe::TokenTree], - follow: &TokenSet, -) -> TokenSet { + first_sets: &FirstSets<'tt>, + matcher: &'tt [mbe::TokenTree], + follow: &TokenSet<'tt>, +) -> TokenSet<'tt> { use mbe::TokenTree; let mut last = TokenSet::empty(); @@ -938,12 +1000,15 @@ fn check_matcher_core( // followed by anything against SUFFIX. continue 'each_token; } else { - last.replace_with(token.clone()); + last.replace_with(TtHandle::TtRef(token)); suffix_first = build_suffix_first(); } } TokenTree::Delimited(span, ref d) => { - let my_suffix = TokenSet::singleton(d.close_tt(span)); + let my_suffix = TokenSet::singleton(TtHandle::from_token_kind( + token::CloseDelim(d.delim), + span.close, + )); check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix); // don't track non NT tokens last.replace_with_irrelevant(); @@ -967,7 +1032,7 @@ fn check_matcher_core( let mut new; let my_suffix = if let Some(sep) = &seq_rep.separator { new = suffix_first.clone(); - new.add_one_maybe(TokenTree::Token(sep.clone())); + new.add_one_maybe(TtHandle::from_token(sep.clone())); &new } else { &suffix_first @@ -994,9 +1059,11 @@ fn check_matcher_core( // Now `last` holds the complete set of NT tokens that could // end the sequence before SUFFIX. Check that every one works with `suffix`. 
- for token in &last.tokens { - if let TokenTree::MetaVarDecl(span, name, Some(kind)) = *token { + for tt in &last.tokens { + if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() { for next_token in &suffix_first.tokens { + let next_token = next_token.get(); + // Check if the old pat is used and the next token is `|` // to warn about incompatibility with Rust 2021. // We only emit this lint if we're parsing the original |
