| field | value | date |
|---|---|---|
| author | Nicholas Nethercote <n.nethercote@gmail.com> | 2025-08-25 07:29:54 +1000 |
| committer | Nicholas Nethercote <n.nethercote@gmail.com> | 2025-08-25 08:02:52 +1000 |
| commit | a06c3887bd1ce85aeef491eef42b3a3d774413bc (patch) | |
| tree | afb5818d4318cd9f1e3c450e4f8ba5765cac3d1c /compiler/rustc_parse/src | |
| parent | 3776358beb5747d938bdefaf47a1c76723e6a372 (diff) | |
Remove the lifetime from `ExpTokenPair`/`SeqSep`.
`TokenKind` now impls `Copy`, so we can store it directly rather than by reference.
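
For illustration, here is a minimal, self-contained sketch of the pattern this change applies; `Kind` and `Pair` are hypothetical stand-ins for `TokenKind` and `ExpTokenPair`, not the real compiler types:

```rust
// Stand-in for `TokenKind`: once the type is `Copy`, it is cheap to store by value.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Kind {
    Comma,
    Semi,
}

// Before the change, the pair had to borrow its token and carry a lifetime:
//
//     struct Pair<'a> { tok: &'a Kind, /* ... */ }
//
// After the change it owns the value, so the lifetime parameter (and the
// `<'_>` noise at every use site) disappears.
#[derive(Clone, Copy, Debug)]
struct Pair {
    tok: Kind,
}

fn main() {
    let comma = Pair { tok: Kind::Comma };
    // Comparisons are now by value rather than through a reference,
    // e.g. `exp.tok == t.kind` instead of `exp.tok == &t.kind`.
    assert_eq!(comma.tok, Kind::Comma);
    assert_ne!(comma.tok, Kind::Semi);
}
```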
Diffstat (limited to 'compiler/rustc_parse/src')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/parser/diagnostics.rs | 18 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 8 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/item.rs | 4 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 62 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/token_type.rs | 6 |
5 files changed, 49 insertions, 49 deletions
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 220e4ac18fc..a28af7833c3 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -463,8 +463,8 @@ impl<'a> Parser<'a> {
     pub(super) fn expected_one_of_not_found(
         &mut self,
-        edible: &[ExpTokenPair<'_>],
-        inedible: &[ExpTokenPair<'_>],
+        edible: &[ExpTokenPair],
+        inedible: &[ExpTokenPair],
     ) -> PResult<'a, ErrorGuaranteed> {
         debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
         fn tokens_to_string(tokens: &[TokenType]) -> String {
@@ -1092,7 +1092,7 @@ impl<'a> Parser<'a> {
     /// Eats and discards tokens until one of `closes` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) {
+    pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair]) {
         if let Err(err) = self
             .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
         {
@@ -1113,7 +1113,7 @@ impl<'a> Parser<'a> {
     pub(super) fn check_trailing_angle_brackets(
         &mut self,
         segment: &PathSegment,
-        end: &[ExpTokenPair<'_>],
+        end: &[ExpTokenPair],
     ) -> Option<ErrorGuaranteed> {
         if !self.may_recover() {
             return None;
         }
@@ -1196,7 +1196,7 @@ impl<'a> Parser<'a> {
         // second case.
         if self.look_ahead(position, |t| {
             trace!("check_trailing_angle_brackets: t={:?}", t);
-            end.iter().any(|exp| exp.tok == &t.kind)
+            end.iter().any(|exp| exp.tok == t.kind)
         }) {
             // Eat from where we started until the end token so that parsing can continue
             // as if we didn't have those extra angle brackets.
@@ -2120,8 +2120,8 @@ impl<'a> Parser<'a> {
     pub(super) fn recover_seq_parse_error(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
         lo: Span,
         err: Diag<'a>,
     ) -> Box<Expr> {
@@ -2386,8 +2386,8 @@ impl<'a> Parser<'a> {
     pub(super) fn consume_block(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
         consume_close: ConsumeClosingDelim,
     ) {
         let mut brace_depth = 0;
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 1499808966c..f0f58e901a9 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1598,7 +1598,7 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr)
     }
-    fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, Box<Expr>> {
+    fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair) -> PResult<'a, Box<Expr>> {
         let lo = self.token.span;
         self.bump(); // `[` or other open delim
@@ -3661,7 +3661,7 @@ impl<'a> Parser<'a> {
         &mut self,
         pth: ast::Path,
         recover: bool,
-        close: ExpTokenPair<'_>,
+        close: ExpTokenPair,
     ) -> PResult<
         'a,
         (
@@ -3680,8 +3680,8 @@ impl<'a> Parser<'a> {
             errors::HelpUseLatestEdition::new().add_to_diag(e);
         };
-        while self.token != *close.tok {
-            if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) {
+        while self.token != close.tok {
+            if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(&close.tok) {
                 let exp_span = self.prev_token.span;
                 // We permit `.. }` on the left-hand side of a destructuring assignment.
                 if self.check(close) {
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index fd9fb65417c..eb264f59fed 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -54,7 +54,7 @@ impl<'a> Parser<'a> {
     /// - `}` for mod items
     pub fn parse_mod(
         &mut self,
-        term: ExpTokenPair<'_>,
+        term: ExpTokenPair,
     ) -> PResult<'a, (AttrVec, ThinVec<Box<Item>>, ModSpans)> {
         let lo = self.token.span;
         let attrs = self.parse_inner_attributes()?;
@@ -1201,7 +1201,7 @@ impl<'a> Parser<'a> {
         }?;
         let dash = exp!(Minus);
-        if self.token != *dash.tok {
+        if self.token != dash.tok {
             return Ok(ident);
         }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index d19114df812..15598f32429 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -261,19 +261,19 @@ struct CaptureState {
 /// A sequence separator.
 #[derive(Debug)]
-struct SeqSep<'a> {
+struct SeqSep {
     /// The separator token.
-    sep: Option<ExpTokenPair<'a>>,
+    sep: Option<ExpTokenPair>,
     /// `true` if a trailing separator is allowed.
     trailing_sep_allowed: bool,
 }
-impl<'a> SeqSep<'a> {
-    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
+impl SeqSep {
+    fn trailing_allowed(sep: ExpTokenPair) -> SeqSep {
         SeqSep { sep: Some(sep), trailing_sep_allowed: true }
     }
-    fn none() -> SeqSep<'a> {
+    fn none() -> SeqSep {
         SeqSep { sep: None, trailing_sep_allowed: false }
     }
 }
@@ -425,13 +425,13 @@ impl<'a> Parser<'a> {
     }
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
+    pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> {
         if self.expected_token_types.is_empty() {
-            if self.token == *exp.tok {
+            if self.token == exp.tok {
                 self.bump();
                 Ok(Recovered::No)
             } else {
-                self.unexpected_try_recover(exp.tok)
+                self.unexpected_try_recover(&exp.tok)
             }
         } else {
             self.expect_one_of(slice::from_ref(&exp), &[])
@@ -443,13 +443,13 @@ impl<'a> Parser<'a> {
     /// anything. Signal a fatal error if next token is unexpected.
     fn expect_one_of(
         &mut self,
-        edible: &[ExpTokenPair<'_>],
-        inedible: &[ExpTokenPair<'_>],
+        edible: &[ExpTokenPair],
+        inedible: &[ExpTokenPair],
     ) -> PResult<'a, Recovered> {
-        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
+        if edible.iter().any(|exp| exp.tok == self.token.kind) {
             self.bump();
             Ok(Recovered::No)
-        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
+        } else if inedible.iter().any(|exp| exp.tok == self.token.kind) {
             // leave it in the input
             Ok(Recovered::No)
         } else if self.token != token::Eof
@@ -494,8 +494,8 @@ impl<'a> Parser<'a> {
     /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
     /// encountered.
     #[inline]
-    pub fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
-        let is_present = self.token == *exp.tok;
+    pub fn check(&mut self, exp: ExpTokenPair) -> bool {
+        let is_present = self.token == exp.tok;
         if !is_present {
             self.expected_token_types.insert(exp.token_type);
         }
@@ -542,7 +542,7 @@ impl<'a> Parser<'a> {
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     #[inline]
     #[must_use]
-    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
+    pub fn eat(&mut self, exp: ExpTokenPair) -> bool {
         let is_present = self.check(exp);
         if is_present {
             self.bump()
@@ -745,13 +745,13 @@ impl<'a> Parser<'a> {
     /// Eats the expected token if it's present possibly breaking
     /// compound tokens like multi-character operators in process.
     /// Returns `true` if the token was eaten.
-    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
-        if self.token == *exp.tok {
+    fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool {
+        if self.token == exp.tok {
             self.bump();
             return true;
         }
         match self.token.kind.break_two_token_op(1) {
-            Some((first, second)) if first == *exp.tok => {
+            Some((first, second)) if first == exp.tok => {
                 let first_span = self.psess.source_map().start_point(self.token.span);
                 let second_span = self.token.span.with_lo(first_span.hi());
                 self.token = Token::new(first, first_span);
@@ -826,7 +826,7 @@ impl<'a> Parser<'a> {
     /// Checks if the next token is contained within `closes`, and returns `true` if so.
     fn expect_any_with_type(
         &mut self,
-        closes_expected: &[ExpTokenPair<'_>],
+        closes_expected: &[ExpTokenPair],
         closes_not_expected: &[&TokenKind],
     ) -> bool {
         closes_expected.iter().any(|&close| self.check(close))
@@ -838,9 +838,9 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_tokens<T>(
         &mut self,
-        closes_expected: &[ExpTokenPair<'_>],
+        closes_expected: &[ExpTokenPair],
         closes_not_expected: &[&TokenKind],
-        sep: SeqSep<'_>,
+        sep: SeqSep,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         let mut first = true;
@@ -869,7 +869,7 @@ impl<'a> Parser<'a> {
                         }
                         Err(mut expect_err) => {
                             let sp = self.prev_token.span.shrink_to_hi();
-                            let token_str = pprust::token_kind_to_string(exp.tok);
+                            let token_str = pprust::token_kind_to_string(&exp.tok);
                             match self.current_closure.take() {
                                 Some(closure_spans) if self.token == TokenKind::Semi => {
@@ -1039,8 +1039,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_end<T>(
         &mut self,
-        close: ExpTokenPair<'_>,
-        sep: SeqSep<'_>,
+        close: ExpTokenPair,
+        sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         self.parse_seq_to_before_tokens(&[close], &[], sep, f)
@@ -1051,8 +1051,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_end<T>(
         &mut self,
-        close: ExpTokenPair<'_>,
-        sep: SeqSep<'_>,
+        close: ExpTokenPair,
+        sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
@@ -1070,9 +1070,9 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_unspanned_seq<T>(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
-        sep: SeqSep<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
+        sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.expect(open)?;
@@ -1084,8 +1084,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_delim_comma_seq<T>(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs
index b91548196a3..bd4bb368df0 100644
--- a/compiler/rustc_parse/src/parser/token_type.rs
+++ b/compiler/rustc_parse/src/parser/token_type.rs
@@ -416,8 +416,8 @@ impl TokenType {
 /// is always by used those methods. The second field is only used when the
 /// first field doesn't match.
 #[derive(Clone, Copy, Debug)]
-pub struct ExpTokenPair<'a> {
-    pub tok: &'a TokenKind,
+pub struct ExpTokenPair {
+    pub tok: TokenKind,
     pub token_type: TokenType,
 }
@@ -444,7 +444,7 @@ macro_rules! exp {
     // `ExpTokenPair` helper rules.
     (@tok, $tok:ident) => {
         $crate::parser::token_type::ExpTokenPair {
-            tok: &rustc_ast::token::$tok,
+            tok: rustc_ast::token::$tok,
             token_type: $crate::parser::token_type::TokenType::$tok
         }
     };
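
As the `mod.rs` hunks above show, the lifetime on `SeqSep` existed only to thread through the token borrowed inside its optional separator, which is why even `none()` had to return `SeqSep<'a>`. A minimal sketch of that simplification, using hypothetical stand-in types (`Kind`, `Pair`, `Sep`) rather than the real parser types:

```rust
// Stand-ins for `TokenKind` and `ExpTokenPair`; not the real compiler types.
#[derive(Clone, Copy, Debug)]
enum Kind {
    Comma,
}

#[derive(Clone, Copy, Debug)]
struct Pair {
    tok: Kind,
}

// Stand-in for `SeqSep` after the change: no lifetime parameter, because the
// contained `Pair` owns its `Kind` instead of borrowing it.
struct Sep {
    sep: Option<Pair>,
    trailing_sep_allowed: bool,
}

impl Sep {
    fn trailing_allowed(sep: Pair) -> Sep {
        Sep { sep: Some(sep), trailing_sep_allowed: true }
    }

    // Previously this constructor had to be written as `fn none() -> Sep<'a>`
    // even though it stores no borrow at all; the lifetime was pure plumbing.
    fn none() -> Sep {
        Sep { sep: None, trailing_sep_allowed: false }
    }
}

fn main() {
    let trailing = Sep::trailing_allowed(Pair { tok: Kind::Comma });
    let bare = Sep::none();
    println!("{:?} {}", trailing.sep.map(|p| p.tok), trailing.trailing_sep_allowed);
    println!("{:?} {}", bare.sep.is_some(), bare.trailing_sep_allowed);
}
```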
