author     bors <bors@rust-lang.org>  2025-04-22 01:15:06 +0000
committer  bors <bors@rust-lang.org>  2025-04-22 01:15:06 +0000
commit     fae7785b60ea7fe1ad293352c057a5b7be73d245 (patch)
tree       025811d983fd765b094568c658512388198ad8ba
parent     d6c1e454aa8af5e7e59fbf5c4e7d3128d2f99582 (diff)
parent     bf8ce32558a4657d077a6761eaa293d0645c2e16 (diff)
Auto merge of #139897 - nnethercote:rm-OpenDelim-CloseDelim, r=petrochenkov
Remove `token::{Open,Close}Delim`

By replacing them with `{Open,Close}{Paren,Brace,Bracket,Invisible}`.

PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
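
As a rough illustration of what the flattening buys at use sites, here is a
minimal, self-contained sketch; the enum is a cut-down stand-in for
`ast::TokenKind` (the real one has many more variants, including the
invisible-delimiter ones), not the actual compiler definition:
```
// Cut-down stand-in for `ast::TokenKind`, limited to the visible delimiters.
#[allow(dead_code)]
enum TokenKind {
    OpenParen,
    CloseParen,
    OpenBrace,
    CloseBrace,
    OpenBracket,
    CloseBracket,
}

// With the old compound variants every arm had to spell out
// `OpenDelim(Delimiter::Parenthesis)` and so on; with fieldless variants each
// arm is a single short path, much like the pretty-printer change in the diff
// below.
fn text(kind: &TokenKind) -> &'static str {
    match kind {
        TokenKind::OpenParen => "(",
        TokenKind::CloseParen => ")",
        TokenKind::OpenBrace => "{",
        TokenKind::CloseBrace => "}",
        TokenKind::OpenBracket => "[",
        TokenKind::CloseBracket => "]",
    }
}

fn main() {
    println!("{}", text(&TokenKind::OpenBrace)); // prints `{`
}
```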

This requires a few new methods:
- `TokenKind::{is_delim,open_delim,close_delim}()` replace various kinds of
  pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert `Delimiter`
  values to `TokenKind`. (A standalone sketch of both sets of helpers follows
  this list.)
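
The types below are simplified stand-ins (`InvisibleOrigin::MetaVar` drops its
`MetaVarKind` payload, and `Token`/span handling is omitted); see the
`compiler/rustc_ast/src/token.rs` hunks further down for the real definitions:
```
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum InvisibleOrigin {
    MetaVar, // simplified: the real variant carries a `MetaVarKind`
    ProcMacro,
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
    Invisible(InvisibleOrigin),
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind {
    OpenParen,
    CloseParen,
    OpenBrace,
    CloseBrace,
    OpenBracket,
    CloseBracket,
    OpenInvisible(InvisibleOrigin),
    CloseInvisible(InvisibleOrigin),
    Comma, // stand-in for the many non-delimiter variants
}

impl Delimiter {
    // `Delimiter` -> opening `TokenKind`; the closing direction is analogous.
    fn as_open_token_kind(self) -> TokenKind {
        match self {
            Delimiter::Parenthesis => TokenKind::OpenParen,
            Delimiter::Brace => TokenKind::OpenBrace,
            Delimiter::Bracket => TokenKind::OpenBracket,
            Delimiter::Invisible(origin) => TokenKind::OpenInvisible(origin),
        }
    }
}

impl TokenKind {
    // Opening-delimiter tokens map back to a `Delimiter`; everything else is `None`.
    fn open_delim(self) -> Option<Delimiter> {
        match self {
            TokenKind::OpenParen => Some(Delimiter::Parenthesis),
            TokenKind::OpenBrace => Some(Delimiter::Brace),
            TokenKind::OpenBracket => Some(Delimiter::Bracket),
            TokenKind::OpenInvisible(origin) => Some(Delimiter::Invisible(origin)),
            _ => None,
        }
    }

    // Same mapping for closing delimiters.
    fn close_delim(self) -> Option<Delimiter> {
        match self {
            TokenKind::CloseParen => Some(Delimiter::Parenthesis),
            TokenKind::CloseBrace => Some(Delimiter::Brace),
            TokenKind::CloseBracket => Some(Delimiter::Bracket),
            TokenKind::CloseInvisible(origin) => Some(Delimiter::Invisible(origin)),
            _ => None,
        }
    }

    // A delimiter token is either an opening or a closing delimiter.
    fn is_delim(self) -> bool {
        self.open_delim().is_some() || self.close_delim().is_some()
    }
}

fn main() {
    // Call sites that used to construct `token::OpenDelim(delim)` now go
    // through the conversion helper, and matches on `OpenDelim(_)` /
    // `CloseDelim(_)` become method calls.
    let kind = Delimiter::Brace.as_open_token_kind();
    assert_eq!(kind, TokenKind::OpenBrace);
    assert_eq!(kind.open_delim(), Some(Delimiter::Brace));
    assert!(kind.is_delim() && !TokenKind::Comma.is_delim());
    println!("{kind:?} round-trips to {:?}", kind.open_delim());
}
```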

Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single-line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
-   } else if self.token != token::CloseDelim(Delimiter::Brace) {
+   } else if self.token != token::CloseBrace {
```

r? `@petrochenkov`
-rw-r--r--  compiler/rustc_ast/src/attr/mod.rs | 5
-rw-r--r--  compiler/rustc_ast/src/token.rs | 131
-rw-r--r--  compiler/rustc_ast_pretty/src/pprust/state.rs | 19
-rw-r--r--  compiler/rustc_attr_parsing/src/parser.rs | 4
-rw-r--r--  compiler/rustc_expand/src/config.rs | 5
-rw-r--r--  compiler/rustc_expand/src/expand.rs | 5
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs | 6
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs | 4
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs | 24
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs | 8
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs | 4
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs | 12
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs | 85
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs | 14
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs | 44
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs | 83
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs | 91
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs | 5
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs | 56
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 126
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs | 14
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs | 49
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs | 11
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs | 33
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs | 60
-rw-r--r--  compiler/rustc_parse/src/parser/token_type.rs | 24
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs | 12
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs | 4
-rw-r--r--  src/tools/rustfmt/src/macros.rs | 12
-rw-r--r--  src/tools/rustfmt/src/parse/macros/cfg_if.rs | 6
30 files changed, 457 insertions, 499 deletions
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index f104400a572..f165c4ddcdd 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -416,10 +416,7 @@ impl MetaItem {
                 // This path is currently unreachable in the test suite.
                 unreachable!()
             }
-            Some(TokenTree::Token(
-                Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. },
-                _,
-            )) => {
+            Some(TokenTree::Token(Token { kind, .. }, _)) if kind.is_delim() => {
                 panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tt);
             }
             _ => return None,
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index 055481f5d87..54781e8235e 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -32,6 +32,18 @@ pub enum InvisibleOrigin {
     ProcMacro,
 }
 
+impl InvisibleOrigin {
+    // Should the parser skip these invisible delimiters? Ideally this function
+    // will eventually disappear and no invisible delimiters will be skipped.
+    #[inline]
+    pub fn skip(&self) -> bool {
+        match self {
+            InvisibleOrigin::MetaVar(_) => false,
+            InvisibleOrigin::ProcMacro => true,
+        }
+    }
+}
+
 impl PartialEq for InvisibleOrigin {
     #[inline]
     fn eq(&self, _other: &InvisibleOrigin) -> bool {
@@ -125,8 +137,7 @@ impl Delimiter {
     pub fn skip(&self) -> bool {
         match self {
             Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false,
-            Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) => false,
-            Delimiter::Invisible(InvisibleOrigin::ProcMacro) => true,
+            Delimiter::Invisible(origin) => origin.skip(),
         }
     }
 
@@ -140,6 +151,24 @@ impl Delimiter {
             _ => false,
         }
     }
+
+    pub fn as_open_token_kind(&self) -> TokenKind {
+        match *self {
+            Delimiter::Parenthesis => OpenParen,
+            Delimiter::Brace => OpenBrace,
+            Delimiter::Bracket => OpenBracket,
+            Delimiter::Invisible(origin) => OpenInvisible(origin),
+        }
+    }
+
+    pub fn as_close_token_kind(&self) -> TokenKind {
+        match *self {
+            Delimiter::Parenthesis => CloseParen,
+            Delimiter::Brace => CloseBrace,
+            Delimiter::Bracket => CloseBracket,
+            Delimiter::Invisible(origin) => CloseInvisible(origin),
+        }
+    }
 }
 
 // Note that the suffix is *not* considered when deciding the `LitKind` in this
@@ -194,9 +223,9 @@ impl Lit {
         match token.uninterpolate().kind {
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
             Literal(token_lit) => Some(token_lit),
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+            OpenInvisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Literal | MetaVarKind::Expr { .. },
-            ))) => {
+            )) => {
                 // Unreachable with the current test suite.
                 panic!("from_token metavar");
             }
@@ -426,10 +455,22 @@ pub enum TokenKind {
     Question,
     /// Used by proc macros for representing lifetimes, not generated by lexer right now.
     SingleQuote,
-    /// An opening delimiter (e.g., `{`).
-    OpenDelim(Delimiter),
-    /// A closing delimiter (e.g., `}`).
-    CloseDelim(Delimiter),
+    /// `(`
+    OpenParen,
+    /// `)`
+    CloseParen,
+    /// `{`
+    OpenBrace,
+    /// `}`
+    CloseBrace,
+    /// `[`
+    OpenBracket,
+    /// `]`
+    CloseBracket,
+    /// Invisible opening delimiter, produced by a macro.
+    OpenInvisible(InvisibleOrigin),
+    /// Invisible closing delimiter, produced by a macro.
+    CloseInvisible(InvisibleOrigin),
 
     /* Literals */
     Literal(Lit),
@@ -530,6 +571,37 @@ impl TokenKind {
     pub fn should_end_const_arg(&self) -> bool {
         matches!(self, Gt | Ge | Shr | ShrEq)
     }
+
+    pub fn is_delim(&self) -> bool {
+        self.open_delim().is_some() || self.close_delim().is_some()
+    }
+
+    pub fn open_delim(&self) -> Option<Delimiter> {
+        match *self {
+            OpenParen => Some(Delimiter::Parenthesis),
+            OpenBrace => Some(Delimiter::Brace),
+            OpenBracket => Some(Delimiter::Bracket),
+            OpenInvisible(origin) => Some(Delimiter::Invisible(origin)),
+            _ => None,
+        }
+    }
+
+    pub fn close_delim(&self) -> Option<Delimiter> {
+        match *self {
+            CloseParen => Some(Delimiter::Parenthesis),
+            CloseBrace => Some(Delimiter::Brace),
+            CloseBracket => Some(Delimiter::Bracket),
+            CloseInvisible(origin) => Some(Delimiter::Invisible(origin)),
+            _ => None,
+        }
+    }
+
+    pub fn is_close_delim_or_eof(&self) -> bool {
+        match self {
+            CloseParen | CloseBrace | CloseBracket | CloseInvisible(_) | Eof => true,
+            _ => false,
+        }
+    }
 }
 
 impl Token {
@@ -559,7 +631,8 @@ impl Token {
             | DotDotDot | DotDotEq | Comma | Semi | Colon | PathSep | RArrow | LArrow
             | FatArrow | Pound | Dollar | Question | SingleQuote => true,
 
-            OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
+            OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket
+            | OpenInvisible(_) | CloseInvisible(_) | Literal(..) | DocComment(..) | Ident(..)
             | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Eof => false,
         }
     }
@@ -573,11 +646,12 @@ impl Token {
     /// **NB**: Take care when modifying this function, since it will change
     /// the stable set of tokens that are allowed to match an expr nonterminal.
     pub fn can_begin_expr(&self) -> bool {
-        use Delimiter::*;
         match self.uninterpolate().kind {
             Ident(name, is_raw)              =>
                 ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
-            OpenDelim(Parenthesis | Brace | Bracket) | // tuple, array or block
+            OpenParen                         | // tuple
+            OpenBrace                         | // block
+            OpenBracket                       | // array
             Literal(..)                       | // literal
             Bang                              | // operator not
             Minus                             | // unary minus
@@ -591,12 +665,12 @@ impl Token {
             PathSep                           | // global path
             Lifetime(..)                      | // labeled loop
             Pound                             => true, // expression attributes
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+            OpenInvisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Block |
                 MetaVarKind::Expr { .. } |
                 MetaVarKind::Literal |
                 MetaVarKind::Path
-            ))) => true,
+            )) => true,
             _ => false,
         }
     }
@@ -608,8 +682,8 @@ impl Token {
         match &self.uninterpolate().kind {
             // box, ref, mut, and other identifiers (can stricten)
             Ident(..) | NtIdent(..) |
-            OpenDelim(Delimiter::Parenthesis) |  // tuple pattern
-            OpenDelim(Delimiter::Bracket) |      // slice pattern
+            OpenParen |                          // tuple pattern
+            OpenBracket |                        // slice pattern
             And |                                // reference
             Minus |                              // negative literal
             AndAnd |                             // double reference
@@ -620,14 +694,14 @@ impl Token {
             Lt |                                 // path (UFCS constant)
             Shl => true,                         // path (double UFCS)
             Or => matches!(pat_kind, PatWithOr), // leading vert `|` or-pattern
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+            OpenInvisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Expr { .. } |
                 MetaVarKind::Literal |
                 MetaVarKind::Meta { .. } |
                 MetaVarKind::Pat(_) |
                 MetaVarKind::Path |
                 MetaVarKind::Ty { .. }
-            ))) => true,
+            )) => true,
             _ => false,
         }
     }
@@ -637,8 +711,8 @@ impl Token {
         match self.uninterpolate().kind {
             Ident(name, is_raw) =>
                 ident_can_begin_type(name, self.span, is_raw), // type name or keyword
-            OpenDelim(Delimiter::Parenthesis) | // tuple
-            OpenDelim(Delimiter::Bracket)     | // array
+            OpenParen                         | // tuple
+            OpenBracket                       | // array
             Bang                              | // never
             Star                              | // raw pointer
             And                               | // reference
@@ -647,10 +721,10 @@ impl Token {
             Lifetime(..)                      | // lifetime bound in trait object
             Lt | Shl                          | // associated path
             PathSep => true,                    // global path
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+            OpenInvisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Ty { .. } |
                 MetaVarKind::Path
-            ))) => true,
+            )) => true,
             // For anonymous structs or unions, which only appear in specific positions
             // (type of struct fields or union fields), we don't consider them as regular types
             _ => false,
@@ -660,11 +734,11 @@ impl Token {
     /// Returns `true` if the token can appear at the start of a const param.
     pub fn can_begin_const_arg(&self) -> bool {
         match self.kind {
-            OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true,
+            OpenBrace | Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+            OpenInvisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Expr { .. } | MetaVarKind::Block | MetaVarKind::Literal,
-            ))) => true,
+            )) => true,
             _ => false,
         }
     }
@@ -711,7 +785,7 @@ impl Token {
         match self.uninterpolate().kind {
             Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind))) => match mv_kind {
+            OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) => match mv_kind {
                 MetaVarKind::Literal => true,
                 MetaVarKind::Expr { can_begin_literal_maybe_minus, .. } => {
                     can_begin_literal_maybe_minus
@@ -725,7 +799,7 @@ impl Token {
     pub fn can_begin_string_literal(&self) -> bool {
         match self.uninterpolate().kind {
             Literal(..) => true,
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind))) => match mv_kind {
+            OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) => match mv_kind {
                 MetaVarKind::Literal => true,
                 MetaVarKind::Expr { can_begin_string_literal, .. } => can_begin_string_literal,
                 _ => false,
@@ -892,7 +966,7 @@ impl Token {
     /// from an expanded metavar?
     pub fn is_metavar_seq(&self) -> Option<MetaVarKind> {
         match self.kind {
-            OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => Some(kind),
+            OpenInvisible(InvisibleOrigin::MetaVar(kind)) => Some(kind),
             _ => None,
         }
     }
@@ -970,7 +1044,8 @@ impl Token {
                 Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | PlusEq | MinusEq | StarEq | SlashEq
                 | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq
                 | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question
-                | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
+                | OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket
+                | OpenInvisible(_) | CloseInvisible(_) | Literal(..) | Ident(..) | NtIdent(..)
                 | Lifetime(..) | NtLifetime(..) | DocComment(..) | Eof,
                 _,
             ) => {
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index 0985ebf945b..6959cbd87f1 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -770,12 +770,12 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                 self.bclose(span, empty);
             }
             delim => {
-                let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
+                let token_str = self.token_kind_to_string(&delim.as_open_token_kind());
                 self.word(token_str);
                 self.ibox(0);
                 self.print_tts(tts, convert_dollar_crate);
                 self.end();
-                let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
+                let token_str = self.token_kind_to_string(&delim.as_close_token_kind());
                 self.word(token_str);
             }
         }
@@ -932,14 +932,13 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
             token::RArrow => "->".into(),
             token::LArrow => "<-".into(),
             token::FatArrow => "=>".into(),
-            token::OpenDelim(Delimiter::Parenthesis) => "(".into(),
-            token::CloseDelim(Delimiter::Parenthesis) => ")".into(),
-            token::OpenDelim(Delimiter::Bracket) => "[".into(),
-            token::CloseDelim(Delimiter::Bracket) => "]".into(),
-            token::OpenDelim(Delimiter::Brace) => "{".into(),
-            token::CloseDelim(Delimiter::Brace) => "}".into(),
-            token::OpenDelim(Delimiter::Invisible(_))
-            | token::CloseDelim(Delimiter::Invisible(_)) => "".into(),
+            token::OpenParen => "(".into(),
+            token::CloseParen => ")".into(),
+            token::OpenBracket => "[".into(),
+            token::CloseBracket => "]".into(),
+            token::OpenBrace => "{".into(),
+            token::CloseBrace => "}".into(),
+            token::OpenInvisible(_) | token::CloseInvisible(_) => "".into(),
             token::Pound => "#".into(),
             token::Dollar => "$".into(),
             token::Question => "?".into(),
diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs
index 384fae59873..40aa39711d3 100644
--- a/compiler/rustc_attr_parsing/src/parser.rs
+++ b/compiler/rustc_attr_parsing/src/parser.rs
@@ -430,9 +430,7 @@ impl<'a> MetaItemListParserContext<'a> {
                 let span = span.with_hi(segments.last().unwrap().span.hi());
                 Some(AttrPath { segments: segments.into_boxed_slice(), span })
             }
-            TokenTree::Token(Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. }, _) => {
-                None
-            }
+            TokenTree::Token(Token { kind, .. }, _) if kind.is_delim() => None,
             _ => {
                 // malformed attributes can get here. We can't crash, but somewhere else should've
                 // already warned for this.
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index c70e259b2cd..d2e45d717d9 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -237,10 +237,7 @@ impl<'a> StripUnconfigured<'a> {
                     inner = self.configure_tokens(&inner);
                     Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
                 }
-                AttrTokenTree::Token(
-                    Token { kind: TokenKind::OpenDelim(_) | TokenKind::CloseDelim(_), .. },
-                    _,
-                ) => {
+                AttrTokenTree::Token(Token { kind, .. }, _) if kind.is_delim() => {
                     panic!("Should be `AttrTokenTree::Delimited`, not delim tokens: {:?}", tree);
                 }
                 AttrTokenTree::Token(token, spacing) => Some(AttrTokenTree::Token(token, spacing)),
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 1e26d668194..1f430b0018f 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -7,13 +7,12 @@ use std::{iter, mem};
 use rustc_ast as ast;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{self, AssocCtxt, Visitor, VisitorResult, try_visit, walk_list};
 use rustc_ast::{
     AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind, ForeignItemKind,
     HasAttrs, HasNodeId, Inline, ItemKind, MacStmtStyle, MetaItemInner, MetaItemKind, ModKind,
-    NodeId, PatKind, StmtKind, TyKind,
+    NodeId, PatKind, StmtKind, TyKind, token,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
@@ -1004,7 +1003,7 @@ pub fn parse_ast_fragment<'a>(
         AstFragmentKind::Stmts => {
             let mut stmts = SmallVec::new();
             // Won't make progress on a `}`.
-            while this.token != token::Eof && this.token != token::CloseDelim(Delimiter::Brace) {
+            while this.token != token::Eof && this.token != token::CloseBrace {
                 if let Some(stmt) = this.parse_full_stmt(AttemptLocalParseRecovery::Yes)? {
                     stmts.push(stmt);
                 }
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index b663e959744..698492f42e2 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -1,6 +1,6 @@
 use std::borrow::Cow;
 
-use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, Token};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_errors::{Applicability, Diag, DiagCtxtHandle, DiagMessage};
 use rustc_macros::Subdiagnostic;
@@ -66,8 +66,8 @@ pub(super) fn failed_to_match_macro(
     }
 
     if let MatcherLoc::Token { token: expected_token } = &remaining_matcher
-        && (matches!(expected_token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_)))
-            || matches!(token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_))))
+        && (matches!(expected_token.kind, token::OpenInvisible(_))
+            || matches!(token.kind, token::OpenInvisible(_)))
     {
         err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
         err.note("see <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment> for more information");
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index 0065f83eb4e..c78beb40688 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -182,8 +182,8 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
                     locs.push(MatcherLoc::Token { token: *token });
                 }
                 TokenTree::Delimited(span, _, delimited) => {
-                    let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
-                    let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);
+                    let open_token = Token::new(delimited.delim.as_open_token_kind(), span.open);
+                    let close_token = Token::new(delimited.delim.as_close_token_kind(), span.close);
 
                     locs.push(MatcherLoc::Delimited);
                     locs.push(MatcherLoc::Token { token: open_token });
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index c138b090877..93604a149f1 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -6,7 +6,7 @@ use std::{mem, slice};
 use ast::token::IdentIsRaw;
 use rustc_ast::token::NtPatKind::*;
 use rustc_ast::token::TokenKind::*;
-use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
+use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
 use rustc_ast_pretty::pprust;
@@ -784,7 +784,7 @@ impl<'tt> FirstSets<'tt> {
                     TokenTree::Delimited(span, _, delimited) => {
                         build_recur(sets, &delimited.tts);
                         first.replace_with(TtHandle::from_token_kind(
-                            token::OpenDelim(delimited.delim),
+                            delimited.delim.as_open_token_kind(),
                             span.open,
                         ));
                     }
@@ -852,7 +852,7 @@ impl<'tt> FirstSets<'tt> {
                 }
                 TokenTree::Delimited(span, _, delimited) => {
                     first.add_one(TtHandle::from_token_kind(
-                        token::OpenDelim(delimited.delim),
+                        delimited.delim.as_open_token_kind(),
                         span.open,
                     ));
                     return first;
@@ -1099,7 +1099,7 @@ fn check_matcher_core<'tt>(
             }
             TokenTree::Delimited(span, _, d) => {
                 let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
-                    token::CloseDelim(d.delim),
+                    d.delim.as_close_token_kind(),
                     span.close,
                 ));
                 check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
@@ -1299,7 +1299,9 @@ enum IsInFollow {
 fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
     use mbe::TokenTree;
 
-    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
+    if let TokenTree::Token(Token { kind, .. }) = tok
+        && kind.close_delim().is_some()
+    {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1358,16 +1360,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                 ];
                 match tok {
                     TokenTree::Token(token) => match token.kind {
-                        OpenDelim(Delimiter::Brace)
-                        | OpenDelim(Delimiter::Bracket)
-                        | Comma
-                        | FatArrow
-                        | Colon
-                        | Eq
-                        | Gt
-                        | Shr
-                        | Semi
-                        | Or => IsInFollow::Yes,
+                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
+                        | Semi | Or => IsInFollow::Yes,
                         Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                             IsInFollow::Yes
                         }
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 3f037259956..0c2362f23bc 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -181,7 +181,10 @@ fn parse_tree<'a>(
                         if delim != Delimiter::Parenthesis {
                             span_dollar_dollar_or_metavar_in_the_lhs_err(
                                 sess,
-                                &Token { kind: token::OpenDelim(delim), span: delim_span.entire() },
+                                &Token {
+                                    kind: delim.as_open_token_kind(),
+                                    span: delim_span.entire(),
+                                },
                             );
                         }
                     } else {
@@ -217,7 +220,8 @@ fn parse_tree<'a>(
                             }
                             Delimiter::Parenthesis => {}
                             _ => {
-                                let token = pprust::token_kind_to_string(&token::OpenDelim(delim));
+                                let token =
+                                    pprust::token_kind_to_string(&delim.as_open_token_kind());
                                 sess.dcx().emit_err(errors::ExpectedParenOrBrace {
                                     span: delim_span.entire(),
                                     token,
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 4edaf68c89a..f00201ad202 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -308,8 +308,8 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                     }));
                 }
 
-                OpenDelim(..) | CloseDelim(..) => unreachable!(),
-                Eof => unreachable!(),
+                OpenParen | CloseParen | OpenBrace | CloseBrace | OpenBracket | CloseBracket
+                | OpenInvisible(_) | CloseInvisible(_) | Eof => unreachable!(),
             }
         }
         trees
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 4935fc03256..a8ec9a1e952 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -371,12 +371,12 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                 rustc_lexer::TokenKind::Semi => token::Semi,
                 rustc_lexer::TokenKind::Comma => token::Comma,
                 rustc_lexer::TokenKind::Dot => token::Dot,
-                rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
-                rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
-                rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
-                rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
-                rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
-                rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
+                rustc_lexer::TokenKind::OpenParen => token::OpenParen,
+                rustc_lexer::TokenKind::CloseParen => token::CloseParen,
+                rustc_lexer::TokenKind::OpenBrace => token::OpenBrace,
+                rustc_lexer::TokenKind::CloseBrace => token::CloseBrace,
+                rustc_lexer::TokenKind::OpenBracket => token::OpenBracket,
+                rustc_lexer::TokenKind::CloseBracket => token::CloseBracket,
                 rustc_lexer::TokenKind::At => token::At,
                 rustc_lexer::TokenKind::Pound => token::Pound,
                 rustc_lexer::TokenKind::Tilde => token::Tilde,
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index b3f83a32024..0ddd9a85df8 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -18,38 +18,33 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
 
         let mut buf = Vec::new();
         loop {
-            match self.token.kind {
-                token::OpenDelim(delim) => {
-                    // Invisible delimiters cannot occur here because `TokenTreesReader` parses
-                    // code directly from strings, with no macro expansion involved.
-                    debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
-                    buf.push(match self.lex_token_tree_open_delim(delim) {
-                        Ok(val) => val,
-                        Err(errs) => return Err(errs),
-                    })
-                }
-                token::CloseDelim(delim) => {
-                    // Invisible delimiters cannot occur here because `TokenTreesReader` parses
-                    // code directly from strings, with no macro expansion involved.
-                    debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
-                    return if is_delimited {
-                        Ok((open_spacing, TokenStream::new(buf)))
-                    } else {
-                        Err(vec![self.close_delim_err(delim)])
-                    };
-                }
-                token::Eof => {
-                    return if is_delimited {
-                        Err(vec![self.eof_err()])
-                    } else {
-                        Ok((open_spacing, TokenStream::new(buf)))
-                    };
-                }
-                _ => {
-                    // Get the next normal token.
-                    let (this_tok, this_spacing) = self.bump();
-                    buf.push(TokenTree::Token(this_tok, this_spacing));
-                }
+            if let Some(delim) = self.token.kind.open_delim() {
+                // Invisible delimiters cannot occur here because `TokenTreesReader` parses
+                // code directly from strings, with no macro expansion involved.
+                debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
+                buf.push(match self.lex_token_tree_open_delim(delim) {
+                    Ok(val) => val,
+                    Err(errs) => return Err(errs),
+                })
+            } else if let Some(delim) = self.token.kind.close_delim() {
+                // Invisible delimiters cannot occur here because `TokenTreesReader` parses
+                // code directly from strings, with no macro expansion involved.
+                debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
+                return if is_delimited {
+                    Ok((open_spacing, TokenStream::new(buf)))
+                } else {
+                    Err(vec![self.close_delim_err(delim)])
+                };
+            } else if self.token.kind == token::Eof {
+                return if is_delimited {
+                    Err(vec![self.eof_err()])
+                } else {
+                    Ok((open_spacing, TokenStream::new(buf)))
+                };
+            } else {
+                // Get the next normal token.
+                let (this_tok, this_spacing) = self.bump();
+                buf.push(TokenTree::Token(this_tok, this_spacing));
             }
         }
     }
@@ -111,9 +106,9 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
         let sm = self.psess.source_map();
 
-        let close_spacing = match self.token.kind {
-            // Correct delimiter.
-            token::CloseDelim(close_delim) if close_delim == open_delim => {
+        let close_spacing = if let Some(close_delim) = self.token.kind.close_delim() {
+            if close_delim == open_delim {
+                // Correct delimiter.
                 let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
                 let close_brace_span = self.token.span;
 
@@ -134,9 +129,8 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
 
                 // Move past the closing delimiter.
                 self.bump_minimal()
-            }
-            // Incorrect delimiter.
-            token::CloseDelim(close_delim) => {
+            } else {
+                // Incorrect delimiter.
                 let mut unclosed_delimiter = None;
                 let mut candidate = None;
 
@@ -182,14 +176,13 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                     Spacing::Alone
                 }
             }
-            token::Eof => {
-                // Silently recover, the EOF token will be seen again
-                // and an error emitted then. Thus we don't pop from
-                // self.open_braces here. The choice of spacing value here
-                // doesn't matter.
-                Spacing::Alone
-            }
-            _ => unreachable!(),
+        } else {
+            assert_eq!(self.token.kind, token::Eof);
+            // Silently recover, the EOF token will be seen again
+            // and an error emitted then. Thus we don't pop from
+            // self.open_braces here. The choice of spacing value here
+            // doesn't matter.
+            Spacing::Alone
         };
 
         let spacing = DelimSpacing::new(open_spacing, close_spacing);
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 2bfa1ea4e05..751d13af433 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -5,7 +5,7 @@ use rustc_span::{BytePos, Pos, Span, kw};
 
 use super::Lexer;
 use crate::errors::TokenSubstitution;
-use crate::token::{self, Delimiter};
+use crate::token;
 
 #[rustfmt::skip] // for line breaks
 pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
@@ -315,12 +315,12 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     ("!", "Exclamation Mark", Some(token::Bang)),
     ("?", "Question Mark", Some(token::Question)),
     (".", "Period", Some(token::Dot)),
-    ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
-    (")", "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
-    ("[", "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
-    ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
-    ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
-    ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
+    ("(", "Left Parenthesis", Some(token::OpenParen)),
+    (")", "Right Parenthesis", Some(token::CloseParen)),
+    ("[", "Left Square Bracket", Some(token::OpenBracket)),
+    ("]", "Right Square Bracket", Some(token::CloseBracket)),
+    ("{", "Left Curly Brace", Some(token::OpenBrace)),
+    ("}", "Right Curly Brace", Some(token::CloseBrace)),
     ("*", "Asterisk", Some(token::Star)),
     ("/", "Slash", Some(token::Slash)),
     ("\\", "Backslash", None),
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index f1bd6a22730..6061c9cb485 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,7 +1,7 @@
 use std::borrow::Cow;
 use std::{iter, mem};
 
-use rustc_ast::token::{Delimiter, Token, TokenKind};
+use rustc_ast::token::{Delimiter, Token};
 use rustc_ast::tokenstream::{
     AttrTokenStream, AttrTokenTree, AttrsTarget, DelimSpacing, DelimSpan, LazyAttrTokenStream,
     Spacing, ToAttrTokenStream,
@@ -501,27 +501,27 @@ fn make_attr_token_stream(
     let mut stack_rest = vec![];
     for flat_token in iter {
         match flat_token {
-            FlatToken::Token((Token { kind: TokenKind::OpenDelim(delim), span }, spacing)) => {
-                stack_rest.push(mem::replace(
-                    &mut stack_top,
-                    FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
-                ));
-            }
-            FlatToken::Token((Token { kind: TokenKind::CloseDelim(delim), span }, spacing)) => {
-                let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
-                let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
-                assert!(
-                    open_delim.eq_ignoring_invisible_origin(&delim),
-                    "Mismatched open/close delims: open={open_delim:?} close={span:?}"
-                );
-                let dspan = DelimSpan::from_pair(open_sp, span);
-                let dspacing = DelimSpacing::new(open_spacing, spacing);
-                let stream = AttrTokenStream::new(frame_data.inner);
-                let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
-                stack_top.inner.push(delimited);
-            }
-            FlatToken::Token((token, spacing)) => {
-                stack_top.inner.push(AttrTokenTree::Token(token, spacing))
+            FlatToken::Token((token @ Token { kind, span }, spacing)) => {
+                if let Some(delim) = kind.open_delim() {
+                    stack_rest.push(mem::replace(
+                        &mut stack_top,
+                        FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] },
+                    ));
+                } else if let Some(delim) = kind.close_delim() {
+                    let frame_data = mem::replace(&mut stack_top, stack_rest.pop().unwrap());
+                    let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
+                    assert!(
+                        open_delim.eq_ignoring_invisible_origin(&delim),
+                        "Mismatched open/close delims: open={open_delim:?} close={span:?}"
+                    );
+                    let dspan = DelimSpan::from_pair(open_sp, span);
+                    let dspacing = DelimSpacing::new(open_spacing, spacing);
+                    let stream = AttrTokenStream::new(frame_data.inner);
+                    let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
+                    stack_top.inner.push(delimited);
+                } else {
+                    stack_top.inner.push(AttrTokenTree::Token(token, spacing))
+                }
             }
             FlatToken::AttrsTarget(target) => {
                 stack_top.inner.push(AttrTokenTree::AttrsTarget(target))
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 7c8e0146c3d..23c8db7bca7 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -4,7 +4,7 @@ use std::ops::{Deref, DerefMut};
 use ast::token::IdentIsRaw;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
+use rustc_ast::token::{self, Lit, LitKind, Token, TokenKind};
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
     AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
@@ -304,10 +304,10 @@ impl<'a> Parser<'a> {
             TokenKind::Comma,
             TokenKind::Semi,
             TokenKind::PathSep,
-            TokenKind::OpenDelim(Delimiter::Brace),
-            TokenKind::OpenDelim(Delimiter::Parenthesis),
-            TokenKind::CloseDelim(Delimiter::Brace),
-            TokenKind::CloseDelim(Delimiter::Parenthesis),
+            TokenKind::OpenBrace,
+            TokenKind::OpenParen,
+            TokenKind::CloseBrace,
+            TokenKind::CloseParen,
         ];
         if let TokenKind::DocComment(..) = self.prev_token.kind
             && valid_follow.contains(&self.token.kind)
@@ -507,7 +507,7 @@ impl<'a> Parser<'a> {
             } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
                 // The current token is in the same line as the prior token, not recoverable.
             } else if [token::Comma, token::Colon].contains(&self.token.kind)
-                && self.prev_token == token::CloseDelim(Delimiter::Parenthesis)
+                && self.prev_token == token::CloseParen
             {
                 // Likely typo: The current token is on a new line and is expected to be
                 // `.`, `;`, `?`, or an operator after a close delimiter token.
@@ -518,8 +518,7 @@ impl<'a> Parser<'a> {
                 //         ^
                 // https://github.com/rust-lang/rust/issues/72253
             } else if self.look_ahead(1, |t| {
-                t == &token::CloseDelim(Delimiter::Brace)
-                    || t.can_begin_expr() && *t != token::Colon
+                t == &token::CloseBrace || t.can_begin_expr() && *t != token::Colon
             }) && [token::Comma, token::Colon].contains(&self.token.kind)
             {
                 // Likely typo: `,` → `;` or `:` → `;`. This is triggered if the current token is
@@ -537,7 +536,7 @@ impl<'a> Parser<'a> {
                 self.bump();
                 return Ok(guar);
             } else if self.look_ahead(0, |t| {
-                t == &token::CloseDelim(Delimiter::Brace)
+                t == &token::CloseBrace
                     || ((t.can_begin_expr() || t.can_begin_item())
                         && t != &token::Semi
                         && t != &token::Pound)
@@ -675,8 +674,7 @@ impl<'a> Parser<'a> {
 
         // `pub` may be used for an item or `pub(crate)`
         if self.prev_token.is_ident_named(sym::public)
-            && (self.token.can_begin_item()
-                || self.token == TokenKind::OpenDelim(Delimiter::Parenthesis))
+            && (self.token.can_begin_item() || self.token == TokenKind::OpenParen)
         {
             err.span_suggestion_short(
                 self.prev_token.span,
@@ -843,9 +841,7 @@ impl<'a> Parser<'a> {
                 if expr.attrs.len() == 1 { "this attribute" } else { "these attributes" },
             ),
         );
-        if self.token == token::Pound
-            && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
-        {
+        if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) {
             // We have
             // #[attr]
             // expr
@@ -1037,9 +1033,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         err.span_label(lo.to(decl_hi), "while parsing the body of this closure");
         let guar = match before.kind {
-            token::OpenDelim(Delimiter::Brace)
-                if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
-            {
+            token::OpenBrace if token.kind != token::OpenBrace => {
                 // `{ || () }` should have been `|| { () }`
                 err.multipart_suggestion(
                     "you might have meant to open the body of the closure, instead of enclosing \
@@ -1054,9 +1048,7 @@ impl<'a> Parser<'a> {
                 self.eat_to_tokens(&[exp!(CloseBrace)]);
                 guar
             }
-            token::OpenDelim(Delimiter::Parenthesis)
-                if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
-            {
+            token::OpenParen if token.kind != token::OpenBrace => {
                 // We are within a function call or tuple, we can emit the error
                 // and recover.
                 self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]);
@@ -1071,7 +1063,7 @@ impl<'a> Parser<'a> {
                 );
                 err.emit()
             }
-            _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => {
+            _ if token.kind != token::OpenBrace => {
                 // We don't have a heuristic to correctly identify where the block
                 // should be closed.
                 err.multipart_suggestion_verbose(
@@ -1225,7 +1217,7 @@ impl<'a> Parser<'a> {
                         trailing_span = trailing_span.to(self.token.span);
                         self.bump();
                     }
-                    if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+                    if self.token == token::OpenParen {
                         // Recover from bad turbofish: `foo.collect::Vec<_>()`.
                         segment.args = Some(AngleBracketedArgs { args, span }.into());
 
@@ -1470,9 +1462,7 @@ impl<'a> Parser<'a> {
                         let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)];
                         self.consume_tts(1, &modifiers);
 
-                        if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
-                            .contains(&self.token.kind)
-                        {
+                        if !matches!(self.token.kind, token::OpenParen | token::PathSep) {
                             // We don't have `foo< bar >(` or `foo< bar >::`, so we rewind the
                             // parser and bail out.
                             self.restore_snapshot(snapshot);
@@ -1510,7 +1500,7 @@ impl<'a> Parser<'a> {
                                 Err(self.dcx().create_err(err))
                             }
                         }
-                    } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+                    } else if self.token == token::OpenParen {
                         // We have high certainty that this was a bad turbofish at this point.
                         // `foo< bar >(`
                         if let ExprKind::Binary(o, ..) = inner_op.kind
@@ -1570,10 +1560,7 @@ impl<'a> Parser<'a> {
         self.bump(); // `(`
 
         // Consume the fn call arguments.
-        let modifiers = [
-            (token::OpenDelim(Delimiter::Parenthesis), 1),
-            (token::CloseDelim(Delimiter::Parenthesis), -1),
-        ];
+        let modifiers = [(token::OpenParen, 1), (token::CloseParen, -1)];
         self.consume_tts(1, &modifiers);
 
         if self.token == token::Eof {
@@ -1978,7 +1965,7 @@ impl<'a> Parser<'a> {
 
     fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
         let is_question = self.eat(exp!(Question)); // Handle `await? <expr>`.
-        let expr = if self.token == token::OpenDelim(Delimiter::Brace) {
+        let expr = if self.token == token::OpenBrace {
             // Handle `await { <expr> }`.
             // This needs to be handled separately from the next arm to avoid
             // interpreting `await { <expr> }?` as `<expr>?.await`.
@@ -2014,9 +2001,7 @@ impl<'a> Parser<'a> {
 
     /// If encountering `future.await()`, consumes and emits an error.
     pub(super) fn recover_from_await_method_call(&mut self) {
-        if self.token == token::OpenDelim(Delimiter::Parenthesis)
-            && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
-        {
+        if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) {
             // future.await()
             let lo = self.token.span;
             self.bump(); // (
@@ -2029,9 +2014,7 @@ impl<'a> Parser<'a> {
     ///
     /// If encountering `x.use()`, consumes and emits an error.
     pub(super) fn recover_from_use(&mut self) {
-        if self.token == token::OpenDelim(Delimiter::Parenthesis)
-            && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
-        {
+        if self.token == token::OpenParen && self.look_ahead(1, |t| t == &token::CloseParen) {
             // var.use()
             let lo = self.token.span;
             self.bump(); // (
@@ -2045,7 +2028,7 @@ impl<'a> Parser<'a> {
     pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
         let is_try = self.token.is_keyword(kw::Try);
         let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for !
-        let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (
+        let is_open = self.look_ahead(2, |t| t == &token::OpenParen); //check for (
 
         if is_try && is_questionmark && is_open {
             let lo = self.token.span;
@@ -2053,7 +2036,7 @@ impl<'a> Parser<'a> {
             self.bump(); //remove !
             let try_span = lo.to(self.token.span); //we take the try!( span
             self.bump(); //remove (
-            let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty
+            let is_empty = self.token == token::CloseParen; //check if the block is empty
             self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); //eat the block
             let hi = self.token.span;
             self.bump(); //remove )
@@ -2148,7 +2131,7 @@ impl<'a> Parser<'a> {
         loop {
             debug!("recover_stmt_ loop {:?}", self.token);
             match self.token.kind {
-                token::OpenDelim(Delimiter::Brace) => {
+                token::OpenBrace => {
                     brace_depth += 1;
                     self.bump();
                     if break_on_block == BlockMode::Break && brace_depth == 1 && bracket_depth == 0
@@ -2156,11 +2139,11 @@ impl<'a> Parser<'a> {
                         in_block = true;
                     }
                 }
-                token::OpenDelim(Delimiter::Bracket) => {
+                token::OpenBracket => {
                     bracket_depth += 1;
                     self.bump();
                 }
-                token::CloseDelim(Delimiter::Brace) => {
+                token::CloseBrace => {
                     if brace_depth == 0 {
                         debug!("recover_stmt_ return - close delim {:?}", self.token);
                         break;
@@ -2172,7 +2155,7 @@ impl<'a> Parser<'a> {
                         break;
                     }
                 }
-                token::CloseDelim(Delimiter::Bracket) => {
+                token::CloseBracket => {
                     bracket_depth -= 1;
                     if bracket_depth < 0 {
                         bracket_depth = 0;
@@ -2219,12 +2202,10 @@ impl<'a> Parser<'a> {
         if let token::DocComment(..) = self.token.kind {
             self.dcx().emit_err(DocCommentOnParamType { span: self.token.span });
             self.bump();
-        } else if self.token == token::Pound
-            && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
-        {
+        } else if self.token == token::Pound && self.look_ahead(1, |t| *t == token::OpenBracket) {
             let lo = self.token.span;
             // Skip every token until next possible arg.
-            while self.token != token::CloseDelim(Delimiter::Bracket) {
+            while self.token != token::CloseBracket {
                 self.bump();
             }
             let sp = lo.to(self.token.span);
@@ -2243,9 +2224,7 @@ impl<'a> Parser<'a> {
         // If we find a pattern followed by an identifier, it could be an (incorrect)
         // C-style parameter declaration.
         if self.check_ident()
-            && self.look_ahead(1, |t| {
-                *t == token::Comma || *t == token::CloseDelim(Delimiter::Parenthesis)
-            })
+            && self.look_ahead(1, |t| *t == token::Comma || *t == token::CloseParen)
         {
             // `fn foo(String s) {}`
             let ident = self.parse_ident().unwrap();
@@ -2261,7 +2240,7 @@ impl<'a> Parser<'a> {
         } else if require_name
             && (self.token == token::Comma
                 || self.token == token::Lt
-                || self.token == token::CloseDelim(Delimiter::Parenthesis))
+                || self.token == token::CloseParen)
         {
             let rfc_note = "anonymous parameters are removed in the 2018 edition (see RFC 1685)";
 
@@ -2872,7 +2851,7 @@ impl<'a> Parser<'a> {
         // Check for `'a : {`
         if !(self.check_lifetime()
             && self.look_ahead(1, |t| *t == token::Colon)
-            && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace)))
+            && self.look_ahead(2, |t| *t == token::OpenBrace))
         {
             return false;
         }
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 71cc814cb50..11e60175a1f 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -436,7 +436,7 @@ impl<'a> Parser<'a> {
     fn is_at_start_of_range_notation_rhs(&self) -> bool {
         if self.token.can_begin_expr() {
             // Parse `for i in 1.. { }` as infinite loop, not as `for i in (1..{})`.
-            if self.token == token::OpenDelim(Delimiter::Brace) {
+            if self.token == token::OpenBrace {
                 return !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
             }
             true
@@ -542,8 +542,8 @@ impl<'a> Parser<'a> {
             }
             // Recover from `++x`:
             token::Plus if this.look_ahead(1, |t| *t == token::Plus) => {
-                let starts_stmt = this.prev_token == token::Semi
-                    || this.prev_token == token::CloseDelim(Delimiter::Brace);
+                let starts_stmt =
+                    this.prev_token == token::Semi || this.prev_token == token::CloseBrace;
                 let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
                 // Eat both `+`s.
                 this.bump();
@@ -637,8 +637,8 @@ impl<'a> Parser<'a> {
     /// Returns the span of expr if it was not interpolated, or the span of the interpolated token.
     fn interpolated_or_expr_span(&self, expr: &Expr) -> Span {
         match self.prev_token.kind {
-            TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) => self.prev_token.span,
-            TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
+            token::NtIdent(..) | token::NtLifetime(..) => self.prev_token.span,
+            token::CloseInvisible(InvisibleOrigin::MetaVar(_)) => {
                 // `expr.span` is the interpolated span, because invisible open
                 // and close delims both get marked with the same span, one
                 // that covers the entire thing between them. (See
@@ -912,8 +912,8 @@ impl<'a> Parser<'a> {
                     return Ok(e);
                 }
                 e = match self.token.kind {
-                    token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e),
-                    token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?,
+                    token::OpenParen => self.parse_expr_fn_call(lo, e),
+                    token::OpenBracket => self.parse_expr_index(lo, e)?,
                     _ => return Ok(e),
                 }
             }
@@ -1002,7 +1002,7 @@ impl<'a> Parser<'a> {
             (token::Eof, Some(_)) if let Ok(snippet) = sm.span_to_snippet(sm.next_point(span)) => {
                 (span.shrink_to_hi(), format!("`{}`", snippet))
             }
-            (token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))), _) => {
+            (token::CloseInvisible(InvisibleOrigin::MetaVar(_)), _) => {
                 // No need to report an error. This case will only occur when parsing a pasted
                 // metavariable, and we should have emitted an error when parsing the macro call in
                 // the first place. E.g. in this code:
@@ -1202,7 +1202,7 @@ impl<'a> Parser<'a> {
                 }
             }
 
-            if matches!(self.token.kind, token::CloseDelim(..) | token::Comma) {
+            if self.token.kind.close_delim().is_some() || self.token.kind == token::Comma {
                 break;
             } else if trailing_dot.is_none() {
                 // This loop should only repeat if there is a trailing dot.
@@ -1232,7 +1232,7 @@ impl<'a> Parser<'a> {
 
     /// Parse a function call expression, `expr(...)`.
     fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
-        let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+        let snapshot = if self.token == token::OpenParen {
             Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
         } else {
             None
@@ -1676,14 +1676,11 @@ impl<'a> Parser<'a> {
             self.parse_expr_for(label, lo)
         } else if self.eat_keyword(exp!(Loop)) {
             self.parse_expr_loop(label, lo)
-        } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
-            || self.token.is_metavar_block()
-        {
+        } else if self.check_noexpect(&token::OpenBrace) || self.token.is_metavar_block() {
             self.parse_expr_block(label, lo, BlockCheckMode::Default)
         } else if !ate_colon
             && self.may_recover()
-            && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
-                || self.token.is_punct())
+            && (self.token.kind.close_delim().is_some() || self.token.is_punct())
             && could_be_unclosed_char_literal(label_.ident)
         {
             let (lit, _) =
@@ -1878,7 +1875,7 @@ impl<'a> Parser<'a> {
                 },
             });
             Some(lexpr)
-        } else if self.token != token::OpenDelim(Delimiter::Brace)
+        } else if self.token != token::OpenBrace
             || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
         {
             let mut expr = self.parse_expr_opt()?;
@@ -2016,7 +2013,7 @@ impl<'a> Parser<'a> {
 
         // Eat tokens until the macro call ends.
         if self.may_recover() {
-            while !matches!(self.token.kind, token::CloseDelim(..) | token::Eof) {
+            while !self.token.kind.is_close_delim_or_eof() {
                 self.bump();
             }
         }
@@ -2157,9 +2154,7 @@ impl<'a> Parser<'a> {
                 self.bump();
                 Some(token_lit)
             }
-            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
-                MetaVarKind::Literal,
-            ))) => {
+            token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Literal)) => {
                 let lit = self
                     .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
                     .expect("metavar seq literal");
@@ -2168,9 +2163,9 @@ impl<'a> Parser<'a> {
                 };
                 Some(token_lit)
             }
-            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+            token::OpenInvisible(InvisibleOrigin::MetaVar(
                 mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. },
-            ))) => {
+            )) => {
                 let expr = self
                     .eat_metavar_seq(mv_kind, |this| this.parse_expr())
                     .expect("metavar seq expr");
@@ -2275,7 +2270,7 @@ impl<'a> Parser<'a> {
     }
 
     fn is_array_like_block(&mut self) -> bool {
-        matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace))
+        self.token.kind == TokenKind::OpenBrace
             && self
                 .look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
             && self.look_ahead(2, |t| t == &token::Comma)
@@ -2328,8 +2323,8 @@ impl<'a> Parser<'a> {
                 |p| p.parse_expr(),
             ) {
                 Ok(_)
-                    // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
-                    // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`.
+                    // When the close delim is `)`, `token.kind` is expected to be `token::CloseParen`,
+                    // but the actual `token.kind` is `token::CloseBracket`.
                     // This is because the `token.kind` of the close delim is treated as the same as
                     // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
                     // Therefore, `token.kind` should not be compared here.
@@ -2484,7 +2479,7 @@ impl<'a> Parser<'a> {
     fn parse_closure_block_body(&mut self, ret_span: Span) -> PResult<'a, P<Expr>> {
         if self.may_recover()
             && self.token.can_begin_expr()
-            && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace))
+            && self.token.kind != TokenKind::OpenBrace
             && !self.token.is_metavar_block()
         {
             let snapshot = self.create_snapshot_for_diagnostic();
@@ -2887,7 +2882,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
-        let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+        let begin_paren = if self.token == token::OpenParen {
             // Record whether we are about to parse `for (`.
             // This is used below for recovery in case of `for ( $stuff ) $block`
             // in which case we will suggest `for $stuff $block`.
@@ -2921,7 +2916,7 @@ impl<'a> Parser<'a> {
                         return Err(err);
                     }
                 };
-                return if self.token == token::CloseDelim(Delimiter::Parenthesis) {
+                return if self.token == token::CloseParen {
                     // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
                     // parser state and emit a targeted suggestion.
                     let span = vec![start_span, self.token.span];
@@ -2965,7 +2960,7 @@ impl<'a> Parser<'a> {
         let (pat, expr) = self.parse_for_head()?;
         // Recover from missing expression in `for` loop
         if matches!(expr.kind, ExprKind::Block(..))
-            && !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
+            && self.token.kind != token::OpenBrace
             && self.may_recover()
         {
             let guar = self
@@ -3114,7 +3109,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_inner_attributes()?;
 
         let mut arms = ThinVec::new();
-        while self.token != token::CloseDelim(Delimiter::Brace) {
+        while self.token != token::CloseBrace {
             match self.parse_arm() {
                 Ok(arm) => arms.push(arm),
                 Err(e) => {
@@ -3122,7 +3117,7 @@ impl<'a> Parser<'a> {
                     let guar = e.emit();
                     self.recover_stmt();
                     let span = lo.to(self.token.span);
-                    if self.token == token::CloseDelim(Delimiter::Brace) {
+                    if self.token == token::CloseBrace {
                         self.bump();
                     }
                     // Always push at least one arm to make the match non-empty
@@ -3183,7 +3178,7 @@ impl<'a> Parser<'a> {
         // We might have either a `,` -> `;` typo, or a block without braces. We need
         // a more subtle parsing strategy.
         loop {
-            if self.token == token::CloseDelim(Delimiter::Brace) {
+            if self.token == token::CloseBrace {
                 // We have reached the closing brace of the `match` expression.
                 return Some(err(self, stmts));
             }
@@ -3242,7 +3237,7 @@ impl<'a> Parser<'a> {
             // this avoids the compiler saying that a `,` or `}` was expected even though
             // the pattern isn't a never pattern (and thus an arm body is required)
             let armless = (!is_fat_arrow && !is_almost_fat_arrow && pat.could_be_never_pattern())
-                || matches!(this.token.kind, token::Comma | token::CloseDelim(Delimiter::Brace));
+                || matches!(this.token.kind, token::Comma | token::CloseBrace);
 
             let mut result = if armless {
                 // A pattern without a body, allowed for never patterns.
@@ -3290,8 +3285,8 @@ impl<'a> Parser<'a> {
                         err
                     })?;
 
-                let require_comma = !classify::expr_is_complete(&expr)
-                    && this.token != token::CloseDelim(Delimiter::Brace);
+                let require_comma =
+                    !classify::expr_is_complete(&expr) && this.token != token::CloseBrace;
 
                 if !require_comma {
                     arm_body = Some(expr);
@@ -3442,7 +3437,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
-        if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+        if self.token == token::OpenParen {
             let left = self.token.span;
             let pat = self.parse_pat_no_top_guard(
                 None,
@@ -3488,7 +3483,7 @@ impl<'a> Parser<'a> {
         match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) {
             Ok((expr, _)) => Ok(expr),
             Err(mut err) => {
-                if self.prev_token == token::OpenDelim(Delimiter::Brace) {
+                if self.prev_token == token::OpenBrace {
                     let sugg_sp = self.prev_token.span.shrink_to_lo();
                     // Consume everything within the braces, let's avoid further parse
                     // errors.
@@ -3531,8 +3526,7 @@ impl<'a> Parser<'a> {
     fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(kw::Do)
             && self.is_keyword_ahead(1, &[kw::Catch])
-            && self
-                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
+            && self.look_ahead(2, |t| *t == token::OpenBrace || t.is_metavar_block())
             && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -3542,8 +3536,7 @@ impl<'a> Parser<'a> {
 
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try)
-            && self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
+            && self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
             && self.token_uninterpolated_span().at_least_rust_2018()
     }
 
@@ -3577,13 +3570,11 @@ impl<'a> Parser<'a> {
                 // `async move {`
                 self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use])
                     && self.look_ahead(lookahead + 2, |t| {
-                        *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
+                        *t == token::OpenBrace || t.is_metavar_block()
                     })
             ) || (
                 // `async {`
-                self.look_ahead(lookahead + 1, |t| {
-                    *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
-                })
+                self.look_ahead(lookahead + 1, |t| *t == token::OpenBrace || t.is_metavar_block())
             ))
     }
 
@@ -3707,11 +3698,7 @@ impl<'a> Parser<'a> {
                             AssocOp::from_token(t).is_some()
                                 || matches!(
                                     t.kind,
-                                    token::OpenDelim(
-                                        Delimiter::Parenthesis
-                                            | Delimiter::Bracket
-                                            | Delimiter::Brace
-                                    )
+                                    token::OpenParen | token::OpenBracket | token::OpenBrace
                                 )
                                 || *t == token::Dot
                         })
@@ -3868,8 +3855,8 @@ impl<'a> Parser<'a> {
                     t == &token::Colon
                         || t == &token::Eq
                         || t == &token::Comma
-                        || t == &token::CloseDelim(Delimiter::Brace)
-                        || t == &token::CloseDelim(Delimiter::Parenthesis)
+                        || t == &token::CloseBrace
+                        || t == &token::CloseParen
                 });
             if is_wrong {
                 return Err(this.dcx().create_err(errors::ExpectedStructField {
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index c3f71dd8b30..c05479feb61 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -1,4 +1,3 @@
-use ast::token::Delimiter;
 use rustc_ast::{
     self as ast, AttrVec, DUMMY_NODE_ID, GenericBounds, GenericParam, GenericParamKind, TyKind,
     WhereClause, token,
@@ -437,7 +436,7 @@ impl<'a> Parser<'a> {
 
         if let Some(struct_) = struct_
             && self.may_recover()
-            && self.token == token::OpenDelim(Delimiter::Parenthesis)
+            && self.token == token::OpenParen
         {
             snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
         };
@@ -548,7 +547,7 @@ impl<'a> Parser<'a> {
                         matches!(t.kind, token::Gt | token::Comma | token::Colon | token::Eq)
                         // Recovery-only branch -- this could be removed,
                         // since it only affects diagnostics currently.
-                            || matches!(t.kind, token::Question)
+                            || t.kind == token::Question
                     })
                 || self.is_keyword_ahead(start + 1, &[kw::Const]))
     }
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 39a0291cb1e..39251f1ce27 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -399,14 +399,9 @@ impl<'a> Parser<'a> {
         let insert_span = ident_span.shrink_to_lo();
 
         let ident = if self.token.is_ident()
-            && (!is_const || self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)))
+            && (!is_const || self.look_ahead(1, |t| *t == token::OpenParen))
             && self.look_ahead(1, |t| {
-                [
-                    token::Lt,
-                    token::OpenDelim(Delimiter::Brace),
-                    token::OpenDelim(Delimiter::Parenthesis),
-                ]
-                .contains(&t.kind)
+                matches!(t.kind, token::Lt | token::OpenBrace | token::OpenParen)
             }) {
             self.parse_ident().unwrap()
         } else {
@@ -422,7 +417,7 @@ impl<'a> Parser<'a> {
 
         let err = if self.check(exp!(OpenBrace)) {
             // possible struct or enum definition where `struct` or `enum` was forgotten
-            if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Brace)) {
+            if self.look_ahead(1, |t| *t == token::CloseBrace) {
                 // `S {}` could be unit enum or struct
                 Some(errors::MissingKeywordForItemDefinition::EnumOrStruct { span })
             } else if self.look_ahead(2, |t| *t == token::Colon)
@@ -764,11 +759,12 @@ impl<'a> Parser<'a> {
             match parse_item(self) {
                 Ok(None) => {
                     let mut is_unnecessary_semicolon = !items.is_empty()
-                        // When the close delim is `)` in a case like the following, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
-                        // but the actual `token.kind` is `token::CloseDelim(Delimiter::Brace)`.
-                        // This is because the `token.kind` of the close delim is treated as the same as
-                        // that of the open delim in `TokenTreesReader::parse_token_tree`, even if the delimiters of them are different.
-                        // Therefore, `token.kind` should not be compared here.
+                        // When the close delim is `)` in a case like the following, `token.kind`
+                        // is expected to be `token::CloseParen`, but the actual `token.kind` is
+                        // `token::CloseBrace`. This is because the `token.kind` of the close delim
+                        // is treated as the same as that of the open delim in
+                        // `TokenTreesReader::parse_token_tree`, even if the delimiters of them are
+                        // different. Therefore, `token.kind` should not be compared here.
                         //
                         // issue-60075.rs
                         // ```
@@ -787,8 +783,8 @@ impl<'a> Parser<'a> {
                     let mut semicolon_span = self.token.span;
                     if !is_unnecessary_semicolon {
                         // #105369, Detect spurious `;` before assoc fn body
-                        is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
-                            && self.prev_token == token::Semi;
+                        is_unnecessary_semicolon =
+                            self.token == token::OpenBrace && self.prev_token == token::Semi;
                         semicolon_span = self.prev_token.span;
                     }
                     // We have to bail or we'll potentially never make progress.
@@ -840,7 +836,7 @@ impl<'a> Parser<'a> {
     /// Recover on a doc comment before `}`.
     fn recover_doc_comment_before_brace(&mut self) -> bool {
         if let token::DocComment(..) = self.token.kind {
-            if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
+            if self.look_ahead(1, |tok| tok == &token::CloseBrace) {
                 // FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
                 struct_span_code_err!(
                     self.dcx(),
@@ -1206,7 +1202,7 @@ impl<'a> Parser<'a> {
         // FIXME: This recovery should be tested better.
         if safety == Safety::Default
             && self.token.is_keyword(kw::Unsafe)
-            && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
+            && self.look_ahead(1, |t| *t == token::OpenBrace)
         {
             self.expect(exp!(OpenBrace)).unwrap_err().emit();
             safety = Safety::Unsafe(self.token.span);
@@ -1718,7 +1714,7 @@ impl<'a> Parser<'a> {
         } else if self.eat(exp!(Semi)) {
             VariantData::Unit(DUMMY_NODE_ID)
         // Record-style struct definition
-        } else if self.token == token::OpenDelim(Delimiter::Brace) {
+        } else if self.token == token::OpenBrace {
             let (fields, recovered) = self.parse_record_struct_body(
                 "struct",
                 ident.span,
@@ -1726,7 +1722,7 @@ impl<'a> Parser<'a> {
             )?;
             VariantData::Struct { fields, recovered }
         // Tuple-style struct definition with optional where-clause.
-        } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+        } else if self.token == token::OpenParen {
             let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
             generics.where_clause = self.parse_where_clause()?;
             self.expect_semi()?;
@@ -1753,7 +1749,7 @@ impl<'a> Parser<'a> {
                 generics.where_clause.has_where_token,
             )?;
             VariantData::Struct { fields, recovered }
-        } else if self.token == token::OpenDelim(Delimiter::Brace) {
+        } else if self.token == token::OpenBrace {
             let (fields, recovered) = self.parse_record_struct_body(
                 "union",
                 ident.span,
@@ -1784,7 +1780,7 @@ impl<'a> Parser<'a> {
         let mut fields = ThinVec::new();
         let mut recovered = Recovered::No;
         if self.eat(exp!(OpenBrace)) {
-            while self.token != token::CloseDelim(Delimiter::Brace) {
+            while self.token != token::CloseBrace {
                 match self.parse_field_def(adt_ty) {
                     Ok(field) => {
                         fields.push(field);
@@ -1941,7 +1937,7 @@ impl<'a> Parser<'a> {
             token::Comma => {
                 self.bump();
             }
-            token::CloseDelim(Delimiter::Brace) => {}
+            token::CloseBrace => {}
             token::DocComment(..) => {
                 let previous_span = self.prev_token.span;
                 let mut err = errors::DocCommentDoesNotDocumentAnything {
@@ -1955,7 +1951,7 @@ impl<'a> Parser<'a> {
                 if !seen_comma && comma_after_doc_seen {
                     seen_comma = true;
                 }
-                if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
+                if comma_after_doc_seen || self.token == token::CloseBrace {
                     self.dcx().emit_err(err);
                 } else {
                     if !seen_comma {
@@ -1993,7 +1989,7 @@ impl<'a> Parser<'a> {
 
                 if self.token.is_ident()
                     || (self.token == TokenKind::Pound
-                        && (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket))))
+                        && (self.look_ahead(1, |t| t == &token::OpenBracket)))
                 {
                     // This is likely another field, TokenKind::Pound is used for `#[..]`
                     // attribute for next field. Emit the diagnostic and continue parsing.
@@ -2447,7 +2443,7 @@ impl<'a> Parser<'a> {
         match self.expected_one_of_not_found(&[], expected) {
             Ok(error_guaranteed) => Ok(error_guaranteed),
             Err(mut err) => {
-                if self.token == token::CloseDelim(Delimiter::Brace) {
+                if self.token == token::CloseBrace {
                     // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
                     // the AST for typechecking.
                     err.span_label(ident_span, "while parsing this `fn`");
@@ -2874,7 +2870,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
         let mut first_param = true;
         // Parse the arguments, starting out with `self` being allowed...
-        if self.token != TokenKind::OpenDelim(Delimiter::Parenthesis)
+        if self.token != TokenKind::OpenParen
         // might be typo'd trait impl, handled elsewhere
         && !self.token.is_keyword(kw::For)
         {
@@ -2892,7 +2888,7 @@ impl<'a> Parser<'a> {
                 // When parsing a param failed, we should check to make the span of the param
                 // not contain '(' before it.
                 // For example when parsing `*mut Self` in function `fn oof(*mut Self)`.
-                let lo = if let TokenKind::OpenDelim(Delimiter::Parenthesis) = p.prev_token.kind {
+                let lo = if let TokenKind::OpenParen = p.prev_token.kind {
                     p.prev_token.span.shrink_to_hi()
                 } else {
                     p.prev_token.span
@@ -2969,9 +2965,7 @@ impl<'a> Parser<'a> {
                         }
                     }
 
-                    if this.token != token::Comma
-                        && this.token != token::CloseDelim(Delimiter::Parenthesis)
-                    {
+                    if this.token != token::Comma && this.token != token::CloseParen {
                         // This wasn't actually a type, but a pattern looking like a type,
                         // so we are going to rollback and re-parse for recovery.
                         ty = this.unexpected_any();
@@ -3153,7 +3147,7 @@ impl<'a> Parser<'a> {
 
     fn is_named_param(&self) -> bool {
         let offset = match &self.token.kind {
-            token::OpenDelim(Delimiter::Invisible(origin)) => match origin {
+            token::OpenInvisible(origin) => match origin {
                 InvisibleOrigin::MetaVar(MetaVarKind::Pat(_)) => {
                     return self.check_noexpect_past_close_delim(&token::Colon);
                 }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 2221a261b4c..d73adb39826 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -23,8 +23,7 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{
-    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token,
-    TokenKind,
+    self, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token, TokenKind,
 };
 use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
@@ -327,10 +326,7 @@ impl TokenCursor {
             if let Some(tree) = self.curr.curr() {
                 match tree {
                     &TokenTree::Token(token, spacing) => {
-                        debug_assert!(!matches!(
-                            token.kind,
-                            token::OpenDelim(_) | token::CloseDelim(_)
-                        ));
+                        debug_assert!(!token.kind.is_delim());
                         let res = (token, spacing);
                         self.curr.bump();
                         return res;
@@ -339,7 +335,7 @@ impl TokenCursor {
                         let trees = TokenTreeCursor::new(tts.clone());
                         self.stack.push(mem::replace(&mut self.curr, trees));
                         if !delim.skip() {
-                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
+                            return (Token::new(delim.as_open_token_kind(), sp.open), spacing.open);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
@@ -352,7 +348,7 @@ impl TokenCursor {
                 self.curr = parent;
                 self.curr.bump(); // move past the `Delimited`
                 if !delim.skip() {
-                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
+                    return (Token::new(delim.as_close_token_kind(), span.close), spacing.close);
                 }
                 // No close delimiter to return; continue on to the next iteration.
             } else {
@@ -423,7 +419,7 @@ impl TokenDescription {
             _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
             _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
             token::DocComment(..) => Some(TokenDescription::DocComment),
-            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
+            token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => {
                 Some(TokenDescription::MetaVar(kind))
             }
             _ => None,
@@ -620,9 +616,8 @@ impl<'a> Parser<'a> {
     // past the entire `TokenTree::Delimited` in a single step, avoiding the
     // need for unbounded token lookahead.
     //
-    // Primarily used when `self.token` matches
-    // `OpenDelim(Delimiter::Invisible(_))`, to look ahead through the current
-    // metavar expansion.
+    // Primarily used when `self.token` matches `OpenInvisible(_))`, to look
+    // ahead through the current metavar expansion.
     fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
         let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
         tree_cursor.bump();
@@ -756,8 +751,7 @@ impl<'a> Parser<'a> {
         match_mv_kind: impl Fn(MetaVarKind) -> bool,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> Option<T> {
-        if let token::OpenDelim(delim) = self.token.kind
-            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+        if let token::OpenInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
             && match_mv_kind(mv_kind)
         {
             self.bump();
@@ -776,8 +770,7 @@ impl<'a> Parser<'a> {
                 }
             };
 
-            if let token::CloseDelim(delim) = self.token.kind
-                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+            if let token::CloseInvisible(InvisibleOrigin::MetaVar(mv_kind)) = self.token.kind
                 && match_mv_kind(mv_kind)
             {
                 self.bump();
@@ -838,10 +831,8 @@ impl<'a> Parser<'a> {
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match &t.kind {
-                token::OpenDelim(Delimiter::Brace) => true,
-                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
-                    MetaVarKind::Block,
-                ))) => true,
+                token::OpenBrace => true,
+                token::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)) => true,
                 _ => false,
             })
     }
@@ -960,7 +951,7 @@ impl<'a> Parser<'a> {
         let mut v = ThinVec::new();
 
         while !self.expect_any_with_type(closes_expected, closes_not_expected) {
-            if let token::CloseDelim(..) | token::Eof = self.token.kind {
+            if self.token.kind.is_close_delim_or_eof() {
                 break;
             }
             if let Some(exp) = sep.sep {
@@ -1244,7 +1235,7 @@ impl<'a> Parser<'a> {
         }
         debug_assert!(!matches!(
             next.0.kind,
-            token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
+            token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
         ));
         self.inlined_bump_with(next)
     }
@@ -1269,7 +1260,7 @@ impl<'a> Parser<'a> {
                         TokenTree::Token(token, _) => return looker(token),
                         &TokenTree::Delimited(dspan, _, delim, _) => {
                             if !delim.skip() {
-                                return looker(&Token::new(token::OpenDelim(delim), dspan.open));
+                                return looker(&Token::new(delim.as_open_token_kind(), dspan.open));
                             }
                         }
                     }
@@ -1283,7 +1274,7 @@ impl<'a> Parser<'a> {
                     {
                         // We are not in the outermost token stream, so we have
                         // delimiters. Also, those delimiters are not skipped.
-                        return looker(&Token::new(token::CloseDelim(delim), span.close));
+                        return looker(&Token::new(delim.as_close_token_kind(), span.close));
                     }
                 }
             }
@@ -1298,7 +1289,7 @@ impl<'a> Parser<'a> {
             token = cursor.next().0;
             if matches!(
                 token.kind,
-                token::OpenDelim(delim) | token::CloseDelim(delim) if delim.skip()
+                token::OpenInvisible(origin) | token::CloseInvisible(origin) if origin.skip()
             ) {
                 continue;
             }
@@ -1386,8 +1377,7 @@ impl<'a> Parser<'a> {
     fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
         // Avoid const blocks and const closures to be parsed as const items
         if (self.check_const_closure() == is_closure)
-            && !self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
+            && !self.look_ahead(1, |t| *t == token::OpenBrace || t.is_metavar_block())
             && self.eat_keyword_case(exp!(Const), case)
         {
             Const::Yes(self.prev_token_uninterpolated_span())
@@ -1486,48 +1476,46 @@ impl<'a> Parser<'a> {
 
     /// Parses a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
-        match self.token.kind {
-            token::OpenDelim(..) => {
-                // Clone the `TokenTree::Delimited` that we are currently
-                // within. That's what we are going to return.
-                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
-                debug_assert_matches!(tree, TokenTree::Delimited(..));
-
-                // Advance the token cursor through the entire delimited
-                // sequence. After getting the `OpenDelim` we are *within* the
-                // delimited sequence, i.e. at depth `d`. After getting the
-                // matching `CloseDelim` we are *after* the delimited sequence,
-                // i.e. at depth `d - 1`.
-                let target_depth = self.token_cursor.stack.len() - 1;
-                loop {
-                    // Advance one token at a time, so `TokenCursor::next()`
-                    // can capture these tokens if necessary.
-                    self.bump();
-                    if self.token_cursor.stack.len() == target_depth {
-                        debug_assert_matches!(self.token.kind, token::CloseDelim(_));
-                        break;
-                    }
-                }
-
-                // Consume close delimiter
+        if self.token.kind.open_delim().is_some() {
+            // Clone the `TokenTree::Delimited` that we are currently
+            // within. That's what we are going to return.
+            let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
+            debug_assert_matches!(tree, TokenTree::Delimited(..));
+
+            // Advance the token cursor through the entire delimited
+            // sequence. After getting the `OpenDelim` we are *within* the
+            // delimited sequence, i.e. at depth `d`. After getting the
+            // matching `CloseDelim` we are *after* the delimited sequence,
+            // i.e. at depth `d - 1`.
+            let target_depth = self.token_cursor.stack.len() - 1;
+            loop {
+                // Advance one token at a time, so `TokenCursor::next()`
+                // can capture these tokens if necessary.
                 self.bump();
-                tree
-            }
-            token::CloseDelim(_) | token::Eof => unreachable!(),
-            _ => {
-                let prev_spacing = self.token_spacing;
-                self.bump();
-                TokenTree::Token(self.prev_token, prev_spacing)
+                if self.token_cursor.stack.len() == target_depth {
+                    debug_assert!(self.token.kind.close_delim().is_some());
+                    break;
+                }
             }
+
+            // Consume close delimiter
+            self.bump();
+            tree
+        } else {
+            assert!(!self.token.kind.is_close_delim_or_eof());
+            let prev_spacing = self.token_spacing;
+            self.bump();
+            TokenTree::Token(self.prev_token, prev_spacing)
         }
     }
 
     pub fn parse_tokens(&mut self) -> TokenStream {
         let mut result = Vec::new();
         loop {
-            match self.token.kind {
-                token::Eof | token::CloseDelim(..) => break,
-                _ => result.push(self.parse_token_tree()),
+            if self.token.kind.is_close_delim_or_eof() {
+                break;
+            } else {
+                result.push(self.parse_token_tree());
             }
         }
         TokenStream::new(result)
@@ -1590,7 +1578,7 @@ impl<'a> Parser<'a> {
                     kind: vis,
                     tokens: None,
                 });
-            } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
+            } else if self.look_ahead(2, |t| t == &token::CloseParen)
                 && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
             {
                 // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
@@ -1687,9 +1675,7 @@ impl<'a> Parser<'a> {
 
     /// `::{` or `::*`
     fn is_import_coupler(&mut self) -> bool {
-        self.check_path_sep_and_look_ahead(|t| {
-            matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star)
-        })
+        self.check_path_sep_and_look_ahead(|t| matches!(t.kind, token::OpenBrace | token::Star))
     }
 
     // Debug view of the parser's token stream, up to `{lookahead}` tokens.
@@ -1744,9 +1730,7 @@ impl<'a> Parser<'a> {
     pub fn token_uninterpolated_span(&self) -> Span {
         match &self.token.kind {
             token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
-            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
-                self.look_ahead(1, |t| t.span)
-            }
+            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(1, |t| t.span),
             _ => self.token.span,
         }
     }
@@ -1755,9 +1739,7 @@ impl<'a> Parser<'a> {
     pub fn prev_token_uninterpolated_span(&self) -> Span {
         match &self.prev_token.kind {
             token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
-            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
-                self.look_ahead(0, |t| t.span)
-            }
+            token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => self.look_ahead(0, |t| t.span),
             _ => self.prev_token.span,
         }
     }
@@ -1776,7 +1758,7 @@ pub(crate) fn make_unclosed_delims_error(
     };
     let err = psess.dcx().create_err(MismatchedClosingDelimiter {
         spans,
-        delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
+        delimiter: pprust::token_kind_to_string(&found_delim.as_close_token_kind()).to_string(),
         unmatched: unmatched.found_span,
         opening_candidate: unmatched.candidate_span,
         unclosed: unmatched.unclosed_span,
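
In mod.rs the conversion runs in the other direction: the token cursor stores a `Delimiter` and, when entering or leaving a delimited tree, now produces the corresponding fieldless kind via `as_open_token_kind()`/`as_close_token_kind()` instead of wrapping the delimiter in `OpenDelim`/`CloseDelim`. A hedged sketch of that mapping with mock enums (the real `Delimiter` also has an `Invisible` variant, omitted here):

```
// Illustrative mock only: mapping a stored `Delimiter` back to the new
// fieldless open/close token kinds, as the token cursor hunks above do.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TokenKind {
    OpenParen,
    CloseParen,
    OpenBrace,
    CloseBrace,
    OpenBracket,
    CloseBracket,
}

#[derive(Clone, Copy)]
enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
}

impl Delimiter {
    fn as_open_token_kind(self) -> TokenKind {
        match self {
            Delimiter::Parenthesis => TokenKind::OpenParen,
            Delimiter::Brace => TokenKind::OpenBrace,
            Delimiter::Bracket => TokenKind::OpenBracket,
        }
    }

    fn as_close_token_kind(self) -> TokenKind {
        match self {
            Delimiter::Parenthesis => TokenKind::CloseParen,
            Delimiter::Brace => TokenKind::CloseBrace,
            Delimiter::Bracket => TokenKind::CloseBracket,
        }
    }
}

fn main() {
    // Previously: `Token::new(token::OpenDelim(delim), sp.open)`.
    // Now the delimiter is converted to a concrete token kind first.
    let delim = Delimiter::Brace;
    assert_eq!(delim.as_open_token_kind(), TokenKind::OpenBrace);
    assert_eq!(delim.as_close_token_kind(), TokenKind::CloseBrace);
}
```

The same mapping is what `make_unclosed_delims_error` above relies on to pretty-print the expected closing token.
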
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index b6e89cd7fa4..7c83e96c160 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,7 +1,7 @@
 use rustc_ast::ptr::P;
 use rustc_ast::token::NtExprKind::*;
 use rustc_ast::token::NtPatKind::*;
-use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, NonterminalKind, Token};
+use rustc_ast::token::{self, InvisibleOrigin, MetaVarKind, NonterminalKind, Token};
 use rustc_errors::PResult;
 use rustc_span::{Ident, kw};
 
@@ -69,13 +69,13 @@ impl<'a> Parser<'a> {
                 | token::Ident(..)
                 | token::NtIdent(..)
                 | token::NtLifetime(..)
-                | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true,
+                | token::OpenInvisible(InvisibleOrigin::MetaVar(_)) => true,
                 _ => token.can_begin_type(),
             },
             NonterminalKind::Block => match &token.kind {
-                token::OpenDelim(Delimiter::Brace) => true,
+                token::OpenBrace => true,
                 token::NtLifetime(..) => true,
-                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
+                token::OpenInvisible(InvisibleOrigin::MetaVar(k)) => match k {
                     MetaVarKind::Block
                     | MetaVarKind::Stmt
                     | MetaVarKind::Expr { .. }
@@ -94,9 +94,7 @@ impl<'a> Parser<'a> {
             },
             NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::PathSep | token::Ident(..) | token::NtIdent(..) => true,
-                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
-                    may_be_ident(*kind)
-                }
+                token::OpenInvisible(InvisibleOrigin::MetaVar(kind)) => may_be_ident(*kind),
                 _ => false,
             },
             NonterminalKind::Pat(pat_kind) => token.can_begin_pattern(pat_kind),
@@ -105,7 +103,7 @@ impl<'a> Parser<'a> {
                 _ => false,
             },
             NonterminalKind::TT | NonterminalKind::Item | NonterminalKind::Stmt => {
-                !matches!(token.kind, token::CloseDelim(_))
+                token.kind.close_delim().is_none()
             }
         }
     }
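
Unlike the bracket kinds, the invisible delimiters do not become fieldless: the nonterminal.rs hunks match `OpenInvisible(InvisibleOrigin::MetaVar(..))` to recognize tokens produced by metavariable expansion. A simplified, self-contained sketch of that matching (mock enums only, with most `MetaVarKind`s omitted):

```
// Illustrative mock only: the invisible open/close variants keep their origin
// payload, which is how metavariable expansions are recognized above.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum MetaVarKind {
    Block,
    Expr,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum InvisibleOrigin {
    MetaVar(MetaVarKind),
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TokenKind {
    OpenBrace,
    OpenInvisible(InvisibleOrigin),
    CloseInvisible(InvisibleOrigin),
}

// Roughly the shape of the block lookahead after the change: either a literal
// `{` or an invisible delimiter left behind by a `$b:block` metavariable.
fn looks_like_block(kind: TokenKind) -> bool {
    matches!(
        kind,
        TokenKind::OpenBrace
            | TokenKind::OpenInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Block))
    )
}

fn main() {
    assert!(looks_like_block(TokenKind::OpenBrace));
    assert!(looks_like_block(TokenKind::OpenInvisible(InvisibleOrigin::MetaVar(
        MetaVarKind::Block
    ))));
    assert!(!looks_like_block(TokenKind::CloseInvisible(InvisibleOrigin::MetaVar(
        MetaVarKind::Expr
    ))));
}
```
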
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index d5f469f9aa9..d6ff80b2eb4 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -3,7 +3,7 @@ use std::ops::Bound;
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::ptr::P;
 use rustc_ast::token::NtPatKind::*;
-use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token};
+use rustc_ast::token::{self, IdentIsRaw, MetaVarKind, Token};
 use rustc_ast::util::parser::ExprPrecedence;
 use rustc_ast::visit::{self, Visitor};
 use rustc_ast::{
@@ -323,7 +323,7 @@ impl<'a> Parser<'a> {
     fn eat_or_separator(&mut self, lo: Option<Span>) -> EatOrResult {
         if self.recover_trailing_vert(lo) {
             EatOrResult::TrailingVert
-        } else if matches!(self.token.kind, token::OrOr) {
+        } else if self.token.kind == token::OrOr {
             // Found `||`; Recover and pretend we parsed `|`.
             self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
             self.bump();
@@ -352,9 +352,9 @@ impl<'a> Parser<'a> {
                 | token::Semi // e.g. `let a |;`.
                 | token::Colon // e.g. `let a | :`.
                 | token::Comma // e.g. `let (a |,)`.
-                | token::CloseDelim(Delimiter::Bracket) // e.g. `let [a | ]`.
-                | token::CloseDelim(Delimiter::Parenthesis) // e.g. `let (a | )`.
-                | token::CloseDelim(Delimiter::Brace) // e.g. `let A { f: a | }`.
+                | token::CloseBracket // e.g. `let [a | ]`.
+                | token::CloseParen // e.g. `let (a | )`.
+                | token::CloseBrace // e.g. `let A { f: a | }`.
             )
         });
         match (is_end_ahead, &self.token.kind) {
@@ -364,7 +364,7 @@ impl<'a> Parser<'a> {
                     span: self.token.span,
                     start: lo,
                     token: self.token,
-                    note_double_vert: matches!(self.token.kind, token::OrOr),
+                    note_double_vert: self.token.kind == token::OrOr,
                 });
                 self.bump();
                 true
@@ -438,8 +438,8 @@ impl<'a> Parser<'a> {
                 | token::Caret | token::And | token::Shl | token::Shr // excludes `Or`
             )
             || self.token == token::Question
-            || (self.token == token::OpenDelim(Delimiter::Bracket)
-                && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))) // excludes `[]`
+            || (self.token == token::OpenBracket
+                && self.look_ahead(1, |t| *t != token::CloseBracket)) // excludes `[]`
             || self.token.is_keyword(kw::As);
 
         if !has_dot_expr && !has_trailing_operator {
@@ -481,7 +481,7 @@ impl<'a> Parser<'a> {
         let is_bound = is_end_bound
             // is_start_bound: either `..` or `)..`
             || self.token.is_range_separator()
-            || self.token == token::CloseDelim(Delimiter::Parenthesis)
+            || self.token == token::CloseParen
                 && self.look_ahead(1, Token::is_range_separator);
 
         let span = expr.span;
@@ -835,7 +835,7 @@ impl<'a> Parser<'a> {
             // because we never have `'a: label {}` in a pattern position anyways, but it does
             // keep us from suggesting something like `let 'a: Ty = ..` => `let 'a': Ty = ..`
             && could_be_unclosed_char_literal(lt)
-            && !self.look_ahead(1, |token| matches!(token.kind, token::Colon))
+            && !self.look_ahead(1, |token| token.kind == token::Colon)
         {
             // Recover a `'a` as a `'a'` literal
             let lt = self.expect_lifetime();
@@ -1255,8 +1255,8 @@ impl<'a> Parser<'a> {
                 || t.is_metavar_expr()
                 || t.is_lifetime() // recover `'a` instead of `'a'`
                 || (self.may_recover() // recover leading `(`
-                    && *t == token::OpenDelim(Delimiter::Parenthesis)
-                    && self.look_ahead(dist + 1, |t| *t != token::OpenDelim(Delimiter::Parenthesis))
+                    && *t == token::OpenParen
+                    && self.look_ahead(dist + 1, |t| *t != token::OpenParen)
                     && self.is_pat_range_end_start(dist + 1))
             })
     }
@@ -1264,9 +1264,8 @@ impl<'a> Parser<'a> {
     /// Parse a range pattern end bound
     fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
         // recover leading `(`
-        let open_paren = (self.may_recover()
-            && self.eat_noexpect(&token::OpenDelim(Delimiter::Parenthesis)))
-        .then_some(self.prev_token.span);
+        let open_paren = (self.may_recover() && self.eat_noexpect(&token::OpenParen))
+            .then_some(self.prev_token.span);
 
         let bound = if self.check_inline_const(0) {
             self.parse_const_block(self.token.span, true)
@@ -1322,8 +1321,8 @@ impl<'a> Parser<'a> {
         // Avoid `in`. Due to recovery in the list parser this messes with `for ( $pat in $expr )`.
         && !self.token.is_keyword(kw::In)
         // Try to do something more complex?
-        && self.look_ahead(1, |t| !matches!(t.kind, token::OpenDelim(Delimiter::Parenthesis) // A tuple struct pattern.
-            | token::OpenDelim(Delimiter::Brace) // A struct pattern.
+        && self.look_ahead(1, |t| !matches!(t.kind, token::OpenParen // A tuple struct pattern.
+            | token::OpenBrace // A struct pattern.
             | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
             | token::PathSep // A tuple / struct variant pattern.
             | token::Bang)) // A macro expanding to a pattern.
@@ -1361,7 +1360,7 @@ impl<'a> Parser<'a> {
         // This shortly leads to a parse error. Note that if there is no explicit
         // binding mode then we do not end up here, because the lookahead
         // will direct us over to `parse_enum_variant()`.
-        if self.token == token::OpenDelim(Delimiter::Parenthesis) {
+        if self.token == token::OpenParen {
             return Err(self
                 .dcx()
                 .create_err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }));
@@ -1429,9 +1428,9 @@ impl<'a> Parser<'a> {
             token::Comma,
             token::Semi,
             token::At,
-            token::OpenDelim(Delimiter::Brace),
-            token::CloseDelim(Delimiter::Brace),
-            token::CloseDelim(Delimiter::Parenthesis),
+            token::OpenBrace,
+            token::CloseBrace,
+            token::CloseParen,
         ]
         .contains(&self.token.kind)
     }
@@ -1489,7 +1488,7 @@ impl<'a> Parser<'a> {
         let mut first_etc_and_maybe_comma_span = None;
         let mut last_non_comma_dotdot_span = None;
 
-        while self.token != token::CloseDelim(Delimiter::Brace) {
+        while self.token != token::CloseBrace {
             // check that a comma comes after every field
             if !ate_comma {
                 let err = if self.token == token::At {
@@ -1538,7 +1537,7 @@ impl<'a> Parser<'a> {
                 self.recover_bad_dot_dot();
                 self.bump(); // `..` || `...` || `_`
 
-                if self.token == token::CloseDelim(Delimiter::Brace) {
+                if self.token == token::CloseBrace {
                     break;
                 }
                 let token_str = super::token_descr(&self.token);
@@ -1561,7 +1560,7 @@ impl<'a> Parser<'a> {
                     ate_comma = true;
                 }
 
-                if self.token == token::CloseDelim(Delimiter::Brace) {
+                if self.token == token::CloseBrace {
                     // If the struct looks otherwise well formed, recover and continue.
                     if let Some(sp) = comma_sp {
                         err.span_suggestion_short(
@@ -1681,7 +1680,7 @@ impl<'a> Parser<'a> {
             // We found `ref mut? ident:`, try to parse a `name,` or `name }`.
             && let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span))
             && self.look_ahead(2, |t| {
-                t == &token::Comma || t == &token::CloseDelim(Delimiter::Brace)
+                t == &token::Comma || t == &token::CloseBrace
             })
         {
             let span = last.pat.span.with_hi(ident.span.lo());
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 02883655662..1a02d45f0e3 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -2,7 +2,7 @@ use std::mem;
 
 use ast::token::IdentIsRaw;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, MetaVarKind, Token, TokenKind};
+use rustc_ast::token::{self, MetaVarKind, Token, TokenKind};
 use rustc_ast::{
     self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocItemConstraint,
     AssocItemConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
@@ -302,10 +302,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, PathSegment> {
         let ident = self.parse_path_segment_ident()?;
         let is_args_start = |token: &Token| {
-            matches!(
-                token.kind,
-                token::Lt | token::Shl | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow
-            )
+            matches!(token.kind, token::Lt | token::Shl | token::OpenParen | token::LArrow)
         };
         let check_args_start = |this: &mut Self| {
             this.expected_token_types.insert(TokenType::Lt);
@@ -366,7 +363,7 @@ impl<'a> Parser<'a> {
                     })?;
                     let span = lo.to(self.prev_token.span);
                     AngleBracketedArgs { args, span }.into()
-                } else if self.token == token::OpenDelim(Delimiter::Parenthesis)
+                } else if self.token == token::OpenParen
                     // FIXME(return_type_notation): Could also recover `...` here.
                     && self.look_ahead(1, |t| *t == token::DotDot)
                 {
@@ -852,7 +849,7 @@ impl<'a> Parser<'a> {
     /// the caller.
     pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
         // Parse const argument.
-        let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
+        let value = if self.token.kind == token::OpenBrace {
             self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?
         } else {
             self.handle_unambiguous_unbraced_const_arg()?
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 0cc8b605018..885a65d4de7 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -162,7 +162,7 @@ impl<'a> Parser<'a> {
             // Do not attempt to parse an expression if we're done here.
             self.error_outer_attrs(attrs);
             self.mk_stmt(lo, StmtKind::Empty)
-        } else if self.token != token::CloseDelim(Delimiter::Brace) {
+        } else if self.token != token::CloseBrace {
             // Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
             // above.
             let restrictions =
@@ -254,9 +254,7 @@ impl<'a> Parser<'a> {
                 self.token.kind,
                 token::Semi
                     | token::Eof
-                    | token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
-                        MetaVarKind::Stmt
-                    )))
+                    | token::CloseInvisible(InvisibleOrigin::MetaVar(MetaVarKind::Stmt))
             ) {
             StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
         } else {
@@ -547,7 +545,7 @@ impl<'a> Parser<'a> {
             //                                            +   +
             Ok(Some(_))
                 if (!self.token.is_keyword(kw::Else)
-                    && self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)))
+                    && self.look_ahead(1, |t| t == &token::OpenBrace))
                     || do_not_suggest_help => {}
             // Do not suggest `if foo println!("") {;}` (as would be seen in test for #46836).
             Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
@@ -584,9 +582,7 @@ impl<'a> Parser<'a> {
         stmt_kind: &StmtKind,
     ) {
         match (&self.token.kind, &stmt_kind) {
-            (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr))
-                if let ExprKind::Call(..) = expr.kind =>
-            {
+            (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Call(..) = expr.kind => {
                 // for _ in x y() {}
                 e.span_suggestion_verbose(
                     between,
@@ -595,9 +591,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect,
                 );
             }
-            (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr))
-                if let ExprKind::Field(..) = expr.kind =>
-            {
+            (token::OpenBrace, StmtKind::Expr(expr)) if let ExprKind::Field(..) = expr.kind => {
                 // for _ in x y.z {}
                 e.span_suggestion_verbose(
                     between,
@@ -606,7 +600,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect,
                 );
             }
-            (token::CloseDelim(Delimiter::Brace), StmtKind::Expr(expr))
+            (token::CloseBrace, StmtKind::Expr(expr))
                 if let ExprKind::Struct(expr) = &expr.kind
                     && let None = expr.qself
                     && expr.path.segments.len() == 1 =>
@@ -621,7 +615,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect,
                 );
             }
-            (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr))
+            (token::OpenBrace, StmtKind::Expr(expr))
                 if let ExprKind::Lit(lit) = expr.kind
                     && let None = lit.suffix
                     && let token::LitKind::Integer | token::LitKind::Float = lit.kind =>
@@ -635,7 +629,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect,
                 );
             }
-            (token::OpenDelim(Delimiter::Brace), StmtKind::Expr(expr))
+            (token::OpenBrace, StmtKind::Expr(expr))
                 if let ExprKind::Loop(..)
                 | ExprKind::If(..)
                 | ExprKind::While(..)
@@ -658,7 +652,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect,
                 );
             }
-            (token::OpenDelim(Delimiter::Brace), _) => {}
+            (token::OpenBrace, _) => {}
             (_, _) => {
                 e.multipart_suggestion(
                     "you might have meant to write this as part of a block",
@@ -809,7 +803,7 @@ impl<'a> Parser<'a> {
             // Likely `foo bar`
         } else if self.prev_token.kind == token::Question {
             // `foo? bar`
-        } else if self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis) {
+        } else if self.prev_token.kind == token::CloseParen {
             // `foo() bar`
         } else {
             return;
@@ -826,7 +820,7 @@ impl<'a> Parser<'a> {
                 Applicability::MaybeIncorrect,
             );
         }
-        if self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis)) {
+        if self.look_ahead(1, |t| t.kind == token::OpenParen) {
             err.span_suggestion_verbose(
                 self.prev_token.span.between(self.token.span),
                 "you might have meant to write a method call",
@@ -870,8 +864,7 @@ impl<'a> Parser<'a> {
             StmtKind::Expr(expr)
                 if classify::expr_requires_semi_to_be_stmt(expr)
                     && !expr.attrs.is_empty()
-                    && ![token::Eof, token::Semi, token::CloseDelim(Delimiter::Brace)]
-                        .contains(&self.token.kind) =>
+                    && !matches!(self.token.kind, token::Eof | token::Semi | token::CloseBrace) =>
             {
                 // The user has written `#[attr] expr` which is unsupported. (#106020)
                 let guar = self.attr_on_non_tail_expr(&expr);
@@ -919,7 +912,7 @@ impl<'a> Parser<'a> {
                                                     token::Ident(
                                                         kw::For | kw::Loop | kw::While,
                                                         token::IdentIsRaw::No
-                                                    ) | token::OpenDelim(Delimiter::Brace)
+                                                    ) | token::OpenBrace
                                                 )
                                         })
                                     {
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 2f958f4d492..8285070839a 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -2573,14 +2573,14 @@ fn look_ahead() {
         // Current position is the `fn`.
         look(&p, 0, token::Ident(kw::Fn, raw_no));
         look(&p, 1, token::Ident(sym_f, raw_no));
-        look(&p, 2, token::OpenDelim(Delimiter::Parenthesis));
+        look(&p, 2, token::OpenParen);
         look(&p, 3, token::Ident(sym_x, raw_no));
         look(&p, 4, token::Colon);
         look(&p, 5, token::Ident(sym::u32, raw_no));
-        look(&p, 6, token::CloseDelim(Delimiter::Parenthesis));
-        look(&p, 7, token::OpenDelim(Delimiter::Brace));
+        look(&p, 6, token::CloseParen);
+        look(&p, 7, token::OpenBrace);
         look(&p, 8, token::Ident(sym_x, raw_no));
-        look(&p, 9, token::CloseDelim(Delimiter::Brace));
+        look(&p, 9, token::CloseBrace);
         look(&p, 10, token::Ident(kw::Struct, raw_no));
         look(&p, 11, token::Ident(sym_S, raw_no));
         look(&p, 12, token::Semi);
@@ -2597,10 +2597,10 @@ fn look_ahead() {
         look(&p, 0, token::Ident(sym_x, raw_no));
         look(&p, 1, token::Colon);
         look(&p, 2, token::Ident(sym::u32, raw_no));
-        look(&p, 3, token::CloseDelim(Delimiter::Parenthesis));
-        look(&p, 4, token::OpenDelim(Delimiter::Brace));
+        look(&p, 3, token::CloseParen);
+        look(&p, 4, token::OpenBrace);
         look(&p, 5, token::Ident(sym_x, raw_no));
-        look(&p, 6, token::CloseDelim(Delimiter::Brace));
+        look(&p, 6, token::CloseBrace);
         look(&p, 7, token::Ident(kw::Struct, raw_no));
         look(&p, 8, token::Ident(sym_S, raw_no));
         look(&p, 9, token::Semi);
@@ -2652,18 +2652,18 @@ fn look_ahead_non_outermost_stream() {
         }
         look(&p, 0, token::Ident(kw::Fn, raw_no));
         look(&p, 1, token::Ident(sym_f, raw_no));
-        look(&p, 2, token::OpenDelim(Delimiter::Parenthesis));
+        look(&p, 2, token::OpenParen);
         look(&p, 3, token::Ident(sym_x, raw_no));
         look(&p, 4, token::Colon);
         look(&p, 5, token::Ident(sym::u32, raw_no));
-        look(&p, 6, token::CloseDelim(Delimiter::Parenthesis));
-        look(&p, 7, token::OpenDelim(Delimiter::Brace));
+        look(&p, 6, token::CloseParen);
+        look(&p, 7, token::OpenBrace);
         look(&p, 8, token::Ident(sym_x, raw_no));
-        look(&p, 9, token::CloseDelim(Delimiter::Brace));
+        look(&p, 9, token::CloseBrace);
         look(&p, 10, token::Ident(kw::Struct, raw_no));
         look(&p, 11, token::Ident(sym_S, raw_no));
         look(&p, 12, token::Semi);
-        look(&p, 13, token::CloseDelim(Delimiter::Brace));
+        look(&p, 13, token::CloseBrace);
         // Any lookahead past the end of the token stream returns `Eof`.
         look(&p, 14, token::Eof);
         look(&p, 15, token::Eof);
@@ -2723,9 +2723,7 @@ fn debug_lookahead() {
             \"f\",
             No,
         ),
-        OpenDelim(
-            Parenthesis,
-        ),
+        OpenParen,
         Ident(
             \"x\",
             No,
@@ -2735,9 +2733,7 @@ fn debug_lookahead() {
             \"u32\",
             No,
         ),
-        CloseDelim(
-            Parenthesis,
-        ),
+        CloseParen,
     ],
     approx_token_stream_pos: 0,
     ..
@@ -2768,9 +2764,7 @@ fn debug_lookahead() {
             \"f\",
             No,
         ),
-        OpenDelim(
-            Parenthesis,
-        ),
+        OpenParen,
         Ident(
             \"x\",
             No,
@@ -2780,19 +2774,13 @@ fn debug_lookahead() {
             \"u32\",
             No,
         ),
-        CloseDelim(
-            Parenthesis,
-        ),
-        OpenDelim(
-            Brace,
-        ),
+        CloseParen,
+        OpenBrace,
         Ident(
             \"x\",
             No,
         ),
-        CloseDelim(
-            Brace,
-        ),
+        CloseBrace,
         Ident(
             \"struct\",
             No,
@@ -2817,9 +2805,7 @@ fn debug_lookahead() {
             &format!("{:#?}", p.debug_lookahead(1)),
             "Parser {
     prev_token: Token {
-        kind: OpenDelim(
-            Brace,
-        ),
+        kind: OpenBrace,
         span: Span {
             lo: BytePos(
                 13,
@@ -2844,9 +2830,7 @@ fn debug_lookahead() {
             &format!("{:#?}", p.debug_lookahead(4)),
             "Parser {
     prev_token: Token {
-        kind: OpenDelim(
-            Brace,
-        ),
+        kind: OpenBrace,
         span: Span {
             lo: BytePos(
                 13,
@@ -2862,9 +2846,7 @@ fn debug_lookahead() {
             \"x\",
             No,
         ),
-        CloseDelim(
-            Brace,
-        ),
+        CloseBrace,
         Ident(
             \"struct\",
             No,
diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs
index add3c970201..b91548196a3 100644
--- a/compiler/rustc_parse/src/parser/token_type.rs
+++ b/compiler/rustc_parse/src/parser/token_type.rs
@@ -448,18 +448,6 @@ macro_rules! exp {
             token_type: $crate::parser::token_type::TokenType::$tok
         }
     };
-    (@open, $delim:ident, $token_type:ident) => {
-        $crate::parser::token_type::ExpTokenPair {
-            tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim),
-            token_type: $crate::parser::token_type::TokenType::$token_type,
-        }
-    };
-    (@close, $delim:ident, $token_type:ident) => {
-        $crate::parser::token_type::ExpTokenPair {
-            tok: &rustc_ast::token::CloseDelim(rustc_ast::token::Delimiter::$delim),
-            token_type: $crate::parser::token_type::TokenType::$token_type,
-        }
-    };
 
     // `ExpKeywordPair` helper rules.
     (@kw, $kw:ident, $token_type:ident) => {
@@ -504,12 +492,12 @@ macro_rules! exp {
     (Question)       => { exp!(@tok, Question) };
     (Eof)            => { exp!(@tok, Eof) };
 
-    (OpenParen)      => { exp!(@open,  Parenthesis, OpenParen) };
-    (OpenBrace)      => { exp!(@open,  Brace,       OpenBrace) };
-    (OpenBracket)    => { exp!(@open,  Bracket,     OpenBracket) };
-    (CloseParen)     => { exp!(@close, Parenthesis, CloseParen) };
-    (CloseBrace)     => { exp!(@close, Brace,       CloseBrace) };
-    (CloseBracket)   => { exp!(@close, Bracket,     CloseBracket) };
+    (OpenParen)      => { exp!(@tok, OpenParen) };
+    (OpenBrace)      => { exp!(@tok, OpenBrace) };
+    (OpenBracket)    => { exp!(@tok, OpenBracket) };
+    (CloseParen)     => { exp!(@tok, CloseParen) };
+    (CloseBrace)     => { exp!(@tok, CloseBrace) };
+    (CloseBracket)   => { exp!(@tok, CloseBracket) };
 
     (As)             => { exp!(@kw, As,         KwAs) };
     (Async)          => { exp!(@kw, Async,      KwAsync) };
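
For illustration, a minimal standalone sketch of why the `@open`/`@close` rules above can collapse into the plain `@tok` rule once delimiters are fieldless variants. The `TokenKind` enum and `exp_tok!` macro here are simplified mocks, not the real `rustc_ast`/`rustc_parse` items:

```rust
// Mock sketch: with fieldless delimiter variants, a delimiter token is named
// the same way as any other fieldless token, so one macro rule covers both.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum TokenKind {
    Semi,
    OpenParen,
    CloseParen,
    OpenBrace,
    CloseBrace,
}

macro_rules! exp_tok {
    // Single rule, analogous to the surviving `@tok` arm above.
    ($tok:ident) => {
        TokenKind::$tok
    };
}

fn main() {
    // Delimiters no longer need dedicated `@open`/`@close` arms.
    assert_eq!(exp_tok!(OpenBrace), TokenKind::OpenBrace);
    assert_eq!(exp_tok!(Semi), TokenKind::Semi);
}
```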
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 42ebf26784d..17481731b11 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
+use rustc_ast::token::{self, IdentIsRaw, MetaVarKind, Token, TokenKind};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,
@@ -98,7 +98,7 @@ fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
         || t.is_lifetime()
         || t == &TokenKind::Question
         || t.is_keyword(kw::For)
-        || t == &TokenKind::OpenDelim(Delimiter::Parenthesis)
+        || t == &TokenKind::OpenParen
 }
 
 impl<'a> Parser<'a> {
@@ -355,7 +355,7 @@ impl<'a> Parser<'a> {
                 }
             }
         } else if self.check_keyword(exp!(Unsafe))
-            && self.look_ahead(1, |tok| matches!(tok.kind, token::Lt))
+            && self.look_ahead(1, |tok| tok.kind == token::Lt)
         {
             self.parse_unsafe_binder_ty()?
         } else {
@@ -534,7 +534,7 @@ impl<'a> Parser<'a> {
         let elt_ty = match self.parse_ty() {
             Ok(ty) => ty,
             Err(err)
-                if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Bracket))
+                if self.look_ahead(1, |t| *t == token::CloseBracket)
                     | self.look_ahead(1, |t| *t == token::Semi) =>
             {
                 // Recover from `[LIT; EXPR]` and `[LIT]`
@@ -1154,7 +1154,7 @@ impl<'a> Parser<'a> {
         }
 
         let mut path = if self.token.is_keyword(kw::Fn)
-            && self.look_ahead(1, |t| *t == TokenKind::OpenDelim(Delimiter::Parenthesis))
+            && self.look_ahead(1, |t| *t == TokenKind::OpenParen)
             && let Some(path) = self.recover_path_from_fn()
         {
             path
@@ -1208,7 +1208,7 @@ impl<'a> Parser<'a> {
             self.parse_path(PathStyle::Type)?
         };
 
-        if self.may_recover() && self.token == TokenKind::OpenDelim(Delimiter::Parenthesis) {
+        if self.may_recover() && self.token == TokenKind::OpenParen {
             self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?;
         }
 
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index 31f9c284d7d..fc99dd08b78 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -96,7 +96,7 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
             }
         }
         TokenTree::Delimited(_span, _spacing, delim, tts) => {
-            let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
+            let open_delim = printer.token_kind_to_string(&delim.as_open_token_kind());
             printer.word(open_delim);
             if !tts.is_empty() {
                 if *delim == Delimiter::Brace {
@@ -107,7 +107,7 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
                     printer.space();
                 }
             }
-            let close_delim = printer.token_kind_to_string(&token::CloseDelim(*delim));
+            let close_delim = printer.token_kind_to_string(&delim.as_close_token_kind());
             printer.word(close_delim);
         }
     }
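
For illustration, a minimal standalone sketch of the conversion that `Delimiter::as_open_token_kind`/`as_close_token_kind` perform at the call sites above. The types below are mocks (the real `Delimiter` also has an `Invisible(InvisibleOrigin)` variant, omitted here for brevity), not the actual `rustc_ast` definitions:

```rust
// Mock sketch: mapping a delimiter shape to its open/close token kinds.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
}

#[derive(Debug, PartialEq)]
enum TokenKind {
    OpenParen,
    CloseParen,
    OpenBrace,
    CloseBrace,
    OpenBracket,
    CloseBracket,
}

impl Delimiter {
    fn as_open_token_kind(self) -> TokenKind {
        match self {
            Delimiter::Parenthesis => TokenKind::OpenParen,
            Delimiter::Brace => TokenKind::OpenBrace,
            Delimiter::Bracket => TokenKind::OpenBracket,
        }
    }

    fn as_close_token_kind(self) -> TokenKind {
        match self {
            Delimiter::Parenthesis => TokenKind::CloseParen,
            Delimiter::Brace => TokenKind::CloseBrace,
            Delimiter::Bracket => TokenKind::CloseBracket,
        }
    }
}

fn main() {
    assert_eq!(Delimiter::Brace.as_open_token_kind(), TokenKind::OpenBrace);
    assert_eq!(Delimiter::Brace.as_close_token_kind(), TokenKind::CloseBrace);
}
```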
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index 1e16aace304..0ff0aad7a2d 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -722,7 +722,7 @@ fn last_tok(tt: &TokenTree) -> Token {
     match *tt {
         TokenTree::Token(ref t, _) => t.clone(),
         TokenTree::Delimited(delim_span, _, delim, _) => Token {
-            kind: TokenKind::CloseDelim(delim),
+            kind: delim.as_close_token_kind(),
             span: delim_span.close,
         },
     }
@@ -1124,8 +1124,14 @@ fn next_space(tok: &TokenKind) -> SpaceState {
         TokenKind::PathSep
         | TokenKind::Pound
         | TokenKind::Dollar
-        | TokenKind::OpenDelim(_)
-        | TokenKind::CloseDelim(_) => SpaceState::Never,
+        | TokenKind::OpenParen
+        | TokenKind::CloseParen
+        | TokenKind::OpenBrace
+        | TokenKind::CloseBrace
+        | TokenKind::OpenBracket
+        | TokenKind::CloseBracket
+        | TokenKind::OpenInvisible(_)
+        | TokenKind::CloseInvisible(_) => SpaceState::Never,
 
         TokenKind::Literal(..) | TokenKind::Ident(..) | TokenKind::Lifetime(..) => {
             SpaceState::Ident
diff --git a/src/tools/rustfmt/src/parse/macros/cfg_if.rs b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
index 0b7b6c4d361..30b83373c17 100644
--- a/src/tools/rustfmt/src/parse/macros/cfg_if.rs
+++ b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
@@ -1,7 +1,7 @@
 use std::panic::{AssertUnwindSafe, catch_unwind};
 
 use rustc_ast::ast;
-use rustc_ast::token::{Delimiter, TokenKind};
+use rustc_ast::token::TokenKind;
 use rustc_parse::exp;
 use rustc_parse::parser::ForceCollect;
 use rustc_span::symbol::kw;
@@ -60,9 +60,7 @@ fn parse_cfg_if_inner<'a>(
             return Err("Expected an opening brace");
         }
 
-        while parser.token != TokenKind::CloseDelim(Delimiter::Brace)
-            && parser.token.kind != TokenKind::Eof
-        {
+        while parser.token != TokenKind::CloseBrace && parser.token.kind != TokenKind::Eof {
             let item = match parser.parse_item(ForceCollect::No) {
                 Ok(Some(item_ptr)) => item_ptr.into_inner(),
                 Ok(None) => continue,