author     bors <bors@rust-lang.org>  2025-04-14 03:56:55 +0000
committer  bors <bors@rust-lang.org>  2025-04-14 03:56:55 +0000
commit     f836ae4e663b6e8938096b8559e094d18361be55 (patch)
tree       210d28f13b7c2f09b8cc78ef0c4c146c0cc28322
parent     15f58c46da79399961a09db0c650a2f90f442e6b (diff)
parent     1830245a224c523f86ad3c62be76be3f336a9fb0 (diff)
Auto merge of #124141 - nnethercote:rm-Nonterminal-and-TokenKind-Interpolated, r=petrochenkov
Remove `Nonterminal` and `TokenKind::Interpolated`

A third attempt at this; the first attempt was #96724 and the second was #114647.

r? `@ghost`
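
For readers skimming the diff below: `TokenKind::Interpolated` let a single token carry a parsed AST fragment (a `Nonterminal`, whose only remaining variant was `NtBlock`). After this change, a captured `$b:block` is instead emitted as the block's ordinary tokens wrapped in invisible delimiters tagged with `MetaVarKind::Block`, the same scheme already used for other metavariables. The sketch below illustrates that shape change with simplified stand-in types; it is not the compiler's real definitions.

```rust
#![allow(dead_code)]
// Minimal sketch of the before/after token representation; the real rustc
// types carry spans, interned symbols, and many more variants.
use std::sync::Arc;

struct Block; // stand-in for ast::Block

// Before this PR: one token variant embedded a whole parsed AST fragment.
enum Nonterminal {
    NtBlock(Box<Block>),
}
enum OldTokenKind {
    Comma,
    Interpolated(Arc<Nonterminal>), // not `Copy`; needed a hand-written `Clone`
}

// After this PR: a captured fragment is just tokens, bracketed by invisible
// delimiters recording which metavariable kind produced them, so the token
// kind can derive `Copy`.
#[derive(Clone, Copy)]
enum MetaVarKind {
    Block,
    Expr,
}
#[derive(Clone, Copy)]
enum InvisibleOrigin {
    MetaVar(MetaVarKind),
    ProcMacro,
}
#[derive(Clone, Copy)]
enum Delimiter {
    Brace,
    Invisible(InvisibleOrigin),
}
#[derive(Clone, Copy)]
enum NewTokenKind {
    Comma,
    OpenDelim(Delimiter),
    CloseDelim(Delimiter),
}

fn main() {
    // A `$b:block` capture now appears in the stream roughly as
    // OpenDelim(Invisible(MetaVar(Block))) ... CloseDelim(Invisible(MetaVar(Block))).
    let open = NewTokenKind::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
        MetaVarKind::Block,
    )));
    let _copy_of_open = open; // allowed: `NewTokenKind` is `Copy`
    let _still_usable = open; // `open` was copied, not moved
}
```
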
-rw-r--r--  compiler/rustc_ast/src/ast_traits.rs | 14
-rw-r--r--  compiler/rustc_ast/src/lib.rs | 1
-rw-r--r--  compiler/rustc_ast/src/mut_visit.rs | 40
-rw-r--r--  compiler/rustc_ast/src/token.rs | 133
-rw-r--r--  compiler/rustc_ast/src/tokenstream.rs | 62
-rw-r--r--  compiler/rustc_ast_lowering/src/lib.rs | 3
-rw-r--r--  compiler/rustc_ast_pretty/src/pprust/mod.rs | 6
-rw-r--r--  compiler/rustc_ast_pretty/src/pprust/state.rs | 12
-rw-r--r--  compiler/rustc_attr_parsing/src/context.rs | 2
-rw-r--r--  compiler/rustc_attr_parsing/src/lib.rs | 1
-rw-r--r--  compiler/rustc_attr_parsing/src/parser.rs | 20
-rw-r--r--  compiler/rustc_borrowck/src/lib.rs | 1
-rw-r--r--  compiler/rustc_builtin_macros/src/cfg_eval.rs | 10
-rw-r--r--  compiler/rustc_builtin_macros/src/lib.rs | 1
-rw-r--r--  compiler/rustc_codegen_ssa/src/lib.rs | 1
-rw-r--r--  compiler/rustc_const_eval/src/lib.rs | 1
-rw-r--r--  compiler/rustc_driver/src/lib.rs | 1
-rw-r--r--  compiler/rustc_driver_impl/src/lib.rs | 1
-rw-r--r--  compiler/rustc_expand/src/config.rs | 12
-rw-r--r--  compiler/rustc_expand/src/mbe/diagnostics.rs | 6
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs | 4
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs | 8
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs | 4
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs | 23
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs | 9
-rw-r--r--  compiler/rustc_hir/src/lib.rs | 1
-rw-r--r--  compiler/rustc_hir_analysis/src/lib.rs | 1
-rw-r--r--  compiler/rustc_hir_typeck/src/lib.rs | 1
-rw-r--r--  compiler/rustc_incremental/src/lib.rs | 1
-rw-r--r--  compiler/rustc_lint/src/lib.rs | 1
-rw-r--r--  compiler/rustc_metadata/src/lib.rs | 1
-rw-r--r--  compiler/rustc_middle/src/lib.rs | 1
-rw-r--r--  compiler/rustc_mir_build/src/lib.rs | 1
-rw-r--r--  compiler/rustc_mir_dataflow/src/lib.rs | 1
-rw-r--r--  compiler/rustc_mir_transform/src/lib.rs | 1
-rw-r--r--  compiler/rustc_monomorphize/src/lib.rs | 1
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs | 2
-rw-r--r--  compiler/rustc_parse/src/lib.rs | 1
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs | 4
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs | 10
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs | 58
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs | 7
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 41
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs | 124
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs | 6
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs | 4
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs | 12
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs | 2
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs | 4
-rw-r--r--  compiler/rustc_passes/src/lib.rs | 1
-rw-r--r--  compiler/rustc_privacy/src/lib.rs | 1
-rw-r--r--  compiler/rustc_query_impl/src/lib.rs | 1
-rw-r--r--  compiler/rustc_sanitizers/src/lib.rs | 1
-rw-r--r--  compiler/rustc_smir/src/lib.rs | 1
-rw-r--r--  compiler/rustc_symbol_mangling/src/lib.rs | 1
-rw-r--r--  compiler/rustc_transmute/src/lib.rs | 1
-rw-r--r--  compiler/rustc_ty_utils/src/lib.rs | 1
-rw-r--r--  src/tools/rustfmt/src/macros.rs | 14
-rw-r--r--  tests/ui/macros/macro-as-fn-body.rs | 2
-rw-r--r--  tests/ui/macros/syntax-error-recovery.rs | 4
-rw-r--r--  tests/ui/macros/syntax-error-recovery.stderr | 6
61 files changed, 164 insertions, 532 deletions
diff --git a/compiler/rustc_ast/src/ast_traits.rs b/compiler/rustc_ast/src/ast_traits.rs
index c9e2e9911ef..7f98e7ba8a6 100644
--- a/compiler/rustc_ast/src/ast_traits.rs
+++ b/compiler/rustc_ast/src/ast_traits.rs
@@ -6,7 +6,6 @@ use std::fmt;
 use std::marker::PhantomData;
 
 use crate::ptr::P;
-use crate::token::Nonterminal;
 use crate::tokenstream::LazyAttrTokenStream;
 use crate::{
     Arm, AssocItem, AttrItem, AttrKind, AttrVec, Attribute, Block, Crate, Expr, ExprField,
@@ -206,19 +205,6 @@ impl HasTokens for Attribute {
     }
 }
 
-impl HasTokens for Nonterminal {
-    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
-        match self {
-            Nonterminal::NtBlock(block) => block.tokens(),
-        }
-    }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
-        match self {
-            Nonterminal::NtBlock(block) => block.tokens_mut(),
-        }
-    }
-}
-
 /// A trait for AST nodes having (or not having) attributes.
 pub trait HasAttrs {
     /// This is `true` if this `HasAttrs` might support 'custom' (proc-macro) inner
diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs
index da510e4967d..294c6c9ba7a 100644
--- a/compiler/rustc_ast/src/lib.rs
+++ b/compiler/rustc_ast/src/lib.rs
@@ -6,7 +6,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(attr(deny(warnings)))
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index b083e878905..6aae2e481a5 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -844,9 +844,9 @@ fn visit_lazy_tts<T: MutVisitor>(vis: &mut T, lazy_tts: &mut Option<LazyAttrToke
     visit_lazy_tts_opt_mut(vis, lazy_tts.as_mut());
 }
 
-/// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
-/// In practice the ident part is not actually used by specific visitors right now,
-/// but there's a test below checking that it works.
+/// Applies ident visitor if it's an ident. In practice this is not actually
+/// used by specific visitors right now, but there's a test below checking that
+/// it works.
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
     let Token { kind, span } = t;
@@ -864,46 +864,12 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
         token::NtLifetime(ident, _is_raw) => {
             vis.visit_ident(ident);
         }
-        token::Interpolated(nt) => {
-            let nt = Arc::make_mut(nt);
-            visit_nonterminal(vis, nt);
-        }
         _ => {}
     }
     vis.visit_span(span);
 }
 
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-/// Applies the visitor to elements of interpolated nodes.
-//
-// N.B., this can occur only when applying a visitor to partially expanded
-// code, where parsed pieces have gotten implanted ito *other* macro
-// invocations. This is relevant for macro hygiene, but possibly not elsewhere.
-//
-// One problem here occurs because the types for flat_map_item, flat_map_stmt,
-// etc., allow the visitor to return *multiple* items; this is a problem for the
-// nodes here, because they insist on having exactly one piece. One solution
-// would be to mangle the MutVisitor trait to include one-to-many and
-// one-to-one versions of these entry points, but that would probably confuse a
-// lot of people and help very few. Instead, I'm just going to put in dynamic
-// checks. I think the performance impact of this will be pretty much
-// nonexistent. The danger is that someone will apply a `MutVisitor` to a
-// partially expanded node, and will be confused by the fact that their
-// `flat_map_item` or `flat_map_stmt` isn't getting called on `NtItem` or `NtStmt`
-// nodes. Hopefully they'll wind up reading this comment, and doing something
-// appropriate.
-//
-// BTW, design choice: I considered just changing the type of, e.g., `NtItem` to
-// contain multiple items, but decided against it when I looked at
-// `parse_item_or_view_item` and tried to figure out what I would do with
-// multiple items there....
-fn visit_nonterminal<T: MutVisitor>(vis: &mut T, nt: &mut token::Nonterminal) {
-    match nt {
-        token::NtBlock(block) => vis.visit_block(block),
-    }
-}
-
-// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 fn visit_defaultness<T: MutVisitor>(vis: &mut T, defaultness: &mut Defaultness) {
     match defaultness {
         Defaultness::Default(span) => vis.visit_span(span),
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index d57a369eebf..055481f5d87 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -1,13 +1,10 @@
 use std::borrow::Cow;
 use std::fmt;
-use std::sync::Arc;
 
 pub use LitKind::*;
-pub use Nonterminal::*;
 pub use NtExprKind::*;
 pub use NtPatKind::*;
 pub use TokenKind::*;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::edition::Edition;
 use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
@@ -16,7 +13,6 @@ use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
 use rustc_span::{Ident, Symbol};
 
 use crate::ast;
-use crate::ptr::P;
 use crate::util::case::Case;
 
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@@ -34,10 +30,6 @@ pub enum InvisibleOrigin {
     // Converted from `proc_macro::Delimiter` in
     // `proc_macro::Delimiter::to_internal`, i.e. returned by a proc macro.
     ProcMacro,
-
-    // Converted from `TokenKind::Interpolated` in
-    // `TokenStream::flatten_token`. Treated similarly to `ProcMacro`.
-    FlattenToken,
 }
 
 impl PartialEq for InvisibleOrigin {
@@ -134,9 +126,7 @@ impl Delimiter {
         match self {
             Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false,
             Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) => false,
-            Delimiter::Invisible(InvisibleOrigin::FlattenToken | InvisibleOrigin::ProcMacro) => {
-                true
-            }
+            Delimiter::Invisible(InvisibleOrigin::ProcMacro) => true,
         }
     }
 
@@ -337,9 +327,7 @@ impl From<IdentIsRaw> for bool {
     }
 }
 
-// SAFETY: due to the `Clone` impl below, all fields of all variants other than
-// `Interpolated` must impl `Copy`.
-#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     /// `=`
@@ -468,21 +456,6 @@ pub enum TokenKind {
     /// the `lifetime` metavariable in the macro's RHS.
     NtLifetime(Ident, IdentIsRaw),
 
-    /// An embedded AST node, as produced by a macro. This only exists for
-    /// historical reasons. We'd like to get rid of it, for multiple reasons.
-    /// - It's conceptually very strange. Saying a token can contain an AST
-    ///   node is like saying, in natural language, that a word can contain a
-    ///   sentence.
-    /// - It requires special handling in a bunch of places in the parser.
-    /// - It prevents `Token` from implementing `Copy`.
-    /// It adds complexity and likely slows things down. Please don't add new
-    /// occurrences of this token kind!
-    ///
-    /// The span in the surrounding `Token` is that of the metavariable in the
-    /// macro's RHS. The span within the Nonterminal is that of the fragment
-    /// passed to the macro at the call site.
-    Interpolated(Arc<Nonterminal>),
-
     /// A doc comment token.
     /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
     /// similarly to symbols in string literal tokens.
@@ -492,20 +465,7 @@ pub enum TokenKind {
     Eof,
 }
 
-impl Clone for TokenKind {
-    fn clone(&self) -> Self {
-        // `TokenKind` would impl `Copy` if it weren't for `Interpolated`. So
-        // for all other variants, this implementation of `clone` is just like
-        // a copy. This is faster than the `derive(Clone)` version which has a
-        // separate path for every variant.
-        match self {
-            Interpolated(nt) => Interpolated(Arc::clone(nt)),
-            _ => unsafe { std::ptr::read(self) },
-        }
-    }
-}
-
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
@@ -600,7 +560,7 @@ impl Token {
             | FatArrow | Pound | Dollar | Question | SingleQuote => true,
 
             OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
-            | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Interpolated(..) | Eof => false,
+            | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Eof => false,
         }
     }
 
@@ -631,7 +591,6 @@ impl Token {
             PathSep                           | // global path
             Lifetime(..)                      | // labeled loop
             Pound                             => true, // expression attributes
-            Interpolated(ref nt) => matches!(&**nt, NtBlock(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Block |
                 MetaVarKind::Expr { .. } |
@@ -703,7 +662,6 @@ impl Token {
         match self.kind {
             OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
-            Interpolated(ref nt) => matches!(&**nt, NtBlock(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Expr { .. } | MetaVarKind::Block | MetaVarKind::Literal,
             ))) => true,
@@ -831,31 +789,20 @@ impl Token {
     /// Is this a pre-parsed expression dropped into the token stream
     /// (which happens while parsing the result of macro expansion)?
     pub fn is_metavar_expr(&self) -> bool {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let Interpolated(nt) = &self.kind
-            && let NtBlock(_) = &**nt
-        {
-            true
-        } else if matches!(
+        matches!(
             self.is_metavar_seq(),
-            Some(MetaVarKind::Expr { .. } | MetaVarKind::Literal | MetaVarKind::Path)
-        ) {
-            true
-        } else {
-            matches!(self.is_metavar_seq(), Some(MetaVarKind::Path))
-        }
+            Some(
+                MetaVarKind::Expr { .. }
+                    | MetaVarKind::Literal
+                    | MetaVarKind::Path
+                    | MetaVarKind::Block
+            )
+        )
     }
 
-    /// Is the token an interpolated block (`$b:block`)?
-    pub fn is_whole_block(&self) -> bool {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let Interpolated(nt) = &self.kind
-            && let NtBlock(..) = &**nt
-        {
-            return true;
-        }
-
-        false
+    /// Are we at a block from a metavar (`$b:block`)?
+    pub fn is_metavar_block(&self) -> bool {
+        matches!(self.is_metavar_seq(), Some(MetaVarKind::Block))
     }
 
     /// Returns `true` if the token is either the `mut` or `const` keyword.
@@ -1024,7 +971,7 @@ impl Token {
                 | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq
                 | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question
                 | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
-                | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof,
+                | Lifetime(..) | NtLifetime(..) | DocComment(..) | Eof,
                 _,
             ) => {
                 return None;
@@ -1063,12 +1010,6 @@ pub enum NtExprKind {
     Expr2021 { inferred: bool },
 }
 
-#[derive(Clone, Encodable, Decodable)]
-/// For interpolation during macro expansion.
-pub enum Nonterminal {
-    NtBlock(P<ast::Block>),
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)]
 pub enum NonterminalKind {
     Item,
@@ -1152,47 +1093,6 @@ impl fmt::Display for NonterminalKind {
     }
 }
 
-impl Nonterminal {
-    pub fn use_span(&self) -> Span {
-        match self {
-            NtBlock(block) => block.span,
-        }
-    }
-
-    pub fn descr(&self) -> &'static str {
-        match self {
-            NtBlock(..) => "block",
-        }
-    }
-}
-
-impl PartialEq for Nonterminal {
-    fn eq(&self, _rhs: &Self) -> bool {
-        // FIXME: Assume that all nonterminals are not equal, we can't compare them
-        // correctly based on data from AST. This will prevent them from matching each other
-        // in macros. The comparison will become possible only when each nonterminal has an
-        // attached token stream from which it was parsed.
-        false
-    }
-}
-
-impl fmt::Debug for Nonterminal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match *self {
-            NtBlock(..) => f.pad("NtBlock(..)"),
-        }
-    }
-}
-
-impl<CTX> HashStable<CTX> for Nonterminal
-where
-    CTX: crate::HashStableContext,
-{
-    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
-        panic!("interpolated tokens should not be present in the HIR")
-    }
-}
-
 // Some types are used a lot. Make sure they don't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
 mod size_asserts {
@@ -1202,7 +1102,6 @@ mod size_asserts {
     // tidy-alphabetical-start
     static_assert_size!(Lit, 12);
     static_assert_size!(LitKind, 2);
-    static_assert_size!(Nonterminal, 8);
     static_assert_size!(Token, 24);
     static_assert_size!(TokenKind, 16);
     // tidy-alphabetical-end
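
With `Interpolated` gone, no `TokenKind` variant holds an `Arc` any more, so `TokenKind` and `Token` derive `Copy` and the hand-written `Clone` impl above is deleted. That is why the remaining hunks in this diff replace `token.clone()` with `*token` and `self.token.clone()` with `self.token`. A minimal illustration of that pattern, using simplified stand-in types rather than the real rustc ones:

```rust
#![allow(dead_code)]
// Sketch only: once every variant of a token enum holds `Copy` data,
// deriving `Copy` lets callers copy tokens instead of cloning them.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
enum TokenKind {
    Comma,
    Ident(u32), // interned symbol index, itself `Copy`
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: Span,
}

fn first_token(tokens: &[Token]) -> Option<Token> {
    // Previously `Token` was only `Clone` (the `Interpolated` variant held an
    // `Arc`), so call sites wrote `.cloned()` or `token.clone()`; with `Copy`
    // a plain bitwise copy suffices.
    tokens.first().copied()
}

fn main() {
    let toks = [Token { kind: TokenKind::Comma, span: Span(0) }];
    assert_eq!(first_token(&toks), Some(toks[0]));
}
```
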
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index fb331e74aeb..43d25d18075 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -25,7 +25,7 @@ use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
 
 use crate::ast::AttrStyle;
 use crate::ast_traits::{HasAttrs, HasTokens};
-use crate::token::{self, Delimiter, InvisibleOrigin, Nonterminal, Token, TokenKind};
+use crate::token::{self, Delimiter, Token, TokenKind};
 use crate::{AttrVec, Attribute};
 
 /// Part of a `TokenStream`.
@@ -305,11 +305,6 @@ pub struct AttrsTarget {
 }
 
 /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
-///
-/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
-/// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
-/// backwards compatibility.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
 pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
 
@@ -476,61 +471,6 @@ impl TokenStream {
         TokenStream::new(tts)
     }
 
-    pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
-        match nt {
-            Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
-        }
-    }
-
-    fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
-        match token.kind {
-            token::NtIdent(ident, is_raw) => {
-                TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
-            }
-            token::NtLifetime(ident, is_raw) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span),
-            ),
-            token::Interpolated(ref nt) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::from_nonterminal_ast(&nt).flattened(),
-            ),
-            _ => TokenTree::Token(token.clone(), spacing),
-        }
-    }
-
-    fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
-        match tree {
-            TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
-            TokenTree::Delimited(span, spacing, delim, tts) => {
-                TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
-            }
-        }
-    }
-
-    #[must_use]
-    pub fn flattened(&self) -> TokenStream {
-        fn can_skip(stream: &TokenStream) -> bool {
-            stream.iter().all(|tree| match tree {
-                TokenTree::Token(token, _) => !matches!(
-                    token.kind,
-                    token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
-                ),
-                TokenTree::Delimited(.., inner) => can_skip(inner),
-            })
-        }
-
-        if can_skip(self) {
-            return self.clone();
-        }
-
-        self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
-    }
-
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
     // value indicates if gluing took place.
     fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index 22a293d9b50..b1d1c35e64a 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -32,7 +32,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
@@ -917,7 +916,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }
 
     fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs {
-        DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() }
+        DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.clone() }
     }
 
     /// Lower an associated item constraint.
diff --git a/compiler/rustc_ast_pretty/src/pprust/mod.rs b/compiler/rustc_ast_pretty/src/pprust/mod.rs
index 97cb6e52d56..551506f2aef 100644
--- a/compiler/rustc_ast_pretty/src/pprust/mod.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/mod.rs
@@ -5,14 +5,10 @@ pub mod state;
 use std::borrow::Cow;
 
 use rustc_ast as ast;
-use rustc_ast::token::{Nonterminal, Token, TokenKind};
+use rustc_ast::token::{Token, TokenKind};
 use rustc_ast::tokenstream::{TokenStream, TokenTree};
 pub use state::{AnnNode, Comments, PpAnn, PrintState, State, print_crate};
 
-pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
-    State::new().nonterminal_to_string(nt)
-}
-
 /// Print the token kind precisely, without converting `$crate` into its respective crate name.
 pub fn token_kind_to_string(tok: &TokenKind) -> Cow<'static, str> {
     State::new().token_kind_to_string(tok)
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index 90caad1c845..0985ebf945b 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -11,7 +11,7 @@ use std::sync::Arc;
 
 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
 use rustc_ast::util::classify;
 use rustc_ast::util::comments::{Comment, CommentStyle};
@@ -876,14 +876,6 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
         }
     }
 
-    fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
-        // We extract the token stream from the AST fragment and pretty print
-        // it, rather than using AST pretty printing, because `Nonterminal` is
-        // slated for removal in #124141. (This method will also then be
-        // removed.)
-        self.tts_to_string(&TokenStream::from_nonterminal_ast(nt))
-    }
-
     /// Print the token kind precisely, without converting `$crate` into its respective crate name.
     fn token_kind_to_string(&self, tok: &TokenKind) -> Cow<'static, str> {
         self.token_kind_to_string_ext(tok, None)
@@ -976,8 +968,6 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                 doc_comment_to_string(comment_kind, attr_style, data).into()
             }
             token::Eof => "<eof>".into(),
-
-            token::Interpolated(ref nt) => self.nonterminal_to_string(&nt).into(),
         }
     }
 
diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs
index a68d4578b40..3bf03f84ce8 100644
--- a/compiler/rustc_attr_parsing/src/context.rs
+++ b/compiler/rustc_attr_parsing/src/context.rs
@@ -320,7 +320,7 @@ impl<'sess> AttributeParser<'sess> {
             ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(DelimArgs {
                 dspan: args.dspan,
                 delim: args.delim,
-                tokens: args.tokens.flattened(),
+                tokens: args.tokens.clone(),
             }),
             // This is an inert key-value attribute - it will never be visible to macros
             // after it gets lowered to HIR. Therefore, we can extract literals to handle
diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs
index a7465847e18..249e71ef70d 100644
--- a/compiler/rustc_attr_parsing/src/lib.rs
+++ b/compiler/rustc_attr_parsing/src/lib.rs
@@ -77,7 +77,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs
index a8a1460591c..384fae59873 100644
--- a/compiler/rustc_attr_parsing/src/parser.rs
+++ b/compiler/rustc_attr_parsing/src/parser.rs
@@ -485,25 +485,7 @@ impl<'a> MetaItemListParserContext<'a> {
         }
 
         // or a path.
-        let path =
-            if let Some(TokenTree::Token(Token { kind: token::Interpolated(_), span, .. }, _)) =
-                self.inside_delimiters.peek()
-            {
-                self.inside_delimiters.next();
-                // We go into this path if an expr ended up in an attribute that
-                // expansion did not turn into a literal. Say, `#[repr(align(macro!()))]`
-                // where the macro didn't expand to a literal. An error is already given
-                // for this at this point, and then we do continue. This makes this path
-                // reachable...
-                let e = self.dcx.span_delayed_bug(
-                    *span,
-                    "expr in place where literal is expected (builtin attr parsing)",
-                );
-
-                return Some(MetaItemOrLitParser::Err(*span, e));
-            } else {
-                self.next_path()?
-            };
+        let path = self.next_path()?;
 
         // Paths can be followed by:
         // - `(more meta items)` (another list)
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index 64ad1c96856..9a521915667 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -2,7 +2,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_builtin_macros/src/cfg_eval.rs b/compiler/rustc_builtin_macros/src/cfg_eval.rs
index b3ba9073118..da01e3e9607 100644
--- a/compiler/rustc_builtin_macros/src/cfg_eval.rs
+++ b/compiler/rustc_builtin_macros/src/cfg_eval.rs
@@ -92,11 +92,7 @@ impl CfgEval<'_> {
         // the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization
         // process is lossless, so this process is invisible to proc-macros.
 
-        // 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
-        // to `None`-delimited groups containing the corresponding tokens. This
-        // is normally delayed until the proc-macro server actually needs to
-        // provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier,
-        // so that we can handle cases like:
+        // Interesting cases:
         //
         // ```rust
         // #[cfg_eval] #[cfg] $item
@@ -104,8 +100,8 @@ impl CfgEval<'_> {
         //
         // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
         // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
-        // way to do this is to do a single parse of a stream without any nonterminals.
-        let orig_tokens = annotatable.to_tokens().flattened();
+        // way to do this is to do a single parse of the token stream.
+        let orig_tokens = annotatable.to_tokens();
 
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
diff --git a/compiler/rustc_builtin_macros/src/lib.rs b/compiler/rustc_builtin_macros/src/lib.rs
index 606e85577f7..bcd40f980e6 100644
--- a/compiler/rustc_builtin_macros/src/lib.rs
+++ b/compiler/rustc_builtin_macros/src/lib.rs
@@ -5,7 +5,6 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs
index 153935f2531..5b2ed69535b 100644
--- a/compiler/rustc_codegen_ssa/src/lib.rs
+++ b/compiler/rustc_codegen_ssa/src/lib.rs
@@ -2,7 +2,6 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs
index e03849c32f9..da52d60ae59 100644
--- a/compiler/rustc_const_eval/src/lib.rs
+++ b/compiler/rustc_const_eval/src/lib.rs
@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_driver/src/lib.rs b/compiler/rustc_driver/src/lib.rs
index 381309f83b2..a03834c519d 100644
--- a/compiler/rustc_driver/src/lib.rs
+++ b/compiler/rustc_driver/src/lib.rs
@@ -3,7 +3,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
 // tidy-alphabetical-end
diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs
index 11d24dcf91d..40cc82727a5 100644
--- a/compiler/rustc_driver_impl/src/lib.rs
+++ b/compiler/rustc_driver_impl/src/lib.rs
@@ -7,7 +7,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(decl_macro)]
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index bcc2703c39b..c70e259b2cd 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -238,18 +238,6 @@ impl<'a> StripUnconfigured<'a> {
                     Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
                 }
                 AttrTokenTree::Token(
-                    Token {
-                        kind:
-                            TokenKind::NtIdent(..)
-                            | TokenKind::NtLifetime(..)
-                            | TokenKind::Interpolated(..),
-                        ..
-                    },
-                    _,
-                ) => {
-                    panic!("Nonterminal should have been flattened: {:?}", tree);
-                }
-                AttrTokenTree::Token(
                     Token { kind: TokenKind::OpenDelim(_) | TokenKind::CloseDelim(_), .. },
                     _,
                 ) => {
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index e60a9e83184..b663e959744 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -66,9 +66,7 @@ pub(super) fn failed_to_match_macro(
     }
 
     if let MatcherLoc::Token { token: expected_token } = &remaining_matcher
-        && (matches!(expected_token.kind, TokenKind::Interpolated(_))
-            || matches!(token.kind, TokenKind::Interpolated(_))
-            || matches!(expected_token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_)))
+        && (matches!(expected_token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_)))
             || matches!(token.kind, TokenKind::OpenDelim(Delimiter::Invisible(_))))
     {
         err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
@@ -162,7 +160,7 @@ impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'match
                     .is_none_or(|failure| failure.is_better_position(*approx_position))
                 {
                     self.best_failure = Some(BestFailure {
-                        token: token.clone(),
+                        token: *token,
                         position_in_tokenstream: *approx_position,
                         msg,
                         remaining_matcher: self
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index d709fd79281..0065f83eb4e 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -179,7 +179,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
         for tt in tts {
             match tt {
                 TokenTree::Token(token) => {
-                    locs.push(MatcherLoc::Token { token: token.clone() });
+                    locs.push(MatcherLoc::Token { token: *token });
                 }
                 TokenTree::Delimited(span, _, delimited) => {
                     let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
@@ -648,7 +648,7 @@ impl TtParser {
                     // There are no possible next positions AND we aren't waiting for the black-box
                     // parser: syntax error.
                     return Failure(T::build_failure(
-                        parser.token.clone(),
+                        parser.token,
                         parser.approx_token_stream_pos(),
                         "no rules expected this token in macro call",
                     ));
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 460a06f9c06..c138b090877 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -810,7 +810,7 @@ impl<'tt> FirstSets<'tt> {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(*sep));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -873,7 +873,7 @@ impl<'tt> FirstSets<'tt> {
                     // If the sequence contents can be empty, then the first
                     // token could be the separator token itself.
                     if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TtHandle::from_token(sep.clone()));
+                        first.add_one_maybe(TtHandle::from_token(*sep));
                     }
 
                     assert!(first.maybe_empty);
@@ -949,7 +949,7 @@ impl<'tt> Clone for TtHandle<'tt> {
             // This variant *must* contain a `mbe::TokenTree::Token`, and not
             // any other variant of `mbe::TokenTree`.
             TtHandle::Token(mbe::TokenTree::Token(tok)) => {
-                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+                TtHandle::Token(mbe::TokenTree::Token(*tok))
             }
 
             _ => unreachable!(),
@@ -1125,7 +1125,7 @@ fn check_matcher_core<'tt>(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(*sep));
                     &new
                 } else {
                     &suffix_first
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 0ea53627fe7..3f037259956 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -283,7 +283,7 @@ fn parse_tree<'a>(
         }
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(token.clone()),
+        tokenstream::TokenTree::Token(token, _) => TokenTree::Token(*token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -321,7 +321,7 @@ fn parse_kleene_op(
     match iter.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
-            None => Ok(Err(token.clone())),
+            None => Ok(Err(*token)),
         },
         tree => Err(tree.map_or(span, tokenstream::TokenTree::span)),
     }
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 6e47ed6eb67..39186319b1c 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -1,5 +1,4 @@
 use std::mem;
-use std::sync::Arc;
 
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{
@@ -165,7 +164,7 @@ pub(super) fn transcribe<'a>(
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(sep.clone(), Spacing::Alone));
+                        result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -307,7 +306,9 @@ pub(super) fn transcribe<'a>(
                     let tt = match cur_matched {
                         MatchedSingle(ParseNtResult::Tt(tt)) => {
                             // `tt`s are emitted into the output stream directly as "raw tokens",
-                            // without wrapping them into groups.
+                            // without wrapping them into groups. Other variables are emitted into
+                            // the output stream as groups with `Delimiter::Invisible` to maintain
+                            // parsing priorities.
                             maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
                         }
                         MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
@@ -325,6 +326,11 @@ pub(super) fn transcribe<'a>(
                         MatchedSingle(ParseNtResult::Item(item)) => {
                             mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
                         }
+                        MatchedSingle(ParseNtResult::Block(block)) => mk_delimited(
+                            block.span,
+                            MetaVarKind::Block,
+                            TokenStream::from_ast(block),
+                        ),
                         MatchedSingle(ParseNtResult::Stmt(stmt)) => {
                             let stream = if let StmtKind::Empty = stmt.kind {
                                 // FIXME: Properly collect tokens for empty statements.
@@ -385,15 +391,6 @@ pub(super) fn transcribe<'a>(
                         MatchedSingle(ParseNtResult::Vis(vis)) => {
                             mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
                         }
-                        MatchedSingle(ParseNtResult::Nt(nt)) => {
-                            // Other variables are emitted into the output stream as groups with
-                            // `Delimiter::Invisible` to maintain parsing priorities.
-                            // `Interpolated` is currently used for such groups in rustc parser.
-                            marker.visit_span(&mut sp);
-                            let use_span = nt.use_span();
-                            with_metavar_spans(|mspans| mspans.insert(use_span, sp));
-                            TokenTree::token_alone(token::Interpolated(Arc::clone(nt)), sp)
-                        }
                         MatchedSeq(..) => {
                             // We were unable to descend far enough. This is an error.
                             return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
@@ -441,7 +438,7 @@ pub(super) fn transcribe<'a>(
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             mbe::TokenTree::Token(token) => {
-                let mut token = token.clone();
+                let mut token = *token;
                 mut_visit::visit_token(&mut marker, &mut token);
                 let tt = TokenTree::Token(token, Spacing::Alone);
                 result.push(tt);
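
The transcriber hunk above shows the expansion side of the new scheme: a captured `$b:block` is emitted as the block's own tokens wrapped in an invisible group tagged `MetaVarKind::Block` (via `mk_delimited`). The parsing side, visible later in the `rustc_parse/src/parser/expr.rs` changes, consumes that group with `eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block())`. The following is a rough, self-contained sketch of that hand-off using made-up stand-in types, not the real rustc API:

```rust
// Stand-ins only; real rustc works with TokenStream, Parser, and spans.
#[derive(Clone, Copy, PartialEq)]
enum MetaVarKind {
    Block,
}

#[derive(Clone)]
enum TokenTree {
    Token(String),
    // An invisible-delimited group recording which metavariable produced it.
    MetaVarGroup(MetaVarKind, Vec<TokenTree>),
}

// Transcriber side: wrap the captured fragment's tokens in a tagged group.
fn transcribe_block(block_tokens: Vec<TokenTree>) -> TokenTree {
    TokenTree::MetaVarGroup(MetaVarKind::Block, block_tokens)
}

// Parser side: if the next tree is a Block group, "eat" it and hand the inner
// tokens on (the real parser would re-parse them with `parse_block`).
fn eat_metavar_block(stream: &mut Vec<TokenTree>) -> Option<Vec<TokenTree>> {
    let is_block_group = matches!(
        stream.first(),
        Some(TokenTree::MetaVarGroup(MetaVarKind::Block, _))
    );
    if !is_block_group {
        return None;
    }
    match stream.remove(0) {
        TokenTree::MetaVarGroup(MetaVarKind::Block, inner) => Some(inner),
        _ => unreachable!(),
    }
}

fn main() {
    let captured = vec![TokenTree::Token("{".into()), TokenTree::Token("}".into())];
    let mut stream = vec![transcribe_block(captured)];
    assert!(eat_metavar_block(&mut stream).is_some());
    assert!(stream.is_empty());
}
```
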
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index ee6306e3961..2706a5eb3d7 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -309,15 +309,6 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                     }));
                 }
 
-                Interpolated(nt) => {
-                    let stream = TokenStream::from_nonterminal_ast(&nt);
-                    trees.push(TokenTree::Group(Group {
-                        delimiter: pm::Delimiter::None,
-                        stream: Some(stream),
-                        span: DelimSpan::from_single(span),
-                    }))
-                }
-
                 OpenDelim(..) | CloseDelim(..) => unreachable!(),
                 Eof => unreachable!(),
             }
diff --git a/compiler/rustc_hir/src/lib.rs b/compiler/rustc_hir/src/lib.rs
index 6bc0f797cca..a84857e3597 100644
--- a/compiler/rustc_hir/src/lib.rs
+++ b/compiler/rustc_hir/src/lib.rs
@@ -4,7 +4,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
 #![feature(closure_track_caller)]
diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs
index e7ecd727a85..e1ad8124aea 100644
--- a/compiler/rustc_hir_analysis/src/lib.rs
+++ b/compiler/rustc_hir_analysis/src/lib.rs
@@ -59,7 +59,6 @@ This API is completely unstable and subject to change.
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs
index 1d86ff14471..d30272b8236 100644
--- a/compiler/rustc_hir_typeck/src/lib.rs
+++ b/compiler/rustc_hir_typeck/src/lib.rs
@@ -1,7 +1,6 @@
 // tidy-alphabetical-start
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_incremental/src/lib.rs b/compiler/rustc_incremental/src/lib.rs
index dabfb6a90ca..299ee487638 100644
--- a/compiler/rustc_incremental/src/lib.rs
+++ b/compiler/rustc_incremental/src/lib.rs
@@ -2,7 +2,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![deny(missing_docs)]
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs
index 12384690ff3..1789a2970d6 100644
--- a/compiler/rustc_lint/src/lib.rs
+++ b/compiler/rustc_lint/src/lib.rs
@@ -21,7 +21,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(array_windows)]
diff --git a/compiler/rustc_metadata/src/lib.rs b/compiler/rustc_metadata/src/lib.rs
index 028d5c8b609..3b44c44fcb9 100644
--- a/compiler/rustc_metadata/src/lib.rs
+++ b/compiler/rustc_metadata/src/lib.rs
@@ -1,6 +1,5 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(coroutines)]
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index 1e6178144c9..8fe2cc7101b 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -29,7 +29,6 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::potential_query_instability)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(allocator_api)]
diff --git a/compiler/rustc_mir_build/src/lib.rs b/compiler/rustc_mir_build/src/lib.rs
index a25697ba086..8e96d46dac2 100644
--- a/compiler/rustc_mir_build/src/lib.rs
+++ b/compiler/rustc_mir_build/src/lib.rs
@@ -3,7 +3,6 @@
 // tidy-alphabetical-start
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(assert_matches)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs
index 82c57ef5678..a0efc623b8e 100644
--- a/compiler/rustc_mir_dataflow/src/lib.rs
+++ b/compiler/rustc_mir_dataflow/src/lib.rs
@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(assert_matches)]
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index dfd07f0fb16..4d74ecddfb0 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_monomorphize/src/lib.rs b/compiler/rustc_monomorphize/src/lib.rs
index 5dbae50c499..8f6914f3d72 100644
--- a/compiler/rustc_monomorphize/src/lib.rs
+++ b/compiler/rustc_monomorphize/src/lib.rs
@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(file_buffered)]
 #![feature(if_let_guard)]
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index ff03b42484b..2bfa1ea4e05 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -376,7 +376,7 @@ pub(super) fn check_for_substitution(
             ascii_name,
         })
     };
-    (token.clone(), sugg)
+    (*token, sugg)
 }
 
 /// Extract string if found at current position with given delimiters
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 79939aab7fc..2edc8c83017 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -4,7 +4,6 @@
 #![allow(internal_features)]
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(array_windows)]
 #![feature(assert_matches)]
 #![feature(box_patterns)]
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index cff998fa137..f1bd6a22730 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
+        let tokens = iter::once(FlatToken::Token(self.start_token))
             .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
             .take(self.num_calls as usize);
 
@@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
 impl<'a> Parser<'a> {
     pub(super) fn collect_pos(&self) -> CollectPos {
         CollectPos {
-            start_token: (self.token.clone(), self.token_spacing),
+            start_token: (self.token, self.token_spacing),
             cursor_snapshot: self.token_cursor.clone(),
             start_pos: self.num_bump_calls,
         }
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index ef044fe9d63..bb63e91aecf 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -322,7 +322,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;
 
         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -382,7 +382,7 @@ impl<'a> Parser<'a> {
             // if the previous token is a valid keyword
             // that might use a generic, then suggest a correct
             // generic placement (later on)
-            let maybe_keyword = self.prev_token.clone();
+            let maybe_keyword = self.prev_token;
             if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
                 // if we have a valid keyword, attempt to parse generics
                 // also obtain the keywords symbol
@@ -530,7 +530,7 @@ impl<'a> Parser<'a> {
                 //   let y = 42;
                 let guar = self.dcx().emit_err(ExpectedSemi {
                     span: self.token.span,
-                    token: self.token.clone(),
+                    token: self.token,
                     unexpected_token_label: None,
                     sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
                 });
@@ -555,7 +555,7 @@ impl<'a> Parser<'a> {
                 let span = self.prev_token.span.shrink_to_hi();
                 let guar = self.dcx().emit_err(ExpectedSemi {
                     span,
-                    token: self.token.clone(),
+                    token: self.token,
                     unexpected_token_label: Some(self.token.span),
                     sugg: ExpectedSemiSugg::AddSemi(span),
                 });
@@ -801,7 +801,7 @@ impl<'a> Parser<'a> {
         let span = self.prev_token.span.shrink_to_hi();
         let mut err = self.dcx().create_err(ExpectedSemi {
             span,
-            token: self.token.clone(),
+            token: self.token,
             unexpected_token_label: Some(self.token.span),
             sugg: ExpectedSemiSugg::AddSemi(span),
         });
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index d52e36fcfac..14bc98f66da 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -344,7 +344,7 @@ impl<'a> Parser<'a> {
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
         self.dcx().emit_err(errors::FoundExprWouldBeStmt {
             span: self.token.span,
-            token: self.token.clone(),
+            token: self.token,
             suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
         });
     }
@@ -417,7 +417,7 @@ impl<'a> Parser<'a> {
         cur_op_span: Span,
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
-            let maybe_lt = self.token.clone();
+            let maybe_lt = self.token;
             let attrs = self.parse_outer_attributes()?;
             Some(
                 self.parse_expr_assoc_with(Bound::Excluded(prec), attrs)
@@ -611,7 +611,7 @@ impl<'a> Parser<'a> {
 
     /// Recover on `not expr` in favor of `!expr`.
     fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let negated_token = self.look_ahead(1, |t| t.clone());
+        let negated_token = self.look_ahead(1, |t| *t);
 
         let sub_diag = if negated_token.is_numeric_lit() {
             errors::NotAsNegationOperatorSub::SuggestNotBitwise
@@ -637,9 +637,7 @@ impl<'a> Parser<'a> {
     /// Returns the span of expr if it was not interpolated, or the span of the interpolated token.
     fn interpolated_or_expr_span(&self, expr: &Expr) -> Span {
         match self.prev_token.kind {
-            TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => {
-                self.prev_token.span
-            }
+            TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) => self.prev_token.span,
             TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
                 // `expr.span` is the interpolated span, because invisible open
                 // and close delims both get marked with the same span, one
@@ -1386,15 +1384,7 @@ impl<'a> Parser<'a> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
 
         let span = self.token.span;
-        if let token::Interpolated(nt) = &self.token.kind {
-            match &**nt {
-                token::NtBlock(block) => {
-                    let block = block.clone();
-                    self.bump();
-                    return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None)));
-                }
-            };
-        } else if let Some(expr) = self.eat_metavar_seq_with_matcher(
+        if let Some(expr) = self.eat_metavar_seq_with_matcher(
             |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }),
             |this| {
                 // Force collection (as opposed to just `parse_expr`) is required to avoid the
@@ -1415,9 +1405,13 @@ impl<'a> Parser<'a> {
             self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
         {
             return Ok(lit);
-        } else if let Some(path) = self.eat_metavar_seq(MetaVarKind::Path, |this| {
-            this.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))
-        }) {
+        } else if let Some(block) =
+            self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block())
+        {
+            return Ok(self.mk_expr(span, ExprKind::Block(block, None)));
+        } else if let Some(path) =
+            self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
+        {
             return Ok(self.mk_expr(span, ExprKind::Path(None, path)));
         }
 
@@ -1612,7 +1606,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
-        let maybe_eq_tok = self.prev_token.clone();
+        let maybe_eq_tok = self.prev_token;
         let (qself, path) = if self.eat_lt() {
             let lt_span = self.prev_token.span;
             let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
@@ -1671,7 +1665,7 @@ impl<'a> Parser<'a> {
         } else if self.eat_keyword(exp!(Loop)) {
             self.parse_expr_loop(label, lo)
         } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
-            || self.token.is_whole_block()
+            || self.token.is_metavar_block()
         {
             self.parse_expr_block(label, lo, BlockCheckMode::Default)
         } else if !ate_colon
@@ -2073,7 +2067,7 @@ impl<'a> Parser<'a> {
         &mut self,
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
-        let token = self.token.clone();
+        let token = self.token;
         let err = |self_: &Self| {
             let msg = format!("unexpected token: {}", super::token_descr(&token));
             self_.dcx().struct_span_err(token.span, msg)
@@ -2354,7 +2348,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        if self.token.is_whole_block() {
+        if self.token.is_metavar_block() {
             self.dcx().emit_err(errors::InvalidBlockMacroSegment {
                 span: self.token.span,
                 context: lo.to(self.token.span),
@@ -2379,7 +2373,7 @@ impl<'a> Parser<'a> {
     fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
-        let before = self.prev_token.clone();
+        let before = self.prev_token;
         let binder = if self.check_keyword(exp!(For)) {
             let lo = self.token.span;
             let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
@@ -2411,8 +2405,8 @@ impl<'a> Parser<'a> {
             FnRetTy::Default(_) => {
                 let restrictions =
                     self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
-                let prev = self.prev_token.clone();
-                let token = self.token.clone();
+                let prev = self.prev_token;
+                let token = self.token;
                 let attrs = self.parse_outer_attributes()?;
                 match self.parse_expr_res(restrictions, attrs) {
                     Ok((expr, _)) => expr,
@@ -2477,7 +2471,7 @@ impl<'a> Parser<'a> {
         if self.may_recover()
             && self.token.can_begin_expr()
             && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace))
-            && !self.token.is_whole_block()
+            && !self.token.is_metavar_block()
         {
             let snapshot = self.create_snapshot_for_diagnostic();
             let restrictions =
@@ -2659,7 +2653,7 @@ impl<'a> Parser<'a> {
             }
         } else {
             let attrs = self.parse_outer_attributes()?; // For recovery.
-            let maybe_fatarrow = self.token.clone();
+            let maybe_fatarrow = self.token;
             let block = if self.check(exp!(OpenBrace)) {
                 self.parse_block()?
             } else if let Some(block) = recover_block_from_condition(self) {
@@ -3524,7 +3518,7 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw::Do)
             && self.is_keyword_ahead(1, &[kw::Catch])
             && self
-                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
             && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -3535,7 +3529,7 @@ impl<'a> Parser<'a> {
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try)
             && self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
             && self.token_uninterpolated_span().at_least_rust_2018()
     }
 
@@ -3569,12 +3563,12 @@ impl<'a> Parser<'a> {
                 // `async move {`
                 self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use])
                     && self.look_ahead(lookahead + 2, |t| {
-                        *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                        *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
                     })
             ) || (
                 // `async {`
                 self.look_ahead(lookahead + 1, |t| {
-                    *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                    *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
                 })
             ))
     }
@@ -3867,7 +3861,7 @@ impl<'a> Parser<'a> {
                 return Err(this.dcx().create_err(errors::ExpectedStructField {
                     span: this.look_ahead(1, |t| t.span),
                     ident_span: this.token.span,
-                    token: this.look_ahead(1, |t| t.clone()),
+                    token: this.look_ahead(1, |t| *t),
                 }));
             }
             let (ident, expr) = if is_shorthand {
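
The key pattern in this file: instead of pulling a pre-parsed `NtBlock` out of an `Interpolated` token, the parser now recognises a block capture as a token sequence wrapped in invisible delimiters tagged `MetaVarKind::Block`, and re-parses it on the spot via `eat_metavar_seq`. A heavily simplified, self-contained sketch of that shape (every name below is a hypothetical stand-in, not rustc's real `TokenStream`/`Parser` API):

    #![allow(dead_code)] // `MetaVarKind::Expr` exists only to show the kind is checked

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum MetaVarKind { Block, Expr }

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Tok {
        OpenInvisible(MetaVarKind),
        CloseInvisible(MetaVarKind),
        Word(&'static str),
    }

    struct Parser { toks: Vec<Tok>, pos: usize }

    impl Parser {
        fn peek(&self) -> Option<Tok> { self.toks.get(self.pos).copied() }

        /// If the next token opens an invisible-delimited sequence of `kind`, run `f`
        /// over its contents and consume the closing delimiter; otherwise do nothing.
        fn eat_metavar_seq<T>(&mut self, kind: MetaVarKind, f: impl FnOnce(&mut Self) -> T) -> Option<T> {
            if self.peek() != Some(Tok::OpenInvisible(kind)) {
                return None;
            }
            self.pos += 1; // step past the invisible open delimiter
            let result = f(self);
            assert_eq!(self.peek(), Some(Tok::CloseInvisible(kind)));
            self.pos += 1; // step past the invisible close delimiter
            Some(result)
        }

        /// Grossly simplified "block parser": the point is that the block is
        /// re-parsed from its tokens rather than extracted from an AST node
        /// stored inside the token itself.
        fn parse_block(&mut self) -> Vec<&'static str> {
            let mut stmts = Vec::new();
            while let Some(Tok::Word(w)) = self.peek() {
                stmts.push(w);
                self.pos += 1;
            }
            stmts
        }
    }

    fn main() {
        // Roughly what a `$b:block` capture looks like to the parser after this change:
        // the block's tokens fenced by invisible delimiters carrying MetaVarKind::Block.
        let mut p = Parser {
            toks: vec![
                Tok::OpenInvisible(MetaVarKind::Block),
                Tok::Word("let"),
                Tok::Word("x"),
                Tok::CloseInvisible(MetaVarKind::Block),
            ],
            pos: 0,
        };
        let block = p.eat_metavar_seq(MetaVarKind::Block, |p| p.parse_block());
        assert_eq!(block, Some(vec!["let", "x"]));
    }

The same helper call explains the other hunks above: `is_whole_block` becomes `is_metavar_block`, because a "whole" pre-parsed block token no longer exists, only an invisible-delimited block sequence.
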
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 06501816340..39a0291cb1e 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1732,8 +1732,7 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             body
         } else {
-            let err =
-                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
             return Err(self.dcx().create_err(err));
         };
 
@@ -2303,7 +2302,7 @@ impl<'a> Parser<'a> {
             || self.token.is_keyword(kw::Union))
             && self.look_ahead(1, |t| t.is_ident())
         {
-            let kw_token = self.token.clone();
+            let kw_token = self.token;
             let kw_str = pprust::token_to_string(&kw_token);
             let item = self.parse_item(ForceCollect::No)?;
             let mut item = item.unwrap().span;
@@ -2536,7 +2535,7 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             *sig_hi = self.prev_token.span;
             (AttrVec::new(), None)
-        } else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() {
+        } else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() {
             self.parse_block_common(self.token.span, BlockCheckMode::Default, None)
                 .map(|(attrs, body)| (attrs, Some(body)))?
         } else if self.token == token::Eq {
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index fafd1b1ae00..2221a261b4c 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -13,7 +13,6 @@ mod ty;
 
 use std::assert_matches::debug_assert_matches;
 use std::ops::Range;
-use std::sync::Arc;
 use std::{fmt, mem, slice};
 
 use attr_wrapper::{AttrWrapper, UsePreAttrPos};
@@ -24,8 +23,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{
-    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, NtExprKind, NtPatKind,
-    Token, TokenKind,
+    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token,
+    TokenKind,
 };
 use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
@@ -98,21 +97,6 @@ pub enum ForceCollect {
     No,
 }
 
-#[macro_export]
-macro_rules! maybe_whole {
-    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let token::Interpolated(nt) = &$p.token.kind
-            && let token::$constructor(x) = &**nt
-        {
-            #[allow(unused_mut)]
-            let mut $x = x.clone();
-            $p.bump();
-            return Ok($e);
-        }
-    };
-}
-
 /// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
 #[macro_export]
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
@@ -342,12 +326,12 @@ impl TokenCursor {
             // below can be removed.
             if let Some(tree) = self.curr.curr() {
                 match tree {
-                    &TokenTree::Token(ref token, spacing) => {
+                    &TokenTree::Token(token, spacing) => {
                         debug_assert!(!matches!(
                             token.kind,
                             token::OpenDelim(_) | token::CloseDelim(_)
                         ));
-                        let res = (token.clone(), spacing);
+                        let res = (token, spacing);
                         self.curr.bump();
                         return res;
                     }
@@ -459,7 +443,6 @@ pub fn token_descr(token: &Token) -> String {
         (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
         (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
         (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
-        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
         (None, _) => format!("`{s}`"),
     }
 }
@@ -855,8 +838,10 @@ impl<'a> Parser<'a> {
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match &t.kind {
-                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
                 token::OpenDelim(Delimiter::Brace) => true,
+                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+                    MetaVarKind::Block,
+                ))) => true,
                 _ => false,
             })
     }
@@ -1402,7 +1387,7 @@ impl<'a> Parser<'a> {
         // Avoid const blocks and const closures to be parsed as const items
         if (self.check_const_closure() == is_closure)
             && !self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
             && self.eat_keyword_case(exp!(Const), case)
         {
             Const::Yes(self.prev_token_uninterpolated_span())
@@ -1532,7 +1517,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let prev_spacing = self.token_spacing;
                 self.bump();
-                TokenTree::Token(self.prev_token.clone(), prev_spacing)
+                TokenTree::Token(self.prev_token, prev_spacing)
             }
         }
     }
@@ -1718,7 +1703,7 @@ impl<'a> Parser<'a> {
             dbg_fmt.field("prev_token", &self.prev_token);
             let mut tokens = vec![];
             for i in 0..lookahead {
-                let tok = self.look_ahead(i, |tok| tok.kind.clone());
+                let tok = self.look_ahead(i, |tok| tok.kind);
                 let is_eof = tok == TokenKind::Eof;
                 tokens.push(tok);
                 if is_eof {
@@ -1759,7 +1744,6 @@ impl<'a> Parser<'a> {
     pub fn token_uninterpolated_span(&self) -> Span {
         match &self.token.kind {
             token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
-            token::Interpolated(nt) => nt.use_span(),
             token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
                 self.look_ahead(1, |t| t.span)
             }
@@ -1771,7 +1755,6 @@ impl<'a> Parser<'a> {
     pub fn prev_token_uninterpolated_span(&self) -> Span {
         match &self.prev_token.kind {
             token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
-            token::Interpolated(nt) => nt.use_span(),
             token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
                 self.look_ahead(0, |t| t.span)
             }
@@ -1828,6 +1811,7 @@ pub enum ParseNtResult {
     Ident(Ident, IdentIsRaw),
     Lifetime(Ident, IdentIsRaw),
     Item(P<ast::Item>),
+    Block(P<ast::Block>),
     Stmt(P<ast::Stmt>),
     Pat(P<ast::Pat>, NtPatKind),
     Expr(P<ast::Expr>, NtExprKind),
@@ -1836,7 +1820,4 @@ pub enum ParseNtResult {
     Meta(P<ast::AttrItem>),
     Path(P<ast::Path>),
     Vis(P<ast::Visibility>),
-
-    /// This variant will eventually be removed, along with `Token::Interpolate`.
-    Nt(Arc<Nonterminal>),
 }
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index b4e540d670d..b6e89cd7fa4 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,14 +1,7 @@
-use std::sync::Arc;
-
-use rustc_ast::HasTokens;
 use rustc_ast::ptr::P;
-use rustc_ast::token::Nonterminal::*;
 use rustc_ast::token::NtExprKind::*;
 use rustc_ast::token::NtPatKind::*;
-use rustc_ast::token::{
-    self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token,
-};
-use rustc_ast_pretty::pprust;
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, NonterminalKind, Token};
 use rustc_errors::PResult;
 use rustc_span::{Ident, kw};
 
@@ -45,13 +38,6 @@ impl<'a> Parser<'a> {
             }
         }
 
-        /// Old variant of `may_be_ident`. Being phased out.
-        fn nt_may_be_ident(nt: &Nonterminal) -> bool {
-            match nt {
-                NtBlock(_) => false,
-            }
-        }
-
         match kind {
             // `expr_2021` and earlier
             NonterminalKind::Expr(Expr2021 { .. }) => {
@@ -83,16 +69,12 @@ impl<'a> Parser<'a> {
                 | token::Ident(..)
                 | token::NtIdent(..)
                 | token::NtLifetime(..)
-                | token::Interpolated(_)
                 | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true,
                 _ => token.can_begin_type(),
             },
             NonterminalKind::Block => match &token.kind {
                 token::OpenDelim(Delimiter::Brace) => true,
                 token::NtLifetime(..) => true,
-                token::Interpolated(nt) => match &**nt {
-                    NtBlock(_) => true,
-                },
                 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
                     MetaVarKind::Block
                     | MetaVarKind::Stmt
@@ -112,7 +94,6 @@ impl<'a> Parser<'a> {
             },
             NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::PathSep | token::Ident(..) | token::NtIdent(..) => true,
-                token::Interpolated(nt) => nt_may_be_ident(nt),
                 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
                     may_be_ident(*kind)
                 }
@@ -136,110 +117,85 @@ impl<'a> Parser<'a> {
         // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
         // which requires having captured tokens available. Since we cannot determine
         // in advance whether or not a proc-macro will be (transitively) invoked,
-        // we always capture tokens for any `Nonterminal` which needs them.
-        let mut nt = match kind {
+        // we always capture tokens for any nonterminal that needs them.
+        match kind {
             // Note that TT is treated differently to all the others.
-            NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())),
+            NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())),
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
-                Some(item) => return Ok(ParseNtResult::Item(item)),
-                None => {
-                    return Err(self
-                        .dcx()
-                        .create_err(UnexpectedNonterminal::Item(self.token.span)));
-                }
+                Some(item) => Ok(ParseNtResult::Item(item)),
+                None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))),
             },
             NonterminalKind::Block => {
                 // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
                 // the ':block' matcher does not support them
-                NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
+                Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?))
             }
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
-                Some(stmt) => return Ok(ParseNtResult::Stmt(P(stmt))),
+                Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))),
                 None => {
-                    return Err(self
-                        .dcx()
-                        .create_err(UnexpectedNonterminal::Statement(self.token.span)));
+                    Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span)))
                 }
             },
-            NonterminalKind::Pat(pat_kind) => {
-                return Ok(ParseNtResult::Pat(
-                    self.collect_tokens_no_attrs(|this| match pat_kind {
-                        PatParam { .. } => this.parse_pat_no_top_alt(None, None),
-                        PatWithOr => this.parse_pat_no_top_guard(
-                            None,
-                            RecoverComma::No,
-                            RecoverColon::No,
-                            CommaRecoveryMode::EitherTupleOrPipe,
-                        ),
-                    })?,
-                    pat_kind,
-                ));
-            }
+            NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat(
+                self.collect_tokens_no_attrs(|this| match pat_kind {
+                    PatParam { .. } => this.parse_pat_no_top_alt(None, None),
+                    PatWithOr => this.parse_pat_no_top_guard(
+                        None,
+                        RecoverComma::No,
+                        RecoverColon::No,
+                        CommaRecoveryMode::EitherTupleOrPipe,
+                    ),
+                })?,
+                pat_kind,
+            )),
             NonterminalKind::Expr(expr_kind) => {
-                return Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind));
+                Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind))
             }
             NonterminalKind::Literal => {
                 // The `:literal` matcher does not support attributes.
-                return Ok(ParseNtResult::Literal(
+                Ok(ParseNtResult::Literal(
                     self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
-                ));
-            }
-            NonterminalKind::Ty => {
-                return Ok(ParseNtResult::Ty(
-                    self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
-                ));
+                ))
             }
-            // this could be handled like a token, since it is one
+            NonterminalKind::Ty => Ok(ParseNtResult::Ty(
+                self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
+            )),
+            // This could be handled like a token, since it is one.
             NonterminalKind::Ident => {
-                return if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
+                if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
                     self.bump();
                     Ok(ParseNtResult::Ident(ident, is_raw))
                 } else {
                     Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
                         span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                     }))
-                };
-            }
-            NonterminalKind::Path => {
-                return Ok(ParseNtResult::Path(P(
-                    self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
-                )));
+                }
             }
+            NonterminalKind::Path => Ok(ParseNtResult::Path(P(
+                self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
+            ))),
             NonterminalKind::Meta => {
-                return Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)));
+                Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)))
             }
             NonterminalKind::Vis => {
-                return Ok(ParseNtResult::Vis(P(self.collect_tokens_no_attrs(|this| {
-                    this.parse_visibility(FollowedByType::Yes)
-                })?)));
+                Ok(ParseNtResult::Vis(P(self
+                    .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?)))
             }
             NonterminalKind::Lifetime => {
                 // We want to keep `'keyword` parsing, just like `keyword` is still
                 // an ident for nonterminal purposes.
-                return if let Some((ident, is_raw)) = self.token.lifetime() {
+                if let Some((ident, is_raw)) = self.token.lifetime() {
                     self.bump();
                     Ok(ParseNtResult::Lifetime(ident, is_raw))
                 } else {
                     Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
                         span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                     }))
-                };
+                }
             }
-        };
-
-        // If tokens are supported at all, they should be collected.
-        if matches!(nt.tokens_mut(), Some(None)) {
-            panic!(
-                "Missing tokens for nt {:?} at {:?}: {:?}",
-                nt,
-                nt.use_span(),
-                pprust::nonterminal_to_string(&nt)
-            );
         }
-
-        Ok(ParseNtResult::Nt(Arc::new(nt)))
     }
 }
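
With the `Nt(Arc<Nonterminal>)` catch-all gone, `parse_nonterminal` hands every capture back as a dedicated, fully structured `ParseNtResult` variant. A toy sketch of what that buys consumers (hypothetical names, far smaller than the real enum): matches stay exhaustive, and a new fragment kind cannot hide behind a boxed fallback.

    // Hypothetical, trimmed-down stand-in for ParseNtResult: every fragment kind
    // carries its parsed payload directly; there is no `Nt(Arc<Nonterminal>)` fallback.
    #[derive(Debug)]
    enum NtResult {
        Ident(String),
        Block(Vec<String>), // statements of a parsed block, standing in for P<ast::Block>
        Literal(i64),
    }

    // Consumers match exhaustively; adding a fragment kind is a compile error at
    // every match site instead of a new case buried inside a catch-all variant.
    fn describe(res: &NtResult) -> String {
        match res {
            NtResult::Ident(name) => format!("identifier `{name}`"),
            NtResult::Block(stmts) => format!("block with {} statement(s)", stmts.len()),
            NtResult::Literal(n) => format!("literal `{n}`"),
        }
    }

    fn main() {
        let captured = NtResult::Block(vec!["let x = 1;".to_string(), "x + 1;".to_string()]);
        println!("{}", describe(&captured));
    }
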
 
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 9612f71b2af..d5f469f9aa9 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -363,7 +363,7 @@ impl<'a> Parser<'a> {
                 self.dcx().emit_err(TrailingVertNotAllowed {
                     span: self.token.span,
                     start: lo,
-                    token: self.token.clone(),
+                    token: self.token,
                     note_double_vert: matches!(self.token.kind, token::OrOr),
                 });
                 self.bump();
@@ -1519,8 +1519,8 @@ impl<'a> Parser<'a> {
                 etc = PatFieldsRest::Rest;
                 let mut etc_sp = self.token.span;
                 if first_etc_and_maybe_comma_span.is_none() {
-                    if let Some(comma_tok) = self
-                        .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+                    if let Some(comma_tok) =
+                        self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None })
                     {
                         let nw_span = self
                             .psess
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 9c6830c3672..30fb96c6ea9 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -393,8 +393,8 @@ impl<'a> Parser<'a> {
                 } else {
                     // `(T, U) -> R`
 
-                    let prev_token_before_parsing = self.prev_token.clone();
-                    let token_before_parsing = self.token.clone();
+                    let prev_token_before_parsing = self.prev_token;
+                    let token_before_parsing = self.token;
                     let mut snapshot = None;
                     if self.may_recover()
                         && prev_token_before_parsing == token::PathSep
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index e00fd40ecee..a50a9e224cf 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -23,8 +23,8 @@ use super::{
     AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
     Trailing, UsePreAttrPos,
 };
-use crate::errors::MalformedLoopLabel;
-use crate::{errors, exp, maybe_whole};
+use crate::errors::{self, MalformedLoopLabel};
+use crate::exp;
 
 impl<'a> Parser<'a> {
     /// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -696,9 +696,11 @@ impl<'a> Parser<'a> {
         blk_mode: BlockCheckMode,
         loop_header: Option<Span>,
     ) -> PResult<'a, (AttrVec, P<Block>)> {
-        maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block));
+        if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) {
+            return Ok((AttrVec::new(), block));
+        }
 
-        let maybe_ident = self.prev_token.clone();
+        let maybe_ident = self.prev_token;
         self.maybe_recover_unexpected_block_label(loop_header);
         if !self.eat(exp!(OpenBrace)) {
             return self.error_block_no_opening_brace();
@@ -911,7 +913,7 @@ impl<'a> Parser<'a> {
                                 {
                                     if self.token == token::Colon
                                         && self.look_ahead(1, |token| {
-                                            token.is_whole_block()
+                                            token.is_metavar_block()
                                                 || matches!(
                                                     token.kind,
                                                     token::Ident(
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 49ae6cb9b72..2f958f4d492 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -2554,7 +2554,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) {
     // Do the `assert_eq` outside the closure so that `track_caller` works.
     // (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure
     // doesn't give the line number in the test below if the assertion fails.)
-    let tok = p.look_ahead(dist, |tok| tok.clone());
+    let tok = p.look_ahead(dist, |tok| *tok);
     assert_eq!(kind, tok.kind);
 }
 
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 93705da22c4..d0cff42a209 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -547,7 +547,7 @@ impl<'a> Parser<'a> {
 
             // Recovery
             mutbl = Mutability::Mut;
-            let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing);
+            let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing);
             self.bump();
             self.bump_with((dyn_tok, dyn_tok_sp));
         }
@@ -886,7 +886,7 @@ impl<'a> Parser<'a> {
     /// ```
     fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
         let lo = self.token.span;
-        let leading_token = self.prev_token.clone();
+        let leading_token = self.prev_token;
         let has_parens = self.eat(exp!(OpenParen));
 
         let bound = if self.token.is_lifetime() {
diff --git a/compiler/rustc_passes/src/lib.rs b/compiler/rustc_passes/src/lib.rs
index 6f6115af96c..93ff0f66d69 100644
--- a/compiler/rustc_passes/src/lib.rs
+++ b/compiler/rustc_passes/src/lib.rs
@@ -6,7 +6,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(let_chains)]
diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs
index 643b82f4753..c7bab828659 100644
--- a/compiler/rustc_privacy/src/lib.rs
+++ b/compiler/rustc_privacy/src/lib.rs
@@ -1,6 +1,5 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(associated_type_defaults)]
diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs
index 30a9e718d23..3c329dd0a0e 100644
--- a/compiler/rustc_query_impl/src/lib.rs
+++ b/compiler/rustc_query_impl/src/lib.rs
@@ -3,7 +3,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(unused_parens)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(min_specialization)]
diff --git a/compiler/rustc_sanitizers/src/lib.rs b/compiler/rustc_sanitizers/src/lib.rs
index c4a9cab24d0..e4792563e71 100644
--- a/compiler/rustc_sanitizers/src/lib.rs
+++ b/compiler/rustc_sanitizers/src/lib.rs
@@ -4,7 +4,6 @@
 //! compiler.
 
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(let_chains)]
 // tidy-alphabetical-end
 
diff --git a/compiler/rustc_smir/src/lib.rs b/compiler/rustc_smir/src/lib.rs
index 5d465bca4ee..77e5f3423d1 100644
--- a/compiler/rustc_smir/src/lib.rs
+++ b/compiler/rustc_smir/src/lib.rs
@@ -9,7 +9,6 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![allow(rustc::usage_of_ty_tykind)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(attr(allow(unused_variables), deny(warnings)))
diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs
index c9b15151a2c..cc33974cc62 100644
--- a/compiler/rustc_symbol_mangling/src/lib.rs
+++ b/compiler/rustc_symbol_mangling/src/lib.rs
@@ -89,7 +89,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(let_chains)]
diff --git a/compiler/rustc_transmute/src/lib.rs b/compiler/rustc_transmute/src/lib.rs
index 7d800e49ff4..00928137d29 100644
--- a/compiler/rustc_transmute/src/lib.rs
+++ b/compiler/rustc_transmute/src/lib.rs
@@ -1,5 +1,4 @@
 // tidy-alphabetical-start
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![feature(never_type)]
 // tidy-alphabetical-end
 
diff --git a/compiler/rustc_ty_utils/src/lib.rs b/compiler/rustc_ty_utils/src/lib.rs
index 143b7d53880..35cc6f39856 100644
--- a/compiler/rustc_ty_utils/src/lib.rs
+++ b/compiler/rustc_ty_utils/src/lib.rs
@@ -6,7 +6,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(assert_matches)]
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index ddf3d2ce96a..1e16aace304 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -858,18 +858,18 @@ impl MacroArgParser {
         };
 
         self.result.push(ParsedMacroArg {
-            kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok.clone()),
+            kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok),
         });
         Some(())
     }
 
-    fn update_buffer(&mut self, t: &Token) {
+    fn update_buffer(&mut self, t: Token) {
         if self.buf.is_empty() {
-            self.start_tok = t.clone();
+            self.start_tok = t;
         } else {
             let needs_space = match next_space(&self.last_tok.kind) {
-                SpaceState::Ident => ident_like(t),
-                SpaceState::Punctuation => !ident_like(t),
+                SpaceState::Ident => ident_like(&t),
+                SpaceState::Punctuation => !ident_like(&t),
                 SpaceState::Always => true,
                 SpaceState::Never => false,
             };
@@ -878,7 +878,7 @@ impl MacroArgParser {
             }
         }
 
-        self.buf.push_str(&pprust::token_to_string(t));
+        self.buf.push_str(&pprust::token_to_string(&t));
     }
 
     fn need_space_prefix(&self) -> bool {
@@ -937,7 +937,7 @@ impl MacroArgParser {
                 ) if self.is_meta_var => {
                     self.add_meta_variable(&mut iter)?;
                 }
-                TokenTree::Token(ref t, _) => self.update_buffer(t),
+                &TokenTree::Token(t, _) => self.update_buffer(t),
                 &TokenTree::Delimited(_dspan, _spacing, delimited, ref tts) => {
                     if !self.buf.is_empty() {
                         if next_space(&self.last_tok.kind) == SpaceState::Always {
diff --git a/tests/ui/macros/macro-as-fn-body.rs b/tests/ui/macros/macro-as-fn-body.rs
index e0542edc2a5..188c7f7f728 100644
--- a/tests/ui/macros/macro-as-fn-body.rs
+++ b/tests/ui/macros/macro-as-fn-body.rs
@@ -1,7 +1,7 @@
 //
 //@ run-pass
 //
-// Description - ensure Interpolated blocks can act as valid function bodies
+// Description - ensure block metavariables can act as valid function bodies
 // Covered cases: free functions, struct methods, and default trait functions
 
 macro_rules! def_fn {
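
The updated test above guards exactly this behaviour. As a quick standalone illustration (a hypothetical sibling of the test, not the test file itself), a `$body:block` capture still slots in where the parser expects a function body, even though it now reaches the parser as an invisible-delimited token sequence rather than an `Interpolated` block:

    // Minimal stand-alone version of the pattern the test covers: a block
    // metavariable spliced in where the parser expects a function body.
    macro_rules! def_fn {
        ($body:block) => {
            fn run() $body
        };
    }

    def_fn! {{ println!("block metavariable used as a function body"); }}

    fn main() {
        run();
    }
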
diff --git a/tests/ui/macros/syntax-error-recovery.rs b/tests/ui/macros/syntax-error-recovery.rs
index 6cf9d54e826..e1681ea32a2 100644
--- a/tests/ui/macros/syntax-error-recovery.rs
+++ b/tests/ui/macros/syntax-error-recovery.rs
@@ -5,14 +5,14 @@ macro_rules! values {
             $(
                 #[$attr]
                 $token $($inner)? = $value,
+                //~^ ERROR expected one of `!` or `::`, found `<eof>`
             )*
         }
     };
 }
-//~^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `ty` metavariable
+//~^^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `ty` metavariable
 //~| ERROR macro expansion ignores `ty` metavariable and any tokens following
 
 values!(STRING(1) as (String) => cfg(test),);
-//~^ ERROR expected one of `!` or `::`, found `<eof>`
 
 fn main() {}
diff --git a/tests/ui/macros/syntax-error-recovery.stderr b/tests/ui/macros/syntax-error-recovery.stderr
index 61758fb9d7d..a2059aa1aa8 100644
--- a/tests/ui/macros/syntax-error-recovery.stderr
+++ b/tests/ui/macros/syntax-error-recovery.stderr
@@ -22,10 +22,10 @@ LL | values!(STRING(1) as (String) => cfg(test),);
    = note: the usage of `values!` is likely invalid in item context
 
 error: expected one of `!` or `::`, found `<eof>`
-  --> $DIR/syntax-error-recovery.rs:15:9
+  --> $DIR/syntax-error-recovery.rs:7:17
    |
-LL | values!(STRING(1) as (String) => cfg(test),);
-   |         ^^^^^^ expected one of `!` or `::`
+LL |                 $token $($inner)? = $value,
+   |                 ^^^^^^ expected one of `!` or `::`
 
 error: aborting due to 3 previous errors