author     Trevor Gross <tmgross@umich.edu>    2025-06-19 07:53:22 +0000
committer  Trevor Gross <tmgross@umich.edu>    2025-06-19 16:28:52 +0000
commit     67a9fb2fb7703ffe2148ba30462b1dbcf559649b
tree       1df12a101bb6a88385f75b9059325b35604b7646
parent     496cbe1aa93f99b9988ab9b272426e9706be3fc9
mbe: Refactor transcription
Introduce `TranscrCtx` that holds everything relevant to transcription.
This allows for the following changes:

* Split `transcribe_sequence` and `transcribe_metavar` out of the
  heavily nested `transcribe`
* Split `metavar_expr_concat` out of `transcribe_metavar_expr`

This is a nonfunctional change.
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs | 737
1 file changed, 389 insertions(+), 348 deletions(-)
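
For orientation, the sketch below (illustrative only, not taken from the patch or its tests) shows the kind of macro whose expansion walks the two paths this patch splits out of `transcribe`: the `$(...)*` repetition goes through the new `transcribe_sequence`, and each `$name` / `$value` substitution goes through the new `transcribe_metavar`.

```
// Plain macro_rules! example: the $(...)* repetition exercises sequence
// transcription, and each $name / $value substitution exercises
// metavariable transcription.
macro_rules! key_value_strings {
    ($($name:ident = $value:expr),* $(,)?) => {
        [$( concat!(stringify!($name), "=", stringify!($value)) ),*]
    };
}

fn main() {
    let pairs = key_value_strings!(width = 80, height = 24);
    assert_eq!(pairs, ["width=80", "height=24"]);
}
```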
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index d1393a240cc..0520be5fbae 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -9,7 +9,7 @@ use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
 use rustc_parse::lexer::nfc_normalize;
 use rustc_parse::parser::ParseNtResult;
-use rustc_session::parse::{ParseSess, SymbolGallery};
+use rustc_session::parse::ParseSess;
 use rustc_span::hygiene::{LocalExpnId, Transparency};
 use rustc_span::{
     Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
@@ -25,20 +25,77 @@ use crate::mbe::macro_parser::NamedMatch::*;
 use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
 use crate::mbe::{self, KleeneOp, MetaVarExpr};
 
-// A Marker adds the given mark to the syntax context.
-struct Marker(LocalExpnId, Transparency, FxHashMap<SyntaxContext, SyntaxContext>);
+/// Context needed to perform transcription of metavariable expressions.
+struct TranscrCtx<'psess, 'itp> {
+    psess: &'psess ParseSess,
+
+    /// Map from metavars to matched tokens
+    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
+
+    /// Allow marking spans.
+    marker: Marker,
+
+    /// The stack of things yet to be completely expanded.
+    ///
+    /// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
+    /// we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
+    /// choice of spacing values doesn't matter.
+    stack: SmallVec<[Frame<'itp>; 1]>,
+
+    /// A stack of where we are in the repeat expansion.
+    ///
+    /// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
+    /// `repeats` keeps track of where we are in matching at each level, with the last element
+    /// being the most deeply nested sequence. This is used as a stack.
+    repeats: Vec<(usize, usize)>,
+
+    /// The resulting token stream from the `TokenTree` we just finished processing.
+    ///
+    /// At the end, this will contain the full result of transcription, but at arbitrary points
+    /// during `transcribe`, `result` will contain subsets of the final result.
+    ///
+    /// Specifically, as we descend into each TokenTree, we will push the existing results onto the
+    /// `result_stack` and clear `results`. We will then produce the results of transcribing the
+    /// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
+    /// `result_stack` and append `results` to it to produce the new `results` up to that point.
+    ///
+    /// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
+    /// again, and we are done transcribing.
+    result: Vec<TokenTree>,
+
+    /// The in-progress `result` lives at the top of this stack. Each entered `TokenTree` adds a
+    /// new entry.
+    result_stack: Vec<Vec<TokenTree>>,
+}
+
+impl<'psess> TranscrCtx<'psess, '_> {
+    /// Span marked with the correct expansion and transparency.
+    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
+        let mut span = dspan.entire();
+        self.marker.mark_span(&mut span);
+        span
+    }
+}
+
+/// A Marker adds the given mark to the syntax context.
+struct Marker {
+    expand_id: LocalExpnId,
+    transparency: Transparency,
+    cache: FxHashMap<SyntaxContext, SyntaxContext>,
+}
 
 impl Marker {
+    /// Mark a span with the stored expansion ID and transparency.
     fn mark_span(&mut self, span: &mut Span) {
         // `apply_mark` is a relatively expensive operation, both due to taking hygiene lock, and
         // by itself. All tokens in a macro body typically have the same syntactic context, unless
         // it's some advanced case with macro-generated macros. So if we cache the marked version
         // of that context once, we'll typically have a 100% cache hit rate after that.
-        let Marker(expn_id, transparency, ref mut cache) = *self;
         *span = span.map_ctxt(|ctxt| {
-            *cache
+            *self
+                .cache
                 .entry(ctxt)
-                .or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency))
+                .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
         });
     }
 }
@@ -116,52 +173,36 @@ pub(super) fn transcribe<'a>(
         return Ok(TokenStream::default());
     }
 
-    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
-    // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
-    // choice of spacing values doesn't matter.
-    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new_delimited(
-        src,
-        src_span,
-        DelimSpacing::new(Spacing::Alone, Spacing::Alone)
-    )];
-
-    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
-    // `repeats` keeps track of where we are in matching at each level, with the last element being
-    // the most deeply nested sequence. This is used as a stack.
-    let mut repeats: Vec<(usize, usize)> = Vec::new();
-
-    // `result` contains resulting token stream from the TokenTree we just finished processing. At
-    // the end, this will contain the full result of transcription, but at arbitrary points during
-    // `transcribe`, `result` will contain subsets of the final result.
-    //
-    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
-    // `result_stack` and clear `results`. We will then produce the results of transcribing the
-    // TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
-    // `result_stack` and append `results` too it to produce the new `results` up to that point.
-    //
-    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
-    // again, and we are done transcribing.
-    let mut result: Vec<TokenTree> = Vec::new();
-    let mut result_stack = Vec::new();
-    let mut marker = Marker(expand_id, transparency, Default::default());
-
-    let dcx = psess.dcx();
+    let mut tscx = TranscrCtx {
+        psess,
+        interp,
+        marker: Marker { expand_id, transparency, cache: Default::default() },
+        repeats: Vec::new(),
+        stack: smallvec![Frame::new_delimited(
+            src,
+            src_span,
+            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
+        )],
+        result: Vec::new(),
+        result_stack: Vec::new(),
+    };
+
     loop {
         // Look at the last frame on the stack.
         // If it still has a TokenTree we have not looked at yet, use that tree.
-        let Some(tree) = stack.last_mut().unwrap().next() else {
+        let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
             // This else-case never produces a value for `tree` (it `continue`s or `return`s).
 
             // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
             // go back to the beginning of the sequence.
-            let frame = stack.last_mut().unwrap();
+            let frame = tscx.stack.last_mut().unwrap();
             if let FrameKind::Sequence { sep, .. } = &frame.kind {
-                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
+                let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
                 *repeat_idx += 1;
                 if repeat_idx < repeat_len {
                     frame.idx = 0;
                     if let Some(sep) = sep {
-                        result.push(TokenTree::Token(*sep, Spacing::Alone));
+                        tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
                     }
                     continue;
                 }
@@ -170,10 +211,10 @@ pub(super) fn transcribe<'a>(
             // We are done with the top of the stack. Pop it. Depending on what it was, we do
             // different things. Note that the outermost item must be the delimited, wrapped RHS
             // that was passed in originally to `transcribe`.
-            match stack.pop().unwrap().kind {
+            match tscx.stack.pop().unwrap().kind {
                 // Done with a sequence. Pop from repeats.
                 FrameKind::Sequence { .. } => {
-                    repeats.pop();
+                    tscx.repeats.pop();
                 }
 
                 // We are done processing a Delimited. If this is the top-level delimited, we are
@@ -185,15 +226,16 @@ pub(super) fn transcribe<'a>(
                     if delim == Delimiter::Bracket {
                         spacing.close = Spacing::Alone;
                     }
-                    if result_stack.is_empty() {
+                    if tscx.result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
-                        return Ok(TokenStream::new(result));
+                        return Ok(TokenStream::new(tscx.result));
                     }
 
                     // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
-                    result = result_stack.pop().unwrap();
-                    result.push(tree);
+                    let tree =
+                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
+                    tscx.result = tscx.result_stack.pop().unwrap();
+                    tscx.result.push(tree);
                 }
             }
             continue;
@@ -202,223 +244,19 @@ pub(super) fn transcribe<'a>(
         // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
         // `tree` contains the next `TokenTree` to be processed.
         match tree {
-            // We are descending into a sequence. We first make sure that the matchers in the RHS
-            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
-            // macro writer has made a mistake.
+            // Replace the sequence with its expansion.
             seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
-                match lockstep_iter_size(seq, interp, &repeats) {
-                    LockstepIterSize::Unconstrained => {
-                        return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
-                    }
-
-                    LockstepIterSize::Contradiction(msg) => {
-                        // FIXME: this really ought to be caught at macro definition time... It
-                        // happens when two meta-variables are used in the same repetition in a
-                        // sequence, but they come from different sequence matchers and repeat
-                        // different amounts.
-                        return Err(
-                            dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })
-                        );
-                    }
-
-                    LockstepIterSize::Constraint(len, _) => {
-                        // We do this to avoid an extra clone above. We know that this is a
-                        // sequence already.
-                        let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
-
-                        // Is the repetition empty?
-                        if len == 0 {
-                            if seq.kleene.op == KleeneOp::OneOrMore {
-                                // FIXME: this really ought to be caught at macro definition
-                                // time... It happens when the Kleene operator in the matcher and
-                                // the body for the same meta-variable do not match.
-                                return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
-                            }
-                        } else {
-                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
-                            // is the total number of repetitions we should generate.
-                            repeats.push((0, len));
-
-                            // The first time we encounter the sequence we push it to the stack. It
-                            // then gets reused (see the beginning of the loop) until we are done
-                            // repeating.
-                            stack.push(Frame::new_sequence(
-                                seq_rep,
-                                seq.separator.clone(),
-                                seq.kleene.op,
-                            ));
-                        }
-                    }
-                }
+                transcribe_sequence(&mut tscx, seq, seq_rep)?;
             }
 
             // Replace the meta-var with the matched token tree from the invocation.
-            &mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
-                // Find the matched nonterminal from the macro invocation, and use it to replace
-                // the meta-var.
-                //
-                // We use `Spacing::Alone` everywhere here, because that's the conservative choice
-                // and spacing of declarative macros is tricky. E.g. in this macro:
-                // ```
-                // macro_rules! idents {
-                //     ($($a:ident,)*) => { stringify!($($a)*) }
-                // }
-                // ```
-                // `$a` has no whitespace after it and will be marked `JointHidden`. If you then
-                // call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
-                // if you choose to use `$x`'s spacing or the identifier's spacing, you'll end up
-                // producing "xyz", which is bad because it effectively merges tokens.
-                // `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
-                // some of the unnecessary whitespace.
-                let ident = MacroRulesNormalizedIdent::new(original_ident);
-                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
-                    // We wrap the tokens in invisible delimiters, unless they are already wrapped
-                    // in invisible delimiters with the same `MetaVarKind`. Because some proc
-                    // macros can't handle multiple layers of invisible delimiters of the same
-                    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
-                    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
-                        if stream.len() == 1 {
-                            let tree = stream.iter().next().unwrap();
-                            if let TokenTree::Delimited(_, _, delim, inner) = tree
-                                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
-                                && mv_kind == *mvk
-                            {
-                                stream = inner.clone();
-                            }
-                        }
-
-                        // Emit as a token stream within `Delimiter::Invisible` to maintain
-                        // parsing priorities.
-                        marker.mark_span(&mut sp);
-                        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
-                        // Both the open delim and close delim get the same span, which covers the
-                        // `$foo` in the decl macro RHS.
-                        TokenTree::Delimited(
-                            DelimSpan::from_single(sp),
-                            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
-                            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
-                            stream,
-                        )
-                    };
-                    let tt = match cur_matched {
-                        MatchedSingle(ParseNtResult::Tt(tt)) => {
-                            // `tt`s are emitted into the output stream directly as "raw tokens",
-                            // without wrapping them into groups. Other variables are emitted into
-                            // the output stream as groups with `Delimiter::Invisible` to maintain
-                            // parsing priorities.
-                            maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
-                        }
-                        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
-                            marker.mark_span(&mut sp);
-                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
-                            let kind = token::NtIdent(*ident, *is_raw);
-                            TokenTree::token_alone(kind, sp)
-                        }
-                        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
-                            marker.mark_span(&mut sp);
-                            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
-                            let kind = token::NtLifetime(*ident, *is_raw);
-                            TokenTree::token_alone(kind, sp)
-                        }
-                        MatchedSingle(ParseNtResult::Item(item)) => {
-                            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
-                        }
-                        MatchedSingle(ParseNtResult::Block(block)) => mk_delimited(
-                            block.span,
-                            MetaVarKind::Block,
-                            TokenStream::from_ast(block),
-                        ),
-                        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
-                            let stream = if let StmtKind::Empty = stmt.kind {
-                                // FIXME: Properly collect tokens for empty statements.
-                                TokenStream::token_alone(token::Semi, stmt.span)
-                            } else {
-                                TokenStream::from_ast(stmt)
-                            };
-                            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
-                        }
-                        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => mk_delimited(
-                            pat.span,
-                            MetaVarKind::Pat(*pat_kind),
-                            TokenStream::from_ast(pat),
-                        ),
-                        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
-                            let (can_begin_literal_maybe_minus, can_begin_string_literal) =
-                                match &expr.kind {
-                                    ExprKind::Lit(_) => (true, true),
-                                    ExprKind::Unary(UnOp::Neg, e)
-                                        if matches!(&e.kind, ExprKind::Lit(_)) =>
-                                    {
-                                        (true, false)
-                                    }
-                                    _ => (false, false),
-                                };
-                            mk_delimited(
-                                expr.span,
-                                MetaVarKind::Expr {
-                                    kind: *kind,
-                                    can_begin_literal_maybe_minus,
-                                    can_begin_string_literal,
-                                },
-                                TokenStream::from_ast(expr),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Literal(lit)) => {
-                            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
-                        }
-                        MatchedSingle(ParseNtResult::Ty(ty)) => {
-                            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
-                            mk_delimited(
-                                ty.span,
-                                MetaVarKind::Ty { is_path },
-                                TokenStream::from_ast(ty),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
-                            let has_meta_form = attr_item.meta_kind().is_some();
-                            mk_delimited(
-                                attr_item.span(),
-                                MetaVarKind::Meta { has_meta_form },
-                                TokenStream::from_ast(attr_item),
-                            )
-                        }
-                        MatchedSingle(ParseNtResult::Path(path)) => {
-                            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
-                        }
-                        MatchedSingle(ParseNtResult::Vis(vis)) => {
-                            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
-                        }
-                        MatchedSeq(..) => {
-                            // We were unable to descend far enough. This is an error.
-                            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
-                        }
-                    };
-                    result.push(tt)
-                } else {
-                    // If we aren't able to match the meta-var, we push it back into the result but
-                    // with modified syntax context. (I believe this supports nested macros).
-                    marker.mark_span(&mut sp);
-                    marker.mark_span(&mut original_ident.span);
-                    result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
-                    result.push(TokenTree::Token(
-                        Token::from_ast_ident(original_ident),
-                        Spacing::Alone,
-                    ));
-                }
+            &mbe::TokenTree::MetaVar(sp, original_ident) => {
+                transcribe_metavar(&mut tscx, sp, original_ident)?;
             }
 
             // Replace meta-variable expressions with the result of their expansion.
-            mbe::TokenTree::MetaVarExpr(sp, expr) => {
-                transcribe_metavar_expr(
-                    dcx,
-                    expr,
-                    interp,
-                    &mut marker,
-                    &repeats,
-                    &mut result,
-                    sp,
-                    &psess.symbol_gallery,
-                )?;
+            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
+                transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
             }
 
             // If we are entering a new delimiter, we push its contents to the `stack` to be
@@ -427,21 +265,21 @@ pub(super) fn transcribe<'a>(
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
             &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
-                marker.mark_span(&mut span.open);
-                marker.mark_span(&mut span.close);
-                stack.push(Frame::new_delimited(delimited, span, *spacing));
-                result_stack.push(mem::take(&mut result));
+                tscx.marker.mark_span(&mut span.open);
+                tscx.marker.mark_span(&mut span.close);
+                tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
+                tscx.result_stack.push(mem::take(&mut tscx.result));
             }
 
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
             &mbe::TokenTree::Token(mut token) => {
-                marker.mark_span(&mut token.span);
+                tscx.marker.mark_span(&mut token.span);
                 if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
-                    marker.mark_span(&mut ident.span);
+                    tscx.marker.mark_span(&mut ident.span);
                 }
                 let tt = TokenTree::Token(token, Spacing::Alone);
-                result.push(tt);
+                tscx.result.push(tt);
             }
 
             // There should be no meta-var declarations in the invocation of a macro.
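
The unmatched-metavariable fallback that the new `transcribe_metavar` (added in the next hunk) keeps from the old inline code, re-emitting `$` plus the identifier with a remarked span, is what lets a macro define another macro. A minimal illustration of that passthrough, with hypothetical names:

```
// `$x` is not a metavariable of the outer macro, so transcription re-emits the
// `$` and the identifier (with adjusted hygiene) instead of substituting them;
// the inner macro then binds `$x` itself.
macro_rules! make_identity_macro {
    ($name:ident) => {
        macro_rules! $name {
            ($x:expr) => { $x };
        }
    };
}

make_identity_macro!(id);

fn main() {
    assert_eq!(id!(2 + 3), 5);
}
```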
@@ -450,102 +288,305 @@ pub(super) fn transcribe<'a>(
     }
 }
 
-fn transcribe_metavar_expr<'a>(
-    dcx: DiagCtxtHandle<'a>,
-    expr: &MetaVarExpr,
-    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
-    marker: &mut Marker,
-    repeats: &[(usize, usize)],
-    result: &mut Vec<TokenTree>,
-    sp: &DelimSpan,
-    symbol_gallery: &SymbolGallery,
-) -> PResult<'a, ()> {
-    let mut visited_span = || {
-        let mut span = sp.entire();
-        marker.mark_span(&mut span);
-        span
-    };
-    match *expr {
-        MetaVarExpr::Concat(ref elements) => {
-            let mut concatenated = String::new();
-            for element in elements.into_iter() {
-                let symbol = match element {
-                    MetaVarExprConcatElem::Ident(elem) => elem.name,
-                    MetaVarExprConcatElem::Literal(elem) => *elem,
-                    MetaVarExprConcatElem::Var(ident) => {
-                        match matched_from_ident(dcx, *ident, interp)? {
-                            NamedMatch::MatchedSeq(named_matches) => {
-                                let Some((curr_idx, _)) = repeats.last() else {
-                                    return Err(dcx.struct_span_err(sp.entire(), "invalid syntax"));
-                                };
-                                match &named_matches[*curr_idx] {
-                                    // FIXME(c410-f3r) Nested repetitions are unimplemented
-                                    MatchedSeq(_) => unimplemented!(),
-                                    MatchedSingle(pnr) => {
-                                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
-                                    }
-                                }
-                            }
-                            NamedMatch::MatchedSingle(pnr) => {
-                                extract_symbol_from_pnr(dcx, pnr, ident.span)?
-                            }
-                        }
-                    }
-                };
-                concatenated.push_str(symbol.as_str());
+/// Turn `$(...)*` sequences into tokens.
+fn transcribe_sequence<'tx, 'itp>(
+    tscx: &mut TranscrCtx<'tx, 'itp>,
+    seq: &mbe::TokenTree,
+    seq_rep: &'itp mbe::SequenceRepetition,
+) -> PResult<'tx, ()> {
+    let dcx = tscx.psess.dcx();
+
+    // We are descending into a sequence. We first make sure that the matchers in the RHS
+    // and the matches in `interp` have the same shape. Otherwise, either the caller or the
+    // macro writer has made a mistake.
+    match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
+        LockstepIterSize::Unconstrained => {
+            return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
+        }
+
+        LockstepIterSize::Contradiction(msg) => {
+            // FIXME: this really ought to be caught at macro definition time... It
+            // happens when two meta-variables are used in the same repetition in a
+            // sequence, but they come from different sequence matchers and repeat
+            // different amounts.
+            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
+        }
+
+        LockstepIterSize::Constraint(len, _) => {
+            // We do this to avoid an extra clone above. We know that this is a
+            // sequence already.
+            let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };
+
+            // Is the repetition empty?
+            if len == 0 {
+                if seq.kleene.op == KleeneOp::OneOrMore {
+                    // FIXME: this really ought to be caught at macro definition
+                    // time... It happens when the Kleene operator in the matcher and
+                    // the body for the same meta-variable do not match.
+                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
+                }
+            } else {
+                // 0 is the initial counter (we have done 0 repetitions so far). `len`
+                // is the total number of repetitions we should generate.
+                tscx.repeats.push((0, len));
+
+                // The first time we encounter the sequence we push it to the stack. It
+                // then gets reused (see the beginning of the loop) until we are done
+                // repeating.
+                tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
             }
-            let symbol = nfc_normalize(&concatenated);
-            let concatenated_span = visited_span();
-            if !rustc_lexer::is_ident(symbol.as_str()) {
-                return Err(dcx.struct_span_err(
-                    concatenated_span,
-                    "`${concat(..)}` is not generating a valid identifier",
-                ));
+        }
+    }
+
+    Ok(())
+}
+
+/// Find the matched nonterminal from the macro invocation, and use it to replace
+/// the meta-var.
+///
+/// We use `Spacing::Alone` everywhere here, because that's the conservative choice
+/// and spacing of declarative macros is tricky. E.g. in this macro:
+/// ```
+/// macro_rules! idents {
+///     ($($a:ident,)*) => { stringify!($($a)*) }
+/// }
+/// ```
+/// `$a` has no whitespace after it and will be marked `JointHidden`. If you then
+/// call `idents!(x,y,z,)`, each of `x`, `y`, and `z` will be marked as `Joint`. So
+/// if you choose to use `$a`'s spacing or the identifier's spacing, you'll end up
+/// producing "xyz", which is bad because it effectively merges tokens.
+/// `Spacing::Alone` is the safer option. Fortunately, `space_between` will avoid
+/// some of the unnecessary whitespace.
+fn transcribe_metavar<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    mut sp: Span,
+    mut original_ident: Ident,
+) -> PResult<'tx, ()> {
+    let dcx = tscx.psess.dcx();
+
+    let ident = MacroRulesNormalizedIdent::new(original_ident);
+    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
+        // If we aren't able to match the meta-var, we push it back into the result but
+        // with modified syntax context. (I believe this supports nested macros).
+        tscx.marker.mark_span(&mut sp);
+        tscx.marker.mark_span(&mut original_ident.span);
+        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
+        tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
+        return Ok(());
+    };
+
+    // We wrap the tokens in invisible delimiters, unless they are already wrapped
+    // in invisible delimiters with the same `MetaVarKind`. Because some proc
+    // macros can't handle multiple layers of invisible delimiters of the same
+    // `MetaVarKind`. This loses some span info, though it hopefully won't matter.
+    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
+        if stream.len() == 1 {
+            let tree = stream.iter().next().unwrap();
+            if let TokenTree::Delimited(_, _, delim, inner) = tree
+                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
+                && mv_kind == *mvk
+            {
+                stream = inner.clone();
             }
-            symbol_gallery.insert(symbol, concatenated_span);
-            // The current implementation marks the span as coming from the macro regardless of
-            // contexts of the concatenated identifiers but this behavior may change in the
-            // future.
-            result.push(TokenTree::Token(
-                Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
-                Spacing::Alone,
-            ));
         }
+
+        // Emit as a token stream within `Delimiter::Invisible` to maintain
+        // parsing priorities.
+        tscx.marker.mark_span(&mut sp);
+        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
+        // Both the open delim and close delim get the same span, which covers the
+        // `$foo` in the decl macro RHS.
+        TokenTree::Delimited(
+            DelimSpan::from_single(sp),
+            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
+            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
+            stream,
+        )
+    };
+
+    let tt = match cur_matched {
+        MatchedSingle(ParseNtResult::Tt(tt)) => {
+            // `tt`s are emitted into the output stream directly as "raw tokens",
+            // without wrapping them into groups. Other variables are emitted into
+            // the output stream as groups with `Delimiter::Invisible` to maintain
+            // parsing priorities.
+            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
+        }
+        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
+            tscx.marker.mark_span(&mut sp);
+            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
+            let kind = token::NtIdent(*ident, *is_raw);
+            TokenTree::token_alone(kind, sp)
+        }
+        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
+            tscx.marker.mark_span(&mut sp);
+            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
+            let kind = token::NtLifetime(*ident, *is_raw);
+            TokenTree::token_alone(kind, sp)
+        }
+        MatchedSingle(ParseNtResult::Item(item)) => {
+            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
+        }
+        MatchedSingle(ParseNtResult::Block(block)) => {
+            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
+        }
+        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
+            let stream = if let StmtKind::Empty = stmt.kind {
+                // FIXME: Properly collect tokens for empty statements.
+                TokenStream::token_alone(token::Semi, stmt.span)
+            } else {
+                TokenStream::from_ast(stmt)
+            };
+            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
+        }
+        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
+            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
+        }
+        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
+            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
+                ExprKind::Lit(_) => (true, true),
+                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
+                    (true, false)
+                }
+                _ => (false, false),
+            };
+            mk_delimited(
+                expr.span,
+                MetaVarKind::Expr {
+                    kind: *kind,
+                    can_begin_literal_maybe_minus,
+                    can_begin_string_literal,
+                },
+                TokenStream::from_ast(expr),
+            )
+        }
+        MatchedSingle(ParseNtResult::Literal(lit)) => {
+            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
+        }
+        MatchedSingle(ParseNtResult::Ty(ty)) => {
+            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
+            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
+        }
+        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
+            let has_meta_form = attr_item.meta_kind().is_some();
+            mk_delimited(
+                attr_item.span(),
+                MetaVarKind::Meta { has_meta_form },
+                TokenStream::from_ast(attr_item),
+            )
+        }
+        MatchedSingle(ParseNtResult::Path(path)) => {
+            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
+        }
+        MatchedSingle(ParseNtResult::Vis(vis)) => {
+            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
+        }
+        MatchedSeq(..) => {
+            // We were unable to descend far enough. This is an error.
+            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
+        }
+    };
+
+    tscx.result.push(tt);
+    Ok(())
+}
+
+/// Turn `${expr(...)}` metavariable expressions into tokens.
+fn transcribe_metavar_expr<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    dspan: DelimSpan,
+    expr: &MetaVarExpr,
+) -> PResult<'tx, ()> {
+    let dcx = tscx.psess.dcx();
+    let tt = match *expr {
+        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
         MetaVarExpr::Count(original_ident, depth) => {
-            let matched = matched_from_ident(dcx, original_ident, interp)?;
-            let count = count_repetitions(dcx, depth, matched, repeats, sp)?;
-            let tt = TokenTree::token_alone(
+            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
+            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
+            TokenTree::token_alone(
                 TokenKind::lit(token::Integer, sym::integer(count), None),
-                visited_span(),
-            );
-            result.push(tt);
+                tscx.visited_dspan(dspan),
+            )
         }
         MetaVarExpr::Ignore(original_ident) => {
             // Used to ensure that `original_ident` is present in the LHS
-            let _ = matched_from_ident(dcx, original_ident, interp)?;
-        }
-        MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
-            Some((index, _)) => {
-                result.push(TokenTree::token_alone(
-                    TokenKind::lit(token::Integer, sym::integer(*index), None),
-                    visited_span(),
-                ));
+            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
+            return Ok(());
+        }
+        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
+            Some((index, _)) => TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(*index), None),
+                tscx.visited_dspan(dspan),
+            ),
+            None => {
+                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
             }
-            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")),
         },
-        MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) {
-            Some((_, length)) => {
-                result.push(TokenTree::token_alone(
-                    TokenKind::lit(token::Integer, sym::integer(*length), None),
-                    visited_span(),
-                ));
+        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
+            Some((_, length)) => TokenTree::token_alone(
+                TokenKind::lit(token::Integer, sym::integer(*length), None),
+                tscx.visited_dspan(dspan),
+            ),
+            None => {
+                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
             }
-            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")),
         },
-    }
+    };
+    tscx.result.push(tt);
     Ok(())
 }
 
+/// Handle the `${concat(...)}` metavariable expression.
+fn metavar_expr_concat<'tx>(
+    tscx: &mut TranscrCtx<'tx, '_>,
+    dspan: DelimSpan,
+    elements: &[MetaVarExprConcatElem],
+) -> PResult<'tx, TokenTree> {
+    let dcx = tscx.psess.dcx();
+    let mut concatenated = String::new();
+    for element in elements.into_iter() {
+        let symbol = match element {
+            MetaVarExprConcatElem::Ident(elem) => elem.name,
+            MetaVarExprConcatElem::Literal(elem) => *elem,
+            MetaVarExprConcatElem::Var(ident) => {
+                match matched_from_ident(dcx, *ident, tscx.interp)? {
+                    NamedMatch::MatchedSeq(named_matches) => {
+                        let Some((curr_idx, _)) = tscx.repeats.last() else {
+                            return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
+                        };
+                        match &named_matches[*curr_idx] {
+                            // FIXME(c410-f3r) Nested repetitions are unimplemented
+                            MatchedSeq(_) => unimplemented!(),
+                            MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
+                        }
+                    }
+                    NamedMatch::MatchedSingle(pnr) => {
+                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
+                    }
+                }
+            }
+        };
+        concatenated.push_str(symbol.as_str());
+    }
+    let symbol = nfc_normalize(&concatenated);
+    let concatenated_span = tscx.visited_dspan(dspan);
+    if !rustc_lexer::is_ident(symbol.as_str()) {
+        return Err(dcx.struct_span_err(
+            concatenated_span,
+            "`${concat(..)}` is not generating a valid identifier",
+        ));
+    }
+    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);
+
+    // The current implementation marks the span as coming from the macro regardless of
+    // contexts of the concatenated identifiers but this behavior may change in the
+    // future.
+    Ok(TokenTree::Token(
+        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
+        Spacing::Alone,
+    ))
+}
+
 /// Store the metavariable span for this original span into a side table.
 /// FIXME: Try to put the metavariable span into `SpanData` instead of a side table (#118517).
 /// An optimal encoding for inlined spans will need to be selected to minimize regressions.
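
From the macro author's side, the `${...}` forms handled by `transcribe_metavar_expr` and the new `metavar_expr_concat` look like the sketch below. This assumes a nightly toolchain; the gates `macro_metavar_expr` and `macro_metavar_expr_concat` are still unstable at the time of this commit.

```
// Nightly-only sketch: ${index()} and ${len()} go through
// transcribe_metavar_expr; ${concat(..)} goes through metavar_expr_concat.
#![feature(macro_metavar_expr, macro_metavar_expr_concat)]

macro_rules! enumerate_consts {
    ($($name:ident = $val:expr),* $(,)?) => {
        $(
            // ${concat(..)} builds a fresh identifier; ${index()} and ${len()}
            // expand to unsuffixed integer literals for the current repetition.
            const ${concat($name, _INFO)}: (u32, usize, usize) = ($val, ${index()}, ${len()});
        )*
    };
}

enumerate_consts!(WIDTH = 80, HEIGHT = 24);

fn main() {
    assert_eq!(WIDTH_INFO, (80, 0, 2));
    assert_eq!(HEIGHT_INFO, (24, 1, 2));
}
```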
@@ -767,13 +808,13 @@ fn lockstep_iter_size(
 /// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
 /// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
 ///   declared inside a single repetition and the index `1` implies two nested repetitions.
-fn count_repetitions<'a>(
-    dcx: DiagCtxtHandle<'a>,
+fn count_repetitions<'dx>(
+    dcx: DiagCtxtHandle<'dx>,
     depth_user: usize,
     mut matched: &NamedMatch,
     repeats: &[(usize, usize)],
     sp: &DelimSpan,
-) -> PResult<'a, usize> {
+) -> PResult<'dx, usize> {
     // Recursively count the number of matches in `matched` at given depth
     // (or at the top-level of `matched` if no depth is given).
     fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {