author      Nicholas Nethercote <n.nethercote@gmail.com>   2023-10-12 15:36:14 +1100
committer   Nicholas Nethercote <n.nethercote@gmail.com>   2023-12-11 09:36:40 +1100
commit      4cfdbd328b7171b2328d11b950b1af0978d6b1ef (patch)
tree        544de5216adec4838810b031c084a025a787573d
parent      925f7fad576b6a8659d93846faf8d9610e59bab0 (diff)
download    rust-4cfdbd328b7171b2328d11b950b1af0978d6b1ef.tar.gz
            rust-4cfdbd328b7171b2328d11b950b1af0978d6b1ef.zip
Add spacing information to delimiters.
This is an extension of the previous commit. It means the output of
something like this:
```
stringify!(let a: Vec<u32> = vec![];)
```
goes from this:
```
let a: Vec<u32> = vec![] ;
```
to this:
```
let a: Vec<u32> = vec![];
```
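The shape change at the core of this commit is small: `TokenTree::Delimited` (and the parallel `AttrTokenTree::Delimited`) gains a `DelimSpacing` field recording the `Spacing` of the open and close delimiters. Below is a minimal sketch of the new pieces, using simplified stand-ins for the `rustc_ast` types rather than the real definitions in `compiler/rustc_ast/src/tokenstream.rs`:

```rust
// Simplified stand-ins for the rustc_ast types; in the compiler the new field
// slots into `TokenTree::Delimited(DelimSpan, DelimSpacing, Delimiter,
// TokenStream)` in compiler/rustc_ast/src/tokenstream.rs.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Spacing {
    Alone,       // the token is followed by whitespace (or EOF)
    Joint,       // followed immediately by punctuation
    JointHidden, // followed immediately by any other token
}

#[derive(Copy, Clone, Debug)]
struct DelimSpacing {
    open: Spacing,
    close: Spacing,
}

impl DelimSpacing {
    fn new(open: Spacing, close: Spacing) -> DelimSpacing {
        DelimSpacing { open, close }
    }
}

fn main() {
    // For a group like `(b ...) `: the `(` is followed immediately by `b`,
    // so its spacing is `JointHidden`; the `)` is followed by whitespace,
    // so its spacing is `Alone` (same reasoning as the parser test below).
    let spacing = DelimSpacing::new(Spacing::JointHidden, Spacing::Alone);
    println!("{spacing:?}");
}
```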
-rw-r--r--  compiler/rustc_ast/src/attr/mod.rs | 6
-rw-r--r--  compiler/rustc_ast/src/mut_visit.rs | 4
-rw-r--r--  compiler/rustc_ast/src/tokenstream.rs | 55
-rw-r--r--  compiler/rustc_ast_pretty/src/pprust/state.rs | 10
-rw-r--r--  compiler/rustc_expand/src/config.rs | 10
-rw-r--r--  compiler/rustc_expand/src/mbe.rs | 6
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_check.rs | 12
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs | 4
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_rules.rs | 18
-rw-r--r--  compiler/rustc_expand/src/mbe/metavar_expr.rs | 2
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs | 7
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs | 40
-rw-r--r--  compiler/rustc_expand/src/parse/tests.rs | 15
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs | 14
-rw-r--r--  compiler/rustc_lint/src/builtin.rs | 2
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs | 2
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs | 87
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs | 14
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs | 2
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 35
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs | 6
-rw-r--r--  src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs | 2
-rw-r--r--  src/tools/rustfmt/src/macros.rs | 6
-rw-r--r--  tests/ui/async-await/issues/issue-60674.stdout | 4
-rw-r--r--  tests/ui/infinite/issue-41731-infinite-macro-print.stderr | 8
-rw-r--r--  tests/ui/infinite/issue-41731-infinite-macro-println.stderr | 8
-rw-r--r--  tests/ui/macros/stringify.rs | 30
-rw-r--r--  tests/ui/macros/trace-macro.stderr | 2
-rw-r--r--  tests/ui/macros/trace_faulty_macros.stderr | 12
-rw-r--r--  tests/ui/proc-macro/allowed-attr-stmt-expr.stdout | 4
-rw-r--r--  tests/ui/proc-macro/attr-complex-fn.stdout | 4
-rw-r--r--  tests/ui/proc-macro/attr-stmt-expr.stdout | 4
-rw-r--r--  tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs | 2
-rw-r--r--  tests/ui/proc-macro/cfg-eval-inner.stdout | 2
-rw-r--r--  tests/ui/proc-macro/dollar-crate-issue-57089.stdout | 6
-rw-r--r--  tests/ui/proc-macro/dollar-crate-issue-62325.stdout | 6
-rw-r--r--  tests/ui/proc-macro/dollar-crate.stdout | 18
-rw-r--r--  tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout | 11
-rw-r--r--  tests/ui/proc-macro/inner-attrs.stdout | 8
-rw-r--r--  tests/ui/proc-macro/issue-75734-pp-paren.stdout | 2
-rw-r--r--  tests/ui/proc-macro/issue-75930-derive-cfg.stdout | 12
-rw-r--r--  tests/ui/proc-macro/macro-rules-derive-cfg.stdout | 2
-rw-r--r--  tests/ui/proc-macro/meta-macro-hygiene.stdout | 4
-rw-r--r--  tests/ui/proc-macro/nonterminal-token-hygiene.stdout | 4
-rw-r--r--  tests/ui/proc-macro/pretty-print-tts.stdout | 2
-rw-r--r--  tests/ui/proc-macro/weird-braces.stdout | 8
-rw-r--r--  tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs | 16
47 files changed, 307 insertions, 231 deletions
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 851ab46345f..98138cedb24 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -387,7 +387,7 @@ impl MetaItemKind {
         tokens: &mut impl Iterator<Item = &'a TokenTree>,
     ) -> Option<MetaItemKind> {
         match tokens.next() {
-            Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
+            Some(TokenTree::Delimited(.., Delimiter::Invisible, inner_tokens)) => {
                 MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
             }
             Some(TokenTree::Token(token, _)) => {
@@ -401,7 +401,7 @@ impl MetaItemKind {
         tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
     ) -> Option<MetaItemKind> {
         match tokens.peek() {
-            Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => {
+            Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
                 tokens.next();
                 MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
@@ -524,7 +524,7 @@ impl NestedMetaItem {
                 tokens.next();
                 return Some(NestedMetaItem::Lit(lit));
             }
-            Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
+            Some(TokenTree::Delimited(.., Delimiter::Invisible, inner_tokens)) => {
                 tokens.next();
                 return NestedMetaItem::from_tokens(&mut inner_tokens.trees().peekable());
             }
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index 01defcff9ef..10b2025f937 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -682,7 +682,7 @@ pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
         AttrTokenTree::Token(token, _) => {
             visit_token(token, vis);
         }
-        AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+        AttrTokenTree::Delimited(DelimSpan { open, close }, _spacing, _delim, tts) => {
             vis.visit_span(open);
             vis.visit_span(close);
             visit_attr_tts(tts, vis);
@@ -709,7 +709,7 @@ pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
         TokenTree::Token(token, _) => {
             visit_token(token, vis);
         }
-        TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+        TokenTree::Delimited(DelimSpan { open, close }, _spacing, _delim, tts) => {
             vis.visit_span(open);
             vis.visit_span(close);
             visit_tts(tts, vis);
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index fc67567c013..4c0c496584e 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -46,7 +46,7 @@ pub enum TokenTree {
     /// delimiters are implicitly represented by `Delimited`.
     Token(Token, Spacing),
     /// A delimited sequence of token trees.
-    Delimited(DelimSpan, Delimiter, TokenStream),
+    Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
 }
 
 // Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
@@ -62,11 +62,11 @@ where
 }
 
 impl TokenTree {
-    /// Checks if this `TokenTree` is equal to the other, regardless of span information.
+    /// Checks if this `TokenTree` is equal to the other, regardless of span/spacing information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
             (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
-            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
+            (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(tts2)
             }
             _ => false,
@@ -188,7 +188,7 @@ pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
 #[derive(Clone, Debug, Encodable, Decodable)]
 pub enum AttrTokenTree {
     Token(Token, Spacing),
-    Delimited(DelimSpan, Delimiter, AttrTokenStream),
+    Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
     /// Stores the attributes for an attribute target,
     /// along with the tokens for that attribute target.
     /// See `AttributesData` for more information
@@ -213,9 +213,14 @@ impl AttrTokenStream {
                 AttrTokenTree::Token(inner, spacing) => {
                     smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
                 }
-                AttrTokenTree::Delimited(span, delim, stream) => {
-                    smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
-                        .into_iter()
+                AttrTokenTree::Delimited(span, spacing, delim, stream) => {
+                    smallvec![TokenTree::Delimited(
+                        *span,
+                        *spacing,
+                        *delim,
+                        stream.to_tokenstream()
+                    ),]
+                    .into_iter()
                 }
                 AttrTokenTree::Attributes(data) => {
                     let idx = data
@@ -235,7 +240,7 @@ impl AttrTokenStream {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
                         for tree in target_tokens.iter_mut().rev().take(2) {
-                            if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
+                            if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree {
                                 // Inner attributes are only supported on extern blocks, functions,
                                 // impls, and modules. All of these have their inner attributes
                                 // placed at the beginning of the rightmost outermost braced group:
@@ -255,7 +260,7 @@ impl AttrTokenStream {
                                     stream.push_stream(inner_attr.tokens());
                                 }
                                 stream.push_stream(delim_tokens.clone());
-                                *tree = TokenTree::Delimited(*span, *delim, stream);
+                                *tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
                                 found = true;
                                 break;
                             }
@@ -477,11 +482,6 @@ impl TokenStream {
         TokenStream::new(vec![TokenTree::token_alone(kind, span)])
     }
 
-    /// Create a token stream containing a single `Delimited`.
-    pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
-        TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
-    }
-
     pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
         let Some(tokens) = node.tokens() else {
             panic!("missing tokens for node at {:?}: {:?}", node.span(), node);
@@ -528,6 +528,7 @@ impl TokenStream {
             }
             token::Interpolated(nt) => TokenTree::Delimited(
                 DelimSpan::from_single(token.span),
+                DelimSpacing::new(Spacing::JointHidden, spacing),
                 Delimiter::Invisible,
                 TokenStream::from_nonterminal_ast(&nt.0).flattened(),
             ),
@@ -538,8 +539,8 @@ impl TokenStream {
     fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
         match tree {
             TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
-            TokenTree::Delimited(span, delim, tts) => {
-                TokenTree::Delimited(*span, *delim, tts.flattened())
+            TokenTree::Delimited(span, spacing, delim, tts) => {
+                TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
             }
         }
     }
@@ -549,7 +550,7 @@ impl TokenStream {
         fn can_skip(stream: &TokenStream) -> bool {
             stream.trees().all(|tree| match tree {
                 TokenTree::Token(token, _) => !matches!(token.kind, token::Interpolated(_)),
-                TokenTree::Delimited(_, _, inner) => can_skip(inner),
+                TokenTree::Delimited(.., inner) => can_skip(inner),
             })
         }
 
@@ -638,9 +639,10 @@ impl TokenStream {
 
                     &TokenTree::Token(..) => i += 1,
 
-                    &TokenTree::Delimited(sp, delim, ref delim_stream) => {
+                    &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
                         if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
-                            let new_tt = TokenTree::Delimited(sp, delim, desugared_delim_stream);
+                            let new_tt =
+                                TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
                             Lrc::make_mut(&mut stream.0)[i] = new_tt;
                             modified = true;
                         }
@@ -668,10 +670,11 @@ impl TokenStream {
                 num_of_hashes = cmp::max(num_of_hashes, count);
             }
 
-            // `/// foo` becomes `doc = r"foo"`.
+            // `/// foo` becomes `[doc = r"foo"]`.
             let delim_span = DelimSpan::from_single(span);
             let body = TokenTree::Delimited(
                 delim_span,
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
                 Delimiter::Bracket,
                 [
                     TokenTree::token_alone(token::Ident(sym::doc, false), span),
@@ -784,6 +787,18 @@ impl DelimSpan {
     }
 }
 
+#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
+pub struct DelimSpacing {
+    pub open: Spacing,
+    pub close: Spacing,
+}
+
+impl DelimSpacing {
+    pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
+        DelimSpacing { open, close }
+    }
+}
+
 // Some types are used a lot. Make sure they don't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
 mod size_asserts {
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index 8bec7647587..0fae2c78e27 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -183,10 +183,10 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
         //
         // FIXME: Incorrect cases:
         // - Let: `let(a, b) = (1, 2)`
-        (Tok(Token { kind: Ident(..), .. }, _), Del(_, Parenthesis, _)) => false,
+        (Tok(Token { kind: Ident(..), .. }, _), Del(_, _, Parenthesis, _)) => false,
 
         // `#` + `[`: `#[attr]`
-        (Tok(Token { kind: Pound, .. }, _), Del(_, Bracket, _)) => false,
+        (Tok(Token { kind: Pound, .. }, _), Del(_, _, Bracket, _)) => false,
 
         _ => true,
     }
@@ -519,7 +519,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                 }
                 *spacing
             }
-            TokenTree::Delimited(dspan, delim, tts) => {
+            TokenTree::Delimited(dspan, spacing, delim, tts) => {
                 self.print_mac_common(
                     None,
                     false,
@@ -529,9 +529,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                     convert_dollar_crate,
                     dspan.entire(),
                 );
-                // FIXME: add two `Spacing` fields to `TokenTree::Delimited`
-                // and use the close delim one here.
-                Spacing::Alone
+                spacing.close
             }
         }
     }
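This is the hunk that changes the `stringify!` output shown above: after printing a delimited group, the printer now returns the recorded close-delimiter spacing instead of a hard-coded `Spacing::Alone`, so a space is only emitted when one actually followed the closing delimiter. A rough, stand-alone sketch of that decision (ignoring the separate `space_between` heuristic in this file):

```rust
// Rough stand-alone sketch, not the real printer: decide whether to emit a
// space after a token based on the `Spacing` it carries.
#[derive(Copy, Clone, Debug)]
enum Spacing {
    Alone,
    Joint,
    JointHidden,
}

fn space_after(spacing: Spacing) -> &'static str {
    match spacing {
        // Whitespace (or EOF) followed the token in the source: print a space.
        Spacing::Alone => " ",
        // The next token followed immediately, e.g. the `;` after `vec![]`.
        Spacing::Joint | Spacing::JointHidden => "",
    }
}

fn main() {
    // Before this commit the close delimiter of a group was always treated as
    // `Alone`, yielding `vec![] ;`; with the recorded close spacing the `;`
    // hugs the `]` again, as in the commit message example.
    let close_spacing_of_bracket = Spacing::Joint; // `]` immediately before `;`
    println!("vec![]{};", space_after(close_spacing_of_bracket));
}
```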
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 3106fad0432..b4693f5a71a 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -6,8 +6,7 @@ use crate::errors::{
 };
 use rustc_ast::ptr::P;
 use rustc_ast::token::{Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree};
-use rustc_ast::tokenstream::{DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{LazyAttrTokenStream, TokenTree};
 use rustc_ast::NodeId;
 use rustc_ast::{self as ast, AttrStyle, Attribute, HasAttrs, HasTokens, MetaItem};
@@ -242,7 +241,7 @@ impl<'a> StripUnconfigured<'a> {
             stream.0.iter().all(|tree| match tree {
                 AttrTokenTree::Attributes(_) => false,
                 AttrTokenTree::Token(..) => true,
-                AttrTokenTree::Delimited(_, _, inner) => can_skip(inner),
+                AttrTokenTree::Delimited(.., inner) => can_skip(inner),
             })
         }
 
@@ -266,9 +265,9 @@ impl<'a> StripUnconfigured<'a> {
                         None.into_iter()
                     }
                 }
-                AttrTokenTree::Delimited(sp, delim, mut inner) => {
+                AttrTokenTree::Delimited(sp, spacing, delim, mut inner) => {
                     inner = self.configure_tokens(&inner);
-                    Some(AttrTokenTree::Delimited(sp, delim, inner)).into_iter()
+                    Some(AttrTokenTree::Delimited(sp, spacing, delim, inner)).into_iter()
                 }
                 AttrTokenTree::Token(ref token, _)
                     if let TokenKind::Interpolated(nt) = &token.kind =>
@@ -376,6 +375,7 @@ impl<'a> StripUnconfigured<'a> {
         // in `#[attr]`, so just use the span of the `#` token.
         let bracket_group = AttrTokenTree::Delimited(
             DelimSpan::from_single(pound_span),
+            DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
             Delimiter::Bracket,
             item.tokens
                 .as_ref()
diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs
index a43b2a00188..ca4a1f327ad 100644
--- a/compiler/rustc_expand/src/mbe.rs
+++ b/compiler/rustc_expand/src/mbe.rs
@@ -13,7 +13,7 @@ pub(crate) mod transcribe;
 
 use metavar_expr::MetaVarExpr;
 use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};
-use rustc_ast::tokenstream::DelimSpan;
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan};
 use rustc_span::symbol::Ident;
 use rustc_span::Span;
 
@@ -68,7 +68,7 @@ pub(crate) enum KleeneOp {
 enum TokenTree {
     Token(Token),
     /// A delimited sequence, e.g. `($e:expr)` (RHS) or `{ $e }` (LHS).
-    Delimited(DelimSpan, Delimited),
+    Delimited(DelimSpan, DelimSpacing, Delimited),
     /// A kleene-style repetition sequence, e.g. `$($e:expr)*` (RHS) or `$($e),*` (LHS).
     Sequence(DelimSpan, SequenceRepetition),
     /// e.g., `$var`.
@@ -99,7 +99,7 @@ impl TokenTree {
             TokenTree::Token(Token { span, .. })
             | TokenTree::MetaVar(span, _)
             | TokenTree::MetaVarDecl(span, _, _) => span,
-            TokenTree::Delimited(span, _)
+            TokenTree::Delimited(span, ..)
             | TokenTree::MetaVarExpr(span, _)
             | TokenTree::Sequence(span, _) => span.entire(),
         }
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index 42c91824baf..0b1f25b67c8 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -290,7 +290,7 @@ fn check_binders(
         }
         // `MetaVarExpr` can not appear in the LHS of a macro arm
         TokenTree::MetaVarExpr(..) => {}
-        TokenTree::Delimited(_, ref del) => {
+        TokenTree::Delimited(.., ref del) => {
             for tt in &del.tts {
                 check_binders(sess, node_id, tt, macros, binders, ops, valid);
             }
@@ -353,7 +353,7 @@ fn check_occurrences(
             };
             check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name);
         }
-        TokenTree::Delimited(_, ref del) => {
+        TokenTree::Delimited(.., ref del) => {
             check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
         }
         TokenTree::Sequence(_, ref seq) => {
@@ -435,8 +435,8 @@ fn check_nested_occurrences(
                 // We check that the meta-variable is correctly used.
                 check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
             }
-            (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(_, del))
-            | (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
+            (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(.., del))
+            | (NestedMacroState::MacroName, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Brace =>
             {
                 let macro_rules = state == NestedMacroState::MacroRulesNotName;
@@ -466,7 +466,7 @@ fn check_nested_occurrences(
                 // We check that the meta-variable is correctly used.
                 check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
             }
-            (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
+            (NestedMacroState::MacroName, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Parenthesis =>
             {
                 state = NestedMacroState::MacroNameParen;
@@ -481,7 +481,7 @@ fn check_nested_occurrences(
                     valid,
                 );
             }
-            (NestedMacroState::MacroNameParen, TokenTree::Delimited(_, del))
+            (NestedMacroState::MacroNameParen, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Brace =>
             {
                 state = NestedMacroState::Empty;
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index 965beb9bf84..b248a1fe349 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -184,7 +184,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
                 TokenTree::Token(token) => {
                     locs.push(MatcherLoc::Token { token: token.clone() });
                 }
-                TokenTree::Delimited(span, delimited) => {
+                TokenTree::Delimited(span, _, delimited) => {
                     let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
                     let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);
 
@@ -335,7 +335,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
         .map(|tt| match tt {
             TokenTree::MetaVarDecl(..) => 1,
             TokenTree::Sequence(_, seq) => seq.num_captures,
-            TokenTree::Delimited(_, delim) => count_metavar_decls(&delim.tts),
+            TokenTree::Delimited(.., delim) => count_metavar_decls(&delim.tts),
             TokenTree::Token(..) => 0,
             TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
         })
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 19734394382..393eec3997b 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -207,7 +207,7 @@ fn expand_macro<'cx>(
     match try_success_result {
         Ok((i, named_matches)) => {
             let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
-                mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span),
+                mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span),
                 _ => cx.span_bug(sp, "malformed macro rhs"),
             };
             let arm_span = rhses[i].span();
@@ -589,7 +589,7 @@ pub fn compile_declarative_macro(
             .map(|lhs| {
                 // Ignore the delimiters around the matcher.
                 match lhs {
-                    mbe::TokenTree::Delimited(_, delimited) => {
+                    mbe::TokenTree::Delimited(.., delimited) => {
                         mbe::macro_parser::compute_locs(&delimited.tts)
                     }
                     _ => sess.diagnostic().span_bug(def.span, "malformed macro lhs"),
@@ -615,7 +615,7 @@ pub fn compile_declarative_macro(
 fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    if let mbe::TokenTree::Delimited(_, delimited) = lhs {
+    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
         check_matcher(sess, def, &delimited.tts)
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
@@ -668,7 +668,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
             | TokenTree::MetaVar(..)
             | TokenTree::MetaVarDecl(..)
             | TokenTree::MetaVarExpr(..) => (),
-            TokenTree::Delimited(_, del) => {
+            TokenTree::Delimited(.., del) => {
                 if !check_lhs_no_empty_seq(sess, &del.tts) {
                     return false;
                 }
@@ -709,14 +709,14 @@ fn check_matcher(sess: &ParseSess, def: &ast::Item, matcher: &[mbe::TokenTree])
 
 fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
     match rhs {
-        mbe::TokenTree::Delimited(_sp, d) => {
+        mbe::TokenTree::Delimited(.., d) => {
             let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                 if let mbe::TokenTree::Token(ident) = ident
                     && let TokenKind::Ident(ident, _) = ident.kind
                     && ident == sym::compile_error
                     && let mbe::TokenTree::Token(bang) = bang
                     && let TokenKind::Not = bang.kind
-                    && let mbe::TokenTree::Delimited(_, del) = args
+                    && let mbe::TokenTree::Delimited(.., del) = args
                     && del.delim != Delimiter::Invisible
                 {
                     true
@@ -773,7 +773,7 @@ impl<'tt> FirstSets<'tt> {
                     | TokenTree::MetaVarExpr(..) => {
                         first.replace_with(TtHandle::TtRef(tt));
                     }
-                    TokenTree::Delimited(span, delimited) => {
+                    TokenTree::Delimited(span, _, delimited) => {
                         build_recur(sets, &delimited.tts);
                         first.replace_with(TtHandle::from_token_kind(
                             token::OpenDelim(delimited.delim),
@@ -842,7 +842,7 @@ impl<'tt> FirstSets<'tt> {
                     first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
-                TokenTree::Delimited(span, delimited) => {
+                TokenTree::Delimited(span, _, delimited) => {
                     first.add_one(TtHandle::from_token_kind(
                         token::OpenDelim(delimited.delim),
                         span.open,
@@ -1087,7 +1087,7 @@ fn check_matcher_core<'tt>(
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(span, d) => {
+            TokenTree::Delimited(span, _, d) => {
                 let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                     token::CloseDelim(d.delim),
                     span.close,
diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs
index 7cb279a9812..c29edc3dc9f 100644
--- a/compiler/rustc_expand/src/mbe/metavar_expr.rs
+++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -35,7 +35,7 @@ impl MetaVarExpr {
     ) -> PResult<'sess, MetaVarExpr> {
         let mut tts = input.trees();
         let ident = parse_ident(&mut tts, sess, outer_span)?;
-        let Some(TokenTree::Delimited(_, Delimiter::Parenthesis, args)) = tts.next() else {
+        let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else {
             let msg = "meta-variable expression parameter must be wrapped in parentheses";
             return Err(sess.span_diagnostic.struct_span_err(ident.span, msg));
         };
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 6a99412fc5b..ab9fb20b364 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -151,7 +151,7 @@ fn parse_tree<'a>(
             // during parsing.
             let mut next = outer_trees.next();
             let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
-            if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
+            if let Some(tokenstream::TokenTree::Delimited(.., Delimiter::Invisible, tts)) = next {
                 trees = Box::new(tts.trees());
                 next = trees.next();
             } else {
@@ -160,7 +160,7 @@ fn parse_tree<'a>(
 
             match next {
                 // `tree` is followed by a delimited set of token trees.
-                Some(&tokenstream::TokenTree::Delimited(delim_span, delim, ref tts)) => {
+                Some(&tokenstream::TokenTree::Delimited(delim_span, _, delim, ref tts)) => {
                     if parsing_patterns {
                         if delim != Delimiter::Parenthesis {
                             span_dollar_dollar_or_metavar_in_the_lhs_err(
@@ -258,8 +258,9 @@ fn parse_tree<'a>(
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
-        &tokenstream::TokenTree::Delimited(span, delim, ref tts) => TokenTree::Delimited(
+        &tokenstream::TokenTree::Delimited(span, spacing, delim, ref tts) => TokenTree::Delimited(
             span,
+            spacing,
             Delimited {
                 delim,
                 tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 854f551ea1b..18707cebcd5 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -7,7 +7,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree,
 use crate::mbe::{self, MetaVarExpr};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{pluralize, PResult};
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
@@ -31,14 +31,24 @@ impl MutVisitor for Marker {
 
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame<'a> {
-    Delimited { tts: &'a [mbe::TokenTree], idx: usize, delim: Delimiter, span: DelimSpan },
-    Sequence { tts: &'a [mbe::TokenTree], idx: usize, sep: Option<Token> },
+    Delimited {
+        tts: &'a [mbe::TokenTree],
+        idx: usize,
+        delim: Delimiter,
+        span: DelimSpan,
+        spacing: DelimSpacing,
+    },
+    Sequence {
+        tts: &'a [mbe::TokenTree],
+        idx: usize,
+        sep: Option<Token>,
+    },
 }
 
 impl<'a> Frame<'a> {
     /// Construct a new frame around the delimited set of tokens.
-    fn new(src: &'a mbe::Delimited, span: DelimSpan) -> Frame<'a> {
-        Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span }
+    fn new(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
+        Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span, spacing }
     }
 }
 
@@ -89,8 +99,10 @@ pub(super) fn transcribe<'a>(
     }
 
     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
-    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
-    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(src, src_span)];
+    // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
+    // choice of spacing values doesn't matter.
+    let mut stack: SmallVec<[Frame<'_>; 1]> =
+        smallvec![Frame::new(src, src_span, DelimSpacing::new(Spacing::Alone, Spacing::Alone))];
 
     // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
     // `repeats` keeps track of where we are in matching at each level, with the last element being
@@ -144,14 +156,19 @@ pub(super) fn transcribe<'a>(
                 // We are done processing a Delimited. If this is the top-level delimited, we are
                 // done. Otherwise, we unwind the result_stack to append what we have produced to
                 // any previous results.
-                Frame::Delimited { delim, span, .. } => {
+                Frame::Delimited { delim, span, mut spacing, .. } => {
+                    // Hack to force-insert a space after `]` in certain case.
+                    // See discussion of the `hex-literal` crate in #114571.
+                    if delim == Delimiter::Bracket {
+                        spacing.close = Spacing::Alone;
+                    }
                     if result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
                         return Ok(TokenStream::new(result));
                     }
 
                     // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, delim, TokenStream::new(result));
+                    let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
                     result = result_stack.pop().unwrap();
                     result.push(tree);
                 }
@@ -258,13 +275,14 @@ pub(super) fn transcribe<'a>(
             // We will produce all of the results of the inside of the `Delimited` and then we will
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
-            mbe::TokenTree::Delimited(mut span, delimited) => {
+            mbe::TokenTree::Delimited(mut span, spacing, delimited) => {
                 mut_visit::visit_delim_span(&mut span, &mut marker);
                 stack.push(Frame::Delimited {
                     tts: &delimited.tts,
                     delim: delimited.delim,
                     idx: 0,
                     span,
+                    spacing: *spacing,
                 });
                 result_stack.push(mem::take(&mut result));
             }
@@ -374,7 +392,7 @@ fn lockstep_iter_size(
 ) -> LockstepIterSize {
     use mbe::TokenTree;
     match tree {
-        TokenTree::Delimited(_, delimited) => {
+        TokenTree::Delimited(.., delimited) => {
             delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                 size.with(lockstep_iter_size(tt, interpolations, repeats))
             })
diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index a6c6d82b575..7a888250ca1 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -4,7 +4,7 @@ use crate::tests::{
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast::visit;
 use rustc_ast::{self as ast, PatKind};
 use rustc_ast_pretty::pprust::item_to_string;
@@ -77,14 +77,14 @@ fn string_to_tts_macro() {
                 TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _),
                 TokenTree::Token(Token { kind: token::Not, .. }, _),
                 TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _),
-                TokenTree::Delimited(_, macro_delim, macro_tts),
+                TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
                 let tts = &macro_tts.trees().collect::<Vec<_>>();
                 match &tts[..] {
                     [
-                        TokenTree::Delimited(_, first_delim, first_tts),
+                        TokenTree::Delimited(.., first_delim, first_tts),
                         TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
-                        TokenTree::Delimited(_, second_delim, second_tts),
+                        TokenTree::Delimited(.., second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
                         let tts = &first_tts.trees().collect::<Vec<_>>();
                         match &tts[..] {
@@ -123,6 +123,9 @@ fn string_to_tts_1() {
             TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
+                // `JointHidden` because the `(` is followed immediately by
+                // `b`, `Alone` because the `)` is followed by whitespace.
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
                 Delimiter::Parenthesis,
                 TokenStream::new(vec![
                     TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)),
@@ -134,6 +137,10 @@ fn string_to_tts_1() {
             ),
             TokenTree::Delimited(
                 DelimSpan::from_pair(sp(13, 14), sp(18, 19)),
+                // First `Alone` because the `{` is followed by whitespace,
+                // second `Alone` because the `}` is followed immediately by
+                // EOF.
+                DelimSpacing::new(Spacing::Alone, Spacing::Alone),
                 Delimiter::Brace,
                 TokenStream::new(vec![
                     TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)),
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index b3b78330801..5308e338d7f 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -5,7 +5,7 @@ use pm::bridge::{
 use pm::{Delimiter, Level};
 use rustc_ast as ast;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{self, Spacing, TokenStream};
+use rustc_ast::tokenstream::{self, DelimSpacing, Spacing, TokenStream};
 use rustc_ast::util::literal::escape_byte_str_symbol;
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
@@ -98,7 +98,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 
         while let Some(tree) = cursor.next() {
             let (Token { kind, span }, joint) = match tree.clone() {
-                tokenstream::TokenTree::Delimited(span, delim, tts) => {
+                tokenstream::TokenTree::Delimited(span, _, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
                     trees.push(TokenTree::Group(Group {
                         delimiter,
@@ -284,10 +284,11 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
     fn to_internal(self) -> SmallVec<[tokenstream::TokenTree; 2]> {
         use rustc_ast::token::*;
 
-        // The code below is conservative and uses `token_alone` in most
-        // places. When the resulting code is pretty-printed by `print_tts` it
-        // ends up with spaces between most tokens, which is safe but ugly.
-        // It's hard in general to do better when working at the token level.
+        // The code below is conservative, using `token_alone`/`Spacing::Alone`
+        // in most places. When the resulting code is pretty-printed by
+        // `print_tts` it ends up with spaces between most tokens, which is
+        // safe but ugly. It's hard in general to do better when working at the
+        // token level.
         let (tree, rustc) = self;
         match tree {
             TokenTree::Punct(Punct { ch, joint, span }) => {
@@ -330,6 +331,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
             TokenTree::Group(Group { delimiter, stream, span: DelimSpan { open, close, .. } }) => {
                 smallvec![tokenstream::TokenTree::Delimited(
                     tokenstream::DelimSpan { open, close },
+                    DelimSpacing::new(Spacing::Alone, Spacing::Alone),
                     delimiter.to_internal(),
                     stream.unwrap_or_default(),
                 )]
diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs
index 930e29f2389..32eb67ba3a1 100644
--- a/compiler/rustc_lint/src/builtin.rs
+++ b/compiler/rustc_lint/src/builtin.rs
@@ -1835,7 +1835,7 @@ impl KeywordIdents {
                         self.check_ident_token(cx, UnderMacro(true), ident);
                     }
                 }
-                TokenTree::Delimited(_, _, tts) => self.check_tokens(cx, tts),
+                TokenTree::Delimited(.., tts) => self.check_tokens(cx, tts),
             }
         }
     }
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 92df2da8710..b1dc1f98777 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -67,7 +67,7 @@ pub(crate) fn parse_token_trees<'a>(
     let (stream, res, unmatched_delims) =
         tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
     match res {
-        Ok(()) if unmatched_delims.is_empty() => Ok(stream),
+        Ok(_open_spacing) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
             // Return error if there are unmatched delimiters or unclosed delimiters.
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index a80e8fac178..8cbadc26635 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -3,7 +3,7 @@ use super::diagnostics::same_indentation_level;
 use super::diagnostics::TokenTreeDiagInfo;
 use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
 use rustc_errors::{Applicability, PErr};
 use rustc_span::symbol::kw;
@@ -25,59 +25,46 @@ impl<'a> TokenTreesReader<'a> {
             token: Token::dummy(),
             diag_info: TokenTreeDiagInfo::default(),
         };
-        let (stream, res) = tt_reader.parse_token_trees(/* is_delimited */ false);
+        let (_open_spacing, stream, res) =
+            tt_reader.parse_token_trees(/* is_delimited */ false);
         (stream, res, tt_reader.diag_info.unmatched_delims)
     }
 
-    // Parse a stream of tokens into a list of `TokenTree`s.
+    // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
+    // the result is that of the opening delimiter.
     fn parse_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (TokenStream, Result<(), Vec<PErr<'a>>>) {
-        self.token = self.string_reader.next_token().0;
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
+        // Move past the opening delimiter.
+        let (_, open_spacing) = self.bump(false);
+
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
                 token::OpenDelim(delim) => {
                     buf.push(match self.parse_token_tree_open_delim(delim) {
                         Ok(val) => val,
-                        Err(errs) => return (TokenStream::new(buf), Err(errs)),
+                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
                     })
                 }
                 token::CloseDelim(delim) => {
                     return (
+                        open_spacing,
                         TokenStream::new(buf),
                         if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
                     );
                 }
                 token::Eof => {
                     return (
+                        open_spacing,
                         TokenStream::new(buf),
                         if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
                     );
                 }
                 _ => {
-                    // Get the next normal token. This might require getting multiple adjacent
-                    // single-char tokens and joining them together.
-                    let (this_spacing, next_tok) = loop {
-                        let (next_tok, is_next_tok_preceded_by_whitespace) =
-                            self.string_reader.next_token();
-                        if is_next_tok_preceded_by_whitespace {
-                            break (Spacing::Alone, next_tok);
-                        } else if let Some(glued) = self.token.glue(&next_tok) {
-                            self.token = glued;
-                        } else {
-                            let this_spacing = if next_tok.is_punct() {
-                                Spacing::Joint
-                            } else if next_tok.kind == token::Eof {
-                                Spacing::Alone
-                            } else {
-                                Spacing::JointHidden
-                            };
-                            break (this_spacing, next_tok);
-                        }
-                    };
-                    let this_tok = std::mem::replace(&mut self.token, next_tok);
+                    // Get the next normal token.
+                    let (this_tok, this_spacing) = self.bump(true);
                     buf.push(TokenTree::Token(this_tok, this_spacing));
                 }
             }
@@ -121,7 +108,7 @@ impl<'a> TokenTreesReader<'a> {
         // Parse the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let (tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
         if let Err(errs) = res {
             return Err(self.unclosed_delim_err(tts, errs));
         }
@@ -130,7 +117,7 @@ impl<'a> TokenTreesReader<'a> {
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
         let sm = self.string_reader.sess.source_map();
 
-        match self.token.kind {
+        let close_spacing = match self.token.kind {
             // Correct delimiter.
             token::CloseDelim(close_delim) if close_delim == open_delim => {
                 let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
@@ -152,7 +139,7 @@ impl<'a> TokenTreesReader<'a> {
                 }
 
                 // Move past the closing delimiter.
-                self.token = self.string_reader.next_token().0;
+                self.bump(false).1
             }
             // Incorrect delimiter.
             token::CloseDelim(close_delim) => {
@@ -196,18 +183,50 @@ impl<'a> TokenTreesReader<'a> {
                 //     bar(baz(
                 // }  // Incorrect delimiter but matches the earlier `{`
                 if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
-                    self.token = self.string_reader.next_token().0;
+                    self.bump(false).1
+                } else {
+                    // The choice of value here doesn't matter.
+                    Spacing::Alone
                 }
             }
             token::Eof => {
                 // Silently recover, the EOF token will be seen again
                 // and an error emitted then. Thus we don't pop from
-                // self.open_braces here.
+                // self.open_braces here. The choice of spacing value here
+                // doesn't matter.
+                Spacing::Alone
             }
             _ => unreachable!(),
-        }
+        };
+
+        let spacing = DelimSpacing::new(open_spacing, close_spacing);
 
-        Ok(TokenTree::Delimited(delim_span, open_delim, tts))
+        Ok(TokenTree::Delimited(delim_span, spacing, open_delim, tts))
+    }
+
+    // Move on to the next token, returning the current token and its spacing.
+    // Will glue adjacent single-char tokens together if `glue` is set.
+    fn bump(&mut self, glue: bool) -> (Token, Spacing) {
+        let (this_spacing, next_tok) = loop {
+            let (next_tok, is_next_tok_preceded_by_whitespace) = self.string_reader.next_token();
+
+            if is_next_tok_preceded_by_whitespace {
+                break (Spacing::Alone, next_tok);
+            } else if glue && let Some(glued) = self.token.glue(&next_tok) {
+                self.token = glued;
+            } else {
+                let this_spacing = if next_tok.is_punct() {
+                    Spacing::Joint
+                } else if next_tok.kind == token::Eof {
+                    Spacing::Alone
+                } else {
+                    Spacing::JointHidden
+                };
+                break (this_spacing, next_tok);
+            }
+        };
+        let this_tok = std::mem::replace(&mut self.token, next_tok);
+        (this_tok, this_spacing)
     }
 
     fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
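The new `bump` helper is also where every `Spacing` value originates: a token is `Alone` when the following token was preceded by whitespace (or is EOF), `Joint` when an unseparated punctuation token follows, and `JointHidden` otherwise. A detached sketch of that classification (token gluing omitted), with plain booleans standing in for the queries on the real lexer tokens:

```rust
// Detached sketch of the spacing classification done in `bump`; the booleans
// stand in for `is_next_tok_preceded_by_whitespace`, `next_tok.is_punct()`
// and `next_tok.kind == token::Eof`.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
    JointHidden,
}

fn classify(preceded_by_whitespace: bool, next_is_punct: bool, next_is_eof: bool) -> Spacing {
    if preceded_by_whitespace {
        Spacing::Alone
    } else if next_is_punct {
        Spacing::Joint
    } else if next_is_eof {
        Spacing::Alone
    } else {
        Spacing::JointHidden
    }
}

fn main() {
    // `a(` : the identifier `a` is followed immediately by `(`, which counts
    // as a delimiter rather than punctuation, so `a` is `JointHidden`.
    assert_eq!(classify(false, false, false), Spacing::JointHidden);
    // `b:` : `b` is followed immediately by the punctuation `:`, so `Joint`.
    assert_eq!(classify(false, true, false), Spacing::Joint);
    // `let a` : `let` is followed by a space, so `Alone`.
    assert_eq!(classify(true, false, false), Spacing::Alone);
}
```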
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index c66a7176aab..5e8447030f1 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,7 +1,7 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream};
-use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;
@@ -389,7 +389,7 @@ fn make_token_stream(
     #[derive(Debug)]
     struct FrameData {
         // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(Delimiter, Span)>,
+        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
         inner: Vec<AttrTokenTree>,
     }
     let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -397,21 +397,23 @@ fn make_token_stream(
     while let Some((token, spacing)) = token_and_spacing {
         match token {
             FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
-                stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
+                stack
+                    .push(FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] });
             }
             FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
                 let frame_data = stack
                     .pop()
                     .unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}"));
 
-                let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
+                let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
                 assert_eq!(
                     open_delim, delim,
                     "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                 );
                 let dspan = DelimSpan::from_pair(open_sp, span);
+                let dspacing = DelimSpacing::new(open_spacing, spacing);
                 let stream = AttrTokenStream::new(frame_data.inner);
-                let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
+                let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
                 stack
                     .last_mut()
                     .unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}"))
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 406a6def019..3c0627526be 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -2276,7 +2276,7 @@ impl<'a> Parser<'a> {
         }
 
         if self.token.kind == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((_, Delimiter::Parenthesis, _)))
+            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 29709d92fad..2baedb2766f 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -20,7 +20,7 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
@@ -240,7 +240,7 @@ struct TokenCursor {
     // Token streams surrounding the current one. The delimiters for stack[n]'s
     // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
     // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
+    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
 }
 
 impl TokenCursor {
@@ -264,24 +264,25 @@ impl TokenCursor {
                         ));
                         return (token.clone(), spacing);
                     }
-                    &TokenTree::Delimited(sp, delim, ref tts) => {
+                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                         let trees = tts.clone().into_trees();
-                        self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
+                        self.stack.push((
+                            mem::replace(&mut self.tree_cursor, trees),
+                            sp,
+                            spacing,
+                            delim,
+                        ));
                         if delim != Delimiter::Invisible {
-                            // FIXME: add two `Spacing` fields to `TokenTree::Delimited`
-                            // and use the open delim one here.
-                            return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
+                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
+            } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
                 // We have exhausted this token stream. Move back to its parent token stream.
                 self.tree_cursor = tree_cursor;
                 if delim != Delimiter::Invisible {
-                    // FIXME: add two `Spacing` fields to `TokenTree::Delimited` and
-                    // use the close delim one here.
-                    return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
+                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
                 // No close delimiter to return; continue on to the next iteration.
             } else {
@@ -1074,7 +1075,7 @@ impl<'a> Parser<'a> {
             return looker(&self.token);
         }
 
-        if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
+        if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
             && delim != Delimiter::Invisible
         {
             // We are not in the outermost token stream, and the token stream
@@ -1083,7 +1084,7 @@ impl<'a> Parser<'a> {
             let tree_cursor = &self.token_cursor.tree_cursor;
             let all_normal = (0..dist).all(|i| {
                 let token = tree_cursor.look_ahead(i);
-                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
+                !matches!(token, Some(TokenTree::Delimited(.., Delimiter::Invisible, _)))
             });
             if all_normal {
                 // There were no skipped delimiters. Do lookahead by plain indexing.
@@ -1092,7 +1093,7 @@ impl<'a> Parser<'a> {
                         // Indexing stayed within the current token stream.
                         match tree {
                             TokenTree::Token(token, _) => looker(token),
-                            TokenTree::Delimited(dspan, delim, _) => {
+                            TokenTree::Delimited(dspan, _, delim, _) => {
                                 looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                             }
                         }
@@ -1270,7 +1271,7 @@ impl<'a> Parser<'a> {
             || self.check(&token::OpenDelim(Delimiter::Brace));
 
         delimited.then(|| {
-            let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else {
+            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                 unreachable!()
             };
             DelimArgs { dspan, delim, tokens }
@@ -1294,7 +1295,7 @@ impl<'a> Parser<'a> {
             token::OpenDelim(..) => {
                 // Grab the tokens within the delimiters.
                 let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
+                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1314,7 +1315,7 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, delim, stream)
+                TokenTree::Delimited(span, spacing, delim, stream)
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
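The parser hunks above are the core of the change: each `TokenCursor` stack entry now remembers the delimiter's `DelimSpacing`, so the synthesized open and close delimiter tokens report `spacing.open` and `spacing.close` instead of the old hard-coded `Spacing::Alone`. Below is a minimal, self-contained sketch of that idea; the types are toy stand-ins, not rustc's actual definitions, and only the `open`/`close` field split mirrors the patch.
```rust
// Toy stand-ins for rustc's types; only the open/close split mirrors the patch.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone, // a printable space may follow this token
    Joint, // the next token follows immediately, with no space in between
}

#[derive(Clone, Copy, Debug)]
struct DelimSpacing {
    open: Spacing,
    close: Spacing,
}

impl DelimSpacing {
    fn new(open: Spacing, close: Spacing) -> Self {
        DelimSpacing { open, close }
    }
}

fn main() {
    // For an item like `mac!(...);` the closing paren is immediately
    // followed by `;`, so a pretty-printer can skip the space before it.
    let spacing = DelimSpacing::new(Spacing::Alone, Spacing::Joint);
    assert_eq!(spacing.close, Spacing::Joint);
    println!("{spacing:?}");
}
```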
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index 66d10f2368b..605f9e496c7 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -40,7 +40,7 @@ pub(super) fn render_macro_matcher(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Stri
     printer.zerobreak();
     printer.ibox(0);
     match matcher {
-        TokenTree::Delimited(_span, _delim, tts) => print_tts(&mut printer, tts),
+        TokenTree::Delimited(_span, _spacing, _delim, tts) => print_tts(&mut printer, tts),
         // Matcher which is not a Delimited is unexpected and should've failed
         // to compile, but we render whatever it is wrapped in parens.
         TokenTree::Token(..) => print_tt(&mut printer, matcher),
@@ -97,7 +97,7 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
                 printer.hardbreak()
             }
         }
-        TokenTree::Delimited(_span, delim, tts) => {
+        TokenTree::Delimited(_span, _spacing, delim, tts) => {
             let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
             printer.word(open_delim);
             if !tts.is_empty() {
@@ -158,7 +158,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
                 (_, token::Pound) => (true, Pound),
                 (_, _) => (true, Other),
             },
-            TokenTree::Delimited(_, delim, _) => match (state, delim) {
+            TokenTree::Delimited(.., delim, _) => match (state, delim) {
                 (Dollar, Delimiter::Parenthesis) => (false, DollarParen),
                 (Pound | PoundBang, Delimiter::Bracket) => (false, Other),
                 (Ident, Delimiter::Parenthesis | Delimiter::Bracket) => (false, Other),
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
index d4828778be2..b1aa472aa03 100644
--- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -92,7 +92,7 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
         {
             return Some(span);
         }
-        if let TokenTree::Delimited(_, _, tts) = &curr {
+        if let TokenTree::Delimited(.., tts) = &curr {
             let span = contains_unhygienic_crate_reference(tts);
             if span.is_some() {
                 return span;
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index 76553466e48..b4c58d2fefb 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -708,7 +708,7 @@ struct MacroArgParser {
 fn last_tok(tt: &TokenTree) -> Token {
     match *tt {
         TokenTree::Token(ref t, _) => t.clone(),
-        TokenTree::Delimited(delim_span, delim, _) => Token {
+        TokenTree::Delimited(delim_span, _, delim, _) => Token {
             kind: TokenKind::CloseDelim(delim),
             span: delim_span.close,
         },
@@ -925,7 +925,7 @@ impl MacroArgParser {
                     self.add_meta_variable(&mut iter)?;
                 }
                 TokenTree::Token(ref t, _) => self.update_buffer(t),
-                &TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
+                &TokenTree::Delimited(_dspan, _spacing, delimited, ref tts) => {
                     if !self.buf.is_empty() {
                         if next_space(&self.last_tok.kind) == SpaceState::Always {
                             self.add_separator();
@@ -1167,7 +1167,7 @@ impl<'a> MacroParser<'a> {
         let tok = self.toks.next()?;
         let (lo, args_paren_kind) = match tok {
             TokenTree::Token(..) => return None,
-            &TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d),
+            &TokenTree::Delimited(delimited_span, _, d, _) => (delimited_span.open.lo(), d),
         };
         let args = TokenStream::new(vec![tok.clone()]);
         match self.toks.next()? {
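The librustdoc, clippy, and rustfmt hunks above are purely mechanical: `TokenTree::Delimited` gained a spacing field, so existing pattern matches either add a `_`/`_spacing` binding in the new position or switch to `..` when only some fields matter. A hedged sketch of both pattern styles, using toy types rather than rustc's:
```rust
#[allow(dead_code)]
#[derive(Debug)]
enum Delim { Paren, Bracket, Brace }

#[derive(Debug)]
enum TokenTree {
    Token(String),
    // (span, spacing, delimiter, inner trees) -- field order mirrors the patch.
    Delimited(u32, (bool, bool), Delim, Vec<TokenTree>),
}

fn count_tokens(tt: &TokenTree) -> usize {
    match tt {
        TokenTree::Token(_) => 1,
        // `..` skips the span and spacing fields we do not need here,
        // as in the clippy and librustdoc hunks.
        TokenTree::Delimited(.., trees) => trees.iter().map(count_tokens).sum(),
    }
}

fn delimiter(tt: &TokenTree) -> Option<&Delim> {
    match tt {
        TokenTree::Token(_) => None,
        // Positional `_` bindings, as in the rustfmt hunks.
        TokenTree::Delimited(_, _, delim, _) => Some(delim),
    }
}

fn main() {
    let tt = TokenTree::Delimited(
        0,
        (true, false),
        Delim::Paren,
        vec![TokenTree::Token("a".into()), TokenTree::Token("b".into())],
    );
    println!("{} tokens inside {:?}", count_tokens(&tt), delimiter(&tt));
}
```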
diff --git a/tests/ui/async-await/issues/issue-60674.stdout b/tests/ui/async-await/issues/issue-60674.stdout
index 5ca715bbded..df129b033d2 100644
--- a/tests/ui/async-await/issues/issue-60674.stdout
+++ b/tests/ui/async-await/issues/issue-60674.stdout
@@ -1,3 +1,3 @@
 async fn f(mut x: u8) {}
-async fn g((mut x, y, mut z) : (u8, u8, u8)) {}
-async fn g(mut x: u8, (a, mut b, c) : (u8, u8, u8), y: u8) {}
+async fn g((mut x, y, mut z): (u8, u8, u8)) {}
+async fn g(mut x: u8, (a, mut b, c): (u8, u8, u8), y: u8) {}
diff --git a/tests/ui/infinite/issue-41731-infinite-macro-print.stderr b/tests/ui/infinite/issue-41731-infinite-macro-print.stderr
index e30b2039d69..71510816d0b 100644
--- a/tests/ui/infinite/issue-41731-infinite-macro-print.stderr
+++ b/tests/ui/infinite/issue-41731-infinite-macro-print.stderr
@@ -14,13 +14,13 @@ LL |     stack!("overflow");
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `stack! { "overflow" }`
-   = note: to `print! (stack! ("overflow")) ;`
+   = note: to `print! (stack! ("overflow"));`
    = note: expanding `print! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))); }`
    = note: expanding `stack! { "overflow" }`
-   = note: to `print! (stack! ("overflow")) ;`
+   = note: to `print! (stack! ("overflow"));`
    = note: expanding `print! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))); }`
 
 error: format argument must be a string literal
   --> $DIR/issue-41731-infinite-macro-print.rs:14:5
diff --git a/tests/ui/infinite/issue-41731-infinite-macro-println.stderr b/tests/ui/infinite/issue-41731-infinite-macro-println.stderr
index 66b466dafa0..645176d45cb 100644
--- a/tests/ui/infinite/issue-41731-infinite-macro-println.stderr
+++ b/tests/ui/infinite/issue-41731-infinite-macro-println.stderr
@@ -14,13 +14,13 @@ LL |     stack!("overflow");
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `stack! { "overflow" }`
-   = note: to `println! (stack! ("overflow")) ;`
+   = note: to `println! (stack! ("overflow"));`
    = note: expanding `println! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))); }`
    = note: expanding `stack! { "overflow" }`
-   = note: to `println! (stack! ("overflow")) ;`
+   = note: to `println! (stack! ("overflow"));`
    = note: expanding `println! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))); }`
 
 error: format argument must be a string literal
   --> $DIR/issue-41731-infinite-macro-println.rs:14:5
diff --git a/tests/ui/macros/stringify.rs b/tests/ui/macros/stringify.rs
index e7a21010a0b..a9e9c4e7e5f 100644
--- a/tests/ui/macros/stringify.rs
+++ b/tests/ui/macros/stringify.rs
@@ -87,7 +87,7 @@ fn test_expr() {
     c1!(expr, [ f::<'a, u8, 1>() ], "f::<'a, u8, 1>()");
     c1!(expr, [ f(true) ], "f(true)");
     c2!(expr, [ f(true,) ], "f(true)", "f(true,)");
-    c2!(expr, [ ()() ], "()()", "() ()");
+    c1!(expr, [ ()() ], "()()");
 
     // ExprKind::MethodCall
     c1!(expr, [ x.f() ], "x.f()");
@@ -303,7 +303,7 @@ fn test_expr() {
     c1!(expr, [ Struct { x: true, ..base } ], "Struct { x: true, ..base }");
 
     // ExprKind::Repeat
-    c2!(expr, [ [(); 0] ], "[(); 0]", "[() ; 0]");
+    c1!(expr, [ [(); 0] ], "[(); 0]");
 
     // ExprKind::Paren
     c1!(expr, [ (expr) ], "(expr)");
@@ -340,19 +340,19 @@ fn test_item() {
     c2!(item,
         [ pub use crate::{a, b::c}; ],
         "pub use crate::{a, b::c};",
-        "pub use crate::{ a, b::c } ;"
+        "pub use crate::{ a, b::c };" // FIXME
     );
     c1!(item, [ pub use A::*; ], "pub use A::*;");
 
     // ItemKind::Static
-    c2!(item, [ pub static S: () = {}; ], "pub static S: () = {};", "pub static S: () = {} ;");
-    c2!(item, [ static mut S: () = {}; ], "static mut S: () = {};", "static mut S: () = {} ;");
-    c2!(item, [ static S: (); ], "static S: ();", "static S: () ;");
-    c2!(item, [ static mut S: (); ], "static mut S: ();", "static mut S: () ;");
+    c1!(item, [ pub static S: () = {}; ], "pub static S: () = {};");
+    c1!(item, [ static mut S: () = {}; ], "static mut S: () = {};");
+    c1!(item, [ static S: (); ], "static S: ();");
+    c1!(item, [ static mut S: (); ], "static mut S: ();");
 
     // ItemKind::Const
-    c2!(item, [ pub const S: () = {}; ], "pub const S: () = {};", "pub const S: () = {} ;");
-    c2!(item, [ const S: (); ], "const S: ();", "const S: () ;");
+    c1!(item, [ pub const S: () = {}; ], "pub const S: () = {};");
+    c1!(item, [ const S: (); ], "const S: ();");
 
     // ItemKind::Fn
     c1!(item,
@@ -429,8 +429,8 @@ fn test_item() {
 
     // ItemKind::Struct
     c1!(item, [ pub struct Unit; ], "pub struct Unit;");
-    c2!(item, [ struct Tuple(); ], "struct Tuple();", "struct Tuple() ;");
-    c2!(item, [ struct Tuple(T); ], "struct Tuple(T);", "struct Tuple(T) ;");
+    c1!(item, [ struct Tuple(); ], "struct Tuple();");
+    c1!(item, [ struct Tuple(T); ], "struct Tuple(T);");
     c1!(item, [ struct Struct {} ], "struct Struct {}");
     c2!(item,
         [
@@ -489,8 +489,8 @@ fn test_item() {
     c1!(item, [ impl ~const Struct {} ], "impl ~const Struct {}");
 
     // ItemKind::MacCall
-    c2!(item, [ mac!(...); ], "mac!(...);", "mac!(...) ;");
-    c2!(item, [ mac![...]; ], "mac![...];", "mac![...] ;");
+    c1!(item, [ mac!(...); ], "mac!(...);");
+    c1!(item, [ mac![...]; ], "mac![...];");
     c1!(item, [ mac! { ... } ], "mac! { ... }");
 
     // ItemKind::MacroDef
@@ -500,7 +500,7 @@ fn test_item() {
                 () => {};
             }
         ],
-        "macro_rules! stringify { () => {} ; }"
+        "macro_rules! stringify { () => {}; }"
     );
     c2!(item,
         [ pub macro stringify() {} ],
@@ -627,7 +627,7 @@ fn test_stmt() {
     c2!(stmt,
         [ let (a, b): (u32, u32) = (1, 2) ],
         "let (a, b): (u32, u32) = (1, 2);",
-        "let(a, b) : (u32, u32) = (1, 2)"
+        "let(a, b): (u32, u32) = (1, 2)" // FIXME
     );
 
     // StmtKind::Item
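The `stringify.rs` updates above are the user-visible payoff: forms where a closing delimiter is immediately followed by `;` now round-trip through the token-stream pretty-printer without the stray space, so several `c2!` cases (which needed separate expected strings for the AST and token-stream printers) collapse into `c1!`. As a rough standalone illustration on a toolchain that includes this change (the exact output depends on the compiler version, so this is a sketch rather than a guarantee):
```rust
// With delimiter spacing tracked, `stringify!` keeps the `;` attached to the
// closing brace instead of printing `{} ;`.
fn main() {
    let s = stringify!(static mut S: () = {};);
    assert_eq!(s, "static mut S: () = {};");
    println!("{s}");
}
```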
diff --git a/tests/ui/macros/trace-macro.stderr b/tests/ui/macros/trace-macro.stderr
index 43272248c28..dac4cc12f84 100644
--- a/tests/ui/macros/trace-macro.stderr
+++ b/tests/ui/macros/trace-macro.stderr
@@ -5,5 +5,5 @@ LL |     println!("Hello, World!");
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `println! { "Hello, World!" }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! ("Hello, World!")) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! ("Hello, World!")); }`
 
diff --git a/tests/ui/macros/trace_faulty_macros.stderr b/tests/ui/macros/trace_faulty_macros.stderr
index d39b4105d35..81047c7a21a 100644
--- a/tests/ui/macros/trace_faulty_macros.stderr
+++ b/tests/ui/macros/trace_faulty_macros.stderr
@@ -20,7 +20,7 @@ LL |     my_faulty_macro!();
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `my_faulty_macro! {  }`
-   = note: to `my_faulty_macro! (bcd) ;`
+   = note: to `my_faulty_macro! (bcd);`
    = note: expanding `my_faulty_macro! { bcd }`
 
 error: recursion limit reached while expanding `my_recursive_macro!`
@@ -42,13 +42,13 @@ LL |     my_recursive_macro!();
    |     ^^^^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
 
 error: expected expression, found pattern `A { a: a, b: 0, c: _, .. }`
   --> $DIR/trace_faulty_macros.rs:16:9
@@ -98,7 +98,7 @@ LL |     let a = pat_macro!();
    |             ^^^^^^^^^^^^
    |
    = note: expanding `pat_macro! {  }`
-   = note: to `pat_macro! (A { a : a, b : 0, c : _, .. }) ;`
+   = note: to `pat_macro! (A { a : a, b : 0, c : _, .. });`
    = note: expanding `pat_macro! { A { a : a, b : 0, c : _, .. } }`
    = note: to `A { a: a, b: 0, c: _, .. }`
 
diff --git a/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout b/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout
index 0234c2a2b09..8459f4e6305 100644
--- a/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout
+++ b/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout
@@ -89,7 +89,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/allowed-attr-stmt-expr.rs:57:33: 57:34 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro!("{}", string) ;
+PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro!("{}", string);
 PRINT-ATTR RE-COLLECTED (DISPLAY): #[expect_my_macro_stmt] my_macro! ("{}", string) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
@@ -143,7 +143,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/allowed-attr-stmt-expr.rs:61:28: 61:29 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): second_make_stmt!(#[allow(dead_code)] struct Bar {}) ;
+PRINT-ATTR INPUT (DISPLAY): second_make_stmt!(#[allow(dead_code)] struct Bar {});
 PRINT-ATTR RE-COLLECTED (DISPLAY): second_make_stmt! (#[allow(dead_code)] struct Bar {}) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/proc-macro/attr-complex-fn.stdout b/tests/ui/proc-macro/attr-complex-fn.stdout
index fb08f528dc5..7c23d1ecae4 100644
--- a/tests/ui/proc-macro/attr-complex-fn.stdout
+++ b/tests/ui/proc-macro/attr-complex-fn.stdout
@@ -1,4 +1,4 @@
-PRINT-ATTR INPUT (DISPLAY): fn foo<T: MyTrait<MyStruct<{ true } >>>() {}
+PRINT-ATTR INPUT (DISPLAY): fn foo<T: MyTrait<MyStruct<{ true }>>>() {}
 PRINT-ATTR RE-COLLECTED (DISPLAY): fn foo < T : MyTrait < MyStruct < { true } >>> () {}
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
@@ -77,7 +77,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-complex-fn.rs:19:42: 19:44 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl<T> MyTrait<T> for MyStruct<{ true } > { #![rustc_dummy] }
+PRINT-ATTR INPUT (DISPLAY): impl<T> MyTrait<T> for MyStruct<{ true }> { #![rustc_dummy] }
 PRINT-ATTR RE-COLLECTED (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > { #![rustc_dummy] }
 PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > { #! [rustc_dummy] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
diff --git a/tests/ui/proc-macro/attr-stmt-expr.stdout b/tests/ui/proc-macro/attr-stmt-expr.stdout
index 9e671c188e9..5e09198bbd4 100644
--- a/tests/ui/proc-macro/attr-stmt-expr.stdout
+++ b/tests/ui/proc-macro/attr-stmt-expr.stdout
@@ -72,7 +72,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-stmt-expr.rs:49:33: 49:34 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro!("{}", string) ;
+PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro!("{}", string);
 PRINT-ATTR RE-COLLECTED (DISPLAY): #[expect_my_macro_stmt] my_macro! ("{}", string) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
@@ -126,7 +126,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-stmt-expr.rs:53:28: 53:29 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): second_make_stmt!(#[allow(dead_code)] struct Bar {}) ;
+PRINT-ATTR INPUT (DISPLAY): second_make_stmt!(#[allow(dead_code)] struct Bar {});
 PRINT-ATTR RE-COLLECTED (DISPLAY): second_make_stmt! (#[allow(dead_code)] struct Bar {}) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs b/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs
index 7b6bcb90dc5..76346d9a172 100644
--- a/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs
+++ b/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs
@@ -17,7 +17,7 @@ pub fn expect_let(attr: TokenStream, item: TokenStream) -> TokenStream {
 #[proc_macro_attribute]
 pub fn expect_print_stmt(attr: TokenStream, item: TokenStream) -> TokenStream {
     assert!(attr.to_string().is_empty());
-    assert_eq!(item.to_string(), "println!(\"{}\", string) ;");
+    assert_eq!(item.to_string(), "println!(\"{}\", string);");
     item
 }
 
diff --git a/tests/ui/proc-macro/cfg-eval-inner.stdout b/tests/ui/proc-macro/cfg-eval-inner.stdout
index e297783286f..43ae47c201f 100644
--- a/tests/ui/proc-macro/cfg-eval-inner.stdout
+++ b/tests/ui/proc-macro/cfg-eval-inner.stdout
@@ -3,7 +3,7 @@ Foo<[u8;
 {
     #![rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
     { field: [u8; { #![rustc_dummy(another_cursed_inner)] 1 }] } 0
-}] > { #![rustc_dummy(evaluated_attr)] fn bar() {} }
+}]> { #![rustc_dummy(evaluated_attr)] fn bar() {} }
 PRINT-ATTR RE-COLLECTED (DISPLAY): impl Foo <
 [u8;
 {
diff --git a/tests/ui/proc-macro/dollar-crate-issue-57089.stdout b/tests/ui/proc-macro/dollar-crate-issue-57089.stdout
index de4f0c000b6..7f97ac9dbc2 100644
--- a/tests/ui/proc-macro/dollar-crate-issue-57089.stdout
+++ b/tests/ui/proc-macro/dollar-crate-issue-57089.stdout
@@ -1,4 +1,5 @@
-PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
+PRINT-BANG INPUT (DISPLAY): struct M($crate :: S);
+PRINT-BANG RE-COLLECTED (DISPLAY): struct M($crate :: S) ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -38,7 +39,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate-issue-57089.rs:17:32: 17:33 (#3),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
+PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S);
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/dollar-crate-issue-62325.stdout b/tests/ui/proc-macro/dollar-crate-issue-62325.stdout
index c7e72bf4ff5..96161049e30 100644
--- a/tests/ui/proc-macro/dollar-crate-issue-62325.stdout
+++ b/tests/ui/proc-macro/dollar-crate-issue-62325.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): struct A(identity! ($crate :: S)) ;
+PRINT-ATTR INPUT (DISPLAY): struct A(identity! ($crate :: S));
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A(identity! ($crate :: S)) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -53,7 +54,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate-issue-62325.rs:19:35: 19:36 (#3),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct B(identity! ($crate :: S)) ;
+PRINT-ATTR INPUT (DISPLAY): struct B(identity! ($crate :: S));
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct B(identity! ($crate :: S)) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/dollar-crate.stdout b/tests/ui/proc-macro/dollar-crate.stdout
index 0f5f87ceca2..a6bdab00441 100644
--- a/tests/ui/proc-macro/dollar-crate.stdout
+++ b/tests/ui/proc-macro/dollar-crate.stdout
@@ -1,4 +1,5 @@
-PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
+PRINT-BANG INPUT (DISPLAY): struct M($crate :: S);
+PRINT-BANG RE-COLLECTED (DISPLAY): struct M($crate :: S) ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -38,7 +39,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate.rs:20:36: 20:37 (#3),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
+PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S);
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -78,7 +80,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate.rs:24:32: 24:33 (#3),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
+PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S);
+PRINT-DERIVE RE-COLLECTED (DISPLAY): struct D($crate :: S) ;
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -118,7 +121,8 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate.rs:27:32: 27:33 (#3),
     },
 ]
-PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
+PRINT-BANG INPUT (DISPLAY): struct M($crate :: S);
+PRINT-BANG RE-COLLECTED (DISPLAY): struct M($crate :: S) ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -158,7 +162,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         span: $DIR/auxiliary/dollar-crate-external.rs:7:32: 7:33 (#14),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
+PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S);
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -198,7 +203,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/auxiliary/dollar-crate-external.rs:11:28: 11:29 (#14),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
+PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S);
+PRINT-DERIVE RE-COLLECTED (DISPLAY): struct D($crate :: S) ;
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout b/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout
index 40181efc0b8..eda2f433bce 100644
--- a/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout
+++ b/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout
@@ -122,7 +122,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #3 bytes(306..355),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
@@ -202,7 +202,8 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #7 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { {} } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { {} }; 0 }, }
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { {} } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "enum",
@@ -280,7 +281,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #11 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
@@ -358,7 +359,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #15 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 + 1 } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
@@ -449,7 +450,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #19 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH + 1 } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/proc-macro/inner-attrs.stdout b/tests/ui/proc-macro/inner-attrs.stdout
index 7c10388a9dc..037ec044e42 100644
--- a/tests/ui/proc-macro/inner-attrs.stdout
+++ b/tests/ui/proc-macro/inner-attrs.stdout
@@ -579,7 +579,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
     },
 ]
 PRINT-DERIVE INPUT (DISPLAY): struct MyDerivePrint
-{ field: [u8; { match true { _ => { #![rustc_dummy(third)] true } } ; 0 }] }
+{ field: [u8; { match true { _ => { #![rustc_dummy(third)] true } }; 0 }] }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct MyDerivePrint
 {
     field :
@@ -717,7 +717,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/inner-attrs.rs:49:29: 49:40 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 });
+PRINT-ATTR RE-COLLECTED (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
 PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): (3, 4, { #! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Group {
@@ -832,7 +833,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/inner-attrs.rs:56:29: 56:40 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 });
+PRINT-ATTR RE-COLLECTED (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
 PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): (3, 4, { #! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Group {
diff --git a/tests/ui/proc-macro/issue-75734-pp-paren.stdout b/tests/ui/proc-macro/issue-75734-pp-paren.stdout
index 0668e8c42f5..ee135366fb6 100644
--- a/tests/ui/proc-macro/issue-75734-pp-paren.stdout
+++ b/tests/ui/proc-macro/issue-75734-pp-paren.stdout
@@ -1,4 +1,4 @@
-PRINT-ATTR INPUT (DISPLAY): fn main() { &|_: u8| {} ; mul_2!(1 + 1) ; }
+PRINT-ATTR INPUT (DISPLAY): fn main() { &|_: u8| {}; mul_2!(1 + 1); }
 PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): fn main() { &| _ : u8 | {} ; mul_2! (1 + 1) ; }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/proc-macro/issue-75930-derive-cfg.stdout b/tests/ui/proc-macro/issue-75930-derive-cfg.stdout
index 4e48253ad83..47f26451d1c 100644
--- a/tests/ui/proc-macro/issue-75930-derive-cfg.stdout
+++ b/tests/ui/proc-macro/issue-75930-derive-cfg.stdout
@@ -10,7 +10,7 @@ struct Foo<#[cfg(FALSE)] A, B>
         {
             #[cfg(FALSE)] true => {}, #[cfg_attr(not(FALSE), allow(warnings))]
             false => {}, _ => {}
-        } ; #[print_helper(should_be_removed)] fn removed_fn()
+        }; #[print_helper(should_be_removed)] fn removed_fn()
         { #![cfg(FALSE)] } #[print_helper(c)] #[cfg(not(FALSE))] fn kept_fn()
         { #![cfg(not(FALSE))] let my_val = true; } enum TupleEnum
         {
@@ -18,7 +18,7 @@ struct Foo<#[cfg(FALSE)] A, B>
             #[cfg(FALSE)] String, u8)
         } struct
         TupleStruct(#[cfg(FALSE)] String, #[cfg(not(FALSE))] i32,
-        #[cfg(FALSE)] bool, u8) ; fn plain_removed_fn()
+        #[cfg(FALSE)] bool, u8); fn plain_removed_fn()
         { #![cfg_attr(not(FALSE), cfg(FALSE))] } 0
     }], #[print_helper(d)] fourth: B
 }
@@ -34,7 +34,7 @@ struct Foo <#[cfg(FALSE)] A, B >
         {
             #[cfg(FALSE)] true => {}, #[cfg_attr(not(FALSE), allow(warnings))]
             false => {}, _ => {}
-        } ; #[print_helper(should_be_removed)] fn removed_fn()
+        }; #[print_helper(should_be_removed)] fn removed_fn()
         { #![cfg(FALSE)] } #[print_helper(c)] #[cfg(not(FALSE))] fn kept_fn()
         { #![cfg(not(FALSE))] let my_val = true; } enum TupleEnum
         {
@@ -42,7 +42,7 @@ struct Foo <#[cfg(FALSE)] A, B >
             #[cfg(FALSE)] String, u8)
         } struct
         TupleStruct(#[cfg(FALSE)] String, #[cfg(not(FALSE))] i32,
-        #[cfg(FALSE)] bool, u8) ; fn plain_removed_fn()
+        #[cfg(FALSE)] bool, u8); fn plain_removed_fn()
         { #![cfg_attr(not(FALSE), cfg(FALSE))] } 0
     }], #[print_helper(d)] fourth: B
 }
@@ -1326,11 +1326,11 @@ PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_hel
     [u8;
     {
         #[cfg(not(FALSE))] struct Inner; match true
-        { #[allow(warnings)] false => {}, _ => {} } ; #[print_helper(c)]
+        { #[allow(warnings)] false => {}, _ => {} }; #[print_helper(c)]
         #[cfg(not(FALSE))] fn kept_fn()
         { #![cfg(not(FALSE))] let my_val = true; } enum TupleEnum
         { Foo(#[cfg(not(FALSE))] i32, u8) } struct
-        TupleStruct(#[cfg(not(FALSE))] i32, u8) ; 0
+        TupleStruct(#[cfg(not(FALSE))] i32, u8); 0
     }], #[print_helper(d)] fourth: B
 }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_helper(b)] struct Foo <B >
diff --git a/tests/ui/proc-macro/macro-rules-derive-cfg.stdout b/tests/ui/proc-macro/macro-rules-derive-cfg.stdout
index 60beef1e33a..89817a1efdc 100644
--- a/tests/ui/proc-macro/macro-rules-derive-cfg.stdout
+++ b/tests/ui/proc-macro/macro-rules-derive-cfg.stdout
@@ -4,7 +4,7 @@ PRINT-DERIVE INPUT (DISPLAY): struct Foo
     [bool ;
     {
         let a = #[rustc_dummy(first)] #[rustc_dummy(second)]
-        { #![allow(unused)] 30 } ; 0
+        { #![allow(unused)] 30 }; 0
     }]
 }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct Foo
diff --git a/tests/ui/proc-macro/meta-macro-hygiene.stdout b/tests/ui/proc-macro/meta-macro-hygiene.stdout
index 4b2b6aa058d..6d10cc604c2 100644
--- a/tests/ui/proc-macro/meta-macro-hygiene.stdout
+++ b/tests/ui/proc-macro/meta-macro-hygiene.stdout
@@ -32,13 +32,13 @@ macro_rules! produce_it
     */ {
     () =>
     {
-        meta_macro::print_def_site!($crate::dummy!()) ;
+        meta_macro::print_def_site!($crate::dummy!());
         // `print_def_site!` will respan the `$crate` identifier
         // with `Span::def_site()`. This should cause it to resolve
         // relative to `meta_macro`, *not* `make_macro` (despite
         // the fact that `print_def_site` is produced by a
         // `macro_rules!` macro in `make_macro`).
-    } ;
+    };
 }
 
 fn main /* 0#0 */() { ; }
diff --git a/tests/ui/proc-macro/nonterminal-token-hygiene.stdout b/tests/ui/proc-macro/nonterminal-token-hygiene.stdout
index 220d3a6530d..5c70e780f74 100644
--- a/tests/ui/proc-macro/nonterminal-token-hygiene.stdout
+++ b/tests/ui/proc-macro/nonterminal-token-hygiene.stdout
@@ -53,9 +53,9 @@ macro_rules! outer
     */ {
     ($item:item) =>
     {
-        macro inner() { print_bang! { $item } } inner!() ;
+        macro inner() { print_bang! { $item } } inner!();
 
-    } ;
+    };
 }
 
 struct S /* 0#0 */;
diff --git a/tests/ui/proc-macro/pretty-print-tts.stdout b/tests/ui/proc-macro/pretty-print-tts.stdout
index 8bdc6fdf9f7..fbe8640a01a 100644
--- a/tests/ui/proc-macro/pretty-print-tts.stdout
+++ b/tests/ui/proc-macro/pretty-print-tts.stdout
@@ -1,4 +1,4 @@
-PRINT-BANG INPUT (DISPLAY): { #![rustc_dummy] let a = "hello".len() ; matches!(a, 5) ; }
+PRINT-BANG INPUT (DISPLAY): { #![rustc_dummy] let a = "hello".len(); matches!(a, 5); }
 PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): { #! [rustc_dummy] let a = "hello".len() ; matches! (a, 5) ; }
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
diff --git a/tests/ui/proc-macro/weird-braces.stdout b/tests/ui/proc-macro/weird-braces.stdout
index e92f6083cf2..7da769ef0d2 100644
--- a/tests/ui/proc-macro/weird-braces.stdout
+++ b/tests/ui/proc-macro/weird-braces.stdout
@@ -5,7 +5,7 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:16:25: 16:36 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second_outer)] impl Bar<{ 1 > 0 } > for Foo<{ true } >
+PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second_outer)] impl Bar<{ 1 > 0 }> for Foo<{ true }>
 {
     #![print_target_and_args(first_inner)]
     #![print_target_and_args(second_inner)]
@@ -191,7 +191,7 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:17:25: 17:37 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 } > for Foo<{ true } >
+PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 }> for Foo<{ true }>
 {
     #![print_target_and_args(first_inner)]
     #![print_target_and_args(second_inner)]
@@ -350,7 +350,7 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 } > for Foo<{ true } >
+PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 }> for Foo<{ true }>
 { #![print_target_and_args(second_inner)] }
 PRINT-ATTR RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
 { #![print_target_and_args(second_inner)] }
@@ -470,7 +470,7 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 } > for Foo<{ true } > {}
+PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 }> for Foo<{ true }> {}
 PRINT-ATTR RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > {}
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs b/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs
index f2d5f568601..67de50a1d92 100644
--- a/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs
+++ b/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs
@@ -17,20 +17,20 @@ macro_rules! checker {
     }
 }
 
-checker!(attr_extern, r#"extern "C" { fn ffi(#[a1] arg1: i32, #[a2] ...) ; }"#);
+checker!(attr_extern, r#"extern "C" { fn ffi(#[a1] arg1: i32, #[a2] ...); }"#);
 checker!(attr_extern_cvar, r#"unsafe extern "C" fn cvar(arg1: i32, #[a1] mut args: ...) {}"#);
-checker!(attr_alias, "type Alias = fn(#[a1] u8, #[a2] ...) ;");
-checker!(attr_free, "fn free(#[a1] arg1: u8) { let lam = |#[a2] W(x), #[a3] y| () ; }");
+checker!(attr_alias, "type Alias = fn(#[a1] u8, #[a2] ...);");
+checker!(attr_free, "fn free(#[a1] arg1: u8) { let lam = |#[a2] W(x), #[a3] y| (); }");
 checker!(attr_inherent_1, "fn inherent1(#[a1] self, #[a2] arg1: u8) {}");
 checker!(attr_inherent_2, "fn inherent2(#[a1] &self, #[a2] arg1: u8) {}");
 checker!(attr_inherent_3, "fn inherent3<'a>(#[a1] &'a mut self, #[a2] arg1: u8) {}");
 checker!(attr_inherent_4, "fn inherent4<'a>(#[a1] self: Box<Self>, #[a2] arg1: u8) {}");
 checker!(attr_inherent_issue_64682, "fn inherent5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8) {}");
-checker!(attr_trait_1, "fn trait1(#[a1] self, #[a2] arg1: u8) ;");
-checker!(attr_trait_2, "fn trait2(#[a1] &self, #[a2] arg1: u8) ;");
-checker!(attr_trait_3, "fn trait3<'a>(#[a1] &'a mut self, #[a2] arg1: u8) ;");
-checker!(attr_trait_4, r#"fn trait4<'a>(#[a1] self: Box<Self>, #[a2] arg1: u8, #[a3] Vec<u8>) ;"#);
-checker!(attr_trait_issue_64682, "fn trait5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8) ;");
+checker!(attr_trait_1, "fn trait1(#[a1] self, #[a2] arg1: u8);");
+checker!(attr_trait_2, "fn trait2(#[a1] &self, #[a2] arg1: u8);");
+checker!(attr_trait_3, "fn trait3<'a>(#[a1] &'a mut self, #[a2] arg1: u8);");
+checker!(attr_trait_4, r#"fn trait4<'a>(#[a1] self: Box<Self>, #[a2] arg1: u8, #[a3] Vec<u8>);"#);
+checker!(attr_trait_issue_64682, "fn trait5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8);");
 checker!(rename_params, r#"impl Foo
 {
     fn hello(#[angery(true)] a: i32, #[a2] b: i32, #[what = "how"] c: u32) {}