about summary refs log tree commit diff
diff options
context:
space:
mode:
author	bors <bors@rust-lang.org>	2023-12-11 00:03:56 +0000
committer	bors <bors@rust-lang.org>	2023-12-11 00:03:56 +0000
commit	a9cb8ee82132b6ef0f8db3e3bc25f98dfae08c04 (patch)
tree	87a7c129158bcc45cb80cde25b87cc0260a167fc
parent	d86d65bbc19b928387f68427fcc3a0da498d8a19 (diff)
parent	940c885bc461afa2ec3eef901d6d685665686162 (diff)
download	rust-a9cb8ee82132b6ef0f8db3e3bc25f98dfae08c04.tar.gz
download	rust-a9cb8ee82132b6ef0f8db3e3bc25f98dfae08c04.zip
Auto merge of #114571 - nnethercote:improve-print_tts, r=petrochenkov
Improve `print_tts`

By slightly changing the meaning of `tokenstream::Spacing` we can greatly improve the output of `print_tts`.

r? `@ghost`
-rw-r--r--compiler/rustc_ast/src/attr/mod.rs6
-rw-r--r--compiler/rustc_ast/src/mut_visit.rs4
-rw-r--r--compiler/rustc_ast/src/tokenstream.rs137
-rw-r--r--compiler/rustc_ast_pretty/src/pprust/state.rs33
-rw-r--r--compiler/rustc_builtin_macros/src/assert/context.rs4
-rw-r--r--compiler/rustc_expand/src/config.rs36
-rw-r--r--compiler/rustc_expand/src/mbe.rs6
-rw-r--r--compiler/rustc_expand/src/mbe/macro_check.rs12
-rw-r--r--compiler/rustc_expand/src/mbe/macro_parser.rs4
-rw-r--r--compiler/rustc_expand/src/mbe/macro_rules.rs18
-rw-r--r--compiler/rustc_expand/src/mbe/metavar_expr.rs2
-rw-r--r--compiler/rustc_expand/src/mbe/quoted.rs7
-rw-r--r--compiler/rustc_expand/src/mbe/transcribe.rs42
-rw-r--r--compiler/rustc_expand/src/parse/tests.rs35
-rw-r--r--compiler/rustc_expand/src/proc_macro_server.rs44
-rw-r--r--compiler/rustc_lint/src/builtin.rs2
-rw-r--r--compiler/rustc_parse/src/lexer/mod.rs2
-rw-r--r--compiler/rustc_parse/src/lexer/tokentrees.rs82
-rw-r--r--compiler/rustc_parse/src/parser/attr_wrapper.rs14
-rw-r--r--compiler/rustc_parse/src/parser/expr.rs2
-rw-r--r--compiler/rustc_parse/src/parser/mod.rs44
-rw-r--r--library/proc_macro/src/lib.rs26
-rw-r--r--src/librustdoc/clean/render_macro_matchers.rs6
-rw-r--r--src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs2
-rw-r--r--src/tools/rustfmt/src/macros.rs6
-rw-r--r--tests/pretty/cast-lt.pp2
-rw-r--r--tests/run-make/rustc-macro-dep-files/foo.rs2
-rw-r--r--tests/ui/async-await/issues/issue-60674.stdout6
-rw-r--r--tests/ui/hygiene/unpretty-debug.stdout2
-rw-r--r--tests/ui/infinite/issue-41731-infinite-macro-print.stderr8
-rw-r--r--tests/ui/infinite/issue-41731-infinite-macro-println.stderr8
-rw-r--r--tests/ui/macros/stringify.rs352
-rw-r--r--tests/ui/macros/syntax-extension-source-utils.rs4
-rw-r--r--tests/ui/macros/trace-macro.stderr2
-rw-r--r--tests/ui/macros/trace_faulty_macros.stderr14
-rw-r--r--tests/ui/proc-macro/allowed-attr-stmt-expr.stdout15
-rw-r--r--tests/ui/proc-macro/attr-complex-fn.stdout7
-rw-r--r--tests/ui/proc-macro/attr-stmt-expr.stdout9
-rw-r--r--tests/ui/proc-macro/attribute-after-derive.stdout6
-rw-r--r--tests/ui/proc-macro/attribute-spans-preserved.stdout2
-rw-r--r--tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs7
-rw-r--r--tests/ui/proc-macro/auxiliary/attr-stmt-expr.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/derive-a.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/derive-atob.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/derive-b-rpass.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/derive-ctod.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/derive-same-struct.rs4
-rw-r--r--tests/ui/proc-macro/auxiliary/derive-union.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/expand-with-a-macro.rs2
-rw-r--r--tests/ui/proc-macro/auxiliary/issue-79825.rs2
-rw-r--r--tests/ui/proc-macro/capture-macro-rules-invoke.stdout2
-rw-r--r--tests/ui/proc-macro/capture-unglued-token.stdout2
-rw-r--r--tests/ui/proc-macro/cfg-eval-inner.stdout20
-rw-r--r--tests/ui/proc-macro/cfg-eval.stdout3
-rw-r--r--tests/ui/proc-macro/derive-same-struct.stdout2
-rw-r--r--tests/ui/proc-macro/doc-comment-preserved.stdout2
-rw-r--r--tests/ui/proc-macro/dollar-crate-issue-57089.stdout6
-rw-r--r--tests/ui/proc-macro/dollar-crate-issue-62325.stdout6
-rw-r--r--tests/ui/proc-macro/dollar-crate.stdout18
-rw-r--r--tests/ui/proc-macro/expand-to-derive.stdout5
-rw-r--r--tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout11
-rw-r--r--tests/ui/proc-macro/inert-attribute-order.stdout12
-rw-r--r--tests/ui/proc-macro/inner-attr-non-inline-mod.stdout3
-rw-r--r--tests/ui/proc-macro/inner-attrs.stdout26
-rw-r--r--tests/ui/proc-macro/issue-75734-pp-paren.stdout3
-rw-r--r--tests/ui/proc-macro/issue-75930-derive-cfg.stdout65
-rw-r--r--tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout2
-rw-r--r--tests/ui/proc-macro/keep-expr-tokens.stdout3
-rw-r--r--tests/ui/proc-macro/macro-rules-derive-cfg.stdout11
-rw-r--r--tests/ui/proc-macro/meta-macro-hygiene.stdout4
-rw-r--r--tests/ui/proc-macro/nested-derive-cfg.stdout2
-rw-r--r--tests/ui/proc-macro/nonterminal-expansion.stdout2
-rw-r--r--tests/ui/proc-macro/nonterminal-token-hygiene.stdout8
-rw-r--r--tests/ui/proc-macro/pretty-print-tts.stdout3
-rw-r--r--tests/ui/proc-macro/trailing-plus.stdout3
-rw-r--r--tests/ui/proc-macro/weird-braces.stdout34
-rw-r--r--tests/ui/rfcs/rfc-2361-dbg-macro/dbg-macro-expected-behavior.run.stderr2
-rw-r--r--tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs39
78 files changed, 816 insertions, 525 deletions
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 851ab46345f..98138cedb24 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -387,7 +387,7 @@ impl MetaItemKind {
         tokens: &mut impl Iterator<Item = &'a TokenTree>,
     ) -> Option<MetaItemKind> {
         match tokens.next() {
-            Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
+            Some(TokenTree::Delimited(.., Delimiter::Invisible, inner_tokens)) => {
                 MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
             }
             Some(TokenTree::Token(token, _)) => {
@@ -401,7 +401,7 @@ impl MetaItemKind {
         tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
     ) -> Option<MetaItemKind> {
         match tokens.peek() {
-            Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => {
+            Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
                 tokens.next();
                 MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
@@ -524,7 +524,7 @@ impl NestedMetaItem {
                 tokens.next();
                 return Some(NestedMetaItem::Lit(lit));
             }
-            Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
+            Some(TokenTree::Delimited(.., Delimiter::Invisible, inner_tokens)) => {
                 tokens.next();
                 return NestedMetaItem::from_tokens(&mut inner_tokens.trees().peekable());
             }
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index 01defcff9ef..10b2025f937 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -682,7 +682,7 @@ pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
         AttrTokenTree::Token(token, _) => {
             visit_token(token, vis);
         }
-        AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+        AttrTokenTree::Delimited(DelimSpan { open, close }, _spacing, _delim, tts) => {
             vis.visit_span(open);
             vis.visit_span(close);
             visit_attr_tts(tts, vis);
@@ -709,7 +709,7 @@ pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
         TokenTree::Token(token, _) => {
             visit_token(token, vis);
         }
-        TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
+        TokenTree::Delimited(DelimSpan { open, close }, _spacing, _delim, tts) => {
             vis.visit_span(open);
             vis.visit_span(close);
             visit_tts(tts, vis);
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index 48854bbae24..4c0c496584e 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -46,7 +46,7 @@ pub enum TokenTree {
     /// delimiters are implicitly represented by `Delimited`.
     Token(Token, Spacing),
     /// A delimited sequence of token trees.
-    Delimited(DelimSpan, Delimiter, TokenStream),
+    Delimited(DelimSpan, DelimSpacing, Delimiter, TokenStream),
 }
 
 // Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
@@ -62,11 +62,11 @@ where
 }
 
 impl TokenTree {
-    /// Checks if this `TokenTree` is equal to the other, regardless of span information.
+    /// Checks if this `TokenTree` is equal to the other, regardless of span/spacing information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
             (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
-            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
+            (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(tts2)
             }
             _ => false,
@@ -99,6 +99,11 @@ impl TokenTree {
         TokenTree::Token(Token::new(kind, span), Spacing::Joint)
     }
 
+    /// Create a `TokenTree::Token` with joint-hidden spacing.
+    pub fn token_joint_hidden(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span), Spacing::JointHidden)
+    }
+
     pub fn uninterpolate(&self) -> Cow<'_, TokenTree> {
         match self {
             TokenTree::Token(token, spacing) => match token.uninterpolate() {
@@ -183,7 +188,7 @@ pub struct AttrTokenStream(pub Lrc<Vec<AttrTokenTree>>);
 #[derive(Clone, Debug, Encodable, Decodable)]
 pub enum AttrTokenTree {
     Token(Token, Spacing),
-    Delimited(DelimSpan, Delimiter, AttrTokenStream),
+    Delimited(DelimSpan, DelimSpacing, Delimiter, AttrTokenStream),
     /// Stores the attributes for an attribute target,
     /// along with the tokens for that attribute target.
     /// See `AttributesData` for more information
@@ -208,9 +213,14 @@ impl AttrTokenStream {
                 AttrTokenTree::Token(inner, spacing) => {
                     smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
                 }
-                AttrTokenTree::Delimited(span, delim, stream) => {
-                    smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
-                        .into_iter()
+                AttrTokenTree::Delimited(span, spacing, delim, stream) => {
+                    smallvec![TokenTree::Delimited(
+                        *span,
+                        *spacing,
+                        *delim,
+                        stream.to_tokenstream()
+                    ),]
+                    .into_iter()
                 }
                 AttrTokenTree::Attributes(data) => {
                     let idx = data
@@ -230,7 +240,7 @@ impl AttrTokenStream {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
                         for tree in target_tokens.iter_mut().rev().take(2) {
-                            if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
+                            if let TokenTree::Delimited(span, spacing, delim, delim_tokens) = tree {
                                 // Inner attributes are only supported on extern blocks, functions,
                                 // impls, and modules. All of these have their inner attributes
                                 // placed at the beginning of the rightmost outermost braced group:
@@ -250,7 +260,7 @@ impl AttrTokenStream {
                                     stream.push_stream(inner_attr.tokens());
                                 }
                                 stream.push_stream(delim_tokens.clone());
-                                *tree = TokenTree::Delimited(*span, *delim, stream);
+                                *tree = TokenTree::Delimited(*span, *spacing, *delim, stream);
                                 found = true;
                                 break;
                             }
@@ -303,21 +313,64 @@ pub struct AttributesData {
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
 pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
 
-/// Similar to `proc_macro::Spacing`, but for tokens.
-///
-/// Note that all `ast::TokenTree::Token` instances have a `Spacing`, but when
-/// we convert to `proc_macro::TokenTree` for proc macros only `Punct`
-/// `TokenTree`s have a `proc_macro::Spacing`.
+/// Indicates whether a token can join with the following token to form a
+/// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
+/// guide pretty-printing, which is where the `JointHidden` value (which isn't
+/// part of `proc_macro::Spacing`) comes in useful.
 #[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub enum Spacing {
-    /// The token is not immediately followed by an operator token (as
-    /// determined by `Token::is_op`). E.g. a `+` token is `Alone` in `+ =`,
-    /// `+/*foo*/=`, `+ident`, and `+()`.
+    /// The token cannot join with the following token to form a compound
+    /// token.
+    ///
+    /// In token streams parsed from source code, the compiler will use `Alone`
+    /// for any token immediately followed by whitespace, a non-doc comment, or
+    /// EOF.
+    ///
+    /// When constructing token streams within the compiler, use this for each
+    /// token that (a) should be pretty-printed with a space after it, or (b)
+    /// is the last token in the stream. (In the latter case the choice of
+    /// spacing doesn't matter because it is never used for the last token. We
+    /// arbitrarily use `Alone`.)
+    ///
+    /// Converts to `proc_macro::Spacing::Alone`, and
+    /// `proc_macro::Spacing::Alone` converts back to this.
     Alone,
 
-    /// The token is immediately followed by an operator token. E.g. a `+`
-    /// token is `Joint` in `+=` and `++`.
+    /// The token can join with the following token to form a compound token.
+    ///
+    /// In token streams parsed from source code, the compiler will use `Joint`
+    /// for any token immediately followed by punctuation (as determined by
+    /// `Token::is_punct`).
+    ///
+    /// When constructing token streams within the compiler, use this for each
+    /// token that (a) should be pretty-printed without a space after it, and
+    /// (b) is followed by a punctuation token.
+    ///
+    /// Converts to `proc_macro::Spacing::Joint`, and
+    /// `proc_macro::Spacing::Joint` converts back to this.
     Joint,
+
+    /// The token can join with the following token to form a compound token,
+    /// but this will not be visible at the proc macro level. (This is what the
+    /// `Hidden` means; see below.)
+    ///
+    /// In token streams parsed from source code, the compiler will use
+    /// `JointHidden` for any token immediately followed by anything not
+    /// covered by the `Alone` and `Joint` cases: an identifier, lifetime,
+    /// literal, delimiter, doc comment.
+    ///
+    /// When constructing token streams, use this for each token that (a)
+    /// should be pretty-printed without a space after it, and (b) is followed
+    /// by a non-punctuation token.
+    ///
+    /// Converts to `proc_macro::Spacing::Alone`, but
+    /// `proc_macro::Spacing::Alone` converts back to `token::Spacing::Alone`.
+    /// Because of that, pretty-printing of `TokenStream`s produced by proc
+    /// macros is unavoidably uglier (with more whitespace between tokens) than
+    /// pretty-printing of `TokenStream`'s produced by other means (i.e. parsed
+    /// source code, internally constructed token streams, and token streams
+    /// produced by declarative macros).
+    JointHidden,
 }
 
 impl TokenStream {
@@ -421,21 +474,14 @@ impl TokenStream {
         self
     }
 
-    /// Create a token stream containing a single token with alone spacing.
+    /// Create a token stream containing a single token with alone spacing. The
+    /// spacing used for the final token in a constructed stream doesn't matter
+    /// because it's never used. In practice we arbitrarily use
+    /// `Spacing::Alone`.
     pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
         TokenStream::new(vec![TokenTree::token_alone(kind, span)])
     }
 
-    /// Create a token stream containing a single token with joint spacing.
-    pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
-        TokenStream::new(vec![TokenTree::token_joint(kind, span)])
-    }
-
-    /// Create a token stream containing a single `Delimited`.
-    pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
-        TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
-    }
-
     pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
         let Some(tokens) = node.tokens() else {
             panic!("missing tokens for node at {:?}: {:?}", node.span(), node);
@@ -482,6 +528,7 @@ impl TokenStream {
             }
             token::Interpolated(nt) => TokenTree::Delimited(
                 DelimSpan::from_single(token.span),
+                DelimSpacing::new(Spacing::JointHidden, spacing),
                 Delimiter::Invisible,
                 TokenStream::from_nonterminal_ast(&nt.0).flattened(),
             ),
@@ -492,8 +539,8 @@ impl TokenStream {
     fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
         match tree {
             TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
-            TokenTree::Delimited(span, delim, tts) => {
-                TokenTree::Delimited(*span, *delim, tts.flattened())
+            TokenTree::Delimited(span, spacing, delim, tts) => {
+                TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
             }
         }
     }
@@ -503,7 +550,7 @@ impl TokenStream {
         fn can_skip(stream: &TokenStream) -> bool {
             stream.trees().all(|tree| match tree {
                 TokenTree::Token(token, _) => !matches!(token.kind, token::Interpolated(_)),
-                TokenTree::Delimited(_, _, inner) => can_skip(inner),
+                TokenTree::Delimited(.., inner) => can_skip(inner),
             })
         }
 
@@ -517,7 +564,7 @@ impl TokenStream {
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
     // value indicates if gluing took place.
     fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
-        if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = vec.last()
+        if let Some(TokenTree::Token(last_tok, Spacing::Joint | Spacing::JointHidden)) = vec.last()
             && let TokenTree::Token(tok, spacing) = tt
             && let Some(glued_tok) = last_tok.glue(tok)
         {
@@ -592,9 +639,10 @@ impl TokenStream {
 
                     &TokenTree::Token(..) => i += 1,
 
-                    &TokenTree::Delimited(sp, delim, ref delim_stream) => {
+                    &TokenTree::Delimited(sp, spacing, delim, ref delim_stream) => {
                         if let Some(desugared_delim_stream) = desugar_inner(delim_stream.clone()) {
-                            let new_tt = TokenTree::Delimited(sp, delim, desugared_delim_stream);
+                            let new_tt =
+                                TokenTree::Delimited(sp, spacing, delim, desugared_delim_stream);
                             Lrc::make_mut(&mut stream.0)[i] = new_tt;
                             modified = true;
                         }
@@ -622,10 +670,11 @@ impl TokenStream {
                 num_of_hashes = cmp::max(num_of_hashes, count);
             }
 
-            // `/// foo` becomes `doc = r"foo"`.
+            // `/// foo` becomes `[doc = r"foo"]`.
             let delim_span = DelimSpan::from_single(span);
             let body = TokenTree::Delimited(
                 delim_span,
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
                 Delimiter::Bracket,
                 [
                     TokenTree::token_alone(token::Ident(sym::doc, false), span),
@@ -641,7 +690,7 @@ impl TokenStream {
 
             if attr_style == AttrStyle::Inner {
                 vec![
-                    TokenTree::token_alone(token::Pound, span),
+                    TokenTree::token_joint(token::Pound, span),
                     TokenTree::token_alone(token::Not, span),
                     body,
                 ]
@@ -738,6 +787,18 @@ impl DelimSpan {
     }
 }
 
+#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
+pub struct DelimSpacing {
+    pub open: Spacing,
+    pub close: Spacing,
+}
+
+impl DelimSpacing {
+    pub fn new(open: Spacing, close: Spacing) -> DelimSpacing {
+        DelimSpacing { open, close }
+    }
+}
+
 // Some types are used a lot. Make sure they don't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
 mod size_asserts {
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index ff36e6c2845..0fae2c78e27 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -10,7 +10,7 @@ use crate::pp::{self, Breaks};
 use rustc_ast::attr::AttrIdGenerator;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
 use rustc_ast::util::classify;
 use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
 use rustc_ast::util::parser;
@@ -183,10 +183,10 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
         //
         // FIXME: Incorrect cases:
         // - Let: `let(a, b) = (1, 2)`
-        (Tok(Token { kind: Ident(..), .. }, _), Del(_, Parenthesis, _)) => false,
+        (Tok(Token { kind: Ident(..), .. }, _), Del(_, _, Parenthesis, _)) => false,
 
         // `#` + `[`: `#[attr]`
-        (Tok(Token { kind: Pound, .. }, _), Del(_, Bracket, _)) => false,
+        (Tok(Token { kind: Pound, .. }, _), Del(_, _, Bracket, _)) => false,
 
         _ => true,
     }
@@ -509,16 +509,17 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     /// appropriate macro, transcribe back into the grammar we just parsed from,
     /// and then pretty-print the resulting AST nodes (so, e.g., we print
     /// expression arguments as expressions). It can be done! I think.
-    fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
+    fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) -> Spacing {
         match tt {
-            TokenTree::Token(token, _) => {
+            TokenTree::Token(token, spacing) => {
                 let token_str = self.token_to_string_ext(token, convert_dollar_crate);
                 self.word(token_str);
                 if let token::DocComment(..) = token.kind {
                     self.hardbreak()
                 }
+                *spacing
             }
-            TokenTree::Delimited(dspan, delim, tts) => {
+            TokenTree::Delimited(dspan, spacing, delim, tts) => {
                 self.print_mac_common(
                     None,
                     false,
@@ -528,6 +529,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                     convert_dollar_crate,
                     dspan.entire(),
                 );
+                spacing.close
             }
         }
     }
@@ -535,9 +537,20 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
         let mut iter = tts.trees().peekable();
         while let Some(tt) = iter.next() {
-            self.print_tt(tt, convert_dollar_crate);
+            let spacing = self.print_tt(tt, convert_dollar_crate);
             if let Some(next) = iter.peek() {
-                if space_between(tt, next) {
+                // Should we print a space after `tt`? There are two guiding
+                // factors.
+                // - `spacing` is the more important and accurate one. Most
+                //   tokens have good spacing information, and
+                //   `Joint`/`JointHidden` get used a lot.
+                // - `space_between` is the backup. Code produced by proc
+                //   macros has worse spacing information, with no
+                //   `JointHidden` usage and too much `Alone` usage, which
+                //   would result in over-spaced output such as
+                //   `( x () , y . z )`. `space_between` avoids some of the
+                //   excess whitespace.
+                if spacing == Spacing::Alone && space_between(tt, next) {
                     self.space();
                 }
             }
@@ -1797,7 +1810,9 @@ impl<'a> State<'a> {
     }
 
     pub(crate) fn tt_to_string(&self, tt: &TokenTree) -> String {
-        Self::to_string(|s| s.print_tt(tt, false))
+        Self::to_string(|s| {
+            s.print_tt(tt, false);
+        })
     }
 
     pub(crate) fn tts_to_string(&self, tokens: &TokenStream) -> String {
diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs
index 2a4bfe9e200..b54e119189a 100644
--- a/compiler/rustc_builtin_macros/src/assert/context.rs
+++ b/compiler/rustc_builtin_macros/src/assert/context.rs
@@ -151,7 +151,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
     fn build_panic(&self, expr_str: &str, panic_path: Path) -> P<Expr> {
         let escaped_expr_str = escape_to_fmt(expr_str);
         let initial = [
-            TokenTree::token_alone(
+            TokenTree::token_joint_hidden(
                 token::Literal(token::Lit {
                     kind: token::LitKind::Str,
                     symbol: Symbol::intern(&if self.fmt_string.is_empty() {
@@ -170,7 +170,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
         ];
         let captures = self.capture_decls.iter().flat_map(|cap| {
             [
-                TokenTree::token_alone(token::Ident(cap.ident.name, false), cap.ident.span),
+                TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span),
                 TokenTree::token_alone(token::Comma, self.span),
             ]
         });
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 5ccef343b17..b4693f5a71a 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -6,8 +6,7 @@ use crate::errors::{
 };
 use rustc_ast::ptr::P;
 use rustc_ast::token::{Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree};
-use rustc_ast::tokenstream::{DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{LazyAttrTokenStream, TokenTree};
 use rustc_ast::NodeId;
 use rustc_ast::{self as ast, AttrStyle, Attribute, HasAttrs, HasTokens, MetaItem};
@@ -242,7 +241,7 @@ impl<'a> StripUnconfigured<'a> {
             stream.0.iter().all(|tree| match tree {
                 AttrTokenTree::Attributes(_) => false,
                 AttrTokenTree::Token(..) => true,
-                AttrTokenTree::Delimited(_, _, inner) => can_skip(inner),
+                AttrTokenTree::Delimited(.., inner) => can_skip(inner),
             })
         }
 
@@ -266,9 +265,9 @@ impl<'a> StripUnconfigured<'a> {
                         None.into_iter()
                     }
                 }
-                AttrTokenTree::Delimited(sp, delim, mut inner) => {
+                AttrTokenTree::Delimited(sp, spacing, delim, mut inner) => {
                     inner = self.configure_tokens(&inner);
-                    Some(AttrTokenTree::Delimited(sp, delim, inner)).into_iter()
+                    Some(AttrTokenTree::Delimited(sp, spacing, delim, inner)).into_iter()
                 }
                 AttrTokenTree::Token(ref token, _)
                     if let TokenKind::Interpolated(nt) = &token.kind =>
@@ -372,27 +371,32 @@ impl<'a> StripUnconfigured<'a> {
         };
         let pound_span = pound_token.span;
 
-        let mut trees = vec![AttrTokenTree::Token(pound_token, Spacing::Alone)];
-        if attr.style == AttrStyle::Inner {
-            // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
-            let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
-                orig_trees.next().unwrap().clone()
-            else {
-                panic!("Bad tokens for attribute {attr:?}");
-            };
-            trees.push(AttrTokenTree::Token(bang_token, Spacing::Alone));
-        }
         // We don't really have a good span to use for the synthesized `[]`
         // in `#[attr]`, so just use the span of the `#` token.
         let bracket_group = AttrTokenTree::Delimited(
             DelimSpan::from_single(pound_span),
+            DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
             Delimiter::Bracket,
             item.tokens
                 .as_ref()
                 .unwrap_or_else(|| panic!("Missing tokens for {item:?}"))
                 .to_attr_token_stream(),
         );
-        trees.push(bracket_group);
+        let trees = if attr.style == AttrStyle::Inner {
+            // For inner attributes, we do the same thing for the `!` in `#![some_attr]`
+            let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
+                orig_trees.next().unwrap().clone()
+            else {
+                panic!("Bad tokens for attribute {attr:?}");
+            };
+            vec![
+                AttrTokenTree::Token(pound_token, Spacing::Joint),
+                AttrTokenTree::Token(bang_token, Spacing::JointHidden),
+                bracket_group,
+            ]
+        } else {
+            vec![AttrTokenTree::Token(pound_token, Spacing::JointHidden), bracket_group]
+        };
         let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
         let attr = attr::mk_attr_from_item(
             &self.sess.parse_sess.attr_id_generator,
diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs
index a43b2a00188..ca4a1f327ad 100644
--- a/compiler/rustc_expand/src/mbe.rs
+++ b/compiler/rustc_expand/src/mbe.rs
@@ -13,7 +13,7 @@ pub(crate) mod transcribe;
 
 use metavar_expr::MetaVarExpr;
 use rustc_ast::token::{Delimiter, NonterminalKind, Token, TokenKind};
-use rustc_ast::tokenstream::DelimSpan;
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan};
 use rustc_span::symbol::Ident;
 use rustc_span::Span;
 
@@ -68,7 +68,7 @@ pub(crate) enum KleeneOp {
 enum TokenTree {
     Token(Token),
     /// A delimited sequence, e.g. `($e:expr)` (RHS) or `{ $e }` (LHS).
-    Delimited(DelimSpan, Delimited),
+    Delimited(DelimSpan, DelimSpacing, Delimited),
     /// A kleene-style repetition sequence, e.g. `$($e:expr)*` (RHS) or `$($e),*` (LHS).
     Sequence(DelimSpan, SequenceRepetition),
     /// e.g., `$var`.
@@ -99,7 +99,7 @@ impl TokenTree {
             TokenTree::Token(Token { span, .. })
             | TokenTree::MetaVar(span, _)
             | TokenTree::MetaVarDecl(span, _, _) => span,
-            TokenTree::Delimited(span, _)
+            TokenTree::Delimited(span, ..)
             | TokenTree::MetaVarExpr(span, _)
             | TokenTree::Sequence(span, _) => span.entire(),
         }
diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs
index 42c91824baf..0b1f25b67c8 100644
--- a/compiler/rustc_expand/src/mbe/macro_check.rs
+++ b/compiler/rustc_expand/src/mbe/macro_check.rs
@@ -290,7 +290,7 @@ fn check_binders(
         }
         // `MetaVarExpr` can not appear in the LHS of a macro arm
         TokenTree::MetaVarExpr(..) => {}
-        TokenTree::Delimited(_, ref del) => {
+        TokenTree::Delimited(.., ref del) => {
             for tt in &del.tts {
                 check_binders(sess, node_id, tt, macros, binders, ops, valid);
             }
@@ -353,7 +353,7 @@ fn check_occurrences(
             };
             check_ops_is_prefix(sess, node_id, macros, binders, ops, dl.entire(), name);
         }
-        TokenTree::Delimited(_, ref del) => {
+        TokenTree::Delimited(.., ref del) => {
             check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
         }
         TokenTree::Sequence(_, ref seq) => {
@@ -435,8 +435,8 @@ fn check_nested_occurrences(
                 // We check that the meta-variable is correctly used.
                 check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
             }
-            (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(_, del))
-            | (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
+            (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(.., del))
+            | (NestedMacroState::MacroName, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Brace =>
             {
                 let macro_rules = state == NestedMacroState::MacroRulesNotName;
@@ -466,7 +466,7 @@ fn check_nested_occurrences(
                 // We check that the meta-variable is correctly used.
                 check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
             }
-            (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
+            (NestedMacroState::MacroName, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Parenthesis =>
             {
                 state = NestedMacroState::MacroNameParen;
@@ -481,7 +481,7 @@ fn check_nested_occurrences(
                     valid,
                 );
             }
-            (NestedMacroState::MacroNameParen, TokenTree::Delimited(_, del))
+            (NestedMacroState::MacroNameParen, TokenTree::Delimited(.., del))
                 if del.delim == Delimiter::Brace =>
             {
                 state = NestedMacroState::Empty;
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index 965beb9bf84..b248a1fe349 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -184,7 +184,7 @@ pub(super) fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
                 TokenTree::Token(token) => {
                     locs.push(MatcherLoc::Token { token: token.clone() });
                 }
-                TokenTree::Delimited(span, delimited) => {
+                TokenTree::Delimited(span, _, delimited) => {
                     let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
                     let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);
 
@@ -335,7 +335,7 @@ pub(super) fn count_metavar_decls(matcher: &[TokenTree]) -> usize {
         .map(|tt| match tt {
             TokenTree::MetaVarDecl(..) => 1,
             TokenTree::Sequence(_, seq) => seq.num_captures,
-            TokenTree::Delimited(_, delim) => count_metavar_decls(&delim.tts),
+            TokenTree::Delimited(.., delim) => count_metavar_decls(&delim.tts),
             TokenTree::Token(..) => 0,
             TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
         })
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 19734394382..393eec3997b 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -207,7 +207,7 @@ fn expand_macro<'cx>(
     match try_success_result {
         Ok((i, named_matches)) => {
             let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
-                mbe::TokenTree::Delimited(span, delimited) => (&delimited, *span),
+                mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span),
                 _ => cx.span_bug(sp, "malformed macro rhs"),
             };
             let arm_span = rhses[i].span();
@@ -589,7 +589,7 @@ pub fn compile_declarative_macro(
             .map(|lhs| {
                 // Ignore the delimiters around the matcher.
                 match lhs {
-                    mbe::TokenTree::Delimited(_, delimited) => {
+                    mbe::TokenTree::Delimited(.., delimited) => {
                         mbe::macro_parser::compute_locs(&delimited.tts)
                     }
                     _ => sess.diagnostic().span_bug(def.span, "malformed macro lhs"),
@@ -615,7 +615,7 @@ pub fn compile_declarative_macro(
 fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    if let mbe::TokenTree::Delimited(_, delimited) = lhs {
+    if let mbe::TokenTree::Delimited(.., delimited) = lhs {
         check_matcher(sess, def, &delimited.tts)
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
@@ -668,7 +668,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
             | TokenTree::MetaVar(..)
             | TokenTree::MetaVarDecl(..)
             | TokenTree::MetaVarExpr(..) => (),
-            TokenTree::Delimited(_, del) => {
+            TokenTree::Delimited(.., del) => {
                 if !check_lhs_no_empty_seq(sess, &del.tts) {
                     return false;
                 }
@@ -709,14 +709,14 @@ fn check_matcher(sess: &ParseSess, def: &ast::Item, matcher: &[mbe::TokenTree])
 
 fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
     match rhs {
-        mbe::TokenTree::Delimited(_sp, d) => {
+        mbe::TokenTree::Delimited(.., d) => {
             let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
                 if let mbe::TokenTree::Token(ident) = ident
                     && let TokenKind::Ident(ident, _) = ident.kind
                     && ident == sym::compile_error
                     && let mbe::TokenTree::Token(bang) = bang
                     && let TokenKind::Not = bang.kind
-                    && let mbe::TokenTree::Delimited(_, del) = args
+                    && let mbe::TokenTree::Delimited(.., del) = args
                     && del.delim != Delimiter::Invisible
                 {
                     true
@@ -773,7 +773,7 @@ impl<'tt> FirstSets<'tt> {
                     | TokenTree::MetaVarExpr(..) => {
                         first.replace_with(TtHandle::TtRef(tt));
                     }
-                    TokenTree::Delimited(span, delimited) => {
+                    TokenTree::Delimited(span, _, delimited) => {
                         build_recur(sets, &delimited.tts);
                         first.replace_with(TtHandle::from_token_kind(
                             token::OpenDelim(delimited.delim),
@@ -842,7 +842,7 @@ impl<'tt> FirstSets<'tt> {
                     first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
-                TokenTree::Delimited(span, delimited) => {
+                TokenTree::Delimited(span, _, delimited) => {
                     first.add_one(TtHandle::from_token_kind(
                         token::OpenDelim(delimited.delim),
                         span.open,
@@ -1087,7 +1087,7 @@ fn check_matcher_core<'tt>(
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(span, d) => {
+            TokenTree::Delimited(span, _, d) => {
                 let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
                     token::CloseDelim(d.delim),
                     span.close,
diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs
index 7cb279a9812..c29edc3dc9f 100644
--- a/compiler/rustc_expand/src/mbe/metavar_expr.rs
+++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -35,7 +35,7 @@ impl MetaVarExpr {
     ) -> PResult<'sess, MetaVarExpr> {
         let mut tts = input.trees();
         let ident = parse_ident(&mut tts, sess, outer_span)?;
-        let Some(TokenTree::Delimited(_, Delimiter::Parenthesis, args)) = tts.next() else {
+        let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else {
             let msg = "meta-variable expression parameter must be wrapped in parentheses";
             return Err(sess.span_diagnostic.struct_span_err(ident.span, msg));
         };
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 6a99412fc5b..ab9fb20b364 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -151,7 +151,7 @@ fn parse_tree<'a>(
             // during parsing.
             let mut next = outer_trees.next();
             let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
-            if let Some(tokenstream::TokenTree::Delimited(_, Delimiter::Invisible, tts)) = next {
+            if let Some(tokenstream::TokenTree::Delimited(.., Delimiter::Invisible, tts)) = next {
                 trees = Box::new(tts.trees());
                 next = trees.next();
             } else {
@@ -160,7 +160,7 @@ fn parse_tree<'a>(
 
             match next {
                 // `tree` is followed by a delimited set of token trees.
-                Some(&tokenstream::TokenTree::Delimited(delim_span, delim, ref tts)) => {
+                Some(&tokenstream::TokenTree::Delimited(delim_span, _, delim, ref tts)) => {
                     if parsing_patterns {
                         if delim != Delimiter::Parenthesis {
                             span_dollar_dollar_or_metavar_in_the_lhs_err(
@@ -258,8 +258,9 @@ fn parse_tree<'a>(
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
-        &tokenstream::TokenTree::Delimited(span, delim, ref tts) => TokenTree::Delimited(
+        &tokenstream::TokenTree::Delimited(span, spacing, delim, ref tts) => TokenTree::Delimited(
             span,
+            spacing,
             Delimited {
                 delim,
                 tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index bc03fc0d1b1..18707cebcd5 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -7,7 +7,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree,
 use crate::mbe::{self, MetaVarExpr};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{pluralize, PResult};
 use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
@@ -31,14 +31,24 @@ impl MutVisitor for Marker {
 
 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame<'a> {
-    Delimited { tts: &'a [mbe::TokenTree], idx: usize, delim: Delimiter, span: DelimSpan },
-    Sequence { tts: &'a [mbe::TokenTree], idx: usize, sep: Option<Token> },
+    Delimited {
+        tts: &'a [mbe::TokenTree],
+        idx: usize,
+        delim: Delimiter,
+        span: DelimSpan,
+        spacing: DelimSpacing,
+    },
+    Sequence {
+        tts: &'a [mbe::TokenTree],
+        idx: usize,
+        sep: Option<Token>,
+    },
 }
 
 impl<'a> Frame<'a> {
     /// Construct a new frame around the delimited set of tokens.
-    fn new(src: &'a mbe::Delimited, span: DelimSpan) -> Frame<'a> {
-        Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span }
+    fn new(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
+        Frame::Delimited { tts: &src.tts, idx: 0, delim: src.delim, span, spacing }
     }
 }
 
@@ -89,8 +99,10 @@ pub(super) fn transcribe<'a>(
     }
 
     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
-    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
-    let mut stack: SmallVec<[Frame<'_>; 1]> = smallvec![Frame::new(src, src_span)];
+    // we have yet to expand/are still expanding. We start the stack off with the whole RHS. The
+    // choice of spacing values doesn't matter.
+    let mut stack: SmallVec<[Frame<'_>; 1]> =
+        smallvec![Frame::new(src, src_span, DelimSpacing::new(Spacing::Alone, Spacing::Alone))];
 
     // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
     // `repeats` keeps track of where we are in matching at each level, with the last element being
@@ -144,14 +156,19 @@ pub(super) fn transcribe<'a>(
                 // We are done processing a Delimited. If this is the top-level delimited, we are
                 // done. Otherwise, we unwind the result_stack to append what we have produced to
                 // any previous results.
-                Frame::Delimited { delim, span, .. } => {
+                Frame::Delimited { delim, span, mut spacing, .. } => {
+                    // Hack to force-insert a space after `]` in certain cases.
+                    // See discussion of the `hex-literal` crate in #114571.
+                    if delim == Delimiter::Bracket {
+                        spacing.close = Spacing::Alone;
+                    }
                     if result_stack.is_empty() {
                         // No results left to compute! We are back at the top-level.
                         return Ok(TokenStream::new(result));
                     }
 
                     // Step back into the parent Delimited.
-                    let tree = TokenTree::Delimited(span, delim, TokenStream::new(result));
+                    let tree = TokenTree::Delimited(span, spacing, delim, TokenStream::new(result));
                     result = result_stack.pop().unwrap();
                     result.push(tree);
                 }
@@ -240,7 +257,7 @@ pub(super) fn transcribe<'a>(
                     // with modified syntax context. (I believe this supports nested macros).
                     marker.visit_span(&mut sp);
                     marker.visit_ident(&mut original_ident);
-                    result.push(TokenTree::token_alone(token::Dollar, sp));
+                    result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
                     result.push(TokenTree::Token(
                         Token::from_ast_ident(original_ident),
                         Spacing::Alone,
@@ -258,13 +275,14 @@ pub(super) fn transcribe<'a>(
             // We will produce all of the results of the inside of the `Delimited` and then we will
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
-            mbe::TokenTree::Delimited(mut span, delimited) => {
+            mbe::TokenTree::Delimited(mut span, spacing, delimited) => {
                 mut_visit::visit_delim_span(&mut span, &mut marker);
                 stack.push(Frame::Delimited {
                     tts: &delimited.tts,
                     delim: delimited.delim,
                     idx: 0,
                     span,
+                    spacing: *spacing,
                 });
                 result_stack.push(mem::take(&mut result));
             }
@@ -374,7 +392,7 @@ fn lockstep_iter_size(
 ) -> LockstepIterSize {
     use mbe::TokenTree;
     match tree {
-        TokenTree::Delimited(_, delimited) => {
+        TokenTree::Delimited(.., delimited) => {
             delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                 size.with(lockstep_iter_size(tt, interpolations, repeats))
             })
diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs
index bdc20882a9d..7a888250ca1 100644
--- a/compiler/rustc_expand/src/parse/tests.rs
+++ b/compiler/rustc_expand/src/parse/tests.rs
@@ -4,7 +4,7 @@ use crate::tests::{
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast::visit;
 use rustc_ast::{self as ast, PatKind};
 use rustc_ast_pretty::pprust::item_to_string;
@@ -77,14 +77,14 @@ fn string_to_tts_macro() {
                 TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _),
                 TokenTree::Token(Token { kind: token::Not, .. }, _),
                 TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _),
-                TokenTree::Delimited(_, macro_delim, macro_tts),
+                TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
                 let tts = &macro_tts.trees().collect::<Vec<_>>();
                 match &tts[..] {
                     [
-                        TokenTree::Delimited(_, first_delim, first_tts),
+                        TokenTree::Delimited(.., first_delim, first_tts),
                         TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
-                        TokenTree::Delimited(_, second_delim, second_tts),
+                        TokenTree::Delimited(.., second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
                         let tts = &first_tts.trees().collect::<Vec<_>>();
                         match &tts[..] {
@@ -116,27 +116,36 @@ fn string_to_tts_macro() {
 #[test]
 fn string_to_tts_1() {
     create_default_session_globals_then(|| {
-        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
+        let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
 
         let expected = TokenStream::new(vec![
             TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)),
-            TokenTree::token_alone(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
+            TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
             TokenTree::Delimited(
-                DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
+                DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
+                // `JointHidden` because the `(` is followed immediately by
+                // `b`, `Alone` because the `)` is followed by whitespace.
+                DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
                 Delimiter::Parenthesis,
                 TokenStream::new(vec![
-                    TokenTree::token_alone(token::Ident(Symbol::intern("b"), false), sp(6, 7)),
-                    TokenTree::token_alone(token::Colon, sp(8, 9)),
-                    TokenTree::token_alone(token::Ident(sym::i32, false), sp(10, 13)),
+                    TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)),
+                    TokenTree::token_alone(token::Colon, sp(6, 7)),
+                    // `JointHidden` because the `i32` is immediately followed by the `)`.
+                    TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)),
                 ])
                 .into(),
             ),
             TokenTree::Delimited(
-                DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
+                DelimSpan::from_pair(sp(13, 14), sp(18, 19)),
+                // First `Alone` because the `{` is followed by whitespace,
+                // second `Alone` because the `}` is followed immediately by
+                // EOF.
+                DelimSpacing::new(Spacing::Alone, Spacing::Alone),
                 Delimiter::Brace,
                 TokenStream::new(vec![
-                    TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(17, 18)),
-                    TokenTree::token_alone(token::Semi, sp(18, 19)),
+                    TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)),
+                    // `Alone` because the `;` is followed by whitespace.
+                    TokenTree::token_alone(token::Semi, sp(16, 17)),
                 ])
                 .into(),
             ),
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index b057a645f81..5308e338d7f 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -5,7 +5,7 @@ use pm::bridge::{
 use pm::{Delimiter, Level};
 use rustc_ast as ast;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{self, Spacing::*, TokenStream};
+use rustc_ast::tokenstream::{self, DelimSpacing, Spacing, TokenStream};
 use rustc_ast::util::literal::escape_byte_str_symbol;
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
@@ -98,7 +98,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
 
         while let Some(tree) = cursor.next() {
             let (Token { kind, span }, joint) = match tree.clone() {
-                tokenstream::TokenTree::Delimited(span, delim, tts) => {
+                tokenstream::TokenTree::Delimited(span, _, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
                     trees.push(TokenTree::Group(Group {
                         delimiter,
@@ -111,7 +111,22 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                     }));
                     continue;
                 }
-                tokenstream::TokenTree::Token(token, spacing) => (token, spacing == Joint),
+                tokenstream::TokenTree::Token(token, spacing) => {
+                    // Do not be tempted to check here that the `spacing`
+                    // values are "correct" w.r.t. the token stream (e.g. that
+                    // `Spacing::Joint` is actually followed by a `Punct` token
+                    // tree). Because the problem in #76399 was introduced that
+                    // way.
+                    //
+                    // This is where the `Hidden` in `JointHidden` applies,
+                    // because the jointness is effectively hidden from proc
+                    // macros.
+                    let joint = match spacing {
+                        Spacing::Alone | Spacing::JointHidden => false,
+                        Spacing::Joint => true,
+                    };
+                    (token, joint)
+                }
             };
 
             // Split the operator into one or more `Punct`s, one per character.
@@ -133,7 +148,8 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
                     } else {
                         span
                     };
-                    TokenTree::Punct(Punct { ch, joint: if is_final { joint } else { true }, span })
+                    let joint = if is_final { joint } else { true };
+                    TokenTree::Punct(Punct { ch, joint, span })
                 }));
             };
 
@@ -268,6 +284,11 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
     fn to_internal(self) -> SmallVec<[tokenstream::TokenTree; 2]> {
         use rustc_ast::token::*;
 
+        // The code below is conservative, using `token_alone`/`Spacing::Alone`
+        // in most places. When the resulting code is pretty-printed by
+        // `print_tts` it ends up with spaces between most tokens, which is
+        // safe but ugly. It's hard in general to do better when working at the
+        // token level.
         let (tree, rustc) = self;
         match tree {
             TokenTree::Punct(Punct { ch, joint, span }) => {
@@ -296,6 +317,11 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                     b'\'' => SingleQuote,
                     _ => unreachable!(),
                 };
+                // We never produce `token::Spacing::JointHidden` here, which
+                // means the pretty-printing of code produced by proc macros is
+                // ugly, with lots of whitespace between tokens. This is
+                // unavoidable because `proc_macro::Spacing` only applies to
+                // `Punct` token trees.
                 smallvec![if joint {
                     tokenstream::TokenTree::token_joint(kind, span)
                 } else {
@@ -305,6 +331,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
             TokenTree::Group(Group { delimiter, stream, span: DelimSpan { open, close, .. } }) => {
                 smallvec![tokenstream::TokenTree::Delimited(
                     tokenstream::DelimSpan { open, close },
+                    DelimSpacing::new(Spacing::Alone, Spacing::Alone),
                     delimiter.to_internal(),
                     stream.unwrap_or_default(),
                 )]
@@ -322,7 +349,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::token_alone(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
                 let b = tokenstream::TokenTree::token_alone(integer, span);
                 smallvec![a, b]
             }
@@ -335,7 +362,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::token_alone(minus, span);
+                let a = tokenstream::TokenTree::token_joint_hidden(minus, span);
                 let b = tokenstream::TokenTree::token_alone(float, span);
                 smallvec![a, b]
             }
@@ -546,7 +573,10 @@ impl server::TokenStream for Rustc<'_, '_> {
                         Ok(Self::TokenStream::from_iter([
                             // FIXME: The span of the `-` token is lost when
                             // parsing, so we cannot faithfully recover it here.
-                            tokenstream::TokenTree::token_alone(token::BinOp(token::Minus), e.span),
+                            tokenstream::TokenTree::token_joint_hidden(
+                                token::BinOp(token::Minus),
+                                e.span,
+                            ),
                             tokenstream::TokenTree::token_alone(token::Literal(*token_lit), e.span),
                         ]))
                     }
diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs
index 930e29f2389..32eb67ba3a1 100644
--- a/compiler/rustc_lint/src/builtin.rs
+++ b/compiler/rustc_lint/src/builtin.rs
@@ -1835,7 +1835,7 @@ impl KeywordIdents {
                         self.check_ident_token(cx, UnderMacro(true), ident);
                     }
                 }
-                TokenTree::Delimited(_, _, tts) => self.check_tokens(cx, tts),
+                TokenTree::Delimited(.., tts) => self.check_tokens(cx, tts),
             }
         }
     }
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 92df2da8710..b1dc1f98777 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -67,7 +67,7 @@ pub(crate) fn parse_token_trees<'a>(
     let (stream, res, unmatched_delims) =
         tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
     match res {
-        Ok(()) if unmatched_delims.is_empty() => Ok(stream),
+        Ok(_open_spacing) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
             // Return error if there are unmatched delimiters or unclosed delimiters.
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index e646f5dfd85..8cbadc26635 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -3,7 +3,7 @@ use super::diagnostics::same_indentation_level;
 use super::diagnostics::TokenTreeDiagInfo;
 use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
 use rustc_errors::{Applicability, PErr};
 use rustc_span::symbol::kw;
@@ -25,54 +25,46 @@ impl<'a> TokenTreesReader<'a> {
             token: Token::dummy(),
             diag_info: TokenTreeDiagInfo::default(),
         };
-        let (stream, res) = tt_reader.parse_token_trees(/* is_delimited */ false);
+        let (_open_spacing, stream, res) =
+            tt_reader.parse_token_trees(/* is_delimited */ false);
         (stream, res, tt_reader.diag_info.unmatched_delims)
     }
 
-    // Parse a stream of tokens into a list of `TokenTree`s.
+    // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
+    // the result is that of the opening delimiter.
     fn parse_token_trees(
         &mut self,
         is_delimited: bool,
-    ) -> (TokenStream, Result<(), Vec<PErr<'a>>>) {
-        self.token = self.string_reader.next_token().0;
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'a>>>) {
+        // Move past the opening delimiter.
+        let (_, open_spacing) = self.bump(false);
+
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
                 token::OpenDelim(delim) => {
                     buf.push(match self.parse_token_tree_open_delim(delim) {
                         Ok(val) => val,
-                        Err(errs) => return (TokenStream::new(buf), Err(errs)),
+                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
                     })
                 }
                 token::CloseDelim(delim) => {
                     return (
+                        open_spacing,
                         TokenStream::new(buf),
                         if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
                     );
                 }
                 token::Eof => {
                     return (
+                        open_spacing,
                         TokenStream::new(buf),
                         if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
                     );
                 }
                 _ => {
-                    // Get the next normal token. This might require getting multiple adjacent
-                    // single-char tokens and joining them together.
-                    let (this_spacing, next_tok) = loop {
-                        let (next_tok, is_next_tok_preceded_by_whitespace) =
-                            self.string_reader.next_token();
-                        if is_next_tok_preceded_by_whitespace {
-                            break (Spacing::Alone, next_tok);
-                        } else if let Some(glued) = self.token.glue(&next_tok) {
-                            self.token = glued;
-                        } else {
-                            let this_spacing =
-                                if next_tok.is_punct() { Spacing::Joint } else { Spacing::Alone };
-                            break (this_spacing, next_tok);
-                        }
-                    };
-                    let this_tok = std::mem::replace(&mut self.token, next_tok);
+                    // Get the next normal token.
+                    let (this_tok, this_spacing) = self.bump(true);
                     buf.push(TokenTree::Token(this_tok, this_spacing));
                 }
             }
@@ -116,7 +108,7 @@ impl<'a> TokenTreesReader<'a> {
         // Parse the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let (tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
         if let Err(errs) = res {
             return Err(self.unclosed_delim_err(tts, errs));
         }
@@ -125,7 +117,7 @@ impl<'a> TokenTreesReader<'a> {
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
         let sm = self.string_reader.sess.source_map();
 
-        match self.token.kind {
+        let close_spacing = match self.token.kind {
             // Correct delimiter.
             token::CloseDelim(close_delim) if close_delim == open_delim => {
                 let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
@@ -147,7 +139,7 @@ impl<'a> TokenTreesReader<'a> {
                 }
 
                 // Move past the closing delimiter.
-                self.token = self.string_reader.next_token().0;
+                self.bump(false).1
             }
             // Incorrect delimiter.
             token::CloseDelim(close_delim) => {
@@ -191,18 +183,50 @@ impl<'a> TokenTreesReader<'a> {
                 //     bar(baz(
                 // }  // Incorrect delimiter but matches the earlier `{`
                 if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
-                    self.token = self.string_reader.next_token().0;
+                    self.bump(false).1
+                } else {
+                    // The choice of value here doesn't matter.
+                    Spacing::Alone
                 }
             }
             token::Eof => {
                 // Silently recover, the EOF token will be seen again
                 // and an error emitted then. Thus we don't pop from
-                // self.open_braces here.
+                // self.open_braces here. The choice of spacing value here
+                // doesn't matter.
+                Spacing::Alone
             }
             _ => unreachable!(),
-        }
+        };
+
+        let spacing = DelimSpacing::new(open_spacing, close_spacing);
 
-        Ok(TokenTree::Delimited(delim_span, open_delim, tts))
+        Ok(TokenTree::Delimited(delim_span, spacing, open_delim, tts))
+    }
+
+    // Move on to the next token, returning the current token and its spacing.
+    // Will glue adjacent single-char tokens together if `glue` is set.
+    fn bump(&mut self, glue: bool) -> (Token, Spacing) {
+        let (this_spacing, next_tok) = loop {
+            let (next_tok, is_next_tok_preceded_by_whitespace) = self.string_reader.next_token();
+
+            if is_next_tok_preceded_by_whitespace {
+                break (Spacing::Alone, next_tok);
+            } else if glue && let Some(glued) = self.token.glue(&next_tok) {
+                self.token = glued;
+            } else {
+                let this_spacing = if next_tok.is_punct() {
+                    Spacing::Joint
+                } else if next_tok.kind == token::Eof {
+                    Spacing::Alone
+                } else {
+                    Spacing::JointHidden
+                };
+                break (this_spacing, next_tok);
+            }
+        };
+        let this_tok = std::mem::replace(&mut self.token, next_tok);
+        (this_tok, this_spacing)
     }
 
     fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index c66a7176aab..5e8447030f1 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,7 +1,7 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream};
-use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;
@@ -389,7 +389,7 @@ fn make_token_stream(
     #[derive(Debug)]
     struct FrameData {
         // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(Delimiter, Span)>,
+        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
         inner: Vec<AttrTokenTree>,
     }
     let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
@@ -397,21 +397,23 @@ fn make_token_stream(
     while let Some((token, spacing)) = token_and_spacing {
         match token {
             FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
-                stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
+                stack
+                    .push(FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] });
             }
             FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
                 let frame_data = stack
                     .pop()
                     .unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}"));
 
-                let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
+                let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
                 assert_eq!(
                     open_delim, delim,
                     "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                 );
                 let dspan = DelimSpan::from_pair(open_sp, span);
+                let dspacing = DelimSpacing::new(open_spacing, spacing);
                 let stream = AttrTokenStream::new(frame_data.inner);
-                let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
+                let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
                 stack
                     .last_mut()
                     .unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}"))
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 406a6def019..3c0627526be 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -2276,7 +2276,7 @@ impl<'a> Parser<'a> {
         }
 
         if self.token.kind == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((_, Delimiter::Parenthesis, _)))
+            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 7a306823ed4..2baedb2766f 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -20,7 +20,7 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
@@ -130,7 +130,7 @@ pub struct Parser<'a> {
     pub sess: &'a ParseSess,
     /// The current token.
     pub token: Token,
-    /// The spacing for the current token
+    /// The spacing for the current token.
     pub token_spacing: Spacing,
     /// The previous token.
     pub prev_token: Token,
@@ -240,7 +240,7 @@ struct TokenCursor {
     // Token streams surrounding the current one. The delimiters for stack[n]'s
     // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
     // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
+    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
 }
 
 impl TokenCursor {
@@ -264,24 +264,31 @@ impl TokenCursor {
                         ));
                         return (token.clone(), spacing);
                     }
-                    &TokenTree::Delimited(sp, delim, ref tts) => {
+                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
                         let trees = tts.clone().into_trees();
-                        self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
+                        self.stack.push((
+                            mem::replace(&mut self.tree_cursor, trees),
+                            sp,
+                            spacing,
+                            delim,
+                        ));
                         if delim != Delimiter::Invisible {
-                            return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
+                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
+            } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
                 // We have exhausted this token stream. Move back to its parent token stream.
                 self.tree_cursor = tree_cursor;
                 if delim != Delimiter::Invisible {
-                    return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
+                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
                 // No close delimiter to return; continue on to the next iteration.
             } else {
-                // We have exhausted the outermost token stream.
+                // We have exhausted the outermost token stream. The use of
+                // `Spacing::Alone` is arbitrary and immaterial, because the
+                // `Eof` token's spacing is never used.
                 return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
             }
         }
@@ -699,8 +706,8 @@ impl<'a> Parser<'a> {
                 // is not needed (we'll capture the entire 'glued' token),
                 // and `bump` will set this field to `None`
                 self.break_last_token = true;
-                // Use the spacing of the glued token as the spacing
-                // of the unglued second token.
+                // Use the spacing of the glued token as the spacing of the
+                // unglued second token.
                 self.bump_with((Token::new(second, second_span), self.token_spacing));
                 true
             }
@@ -1068,7 +1075,7 @@ impl<'a> Parser<'a> {
             return looker(&self.token);
         }
 
-        if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
+        if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
             && delim != Delimiter::Invisible
         {
             // We are not in the outermost token stream, and the token stream
@@ -1077,7 +1084,7 @@ impl<'a> Parser<'a> {
             let tree_cursor = &self.token_cursor.tree_cursor;
             let all_normal = (0..dist).all(|i| {
                 let token = tree_cursor.look_ahead(i);
-                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
+                !matches!(token, Some(TokenTree::Delimited(.., Delimiter::Invisible, _)))
             });
             if all_normal {
                 // There were no skipped delimiters. Do lookahead by plain indexing.
@@ -1086,7 +1093,7 @@ impl<'a> Parser<'a> {
                         // Indexing stayed within the current token stream.
                         match tree {
                             TokenTree::Token(token, _) => looker(token),
-                            TokenTree::Delimited(dspan, delim, _) => {
+                            TokenTree::Delimited(dspan, _, delim, _) => {
                                 looker(&Token::new(token::OpenDelim(*delim), dspan.open))
                             }
                         }
@@ -1264,7 +1271,7 @@ impl<'a> Parser<'a> {
             || self.check(&token::OpenDelim(Delimiter::Brace));
 
         delimited.then(|| {
-            let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else {
+            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
                 unreachable!()
             };
             DelimArgs { dspan, delim, tokens }
@@ -1288,7 +1295,7 @@ impl<'a> Parser<'a> {
             token::OpenDelim(..) => {
                 // Grab the tokens within the delimiters.
                 let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
+                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1308,12 +1315,13 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, delim, stream)
+                TokenTree::Delimited(span, spacing, delim, stream)
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
+                let prev_spacing = self.token_spacing;
                 self.bump();
-                TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
+                TokenTree::Token(self.prev_token.clone(), prev_spacing)
             }
         }
     }
diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs
index d3f1fba9369..7ebb6810b3e 100644
--- a/library/proc_macro/src/lib.rs
+++ b/library/proc_macro/src/lib.rs
@@ -925,13 +925,12 @@ impl !Sync for Punct {}
 pub enum Spacing {
     /// A `Punct` token can join with the following token to form a multi-character operator.
     ///
-    /// In token streams constructed using proc macro interfaces `Joint` punctuation tokens can be
-    /// followed by any other tokens. \
-    /// However, in token streams parsed from source code compiler will only set spacing to `Joint`
-    /// in the following cases:
-    /// - A `Punct` is immediately followed by another `Punct` without a whitespace. \
-    ///   E.g. `+` is `Joint` in `+=` and `++`.
-    /// - A single quote `'` is immediately followed by an identifier without a whitespace. \
+    /// In token streams constructed using proc macro interfaces, `Joint` punctuation tokens can be
+    /// followed by any other tokens. However, in token streams parsed from source code, the
+    /// compiler will only set spacing to `Joint` in the following cases.
+    /// - When a `Punct` is immediately followed by another `Punct` without a whitespace. E.g. `+`
+    ///   is `Joint` in `+=` and `++`.
+    /// - When a single quote `'` is immediately followed by an identifier without a whitespace.
     ///   E.g. `'` is `Joint` in `'lifetime`.
     ///
     /// This list may be extended in the future to enable more token combinations.
@@ -939,11 +938,10 @@ pub enum Spacing {
     Joint,
     /// A `Punct` token cannot join with the following token to form a multi-character operator.
     ///
-    /// `Alone` punctuation tokens can be followed by any other tokens. \
-    /// In token streams parsed from source code compiler will set spacing to `Alone` in all cases
-    /// not covered by the conditions for `Joint` above. \
-    /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
-    /// In particular, token not followed by anything  will also be marked as `Alone`.
+    /// `Alone` punctuation tokens can be followed by any other tokens. In token streams parsed
+    /// from source code, the compiler will set spacing to `Alone` in all cases not covered by the
+    /// conditions for `Joint` above. E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`. In
+    /// particular, tokens not followed by anything will be marked as `Alone`.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     Alone,
 }
@@ -978,8 +976,8 @@ impl Punct {
     }
 
     /// Returns the spacing of this punctuation character, indicating whether it can be potentially
-    /// combined into a multi-character operator with the following token (`Joint`), or the operator
-    /// has certainly ended (`Alone`).
+    /// combined into a multi-character operator with the following token (`Joint`), or whether the
+    /// operator has definitely ended (`Alone`).
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn spacing(&self) -> Spacing {
         if self.0.joint { Spacing::Joint } else { Spacing::Alone }
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index 66d10f2368b..605f9e496c7 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -40,7 +40,7 @@ pub(super) fn render_macro_matcher(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Stri
     printer.zerobreak();
     printer.ibox(0);
     match matcher {
-        TokenTree::Delimited(_span, _delim, tts) => print_tts(&mut printer, tts),
+        TokenTree::Delimited(_span, _spacing, _delim, tts) => print_tts(&mut printer, tts),
         // Matcher which is not a Delimited is unexpected and should've failed
         // to compile, but we render whatever it is wrapped in parens.
         TokenTree::Token(..) => print_tt(&mut printer, matcher),
@@ -97,7 +97,7 @@ fn print_tt(printer: &mut Printer<'_>, tt: &TokenTree) {
                 printer.hardbreak()
             }
         }
-        TokenTree::Delimited(_span, delim, tts) => {
+        TokenTree::Delimited(_span, _spacing, delim, tts) => {
             let open_delim = printer.token_kind_to_string(&token::OpenDelim(*delim));
             printer.word(open_delim);
             if !tts.is_empty() {
@@ -158,7 +158,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
                 (_, token::Pound) => (true, Pound),
                 (_, _) => (true, Other),
             },
-            TokenTree::Delimited(_, delim, _) => match (state, delim) {
+            TokenTree::Delimited(.., delim, _) => match (state, delim) {
                 (Dollar, Delimiter::Parenthesis) => (false, DollarParen),
                 (Pound | PoundBang, Delimiter::Bracket) => (false, Other),
                 (Ident, Delimiter::Parenthesis | Delimiter::Bracket) => (false, Other),
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
index d4828778be2..b1aa472aa03 100644
--- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -92,7 +92,7 @@ fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
         {
             return Some(span);
         }
-        if let TokenTree::Delimited(_, _, tts) = &curr {
+        if let TokenTree::Delimited(.., tts) = &curr {
             let span = contains_unhygienic_crate_reference(tts);
             if span.is_some() {
                 return span;
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index 76553466e48..b4c58d2fefb 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -708,7 +708,7 @@ struct MacroArgParser {
 fn last_tok(tt: &TokenTree) -> Token {
     match *tt {
         TokenTree::Token(ref t, _) => t.clone(),
-        TokenTree::Delimited(delim_span, delim, _) => Token {
+        TokenTree::Delimited(delim_span, _, delim, _) => Token {
             kind: TokenKind::CloseDelim(delim),
             span: delim_span.close,
         },
@@ -925,7 +925,7 @@ impl MacroArgParser {
                     self.add_meta_variable(&mut iter)?;
                 }
                 TokenTree::Token(ref t, _) => self.update_buffer(t),
-                &TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
+                &TokenTree::Delimited(_dspan, _spacing, delimited, ref tts) => {
                     if !self.buf.is_empty() {
                         if next_space(&self.last_tok.kind) == SpaceState::Always {
                             self.add_separator();
@@ -1167,7 +1167,7 @@ impl<'a> MacroParser<'a> {
         let tok = self.toks.next()?;
         let (lo, args_paren_kind) = match tok {
             TokenTree::Token(..) => return None,
-            &TokenTree::Delimited(delimited_span, d, _) => (delimited_span.open.lo(), d),
+            &TokenTree::Delimited(delimited_span, _, d, _) => (delimited_span.open.lo(), d),
         };
         let args = TokenStream::new(vec![tok.clone()]);
         match self.toks.next()? {
diff --git a/tests/pretty/cast-lt.pp b/tests/pretty/cast-lt.pp
index e6c4d195691..61cd8f59712 100644
--- a/tests/pretty/cast-lt.pp
+++ b/tests/pretty/cast-lt.pp
@@ -8,6 +8,6 @@ extern crate std;
 // pretty-mode:expanded
 // pp-exact:cast-lt.pp
 
-macro_rules! negative { ($e : expr) => { $e < 0 } }
+macro_rules! negative { ($e:expr) => { $e < 0 } }
 
 fn main() { (1 as i32) < 0; }
diff --git a/tests/run-make/rustc-macro-dep-files/foo.rs b/tests/run-make/rustc-macro-dep-files/foo.rs
index 66db1a21736..00b1c26d43f 100644
--- a/tests/run-make/rustc-macro-dep-files/foo.rs
+++ b/tests/run-make/rustc-macro-dep-files/foo.rs
@@ -7,6 +7,6 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(A)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert!(input.contains("struct A ;"));
+    assert!(input.contains("struct A;"));
     "struct B;".parse().unwrap()
 }
diff --git a/tests/ui/async-await/issues/issue-60674.stdout b/tests/ui/async-await/issues/issue-60674.stdout
index 6f980e60664..df129b033d2 100644
--- a/tests/ui/async-await/issues/issue-60674.stdout
+++ b/tests/ui/async-await/issues/issue-60674.stdout
@@ -1,3 +1,3 @@
-async fn f(mut x : u8) {}
-async fn g((mut x, y, mut z) : (u8, u8, u8)) {}
-async fn g(mut x : u8, (a, mut b, c) : (u8, u8, u8), y : u8) {}
+async fn f(mut x: u8) {}
+async fn g((mut x, y, mut z): (u8, u8, u8)) {}
+async fn g(mut x: u8, (a, mut b, c): (u8, u8, u8), y: u8) {}
diff --git a/tests/ui/hygiene/unpretty-debug.stdout b/tests/ui/hygiene/unpretty-debug.stdout
index 51c21043db8..3d686f95df9 100644
--- a/tests/ui/hygiene/unpretty-debug.stdout
+++ b/tests/ui/hygiene/unpretty-debug.stdout
@@ -8,7 +8,7 @@
 #![feature /* 0#0 */(no_core)]
 #![no_core /* 0#0 */]
 
-macro_rules! foo /* 0#0 */ { ($x : ident) => { y + $x } }
+macro_rules! foo /* 0#0 */ { ($x: ident) => { y + $x } }
 
 fn bar /* 0#0 */() {
     let x /* 0#0 */ = 1;
diff --git a/tests/ui/infinite/issue-41731-infinite-macro-print.stderr b/tests/ui/infinite/issue-41731-infinite-macro-print.stderr
index e30b2039d69..71510816d0b 100644
--- a/tests/ui/infinite/issue-41731-infinite-macro-print.stderr
+++ b/tests/ui/infinite/issue-41731-infinite-macro-print.stderr
@@ -14,13 +14,13 @@ LL |     stack!("overflow");
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `stack! { "overflow" }`
-   = note: to `print! (stack! ("overflow")) ;`
+   = note: to `print! (stack! ("overflow"));`
    = note: expanding `print! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))); }`
    = note: expanding `stack! { "overflow" }`
-   = note: to `print! (stack! ("overflow")) ;`
+   = note: to `print! (stack! ("overflow"));`
    = note: expanding `print! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args! (stack! ("overflow"))); }`
 
 error: format argument must be a string literal
   --> $DIR/issue-41731-infinite-macro-print.rs:14:5
diff --git a/tests/ui/infinite/issue-41731-infinite-macro-println.stderr b/tests/ui/infinite/issue-41731-infinite-macro-println.stderr
index 66b466dafa0..645176d45cb 100644
--- a/tests/ui/infinite/issue-41731-infinite-macro-println.stderr
+++ b/tests/ui/infinite/issue-41731-infinite-macro-println.stderr
@@ -14,13 +14,13 @@ LL |     stack!("overflow");
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `stack! { "overflow" }`
-   = note: to `println! (stack! ("overflow")) ;`
+   = note: to `println! (stack! ("overflow"));`
    = note: expanding `println! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))); }`
    = note: expanding `stack! { "overflow" }`
-   = note: to `println! (stack! ("overflow")) ;`
+   = note: to `println! (stack! ("overflow"));`
    = note: expanding `println! { stack! ("overflow") }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! (stack! ("overflow"))); }`
 
 error: format argument must be a string literal
   --> $DIR/issue-41731-infinite-macro-println.rs:14:5
diff --git a/tests/ui/macros/stringify.rs b/tests/ui/macros/stringify.rs
index c0eb7f01fdb..d65f7a8ea8c 100644
--- a/tests/ui/macros/stringify.rs
+++ b/tests/ui/macros/stringify.rs
@@ -59,19 +59,13 @@ fn test_block() {
     c1!(block, [ {} ], "{}");
     c1!(block, [ { true } ], "{ true }");
     c1!(block, [ { return } ], "{ return }");
-    c2!(block, [ {
-            return;
-        } ],
-        "{ return; }",
-        "{ return ; }"
-    );
-    c2!(block,
+    c1!(block, [ { return; } ], "{ return; }");
+    c1!(block,
         [ {
             let _;
             true
         } ],
-        "{ let _; true }",
-        "{ let _ ; true }"
+        "{ let _; true }"
     );
 }
 
@@ -88,17 +82,18 @@ fn test_expr() {
 
     // ExprKind::Call
     c1!(expr, [ f() ], "f()");
-    c2!(expr, [ f::<u8>() ], "f::<u8>()", "f :: < u8 > ()");
-    c2!(expr, [ f::<1>() ], "f::<1>()", "f :: < 1 > ()");
-    c2!(expr, [ f::<'a, u8, 1>() ], "f::<'a, u8, 1>()", "f :: < 'a, u8, 1 > ()");
+    c1!(expr, [ f::<u8>() ], "f::<u8>()");
+    c2!(expr, [ f ::  < u8>( ) ], "f::<u8>()", "f :: < u8>()");
+    c1!(expr, [ f::<1>() ], "f::<1>()");
+    c1!(expr, [ f::<'a, u8, 1>() ], "f::<'a, u8, 1>()");
     c1!(expr, [ f(true) ], "f(true)");
     c2!(expr, [ f(true,) ], "f(true)", "f(true,)");
-    c2!(expr, [ ()() ], "()()", "() ()");
+    c1!(expr, [ ()() ], "()()");
 
     // ExprKind::MethodCall
     c1!(expr, [ x.f() ], "x.f()");
-    c2!(expr, [ x.f::<u8>() ], "x.f::<u8>()", "x.f :: < u8 > ()");
-    c2!(expr, [ x.collect::<Vec<_>>() ], "x.collect::<Vec<_>>()", "x.collect :: < Vec < _ >> ()");
+    c1!(expr, [ x.f::<u8>() ], "x.f::<u8>()");
+    c1!(expr, [ x.collect::<Vec<_>>() ], "x.collect::<Vec<_>>()");
 
     // ExprKind::Tup
     c1!(expr, [ () ], "()");
@@ -110,18 +105,14 @@ fn test_expr() {
     c1!(expr, [ true || false ], "true || false");
     c1!(expr, [ true || false && false ], "true || false && false");
     c1!(expr, [ a < 1 && 2 < b && c > 3 && 4 > d ], "a < 1 && 2 < b && c > 3 && 4 > d");
-    c2!(expr, [ a & b & !c ], "a & b & !c", "a & b &! c"); // FIXME
-    c2!(expr,
-        [ a + b * c - d + -1 * -2 - -3],
-        "a + b * c - d + -1 * -2 - -3",
-        "a + b * c - d + - 1 * - 2 - - 3"
-    );
-    c2!(expr, [ x = !y ], "x = !y", "x =! y"); // FIXME
+    c2!(expr, [ a & b & !c ], "a & b & !c", "a & b &!c"); // FIXME
+    c1!(expr, [ a + b * c - d + -1 * -2 - -3], "a + b * c - d + -1 * -2 - -3");
+    c2!(expr, [ x = !y ], "x = !y", "x =!y"); // FIXME
 
     // ExprKind::Unary
-    c2!(expr, [ *expr ], "*expr", "* expr");
-    c2!(expr, [ !expr ], "!expr", "! expr");
-    c2!(expr, [ -expr ], "-expr", "- expr");
+    c1!(expr, [ *expr ], "*expr");
+    c1!(expr, [ !expr ], "!expr");
+    c1!(expr, [ -expr ], "-expr");
 
     // ExprKind::Lit
     c1!(expr, [ 'x' ], "'x'");
@@ -130,7 +121,7 @@ fn test_expr() {
 
     // ExprKind::Cast
     c1!(expr, [ expr as T ], "expr as T");
-    c2!(expr, [ expr as T<u8> ], "expr as T<u8>", "expr as T < u8 >");
+    c1!(expr, [ expr as T<u8> ], "expr as T<u8>");
 
     // ExprKind::Type: there is no syntax for type ascription.
 
@@ -139,12 +130,8 @@ fn test_expr() {
 
     // ExprKind::If
     c1!(expr, [ if true {} ], "if true {}");
-    c2!(expr, [ if !true {} ], "if !true {}", "if! true {}"); // FIXME
-    c2!(expr,
-        [ if ::std::blah() { } else { } ],
-        "if ::std::blah() {} else {}",
-        "if :: std :: blah() {} else {}"
-    );
+    c2!(expr, [ if !true {} ], "if !true {}", "if!true {}"); // FIXME
+    c1!(expr, [ if ::std::blah() { } else { } ], "if ::std::blah() {} else {}");
     c1!(expr, [ if let true = true {} else {} ], "if let true = true {} else {}");
     c1!(expr,
         [ if true {
@@ -159,7 +146,7 @@ fn test_expr() {
         } ],
         "if true {} else if false {} else {}"
     );
-    c2!(expr,
+    c1!(expr,
         [ if true {
             return;
         } else if false {
@@ -167,22 +154,21 @@ fn test_expr() {
         } else {
             0
         } ],
-        "if true { return; } else if false { 0 } else { 0 }",
-        "if true { return ; } else if false { 0 } else { 0 }"
+        "if true { return; } else if false { 0 } else { 0 }"
     );
 
     // ExprKind::While
     c1!(expr, [ while true {} ], "while true {}");
-    c2!(expr, [ 'a: while true {} ], "'a: while true {}", "'a : while true {}");
+    c1!(expr, [ 'a: while true {} ], "'a: while true {}");
     c1!(expr, [ while let true = true {} ], "while let true = true {}");
 
     // ExprKind::ForLoop
     c1!(expr, [ for _ in x {} ], "for _ in x {}");
-    c2!(expr, [ 'a: for _ in x {} ], "'a: for _ in x {}", "'a : for _ in x {}");
+    c1!(expr, [ 'a: for _ in x {} ], "'a: for _ in x {}");
 
     // ExprKind::Loop
     c1!(expr, [ loop {} ], "loop {}");
-    c2!(expr, [ 'a: loop {} ], "'a: loop {}", "'a : loop {}");
+    c1!(expr, [ 'a: loop {} ], "'a: loop {}");
 
     // ExprKind::Match
     c1!(expr, [ match self {} ], "match self {}");
@@ -202,8 +188,8 @@ fn test_expr() {
 
     // ExprKind::Closure
     c1!(expr, [ || {} ], "|| {}");
-    c2!(expr, [ |x| {} ], "|x| {}", "| x | {}");
-    c2!(expr, [ |x: u8| {} ], "|x: u8| {}", "| x : u8 | {}");
+    c1!(expr, [ |x| {} ], "|x| {}");
+    c1!(expr, [ |x: u8| {} ], "|x: u8| {}");
     c1!(expr, [ || () ], "|| ()");
     c1!(expr, [ move || self ], "move || self");
     c1!(expr, [ async || self ], "async || self");
@@ -218,7 +204,7 @@ fn test_expr() {
     // ExprKind::Block
     c1!(expr, [ {} ], "{}");
     c1!(expr, [ unsafe {} ], "unsafe {}");
-    c2!(expr, [ 'a: {} ], "'a: {}", "'a : {}");
+    c1!(expr, [ 'a: {} ], "'a: {}");
     c1!(expr, [ #[attr] {} ], "#[attr] {}");
     c2!(expr,
         [
@@ -229,7 +215,7 @@ fn test_expr() {
         "{\n\
         \x20   #![attr]\n\
         }",
-        "{ #! [attr] }"
+        "{ #![attr] }"
     );
 
     // ExprKind::Async
@@ -253,34 +239,35 @@ fn test_expr() {
     c1!(expr, [ expr.0 ], "expr.0");
 
     // ExprKind::Index
-    c2!(expr, [ expr[true] ], "expr[true]", "expr [true]");
+    c1!(expr, [ expr[true] ], "expr[true]");
 
     // ExprKind::Range
     c1!(expr, [ .. ], "..");
-    c2!(expr, [ ..hi ], "..hi", ".. hi");
-    c2!(expr, [ lo.. ], "lo..", "lo ..");
-    c2!(expr, [ lo..hi ], "lo..hi", "lo .. hi");
-    c2!(expr, [ ..=hi ], "..=hi", "..= hi");
-    c2!(expr, [ lo..=hi ], "lo..=hi", "lo ..= hi");
-    c2!(expr, [ -2..=-1 ], "-2..=-1", "- 2 ..= - 1");
+    c1!(expr, [ ..hi ], "..hi");
+    c1!(expr, [ lo.. ], "lo..");
+    c1!(expr, [ lo..hi ], "lo..hi");
+    c2!(expr, [ lo .. hi ], "lo..hi", "lo .. hi");
+    c1!(expr, [ ..=hi ], "..=hi");
+    c1!(expr, [ lo..=hi ], "lo..=hi");
+    c1!(expr, [ -2..=-1 ], "-2..=-1");
 
     // ExprKind::Underscore
     // FIXME: todo
 
     // ExprKind::Path
     c1!(expr, [ thing ], "thing");
-    c2!(expr, [ m::thing ], "m::thing", "m :: thing");
-    c2!(expr, [ self::thing ], "self::thing", "self :: thing");
-    c2!(expr, [ crate::thing ], "crate::thing", "crate :: thing");
-    c2!(expr, [ Self::thing ], "Self::thing", "Self :: thing");
-    c2!(expr, [ <Self as T>::thing ], "<Self as T>::thing", "< Self as T > :: thing");
-    c2!(expr, [ Self::<'static> ], "Self::<'static>", "Self :: < 'static >");
+    c1!(expr, [ m::thing ], "m::thing");
+    c1!(expr, [ self::thing ], "self::thing");
+    c1!(expr, [ crate::thing ], "crate::thing");
+    c1!(expr, [ Self::thing ], "Self::thing");
+    c1!(expr, [ <Self as T>::thing ], "<Self as T>::thing");
+    c1!(expr, [ Self::<'static> ], "Self::<'static>");
 
     // ExprKind::AddrOf
-    c2!(expr, [ &expr ], "&expr", "& expr");
-    c2!(expr, [ &mut expr ], "&mut expr", "& mut expr");
-    c2!(expr, [ &raw const expr ], "&raw const expr", "& raw const expr");
-    c2!(expr, [ &raw mut expr ], "&raw mut expr", "& raw mut expr");
+    c1!(expr, [ &expr ], "&expr");
+    c1!(expr, [ &mut expr ], "&mut expr");
+    c1!(expr, [ &raw const expr ], "&raw const expr");
+    c1!(expr, [ &raw mut expr ], "&raw mut expr");
 
     // ExprKind::Break
     c1!(expr, [ break ], "break");
@@ -301,38 +288,30 @@ fn test_expr() {
     // ExprKind::OffsetOf: untestable because this test works pre-expansion.
 
     // ExprKind::MacCall
-    c2!(expr, [ mac!(...) ], "mac!(...)", "mac! (...)");
-    c2!(expr, [ mac![...] ], "mac![...]", "mac! [...]");
+    c1!(expr, [ mac!(...) ], "mac!(...)");
+    c1!(expr, [ mac![...] ], "mac![...]");
     c1!(expr, [ mac! { ... } ], "mac! { ... }");
 
     // ExprKind::Struct
     c1!(expr, [ Struct {} ], "Struct {}");
-    c2!(expr,
-        [ <Struct as Trait>::Type {} ],
-        "<Struct as Trait>::Type {}",
-        "< Struct as Trait > :: Type {}"
-    );
+    c1!(expr, [ <Struct as Trait>::Type {} ], "<Struct as Trait>::Type {}");
     c1!(expr, [ Struct { .. } ], "Struct { .. }");
-    c2!(expr, [ Struct { ..base } ], "Struct { ..base }", "Struct { .. base }");
+    c1!(expr, [ Struct { ..base } ], "Struct { ..base }");
     c1!(expr, [ Struct { x } ], "Struct { x }");
     c1!(expr, [ Struct { x, .. } ], "Struct { x, .. }");
-    c2!(expr, [ Struct { x, ..base } ], "Struct { x, ..base }", "Struct { x, .. base }");
-    c2!(expr, [ Struct { x: true } ], "Struct { x: true }", "Struct { x : true }");
-    c2!(expr, [ Struct { x: true, .. } ], "Struct { x: true, .. }", "Struct { x : true, .. }");
-    c2!(expr,
-        [ Struct { x: true, ..base } ],
-        "Struct { x: true, ..base }",
-        "Struct { x : true, .. base }"
-    );
+    c1!(expr, [ Struct { x, ..base } ], "Struct { x, ..base }");
+    c1!(expr, [ Struct { x: true } ], "Struct { x: true }");
+    c1!(expr, [ Struct { x: true, .. } ], "Struct { x: true, .. }");
+    c1!(expr, [ Struct { x: true, ..base } ], "Struct { x: true, ..base }");
 
     // ExprKind::Repeat
-    c2!(expr, [ [(); 0] ], "[(); 0]", "[() ; 0]");
+    c1!(expr, [ [(); 0] ], "[(); 0]");
 
     // ExprKind::Paren
     c1!(expr, [ (expr) ], "(expr)");
 
     // ExprKind::Try
-    c2!(expr, [ expr? ], "expr?", "expr ?");
+    c1!(expr, [ expr? ], "expr?");
 
     // ExprKind::Yield
     c1!(expr, [ yield ], "yield");
@@ -356,51 +335,42 @@ fn test_expr() {
 #[test]
 fn test_item() {
     // ItemKind::ExternCrate
-    c2!(item, [ extern crate std; ], "extern crate std;", "extern crate std ;");
-    c2!(item,
-        [ pub extern crate self as std; ],
-        "pub extern crate self as std;",
-        "pub extern crate self as std ;"
-    );
+    c1!(item, [ extern crate std; ], "extern crate std;");
+    c1!(item, [ pub extern crate self as std; ], "pub extern crate self as std;");
 
     // ItemKind::Use
     c2!(item,
         [ pub use crate::{a, b::c}; ],
         "pub use crate::{a, b::c};",
-        "pub use crate :: { a, b :: c } ;"
+        "pub use crate::{ a, b::c };" // FIXME
     );
-    c2!(item, [ pub use A::*; ], "pub use A::*;", "pub use A :: * ;");
+    c1!(item, [ pub use A::*; ], "pub use A::*;");
 
     // ItemKind::Static
-    c2!(item, [ pub static S: () = {}; ], "pub static S: () = {};", "pub static S : () = {} ;");
-    c2!(item, [ static mut S: () = {}; ], "static mut S: () = {};", "static mut S : () = {} ;");
-    c2!(item, [ static S: (); ], "static S: ();", "static S : () ;");
-    c2!(item, [ static mut S: (); ], "static mut S: ();", "static mut S : () ;");
+    c1!(item, [ pub static S: () = {}; ], "pub static S: () = {};");
+    c1!(item, [ static mut S: () = {}; ], "static mut S: () = {};");
+    c1!(item, [ static S: (); ], "static S: ();");
+    c1!(item, [ static mut S: (); ], "static mut S: ();");
 
     // ItemKind::Const
-    c2!(item, [ pub const S: () = {}; ], "pub const S: () = {};", "pub const S : () = {} ;");
-    c2!(item, [ const S: (); ], "const S: ();", "const S : () ;");
+    c1!(item, [ pub const S: () = {}; ], "pub const S: () = {};");
+    c1!(item, [ const S: (); ], "const S: ();");
 
     // ItemKind::Fn
     c1!(item,
         [ pub default const async unsafe extern "C" fn f() {} ],
         "pub default const async unsafe extern \"C\" fn f() {}"
     );
-    c2!(item,
-        [ fn g<T>(t: Vec<Vec<Vec<T>>>) {} ],
-        "fn g<T>(t: Vec<Vec<Vec<T>>>) {}",
-        "fn g < T > (t : Vec < Vec < Vec < T >> >) {}"
-    );
-    c2!(item,
+    c1!(item, [ fn g<T>(t: Vec<Vec<Vec<T>>>) {} ], "fn g<T>(t: Vec<Vec<Vec<T>>>) {}");
+    c1!(item,
         [ fn h<'a>(t: &'a Vec<Cell<dyn D>>) {} ],
-        "fn h<'a>(t: &'a Vec<Cell<dyn D>>) {}",
-        "fn h < 'a > (t : & 'a Vec < Cell < dyn D >>) {}"
+        "fn h<'a>(t: &'a Vec<Cell<dyn D>>) {}"
     );
 
     // ItemKind::Mod
-    c2!(item, [ pub mod m; ], "pub mod m;", "pub mod m ;");
+    c1!(item, [ pub mod m; ], "pub mod m;");
     c1!(item, [ mod m {} ], "mod m {}");
-    c2!(item, [ unsafe mod m; ], "unsafe mod m;", "unsafe mod m ;");
+    c1!(item, [ unsafe mod m; ], "unsafe mod m;");
     c1!(item, [ unsafe mod m {} ], "unsafe mod m {}");
 
     // ItemKind::ForeignMod
@@ -423,7 +393,7 @@ fn test_item() {
             = T;
         ],
         "pub default type Type<'a>: Bound where Self: 'a = T;",
-        "pub default type Type < 'a > : Bound where Self : 'a, = T ;"
+        "pub default type Type<'a>: Bound where Self: 'a, = T;"
     );
 
     // ItemKind::Enum
@@ -456,13 +426,13 @@ fn test_item() {
         \x20       t: T,\n\
         \x20   },\n\
         }",
-        "enum Enum < T > where T : 'a, { Unit, Tuple(T), Struct { t : T }, }"
+        "enum Enum<T> where T: 'a, { Unit, Tuple(T), Struct { t: T }, }"
     );
 
     // ItemKind::Struct
-    c2!(item, [ pub struct Unit; ], "pub struct Unit;", "pub struct Unit ;");
-    c2!(item, [ struct Tuple(); ], "struct Tuple();", "struct Tuple() ;");
-    c2!(item, [ struct Tuple(T); ], "struct Tuple(T);", "struct Tuple(T) ;");
+    c1!(item, [ pub struct Unit; ], "pub struct Unit;");
+    c1!(item, [ struct Tuple(); ], "struct Tuple();");
+    c1!(item, [ struct Tuple(T); ], "struct Tuple(T);");
     c1!(item, [ struct Struct {} ], "struct Struct {}");
     c2!(item,
         [
@@ -476,7 +446,7 @@ fn test_item() {
         "struct Struct<T> where T: 'a {\n\
         \x20   t: T,\n\
         }",
-        "struct Struct < T > where T : 'a, { t : T, }"
+        "struct Struct<T> where T: 'a, { t: T, }"
     );
 
     // ItemKind::Union
@@ -490,7 +460,7 @@ fn test_item() {
         "union Union<T> where T: 'a {\n\
         \x20   t: T,\n\
         }",
-        "union Union < T > where T : 'a { t : T, }"
+        "union Union<T> where T: 'a { t: T, }"
     );
 
     // ItemKind::Trait
@@ -504,30 +474,25 @@ fn test_item() {
             }
         ],
         "trait Trait<'a>: Sized where Self: 'a {}",
-        "trait Trait < 'a > : Sized where Self : 'a, {}"
+        "trait Trait<'a>: Sized where Self: 'a, {}"
     );
 
     // ItemKind::TraitAlias
-    c2!(item,
+    c1!(item,
         [ pub trait Trait<T> = Sized where T: 'a; ],
-        "pub trait Trait<T> = Sized where T: 'a;",
-        "pub trait Trait < T > = Sized where T : 'a ;"
+        "pub trait Trait<T> = Sized where T: 'a;"
     );
 
     // ItemKind::Impl
     c1!(item, [ pub impl Struct {} ], "pub impl Struct {}");
-    c2!(item, [ impl<T> Struct<T> {} ], "impl<T> Struct<T> {}", "impl < T > Struct < T > {}");
+    c1!(item, [ impl<T> Struct<T> {} ], "impl<T> Struct<T> {}");
     c1!(item, [ pub impl Trait for Struct {} ], "pub impl Trait for Struct {}");
-    c2!(item,
-        [ impl<T> const Trait for T {} ],
-        "impl<T> const Trait for T {}",
-        "impl < T > const Trait for T {}"
-    );
-    c2!(item, [ impl ~const Struct {} ], "impl ~const Struct {}", "impl ~ const Struct {}");
+    c1!(item, [ impl<T> const Trait for T {} ], "impl<T> const Trait for T {}");
+    c1!(item, [ impl ~const Struct {} ], "impl ~const Struct {}");
 
     // ItemKind::MacCall
-    c2!(item, [ mac!(...); ], "mac!(...);", "mac! (...) ;");
-    c2!(item, [ mac![...]; ], "mac![...];", "mac! [...] ;");
+    c1!(item, [ mac!(...); ], "mac!(...);");
+    c1!(item, [ mac![...]; ], "mac![...];");
     c1!(item, [ mac! { ... } ], "mac! { ... }");
 
     // ItemKind::MacroDef
@@ -537,7 +502,7 @@ fn test_item() {
                 () => {};
             }
         ],
-        "macro_rules! stringify { () => {} ; }"
+        "macro_rules! stringify { () => {}; }"
     );
     c2!(item,
         [ pub macro stringify() {} ],
@@ -551,7 +516,7 @@ fn test_meta() {
     c1!(meta, [ k ], "k");
     c1!(meta, [ k = "v" ], "k = \"v\"");
     c1!(meta, [ list(k1, k2 = "v") ], "list(k1, k2 = \"v\")");
-    c2!(meta, [ serde::k ], "serde::k", "serde :: k");
+    c1!(meta, [ serde::k ], "serde::k");
 }
 
 #[test]
@@ -568,42 +533,35 @@ fn test_pat() {
 
     // PatKind::Struct
     c1!(pat, [ Struct {} ], "Struct {}");
-    c2!(pat, [ Struct::<u8> {} ], "Struct::<u8> {}", "Struct :: < u8 > {}");
-    c2!(pat, [ Struct::<'static> {} ], "Struct::<'static> {}", "Struct :: < 'static > {}");
+    c1!(pat, [ Struct::<u8> {} ], "Struct::<u8> {}");
+    c2!(pat, [ Struct ::< u8 > {} ], "Struct::<u8> {}", "Struct ::< u8 > {}");
+    c1!(pat, [ Struct::<'static> {} ], "Struct::<'static> {}");
     c1!(pat, [ Struct { x } ], "Struct { x }");
-    c2!(pat, [ Struct { x: _x } ], "Struct { x: _x }", "Struct { x : _x }");
+    c1!(pat, [ Struct { x: _x } ], "Struct { x: _x }");
     c1!(pat, [ Struct { .. } ], "Struct { .. }");
     c1!(pat, [ Struct { x, .. } ], "Struct { x, .. }");
-    c2!(pat, [ Struct { x: _x, .. } ], "Struct { x: _x, .. }", "Struct { x : _x, .. }");
-    c2!(pat,
-        [ <Struct as Trait>::Type {} ],
-        "<Struct as Trait>::Type {}",
-        "< Struct as Trait > :: Type {}"
-    );
+    c1!(pat, [ Struct { x: _x, .. } ], "Struct { x: _x, .. }");
+    c1!(pat, [ <Struct as Trait>::Type {} ], "<Struct as Trait>::Type {}");
 
     // PatKind::TupleStruct
     c1!(pat, [ Tuple() ], "Tuple()");
-    c2!(pat, [ Tuple::<u8>() ], "Tuple::<u8>()", "Tuple :: < u8 > ()");
-    c2!(pat, [ Tuple::<'static>() ], "Tuple::<'static>()", "Tuple :: < 'static > ()");
+    c1!(pat, [ Tuple::<u8>() ], "Tuple::<u8>()");
+    c1!(pat, [ Tuple::<'static>() ], "Tuple::<'static>()");
     c1!(pat, [ Tuple(x) ], "Tuple(x)");
     c1!(pat, [ Tuple(..) ], "Tuple(..)");
     c1!(pat, [ Tuple(x, ..) ], "Tuple(x, ..)");
-    c2!(pat,
-        [ <Struct as Trait>::Type() ],
-        "<Struct as Trait>::Type()",
-        "< Struct as Trait > :: Type()"
-    );
+    c1!(pat, [ <Struct as Trait>::Type() ], "<Struct as Trait>::Type()");
 
     // PatKind::Or
     c1!(pat, [ true | false ], "true | false");
     c2!(pat, [ | true ], "true", "| true");
-    c2!(pat, [ |true| false ], "true | false", "| true | false");
+    c2!(pat, [ |true| false ], "true | false", "|true| false");
 
     // PatKind::Path
-    c2!(pat, [ crate::Path ], "crate::Path", "crate :: Path");
-    c2!(pat, [ Path::<u8> ], "Path::<u8>", "Path :: < u8 >");
-    c2!(pat, [ Path::<'static> ], "Path::<'static>", "Path :: < 'static >");
-    c2!(pat, [ <Struct as Trait>::Type ], "<Struct as Trait>::Type", "< Struct as Trait > :: Type");
+    c1!(pat, [ crate::Path ], "crate::Path");
+    c1!(pat, [ Path::<u8> ], "Path::<u8>");
+    c1!(pat, [ Path::<'static> ], "Path::<'static>");
+    c1!(pat, [ <Struct as Trait>::Type ], "<Struct as Trait>::Type");
 
     // PatKind::Tuple
     c1!(pat, [ () ], "()");
@@ -614,18 +572,18 @@ fn test_pat() {
     c1!(pat, [ box pat ], "box pat");
 
     // PatKind::Ref
-    c2!(pat, [ &pat ], "&pat", "& pat");
-    c2!(pat, [ &mut pat ], "&mut pat", "& mut pat");
+    c1!(pat, [ &pat ], "&pat");
+    c1!(pat, [ &mut pat ], "&mut pat");
 
     // PatKind::Lit
     c1!(pat, [ 1_000_i8 ], "1_000_i8");
 
     // PatKind::Range
-    c2!(pat, [ ..1 ], "..1", ".. 1");
-    c2!(pat, [ 0.. ], "0..", "0 ..");
-    c2!(pat, [ 0..1 ], "0..1", "0 .. 1");
-    c2!(pat, [ 0..=1 ], "0..=1", "0 ..= 1");
-    c2!(pat, [ -2..=-1 ], "-2..=-1", "- 2 ..= - 1");
+    c1!(pat, [ ..1 ], "..1");
+    c1!(pat, [ 0.. ], "0..");
+    c1!(pat, [ 0..1 ], "0..1");
+    c1!(pat, [ 0..=1 ], "0..=1");
+    c1!(pat, [ -2..=-1 ], "-2..=-1");
 
     // PatKind::Slice
     c1!(pat, [ [] ], "[]");
@@ -644,20 +602,20 @@ fn test_pat() {
     c1!(pat, [ (pat) ], "(pat)");
 
     // PatKind::MacCall
-    c2!(pat, [ mac!(...) ], "mac!(...)", "mac! (...)");
-    c2!(pat, [ mac![...] ], "mac![...]", "mac! [...]");
+    c1!(pat, [ mac!(...) ], "mac!(...)");
+    c1!(pat, [ mac![...] ], "mac![...]");
     c1!(pat, [ mac! { ... } ], "mac! { ... }");
 }
 
 #[test]
 fn test_path() {
     c1!(path, [ thing ], "thing");
-    c2!(path, [ m::thing ], "m::thing", "m :: thing");
-    c2!(path, [ self::thing ], "self::thing", "self :: thing");
-    c2!(path, [ crate::thing ], "crate::thing", "crate :: thing");
-    c2!(path, [ Self::thing ], "Self::thing", "Self :: thing");
-    c2!(path, [ Self<'static> ], "Self<'static>", "Self < 'static >");
-    c2!(path, [ Self::<'static> ], "Self<'static>", "Self :: < 'static >");
+    c1!(path, [ m::thing ], "m::thing");
+    c1!(path, [ self::thing ], "self::thing");
+    c1!(path, [ crate::thing ], "crate::thing");
+    c1!(path, [ Self::thing ], "Self::thing");
+    c1!(path, [ Self<'static> ], "Self<'static>");
+    c2!(path, [ Self::<'static> ], "Self<'static>", "Self::<'static>");
     c1!(path, [ Self() ], "Self()");
     c1!(path, [ Self() -> () ], "Self() -> ()");
 }
@@ -667,16 +625,16 @@ fn test_stmt() {
     // StmtKind::Local
     c2!(stmt, [ let _ ], "let _;", "let _");
     c2!(stmt, [ let x = true ], "let x = true;", "let x = true");
-    c2!(stmt, [ let x: bool = true ], "let x: bool = true;", "let x : bool = true");
+    c2!(stmt, [ let x: bool = true ], "let x: bool = true;", "let x: bool = true");
     c2!(stmt, [ let (a, b) = (1, 2) ], "let (a, b) = (1, 2);", "let(a, b) = (1, 2)"); // FIXME
     c2!(stmt,
         [ let (a, b): (u32, u32) = (1, 2) ],
         "let (a, b): (u32, u32) = (1, 2);",
-        "let(a, b) : (u32, u32) = (1, 2)"
+        "let(a, b): (u32, u32) = (1, 2)" // FIXME
     );
 
     // StmtKind::Item
-    c2!(stmt, [ struct S; ], "struct S;", "struct S ;");
+    c1!(stmt, [ struct S; ], "struct S;");
     c1!(stmt, [ struct S {} ], "struct S {}");
 
     // StmtKind::Expr
@@ -689,8 +647,8 @@ fn test_stmt() {
     c1!(stmt, [ ; ], ";");
 
     // StmtKind::MacCall
-    c2!(stmt, [ mac!(...) ], "mac!(...)", "mac! (...)");
-    c2!(stmt, [ mac![...] ], "mac![...]", "mac! [...]");
+    c1!(stmt, [ mac!(...) ], "mac!(...)");
+    c1!(stmt, [ mac![...] ], "mac![...]");
     c1!(stmt, [ mac! { ... } ], "mac! { ... }");
 }
 
@@ -700,26 +658,27 @@ fn test_ty() {
     c1!(ty, [ [T] ], "[T]");
 
     // TyKind::Array
-    c2!(ty, [ [T; 0] ], "[T; 0]", "[T ; 0]");
+    c1!(ty, [ [T; 0] ], "[T; 0]");
 
     // TyKind::Ptr
-    c2!(ty, [ *const T ], "*const T", "* const T");
-    c2!(ty, [ *mut T ], "*mut T", "* mut T");
+    c1!(ty, [ *const T ], "*const T");
+    c1!(ty, [ *mut T ], "*mut T");
 
     // TyKind::Ref
-    c2!(ty, [ &T ], "&T", "& T");
-    c2!(ty, [ &mut T ], "&mut T", "& mut T");
-    c2!(ty, [ &'a T ], "&'a T", "& 'a T");
-    c2!(ty, [ &'a mut [T] ], "&'a mut [T]", "& 'a mut [T]");
-    c2!(ty, [ &A<B<C<D<E>>>> ], "&A<B<C<D<E>>>>", "& A < B < C < D < E >> >>");
+    c1!(ty, [ &T ], "&T");
+    c1!(ty, [ &mut T ], "&mut T");
+    c1!(ty, [ &'a T ], "&'a T");
+    c1!(ty, [ &'a mut [T] ], "&'a mut [T]");
+    c1!(ty, [ &A<B<C<D<E>>>> ], "&A<B<C<D<E>>>>");
+    c2!(ty, [ &A<B<C<D<E> > > > ], "&A<B<C<D<E>>>>", "&A<B<C<D<E> > > >");
 
     // TyKind::BareFn
     c1!(ty, [ fn() ], "fn()");
     c1!(ty, [ fn() -> () ], "fn() -> ()");
     c1!(ty, [ fn(u8) ], "fn(u8)");
-    c2!(ty, [ fn(x: u8) ], "fn(x: u8)", "fn(x : u8)");
-    c2!(ty, [ for<> fn() ], "fn()", "for < > fn()");
-    c2!(ty, [ for<'a> fn() ], "for<'a> fn()", "for < 'a > fn()");
+    c1!(ty, [ fn(x: u8) ], "fn(x: u8)");
+    c2!(ty, [ for<> fn() ], "fn()", "for<> fn()");
+    c1!(ty, [ for<'a> fn() ], "for<'a> fn()");
 
     // TyKind::Never
     c1!(ty, [ ! ], "!");
@@ -735,28 +694,28 @@ fn test_ty() {
 
     // TyKind::Path
     c1!(ty, [ T ], "T");
-    c2!(ty, [ Ref<'a> ], "Ref<'a>", "Ref < 'a >");
-    c2!(ty, [ PhantomData<T> ], "PhantomData<T>", "PhantomData < T >");
-    c2!(ty, [ PhantomData::<T> ], "PhantomData<T>", "PhantomData :: < T >");
+    c1!(ty, [ Ref<'a> ], "Ref<'a>");
+    c1!(ty, [ PhantomData<T> ], "PhantomData<T>");
+    c2!(ty, [ PhantomData::<T> ], "PhantomData<T>", "PhantomData::<T>");
     c2!(ty, [ Fn() -> ! ], "Fn() -> !", "Fn() ->!");
     c2!(ty, [ Fn(u8) -> ! ], "Fn(u8) -> !", "Fn(u8) ->!"); // FIXME
-    c2!(ty, [ <Struct as Trait>::Type ], "<Struct as Trait>::Type", "< Struct as Trait > :: Type");
+    c1!(ty, [ <Struct as Trait>::Type ], "<Struct as Trait>::Type");
 
     // TyKind::TraitObject
     c1!(ty, [ dyn Send ], "dyn Send");
     c1!(ty, [ dyn Send + 'a ], "dyn Send + 'a");
     c1!(ty, [ dyn 'a + Send ], "dyn 'a + Send");
-    c2!(ty, [ dyn ?Sized ], "dyn ?Sized", "dyn ? Sized");
-    c2!(ty, [ dyn ~const Clone ], "dyn ~const Clone", "dyn ~ const Clone");
-    c2!(ty, [ dyn for<'a> Send ], "dyn for<'a> Send", "dyn for < 'a > Send");
+    c1!(ty, [ dyn ?Sized ], "dyn ?Sized");
+    c1!(ty, [ dyn ~const Clone ], "dyn ~const Clone");
+    c1!(ty, [ dyn for<'a> Send ], "dyn for<'a> Send");
 
     // TyKind::ImplTrait
     c1!(ty, [ impl Send ], "impl Send");
     c1!(ty, [ impl Send + 'a ], "impl Send + 'a");
     c1!(ty, [ impl 'a + Send ], "impl 'a + Send");
-    c2!(ty, [ impl ?Sized ], "impl ?Sized", "impl ? Sized");
-    c2!(ty, [ impl ~const Clone ], "impl ~const Clone", "impl ~ const Clone");
-    c2!(ty, [ impl for<'a> Send ], "impl for<'a> Send", "impl for < 'a > Send");
+    c1!(ty, [ impl ?Sized ], "impl ?Sized");
+    c1!(ty, [ impl ~const Clone ], "impl ~const Clone");
+    c1!(ty, [ impl for<'a> Send ], "impl for<'a> Send");
 
     // TyKind::Paren
     c1!(ty, [ (T) ], "(T)");
@@ -769,8 +728,8 @@ fn test_ty() {
     // TyKind::ImplicitSelf: there is no syntax for this.
 
     // TyKind::MacCall
-    c2!(ty, [ mac!(...) ], "mac!(...)", "mac! (...)");
-    c2!(ty, [ mac![...] ], "mac![...]", "mac! [...]");
+    c1!(ty, [ mac!(...) ], "mac!(...)");
+    c1!(ty, [ mac![...] ], "mac![...]");
     c1!(ty, [ mac! { ... } ], "mac! { ... }");
 
     // TyKind::Err: untestable.
@@ -791,13 +750,13 @@ fn test_vis() {
     c2!(vis, [ pub(in crate) ], "pub(in crate) ", "pub(in crate)");
     c2!(vis, [ pub(in self) ], "pub(in self) ", "pub(in self)");
     c2!(vis, [ pub(in super) ], "pub(in super) ", "pub(in super)");
-    c2!(vis, [ pub(in path::to) ], "pub(in path::to) ", "pub(in path :: to)");
-    c2!(vis, [ pub(in ::path::to) ], "pub(in ::path::to) ", "pub(in :: path :: to)");
-    c2!(vis, [ pub(in self::path::to) ], "pub(in self::path::to) ", "pub(in self :: path :: to)");
+    c2!(vis, [ pub(in path::to) ], "pub(in path::to) ", "pub(in path::to)");
+    c2!(vis, [ pub(in ::path::to) ], "pub(in ::path::to) ", "pub(in ::path::to)");
+    c2!(vis, [ pub(in self::path::to) ], "pub(in self::path::to) ", "pub(in self::path::to)");
     c2!(vis,
         [ pub(in super::path::to) ],
         "pub(in super::path::to) ",
-        "pub(in super :: path :: to)"
+        "pub(in super::path::to)"
     );
 
     // VisibilityKind::Inherited
@@ -815,7 +774,7 @@ macro_rules! p {
 
 #[test]
 fn test_punct() {
-    // For all these cases, we must preserve spaces between the tokens.
+    // For all these cases, we should preserve spaces between the tokens.
     // Otherwise, any old proc macro that parses pretty-printed code might glue
     // together tokens that shouldn't be glued.
     p!([ = = < < <= <= == == != != >= >= > > ], "= = < < <= <= == == != != >= >= > >");
@@ -828,10 +787,7 @@ fn test_punct() {
     p!([ + + += += - - -= -= * * *= *= / / /= /= ], "+ + += += - - -= -= * * *= *= / / /= /=");
     p!([ % % %= %= ^ ^ ^= ^= << << <<= <<= >> >> >>= >>= ],
         "% % %= %= ^ ^ ^= ^= << << <<= <<= >> >> >>= >>=");
-
-    // For these one we must insert spaces between adjacent tokens, again due
-    // to proc macros.
-    p!([ +! ?= |> >>@ --> <-- $$ =====> ], "+! ? = | > >> @ - -> <- - $$== == =>"); // FIXME
-    p!([ ,; ;, ** @@ $+$ >< <> ?? +== ], ", ; ;, * * @ @ $+ $> < < > ? ? += ="); // FIXME
-    p!([ :#!@|$=&*,+;*~? ], ": #! @ | $= & *, + ; * ~ ?"); // FIXME
+    p!([ +! ?= |> >>@ --> <-- $$ =====> ], "+! ?= |> >>@ --> <-- $$=====>");
+    p!([ ,; ;, ** @@ $+$ >< <> ?? +== ], ",; ;, ** @@ $+$>< <> ?? +=="); // FIXME: `$ >` -> `$>`
+    p!([ :#!@|$=&*,+;*~? ], ":#!@|$=&*,+;*~?");
 }
diff --git a/tests/ui/macros/syntax-extension-source-utils.rs b/tests/ui/macros/syntax-extension-source-utils.rs
index f41faddddf6..aa894c83910 100644
--- a/tests/ui/macros/syntax-extension-source-utils.rs
+++ b/tests/ui/macros/syntax-extension-source-utils.rs
@@ -17,7 +17,7 @@ pub fn main() {
     assert_eq!(column!(), 16);
     assert_eq!(indirect_line!(), 18);
     assert!((file!().ends_with("syntax-extension-source-utils.rs")));
-    assert_eq!(stringify!((2*3) + 5).to_string(), "(2 * 3) + 5".to_string());
+    assert_eq!(stringify!((2*3) + 5).to_string(), "(2*3) + 5".to_string());
     assert!(include!("syntax-extension-source-utils-files/includeme.\
                       fragment").to_string()
            == "victory robot 6".to_string());
@@ -32,5 +32,5 @@ pub fn main() {
     // The Windows tests are wrapped in an extra module for some reason
     assert!((m1::m2::where_am_i().ends_with("m1::m2")));
 
-    assert_eq!((35, "(2 * 3) + 5"), (line!(), stringify!((2*3) + 5)));
+    assert_eq!((35, "(2*3) + 5"), (line!(), stringify!((2*3) + 5)));
 }
diff --git a/tests/ui/macros/trace-macro.stderr b/tests/ui/macros/trace-macro.stderr
index 43272248c28..dac4cc12f84 100644
--- a/tests/ui/macros/trace-macro.stderr
+++ b/tests/ui/macros/trace-macro.stderr
@@ -5,5 +5,5 @@ LL |     println!("Hello, World!");
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `println! { "Hello, World!" }`
-   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! ("Hello, World!")) ; }`
+   = note: to `{ $crate :: io :: _print($crate :: format_args_nl! ("Hello, World!")); }`
 
diff --git a/tests/ui/macros/trace_faulty_macros.stderr b/tests/ui/macros/trace_faulty_macros.stderr
index 6a89d3bfd09..81047c7a21a 100644
--- a/tests/ui/macros/trace_faulty_macros.stderr
+++ b/tests/ui/macros/trace_faulty_macros.stderr
@@ -20,7 +20,7 @@ LL |     my_faulty_macro!();
    |     ^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `my_faulty_macro! {  }`
-   = note: to `my_faulty_macro! (bcd) ;`
+   = note: to `my_faulty_macro! (bcd);`
    = note: expanding `my_faulty_macro! { bcd }`
 
 error: recursion limit reached while expanding `my_recursive_macro!`
@@ -42,13 +42,13 @@ LL |     my_recursive_macro!();
    |     ^^^^^^^^^^^^^^^^^^^^^
    |
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
    = note: expanding `my_recursive_macro! {  }`
-   = note: to `my_recursive_macro! () ;`
+   = note: to `my_recursive_macro! ();`
 
 error: expected expression, found pattern `A { a: a, b: 0, c: _, .. }`
   --> $DIR/trace_faulty_macros.rs:16:9
@@ -98,7 +98,7 @@ LL |     let a = pat_macro!();
    |             ^^^^^^^^^^^^
    |
    = note: expanding `pat_macro! {  }`
-   = note: to `pat_macro! (A { a : a, b : 0, c : _, .. }) ;`
+   = note: to `pat_macro! (A { a : a, b : 0, c : _, .. });`
    = note: expanding `pat_macro! { A { a : a, b : 0, c : _, .. } }`
    = note: to `A { a: a, b: 0, c: _, .. }`
 
@@ -108,7 +108,7 @@ note: trace_macro
 LL |     test!(let x = 1+1);
    |     ^^^^^^^^^^^^^^^^^^
    |
-   = note: expanding `test! { let x = 1 + 1 }`
+   = note: expanding `test! { let x = 1+1 }`
    = note: to `test! ((x, 1 + 1))`
    = note: expanding `test! { (x, 1 + 1) }`
    = note: to `let x = 1 + 1 ;`
diff --git a/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout b/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout
index 4f8730053ee..8459f4e6305 100644
--- a/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout
+++ b/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): struct ItemWithSemi ;
+PRINT-ATTR INPUT (DISPLAY): struct ItemWithSemi;
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct ItemWithSemi ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -45,7 +46,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/allowed-attr-stmt-expr.rs:53:27: 53:29 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ;
+PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!";
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[expect_let] let string = "Hello, world!" ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
@@ -87,7 +89,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/allowed-attr-stmt-expr.rs:57:33: 57:34 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro! ("{}", string) ;
+PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro!("{}", string);
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[expect_my_macro_stmt] my_macro! ("{}", string) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
@@ -140,7 +143,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/allowed-attr-stmt-expr.rs:61:28: 61:29 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): second_make_stmt! (#[allow(dead_code)] struct Bar {}) ;
+PRINT-ATTR INPUT (DISPLAY): second_make_stmt!(#[allow(dead_code)] struct Bar {});
+PRINT-ATTR RE-COLLECTED (DISPLAY): second_make_stmt! (#[allow(dead_code)] struct Bar {}) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "second_make_stmt",
@@ -288,7 +292,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/allowed-attr-stmt-expr.rs:68:18: 68:20 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct NonBracedStruct ;
+PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct NonBracedStruct;
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[rustc_dummy] struct NonBracedStruct ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
diff --git a/tests/ui/proc-macro/attr-complex-fn.stdout b/tests/ui/proc-macro/attr-complex-fn.stdout
index b12eb587fc7..7c23d1ecae4 100644
--- a/tests/ui/proc-macro/attr-complex-fn.stdout
+++ b/tests/ui/proc-macro/attr-complex-fn.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): fn foo < T : MyTrait < MyStruct < { true } >> > () {}
+PRINT-ATTR INPUT (DISPLAY): fn foo<T: MyTrait<MyStruct<{ true }>>>() {}
+PRINT-ATTR RE-COLLECTED (DISPLAY): fn foo < T : MyTrait < MyStruct < { true } >>> () {}
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "fn",
@@ -76,7 +77,9 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-complex-fn.rs:19:42: 19:44 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > { #! [rustc_dummy] }
+PRINT-ATTR INPUT (DISPLAY): impl<T> MyTrait<T> for MyStruct<{ true }> { #![rustc_dummy] }
+PRINT-ATTR RE-COLLECTED (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > { #![rustc_dummy] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > { #! [rustc_dummy] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "impl",
diff --git a/tests/ui/proc-macro/attr-stmt-expr.stdout b/tests/ui/proc-macro/attr-stmt-expr.stdout
index c6d77e0ed0c..5e09198bbd4 100644
--- a/tests/ui/proc-macro/attr-stmt-expr.stdout
+++ b/tests/ui/proc-macro/attr-stmt-expr.stdout
@@ -29,7 +29,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-stmt-expr.rs:45:27: 45:29 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ;
+PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!";
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[expect_let] let string = "Hello, world!" ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
@@ -71,7 +72,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-stmt-expr.rs:49:33: 49:34 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro! ("{}", string) ;
+PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro!("{}", string);
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[expect_my_macro_stmt] my_macro! ("{}", string) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
@@ -124,7 +126,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/attr-stmt-expr.rs:53:28: 53:29 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): second_make_stmt! (#[allow(dead_code)] struct Bar {}) ;
+PRINT-ATTR INPUT (DISPLAY): second_make_stmt!(#[allow(dead_code)] struct Bar {});
+PRINT-ATTR RE-COLLECTED (DISPLAY): second_make_stmt! (#[allow(dead_code)] struct Bar {}) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "second_make_stmt",
diff --git a/tests/ui/proc-macro/attribute-after-derive.stdout b/tests/ui/proc-macro/attribute-after-derive.stdout
index 1b17d60476a..6d9531df8ca 100644
--- a/tests/ui/proc-macro/attribute-after-derive.stdout
+++ b/tests/ui/proc-macro/attribute-after-derive.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): #[derive(Print)] struct AttributeDerive { #[cfg(FALSE)] field : u8, }
+PRINT-ATTR INPUT (DISPLAY): #[derive(Print)] struct AttributeDerive { #[cfg(FALSE)] field: u8, }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[derive(Print)] struct AttributeDerive { #[cfg(FALSE)] field : u8, }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
@@ -130,7 +131,8 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: $DIR/attribute-after-derive.rs:23:24: 26:2 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct DeriveAttribute { #[cfg(FALSE)] field : u8, }
+PRINT-ATTR INPUT (DISPLAY): struct DeriveAttribute { #[cfg(FALSE)] field: u8, }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): struct DeriveAttribute { #[cfg(FALSE)] field : u8, }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/attribute-spans-preserved.stdout b/tests/ui/proc-macro/attribute-spans-preserved.stdout
index cf9a97491f0..190098d0211 100644
--- a/tests/ui/proc-macro/attribute-spans-preserved.stdout
+++ b/tests/ui/proc-macro/attribute-spans-preserved.stdout
@@ -1 +1 @@
-fn main() { let y : u32 = "z" ; { let x : u32 = "y" ; } }
+fn main() { let y : u32 = "z" ; { let x: u32 = "y"; } }
diff --git a/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs b/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs
index 5b386b46bb7..76346d9a172 100644
--- a/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs
+++ b/tests/ui/proc-macro/auxiliary/attr-stmt-expr-rpass.rs
@@ -10,14 +10,14 @@ use proc_macro::TokenStream;
 #[proc_macro_attribute]
 pub fn expect_let(attr: TokenStream, item: TokenStream) -> TokenStream {
     assert!(attr.to_string().is_empty());
-    assert_eq!(item.to_string(), "let string = \"Hello, world!\" ;");
+    assert_eq!(item.to_string(), "let string = \"Hello, world!\";");
     item
 }
 
 #[proc_macro_attribute]
 pub fn expect_print_stmt(attr: TokenStream, item: TokenStream) -> TokenStream {
     assert!(attr.to_string().is_empty());
-    assert_eq!(item.to_string(), "println! (\"{}\", string) ;");
+    assert_eq!(item.to_string(), "println!(\"{}\", string);");
     item
 }
 
@@ -31,7 +31,7 @@ pub fn expect_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
 #[proc_macro_attribute]
 pub fn expect_print_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
     assert!(attr.to_string().is_empty());
-    assert_eq!(item.to_string(), "println! (\"{}\", string)");
+    assert_eq!(item.to_string(), "println!(\"{}\", string)");
     item
 }
 
@@ -40,7 +40,6 @@ pub fn no_output(attr: TokenStream, item: TokenStream) -> TokenStream {
     assert!(attr.to_string().is_empty());
     assert!(!item.to_string().is_empty());
     "".parse().unwrap()
-
 }
 
 #[proc_macro_attribute]
diff --git a/tests/ui/proc-macro/auxiliary/attr-stmt-expr.rs b/tests/ui/proc-macro/auxiliary/attr-stmt-expr.rs
index 4d6dc06b4a4..7a29894bbcf 100644
--- a/tests/ui/proc-macro/auxiliary/attr-stmt-expr.rs
+++ b/tests/ui/proc-macro/auxiliary/attr-stmt-expr.rs
@@ -31,7 +31,7 @@ pub fn expect_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
 #[proc_macro_attribute]
 pub fn expect_my_macro_expr(attr: TokenStream, item: TokenStream) -> TokenStream {
     assert!(attr.to_string().is_empty());
-    assert_eq!(item.to_string(), "my_macro! (\"{}\", string)");
+    assert_eq!(item.to_string(), "my_macro!(\"{}\", string)");
     item
 }
 
diff --git a/tests/ui/proc-macro/auxiliary/derive-a.rs b/tests/ui/proc-macro/auxiliary/derive-a.rs
index 79a3864bf99..cd2be5fd84d 100644
--- a/tests/ui/proc-macro/auxiliary/derive-a.rs
+++ b/tests/ui/proc-macro/auxiliary/derive-a.rs
@@ -10,6 +10,6 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(A)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert!(input.contains("struct A ;"));
+    assert!(input.contains("struct A;"));
     "".parse().unwrap()
 }
diff --git a/tests/ui/proc-macro/auxiliary/derive-atob.rs b/tests/ui/proc-macro/auxiliary/derive-atob.rs
index 207b7fd3203..e78e5bb8f4c 100644
--- a/tests/ui/proc-macro/auxiliary/derive-atob.rs
+++ b/tests/ui/proc-macro/auxiliary/derive-atob.rs
@@ -10,6 +10,6 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(AToB)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert_eq!(input, "struct A ;");
+    assert_eq!(input, "struct A;");
     "struct B;".parse().unwrap()
 }
diff --git a/tests/ui/proc-macro/auxiliary/derive-b-rpass.rs b/tests/ui/proc-macro/auxiliary/derive-b-rpass.rs
index 641a95f78c1..3e6af67a9f4 100644
--- a/tests/ui/proc-macro/auxiliary/derive-b-rpass.rs
+++ b/tests/ui/proc-macro/auxiliary/derive-b-rpass.rs
@@ -10,7 +10,7 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(B, attributes(B, C))]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert!(input.contains("#[B [arbitrary tokens]]"));
+    assert!(input.contains("#[B[arbitrary tokens]]"));
     assert!(input.contains("struct B {"));
     assert!(input.contains("#[C]"));
     "".parse().unwrap()
diff --git a/tests/ui/proc-macro/auxiliary/derive-ctod.rs b/tests/ui/proc-macro/auxiliary/derive-ctod.rs
index 2efe5a91340..dbf44ed1b05 100644
--- a/tests/ui/proc-macro/auxiliary/derive-ctod.rs
+++ b/tests/ui/proc-macro/auxiliary/derive-ctod.rs
@@ -10,6 +10,6 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(CToD)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert_eq!(input, "struct C ;");
+    assert_eq!(input, "struct C;");
     "struct D;".parse().unwrap()
 }
diff --git a/tests/ui/proc-macro/auxiliary/derive-same-struct.rs b/tests/ui/proc-macro/auxiliary/derive-same-struct.rs
index 7598d632cb6..ce7a50d2381 100644
--- a/tests/ui/proc-macro/auxiliary/derive-same-struct.rs
+++ b/tests/ui/proc-macro/auxiliary/derive-same-struct.rs
@@ -10,12 +10,12 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(AToB)]
 pub fn derive1(input: TokenStream) -> TokenStream {
     println!("input1: {:?}", input.to_string());
-    assert_eq!(input.to_string(), "struct A ;");
+    assert_eq!(input.to_string(), "struct A;");
     "#[derive(BToC)] struct B;".parse().unwrap()
 }
 
 #[proc_macro_derive(BToC)]
 pub fn derive2(input: TokenStream) -> TokenStream {
-    assert_eq!(input.to_string(), "struct B ;");
+    assert_eq!(input.to_string(), "struct B;");
     "struct C;".parse().unwrap()
 }
diff --git a/tests/ui/proc-macro/auxiliary/derive-union.rs b/tests/ui/proc-macro/auxiliary/derive-union.rs
index 05883170c6c..d950e1e773c 100644
--- a/tests/ui/proc-macro/auxiliary/derive-union.rs
+++ b/tests/ui/proc-macro/auxiliary/derive-union.rs
@@ -12,7 +12,7 @@ pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
     assert!(input.contains("#[repr(C)]"));
     assert!(input.contains("union Test {"));
-    assert!(input.contains("a : u8,"));
+    assert!(input.contains("a: u8,"));
     assert!(input.contains("}"));
     "".parse().unwrap()
 }
diff --git a/tests/ui/proc-macro/auxiliary/expand-with-a-macro.rs b/tests/ui/proc-macro/auxiliary/expand-with-a-macro.rs
index d779d57af14..5155a4b8558 100644
--- a/tests/ui/proc-macro/auxiliary/expand-with-a-macro.rs
+++ b/tests/ui/proc-macro/auxiliary/expand-with-a-macro.rs
@@ -11,7 +11,7 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(A)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert!(input.contains("struct A ;"));
+    assert!(input.contains("struct A;"));
     r#"
         impl A {
             fn a(&self) {
diff --git a/tests/ui/proc-macro/auxiliary/issue-79825.rs b/tests/ui/proc-macro/auxiliary/issue-79825.rs
index 930891b1d43..69cf5904f90 100644
--- a/tests/ui/proc-macro/auxiliary/issue-79825.rs
+++ b/tests/ui/proc-macro/auxiliary/issue-79825.rs
@@ -8,7 +8,7 @@ use proc_macro::TokenStream;
 
 #[proc_macro_attribute]
 pub fn assert_input(args: TokenStream, input: TokenStream) -> TokenStream {
-    assert_eq!(input.to_string(), "trait Alias = Sized ;");
+    assert_eq!(input.to_string(), "trait Alias = Sized;");
     assert!(args.is_empty());
     TokenStream::new()
 }
diff --git a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout
index 01d71ff989b..71e34119ba7 100644
--- a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout
+++ b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout
@@ -14,6 +14,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
 PRINT-BANG INPUT (DISPLAY): 1 + 1, { "a" }, let a = 1;, String, my_name, 'a, my_val = 30,
 std::option::Option, pub(in some::path) , [a b c], -30
 PRINT-BANG RE-COLLECTED (DISPLAY): 1 + 1, { "a" }, let a = 1, String, my_name, 'a, my_val = 30,
+std::option::Option, pub(in some::path), [a b c], -30
+PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): 1 + 1, { "a" }, let a = 1, String, my_name, 'a, my_val = 30,
 std :: option :: Option, pub(in some :: path), [a b c], - 30
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
diff --git a/tests/ui/proc-macro/capture-unglued-token.stdout b/tests/ui/proc-macro/capture-unglued-token.stdout
index a0d2178f000..84b7ca35524 100644
--- a/tests/ui/proc-macro/capture-unglued-token.stdout
+++ b/tests/ui/proc-macro/capture-unglued-token.stdout
@@ -1,5 +1,5 @@
 PRINT-BANG INPUT (DISPLAY): Vec<u8>
-PRINT-BANG RE-COLLECTED (DISPLAY): Vec < u8 >
+PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): Vec < u8 >
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
         delimiter: None,
diff --git a/tests/ui/proc-macro/cfg-eval-inner.stdout b/tests/ui/proc-macro/cfg-eval-inner.stdout
index 9d25def587c..43ae47c201f 100644
--- a/tests/ui/proc-macro/cfg-eval-inner.stdout
+++ b/tests/ui/proc-macro/cfg-eval-inner.stdout
@@ -1,4 +1,16 @@
-PRINT-ATTR INPUT (DISPLAY): impl Foo <
+PRINT-ATTR INPUT (DISPLAY): impl
+Foo<[u8;
+{
+    #![rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
+    { field: [u8; { #![rustc_dummy(another_cursed_inner)] 1 }] } 0
+}]> { #![rustc_dummy(evaluated_attr)] fn bar() {} }
+PRINT-ATTR RE-COLLECTED (DISPLAY): impl Foo <
+[u8;
+{
+    #![rustc_dummy(cursed_inner)] #![allow(unused)] struct Inner
+    { field: [u8; { #![rustc_dummy(another_cursed_inner)] 1 }] } 0
+}] > { #![rustc_dummy(evaluated_attr)] fn bar() {} }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): impl Foo <
 [u8 ;
 {
     #! [rustc_dummy(cursed_inner)] #! [allow(unused)] struct Inner
@@ -35,7 +47,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
                 stream: TokenStream [
                     Punct {
                         ch: '#',
-                        spacing: Alone,
+                        spacing: Joint,
                         span: $DIR/cfg-eval-inner.rs:19:5: 19:6 (#0),
                     },
                     Punct {
@@ -130,7 +142,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
                                         stream: TokenStream [
                                             Punct {
                                                 ch: '#',
-                                                spacing: Alone,
+                                                spacing: Joint,
                                                 span: $DIR/cfg-eval-inner.rs:23:13: 23:14 (#0),
                                             },
                                             Punct {
@@ -195,7 +207,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         stream: TokenStream [
             Punct {
                 ch: '#',
-                spacing: Alone,
+                spacing: Joint,
                 span: $DIR/cfg-eval-inner.rs:32:5: 32:6 (#0),
             },
             Punct {
diff --git a/tests/ui/proc-macro/cfg-eval.stdout b/tests/ui/proc-macro/cfg-eval.stdout
index 6732caf08dd..e26e16f5a8c 100644
--- a/tests/ui/proc-macro/cfg-eval.stdout
+++ b/tests/ui/proc-macro/cfg-eval.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): struct S1 { #[cfg(all())] #[allow()] field_true : u8, }
+PRINT-ATTR INPUT (DISPLAY): struct S1 { #[cfg(all())] #[allow()] field_true: u8, }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): struct S1 { #[cfg(all())] #[allow()] field_true : u8, }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/derive-same-struct.stdout b/tests/ui/proc-macro/derive-same-struct.stdout
index 7478d974140..77605de5e33 100644
--- a/tests/ui/proc-macro/derive-same-struct.stdout
+++ b/tests/ui/proc-macro/derive-same-struct.stdout
@@ -1 +1 @@
-input1: "struct A ;"
+input1: "struct A;"
diff --git a/tests/ui/proc-macro/doc-comment-preserved.stdout b/tests/ui/proc-macro/doc-comment-preserved.stdout
index f4160d7da80..b0ea180d9bd 100644
--- a/tests/ui/proc-macro/doc-comment-preserved.stdout
+++ b/tests/ui/proc-macro/doc-comment-preserved.stdout
@@ -5,7 +5,7 @@ PRINT-BANG INPUT (DISPLAY): /**
 * DOC *
 *******
 */
- pub struct S ;
+ pub struct S;
 PRINT-BANG RE-COLLECTED (DISPLAY): #[doc = "\n*******\n* DOC *\n* DOC *\n* DOC *\n*******\n"] pub struct S ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Punct {
diff --git a/tests/ui/proc-macro/dollar-crate-issue-57089.stdout b/tests/ui/proc-macro/dollar-crate-issue-57089.stdout
index de4f0c000b6..7f97ac9dbc2 100644
--- a/tests/ui/proc-macro/dollar-crate-issue-57089.stdout
+++ b/tests/ui/proc-macro/dollar-crate-issue-57089.stdout
@@ -1,4 +1,5 @@
-PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
+PRINT-BANG INPUT (DISPLAY): struct M($crate :: S);
+PRINT-BANG RE-COLLECTED (DISPLAY): struct M($crate :: S) ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -38,7 +39,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate-issue-57089.rs:17:32: 17:33 (#3),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
+PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S);
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/dollar-crate-issue-62325.stdout b/tests/ui/proc-macro/dollar-crate-issue-62325.stdout
index c7e72bf4ff5..96161049e30 100644
--- a/tests/ui/proc-macro/dollar-crate-issue-62325.stdout
+++ b/tests/ui/proc-macro/dollar-crate-issue-62325.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): struct A(identity! ($crate :: S)) ;
+PRINT-ATTR INPUT (DISPLAY): struct A(identity! ($crate :: S));
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A(identity! ($crate :: S)) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -53,7 +54,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate-issue-62325.rs:19:35: 19:36 (#3),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct B(identity! ($crate :: S)) ;
+PRINT-ATTR INPUT (DISPLAY): struct B(identity! ($crate :: S));
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct B(identity! ($crate :: S)) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/dollar-crate.stdout b/tests/ui/proc-macro/dollar-crate.stdout
index 0f5f87ceca2..a6bdab00441 100644
--- a/tests/ui/proc-macro/dollar-crate.stdout
+++ b/tests/ui/proc-macro/dollar-crate.stdout
@@ -1,4 +1,5 @@
-PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
+PRINT-BANG INPUT (DISPLAY): struct M($crate :: S);
+PRINT-BANG RE-COLLECTED (DISPLAY): struct M($crate :: S) ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -38,7 +39,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate.rs:20:36: 20:37 (#3),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
+PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S);
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -78,7 +80,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate.rs:24:32: 24:33 (#3),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
+PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S);
+PRINT-DERIVE RE-COLLECTED (DISPLAY): struct D($crate :: S) ;
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -118,7 +121,8 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: $DIR/dollar-crate.rs:27:32: 27:33 (#3),
     },
 ]
-PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ;
+PRINT-BANG INPUT (DISPLAY): struct M($crate :: S);
+PRINT-BANG RE-COLLECTED (DISPLAY): struct M($crate :: S) ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -158,7 +162,8 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         span: $DIR/auxiliary/dollar-crate-external.rs:7:32: 7:33 (#14),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ;
+PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S);
+PRINT-ATTR RE-COLLECTED (DISPLAY): struct A($crate :: S) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
@@ -198,7 +203,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/auxiliary/dollar-crate-external.rs:11:28: 11:29 (#14),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ;
+PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S);
+PRINT-DERIVE RE-COLLECTED (DISPLAY): struct D($crate :: S) ;
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "struct",
diff --git a/tests/ui/proc-macro/expand-to-derive.stdout b/tests/ui/proc-macro/expand-to-derive.stdout
index 39f00918329..9f03a469aeb 100644
--- a/tests/ui/proc-macro/expand-to-derive.stdout
+++ b/tests/ui/proc-macro/expand-to-derive.stdout
@@ -1,6 +1,11 @@
 PRINT-DERIVE INPUT (DISPLAY): struct Foo
 {
     field :
+    [bool ; { #[rustc_dummy] struct Inner { other_inner_field: u8, } 0 }]
+}
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct Foo
+{
+    field :
     [bool ; { #[rustc_dummy] struct Inner { other_inner_field : u8, } 0 }]
 }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
diff --git a/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout b/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout
index 40181efc0b8..eda2f433bce 100644
--- a/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout
+++ b/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout
@@ -122,7 +122,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #3 bytes(306..355),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
@@ -202,7 +202,8 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #7 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { {} } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { {} }; 0 }, }
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { {} } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
         ident: "enum",
@@ -280,7 +281,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #11 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
@@ -358,7 +359,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #15 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 + 1 } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
@@ -449,7 +450,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
         span: #19 bytes(430..483),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; } ; 0 }, }
+PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; }; 0 }, }
 PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH + 1 } ; 0 }, }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/proc-macro/inert-attribute-order.stdout b/tests/ui/proc-macro/inert-attribute-order.stdout
index cc215545952..86b27217974 100644
--- a/tests/ui/proc-macro/inert-attribute-order.stdout
+++ b/tests/ui/proc-macro/inert-attribute-order.stdout
@@ -1,7 +1,11 @@
 PRINT-ATTR INPUT (DISPLAY): /// 1
-#[rustfmt :: attr2] #[doc = "3"] #[doc = "4"] #[rustfmt :: attr5] /// 6
-#[print_attr(nodebug)] struct S ;
-PRINT-ATTR RE-COLLECTED (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"]
+#[rustfmt::attr2] #[doc = "3"] #[doc = "4"] #[rustfmt::attr5] /// 6
+#[print_attr(nodebug)] struct S;
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[doc = " 1"] #[rustfmt::attr2] #[doc = "3"] #[doc = "4"] #[rustfmt::attr5]
+#[doc = " 6"] #[print_attr(nodebug)] struct S ;
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"]
 #[rustfmt :: attr5] #[doc = " 6"] #[print_attr(nodebug)] struct S ;
-PRINT-ATTR INPUT (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"]
+PRINT-ATTR INPUT (DISPLAY): #[doc = " 1"] #[rustfmt::attr2] #[doc = "3"] #[doc = "4"] #[rustfmt::attr5]
+#[doc = " 6"] struct S ;
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[doc = " 1"] #[rustfmt :: attr2] #[doc = "3"] #[doc = "4"]
 #[rustfmt :: attr5] #[doc = " 6"] struct S ;
diff --git a/tests/ui/proc-macro/inner-attr-non-inline-mod.stdout b/tests/ui/proc-macro/inner-attr-non-inline-mod.stdout
index 6261d82e2d0..aaec40669e6 100644
--- a/tests/ui/proc-macro/inner-attr-non-inline-mod.stdout
+++ b/tests/ui/proc-macro/inner-attr-non-inline-mod.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): #[deny(unused_attributes)] mod module_with_attrs { #! [rustfmt :: skip] }
+PRINT-ATTR INPUT (DISPLAY): #[deny(unused_attributes)] mod module_with_attrs { #![rustfmt::skip] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[deny(unused_attributes)] mod module_with_attrs { #! [rustfmt :: skip] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
diff --git a/tests/ui/proc-macro/inner-attrs.stdout b/tests/ui/proc-macro/inner-attrs.stdout
index ee8adf0b4a9..037ec044e42 100644
--- a/tests/ui/proc-macro/inner-attrs.stdout
+++ b/tests/ui/proc-macro/inner-attrs.stdout
@@ -6,6 +6,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
     },
 ]
 PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second)] fn foo()
+{ #![print_target_and_args(third)] #![print_target_and_args(fourth)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[print_target_and_args(second)] fn foo()
 { #! [print_target_and_args(third)] #! [print_target_and_args(fourth)] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
@@ -121,6 +123,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
     },
 ]
 PRINT-ATTR INPUT (DISPLAY): fn foo()
+{ #![print_target_and_args(third)] #![print_target_and_args(fourth)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): fn foo()
 { #! [print_target_and_args(third)] #! [print_target_and_args(fourth)] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
@@ -210,7 +214,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/inner-attrs.rs:20:30: 20:35 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): fn foo() { #! [print_target_and_args(fourth)] }
+PRINT-ATTR INPUT (DISPLAY): fn foo() { #![print_target_and_args(fourth)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): fn foo() { #! [print_target_and_args(fourth)] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "fn",
@@ -298,6 +303,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
     },
 ]
 PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(mod_second)] mod inline_mod
+{ #![print_target_and_args(mod_third)] #![print_target_and_args(mod_fourth)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[print_target_and_args(mod_second)] mod inline_mod
 {
     #! [print_target_and_args(mod_third)] #!
     [print_target_and_args(mod_fourth)]
@@ -411,6 +418,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
     },
 ]
 PRINT-ATTR INPUT (DISPLAY): mod inline_mod
+{ #![print_target_and_args(mod_third)] #![print_target_and_args(mod_fourth)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): mod inline_mod
 {
     #! [print_target_and_args(mod_third)] #!
     [print_target_and_args(mod_fourth)]
@@ -498,7 +507,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/inner-attrs.rs:27:30: 27:39 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): mod inline_mod { #! [print_target_and_args(mod_fourth)] }
+PRINT-ATTR INPUT (DISPLAY): mod inline_mod { #![print_target_and_args(mod_fourth)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): mod inline_mod { #! [print_target_and_args(mod_fourth)] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "mod",
@@ -569,6 +579,8 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
     },
 ]
 PRINT-DERIVE INPUT (DISPLAY): struct MyDerivePrint
+{ field: [u8; { match true { _ => { #![rustc_dummy(third)] true } }; 0 }] }
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct MyDerivePrint
 {
     field :
     [u8 ; { match true { _ => { #! [rustc_dummy(third)] true } } ; 0 }]
@@ -639,7 +651,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
                                         stream: TokenStream [
                                             Punct {
                                                 ch: '#',
-                                                spacing: Alone,
+                                                spacing: Joint,
                                                 span: $DIR/inner-attrs.rs:40:17: 40:18 (#0),
                                             },
                                             Punct {
@@ -705,7 +717,9 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/inner-attrs.rs:49:29: 49:40 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): (3, 4, { #! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 });
+PRINT-ATTR RE-COLLECTED (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): (3, 4, { #! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Group {
         delimiter: Parenthesis,
@@ -819,7 +833,9 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/inner-attrs.rs:56:29: 56:40 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): (3, 4, { #! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR INPUT (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 });
+PRINT-ATTR RE-COLLECTED (DISPLAY): (3, 4, { #![cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): (3, 4, { #! [cfg_attr(not(FALSE), rustc_dummy(innermost))] 5 }) ;
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Group {
         delimiter: Parenthesis,
diff --git a/tests/ui/proc-macro/issue-75734-pp-paren.stdout b/tests/ui/proc-macro/issue-75734-pp-paren.stdout
index 2f7c013e958..ee135366fb6 100644
--- a/tests/ui/proc-macro/issue-75734-pp-paren.stdout
+++ b/tests/ui/proc-macro/issue-75734-pp-paren.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): fn main() { & | _ : u8 | {} ; mul_2! (1 + 1) ; }
+PRINT-ATTR INPUT (DISPLAY): fn main() { &|_: u8| {}; mul_2!(1 + 1); }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): fn main() { &| _ : u8 | {} ; mul_2! (1 + 1) ; }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "fn",
diff --git a/tests/ui/proc-macro/issue-75930-derive-cfg.stdout b/tests/ui/proc-macro/issue-75930-derive-cfg.stdout
index 31a1c44b6df..47f26451d1c 100644
--- a/tests/ui/proc-macro/issue-75930-derive-cfg.stdout
+++ b/tests/ui/proc-macro/issue-75930-derive-cfg.stdout
@@ -1,5 +1,53 @@
 PRINT-ATTR INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[derive(Print)] #[print_helper(b)]
-struct Foo < #[cfg(FALSE)] A, B >
+struct Foo<#[cfg(FALSE)] A, B>
+{
+    #[cfg(FALSE)] first: String, #[cfg_attr(FALSE, deny(warnings))] second:
+    bool, third:
+    [u8;
+    {
+        #[cfg(FALSE)] struct Bar; #[cfg(not(FALSE))] struct Inner;
+        #[cfg(FALSE)] let a = 25; match true
+        {
+            #[cfg(FALSE)] true => {}, #[cfg_attr(not(FALSE), allow(warnings))]
+            false => {}, _ => {}
+        }; #[print_helper(should_be_removed)] fn removed_fn()
+        { #![cfg(FALSE)] } #[print_helper(c)] #[cfg(not(FALSE))] fn kept_fn()
+        { #![cfg(not(FALSE))] let my_val = true; } enum TupleEnum
+        {
+            Foo(#[cfg(FALSE)] u8, #[cfg(FALSE)] bool, #[cfg(not(FALSE))] i32,
+            #[cfg(FALSE)] String, u8)
+        } struct
+        TupleStruct(#[cfg(FALSE)] String, #[cfg(not(FALSE))] i32,
+        #[cfg(FALSE)] bool, u8); fn plain_removed_fn()
+        { #![cfg_attr(not(FALSE), cfg(FALSE))] } 0
+    }], #[print_helper(d)] fourth: B
+}
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[derive(Print)] #[print_helper(b)]
+struct Foo <#[cfg(FALSE)] A, B >
+{
+    #[cfg(FALSE)] first: String, #[cfg_attr(FALSE, deny(warnings))] second:
+    bool, third:
+    [u8;
+    {
+        #[cfg(FALSE)] struct Bar; #[cfg(not(FALSE))] struct Inner;
+        #[cfg(FALSE)] let a = 25; match true
+        {
+            #[cfg(FALSE)] true => {}, #[cfg_attr(not(FALSE), allow(warnings))]
+            false => {}, _ => {}
+        }; #[print_helper(should_be_removed)] fn removed_fn()
+        { #![cfg(FALSE)] } #[print_helper(c)] #[cfg(not(FALSE))] fn kept_fn()
+        { #![cfg(not(FALSE))] let my_val = true; } enum TupleEnum
+        {
+            Foo(#[cfg(FALSE)] u8, #[cfg(FALSE)] bool, #[cfg(not(FALSE))] i32,
+            #[cfg(FALSE)] String, u8)
+        } struct
+        TupleStruct(#[cfg(FALSE)] String, #[cfg(not(FALSE))] i32,
+        #[cfg(FALSE)] bool, u8); fn plain_removed_fn()
+        { #![cfg_attr(not(FALSE), cfg(FALSE))] } 0
+    }], #[print_helper(d)] fourth: B
+}
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[derive(Print)] #[print_helper(b)]
+struct Foo <#[cfg(FALSE)] A, B >
 {
     #[cfg(FALSE)] first : String, #[cfg_attr(FALSE, deny(warnings))] second :
     bool, third :
@@ -1272,7 +1320,20 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
         span: $DIR/issue-75930-derive-cfg.rs:55:32: 102:2 (#0),
     },
 ]
-PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_helper(b)] struct Foo < B >
+PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_helper(b)] struct Foo <B >
+{
+    second: bool, third:
+    [u8;
+    {
+        #[cfg(not(FALSE))] struct Inner; match true
+        { #[allow(warnings)] false => {}, _ => {} }; #[print_helper(c)]
+        #[cfg(not(FALSE))] fn kept_fn()
+        { #![cfg(not(FALSE))] let my_val = true; } enum TupleEnum
+        { Foo(#[cfg(not(FALSE))] i32, u8) } struct
+        TupleStruct(#[cfg(not(FALSE))] i32, u8); 0
+    }], #[print_helper(d)] fourth: B
+}
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_helper(b)] struct Foo <B >
 {
     second : bool, third :
     [u8 ;
diff --git a/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout b/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout
index ae5e9400809..37ecf3a8df3 100644
--- a/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout
+++ b/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout
@@ -46,7 +46,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
                         stream: TokenStream [
                             Punct {
                                 ch: '#',
-                                spacing: Alone,
+                                spacing: Joint,
                                 span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
                             },
                             Punct {
diff --git a/tests/ui/proc-macro/keep-expr-tokens.stdout b/tests/ui/proc-macro/keep-expr-tokens.stdout
index fcd72a0e017..8e4d4785e2e 100644
--- a/tests/ui/proc-macro/keep-expr-tokens.stdout
+++ b/tests/ui/proc-macro/keep-expr-tokens.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] { 1 + 1 ; }
+PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] { 1 +1; }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[rustc_dummy] { 1 + 1 ; }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
diff --git a/tests/ui/proc-macro/macro-rules-derive-cfg.stdout b/tests/ui/proc-macro/macro-rules-derive-cfg.stdout
index aee0f966d0f..89817a1efdc 100644
--- a/tests/ui/proc-macro/macro-rules-derive-cfg.stdout
+++ b/tests/ui/proc-macro/macro-rules-derive-cfg.stdout
@@ -4,6 +4,15 @@ PRINT-DERIVE INPUT (DISPLAY): struct Foo
     [bool ;
     {
         let a = #[rustc_dummy(first)] #[rustc_dummy(second)]
+        { #![allow(unused)] 30 }; 0
+    }]
+}
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct Foo
+{
+    val :
+    [bool ;
+    {
+        let a = #[rustc_dummy(first)] #[rustc_dummy(second)]
         { #! [allow(unused)] 30 } ; 0
     }]
 }
@@ -111,7 +120,7 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
                                 stream: TokenStream [
                                     Punct {
                                         ch: '#',
-                                        spacing: Alone,
+                                        spacing: Joint,
                                         span: $DIR/macro-rules-derive-cfg.rs:27:5: 27:6 (#0),
                                     },
                                     Punct {
diff --git a/tests/ui/proc-macro/meta-macro-hygiene.stdout b/tests/ui/proc-macro/meta-macro-hygiene.stdout
index eeb7179e6fd..6d10cc604c2 100644
--- a/tests/ui/proc-macro/meta-macro-hygiene.stdout
+++ b/tests/ui/proc-macro/meta-macro-hygiene.stdout
@@ -32,13 +32,13 @@ macro_rules! produce_it
     */ {
     () =>
     {
-        meta_macro :: print_def_site! ($crate :: dummy! ()) ;
+        meta_macro::print_def_site!($crate::dummy!());
         // `print_def_site!` will respan the `$crate` identifier
         // with `Span::def_site()`. This should cause it to resolve
         // relative to `meta_macro`, *not* `make_macro` (despite
         // the fact that `print_def_site` is produced by a
         // `macro_rules!` macro in `make_macro`).
-    } ;
+    };
 }
 
 fn main /* 0#0 */() { ; }
diff --git a/tests/ui/proc-macro/nested-derive-cfg.stdout b/tests/ui/proc-macro/nested-derive-cfg.stdout
index 9a562c971c8..c94808c3c0f 100644
--- a/tests/ui/proc-macro/nested-derive-cfg.stdout
+++ b/tests/ui/proc-macro/nested-derive-cfg.stdout
@@ -1,4 +1,6 @@
 PRINT-DERIVE INPUT (DISPLAY): struct Foo
+{ my_array: [bool; { struct Inner { non_removed_inner_field: usize } 0 }] }
+PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): struct Foo
 { my_array : [bool ; { struct Inner { non_removed_inner_field : usize } 0 }] }
 PRINT-DERIVE INPUT (DEBUG): TokenStream [
     Ident {
diff --git a/tests/ui/proc-macro/nonterminal-expansion.stdout b/tests/ui/proc-macro/nonterminal-expansion.stdout
index b2557af18ca..c671f623925 100644
--- a/tests/ui/proc-macro/nonterminal-expansion.stdout
+++ b/tests/ui/proc-macro/nonterminal-expansion.stdout
@@ -1,5 +1,5 @@
 PRINT-ATTR_ARGS INPUT (DISPLAY): a, line!(), b
-PRINT-ATTR_ARGS RE-COLLECTED (DISPLAY): a, line! (), b
+PRINT-ATTR_ARGS DEEP-RE-COLLECTED (DISPLAY): a, line! (), b
 PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
     Ident {
         ident: "a",
diff --git a/tests/ui/proc-macro/nonterminal-token-hygiene.stdout b/tests/ui/proc-macro/nonterminal-token-hygiene.stdout
index c437853ac72..5c70e780f74 100644
--- a/tests/ui/proc-macro/nonterminal-token-hygiene.stdout
+++ b/tests/ui/proc-macro/nonterminal-token-hygiene.stdout
@@ -1,5 +1,5 @@
 PRINT-BANG INPUT (DISPLAY): struct S;
-PRINT-BANG RE-COLLECTED (DISPLAY): struct S ;
+PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): struct S ;
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
         delimiter: None,
@@ -51,11 +51,11 @@ macro_rules! outer
     /*
     0#0
     */ {
-    ($item : item) =>
+    ($item:item) =>
     {
-        macro inner() { print_bang! { $item } } inner! () ;
+        macro inner() { print_bang! { $item } } inner!();
 
-    } ;
+    };
 }
 
 struct S /* 0#0 */;
diff --git a/tests/ui/proc-macro/pretty-print-tts.stdout b/tests/ui/proc-macro/pretty-print-tts.stdout
index f52e97a8604..fbe8640a01a 100644
--- a/tests/ui/proc-macro/pretty-print-tts.stdout
+++ b/tests/ui/proc-macro/pretty-print-tts.stdout
@@ -1,4 +1,5 @@
-PRINT-BANG INPUT (DISPLAY): { #! [rustc_dummy] let a = "hello".len() ; matches! (a, 5) ; }
+PRINT-BANG INPUT (DISPLAY): { #![rustc_dummy] let a = "hello".len(); matches!(a, 5); }
+PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): { #! [rustc_dummy] let a = "hello".len() ; matches! (a, 5) ; }
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
         delimiter: Brace,
diff --git a/tests/ui/proc-macro/trailing-plus.stdout b/tests/ui/proc-macro/trailing-plus.stdout
index b90057cd6d5..b20401190d8 100644
--- a/tests/ui/proc-macro/trailing-plus.stdout
+++ b/tests/ui/proc-macro/trailing-plus.stdout
@@ -1,4 +1,5 @@
-PRINT-ATTR INPUT (DISPLAY): fn foo < T > () where T : Copy + {}
+PRINT-ATTR INPUT (DISPLAY): fn foo<T>() where T: Copy + {}
+PRINT-ATTR RE-COLLECTED (DISPLAY): fn foo < T > () where T : Copy + {}
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "fn",
diff --git a/tests/ui/proc-macro/weird-braces.stdout b/tests/ui/proc-macro/weird-braces.stdout
index 9bf56221734..7da769ef0d2 100644
--- a/tests/ui/proc-macro/weird-braces.stdout
+++ b/tests/ui/proc-macro/weird-braces.stdout
@@ -5,7 +5,18 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:16:25: 16:36 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second_outer)] impl Bar < { 1 > 0 } > for Foo <
+PRINT-ATTR INPUT (DISPLAY): #[print_target_and_args(second_outer)] impl Bar<{ 1 > 0 }> for Foo<{ true }>
+{
+    #![print_target_and_args(first_inner)]
+    #![print_target_and_args(second_inner)]
+}
+PRINT-ATTR RE-COLLECTED (DISPLAY): #[print_target_and_args(second_outer)] impl Bar < { 1 > 0 } > for Foo <
+{ true } >
+{
+    #![print_target_and_args(first_inner)]
+    #![print_target_and_args(second_inner)]
+}
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): #[print_target_and_args(second_outer)] impl Bar < { 1 > 0 } > for Foo <
 { true } >
 {
     #! [print_target_and_args(first_inner)] #!
@@ -180,7 +191,17 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:17:25: 17:37 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
+PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 }> for Foo<{ true }>
+{
+    #![print_target_and_args(first_inner)]
+    #![print_target_and_args(second_inner)]
+}
+PRINT-ATTR RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
+{
+    #![print_target_and_args(first_inner)]
+    #![print_target_and_args(second_inner)]
+}
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
 {
     #! [print_target_and_args(first_inner)] #!
     [print_target_and_args(second_inner)]
@@ -329,7 +350,11 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
+PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 }> for Foo<{ true }>
+{ #![print_target_and_args(second_inner)] }
+PRINT-ATTR RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
+{ #![print_target_and_args(second_inner)] }
+PRINT-ATTR DEEP-RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
 { #! [print_target_and_args(second_inner)] }
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
@@ -445,7 +470,8 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
         span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
     },
 ]
-PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > {}
+PRINT-ATTR INPUT (DISPLAY): impl Bar<{ 1 > 0 }> for Foo<{ true }> {}
+PRINT-ATTR RE-COLLECTED (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > {}
 PRINT-ATTR INPUT (DEBUG): TokenStream [
     Ident {
         ident: "impl",
diff --git a/tests/ui/rfcs/rfc-2361-dbg-macro/dbg-macro-expected-behavior.run.stderr b/tests/ui/rfcs/rfc-2361-dbg-macro/dbg-macro-expected-behavior.run.stderr
index a20a6062c13..1a26f34a52d 100644
--- a/tests/ui/rfcs/rfc-2361-dbg-macro/dbg-macro-expected-behavior.run.stderr
+++ b/tests/ui/rfcs/rfc-2361-dbg-macro/dbg-macro-expected-behavior.run.stderr
@@ -12,7 +12,7 @@
 [$DIR/dbg-macro-expected-behavior.rs:42] &a = NoCopy(
     1337,
 )
-[$DIR/dbg-macro-expected-behavior.rs:42] dbg!(& a) = NoCopy(
+[$DIR/dbg-macro-expected-behavior.rs:42] dbg!(&a) = NoCopy(
     1337,
 )
 [$DIR/dbg-macro-expected-behavior.rs:47] f(&42) = 42
diff --git a/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs b/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs
index 82c4120b4c7..67de50a1d92 100644
--- a/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs
+++ b/tests/ui/rfcs/rfc-2565-param-attrs/auxiliary/param-attrs.rs
@@ -17,27 +17,26 @@ macro_rules! checker {
     }
 }
 
-checker!(attr_extern, r#"extern "C" { fn ffi(#[a1] arg1 : i32, #[a2] ...) ; }"#);
-checker!(attr_extern_cvar, r#"unsafe extern "C" fn cvar(arg1 : i32, #[a1] mut args : ...) {}"#);
-checker!(attr_alias, "type Alias = fn(#[a1] u8, #[a2] ...) ;");
-checker!(attr_free, "fn free(#[a1] arg1 : u8) { let lam = | #[a2] W(x), #[a3] y | () ; }");
-checker!(attr_inherent_1, "fn inherent1(#[a1] self, #[a2] arg1 : u8) {}");
-checker!(attr_inherent_2, "fn inherent2(#[a1] & self, #[a2] arg1 : u8) {}");
-checker!(attr_inherent_3, "fn inherent3 < 'a > (#[a1] & 'a mut self, #[a2] arg1 : u8) {}");
-checker!(attr_inherent_4, "fn inherent4 < 'a > (#[a1] self : Box < Self >, #[a2] arg1 : u8) {}");
-checker!(attr_inherent_issue_64682, "fn inherent5(#[a1] #[a2] arg1 : u8, #[a3] arg2 : u8) {}");
-checker!(attr_trait_1, "fn trait1(#[a1] self, #[a2] arg1 : u8) ;");
-checker!(attr_trait_2, "fn trait2(#[a1] & self, #[a2] arg1 : u8) ;");
-checker!(attr_trait_3, "fn trait3 < 'a > (#[a1] & 'a mut self, #[a2] arg1 : u8) ;");
-checker!(attr_trait_4, r#"fn trait4 < 'a >
-(#[a1] self : Box < Self >, #[a2] arg1 : u8, #[a3] Vec < u8 >) ;"#);
-checker!(attr_trait_issue_64682, "fn trait5(#[a1] #[a2] arg1 : u8, #[a3] arg2 : u8) ;");
+checker!(attr_extern, r#"extern "C" { fn ffi(#[a1] arg1: i32, #[a2] ...); }"#);
+checker!(attr_extern_cvar, r#"unsafe extern "C" fn cvar(arg1: i32, #[a1] mut args: ...) {}"#);
+checker!(attr_alias, "type Alias = fn(#[a1] u8, #[a2] ...);");
+checker!(attr_free, "fn free(#[a1] arg1: u8) { let lam = |#[a2] W(x), #[a3] y| (); }");
+checker!(attr_inherent_1, "fn inherent1(#[a1] self, #[a2] arg1: u8) {}");
+checker!(attr_inherent_2, "fn inherent2(#[a1] &self, #[a2] arg1: u8) {}");
+checker!(attr_inherent_3, "fn inherent3<'a>(#[a1] &'a mut self, #[a2] arg1: u8) {}");
+checker!(attr_inherent_4, "fn inherent4<'a>(#[a1] self: Box<Self>, #[a2] arg1: u8) {}");
+checker!(attr_inherent_issue_64682, "fn inherent5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8) {}");
+checker!(attr_trait_1, "fn trait1(#[a1] self, #[a2] arg1: u8);");
+checker!(attr_trait_2, "fn trait2(#[a1] &self, #[a2] arg1: u8);");
+checker!(attr_trait_3, "fn trait3<'a>(#[a1] &'a mut self, #[a2] arg1: u8);");
+checker!(attr_trait_4, r#"fn trait4<'a>(#[a1] self: Box<Self>, #[a2] arg1: u8, #[a3] Vec<u8>);"#);
+checker!(attr_trait_issue_64682, "fn trait5(#[a1] #[a2] arg1: u8, #[a3] arg2: u8);");
 checker!(rename_params, r#"impl Foo
 {
-    fn hello(#[angery(true)] a : i32, #[a2] b : i32, #[what = "how"] c : u32)
+    fn hello(#[angery(true)] a: i32, #[a2] b: i32, #[what = "how"] c: u32) {}
+    fn
+    hello2(#[a1] #[a2] a: i32, #[what = "how"] b: i32, #[angery(true)] c: u32)
     {} fn
-    hello2(#[a1] #[a2] a : i32, #[what = "how"] b : i32, #[angery(true)] c :
-    u32) {} fn
-    hello_self(#[a1] #[a2] & self, #[a1] #[a2] a : i32, #[what = "how"] b :
-    i32, #[angery(true)] c : u32) {}
+    hello_self(#[a1] #[a2] &self, #[a1] #[a2] a: i32, #[what = "how"] b: i32,
+    #[angery(true)] c: u32) {}
 }"#);