about summary refs log tree commit diff
diff options
context:
space:
mode:
author	bors <bors@rust-lang.org>	2020-10-14 00:26:50 +0000
committer	bors <bors@rust-lang.org>	2020-10-14 00:26:50 +0000
commit	4ba5068815137eef403383582d3f17f3b6802217 (patch)
tree	1d8b7619774abe52c162e43db1c8d02226a2e37f
parent	f243a2ad904705a1e340a08639dca105605b4175 (diff)
parent	9a6ea386472acb7e1e1dd24370ef9d60d07463f5 (diff)
download	rust-4ba5068815137eef403383582d3f17f3b6802217.tar.gz
download	rust-4ba5068815137eef403383582d3f17f3b6802217.zip
Auto merge of #77135 - Aaron1011:pretty-ignore-paren, r=petrochenkov
Refactor AST pretty-printing to allow skipping insertion of extra parens

Fixes #75734
Makes progress towards #43081
Unblocks PR #76130

When pretty-printing an AST node, we may insert additional parentheses
to ensure that precedence is properly preserved in the code we output.
However, the proc macro implementation relies on comparing a
pretty-printed AST node to the captured `TokenStream`. Inserting extra
parentheses changes the structure of the reparsed `TokenStream`, making
the comparison fail.

This PR refactors the AST pretty-printing code to allow skipping the
insertion of additional parentheses. Several freestanding methods are
moved to trait methods on `PrintState`, which keep track of an internal
`insert_extra_parens` flag. This flag is normally `true`, but we expose
a public method which allows pretty-printing a nonterminal with
`insert_extra_parens = false`.

To avoid changing the public interface of `rustc_ast_pretty`, the
freestanding `_to_string` methods are changed to delegate to a
newly-created `State`. The main pretty-printing code is moved to a new
`state` module to ensure that it does not accidentally call any of these
public helper functions (instead, the internal functions with the same
name should be used).
-rw-r--r--compiler/rustc_ast/src/token.rs21
-rw-r--r--compiler/rustc_ast_pretty/src/pprust/mod.rs104
-rw-r--r--compiler/rustc_ast_pretty/src/pprust/state.rs (renamed from compiler/rustc_ast_pretty/src/pprust.rs)403
-rw-r--r--compiler/rustc_hir_pretty/src/lib.rs3
-rw-r--r--compiler/rustc_parse/src/lib.rs78
-rw-r--r--compiler/rustc_span/src/symbol.rs1
-rw-r--r--src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs7
-rw-r--r--src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs7
-rw-r--r--src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs7
-rw-r--r--src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs7
-rw-r--r--src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs28
-rw-r--r--src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout4
-rw-r--r--src/test/ui/proc-macro/issue-75734-pp-paren.rs26
-rw-r--r--src/test/ui/proc-macro/issue-75734-pp-paren.stdout134
14 files changed, 633 insertions, 197 deletions
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index d5b3e87adc3..9635750fb40 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -810,10 +810,10 @@ impl Nonterminal {
             if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind {
                 let filename = source_map.span_to_filename(orig_span);
                 if let FileName::Real(RealFileName::Named(path)) = filename {
-                    let matches_prefix = |prefix| {
-                        // Check for a path that ends with 'prefix*/src/lib.rs'
+                    let matches_prefix = |prefix, filename| {
+                        // Check for a path that ends with 'prefix*/src/<filename>'
                         let mut iter = path.components().rev();
-                        iter.next().and_then(|p| p.as_os_str().to_str()) == Some("lib.rs")
+                        iter.next().and_then(|p| p.as_os_str().to_str()) == Some(filename)
                             && iter.next().and_then(|p| p.as_os_str().to_str()) == Some("src")
                             && iter
                                 .next()
@@ -821,14 +821,25 @@ impl Nonterminal {
                                 .map_or(false, |p| p.starts_with(prefix))
                     };
 
-                    if (macro_name == sym::impl_macros && matches_prefix("time-macros-impl"))
-                        || (macro_name == sym::arrays && matches_prefix("js-sys"))
+                    if (macro_name == sym::impl_macros
+                        && matches_prefix("time-macros-impl", "lib.rs"))
+                        || (macro_name == sym::arrays && matches_prefix("js-sys", "lib.rs"))
                     {
                         let snippet = source_map.span_to_snippet(orig_span);
                         if snippet.as_deref() == Ok("$name") {
                             return Some((*ident, *is_raw));
                         }
                     }
+
+                    if macro_name == sym::tuple_from_req
+                        && (matches_prefix("actix-web", "extract.rs")
+                            || matches_prefix("actori-web", "extract.rs"))
+                    {
+                        let snippet = source_map.span_to_snippet(orig_span);
+                        if snippet.as_deref() == Ok("$T") {
+                            return Some((*ident, *is_raw));
+                        }
+                    }
                 }
             }
         }
diff --git a/compiler/rustc_ast_pretty/src/pprust/mod.rs b/compiler/rustc_ast_pretty/src/pprust/mod.rs
new file mode 100644
index 00000000000..b34ea41ab55
--- /dev/null
+++ b/compiler/rustc_ast_pretty/src/pprust/mod.rs
@@ -0,0 +1,104 @@
+#[cfg(test)]
+mod tests;
+
+pub mod state;
+pub use state::{print_crate, AnnNode, Comments, PpAnn, PrintState, State};
+
+use rustc_ast as ast;
+use rustc_ast::token::{Nonterminal, Token, TokenKind};
+use rustc_ast::tokenstream::{TokenStream, TokenTree};
+
+pub fn nonterminal_to_string_no_extra_parens(nt: &Nonterminal) -> String {
+    let state = State::without_insert_extra_parens();
+    state.nonterminal_to_string(nt)
+}
+
+pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
+    State::new().nonterminal_to_string(nt)
+}
+
+/// Print the token kind precisely, without converting `$crate` into its respective crate name.
+pub fn token_kind_to_string(tok: &TokenKind) -> String {
+    State::new().token_kind_to_string(tok)
+}
+
+/// Print the token precisely, without converting `$crate` into its respective crate name.
+pub fn token_to_string(token: &Token) -> String {
+    State::new().token_to_string(token)
+}
+
+pub fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
+    State::new().token_to_string_ext(token, convert_dollar_crate)
+}
+
+pub fn ty_to_string(ty: &ast::Ty) -> String {
+    State::new().ty_to_string(ty)
+}
+
+pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String {
+    State::new().bounds_to_string(bounds)
+}
+
+pub fn pat_to_string(pat: &ast::Pat) -> String {
+    State::new().pat_to_string(pat)
+}
+
+pub fn expr_to_string(e: &ast::Expr) -> String {
+    State::new().expr_to_string(e)
+}
+
+pub fn tt_to_string(tt: &TokenTree) -> String {
+    State::new().tt_to_string(tt)
+}
+
+pub fn tts_to_string(tokens: &TokenStream) -> String {
+    State::new().tts_to_string(tokens)
+}
+
+pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
+    State::new().stmt_to_string(stmt)
+}
+
+pub fn item_to_string(i: &ast::Item) -> String {
+    State::new().item_to_string(i)
+}
+
+pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
+    State::new().generic_params_to_string(generic_params)
+}
+
+pub fn path_to_string(p: &ast::Path) -> String {
+    State::new().path_to_string(p)
+}
+
+pub fn path_segment_to_string(p: &ast::PathSegment) -> String {
+    State::new().path_segment_to_string(p)
+}
+
+pub fn vis_to_string(v: &ast::Visibility) -> String {
+    State::new().vis_to_string(v)
+}
+
+pub fn block_to_string(blk: &ast::Block) -> String {
+    State::new().block_to_string(blk)
+}
+
+pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
+    State::new().meta_list_item_to_string(li)
+}
+
+pub fn attr_item_to_string(ai: &ast::AttrItem) -> String {
+    State::new().attr_item_to_string(ai)
+}
+
+pub fn attribute_to_string(attr: &ast::Attribute) -> String {
+    State::new().attribute_to_string(attr)
+}
+
+pub fn param_to_string(arg: &ast::Param) -> String {
+    State::new().param_to_string(arg)
+}
+
+pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
+    State::new().to_string(f)
+}
diff --git a/compiler/rustc_ast_pretty/src/pprust.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index d16b541c699..9aa066370bb 100644
--- a/compiler/rustc_ast_pretty/src/pprust.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -20,9 +20,6 @@ use rustc_span::{BytePos, FileName, Span};
 
 use std::borrow::Cow;
 
-#[cfg(test)]
-mod tests;
-
 pub enum MacHeader<'a> {
     Path(&'a ast::Path),
     Keyword(&'static str),
@@ -91,6 +88,13 @@ pub struct State<'a> {
     comments: Option<Comments<'a>>,
     ann: &'a (dyn PpAnn + 'a),
     is_expanded: bool,
+    // If `true`, additional parenthesis (separate from `ExprKind::Paren`)
+    // are inserted to ensure that proper precedence is preserved
+    // in the pretty-printed output.
+    //
+    // This is usually `true`, except when performing the pretty-print/reparse
+    // check in `nt_to_tokenstream`
+    insert_extra_parens: bool,
 }
 
 crate const INDENT_UNIT: usize = 4;
@@ -112,6 +116,7 @@ pub fn print_crate<'a>(
         comments: Some(Comments::new(sm, filename, input)),
         ann,
         is_expanded,
+        insert_extra_parens: true,
     };
 
     if is_expanded && has_injected_crate {
@@ -142,13 +147,6 @@ pub fn print_crate<'a>(
     s.s.eof()
 }
 
-pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
-    let mut printer =
-        State { s: pp::mk_printer(), comments: None, ann: &NoAnn, is_expanded: false };
-    f(&mut printer);
-    printer.s.eof()
-}
-
 // This makes printed token streams look slightly nicer,
 // and also addresses some specific regressions described in #63896 and #73345.
 fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
@@ -231,173 +229,8 @@ pub fn literal_to_string(lit: token::Lit) -> String {
     out
 }
 
-/// Print the token kind precisely, without converting `$crate` into its respective crate name.
-pub fn token_kind_to_string(tok: &TokenKind) -> String {
-    token_kind_to_string_ext(tok, None)
-}
-
-fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>) -> String {
-    match *tok {
-        token::Eq => "=".to_string(),
-        token::Lt => "<".to_string(),
-        token::Le => "<=".to_string(),
-        token::EqEq => "==".to_string(),
-        token::Ne => "!=".to_string(),
-        token::Ge => ">=".to_string(),
-        token::Gt => ">".to_string(),
-        token::Not => "!".to_string(),
-        token::Tilde => "~".to_string(),
-        token::OrOr => "||".to_string(),
-        token::AndAnd => "&&".to_string(),
-        token::BinOp(op) => binop_to_string(op).to_string(),
-        token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
-
-        /* Structural symbols */
-        token::At => "@".to_string(),
-        token::Dot => ".".to_string(),
-        token::DotDot => "..".to_string(),
-        token::DotDotDot => "...".to_string(),
-        token::DotDotEq => "..=".to_string(),
-        token::Comma => ",".to_string(),
-        token::Semi => ";".to_string(),
-        token::Colon => ":".to_string(),
-        token::ModSep => "::".to_string(),
-        token::RArrow => "->".to_string(),
-        token::LArrow => "<-".to_string(),
-        token::FatArrow => "=>".to_string(),
-        token::OpenDelim(token::Paren) => "(".to_string(),
-        token::CloseDelim(token::Paren) => ")".to_string(),
-        token::OpenDelim(token::Bracket) => "[".to_string(),
-        token::CloseDelim(token::Bracket) => "]".to_string(),
-        token::OpenDelim(token::Brace) => "{".to_string(),
-        token::CloseDelim(token::Brace) => "}".to_string(),
-        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
-        token::Pound => "#".to_string(),
-        token::Dollar => "$".to_string(),
-        token::Question => "?".to_string(),
-        token::SingleQuote => "'".to_string(),
-
-        /* Literals */
-        token::Literal(lit) => literal_to_string(lit),
-
-        /* Name components */
-        token::Ident(s, is_raw) => IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string(),
-        token::Lifetime(s) => s.to_string(),
-
-        /* Other */
-        token::DocComment(comment_kind, attr_style, data) => {
-            doc_comment_to_string(comment_kind, attr_style, data)
-        }
-        token::Eof => "<eof>".to_string(),
-
-        token::Interpolated(ref nt) => nonterminal_to_string(nt),
-    }
-}
-
-/// Print the token precisely, without converting `$crate` into its respective crate name.
-pub fn token_to_string(token: &Token) -> String {
-    token_to_string_ext(token, false)
-}
-
-fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
-    let convert_dollar_crate = convert_dollar_crate.then_some(token.span);
-    token_kind_to_string_ext(&token.kind, convert_dollar_crate)
-}
-
-pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
-    match *nt {
-        token::NtExpr(ref e) => expr_to_string(e),
-        token::NtMeta(ref e) => attr_item_to_string(e),
-        token::NtTy(ref e) => ty_to_string(e),
-        token::NtPath(ref e) => path_to_string(e),
-        token::NtItem(ref e) => item_to_string(e),
-        token::NtBlock(ref e) => block_to_string(e),
-        token::NtStmt(ref e) => stmt_to_string(e),
-        token::NtPat(ref e) => pat_to_string(e),
-        token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
-        token::NtLifetime(e) => e.to_string(),
-        token::NtLiteral(ref e) => expr_to_string(e),
-        token::NtTT(ref tree) => tt_to_string(tree),
-        token::NtVis(ref e) => vis_to_string(e),
-    }
-}
-
-pub fn ty_to_string(ty: &ast::Ty) -> String {
-    to_string(|s| s.print_type(ty))
-}
-
-pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String {
-    to_string(|s| s.print_type_bounds("", bounds))
-}
-
-pub fn pat_to_string(pat: &ast::Pat) -> String {
-    to_string(|s| s.print_pat(pat))
-}
-
-pub fn expr_to_string(e: &ast::Expr) -> String {
-    to_string(|s| s.print_expr(e))
-}
-
-pub fn tt_to_string(tt: &TokenTree) -> String {
-    to_string(|s| s.print_tt(tt, false))
-}
-
-pub fn tts_to_string(tokens: &TokenStream) -> String {
-    to_string(|s| s.print_tts(tokens, false))
-}
-
-pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
-    to_string(|s| s.print_stmt(stmt))
-}
-
-pub fn item_to_string(i: &ast::Item) -> String {
-    to_string(|s| s.print_item(i))
-}
-
-pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
-    to_string(|s| s.print_generic_params(generic_params))
-}
-
-pub fn path_to_string(p: &ast::Path) -> String {
-    to_string(|s| s.print_path(p, false, 0))
-}
-
-pub fn path_segment_to_string(p: &ast::PathSegment) -> String {
-    to_string(|s| s.print_path_segment(p, false))
-}
-
-pub fn vis_to_string(v: &ast::Visibility) -> String {
-    to_string(|s| s.print_visibility(v))
-}
-
-fn block_to_string(blk: &ast::Block) -> String {
-    to_string(|s| {
-        // Containing cbox, will be closed by `print_block` at `}`.
-        s.cbox(INDENT_UNIT);
-        // Head-ibox, will be closed by `print_block` after `{`.
-        s.ibox(0);
-        s.print_block(blk)
-    })
-}
-
-pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
-    to_string(|s| s.print_meta_list_item(li))
-}
-
-fn attr_item_to_string(ai: &ast::AttrItem) -> String {
-    to_string(|s| s.print_attr_item(ai, ai.path.span))
-}
-
-pub fn attribute_to_string(attr: &ast::Attribute) -> String {
-    to_string(|s| s.print_attribute(attr))
-}
-
-pub fn param_to_string(arg: &ast::Param) -> String {
-    to_string(|s| s.print_param(arg, false))
-}
-
 fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String {
-    format!("{}{}", to_string(|s| s.print_visibility(vis)), s)
+    format!("{}{}", State::new().to_string(|s| s.print_visibility(vis)), s)
 }
 
 impl std::ops::Deref for State<'_> {
@@ -414,6 +247,7 @@ impl std::ops::DerefMut for State<'_> {
 }
 
 pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut {
+    fn insert_extra_parens(&self) -> bool;
     fn comments(&mut self) -> &mut Option<Comments<'a>>;
     fn print_ident(&mut self, ident: Ident);
     fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool);
@@ -679,7 +513,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     fn print_tt(&mut self, tt: &TokenTree, convert_dollar_crate: bool) {
         match tt {
             TokenTree::Token(token) => {
-                self.word(token_to_string_ext(&token, convert_dollar_crate));
+                let token_str = self.token_to_string_ext(&token, convert_dollar_crate);
+                self.word(token_str);
                 if let token::DocComment(..) = token.kind {
                     self.hardbreak()
                 }
@@ -745,14 +580,20 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
                     self.space();
                 }
             }
-            _ => self.word(token_kind_to_string(&token::OpenDelim(delim))),
+            _ => {
+                let token_str = self.token_kind_to_string(&token::OpenDelim(delim));
+                self.word(token_str)
+            }
         }
         self.ibox(0);
         self.print_tts(tts, convert_dollar_crate);
         self.end();
         match delim {
             DelimToken::Brace => self.bclose(span),
-            _ => self.word(token_kind_to_string(&token::CloseDelim(delim))),
+            _ => {
+                let token_str = self.token_kind_to_string(&token::CloseDelim(delim));
+                self.word(token_str)
+            }
         }
     }
 
@@ -818,9 +659,190 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
             }
         }
     }
+
+    fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
+        match *nt {
+            token::NtExpr(ref e) => self.expr_to_string(e),
+            token::NtMeta(ref e) => self.attr_item_to_string(e),
+            token::NtTy(ref e) => self.ty_to_string(e),
+            token::NtPath(ref e) => self.path_to_string(e),
+            token::NtItem(ref e) => self.item_to_string(e),
+            token::NtBlock(ref e) => self.block_to_string(e),
+            token::NtStmt(ref e) => self.stmt_to_string(e),
+            token::NtPat(ref e) => self.pat_to_string(e),
+            token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
+            token::NtLifetime(e) => e.to_string(),
+            token::NtLiteral(ref e) => self.expr_to_string(e),
+            token::NtTT(ref tree) => self.tt_to_string(tree),
+            token::NtVis(ref e) => self.vis_to_string(e),
+        }
+    }
+
+    /// Print the token kind precisely, without converting `$crate` into its respective crate name.
+    fn token_kind_to_string(&self, tok: &TokenKind) -> String {
+        self.token_kind_to_string_ext(tok, None)
+    }
+
+    fn token_kind_to_string_ext(
+        &self,
+        tok: &TokenKind,
+        convert_dollar_crate: Option<Span>,
+    ) -> String {
+        match *tok {
+            token::Eq => "=".to_string(),
+            token::Lt => "<".to_string(),
+            token::Le => "<=".to_string(),
+            token::EqEq => "==".to_string(),
+            token::Ne => "!=".to_string(),
+            token::Ge => ">=".to_string(),
+            token::Gt => ">".to_string(),
+            token::Not => "!".to_string(),
+            token::Tilde => "~".to_string(),
+            token::OrOr => "||".to_string(),
+            token::AndAnd => "&&".to_string(),
+            token::BinOp(op) => binop_to_string(op).to_string(),
+            token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
+
+            /* Structural symbols */
+            token::At => "@".to_string(),
+            token::Dot => ".".to_string(),
+            token::DotDot => "..".to_string(),
+            token::DotDotDot => "...".to_string(),
+            token::DotDotEq => "..=".to_string(),
+            token::Comma => ",".to_string(),
+            token::Semi => ";".to_string(),
+            token::Colon => ":".to_string(),
+            token::ModSep => "::".to_string(),
+            token::RArrow => "->".to_string(),
+            token::LArrow => "<-".to_string(),
+            token::FatArrow => "=>".to_string(),
+            token::OpenDelim(token::Paren) => "(".to_string(),
+            token::CloseDelim(token::Paren) => ")".to_string(),
+            token::OpenDelim(token::Bracket) => "[".to_string(),
+            token::CloseDelim(token::Bracket) => "]".to_string(),
+            token::OpenDelim(token::Brace) => "{".to_string(),
+            token::CloseDelim(token::Brace) => "}".to_string(),
+            token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
+            token::Pound => "#".to_string(),
+            token::Dollar => "$".to_string(),
+            token::Question => "?".to_string(),
+            token::SingleQuote => "'".to_string(),
+
+            /* Literals */
+            token::Literal(lit) => literal_to_string(lit),
+
+            /* Name components */
+            token::Ident(s, is_raw) => {
+                IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string()
+            }
+            token::Lifetime(s) => s.to_string(),
+
+            /* Other */
+            token::DocComment(comment_kind, attr_style, data) => {
+                doc_comment_to_string(comment_kind, attr_style, data)
+            }
+            token::Eof => "<eof>".to_string(),
+
+            token::Interpolated(ref nt) => self.nonterminal_to_string(nt),
+        }
+    }
+
+    /// Print the token precisely, without converting `$crate` into its respective crate name.
+    fn token_to_string(&self, token: &Token) -> String {
+        self.token_to_string_ext(token, false)
+    }
+
+    fn token_to_string_ext(&self, token: &Token, convert_dollar_crate: bool) -> String {
+        let convert_dollar_crate = convert_dollar_crate.then_some(token.span);
+        self.token_kind_to_string_ext(&token.kind, convert_dollar_crate)
+    }
+
+    fn ty_to_string(&self, ty: &ast::Ty) -> String {
+        self.to_string(|s| s.print_type(ty))
+    }
+
+    fn bounds_to_string(&self, bounds: &[ast::GenericBound]) -> String {
+        self.to_string(|s| s.print_type_bounds("", bounds))
+    }
+
+    fn pat_to_string(&self, pat: &ast::Pat) -> String {
+        self.to_string(|s| s.print_pat(pat))
+    }
+
+    fn expr_to_string(&self, e: &ast::Expr) -> String {
+        self.to_string(|s| s.print_expr(e))
+    }
+
+    fn tt_to_string(&self, tt: &TokenTree) -> String {
+        self.to_string(|s| s.print_tt(tt, false))
+    }
+
+    fn tts_to_string(&self, tokens: &TokenStream) -> String {
+        self.to_string(|s| s.print_tts(tokens, false))
+    }
+
+    fn stmt_to_string(&self, stmt: &ast::Stmt) -> String {
+        self.to_string(|s| s.print_stmt(stmt))
+    }
+
+    fn item_to_string(&self, i: &ast::Item) -> String {
+        self.to_string(|s| s.print_item(i))
+    }
+
+    fn generic_params_to_string(&self, generic_params: &[ast::GenericParam]) -> String {
+        self.to_string(|s| s.print_generic_params(generic_params))
+    }
+
+    fn path_to_string(&self, p: &ast::Path) -> String {
+        self.to_string(|s| s.print_path(p, false, 0))
+    }
+
+    fn path_segment_to_string(&self, p: &ast::PathSegment) -> String {
+        self.to_string(|s| s.print_path_segment(p, false))
+    }
+
+    fn vis_to_string(&self, v: &ast::Visibility) -> String {
+        self.to_string(|s| s.print_visibility(v))
+    }
+
+    fn block_to_string(&self, blk: &ast::Block) -> String {
+        self.to_string(|s| {
+            // Containing cbox, will be closed by `print_block` at `}`.
+            s.cbox(INDENT_UNIT);
+            // Head-ibox, will be closed by `print_block` after `{`.
+            s.ibox(0);
+            s.print_block(blk)
+        })
+    }
+
+    fn meta_list_item_to_string(&self, li: &ast::NestedMetaItem) -> String {
+        self.to_string(|s| s.print_meta_list_item(li))
+    }
+
+    fn attr_item_to_string(&self, ai: &ast::AttrItem) -> String {
+        self.to_string(|s| s.print_attr_item(ai, ai.path.span))
+    }
+
+    fn attribute_to_string(&self, attr: &ast::Attribute) -> String {
+        self.to_string(|s| s.print_attribute(attr))
+    }
+
+    fn param_to_string(&self, arg: &ast::Param) -> String {
+        self.to_string(|s| s.print_param(arg, false))
+    }
+
+    fn to_string(&self, f: impl FnOnce(&mut State<'_>)) -> String {
+        let mut printer = State::new();
+        printer.insert_extra_parens = self.insert_extra_parens();
+        f(&mut printer);
+        printer.s.eof()
+    }
 }
 
 impl<'a> PrintState<'a> for State<'a> {
+    fn insert_extra_parens(&self) -> bool {
+        self.insert_extra_parens
+    }
     fn comments(&mut self) -> &mut Option<Comments<'a>> {
         &mut self.comments
     }
@@ -856,6 +878,20 @@ impl<'a> PrintState<'a> for State<'a> {
 }
 
 impl<'a> State<'a> {
+    pub fn new() -> State<'a> {
+        State {
+            s: pp::mk_printer(),
+            comments: None,
+            ann: &NoAnn,
+            is_expanded: false,
+            insert_extra_parens: true,
+        }
+    }
+
+    pub(super) fn without_insert_extra_parens() -> State<'a> {
+        State { insert_extra_parens: false, ..State::new() }
+    }
+
     // Synthesizes a comment that was not textually present in the original source
     // file.
     pub fn synth_comment(&mut self, text: String) {
@@ -1139,7 +1175,7 @@ impl<'a> State<'a> {
                 self.print_fn_full(sig, item.ident, gen, &item.vis, def, body, &item.attrs);
             }
             ast::ItemKind::Mod(ref _mod) => {
-                self.head(to_string(|s| {
+                self.head(self.to_string(|s| {
                     s.print_visibility(&item.vis);
                     s.print_unsafety(_mod.unsafety);
                     s.word("mod");
@@ -1158,7 +1194,7 @@ impl<'a> State<'a> {
                 }
             }
             ast::ItemKind::ForeignMod(ref nmod) => {
-                self.head(to_string(|s| {
+                self.head(self.to_string(|s| {
                     s.print_unsafety(nmod.unsafety);
                     s.word("extern");
                 }));
@@ -1366,7 +1402,7 @@ impl<'a> State<'a> {
                 ast::CrateSugar::JustCrate => self.word_nbsp("crate"),
             },
             ast::VisibilityKind::Restricted { ref path, .. } => {
-                let path = to_string(|s| s.print_path(path, false, 0));
+                let path = self.to_string(|s| s.print_path(path, false, 0));
                 if path == "self" || path == "super" {
                     self.word_nbsp(format!("pub({})", path))
                 } else {
@@ -1658,7 +1694,8 @@ impl<'a> State<'a> {
     }
 
     /// Prints `expr` or `(expr)` when `needs_par` holds.
-    fn print_expr_cond_paren(&mut self, expr: &ast::Expr, needs_par: bool) {
+    fn print_expr_cond_paren(&mut self, expr: &ast::Expr, mut needs_par: bool) {
+        needs_par &= self.insert_extra_parens;
         if needs_par {
             self.popen();
         }
diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs
index f6e4b1fb418..72011f04d9a 100644
--- a/compiler/rustc_hir_pretty/src/lib.rs
+++ b/compiler/rustc_hir_pretty/src/lib.rs
@@ -141,6 +141,9 @@ impl std::ops::DerefMut for State<'_> {
 }
 
 impl<'a> PrintState<'a> for State<'a> {
+    fn insert_extra_parens(&self) -> bool {
+        true
+    }
     fn comments(&mut self) -> &mut Option<Comments<'a>> {
         &mut self.comments
     }
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index b68d36c9a8e..cceaa08daa4 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -7,7 +7,7 @@
 #![feature(or_patterns)]
 
 use rustc_ast as ast;
-use rustc_ast::token::{self, Nonterminal, Token, TokenKind};
+use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
@@ -297,7 +297,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     };
 
     // FIXME(#43081): Avoid this pretty-print + reparse hack
-    let source = pprust::nonterminal_to_string(nt);
+    // Pretty-print the AST struct without inserting any parenthesis
+    // beyond those explicitly written by the user (e.g. `ExpnKind::Paren`).
+    // The resulting stream may have incorrect precedence, but it's only
+    // ever used for a comparison against the capture tokenstream.
+    let source = pprust::nonterminal_to_string_no_extra_parens(nt);
     let filename = FileName::macro_expansion_source_code(&source);
     let reparsed_tokens = parse_stream_from_source_str(filename, source, sess, Some(span));
 
@@ -325,15 +329,43 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     // modifications, including adding/removing typically non-semantic
     // tokens such as extra braces and commas, don't happen.
     if let Some(tokens) = tokens {
-        if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess) {
+        // Compare with a non-relaxed delim match to start.
+        if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess, false) {
             return tokens;
         }
+
+        // The check failed. This time, we pretty-print the AST struct with parenthesis
+        // inserted to preserve precedence. This may cause `None`-delimiters in the captured
+        // token stream to match up with inserted parenthesis in the reparsed stream.
+        let source_with_parens = pprust::nonterminal_to_string(nt);
+        let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens);
+        let reparsed_tokens_with_parens = parse_stream_from_source_str(
+            filename_with_parens,
+            source_with_parens,
+            sess,
+            Some(span),
+        );
+
+        // Compare with a relaxed delim match - we want inserted parenthesis in the
+        // reparsed stream to match `None`-delimiters in the original stream.
+        if tokenstream_probably_equal_for_proc_macro(
+            &tokens,
+            &reparsed_tokens_with_parens,
+            sess,
+            true,
+        ) {
+            return tokens;
+        }
+
         info!(
             "cached tokens found, but they're not \"probably equal\", \
                 going with stringified version"
         );
-        info!("cached tokens: {:?}", tokens);
-        info!("reparsed tokens: {:?}", reparsed_tokens);
+        info!("cached   tokens: {}", pprust::tts_to_string(&tokens));
+        info!("reparsed tokens: {}", pprust::tts_to_string(&reparsed_tokens_with_parens));
+
+        info!("cached   tokens debug: {:?}", tokens);
+        info!("reparsed tokens debug: {:?}", reparsed_tokens_with_parens);
     }
     reparsed_tokens
 }
@@ -347,6 +379,7 @@ pub fn tokenstream_probably_equal_for_proc_macro(
     tokens: &TokenStream,
     reparsed_tokens: &TokenStream,
     sess: &ParseSess,
+    relaxed_delim_match: bool,
 ) -> bool {
     // When checking for `probably_eq`, we ignore certain tokens that aren't
     // preserved in the AST. Because they are not preserved, the pretty
@@ -472,7 +505,9 @@ pub fn tokenstream_probably_equal_for_proc_macro(
     let tokens = tokens.trees().flat_map(|t| expand_token(t, sess));
     let reparsed_tokens = reparsed_tokens.trees().flat_map(|t| expand_token(t, sess));
 
-    tokens.eq_by(reparsed_tokens, |t, rt| tokentree_probably_equal_for_proc_macro(&t, &rt, sess))
+    tokens.eq_by(reparsed_tokens, |t, rt| {
+        tokentree_probably_equal_for_proc_macro(&t, &rt, sess, relaxed_delim_match)
+    })
 }
 
 // See comments in `Nonterminal::to_tokenstream` for why we care about
@@ -484,6 +519,7 @@ pub fn tokentree_probably_equal_for_proc_macro(
     token: &TokenTree,
     reparsed_token: &TokenTree,
     sess: &ParseSess,
+    relaxed_delim_match: bool,
 ) -> bool {
     match (token, reparsed_token) {
         (TokenTree::Token(token), TokenTree::Token(reparsed_token)) => {
@@ -492,9 +528,33 @@ pub fn tokentree_probably_equal_for_proc_macro(
         (
             TokenTree::Delimited(_, delim, tokens),
             TokenTree::Delimited(_, reparsed_delim, reparsed_tokens),
-        ) => {
-            delim == reparsed_delim
-                && tokenstream_probably_equal_for_proc_macro(tokens, reparsed_tokens, sess)
+        ) if delim == reparsed_delim => tokenstream_probably_equal_for_proc_macro(
+            tokens,
+            reparsed_tokens,
+            sess,
+            relaxed_delim_match,
+        ),
+        (TokenTree::Delimited(_, DelimToken::NoDelim, tokens), reparsed_token) => {
+            if relaxed_delim_match {
+                if let TokenTree::Delimited(_, DelimToken::Paren, reparsed_tokens) = reparsed_token
+                {
+                    if tokenstream_probably_equal_for_proc_macro(
+                        tokens,
+                        reparsed_tokens,
+                        sess,
+                        relaxed_delim_match,
+                    ) {
+                        return true;
+                    }
+                }
+            }
+            tokens.len() == 1
+                && tokentree_probably_equal_for_proc_macro(
+                    &tokens.trees().next().unwrap(),
+                    reparsed_token,
+                    sess,
+                    relaxed_delim_match,
+                )
         }
         _ => false,
     }
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 223a0758f00..28fef65da07 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -1114,6 +1114,7 @@ symbols! {
         try_trait,
         tt,
         tuple,
+        tuple_from_req,
         tuple_indexing,
         two_phase,
         ty,
diff --git a/src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs
new file mode 100644
index 00000000000..2d4f6010012
--- /dev/null
+++ b/src/test/ui/proc-macro/group-compat-hack/actix-web-2.0.0/src/extract.rs
@@ -0,0 +1,7 @@
+// ignore-test this is not a test
+
+macro_rules! tuple_from_req {
+    ($T:ident) => {
+        #[my_macro] struct Three($T);
+    }
+}
diff --git a/src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs
new file mode 100644
index 00000000000..2d4f6010012
--- /dev/null
+++ b/src/test/ui/proc-macro/group-compat-hack/actix-web/src/extract.rs
@@ -0,0 +1,7 @@
+// ignore-test this is not a test
+
+macro_rules! tuple_from_req {
+    ($T:ident) => {
+        #[my_macro] struct Three($T);
+    }
+}
diff --git a/src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs
new file mode 100644
index 00000000000..9ec6aba63f3
--- /dev/null
+++ b/src/test/ui/proc-macro/group-compat-hack/actori-web-2.0.0/src/extract.rs
@@ -0,0 +1,7 @@
+// ignore-test this is not a test
+
+macro_rules! tuple_from_req {
+    ($T:ident) => {
+        #[my_macro] struct Four($T);
+    }
+}
diff --git a/src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs b/src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs
new file mode 100644
index 00000000000..9ec6aba63f3
--- /dev/null
+++ b/src/test/ui/proc-macro/group-compat-hack/actori-web/src/extract.rs
@@ -0,0 +1,7 @@
+// ignore-test this is not a test
+
+macro_rules! tuple_from_req {
+    ($T:ident) => {
+        #[my_macro] struct Four($T);
+    }
+}
diff --git a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs
index bc82a2ff196..652fabf34ac 100644
--- a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs
+++ b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.rs
@@ -45,5 +45,33 @@ mod with_version {
     other!(Foo);
 }
 
+mod actix_web_test {
+    include!("actix-web/src/extract.rs");
+
+    struct Foo;
+    tuple_from_req!(Foo);
+}
+
+mod actix_web_version_test {
+    include!("actix-web-2.0.0/src/extract.rs");
+
+    struct Foo;
+    tuple_from_req!(Foo);
+}
+
+mod actori_web_test {
+    include!("actori-web/src/extract.rs");
+
+    struct Foo;
+    tuple_from_req!(Foo);
+}
+
+mod actori_web_version_test {
+    include!("actori-web-2.0.0/src/extract.rs");
+
+    struct Foo;
+    tuple_from_req!(Foo);
+}
+
 
 fn main() {}
diff --git a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout
index e83bc9f8fca..c6b18ab674b 100644
--- a/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout
+++ b/src/test/ui/proc-macro/group-compat-hack/group-compat-hack.stdout
@@ -4,3 +4,7 @@ Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/gro
 Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:21: 5:27 (#20) }, Ident { ident: "One", span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:28: 5:31 (#20) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:43:18: 43:21 (#0) }], span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:31: 5:38 (#20) }, Punct { ch: ';', spacing: Alone, span: $DIR/time-macros-impl-0.1.0/src/lib.rs:5:38: 5:39 (#20) }]
 Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/js-sys-0.3.17/src/lib.rs:5:21: 5:27 (#24) }, Ident { ident: "Two", span: $DIR/js-sys-0.3.17/src/lib.rs:5:28: 5:31 (#24) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:44:13: 44:16 (#0) }], span: $DIR/js-sys-0.3.17/src/lib.rs:5:31: 5:38 (#24) }, Punct { ch: ';', spacing: Alone, span: $DIR/js-sys-0.3.17/src/lib.rs:5:38: 5:39 (#24) }]
 Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/group-compat-hack.rs:38:25: 38:31 (#28) }, Ident { ident: "Three", span: $DIR/group-compat-hack.rs:38:32: 38:37 (#28) }, Group { delimiter: Parenthesis, stream: TokenStream [Group { delimiter: None, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:45:12: 45:15 (#0) }], span: $DIR/group-compat-hack.rs:38:38: 38:43 (#28) }], span: $DIR/group-compat-hack.rs:38:37: 38:44 (#28) }, Punct { ch: ';', spacing: Alone, span: $DIR/group-compat-hack.rs:38:44: 38:45 (#28) }]
+Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actix-web/src/extract.rs:5:21: 5:27 (#33) }, Ident { ident: "Three", span: $DIR/actix-web/src/extract.rs:5:28: 5:33 (#33) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:52:21: 52:24 (#0) }], span: $DIR/actix-web/src/extract.rs:5:33: 5:37 (#33) }, Punct { ch: ';', spacing: Alone, span: $DIR/actix-web/src/extract.rs:5:37: 5:38 (#33) }]
+Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actix-web-2.0.0/src/extract.rs:5:21: 5:27 (#38) }, Ident { ident: "Three", span: $DIR/actix-web-2.0.0/src/extract.rs:5:28: 5:33 (#38) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:59:21: 59:24 (#0) }], span: $DIR/actix-web-2.0.0/src/extract.rs:5:33: 5:37 (#38) }, Punct { ch: ';', spacing: Alone, span: $DIR/actix-web-2.0.0/src/extract.rs:5:37: 5:38 (#38) }]
+Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actori-web/src/extract.rs:5:21: 5:27 (#43) }, Ident { ident: "Four", span: $DIR/actori-web/src/extract.rs:5:28: 5:32 (#43) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:66:21: 66:24 (#0) }], span: $DIR/actori-web/src/extract.rs:5:32: 5:36 (#43) }, Punct { ch: ';', spacing: Alone, span: $DIR/actori-web/src/extract.rs:5:36: 5:37 (#43) }]
+Called proc_macro_hack with TokenStream [Ident { ident: "struct", span: $DIR/actori-web-2.0.0/src/extract.rs:5:21: 5:27 (#48) }, Ident { ident: "Four", span: $DIR/actori-web-2.0.0/src/extract.rs:5:28: 5:32 (#48) }, Group { delimiter: Parenthesis, stream: TokenStream [Ident { ident: "Foo", span: $DIR/group-compat-hack.rs:73:21: 73:24 (#0) }], span: $DIR/actori-web-2.0.0/src/extract.rs:5:32: 5:36 (#48) }, Punct { ch: ';', spacing: Alone, span: $DIR/actori-web-2.0.0/src/extract.rs:5:36: 5:37 (#48) }]
diff --git a/src/test/ui/proc-macro/issue-75734-pp-paren.rs b/src/test/ui/proc-macro/issue-75734-pp-paren.rs
new file mode 100644
index 00000000000..faa93787d13
--- /dev/null
+++ b/src/test/ui/proc-macro/issue-75734-pp-paren.rs
@@ -0,0 +1,26 @@
+// Regression test for issue #75734
+// Ensures that we don't lose tokens when pretty-printing would
+// normally insert extra parentheses.
+
+// check-pass
+// aux-build:test-macros.rs
+// compile-flags: -Z span-debug
+
+#![no_std] // Don't load unnecessary hygiene information from std
+extern crate std;
+
+#[macro_use]
+extern crate test_macros;
+
+macro_rules! mul_2 {
+    ($val:expr) => {
+        print_bang!($val * 2);
+    };
+}
+
+
+#[print_attr]
+fn main() {
+    &|_: u8| {};
+    mul_2!(1 + 1);
+}
diff --git a/src/test/ui/proc-macro/issue-75734-pp-paren.stdout b/src/test/ui/proc-macro/issue-75734-pp-paren.stdout
new file mode 100644
index 00000000000..b33b85f1705
--- /dev/null
+++ b/src/test/ui/proc-macro/issue-75734-pp-paren.stdout
@@ -0,0 +1,134 @@
+PRINT-ATTR INPUT (DISPLAY): fn main() { & | _ : u8 | { } ; mul_2 ! (1 + 1) ; }
+PRINT-ATTR INPUT (DEBUG): TokenStream [
+    Ident {
+        ident: "fn",
+        span: $DIR/issue-75734-pp-paren.rs:23:1: 23:3 (#0),
+    },
+    Ident {
+        ident: "main",
+        span: $DIR/issue-75734-pp-paren.rs:23:4: 23:8 (#0),
+    },
+    Group {
+        delimiter: Parenthesis,
+        stream: TokenStream [],
+        span: $DIR/issue-75734-pp-paren.rs:23:8: 23:10 (#0),
+    },
+    Group {
+        delimiter: Brace,
+        stream: TokenStream [
+            Punct {
+                ch: '&',
+                spacing: Joint,
+                span: $DIR/issue-75734-pp-paren.rs:24:5: 24:6 (#0),
+            },
+            Punct {
+                ch: '|',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:24:6: 24:7 (#0),
+            },
+            Ident {
+                ident: "_",
+                span: $DIR/issue-75734-pp-paren.rs:24:7: 24:8 (#0),
+            },
+            Punct {
+                ch: ':',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:24:8: 24:9 (#0),
+            },
+            Ident {
+                ident: "u8",
+                span: $DIR/issue-75734-pp-paren.rs:24:10: 24:12 (#0),
+            },
+            Punct {
+                ch: '|',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:24:12: 24:13 (#0),
+            },
+            Group {
+                delimiter: Brace,
+                stream: TokenStream [],
+                span: $DIR/issue-75734-pp-paren.rs:24:14: 24:16 (#0),
+            },
+            Punct {
+                ch: ';',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:24:16: 24:17 (#0),
+            },
+            Ident {
+                ident: "mul_2",
+                span: $DIR/issue-75734-pp-paren.rs:25:5: 25:10 (#0),
+            },
+            Punct {
+                ch: '!',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:25:10: 25:11 (#0),
+            },
+            Group {
+                delimiter: Parenthesis,
+                stream: TokenStream [
+                    Literal {
+                        kind: Integer,
+                        symbol: "1",
+                        suffix: None,
+                        span: $DIR/issue-75734-pp-paren.rs:25:12: 25:13 (#0),
+                    },
+                    Punct {
+                        ch: '+',
+                        spacing: Alone,
+                        span: $DIR/issue-75734-pp-paren.rs:25:14: 25:15 (#0),
+                    },
+                    Literal {
+                        kind: Integer,
+                        symbol: "1",
+                        suffix: None,
+                        span: $DIR/issue-75734-pp-paren.rs:25:16: 25:17 (#0),
+                    },
+                ],
+                span: $DIR/issue-75734-pp-paren.rs:25:11: 25:18 (#0),
+            },
+            Punct {
+                ch: ';',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:25:18: 25:19 (#0),
+            },
+        ],
+        span: $DIR/issue-75734-pp-paren.rs:23:11: 26:2 (#0),
+    },
+]
+PRINT-BANG INPUT (DISPLAY): 1 + 1 * 2
+PRINT-BANG INPUT (DEBUG): TokenStream [
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/issue-75734-pp-paren.rs:25:12: 25:13 (#0),
+            },
+            Punct {
+                ch: '+',
+                spacing: Alone,
+                span: $DIR/issue-75734-pp-paren.rs:25:14: 25:15 (#0),
+            },
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/issue-75734-pp-paren.rs:25:16: 25:17 (#0),
+            },
+        ],
+        span: $DIR/issue-75734-pp-paren.rs:17:21: 17:25 (#7),
+    },
+    Punct {
+        ch: '*',
+        spacing: Alone,
+        span: $DIR/issue-75734-pp-paren.rs:17:26: 17:27 (#7),
+    },
+    Literal {
+        kind: Integer,
+        symbol: "2",
+        suffix: None,
+        span: $DIR/issue-75734-pp-paren.rs:17:28: 17:29 (#7),
+    },
+]