Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                           |  91
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                        |  28
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs  |   2
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs              |  14
-rw-r--r--  compiler/rustc_parse/src/lib.rs                              |   1
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                      |  61
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs              |   4
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs               | 291
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                      | 615
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                  |  65
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                      | 516
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                       | 238
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs               | 125
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                       |  70
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                      |  42
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                      | 161
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs                     |   6
-rw-r--r--  compiler/rustc_parse/src/parser/token_type.rs                |  31
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                        | 112
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs                    |  25
20 files changed, 1461 insertions(+), 1037 deletions(-)
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index dc03d6f9521..44b4e1a3e47 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -7,8 +7,7 @@ use rustc_ast::util::parser::ExprPrecedence;
 use rustc_ast::{Path, Visibility};
 use rustc_errors::codes::*;
 use rustc_errors::{
-    Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, SubdiagMessageOp,
-    Subdiagnostic,
+    Applicability, Diag, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level, Subdiagnostic,
 };
 use rustc_macros::{Diagnostic, Subdiagnostic};
 use rustc_session::errors::ExprParenthesesNeeded;
@@ -30,7 +29,6 @@ pub(crate) struct AmbiguousPlus {
 #[derive(Diagnostic)]
 #[diag(parse_maybe_recover_from_bad_type_plus, code = E0178)]
 pub(crate) struct BadTypePlus {
-    pub ty: String,
     #[primary_span]
     pub span: Span,
     #[subdiagnostic]
@@ -106,6 +104,19 @@ pub(crate) struct IncorrectUseOfAwait {
     pub span: Span,
 }
 
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_use_of_use)]
+pub(crate) struct IncorrectUseOfUse {
+    #[primary_span]
+    #[suggestion(
+        parse_parentheses_suggestion,
+        style = "verbose",
+        code = "",
+        applicability = "machine-applicable"
+    )]
+    pub span: Span,
+}
+
 #[derive(Subdiagnostic)]
 #[multipart_suggestion(
     parse_incorrect_use_of_await_postfix_suggestion,
@@ -772,7 +783,6 @@ pub(crate) struct LabeledLoopInBreak {
 }
 
 #[derive(Subdiagnostic)]
-
 pub(crate) enum WrapInParentheses {
     #[multipart_suggestion(
         parse_sugg_wrap_expression_in_parentheses,
@@ -798,16 +808,16 @@ pub(crate) enum WrapInParentheses {
 
 #[derive(Diagnostic)]
 #[diag(parse_array_brackets_instead_of_braces)]
-pub(crate) struct ArrayBracketsInsteadOfSpaces {
+pub(crate) struct ArrayBracketsInsteadOfBraces {
     #[primary_span]
     pub span: Span,
     #[subdiagnostic]
-    pub sub: ArrayBracketsInsteadOfSpacesSugg,
+    pub sub: ArrayBracketsInsteadOfBracesSugg,
 }
 
 #[derive(Subdiagnostic)]
 #[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
-pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
+pub(crate) struct ArrayBracketsInsteadOfBracesSugg {
     #[suggestion_part(code = "[")]
     pub left: Span,
     #[suggestion_part(code = "]")]
@@ -1025,7 +1035,7 @@ pub(crate) struct SuffixedLiteralInAttribute {
 pub(crate) struct InvalidMetaItem {
     #[primary_span]
     pub span: Span,
-    pub token: Token,
+    pub descr: String,
     #[subdiagnostic]
     pub quote_ident_sugg: Option<InvalidMetaItemQuoteIdentSugg>,
 }
@@ -1261,24 +1271,6 @@ pub(crate) struct StructLiteralBodyWithoutPathSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parse_struct_literal_needing_parens)]
-pub(crate) struct StructLiteralNeedingParens {
-    #[primary_span]
-    pub span: Span,
-    #[subdiagnostic]
-    pub sugg: StructLiteralNeedingParensSugg,
-}
-
-#[derive(Subdiagnostic)]
-#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
-pub(crate) struct StructLiteralNeedingParensSugg {
-    #[suggestion_part(code = "(")]
-    pub before: Span,
-    #[suggestion_part(code = ")")]
-    pub after: Span,
-}
-
-#[derive(Diagnostic)]
 #[diag(parse_unmatched_angle_brackets)]
 pub(crate) struct UnmatchedAngleBrackets {
     #[primary_span]
@@ -1501,6 +1493,14 @@ pub(crate) struct AsyncMoveOrderIncorrect {
 }
 
 #[derive(Diagnostic)]
+#[diag(parse_async_use_order_incorrect)]
+pub(crate) struct AsyncUseOrderIncorrect {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "async use", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
 #[diag(parse_double_colon_in_bound)]
 pub(crate) struct DoubleColonInBound {
     #[primary_span]
@@ -1549,11 +1549,7 @@ pub(crate) struct FnTraitMissingParen {
 }
 
 impl Subdiagnostic for FnTraitMissingParen {
-    fn add_to_diag_with<G: EmissionGuarantee, F: SubdiagMessageOp<G>>(
-        self,
-        diag: &mut Diag<'_, G>,
-        _: &F,
-    ) {
+    fn add_to_diag<G: EmissionGuarantee>(self, diag: &mut Diag<'_, G>) {
         diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren);
         diag.span_suggestion_short(
             self.span.shrink_to_hi(),
@@ -1596,9 +1592,6 @@ pub(crate) struct PathSingleColon {
 
     #[suggestion(applicability = "machine-applicable", code = ":", style = "verbose")]
     pub suggestion: Span,
-
-    #[note(parse_type_ascription_removed)]
-    pub type_ascription: bool,
 }
 
 #[derive(Diagnostic)]
@@ -1615,9 +1608,6 @@ pub(crate) struct ColonAsSemi {
     #[primary_span]
     #[suggestion(applicability = "machine-applicable", code = ";", style = "verbose")]
     pub span: Span,
-
-    #[note(parse_type_ascription_removed)]
-    pub type_ascription: bool,
 }
 
 #[derive(Diagnostic)]
@@ -1669,6 +1659,13 @@ pub(crate) struct AsyncMoveBlockIn2015 {
 }
 
 #[derive(Diagnostic)]
+#[diag(parse_async_use_block_in_2015)]
+pub(crate) struct AsyncUseBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
 #[diag(parse_async_bound_modifier_in_2015)]
 pub(crate) struct AsyncBoundModifierIn2015 {
     #[primary_span]
@@ -1687,10 +1684,10 @@ pub(crate) struct SelfArgumentPointer {
 
 #[derive(Diagnostic)]
 #[diag(parse_unexpected_token_after_dot)]
-pub(crate) struct UnexpectedTokenAfterDot<'a> {
+pub(crate) struct UnexpectedTokenAfterDot {
     #[primary_span]
     pub span: Span,
-    pub actual: Cow<'a, str>,
+    pub actual: String,
 }
 
 #[derive(Diagnostic)]
@@ -2760,17 +2757,6 @@ pub(crate) enum UnexpectedExpressionInPatternSugg {
         /// The statement's block's indentation.
         indentation: String,
     },
-
-    #[multipart_suggestion(
-        parse_unexpected_expr_in_pat_inline_const_sugg,
-        applicability = "maybe-incorrect"
-    )]
-    InlineConst {
-        #[suggestion_part(code = "const {{ ")]
-        start_span: Span,
-        #[suggestion_part(code = " }}")]
-        end_span: Span,
-    },
 }
 
 #[derive(Diagnostic)]
@@ -2808,6 +2794,8 @@ pub(crate) struct ReturnTypesUseThinArrow {
 pub(crate) struct NeedPlusAfterTraitObjectLifetime {
     #[primary_span]
     pub span: Span,
+    #[suggestion(code = " + /* Trait */", applicability = "has-placeholders")]
+    pub suggestion: Span,
 }
 
 #[derive(Diagnostic)]
@@ -2923,8 +2911,9 @@ pub(crate) struct AddBoxNew {
 #[diag(parse_bad_return_type_notation_output)]
 pub(crate) struct BadReturnTypeNotationOutput {
     #[primary_span]
-    #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")]
     pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect", style = "verbose")]
+    pub suggestion: Span,
 }
 
 #[derive(Diagnostic)]
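
The `FnTraitMissingParen` hunk above switches manual `Subdiagnostic` impls from `add_to_diag_with` (which took a message-translation callback) to a plain `add_to_diag`. A minimal standalone sketch of that shape, using toy stand-ins rather than the real `rustc_errors` types:

```rust
// Toy model only: `Diag` and `Subdiagnostic` here are simplified stand-ins,
// not the rustc_errors API.
struct Diag {
    labels: Vec<String>,
}

trait Subdiagnostic {
    // After the change, implementors only receive the diagnostic being built;
    // the extra translation-callback parameter is gone.
    fn add_to_diag(self, diag: &mut Diag);
}

struct FnTraitMissingParen {
    lo: u32,
    hi: u32, // stand-in for a rustc_span::Span
}

impl Subdiagnostic for FnTraitMissingParen {
    fn add_to_diag(self, diag: &mut Diag) {
        diag.labels.push(format!("expected `()` after this trait at {}..{}", self.lo, self.hi));
    }
}

fn main() {
    let mut diag = Diag { labels: Vec::new() };
    FnTraitMissingParen { lo: 10, hi: 12 }.add_to_diag(&mut diag);
    println!("{:?}", diag.labels);
}
```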
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 792e2cc26ef..4935fc03256 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -6,8 +6,8 @@ use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
 use rustc_errors::codes::*;
 use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
-use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, Cursor, DocStyle, LiteralKind, RawStrError};
+use rustc_literal_escaper::{EscapeError, Mode, unescape_mixed, unescape_unicode};
 use rustc_session::lint::BuiltinLintDiag;
 use rustc_session::lint::builtin::{
     RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
@@ -384,17 +384,17 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
                 rustc_lexer::TokenKind::Colon => token::Colon,
                 rustc_lexer::TokenKind::Dollar => token::Dollar,
                 rustc_lexer::TokenKind::Eq => token::Eq,
-                rustc_lexer::TokenKind::Bang => token::Not,
+                rustc_lexer::TokenKind::Bang => token::Bang,
                 rustc_lexer::TokenKind::Lt => token::Lt,
                 rustc_lexer::TokenKind::Gt => token::Gt,
-                rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
-                rustc_lexer::TokenKind::And => token::BinOp(token::And),
-                rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
-                rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
-                rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
-                rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
-                rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
-                rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
+                rustc_lexer::TokenKind::Minus => token::Minus,
+                rustc_lexer::TokenKind::And => token::And,
+                rustc_lexer::TokenKind::Or => token::Or,
+                rustc_lexer::TokenKind::Plus => token::Plus,
+                rustc_lexer::TokenKind::Star => token::Star,
+                rustc_lexer::TokenKind::Slash => token::Slash,
+                rustc_lexer::TokenKind::Caret => token::Caret,
+                rustc_lexer::TokenKind::Percent => token::Percent,
 
                 rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
                     // Don't emit diagnostics for sequences of the same invalid token
@@ -970,9 +970,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         postfix_len: u32,
     ) -> (token::LitKind, Symbol) {
         self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
-            unescape::unescape_unicode(src, mode, &mut |span, result| {
-                callback(span, result.map(drop))
-            })
+            unescape_unicode(src, mode, &mut |span, result| callback(span, result.map(drop)))
         })
     }
 
@@ -986,9 +984,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
         postfix_len: u32,
     ) -> (token::LitKind, Symbol) {
         self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
-            unescape::unescape_mixed(src, mode, &mut |span, result| {
-                callback(span, result.map(drop))
-            })
+            unescape_mixed(src, mode, &mut |span, result| callback(span, result.map(drop)))
         })
     }
 }
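
The lexer hunks above replace the nested `token::BinOp(token::Plus)` forms with flat variants such as `token::Plus`, and `token::Not` becomes `token::Bang`. A self-contained toy illustrating that flattening, with invented enums rather than the real `rustc_ast::token` types:

```rust
// Toy enums only; the real TokenKind lives in rustc_ast::token.
#[derive(Debug, PartialEq)]
enum BinOpToken { Plus, Minus }

#[derive(Debug, PartialEq)]
enum OldTokenKind { BinOp(BinOpToken), Not }

#[derive(Debug, PartialEq)]
enum NewTokenKind { Plus, Minus, Bang }

fn flatten(old: OldTokenKind) -> NewTokenKind {
    match old {
        // The nested wrapper disappears: one flat variant per operator.
        OldTokenKind::BinOp(BinOpToken::Plus) => NewTokenKind::Plus,
        OldTokenKind::BinOp(BinOpToken::Minus) => NewTokenKind::Minus,
        OldTokenKind::Not => NewTokenKind::Bang,
    }
}

fn main() {
    assert_eq!(flatten(OldTokenKind::BinOp(BinOpToken::Plus)), NewTokenKind::Plus);
    assert_eq!(flatten(OldTokenKind::BinOp(BinOpToken::Minus)), NewTokenKind::Minus);
    assert_eq!(flatten(OldTokenKind::Not), NewTokenKind::Bang);
    println!("token flattening sketch ok");
}
```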
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 2e066f0179c..ec59a1a0131 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -4,7 +4,7 @@ use std::iter::once;
 use std::ops::Range;
 
 use rustc_errors::{Applicability, DiagCtxtHandle, ErrorGuaranteed};
-use rustc_lexer::unescape::{EscapeError, Mode};
+use rustc_literal_escaper::{EscapeError, Mode};
 use rustc_span::{BytePos, Span};
 use tracing::debug;
 
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 648a352efd9..2bfa1ea4e05 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -308,11 +308,11 @@ pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
 const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     (" ", "Space", None),
     ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
-    ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
+    ("-", "Minus/Hyphen", Some(token::Minus)),
     (",", "Comma", Some(token::Comma)),
     (";", "Semicolon", Some(token::Semi)),
     (":", "Colon", Some(token::Colon)),
-    ("!", "Exclamation Mark", Some(token::Not)),
+    ("!", "Exclamation Mark", Some(token::Bang)),
     ("?", "Question Mark", Some(token::Question)),
     (".", "Period", Some(token::Dot)),
     ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
@@ -321,11 +321,11 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
     ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
     ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
-    ("*", "Asterisk", Some(token::BinOp(token::Star))),
-    ("/", "Slash", Some(token::BinOp(token::Slash))),
+    ("*", "Asterisk", Some(token::Star)),
+    ("/", "Slash", Some(token::Slash)),
     ("\\", "Backslash", None),
-    ("&", "Ampersand", Some(token::BinOp(token::And))),
-    ("+", "Plus Sign", Some(token::BinOp(token::Plus))),
+    ("&", "Ampersand", Some(token::And)),
+    ("+", "Plus Sign", Some(token::Plus)),
     ("<", "Less-Than Sign", Some(token::Lt)),
     ("=", "Equals Sign", Some(token::Eq)),
     ("==", "Double Equals Sign", Some(token::EqEq)),
@@ -376,7 +376,7 @@ pub(super) fn check_for_substitution(
             ascii_name,
         })
     };
-    (token.clone(), sugg)
+    (*token, sugg)
 }
 
 /// Extract string if found at current position with given delimiters
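
For context, the `ASCII_ARRAY` entries updated above feed a recovery that maps Unicode look-alike characters to the ASCII punctuation the lexer actually expects. A tiny standalone stand-in of that idea (the characters chosen here are illustrative, not copied from `UNICODE_ARRAY`):

```rust
// Illustrative stand-in for the confusable-character substitution idea; the
// real tables are UNICODE_ARRAY / ASCII_ARRAY in unicode_chars.rs.
fn ascii_substitute(c: char) -> Option<&'static str> {
    match c {
        '！' => Some("!"), // U+FF01 FULLWIDTH EXCLAMATION MARK
        '＋' => Some("+"), // U+FF0B FULLWIDTH PLUS SIGN
        '－' => Some("-"), // U+FF0D FULLWIDTH HYPHEN-MINUS
        _ => None,
    }
}

fn main() {
    assert_eq!(ascii_substitute('！'), Some("!"));
    assert_eq!(ascii_substitute('x'), None);
    println!("substitution sketch ok");
}
```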
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 1a104ff5e33..2edc8c83017 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -12,7 +12,6 @@
 #![feature(iter_intersperse)]
 #![feature(let_chains)]
 #![feature(string_from_utf8_lossy_owned)]
-#![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
 use std::path::{Path, PathBuf};
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 2691e6f56d6..53614049f08 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,4 +1,6 @@
-use rustc_ast::{self as ast, Attribute, attr, token};
+use rustc_ast as ast;
+use rustc_ast::token::{self, MetaVarKind};
+use rustc_ast::{Attribute, attr};
 use rustc_errors::codes::*;
 use rustc_errors::{Diag, PResult};
 use rustc_span::{BytePos, Span};
@@ -9,7 +11,7 @@ use super::{
     AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
     UsePreAttrPos,
 };
-use crate::{errors, exp, fluent_generated as fluent, maybe_whole};
+use crate::{errors, exp, fluent_generated as fluent};
 
 // Public for rustfmt usage
 #[derive(Debug)]
@@ -125,12 +127,29 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         // Attributes can't have attributes of their own [Editor's note: not with that attitude]
         self.collect_tokens_no_attrs(|this| {
+            let pound_hi = this.token.span.hi();
             assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");
 
+            let not_lo = this.token.span.lo();
             let style =
-                if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
+                if this.eat(exp!(Bang)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
 
-            this.expect(exp!(OpenBracket))?;
+            let mut bracket_res = this.expect(exp!(OpenBracket));
+            // If `#!` is not followed by `[`
+            if let Err(err) = &mut bracket_res
+                && style == ast::AttrStyle::Inner
+                && pound_hi == not_lo
+            {
+                err.note(
+                    "the token sequence `#!` here looks like the start of \
+                    a shebang interpreter directive but it is not",
+                );
+                err.help(
+                    "if you meant this to be a shebang interpreter directive, \
+                    move it to the very start of the file",
+                );
+            }
+            bracket_res?;
             let item = this.parse_attr_item(ForceCollect::No)?;
             this.expect(exp!(CloseBracket))?;
             let attr_sp = lo.to(this.prev_token.span);
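
The new note above only fires when the `#` and `!` tokens are directly adjacent (`pound_hi == not_lo`), i.e. when the pair really does look like a shebang. A small sketch of that span-adjacency check, with a toy `Span` in place of `rustc_span::Span`:

```rust
// Toy Span; the real check compares BytePos values from rustc_span.
#[derive(Clone, Copy)]
struct Span {
    lo: u32,
    hi: u32,
}

fn looks_like_shebang(pound: Span, bang: Span) -> bool {
    // `#` and `!` must touch for the shebang note to apply.
    pound.hi == bang.lo
}

fn main() {
    let pound = Span { lo: 0, hi: 1 };
    let adjacent_bang = Span { lo: 1, hi: 2 };
    let spaced_bang = Span { lo: 2, hi: 3 };
    assert!(looks_like_shebang(pound, adjacent_bang));
    assert!(!looks_like_shebang(pound, spaced_bang));
    println!("adjacency check ok");
}
```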
@@ -269,7 +288,12 @@ impl<'a> Parser<'a> {
     ///     PATH `=` UNSUFFIXED_LIT
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> {
-        maybe_whole!(self, NtMeta, |attr| attr.into_inner());
+        if let Some(item) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Meta { .. }),
+            |this| this.parse_attr_item(force_collect),
+        ) {
+            return Ok(item);
+        }
 
         // Attr items don't have attributes.
         self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
@@ -305,7 +329,7 @@ impl<'a> Parser<'a> {
         loop {
             let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
-            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
+            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Bang) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
                 if attr_style == ast::AttrStyle::Inner {
@@ -396,18 +420,17 @@ impl<'a> Parser<'a> {
         &mut self,
         unsafe_allowed: AllowLeadingUnsafe,
     ) -> PResult<'a, ast::MetaItem> {
-        // We can't use `maybe_whole` here because it would bump in the `None`
-        // case, which we don't want.
-        if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtMeta(attr_item) = &**nt
-        {
-            match attr_item.meta(attr_item.path.span) {
-                Some(meta) => {
-                    self.bump();
-                    return Ok(meta);
-                }
-                None => self.unexpected()?,
-            }
+        if let Some(MetaVarKind::Meta { has_meta_form }) = self.token.is_metavar_seq() {
+            return if has_meta_form {
+                let attr_item = self
+                    .eat_metavar_seq(MetaVarKind::Meta { has_meta_form: true }, |this| {
+                        this.parse_attr_item(ForceCollect::No)
+                    })
+                    .unwrap();
+                Ok(attr_item.meta(attr_item.path.span).unwrap())
+            } else {
+                self.unexpected_any()
+            };
         }
 
         let lo = self.token.span;
@@ -464,7 +487,7 @@ impl<'a> Parser<'a> {
 
         let mut err = errors::InvalidMetaItem {
             span: self.token.span,
-            token: self.token.clone(),
+            descr: super::token_descr(&self.token),
             quote_ident_sugg: None,
         };
 
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index cff998fa137..f1bd6a22730 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -120,7 +120,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
         // produce an empty `TokenStream` if no calls were made, and omit the
         // final token otherwise.
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
+        let tokens = iter::once(FlatToken::Token(self.start_token))
             .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
             .take(self.num_calls as usize);
 
@@ -186,7 +186,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
 impl<'a> Parser<'a> {
     pub(super) fn collect_pos(&self) -> CollectPos {
         CollectPos {
-            start_token: (self.token.clone(), self.token_spacing),
+            start_token: (self.token, self.token_spacing),
             cursor_snapshot: self.token_cursor.clone(),
             start_pos: self.num_bump_calls,
         }
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 67abc2d5394..7c8e0146c3d 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -1,17 +1,15 @@
 use std::mem::take;
 use std::ops::{Deref, DerefMut};
-use std::sync::Arc;
 
 use ast::token::IdentIsRaw;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
-use rustc_ast::tokenstream::AttrTokenTree;
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
     AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
-    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
-    PatKind, Path, PathSegment, QSelf, Recovered, Ty, TyKind,
+    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
+    Path, PathSegment, QSelf, Recovered, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
@@ -33,18 +31,17 @@ use super::{
     SeqSep, TokenType,
 };
 use crate::errors::{
-    AddParen, AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, AwaitSuggestion,
-    BadQPathStage2, BadTypePlus, BadTypePlusSub, ColonAsSemi, ComparisonOperatorsCannotBeChained,
-    ComparisonOperatorsCannotBeChainedSugg, ConstGenericWithoutBraces,
-    ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything, DocCommentOnParamType,
-    DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
+    AddParen, AmbiguousPlus, AsyncMoveBlockIn2015, AsyncUseBlockIn2015, AttributeOnParamType,
+    AwaitSuggestion, BadQPathStage2, BadTypePlus, BadTypePlusSub, ColonAsSemi,
+    ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
+    ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything,
+    DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
     GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
     HelpIdentifierStartsWithNumber, HelpUseLatestEdition, InInTypo, IncorrectAwait,
-    IncorrectSemicolon, IncorrectUseOfAwait, PatternMethodParamWithoutBody, QuestionMarkInType,
-    QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
-    StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens, StructLiteralNeedingParensSugg,
-    SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma, TernaryOperator,
-    UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
+    IncorrectSemicolon, IncorrectUseOfAwait, IncorrectUseOfUse, PatternMethodParamWithoutBody,
+    QuestionMarkInType, QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
+    StructLiteralBodyWithoutPathSugg, SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma,
+    TernaryOperator, UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
     UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType,
 };
 use crate::parser::attr::InnerAttrPolicy;
@@ -325,7 +322,7 @@ impl<'a> Parser<'a> {
         let mut recovered_ident = None;
         // we take this here so that the correct original token is retained in
         // the diagnostic, regardless of eager recovery.
-        let bad_token = self.token.clone();
+        let bad_token = self.token;
 
         // suggest prepending a keyword in identifier position with `r#`
         let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
@@ -385,7 +382,7 @@ impl<'a> Parser<'a> {
             // if the previous token is a valid keyword
             // that might use a generic, then suggest a correct
             // generic placement (later on)
-            let maybe_keyword = self.prev_token.clone();
+            let maybe_keyword = self.prev_token;
             if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
                 // if we have a valid keyword, attempt to parse generics
                 // also obtain the keywords symbol
@@ -533,7 +530,7 @@ impl<'a> Parser<'a> {
                 //   let y = 42;
                 let guar = self.dcx().emit_err(ExpectedSemi {
                     span: self.token.span,
-                    token: self.token.clone(),
+                    token: self.token,
                     unexpected_token_label: None,
                     sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
                 });
@@ -558,7 +555,7 @@ impl<'a> Parser<'a> {
                 let span = self.prev_token.span.shrink_to_hi();
                 let guar = self.dcx().emit_err(ExpectedSemi {
                     span,
-                    token: self.token.clone(),
+                    token: self.token,
                     unexpected_token_label: Some(self.token.span),
                     sugg: ExpectedSemiSugg::AddSemi(span),
                 });
@@ -574,10 +571,17 @@ impl<'a> Parser<'a> {
             return Err(self.dcx().create_err(UseEqInstead { span: self.token.span }));
         }
 
-        if self.token.is_keyword(kw::Move) && self.prev_token.is_keyword(kw::Async) {
-            // The 2015 edition is in use because parsing of `async move` has failed.
+        if (self.token.is_keyword(kw::Move) || self.token.is_keyword(kw::Use))
+            && self.prev_token.is_keyword(kw::Async)
+        {
+            // The 2015 edition is in use because parsing of `async move` or `async use` has failed.
             let span = self.prev_token.span.to(self.token.span);
-            return Err(self.dcx().create_err(AsyncMoveBlockIn2015 { span }));
+            if self.token.is_keyword(kw::Move) {
+                return Err(self.dcx().create_err(AsyncMoveBlockIn2015 { span }));
+            } else {
+                // kw::Use
+                return Err(self.dcx().create_err(AsyncUseBlockIn2015 { span }));
+            }
         }
 
         let expect = tokens_to_string(&expected);
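
For context, the recovery above fires when `async move` (or the analogous `async use`) fails to parse because the crate is on the 2015 edition. On edition 2018+ the `async move` form below is accepted; `async use` sits behind an unstable feature gate and is not shown:

```rust
// Plain `async move` block on edition 2018+; only the future is constructed
// here, no executor is needed for this sketch.
async fn demo() -> i32 {
    let x = 1;
    let fut = async move { x + 1 };
    fut.await
}

fn main() {
    // Driving the future would need an executor, which is out of scope here.
    let _ = demo();
}
```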
@@ -605,6 +609,8 @@ impl<'a> Parser<'a> {
         // FIXME: translation requires list formatting (for `expect`)
         let mut err = self.dcx().struct_span_err(self.token.span, msg_exp);
 
+        self.label_expected_raw_ref(&mut err);
+
         // Look for usages of '=>' where '>=' was probably intended
         if self.token == token::FatArrow
             && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le))
@@ -746,6 +752,25 @@ impl<'a> Parser<'a> {
         Err(err)
     }
 
+    /// Adds a label when `&raw EXPR` was written instead of `&raw const EXPR`/`&raw mut EXPR`.
+    ///
+    /// Given that not all parser diagnostics flow through `expected_one_of_not_found`, this
+    /// label may need to be added to other diagnostic emission paths as needed.
+    pub(super) fn label_expected_raw_ref(&mut self, err: &mut Diag<'_>) {
+        if self.prev_token.is_keyword(kw::Raw)
+            && self.expected_token_types.contains(TokenType::KwMut)
+            && self.expected_token_types.contains(TokenType::KwConst)
+            && self.token.can_begin_expr()
+        {
+            err.span_suggestions(
+                self.prev_token.span.shrink_to_hi(),
+                "`&raw` must be followed by `const` or `mut` to be a raw reference expression",
+                [" const".to_string(), " mut".to_string()],
+                Applicability::MaybeIncorrect,
+            );
+        }
+    }
+
     /// Checks if the current token or the previous token are misspelled keywords
     /// and adds a helpful suggestion.
     fn check_for_misspelled_kw(&self, err: &mut Diag<'_>, expected: &[TokenType]) {
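
The new `label_expected_raw_ref` above triggers when `&raw` is followed directly by an expression and nudges the user toward the two accepted raw-borrow forms. Both are shown in this small standalone example:

```rust
fn main() {
    let mut x = 5_i32;
    // The two forms the suggestion offers: `&raw mut` and `&raw const`.
    let p_mut: *mut i32 = &raw mut x;
    // SAFETY: `p_mut` points at the live local `x` and no reference to `x` exists.
    unsafe { *p_mut += 1 };
    let p_const: *const i32 = &raw const x;
    // SAFETY: `p_const` points at the live local `x`.
    unsafe { println!("x is now {}", *p_const) };
}
```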
@@ -797,7 +822,7 @@ impl<'a> Parser<'a> {
         let span = self.prev_token.span.shrink_to_hi();
         let mut err = self.dcx().create_err(ExpectedSemi {
             span,
-            token: self.token.clone(),
+            token: self.token,
             unexpected_token_label: Some(self.token.span),
             sugg: ExpectedSemiSugg::AddSemi(span),
         });
@@ -944,7 +969,6 @@ impl<'a> Parser<'a> {
         lo: Span,
         s: BlockCheckMode,
         maybe_struct_name: token::Token,
-        can_be_struct_literal: bool,
     ) -> Option<PResult<'a, P<Block>>> {
         if self.token.is_ident() && self.look_ahead(1, |t| t == &token::Colon) {
             // We might be having a struct literal where people forgot to include the path:
@@ -970,47 +994,23 @@ impl<'a> Parser<'a> {
                     // fn foo() -> Foo { Path {
                     //     field: value,
                     // } }
-                    let guar = err.delay_as_bug();
+                    err.cancel();
                     self.restore_snapshot(snapshot);
-                    let mut tail = self.mk_block(
+                    let guar = self.dcx().emit_err(StructLiteralBodyWithoutPath {
+                        span: expr.span,
+                        sugg: StructLiteralBodyWithoutPathSugg {
+                            before: expr.span.shrink_to_lo(),
+                            after: expr.span.shrink_to_hi(),
+                        },
+                    });
+                    Ok(self.mk_block(
                         thin_vec![self.mk_stmt_err(expr.span, guar)],
                         s,
                         lo.to(self.prev_token.span),
-                    );
-                    tail.could_be_bare_literal = true;
-                    if maybe_struct_name.is_ident() && can_be_struct_literal {
-                        // Account for `if Example { a: one(), }.is_pos() {}`.
-                        // expand `before` so that we take care of module path such as:
-                        // `foo::Bar { ... } `
-                        // we expect to suggest `(foo::Bar { ... })` instead of `foo::(Bar { ... })`
-                        let sm = self.psess.source_map();
-                        let before = maybe_struct_name.span.shrink_to_lo();
-                        if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| {
-                            t.is_alphanumeric() || t == ':' || t == '_'
-                        }) {
-                            Err(self.dcx().create_err(StructLiteralNeedingParens {
-                                span: maybe_struct_name.span.to(expr.span),
-                                sugg: StructLiteralNeedingParensSugg {
-                                    before: extend_before.shrink_to_lo(),
-                                    after: expr.span.shrink_to_hi(),
-                                },
-                            }))
-                        } else {
-                            return None;
-                        }
-                    } else {
-                        self.dcx().emit_err(StructLiteralBodyWithoutPath {
-                            span: expr.span,
-                            sugg: StructLiteralBodyWithoutPathSugg {
-                                before: expr.span.shrink_to_lo(),
-                                after: expr.span.shrink_to_hi(),
-                            },
-                        });
-                        Ok(tail)
-                    }
+                    ))
                 }
                 (Err(err), Ok(tail)) => {
-                    // We have a block tail that contains a somehow valid type ascription expr.
+                    // We have a block tail that contains a somehow valid expr.
                     err.cancel();
                     Ok(tail)
                 }
@@ -1020,10 +1020,7 @@ impl<'a> Parser<'a> {
                     self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
                     Err(err)
                 }
-                (Ok(_), Ok(mut tail)) => {
-                    tail.could_be_bare_literal = true;
-                    Ok(tail)
-                }
+                (Ok(_), Ok(tail)) => Ok(tail),
             });
         }
         None
@@ -1169,7 +1166,7 @@ impl<'a> Parser<'a> {
         let mut number_of_gt = 0;
         while self.look_ahead(position, |t| {
             trace!("check_trailing_angle_brackets: t={:?}", t);
-            if *t == token::BinOp(token::BinOpToken::Shr) {
+            if *t == token::Shr {
                 number_of_shr += 1;
                 true
             } else if *t == token::Gt {
@@ -1224,7 +1221,7 @@ impl<'a> Parser<'a> {
                     let span = lo.to(self.prev_token.span);
                     // Detect trailing `>` like in `x.collect::Vec<_>>()`.
                     let mut trailing_span = self.prev_token.span.shrink_to_hi();
-                    while self.token == token::BinOp(token::Shr) || self.token == token::Gt {
+                    while self.token == token::Shr || self.token == token::Gt {
                         trailing_span = trailing_span.to(self.token.span);
                         self.bump();
                     }
@@ -1350,13 +1347,13 @@ impl<'a> Parser<'a> {
             }
             return match (op.node, &outer_op.node) {
                 // `x == y == z`
-                (BinOpKind::Eq, AssocOp::Equal) |
+                (BinOpKind::Eq, AssocOp::Binary(BinOpKind::Eq)) |
                 // `x < y < z` and friends.
-                (BinOpKind::Lt, AssocOp::Less | AssocOp::LessEqual) |
-                (BinOpKind::Le, AssocOp::LessEqual | AssocOp::Less) |
+                (BinOpKind::Lt, AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le)) |
+                (BinOpKind::Le, AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le)) |
                 // `x > y > z` and friends.
-                (BinOpKind::Gt, AssocOp::Greater | AssocOp::GreaterEqual) |
-                (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
+                (BinOpKind::Gt, AssocOp::Binary(BinOpKind::Gt | BinOpKind::Ge)) |
+                (BinOpKind::Ge, AssocOp::Binary(BinOpKind::Gt | BinOpKind::Ge)) => {
                     let expr_to_str = |e: &Expr| {
                         self.span_to_snippet(e.span)
                             .unwrap_or_else(|_| pprust::expr_to_string(e))
@@ -1368,7 +1365,10 @@ impl<'a> Parser<'a> {
                     false // Keep the current parse behavior, where the AST is `(x < y) < z`.
                 }
                 // `x == y < z`
-                (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => {
+                (
+                    BinOpKind::Eq,
+                    AssocOp::Binary(BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge)
+                ) => {
                     // Consume `z`/outer-op-rhs.
                     let snapshot = self.create_snapshot_for_diagnostic();
                     match self.parse_expr() {
@@ -1389,7 +1389,10 @@ impl<'a> Parser<'a> {
                     }
                 }
                 // `x > y == z`
-                (BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge, AssocOp::Equal) => {
+                (
+                    BinOpKind::Lt | BinOpKind::Le | BinOpKind::Gt | BinOpKind::Ge,
+                    AssocOp::Binary(BinOpKind::Eq)
+                ) => {
                     let snapshot = self.create_snapshot_for_diagnostic();
                     // At this point it is always valid to enclose the lhs in parentheses, no
                     // further checks are necessary.
@@ -1457,15 +1460,14 @@ impl<'a> Parser<'a> {
 
                 // Include `<` to provide this recommendation even in a case like
                 // `Foo<Bar<Baz<Qux, ()>>>`
-                if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Less
-                    || outer_op.node == AssocOp::Greater
+                if op.node == BinOpKind::Lt && outer_op.node == AssocOp::Binary(BinOpKind::Lt)
+                    || outer_op.node == AssocOp::Binary(BinOpKind::Gt)
                 {
-                    if outer_op.node == AssocOp::Less {
+                    if outer_op.node == AssocOp::Binary(BinOpKind::Lt) {
                         let snapshot = self.create_snapshot_for_diagnostic();
                         self.bump();
                         // So far we have parsed `foo<bar<`, consume the rest of the type args.
-                        let modifiers =
-                            [(token::Lt, 1), (token::Gt, -1), (token::BinOp(token::Shr), -2)];
+                        let modifiers = [(token::Lt, 1), (token::Gt, -1), (token::Shr, -2)];
                         self.consume_tts(1, &modifiers);
 
                         if !&[token::OpenDelim(Delimiter::Parenthesis), token::PathSep]
@@ -1655,19 +1657,19 @@ impl<'a> Parser<'a> {
 
         self.bump(); // `+`
         let _bounds = self.parse_generic_bounds()?;
-        let sum_span = ty.span.to(self.prev_token.span);
-
         let sub = match &ty.kind {
             TyKind::Ref(_lifetime, mut_ty) => {
                 let lo = mut_ty.ty.span.shrink_to_lo();
                 let hi = self.prev_token.span.shrink_to_hi();
                 BadTypePlusSub::AddParen { suggestion: AddParen { lo, hi } }
             }
-            TyKind::Ptr(..) | TyKind::BareFn(..) => BadTypePlusSub::ForgotParen { span: sum_span },
-            _ => BadTypePlusSub::ExpectPath { span: sum_span },
+            TyKind::Ptr(..) | TyKind::BareFn(..) => {
+                BadTypePlusSub::ForgotParen { span: ty.span.to(self.prev_token.span) }
+            }
+            _ => BadTypePlusSub::ExpectPath { span: ty.span },
         };
 
-        self.dcx().emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub });
+        self.dcx().emit_err(BadTypePlus { span: ty.span, sub });
 
         Ok(())
     }
@@ -1941,10 +1943,7 @@ impl<'a> Parser<'a> {
             && self.token == token::Colon
             && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span))
         {
-            self.dcx().emit_err(ColonAsSemi {
-                span: self.token.span,
-                type_ascription: self.psess.unstable_features.is_nightly_build(),
-            });
+            self.dcx().emit_err(ColonAsSemi { span: self.token.span });
             self.bump();
             return true;
         }
@@ -1958,7 +1957,7 @@ impl<'a> Parser<'a> {
         &mut self,
         await_sp: Span,
     ) -> PResult<'a, P<Expr>> {
-        let (hi, expr, is_question) = if self.token == token::Not {
+        let (hi, expr, is_question) = if self.token == token::Bang {
             // Handle `await!(<expr>)`.
             self.recover_await_macro()?
         } else {
@@ -1970,7 +1969,7 @@ impl<'a> Parser<'a> {
     }
 
     fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
-        self.expect(exp!(Not))?;
+        self.expect(exp!(Bang))?;
         self.expect(exp!(OpenParen))?;
         let expr = self.parse_expr()?;
         self.expect(exp!(CloseParen))?;
@@ -1988,7 +1987,7 @@ impl<'a> Parser<'a> {
             self.parse_expr()
         }
         .map_err(|mut err| {
-            err.span_label(await_sp, "while parsing this incorrect await expression");
+            err.span_label(await_sp, format!("while parsing this incorrect await expression"));
             err
         })?;
         Ok((expr.span, expr, is_question))
@@ -2027,10 +2026,25 @@ impl<'a> Parser<'a> {
             self.dcx().emit_err(IncorrectUseOfAwait { span });
         }
     }
+
+    /// If encountering `x.use()`, consumes and emits an error.
+    pub(super) fn recover_from_use(&mut self) {
+        if self.token == token::OpenDelim(Delimiter::Parenthesis)
+            && self.look_ahead(1, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
+        {
+            // var.use()
+            let lo = self.token.span;
+            self.bump(); // (
+            let span = lo.to(self.token.span);
+            self.bump(); // )
+
+            self.dcx().emit_err(IncorrectUseOfUse { span });
+        }
+    }
 
     pub(super) fn try_macro_suggestion(&mut self) -> PResult<'a, P<Expr>> {
         let is_try = self.token.is_keyword(kw::Try);
-        let is_questionmark = self.look_ahead(1, |t| t == &token::Not); //check for !
+        let is_questionmark = self.look_ahead(1, |t| t == &token::Bang); //check for !
         let is_open = self.look_ahead(2, |t| t == &token::OpenDelim(Delimiter::Parenthesis)); //check for (
 
         if is_try && is_questionmark && is_open {
@@ -2082,7 +2096,7 @@ impl<'a> Parser<'a> {
                     ast::GenericBound::Trait(poly) => Some(poly),
                     _ => None,
                 })
-                .last()
+                .next_back()
         {
             err.span_suggestion_verbose(
                 poly.span.shrink_to_hi(),
@@ -2400,52 +2414,6 @@ impl<'a> Parser<'a> {
             err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
-
-        // Walk the chain of macro expansions for the current token to point at how the original
-        // code was interpreted. This helps the user realize when a macro argument of one type is
-        // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
-        // in a subsequent macro invocation (#71039).
-        let mut tok = self.token.clone();
-        let mut labels = vec![];
-        while let TokenKind::Interpolated(nt) = &tok.kind {
-            let tokens = nt.tokens();
-            labels.push(Arc::clone(nt));
-            if let Some(tokens) = tokens
-                && let tokens = tokens.to_attr_token_stream()
-                && let tokens = tokens.0.deref()
-                && let [AttrTokenTree::Token(token, _)] = &tokens[..]
-            {
-                tok = token.clone();
-            } else {
-                break;
-            }
-        }
-        let mut iter = labels.into_iter().peekable();
-        let mut show_link = false;
-        while let Some(nt) = iter.next() {
-            let descr = nt.descr();
-            if let Some(next) = iter.peek() {
-                let next_descr = next.descr();
-                if next_descr != descr {
-                    err.span_label(next.use_span(), format!("this is expected to be {next_descr}"));
-                    err.span_label(
-                        nt.use_span(),
-                        format!(
-                            "this is interpreted as {}, but it is expected to be {}",
-                            next_descr, descr,
-                        ),
-                    );
-                    show_link = true;
-                }
-            }
-        }
-        if show_link {
-            err.note(
-                "when forwarding a matched fragment to another macro-by-example, matchers in the \
-                 second macro will see an opaque AST of the fragment type, not the underlying \
-                 tokens",
-            );
-        }
         err
     }
 
@@ -2635,10 +2603,12 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, GenericArg> {
         let is_op_or_dot = AssocOp::from_token(&self.token)
             .and_then(|op| {
-                if let AssocOp::Greater
-                | AssocOp::Less
-                | AssocOp::ShiftRight
-                | AssocOp::GreaterEqual
+                if let AssocOp::Binary(
+                    BinOpKind::Gt
+                    | BinOpKind::Lt
+                    | BinOpKind::Shr
+                    | BinOpKind::Ge
+                )
                 // Don't recover from `foo::<bar = baz>`, because this could be an attempt to
                 // assign a value to a defaulted generic parameter.
                 | AssocOp::Assign
@@ -2653,8 +2623,7 @@ impl<'a> Parser<'a> {
             || self.token == TokenKind::Dot;
         // This will be true when a trait object type `Foo +` or a path which was a `const fn` with
         // type params has been parsed.
-        let was_op =
-            matches!(self.prev_token.kind, token::BinOp(token::Plus | token::Shr) | token::Gt);
+        let was_op = matches!(self.prev_token.kind, token::Plus | token::Shr | token::Gt);
         if !is_op_or_dot && !was_op {
             // We perform these checks and early return to avoid taking a snapshot unnecessarily.
             return Err(err);
@@ -2894,7 +2863,12 @@ impl<'a> Parser<'a> {
         first_pat
     }
 
-    pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool {
+    /// If `loop_header` is `Some` and an unexpected block label is encountered,
+    /// it is suggested to be moved just before `loop_header`, else it is suggested to be removed.
+    pub(crate) fn maybe_recover_unexpected_block_label(
+        &mut self,
+        loop_header: Option<Span>,
+    ) -> bool {
         // Check for `'a : {`
         if !(self.check_lifetime()
             && self.look_ahead(1, |t| *t == token::Colon)
@@ -2905,16 +2879,28 @@ impl<'a> Parser<'a> {
         let label = self.eat_label().expect("just checked if a label exists");
         self.bump(); // eat `:`
         let span = label.ident.span.to(self.prev_token.span);
-        self.dcx()
+        let mut diag = self
+            .dcx()
             .struct_span_err(span, "block label not supported here")
-            .with_span_label(span, "not supported here")
-            .with_tool_only_span_suggestion(
+            .with_span_label(span, "not supported here");
+        if let Some(loop_header) = loop_header {
+            diag.multipart_suggestion(
+                "if you meant to label the loop, move this label before the loop",
+                vec![
+                    (label.ident.span.until(self.token.span), String::from("")),
+                    (loop_header.shrink_to_lo(), format!("{}: ", label.ident)),
+                ],
+                Applicability::MachineApplicable,
+            );
+        } else {
+            diag.tool_only_span_suggestion(
                 label.ident.span.until(self.token.span),
                 "remove this block label",
                 "",
                 Applicability::MachineApplicable,
-            )
-            .emit();
+            );
+        }
+        diag.emit();
         true
     }
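
The reworked recovery above handles a label written on the block after a loop header and, when `loop_header` is known, suggests moving the label onto the loop itself. This is the accepted placement it steers toward:

```rust
fn main() {
    let mut n = 0;
    // The label belongs on the loop header, not on the block inside it.
    'outer: loop {
        n += 1;
        if n == 3 {
            break 'outer;
        }
    }
    println!("stopped at {n}");
}
```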
 
@@ -3032,8 +3018,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_vcs_conflict_marker(&mut self) {
         // <<<<<<<
-        let Some(start) = self.conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt)
-        else {
+        let Some(start) = self.conflict_marker(&TokenKind::Shl, &TokenKind::Lt) else {
             return;
         };
         let mut spans = Vec::with_capacity(3);
@@ -3048,15 +3033,13 @@ impl<'a> Parser<'a> {
             if self.token == TokenKind::Eof {
                 break;
             }
-            if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or))
-            {
+            if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::Or) {
                 middlediff3 = Some(span);
             }
             if let Some(span) = self.conflict_marker(&TokenKind::EqEq, &TokenKind::Eq) {
                 middle = Some(span);
             }
-            if let Some(span) = self.conflict_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt)
-            {
+            if let Some(span) = self.conflict_marker(&TokenKind::Shr, &TokenKind::Gt) {
                 spans.push(span);
                 end = Some(span);
                 break;
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 21e01ff535a..71cc814cb50 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -7,21 +7,21 @@ use ast::mut_visit::{self, MutVisitor};
 use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, Token, TokenKind};
 use rustc_ast::tokenstream::TokenTree;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
 use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par};
 use rustc_ast::visit::{Visitor, walk_expr};
 use rustc_ast::{
-    self as ast, AnonConst, Arm, AttrStyle, AttrVec, BinOp, BinOpKind, BlockCheckMode, CaptureBy,
-    ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl, FnRetTy, Label, MacCall,
-    MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind, UnOp, UnsafeBinderCastKind,
+    self as ast, AnonConst, Arm, AssignOp, AssignOpKind, AttrStyle, AttrVec, BinOp, BinOpKind,
+    BlockCheckMode, CaptureBy, ClosureBinder, DUMMY_NODE_ID, Expr, ExprField, ExprKind, FnDecl,
+    FnRetTy, Label, MacCall, MetaItemLit, Movability, Param, RangeLimits, StmtKind, Ty, TyKind,
+    UnOp, UnsafeBinderCastKind, YieldKind,
 };
-use rustc_ast_pretty::pprust;
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_errors::{Applicability, Diag, PResult, StashKey, Subdiagnostic};
-use rustc_lexer::unescape::unescape_char;
+use rustc_literal_escaper::unescape_char;
 use rustc_macros::Subdiagnostic;
 use rustc_session::errors::{ExprParenthesesNeeded, report_lit_error};
 use rustc_session::lint::BuiltinLintDiag;
@@ -171,7 +171,7 @@ impl<'a> Parser<'a> {
                 break;
             }
             // Check for deprecated `...` syntax
-            if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq {
+            if self.token == token::DotDotDot && op.node == AssocOp::Range(RangeLimits::Closed) {
                 self.err_dotdotdot_syntax(self.token.span);
             }
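
The deprecated-`...` check now keys off `AssocOp::Range(RangeLimits::Closed)`; the surface syntax it steers users toward is `..=`:

```rust
fn main() {
    // `..=` is the accepted closed-range operator that the old `...` syntax
    // error points to.
    let sum: i32 = (1..=3).sum();
    assert_eq!(sum, 6);
    println!("1 + 2 + 3 = {sum}");
}
```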
 
@@ -188,17 +188,12 @@ impl<'a> Parser<'a> {
             }
 
             // Look for JS' `===` and `!==` and recover
-            if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual)
+            if let AssocOp::Binary(bop @ BinOpKind::Eq | bop @ BinOpKind::Ne) = op.node
                 && self.token == token::Eq
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
                 let sp = op.span.to(self.token.span);
-                let sugg = match op.node {
-                    AssocOp::Equal => "==",
-                    AssocOp::NotEqual => "!=",
-                    _ => unreachable!(),
-                }
-                .into();
+                let sugg = bop.as_str().into();
                 let invalid = format!("{sugg}=");
                 self.dcx().emit_err(errors::InvalidComparisonOperator {
                     span: sp,
@@ -213,7 +208,7 @@ impl<'a> Parser<'a> {
             }
 
             // Look for PHP's `<>` and recover
-            if op.node == AssocOp::Less
+            if op.node == AssocOp::Binary(BinOpKind::Lt)
                 && self.token == token::Gt
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
@@ -231,7 +226,7 @@ impl<'a> Parser<'a> {
             }
 
             // Look for C++'s `<=>` and recover
-            if op.node == AssocOp::LessEqual
+            if op.node == AssocOp::Binary(BinOpKind::Le)
                 && self.token == token::Gt
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
@@ -244,8 +239,8 @@ impl<'a> Parser<'a> {
                 self.bump();
             }
 
-            if self.prev_token == token::BinOp(token::Plus)
-                && self.token == token::BinOp(token::Plus)
+            if self.prev_token == token::Plus
+                && self.token == token::Plus
                 && self.prev_token.span.between(self.token.span).is_empty()
             {
                 let op_span = self.prev_token.span.to(self.token.span);
@@ -255,8 +250,8 @@ impl<'a> Parser<'a> {
                 continue;
             }
 
-            if self.prev_token == token::BinOp(token::Minus)
-                && self.token == token::BinOp(token::Minus)
+            if self.prev_token == token::Minus
+                && self.token == token::Minus
                 && self.prev_token.span.between(self.token.span).is_empty()
                 && !self.look_ahead(1, |tok| tok.can_begin_expr())
             {
@@ -269,13 +264,13 @@ impl<'a> Parser<'a> {
 
             let op = op.node;
             // Special cases:
-            if op == AssocOp::As {
+            if op == AssocOp::Cast {
                 lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
                 continue;
-            } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
+            } else if let AssocOp::Range(limits) = op {
                 // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
                 // generalise it to the Fixity::None code.
-                lhs = self.parse_expr_range(prec, lhs, op, cur_op_span)?;
+                lhs = self.parse_expr_range(prec, lhs, limits, cur_op_span)?;
                 break;
             }
 
@@ -290,46 +285,16 @@ impl<'a> Parser<'a> {
 
             let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
             lhs = match op {
-                AssocOp::Add
-                | AssocOp::Subtract
-                | AssocOp::Multiply
-                | AssocOp::Divide
-                | AssocOp::Modulus
-                | AssocOp::LAnd
-                | AssocOp::LOr
-                | AssocOp::BitXor
-                | AssocOp::BitAnd
-                | AssocOp::BitOr
-                | AssocOp::ShiftLeft
-                | AssocOp::ShiftRight
-                | AssocOp::Equal
-                | AssocOp::Less
-                | AssocOp::LessEqual
-                | AssocOp::NotEqual
-                | AssocOp::Greater
-                | AssocOp::GreaterEqual => {
-                    let ast_op = op.to_ast_binop().unwrap();
+                AssocOp::Binary(ast_op) => {
                     let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
                     self.mk_expr(span, binary)
                 }
                 AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)),
-                AssocOp::AssignOp(k) => {
-                    let aop = match k {
-                        token::Plus => BinOpKind::Add,
-                        token::Minus => BinOpKind::Sub,
-                        token::Star => BinOpKind::Mul,
-                        token::Slash => BinOpKind::Div,
-                        token::Percent => BinOpKind::Rem,
-                        token::Caret => BinOpKind::BitXor,
-                        token::And => BinOpKind::BitAnd,
-                        token::Or => BinOpKind::BitOr,
-                        token::Shl => BinOpKind::Shl,
-                        token::Shr => BinOpKind::Shr,
-                    };
+                AssocOp::AssignOp(aop) => {
                     let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
                     self.mk_expr(span, aopexpr)
                 }
-                AssocOp::As | AssocOp::DotDot | AssocOp::DotDotEq => {
+                AssocOp::Cast | AssocOp::Range(_) => {
                     self.dcx().span_bug(span, "AssocOp should have been handled by special case")
                 }
             };
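
The hunk above collapses the per-operator `AssocOp` variants into a single `AssocOp::Binary(BinOpKind)` that is forwarded directly to the AST constructor. A toy, self-contained model of that shape (invented enums, not the real `rustc_ast` ones):

```rust
// Toy stand-ins for AssocOp / BinOpKind / the expression AST.
#[derive(Debug, Clone, Copy, PartialEq)]
enum BinOpKind { Add, Lt }

#[derive(Debug, Clone, Copy, PartialEq)]
enum AssocOp {
    Binary(BinOpKind),
    Assign,
}

#[derive(Debug)]
enum Expr {
    Num(i64),
    Binary(BinOpKind, Box<Expr>, Box<Expr>),
    Assign(Box<Expr>, Box<Expr>),
}

fn combine(op: AssocOp, lhs: Expr, rhs: Expr) -> Expr {
    match op {
        // A single arm now covers every binary operator, instead of listing
        // Add/Subtract/Multiply/... individually.
        AssocOp::Binary(kind) => Expr::Binary(kind, Box::new(lhs), Box::new(rhs)),
        AssocOp::Assign => Expr::Assign(Box::new(lhs), Box::new(rhs)),
    }
}

fn main() {
    let add = combine(AssocOp::Binary(BinOpKind::Add), Expr::Num(1), Expr::Num(2));
    println!("{add:?}");
    let lt = combine(AssocOp::Binary(BinOpKind::Lt), Expr::Num(1), Expr::Num(2));
    println!("{lt:?}");
    let assign = combine(AssocOp::Assign, Expr::Num(0), Expr::Num(9));
    println!("{assign:?}");
}
```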
@@ -347,13 +312,14 @@ impl<'a> Parser<'a> {
             // An exhaustive check is done in the following block, but these are checked first
             // because they *are* ambiguous but also reasonable looking incorrect syntax, so we
             // want to keep their span info to improve diagnostics in these cases in a later stage.
-            (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
-            (true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
-            (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus)
-            (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
-            (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure)
-            (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42`
-            => {
+            (true, Some(AssocOp::Binary(
+                BinOpKind::Mul | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
+                BinOpKind::Sub | // `{ 42 } -5`
+                BinOpKind::Add | // `{ 42 } + 42` (unary plus)
+                BinOpKind::And | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
+                BinOpKind::Or | // `{ 42 } || 42` ("logical or" or closure)
+                BinOpKind::BitOr // `{ 42 } | 42` or `{ 42 } |x| 42`
+            ))) => {
                 // These cases are ambiguous and can't be identified in the parser alone.
                 //
                 // Bitwise AND is left out because guessing intent is hard. We can make
@@ -378,7 +344,7 @@ impl<'a> Parser<'a> {
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
         self.dcx().emit_err(errors::FoundExprWouldBeStmt {
             span: self.token.span,
-            token: self.token.clone(),
+            token: self.token,
             suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
         });
     }
@@ -392,23 +358,21 @@ impl<'a> Parser<'a> {
             // When parsing const expressions, stop parsing when encountering `>`.
             (
                 Some(
-                    AssocOp::ShiftRight
-                    | AssocOp::Greater
-                    | AssocOp::GreaterEqual
-                    | AssocOp::AssignOp(token::BinOpToken::Shr),
+                    AssocOp::Binary(BinOpKind::Shr | BinOpKind::Gt | BinOpKind::Ge)
+                    | AssocOp::AssignOp(AssignOpKind::ShrAssign),
                 ),
                 _,
             ) if self.restrictions.contains(Restrictions::CONST_EXPR) => {
                 return None;
             }
-            // When recovering patterns as expressions, stop parsing when encountering an assignment `=`, an alternative `|`, or a range `..`.
+            // When recovering patterns as expressions, stop parsing when encountering an
+            // assignment `=`, an alternative `|`, or a range `..`.
             (
                 Some(
                     AssocOp::Assign
                     | AssocOp::AssignOp(_)
-                    | AssocOp::BitOr
-                    | AssocOp::DotDot
-                    | AssocOp::DotDotEq,
+                    | AssocOp::Binary(BinOpKind::BitOr)
+                    | AssocOp::Range(_),
                 ),
                 _,
             ) if self.restrictions.contains(Restrictions::IS_PAT) => {
@@ -423,7 +387,7 @@ impl<'a> Parser<'a> {
                     incorrect: "and".into(),
                     sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span),
                 });
-                (AssocOp::LAnd, span)
+                (AssocOp::Binary(BinOpKind::And), span)
             }
             (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
                 self.dcx().emit_err(errors::InvalidLogicalOperator {
@@ -431,7 +395,7 @@ impl<'a> Parser<'a> {
                     incorrect: "or".into(),
                     sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span),
                 });
-                (AssocOp::LOr, span)
+                (AssocOp::Binary(BinOpKind::Or), span)
             }
             _ => return None,
         };
@@ -449,11 +413,11 @@ impl<'a> Parser<'a> {
         &mut self,
         prec: ExprPrecedence,
         lhs: P<Expr>,
-        op: AssocOp,
+        limits: RangeLimits,
         cur_op_span: Span,
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
-            let maybe_lt = self.token.clone();
+            let maybe_lt = self.token;
             let attrs = self.parse_outer_attributes()?;
             Some(
                 self.parse_expr_assoc_with(Bound::Excluded(prec), attrs)
@@ -465,8 +429,6 @@ impl<'a> Parser<'a> {
         };
         let rhs_span = rhs.as_ref().map_or(cur_op_span, |x| x.span);
         let span = self.mk_expr_sp(&lhs, lhs.span, rhs_span);
-        let limits =
-            if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed };
         let range = self.mk_range(Some(lhs), rhs, limits);
         Ok(self.mk_expr(span, range))
     }
@@ -543,23 +505,23 @@ impl<'a> Parser<'a> {
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
         match this.token.uninterpolate().kind {
             // `!expr`
-            token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
+            token::Bang => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
             // `~expr`
             token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
             // `-expr`
-            token::BinOp(token::Minus) => {
+            token::Minus => {
                 make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
             }
             // `*expr`
-            token::BinOp(token::Star) => {
+            token::Star => {
                 make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
             }
             // `&expr` and `&&expr`
-            token::BinOp(token::And) | token::AndAnd => {
+            token::And | token::AndAnd => {
                 make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
             }
             // `+lit`
-            token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
+            token::Plus if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
                 let mut err = errors::LeadingPlusNotSupported {
                     span: lo,
                     remove_plus: None,
@@ -579,9 +541,7 @@ impl<'a> Parser<'a> {
                 this.parse_expr_prefix(attrs)
             }
             // Recover from `++x`:
-            token::BinOp(token::Plus)
-                if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
-            {
+            token::Plus if this.look_ahead(1, |t| *t == token::Plus) => {
                 let starts_stmt = this.prev_token == token::Semi
                     || this.prev_token == token::CloseDelim(Delimiter::Brace);
                 let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
@@ -605,7 +565,11 @@ impl<'a> Parser<'a> {
     fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
         self.bump();
         let attrs = self.parse_outer_attributes()?;
-        let expr = self.parse_expr_prefix(attrs)?;
+        let expr = if self.token.is_range_separator() {
+            self.parse_expr_prefix_range(attrs)
+        } else {
+            self.parse_expr_prefix(attrs)
+        }?;
         let span = self.interpolated_or_expr_span(&expr);
         Ok((lo.to(span), expr))
     }
@@ -640,14 +604,14 @@ impl<'a> Parser<'a> {
             // can't continue an expression after an ident
             token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw),
             token::Literal(..) | token::Pound => true,
-            _ => t.is_whole_expr(),
+            _ => t.is_metavar_expr(),
         };
         self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr)
     }
 
     /// Recover on `not expr` in favor of `!expr`.
     fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let negated_token = self.look_ahead(1, |t| t.clone());
+        let negated_token = self.look_ahead(1, |t| *t);
 
         let sub_diag = if negated_token.is_numeric_lit() {
             errors::NotAsNegationOperatorSub::SuggestNotBitwise
@@ -673,7 +637,12 @@ impl<'a> Parser<'a> {
     /// Returns the span of expr if it was not interpolated, or the span of the interpolated token.
     fn interpolated_or_expr_span(&self, expr: &Expr) -> Span {
         match self.prev_token.kind {
-            TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) | TokenKind::Interpolated(..) => {
+            TokenKind::NtIdent(..) | TokenKind::NtLifetime(..) => self.prev_token.span,
+            TokenKind::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
+                // `expr.span` is the interpolated span, because invisible open
+                // and close delims both get marked with the same span, one
+                // that covers the entire thing between them. (See
+                // `rustc_expand::mbe::transcribe::transcribe`.)
                 self.prev_token.span
             }
             _ => expr.span,
@@ -761,14 +730,12 @@ impl<'a> Parser<'a> {
                                     suggestion,
                                 })
                             }
-                            token::BinOp(token::Shl) => {
-                                self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
-                                    shift: self.token.span,
-                                    r#type: path,
-                                    args: args_span,
-                                    suggestion,
-                                })
-                            }
+                            token::Shl => self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
+                                shift: self.token.span,
+                                r#type: path,
+                                args: args_span,
+                                suggestion,
+                            }),
                             _ => {
                                 // We can end up here even without `<` being the next token, for
                                 // example because `parse_ty_no_plus` returns `Err` on keywords,
@@ -816,6 +783,7 @@ impl<'a> Parser<'a> {
                     ExprKind::MethodCall(_) => "a method call",
                     ExprKind::Call(_, _) => "a function call",
                     ExprKind::Await(_, _) => "`.await`",
+                    ExprKind::Use(_, _) => "`.use`",
                     ExprKind::Match(_, _, MatchKind::Postfix) => "a postfix match",
                     ExprKind::Err(_) => return Ok(with_postfix),
                     _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
@@ -859,6 +827,18 @@ impl<'a> Parser<'a> {
         if let Some(lt) = lifetime {
             self.error_remove_borrow_lifetime(span, lt.ident.span.until(expr.span));
         }
+
+        // Add expected tokens if we parsed `&raw` as an expression.
+        // This will make sure we see "expected `const`, `mut`", and
+        // guide recovery in case we write `&raw expr`.
+        if borrow_kind == ast::BorrowKind::Ref
+            && mutbl == ast::Mutability::Not
+            && matches!(&expr.kind, ExprKind::Path(None, p) if p.is_ident(kw::Raw))
+        {
+            self.expected_token_types.insert(TokenType::KwMut);
+            self.expected_token_types.insert(TokenType::KwConst);
+        }
+
         Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr)))
     }
 
@@ -1015,12 +995,30 @@ impl<'a> Parser<'a> {
     }
 
     fn error_unexpected_after_dot(&self) {
-        let actual = pprust::token_to_string(&self.token);
+        let actual = super::token_descr(&self.token);
         let span = self.token.span;
         let sm = self.psess.source_map();
         let (span, actual) = match (&self.token.kind, self.subparser_name) {
-            (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => {
-                (span.shrink_to_hi(), actual.into())
+            (token::Eof, Some(_)) if let Ok(snippet) = sm.span_to_snippet(sm.next_point(span)) => {
+                (span.shrink_to_hi(), format!("`{}`", snippet))
+            }
+            (token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))), _) => {
+                // No need to report an error. This case will only occur when parsing a pasted
+                // metavariable, and we should have emitted an error when parsing the macro call in
+                // the first place. E.g. in this code:
+                // ```
+                // macro_rules! m { ($e:expr) => { $e }; }
+                //
+                // fn main() {
+                //     let f = 1;
+                //     m!(f.);
+                // }
+                // ```
+                // we'll get an error "unexpected token: `)`" when parsing the `m!(f.)`, so we don't
+                // want to issue a second error when parsing the expansion `«f.»` (where `«`/`»`
+                // represent the invisible delimiters).
+                self.dcx().span_delayed_bug(span, "bad dot expr in metavariable");
+                return;
             }
             _ => (span, actual),
         };
@@ -1330,10 +1328,16 @@ impl<'a> Parser<'a> {
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
+        if self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
             return Ok(self.mk_await_expr(self_arg, lo));
         }
 
+        if self.eat_keyword(exp!(Use)) {
+            let use_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::ergonomic_clones, use_span);
+            return Ok(self.mk_use_expr(self_arg, lo));
+        }
+
         // Post-fix match
         if self.eat_keyword(exp!(Match)) {
             let match_span = self.prev_token.span;
@@ -1341,6 +1345,15 @@ impl<'a> Parser<'a> {
             return self.parse_match_block(lo, match_span, self_arg, MatchKind::Postfix);
         }
 
+        // Parse a postfix `yield`.
+        if self.eat_keyword(exp!(Yield)) {
+            let yield_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::yield_expr, yield_span);
+            return Ok(
+                self.mk_expr(lo.to(yield_span), ExprKind::Yield(YieldKind::Postfix(self_arg)))
+            );
+        }
+
         let fn_span_lo = self.token.span;
         let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
         self.check_trailing_angle_brackets(&seg, &[exp!(OpenParen)]);
@@ -1382,25 +1395,36 @@ impl<'a> Parser<'a> {
     fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
 
-        if let token::Interpolated(nt) = &self.token.kind {
-            match &**nt {
-                token::NtExpr(e) | token::NtLiteral(e) => {
-                    let e = e.clone();
-                    self.bump();
-                    return Ok(e);
-                }
-                token::NtPath(path) => {
-                    let path = (**path).clone();
-                    self.bump();
-                    return Ok(self.mk_expr(self.prev_token.span, ExprKind::Path(None, path)));
-                }
-                token::NtBlock(block) => {
-                    let block = block.clone();
-                    self.bump();
-                    return Ok(self.mk_expr(self.prev_token.span, ExprKind::Block(block, None)));
+        let span = self.token.span;
+        if let Some(expr) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }),
+            |this| {
+                // Force collection (as opposed to just `parse_expr`) is required to avoid the
+                // attribute duplication seen in #138478.
+                let expr = this.parse_expr_force_collect();
+                // FIXME(nnethercote) Sometimes with expressions we get a trailing comma, possibly
+                // related to the FIXME in `collect_tokens_for_expr`. Examples are the multi-line
+                // `assert_eq!` calls involving arguments annotated with `#[rustfmt::skip]` in
+                // `compiler/rustc_index/src/bit_set/tests.rs`.
+                if this.token.kind == token::Comma {
+                    this.bump();
                 }
-                _ => {}
-            };
+                expr
+            },
+        ) {
+            return Ok(expr);
+        } else if let Some(lit) =
+            self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
+        {
+            return Ok(lit);
+        } else if let Some(block) =
+            self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block())
+        {
+            return Ok(self.mk_expr(span, ExprKind::Block(block, None)));
+        } else if let Some(path) =
+            self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
+        {
+            return Ok(self.mk_expr(span, ExprKind::Path(None, path)));
         }
 
         // Outer attributes are already parsed and will be
@@ -1435,6 +1459,7 @@ impl<'a> Parser<'a> {
             } else if this.check_path() {
                 this.parse_expr_path_start()
             } else if this.check_keyword(exp!(Move))
+                || this.check_keyword(exp!(Use))
                 || this.check_keyword(exp!(Static))
                 || this.check_const_closure()
             {
@@ -1492,9 +1517,9 @@ impl<'a> Parser<'a> {
                 this.parse_expr_let(restrictions)
             } else if this.eat_keyword(exp!(Underscore)) {
                 Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
-            } else if this.token.uninterpolated_span().at_least_rust_2018() {
+            } else if this.token_uninterpolated_span().at_least_rust_2018() {
                 // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
-                if this.token.uninterpolated_span().at_least_rust_2024()
+                if this.token_uninterpolated_span().at_least_rust_2024()
                     // check for `gen {}` and `gen move {}`
                     // or `async gen {}` and `async gen move {}`
                     && (this.is_gen_block(kw::Gen, 0)
@@ -1593,7 +1618,7 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
-        let maybe_eq_tok = self.prev_token.clone();
+        let maybe_eq_tok = self.prev_token;
         let (qself, path) = if self.eat_lt() {
             let lt_span = self.prev_token.span;
             let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
@@ -1612,7 +1637,7 @@ impl<'a> Parser<'a> {
         };
 
         // `!`, as an operator, is prefix, so we know this isn't that.
-        let (span, kind) = if self.eat(exp!(Not)) {
+        let (span, kind) = if self.eat(exp!(Bang)) {
             // MACRO INVOCATION expression
             if qself.is_some() {
                 self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
@@ -1652,7 +1677,7 @@ impl<'a> Parser<'a> {
         } else if self.eat_keyword(exp!(Loop)) {
             self.parse_expr_loop(label, lo)
         } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
-            || self.token.is_whole_block()
+            || self.token.is_metavar_block()
         {
             self.parse_expr_block(label, lo, BlockCheckMode::Default)
         } else if !ate_colon
@@ -1917,7 +1942,7 @@ impl<'a> Parser<'a> {
     /// Parse `"yield" expr?`.
     fn parse_expr_yield(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let kind = ExprKind::Yield(self.parse_expr_opt()?);
+        let kind = ExprKind::Yield(YieldKind::Prefix(self.parse_expr_opt()?));
         let span = lo.to(self.prev_token.span);
         self.psess.gated_spans.gate(sym::yield_expr, span);
         let expr = self.mk_expr(span, kind);
@@ -2056,7 +2081,7 @@ impl<'a> Parser<'a> {
         &mut self,
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
-        let token = self.token.clone();
+        let token = self.token;
         let err = |self_: &Self| {
             let msg = format!("unexpected token: {}", super::token_descr(&token));
             self_.dcx().struct_span_err(token.span, msg)
@@ -2085,87 +2110,112 @@ impl<'a> Parser<'a> {
             .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char))
     }
 
-    fn recover_after_dot(&mut self) -> Option<Token> {
-        let mut recovered = None;
+    fn recover_after_dot(&mut self) {
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
             // dot would follow an optional literal, so we do this unconditionally.
-            recovered = self.look_ahead(1, |next_token| {
+            let recovered = self.look_ahead(1, |next_token| {
+                // If it's an integer that looks like a float, then recover as such.
+                //
+                // We will never encounter the exponent part of a floating
+                // point literal here, since there's no use of the exponent
+                // syntax that also constitutes a valid integer, so we need
+                // not check for that.
                 if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                     next_token.kind
+                    && suffix.is_none_or(|s| s == sym::f32 || s == sym::f64)
+                    && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
+                    && self.token.span.hi() == next_token.span.lo()
                 {
-                    // If this integer looks like a float, then recover as such.
-                    //
-                    // We will never encounter the exponent part of a floating
-                    // point literal here, since there's no use of the exponent
-                    // syntax that also constitutes a valid integer, so we need
-                    // not check for that.
-                    if suffix.is_none_or(|s| s == sym::f32 || s == sym::f64)
-                        && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
-                        && self.token.span.hi() == next_token.span.lo()
-                    {
-                        let s = String::from("0.") + symbol.as_str();
-                        let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
-                        return Some(Token::new(kind, self.token.span.to(next_token.span)));
-                    }
+                    let s = String::from("0.") + symbol.as_str();
+                    let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
+                    Some(Token::new(kind, self.token.span.to(next_token.span)))
+                } else {
+                    None
                 }
-                None
             });
-            if let Some(token) = &recovered {
-                self.bump();
+            if let Some(recovered) = recovered {
                 self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart {
-                    span: token.span,
-                    suggestion: token.span.shrink_to_lo(),
+                    span: recovered.span,
+                    suggestion: recovered.span.shrink_to_lo(),
                 });
+                self.bump();
+                self.token = recovered;
             }
         }
+    }
 
-        recovered
+    /// Keep this in sync with `Token::can_begin_literal_maybe_minus` and
+    /// `Lit::from_token` (excluding unary negation).
+    fn eat_token_lit(&mut self) -> Option<token::Lit> {
+        match self.token.uninterpolate().kind {
+            token::Ident(name, IdentIsRaw::No) if name.is_bool_lit() => {
+                self.bump();
+                Some(token::Lit::new(token::Bool, name, None))
+            }
+            token::Literal(token_lit) => {
+                self.bump();
+                Some(token_lit)
+            }
+            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+                MetaVarKind::Literal,
+            ))) => {
+                let lit = self
+                    .eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
+                    .expect("metavar seq literal");
+                let ast::ExprKind::Lit(token_lit) = lit.kind else {
+                    panic!("didn't reparse a literal");
+                };
+                Some(token_lit)
+            }
+            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+                mv_kind @ MetaVarKind::Expr { can_begin_literal_maybe_minus: true, .. },
+            ))) => {
+                let expr = self
+                    .eat_metavar_seq(mv_kind, |this| this.parse_expr())
+                    .expect("metavar seq expr");
+                if let ast::ExprKind::Lit(token_lit) = expr.kind {
+                    Some(token_lit)
+                } else if let ast::ExprKind::Unary(UnOp::Neg, inner) = &expr.kind
+                    && let ast::Expr { kind: ast::ExprKind::Lit(_), .. } = **inner
+                {
+                    None
+                } else {
+                    panic!("unexpected reparsed expr: {:?}", expr.kind);
+                }
+            }
+            _ => None,
+        }
     }
 
     /// Matches `lit = true | false | token_lit`.
     /// Returns `None` if the next token is not a literal.
-    pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
-        let recovered = self.recover_after_dot();
-        let token = recovered.as_ref().unwrap_or(&self.token);
-        let span = token.span;
-
-        token::Lit::from_token(token).map(|token_lit| {
-            self.bump();
-            (token_lit, span)
-        })
+    fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+        self.recover_after_dot();
+        let span = self.token.span;
+        self.eat_token_lit().map(|token_lit| (token_lit, span))
     }
 
     /// Matches `lit = true | false | token_lit`.
     /// Returns `None` if the next token is not a literal.
-    pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
-        let recovered = self.recover_after_dot();
-        let token = recovered.as_ref().unwrap_or(&self.token);
-        match token::Lit::from_token(token) {
-            Some(lit) => {
-                match MetaItemLit::from_token_lit(lit, token.span) {
-                    Ok(lit) => {
-                        self.bump();
-                        Some(lit)
-                    }
-                    Err(err) => {
-                        let span = token.uninterpolated_span();
-                        self.bump();
-                        let guar = report_lit_error(self.psess, err, lit, span);
-                        // Pack possible quotes and prefixes from the original literal into
-                        // the error literal's symbol so they can be pretty-printed faithfully.
-                        let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
-                        let symbol = Symbol::intern(&suffixless_lit.to_string());
-                        let lit = token::Lit::new(token::Err(guar), symbol, lit.suffix);
-                        Some(
-                            MetaItemLit::from_token_lit(lit, span)
-                                .unwrap_or_else(|_| unreachable!()),
-                        )
-                    }
+    fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
+        self.recover_after_dot();
+        let span = self.token.span;
+        let uninterpolated_span = self.token_uninterpolated_span();
+        self.eat_token_lit().map(|token_lit| {
+            match MetaItemLit::from_token_lit(token_lit, span) {
+                Ok(lit) => lit,
+                Err(err) => {
+                    let guar = report_lit_error(&self.psess, err, token_lit, uninterpolated_span);
+                    // Pack possible quotes and prefixes from the original literal into
+                    // the error literal's symbol so they can be pretty-printed faithfully.
+                    let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None);
+                    let symbol = Symbol::intern(&suffixless_lit.to_string());
+                    let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix);
+                    MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap()
                 }
             }
-            None => None,
-        }
+        })
     }
 
     pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
@@ -2189,9 +2239,10 @@ impl<'a> Parser<'a> {
     /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
     /// Keep this in sync with `Token::can_begin_literal_maybe_minus`.
     pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
-        if let token::Interpolated(nt) = &self.token.kind {
-            match &**nt {
-                // FIXME(nnethercote) The `NtExpr` case should only match if
+        if let Some(expr) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Expr { .. }),
+            |this| {
+                // FIXME(nnethercote) The `expr` case should only match if
                 // `e` is an `ExprKind::Lit` or an `ExprKind::Unary` containing
                 // an `UnOp::Neg` and an `ExprKind::Lit`, like how
                 // `can_begin_literal_maybe_minus` works. But this method has
@@ -2201,13 +2252,14 @@ impl<'a> Parser<'a> {
                 // `ExprKind::Path` must be accepted when parsing range
                 // patterns. That requires some care. So for now, we continue
                 // being less strict here than we should be.
-                token::NtExpr(e) | token::NtLiteral(e) => {
-                    let e = e.clone();
-                    self.bump();
-                    return Ok(e);
-                }
-                _ => {}
-            };
+                this.parse_expr()
+            },
+        ) {
+            return Ok(expr);
+        } else if let Some(lit) =
+            self.eat_metavar_seq(MetaVarKind::Literal, |this| this.parse_literal_maybe_minus())
+        {
+            return Ok(lit);
         }
 
         let lo = self.token.span;
@@ -2223,7 +2275,9 @@ impl<'a> Parser<'a> {
     }
 
     fn is_array_like_block(&mut self) -> bool {
-        self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
+        matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace))
+            && self
+                .look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_)))
             && self.look_ahead(2, |t| t == &token::Comma)
             && self.look_ahead(3, |t| t.can_begin_expr())
     }
@@ -2235,9 +2289,9 @@ impl<'a> Parser<'a> {
         let mut snapshot = self.create_snapshot_for_diagnostic();
         match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) {
             Ok(arr) => {
-                let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
+                let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfBraces {
                     span: arr.span,
-                    sub: errors::ArrayBracketsInsteadOfSpacesSugg {
+                    sub: errors::ArrayBracketsInsteadOfBracesSugg {
                         left: lo,
                         right: snapshot.prev_token.span,
                     },
@@ -2308,7 +2362,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        if self.token.is_whole_block() {
+        if self.token.is_metavar_block() {
             self.dcx().emit_err(errors::InvalidBlockMacroSegment {
                 span: self.token.span,
                 context: lo.to(self.token.span),
@@ -2319,7 +2373,7 @@ impl<'a> Parser<'a> {
             });
         }
 
-        let (attrs, blk) = self.parse_block_common(lo, blk_mode, true)?;
+        let (attrs, blk) = self.parse_block_common(lo, blk_mode, None)?;
         Ok(self.mk_expr_with_attrs(blk.span, ExprKind::Block(blk, opt_label), attrs))
     }
 
@@ -2333,7 +2387,7 @@ impl<'a> Parser<'a> {
     fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
-        let before = self.prev_token.clone();
+        let before = self.prev_token;
         let binder = if self.check_keyword(exp!(For)) {
             let lo = self.token.span;
             let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
@@ -2351,7 +2405,7 @@ impl<'a> Parser<'a> {
         let movability =
             if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable };
 
-        let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() {
+        let coroutine_kind = if self.token_uninterpolated_span().at_least_rust_2018() {
             self.parse_coroutine_kind(Case::Sensitive)
         } else {
             None
@@ -2360,23 +2414,21 @@ impl<'a> Parser<'a> {
         let capture_clause = self.parse_capture_clause()?;
         let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?;
         let decl_hi = self.prev_token.span;
-        let mut body = match fn_decl.output {
+        let mut body = match &fn_decl.output {
+            // No return type.
             FnRetTy::Default(_) => {
                 let restrictions =
                     self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
-                let prev = self.prev_token.clone();
-                let token = self.token.clone();
+                let prev = self.prev_token;
+                let token = self.token;
                 let attrs = self.parse_outer_attributes()?;
                 match self.parse_expr_res(restrictions, attrs) {
                     Ok((expr, _)) => expr,
                     Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
                 }
             }
-            _ => {
-                // If an explicit return type is given, require a block to appear (RFC 968).
-                let body_lo = self.token.span;
-                self.parse_expr_block(None, body_lo, BlockCheckMode::Default)?
-            }
+            // Explicit return type (`->`) needs block `-> T { }`.
+            FnRetTy::Ty(ty) => self.parse_closure_block_body(ty.span)?,
         };
 
         match coroutine_kind {
@@ -2428,7 +2480,50 @@ impl<'a> Parser<'a> {
         Ok(closure)
     }
 
-    /// Parses an optional `move` prefix to a closure-like construct.
+    /// If an explicit return type is given, require a block to appear (RFC 968).
+    fn parse_closure_block_body(&mut self, ret_span: Span) -> PResult<'a, P<Expr>> {
+        if self.may_recover()
+            && self.token.can_begin_expr()
+            && !matches!(self.token.kind, TokenKind::OpenDelim(Delimiter::Brace))
+            && !self.token.is_metavar_block()
+        {
+            let snapshot = self.create_snapshot_for_diagnostic();
+            let restrictions =
+                self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
+            let tok = self.token.clone();
+            match self.parse_expr_res(restrictions, AttrWrapper::empty()) {
+                Ok((expr, _)) => {
+                    let descr = super::token_descr(&tok);
+                    let mut diag = self
+                        .dcx()
+                        .struct_span_err(tok.span, format!("expected `{{`, found {descr}"));
+                    diag.span_label(
+                        ret_span,
+                        "explicit return type requires closure body to be enclosed in braces",
+                    );
+                    diag.multipart_suggestion_verbose(
+                        "wrap the expression in curly braces",
+                        vec![
+                            (expr.span.shrink_to_lo(), "{ ".to_string()),
+                            (expr.span.shrink_to_hi(), " }".to_string()),
+                        ],
+                        Applicability::MachineApplicable,
+                    );
+                    diag.emit();
+                    return Ok(expr);
+                }
+                Err(diag) => {
+                    diag.cancel();
+                    self.restore_snapshot(snapshot);
+                }
+            }
+        }
+
+        let body_lo = self.token.span;
+        self.parse_expr_block(None, body_lo, BlockCheckMode::Default)
+    }
+
+    /// Parses an optional `move` or `use` prefix to a closure-like construct.
     fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
         if self.eat_keyword(exp!(Move)) {
             let move_kw_span = self.prev_token.span;
@@ -2441,6 +2536,16 @@ impl<'a> Parser<'a> {
             } else {
                 Ok(CaptureBy::Value { move_kw: move_kw_span })
             }
+        } else if self.eat_keyword(exp!(Use)) {
+            let use_kw_span = self.prev_token.span;
+            self.psess.gated_spans.gate(sym::ergonomic_clones, use_kw_span);
+            // Check for `use async` and recover
+            if self.check_keyword(exp!(Async)) {
+                let use_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
+                Err(self.dcx().create_err(errors::AsyncUseOrderIncorrect { span: use_async_span }))
+            } else {
+                Ok(CaptureBy::Use { use_kw: use_kw_span })
+            }
         } else {
             Ok(CaptureBy::Ref)
         }
@@ -2562,7 +2667,7 @@ impl<'a> Parser<'a> {
             }
         } else {
             let attrs = self.parse_outer_attributes()?; // For recovery.
-            let maybe_fatarrow = self.token.clone();
+            let maybe_fatarrow = self.token;
             let block = if self.check(exp!(OpenBrace)) {
                 self.parse_block()?
             } else if let Some(block) = recover_block_from_condition(self) {
@@ -2611,7 +2716,8 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the condition of a `if` or `while` expression.
-    fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
+    // Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions.
+    pub fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_outer_attributes()?;
         let (mut cond, _) =
             self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?;
@@ -2635,7 +2741,7 @@ impl<'a> Parser<'a> {
                 missing_let: None,
                 comparison: None,
             };
-            if self.prev_token == token::BinOp(token::Or) {
+            if self.prev_token == token::Or {
                 // This was part of a closure, let that part of the parser recover.
                 return Err(self.dcx().create_err(err));
             } else {
@@ -2848,7 +2954,7 @@ impl<'a> Parser<'a> {
     /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
     fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
         let is_await =
-            self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
+            self.token_uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
 
         if is_await {
             self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
@@ -2873,7 +2979,11 @@ impl<'a> Parser<'a> {
             ));
         }
 
-        let (attrs, loop_block) = self.parse_inner_attrs_and_block()?;
+        let (attrs, loop_block) = self.parse_inner_attrs_and_block(
+            // Only suggest moving erroneous block label to the loop header
+            // if there is not already a label there
+            opt_label.is_none().then_some(lo),
+        )?;
 
         let kind = ExprKind::ForLoop { pat, iter: expr, body: loop_block, label: opt_label, kind };
 
@@ -2916,11 +3026,17 @@ impl<'a> Parser<'a> {
             err.span_label(lo, "while parsing the condition of this `while` expression");
             err
         })?;
-        let (attrs, body) = self.parse_inner_attrs_and_block().map_err(|mut err| {
-            err.span_label(lo, "while parsing the body of this `while` expression");
-            err.span_label(cond.span, "this `while` condition successfully parsed");
-            err
-        })?;
+        let (attrs, body) = self
+            .parse_inner_attrs_and_block(
+                // Only suggest moving erroneous block label to the loop header
+                // if there is not already a label there
+                opt_label.is_none().then_some(lo),
+            )
+            .map_err(|mut err| {
+                err.span_label(lo, "while parsing the body of this `while` expression");
+                err.span_label(cond.span, "this `while` condition successfully parsed");
+                err
+            })?;
 
         self.recover_loop_else("while", lo)?;
 
@@ -2934,7 +3050,11 @@ impl<'a> Parser<'a> {
     /// Parses `loop { ... }` (`loop` token already eaten).
     fn parse_expr_loop(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
         let loop_span = self.prev_token.span;
-        let (attrs, body) = self.parse_inner_attrs_and_block()?;
+        let (attrs, body) = self.parse_inner_attrs_and_block(
+            // Only suggest moving erroneous block label to the loop header
+            // if there is not already a label there
+            opt_label.is_none().then_some(lo),
+        )?;
         self.recover_loop_else("loop", lo)?;
         Ok(self.mk_expr_with_attrs(
             lo.to(self.prev_token.span),
@@ -2984,7 +3104,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect, // speculative
                 );
             }
-            if self.maybe_recover_unexpected_block_label() {
+            if self.maybe_recover_unexpected_block_label(None) {
                 e.cancel();
                 self.bump();
             } else {
@@ -3086,7 +3206,7 @@ impl<'a> Parser<'a> {
             }
 
             self.restore_snapshot(pre_pat_snapshot);
-            match self.parse_stmt_without_recovery(true, ForceCollect::No) {
+            match self.parse_stmt_without_recovery(true, ForceCollect::No, false) {
                 // Consume statements for as long as possible.
                 Ok(Some(stmt)) => {
                     stmts.push(stmt);
@@ -3127,10 +3247,11 @@ impl<'a> Parser<'a> {
             let mut result = if armless {
                 // A pattern without a body, allowed for never patterns.
                 arm_body = None;
+                let span = lo.to(this.prev_token.span);
                 this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map(|x| {
                     // Don't gate twice
                     if !pat.contains_never_pattern() {
-                        this.psess.gated_spans.gate(sym::never_patterns, pat.span);
+                        this.psess.gated_spans.gate(sym::never_patterns, span);
                     }
                     x
                 })
@@ -3397,7 +3518,7 @@ impl<'a> Parser<'a> {
 
     /// Parses a `try {...}` expression (`try` token already eaten).
     fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
-        let (attrs, body) = self.parse_inner_attrs_and_block()?;
+        let (attrs, body) = self.parse_inner_attrs_and_block(None)?;
         if self.eat_keyword(exp!(Catch)) {
             Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
         } else {
@@ -3411,7 +3532,7 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw::Do)
             && self.is_keyword_ahead(1, &[kw::Catch])
             && self
-                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
             && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -3422,8 +3543,8 @@ impl<'a> Parser<'a> {
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try)
             && self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
-            && self.token.uninterpolated_span().at_least_rust_2018()
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
+            && self.token_uninterpolated_span().at_least_rust_2018()
     }
 
     /// Parses an `async move? {...}` or `gen move? {...}` expression.
@@ -3445,7 +3566,7 @@ impl<'a> Parser<'a> {
         }
         let capture_clause = self.parse_capture_clause()?;
         let decl_span = lo.to(self.prev_token.span);
-        let (attrs, body) = self.parse_inner_attrs_and_block()?;
+        let (attrs, body) = self.parse_inner_attrs_and_block(None)?;
         let kind = ExprKind::Gen(capture_clause, body, kind, decl_span);
         Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
     }
@@ -3454,14 +3575,14 @@ impl<'a> Parser<'a> {
         self.is_keyword_ahead(lookahead, &[kw])
             && ((
                 // `async move {`
-                self.is_keyword_ahead(lookahead + 1, &[kw::Move])
+                self.is_keyword_ahead(lookahead + 1, &[kw::Move, kw::Use])
                     && self.look_ahead(lookahead + 2, |t| {
-                        *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                        *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
                     })
             ) || (
                 // `async {`
                 self.look_ahead(lookahead + 1, |t| {
-                    *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                    *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block()
                 })
             ))
     }
@@ -3471,19 +3592,9 @@ impl<'a> Parser<'a> {
     }
 
     fn is_certainly_not_a_block(&self) -> bool {
+        // `{ ident, ` and `{ ident: ` cannot start a block.
         self.look_ahead(1, |t| t.is_ident())
-            && (
-                // `{ ident, ` cannot start a block.
-                self.look_ahead(2, |t| t == &token::Comma)
-                    || self.look_ahead(2, |t| t == &token::Colon)
-                        && (
-                            // `{ ident: token, ` cannot start a block.
-                            self.look_ahead(4, |t| t == &token::Comma)
-                                // `{ ident: ` cannot start a block unless it's a type ascription
-                                // `ident: Type`.
-                                || self.look_ahead(3, |t| !t.can_begin_type())
-                        )
-            )
+            && self.look_ahead(2, |t| t == &token::Comma || t == &token::Colon)
     }
 
     fn maybe_parse_struct_expr(
@@ -3764,7 +3875,7 @@ impl<'a> Parser<'a> {
                 return Err(this.dcx().create_err(errors::ExpectedStructField {
                     span: this.look_ahead(1, |t| t.span),
                     ident_span: this.token.span,
-                    token: this.look_ahead(1, |t| t.clone()),
+                    token: this.look_ahead(1, |t| *t),
                 }));
             }
             let (ident, expr) = if is_shorthand {
@@ -3816,8 +3927,8 @@ impl<'a> Parser<'a> {
         self.dcx().emit_err(errors::LeftArrowOperator { span });
     }
 
-    fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
-        ExprKind::AssignOp(binop, lhs, rhs)
+    fn mk_assign_op(&self, assign_op: AssignOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
+        ExprKind::AssignOp(assign_op, lhs, rhs)
     }
 
     fn mk_range(
@@ -3857,6 +3968,13 @@ impl<'a> Parser<'a> {
         await_expr
     }
 
+    fn mk_use_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
+        let span = lo.to(self.prev_token.span);
+        let use_expr = self.mk_expr(span, ExprKind::Use(self_arg, self.prev_token.span));
+        self.recover_from_use();
+        use_expr
+    }
+
     pub(crate) fn mk_expr_with_attrs(&self, span: Span, kind: ExprKind, attrs: AttrVec) -> P<Expr> {
         P(Expr { kind, span, attrs, id: DUMMY_NODE_ID, tokens: None })
     }
@@ -4005,6 +4123,7 @@ impl MutVisitor for CondChecker<'_> {
             }
             ExprKind::Unary(_, _)
             | ExprKind::Await(_, _)
+            | ExprKind::Use(_, _)
             | ExprKind::AssignOp(_, _, _)
             | ExprKind::Range(_, _, _)
             | ExprKind::Try(_)
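
The expr.rs hunks above repeatedly fold the old per-operator `AssocOp` variants into payload-carrying ones: `Binary(BinOpKind)`, `AssignOp(AssignOpKind)` and `Range(RangeLimits)`, next to `Assign` and `Cast`. A minimal, self-contained sketch of that shape and the dispatch it permits follows; the enums below are illustrative placeholders, not rustc_ast's actual definitions.

#[derive(Clone, Copy, Debug)]
enum BinOpKind { Add, Sub, Shr, Gt, Ge, And, Or, BitOr }

#[derive(Clone, Copy, Debug)]
enum AssignOpKind { AddAssign, ShrAssign }

#[derive(Clone, Copy, Debug)]
enum RangeLimits { HalfOpen, Closed }

#[derive(Clone, Copy, Debug)]
enum AssocOp {
    Assign,                 // `=`
    AssignOp(AssignOpKind), // `+=`, `>>=`, ...
    Binary(BinOpKind),      // `+`, `>>`, `&&`, ...
    Cast,                   // `as`
    Range(RangeLimits),     // `..` and `..=`
}

// One arm per payload kind, mirroring the match in the hunk above: the cast
// and range operators stay special-cased, while a single Binary/AssignOp arm
// replaces one arm per operator.
fn describe(op: AssocOp) -> &'static str {
    match op {
        AssocOp::Cast => "special-cased as an `as` cast",
        AssocOp::Range(RangeLimits::HalfOpen) => "special-cased as `..`",
        AssocOp::Range(RangeLimits::Closed) => "special-cased as `..=`",
        AssocOp::Binary(_) => "built as a binary expression",
        AssocOp::AssignOp(_) => "built as a compound assignment",
        AssocOp::Assign => "built as a plain assignment",
    }
}

fn main() {
    let ops = [
        AssocOp::Binary(BinOpKind::Shr),
        AssocOp::AssignOp(AssignOpKind::ShrAssign),
        AssocOp::Range(RangeLimits::HalfOpen),
        AssocOp::Cast,
        AssocOp::Assign,
    ];
    for op in ops {
        println!("{op:?} is {}", describe(op));
    }
}
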
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 11f0e579de5..c3f71dd8b30 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -367,34 +367,47 @@ impl<'a> Parser<'a> {
 
         loop {
             let where_sp = where_lo.to(self.prev_token.span);
+            let attrs = self.parse_outer_attributes()?;
             let pred_lo = self.token.span;
-            let kind = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
-                let lifetime = self.expect_lifetime();
-                // Bounds starting with a colon are mandatory, but possibly empty.
-                self.expect(exp!(Colon))?;
-                let bounds = self.parse_lt_param_bounds();
-                ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate {
-                    lifetime,
-                    bounds,
-                })
-            } else if self.check_type() {
-                match self.parse_ty_where_predicate_kind_or_recover_tuple_struct_body(
-                    struct_, pred_lo, where_sp,
-                )? {
-                    PredicateKindOrStructBody::PredicateKind(kind) => kind,
-                    PredicateKindOrStructBody::StructBody(body) => {
-                        tuple_struct_body = Some(body);
-                        break;
-                    }
+            let predicate = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
+                for attr in &attrs {
+                    self.psess.gated_spans.gate(sym::where_clause_attrs, attr.span);
                 }
-            } else {
-                break;
-            };
-            where_clause.predicates.push(ast::WherePredicate {
-                kind,
-                id: DUMMY_NODE_ID,
-                span: pred_lo.to(self.prev_token.span),
-            });
+                let kind = if this.check_lifetime() && this.look_ahead(1, |t| !t.is_like_plus()) {
+                    let lifetime = this.expect_lifetime();
+                    // Bounds starting with a colon are mandatory, but possibly empty.
+                    this.expect(exp!(Colon))?;
+                    let bounds = this.parse_lt_param_bounds();
+                    Some(ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate {
+                        lifetime,
+                        bounds,
+                    }))
+                } else if this.check_type() {
+                    match this.parse_ty_where_predicate_kind_or_recover_tuple_struct_body(
+                        struct_, pred_lo, where_sp,
+                    )? {
+                        PredicateKindOrStructBody::PredicateKind(kind) => Some(kind),
+                        PredicateKindOrStructBody::StructBody(body) => {
+                            tuple_struct_body = Some(body);
+                            None
+                        }
+                    }
+                } else {
+                    None
+                };
+                let predicate = kind.map(|kind| ast::WherePredicate {
+                    attrs,
+                    kind,
+                    id: DUMMY_NODE_ID,
+                    span: pred_lo.to(this.prev_token.span),
+                    is_placeholder: false,
+                });
+                Ok((predicate, Trailing::No, UsePreAttrPos::No))
+            })?;
+            match predicate {
+                Some(predicate) => where_clause.predicates.push(predicate),
+                None => break,
+            }
 
             let prev_token = self.prev_token.span;
             let ate_comma = self.eat(exp!(Comma));
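
The generics.rs hunk above begins parsing outer attributes on every where-clause predicate, token-collects each predicate individually, and gates the attributes behind `where_clause_attrs`. A hedged sketch of the surface syntax this enables, assuming a nightly toolchain with that gate available; the example is illustrative, not taken from this commit, and only `cfg`-style attributes are assumed to be accepted here.

#![feature(where_clause_attrs)]

fn requires_clone_in_tests<T>(_value: T)
where
    // The attribute is attached to this single predicate; in a non-test
    // build the `Clone` bound is compiled out.
    #[cfg(test)]
    T: Clone,
    T: Sized,
{
}

fn main() {
    requires_clone_in_tests(42u32);
}

Collecting tokens per predicate is what lets later attribute processing (such as `cfg` stripping) act on each predicate on its own rather than on the whole where clause.
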
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index c923717ecaf..39a0291cb1e 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -4,7 +4,7 @@ use std::mem;
 use ast::token::IdentIsRaw;
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, TokenKind};
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::{self as ast};
@@ -21,10 +21,10 @@ use super::diagnostics::{ConsumeClosingDelim, dummy_arg};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
     AttrWrapper, ExpKeywordPair, ExpTokenPair, FollowedByType, ForceCollect, Parser, PathStyle,
-    Trailing, UsePreAttrPos,
+    Recovered, Trailing, UsePreAttrPos,
 };
 use crate::errors::{self, MacroExpandsToAdtField};
-use crate::{exp, fluent_generated as fluent, maybe_whole};
+use crate::{exp, fluent_generated as fluent};
 
 impl<'a> Parser<'a> {
     /// Parses a source module as a crate. This is the main entry point for the parser.
@@ -34,10 +34,10 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
-    fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
+    fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemKind> {
         let safety = self.parse_safety(Case::Sensitive);
         self.expect_keyword(exp!(Mod))?;
-        let id = self.parse_ident()?;
+        let ident = self.parse_ident()?;
         let mod_kind = if self.eat(exp!(Semi)) {
             ModKind::Unloaded
         } else {
@@ -46,7 +46,7 @@ impl<'a> Parser<'a> {
             attrs.extend(inner_attrs);
             ModKind::Loaded(items, Inline::Yes, inner_span, Ok(()))
         };
-        Ok((id, ItemKind::Mod(safety, mod_kind)))
+        Ok(ItemKind::Mod(safety, ident, mod_kind))
     }
 
     /// Parses the contents of a module (inner attributes followed by module items).
@@ -115,8 +115,6 @@ impl<'a> Parser<'a> {
     }
 }
 
-pub(super) type ItemInfo = (Ident, ItemKind);
-
 impl<'a> Parser<'a> {
     pub fn parse_item(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<P<Item>>> {
         let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
@@ -142,10 +140,13 @@ impl<'a> Parser<'a> {
         fn_parse_mode: FnParseMode,
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Item>> {
-        maybe_whole!(self, NtItem, |item| {
+        if let Some(item) =
+            self.eat_metavar_seq(MetaVarKind::Item, |this| this.parse_item(ForceCollect::Yes))
+        {
+            let mut item = item.expect("an actual item");
             attrs.prepend_to_nt_inner(&mut item.attrs);
-            Some(item.into_inner())
-        });
+            return Ok(Some(item.into_inner()));
+        }
 
         self.collect_tokens(None, attrs, force_collect, |this, mut attrs| {
             let lo = this.token.span;
@@ -160,11 +161,11 @@ impl<'a> Parser<'a> {
                 fn_parse_mode,
                 Case::Sensitive,
             )?;
-            if let Some((ident, kind)) = kind {
+            if let Some(kind) = kind {
                 this.error_on_unconsumed_default(def, &kind);
                 let span = lo.to(this.prev_token.span);
                 let id = DUMMY_NODE_ID;
-                let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
+                let item = Item { attrs, id, kind, vis, span, tokens: None };
                 return Ok((Some(item), Trailing::No, UsePreAttrPos::No));
             }
 
@@ -205,20 +206,25 @@ impl<'a> Parser<'a> {
         def: &mut Defaultness,
         fn_parse_mode: FnParseMode,
         case: Case,
-    ) -> PResult<'a, Option<ItemInfo>> {
+    ) -> PResult<'a, Option<ItemKind>> {
         let check_pub = def == &Defaultness::Final;
         let mut def_ = || mem::replace(def, Defaultness::Final);
 
-        let info = if self.eat_keyword_case(exp!(Use), case) {
+        let info = if !self.is_use_closure() && self.eat_keyword_case(exp!(Use), case) {
             self.parse_use_item()?
         } else if self.check_fn_front_matter(check_pub, case) {
             // FUNCTION ITEM
             let (ident, sig, generics, contract, body) =
                 self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
-            (
+            ItemKind::Fn(Box::new(Fn {
+                defaultness: def_(),
                 ident,
-                ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, contract, body })),
-            )
+                sig,
+                generics,
+                contract,
+                body,
+                define_opaque: None,
+            }))
         } else if self.eat_keyword(exp!(Extern)) {
             if self.eat_keyword(exp!(Crate)) {
                 // EXTERN CRATE
@@ -237,8 +243,7 @@ impl<'a> Parser<'a> {
             // STATIC ITEM
             self.bump(); // `static`
             let mutability = self.parse_mutability();
-            let (ident, item) = self.parse_static_item(safety, mutability)?;
-            (ident, ItemKind::Static(Box::new(item)))
+            self.parse_static_item(safety, mutability)?
         } else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
             // CONST ITEM
             if self.token.is_keyword(kw::Impl) {
@@ -248,15 +253,14 @@ impl<'a> Parser<'a> {
                 self.recover_const_mut(const_span);
                 self.recover_missing_kw_before_item()?;
                 let (ident, generics, ty, expr) = self.parse_const_item()?;
-                (
+                ItemKind::Const(Box::new(ConstItem {
+                    defaultness: def_(),
                     ident,
-                    ItemKind::Const(Box::new(ConstItem {
-                        defaultness: def_(),
-                        generics,
-                        ty,
-                        expr,
-                    })),
-                )
+                    generics,
+                    ty,
+                    expr,
+                    define_opaque: None,
+                }))
             }
         } else if self.check_keyword(exp!(Trait)) || self.check_auto_or_unsafe_trait_item() {
             // TRAIT ITEM
@@ -323,14 +327,14 @@ impl<'a> Parser<'a> {
                 self.recover_missing_kw_before_item()?;
             }
             // MACRO INVOCATION ITEM
-            (Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?)))
+            ItemKind::MacCall(P(self.parse_item_macro(vis)?))
         } else {
             return Ok(None);
         };
         Ok(Some(info))
     }
 
-    fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemInfo>> {
+    fn recover_import_as_use(&mut self) -> PResult<'a, Option<ItemKind>> {
         let span = self.token.span;
         let token_name = super::token_descr(&self.token);
         let snapshot = self.create_snapshot_for_diagnostic();
@@ -348,7 +352,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_use_item(&mut self) -> PResult<'a, ItemInfo> {
+    fn parse_use_item(&mut self) -> PResult<'a, ItemKind> {
         let tree = self.parse_use_tree()?;
         if let Err(mut e) = self.expect_semi() {
             match tree.kind {
@@ -362,7 +366,7 @@ impl<'a> Parser<'a> {
             }
             return Err(e);
         }
-        Ok((Ident::empty(), ItemKind::Use(tree)))
+        Ok(ItemKind::Use(tree))
     }
 
     /// When parsing a statement, would the start of a path be an item?
@@ -382,7 +386,7 @@ impl<'a> Parser<'a> {
 
     /// Are we sure this could not possibly be a macro invocation?
     fn isnt_macro_invocation(&mut self) -> bool {
-        self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::PathSep)
+        self.check_ident() && self.look_ahead(1, |t| *t != token::Bang && *t != token::PathSep)
     }
 
     /// Recover on encountering a struct, enum, or method definition where the user
@@ -472,7 +476,7 @@ impl<'a> Parser<'a> {
         if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) }
     }
 
-    fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
+    fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemKind>> {
         // To be expanded
         Ok(None)
     }
@@ -480,7 +484,7 @@ impl<'a> Parser<'a> {
     /// Parses an item macro, e.g., `item!();`.
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
-        self.expect(exp!(Not))?; // `!`
+        self.expect(exp!(Bang))?; // `!`
         match self.parse_delim_args() {
             // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
             Ok(args) => {
@@ -540,7 +544,7 @@ impl<'a> Parser<'a> {
 
     fn parse_polarity(&mut self) -> ast::ImplPolarity {
         // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
-        if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) {
+        if self.check(exp!(Bang)) && self.look_ahead(1, |t| t.can_begin_type()) {
             self.bump(); // `!`
             ast::ImplPolarity::Negative(self.prev_token.span)
         } else {
@@ -566,7 +570,7 @@ impl<'a> Parser<'a> {
         &mut self,
         attrs: &mut AttrVec,
         defaultness: Defaultness,
-    ) -> PResult<'a, ItemInfo> {
+    ) -> PResult<'a, ItemKind> {
         let safety = self.parse_safety(Case::Sensitive);
         self.expect_keyword(exp!(Impl))?;
 
@@ -587,7 +591,7 @@ impl<'a> Parser<'a> {
         }
 
         // Parse stray `impl async Trait`
-        if (self.token.uninterpolated_span().at_least_rust_2018()
+        if (self.token_uninterpolated_span().at_least_rust_2018()
             && self.token.is_keyword(kw::Async))
             || self.is_kw_followed_by_ident(kw::Async)
         {
@@ -598,21 +602,13 @@ impl<'a> Parser<'a> {
         let polarity = self.parse_polarity();
 
         // Parse both types and traits as a type, then reinterpret if necessary.
-        let err_path = |span| ast::Path::from_ident(Ident::new(kw::Empty, span));
         let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
         {
             let span = self.prev_token.span.between(self.token.span);
-            self.dcx().emit_err(errors::MissingTraitInTraitImpl {
+            return Err(self.dcx().create_err(errors::MissingTraitInTraitImpl {
                 span,
                 for_span: span.to(self.token.span),
-            });
-
-            P(Ty {
-                kind: TyKind::Path(None, err_path(span)),
-                span,
-                id: DUMMY_NODE_ID,
-                tokens: None,
-            })
+            }));
         } else {
             self.parse_ty_with_generics_recovery(&generics)?
         };
@@ -639,7 +635,7 @@ impl<'a> Parser<'a> {
 
         let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item(ForceCollect::No))?;
 
-        let item_kind = match ty_second {
+        let (of_trait, self_ty) = match ty_second {
             Some(ty_second) => {
                 // impl Trait for Type
                 if !has_for {
@@ -653,6 +649,7 @@ impl<'a> Parser<'a> {
                     other => {
                         if let TyKind::ImplTrait(_, bounds) = other
                             && let [bound] = bounds.as_slice()
+                            && let GenericBound::Trait(poly_trait_ref) = bound
                         {
                             // Suggest removing extra `impl` keyword:
                             // `impl<T: Default> impl Default for Wrapper<T>`
@@ -662,46 +659,33 @@ impl<'a> Parser<'a> {
                                 extra_impl_kw,
                                 impl_trait_span: ty_first.span,
                             });
+                            poly_trait_ref.trait_ref.path.clone()
                         } else {
-                            self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType {
-                                span: ty_first.span,
-                            });
+                            return Err(self.dcx().create_err(
+                                errors::ExpectedTraitInTraitImplFoundType { span: ty_first.span },
+                            ));
                         }
-                        err_path(ty_first.span)
                     }
                 };
                 let trait_ref = TraitRef { path, ref_id: ty_first.id };
 
-                ItemKind::Impl(Box::new(Impl {
-                    safety,
-                    polarity,
-                    defaultness,
-                    constness,
-                    generics,
-                    of_trait: Some(trait_ref),
-                    self_ty: ty_second,
-                    items: impl_items,
-                }))
-            }
-            None => {
-                // impl Type
-                ItemKind::Impl(Box::new(Impl {
-                    safety,
-                    polarity,
-                    defaultness,
-                    constness,
-                    generics,
-                    of_trait: None,
-                    self_ty: ty_first,
-                    items: impl_items,
-                }))
+                (Some(trait_ref), ty_second)
             }
+            None => (None, ty_first), // impl Type
         };
-
-        Ok((Ident::empty(), item_kind))
+        Ok(ItemKind::Impl(Box::new(Impl {
+            safety,
+            polarity,
+            defaultness,
+            constness,
+            generics,
+            of_trait,
+            self_ty,
+            items: impl_items,
+        })))
     }
 
-    fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> {
+    fn parse_item_delegation(&mut self) -> PResult<'a, ItemKind> {
         let span = self.token.span;
         self.expect_keyword(exp!(Reuse))?;
 
@@ -724,7 +708,7 @@ impl<'a> Parser<'a> {
             })
         };
 
-        let (ident, item_kind) = if self.eat_path_sep() {
+        let item_kind = if self.eat_path_sep() {
             let suffixes = if self.eat(exp!(Star)) {
                 None
             } else {
@@ -732,7 +716,7 @@ impl<'a> Parser<'a> {
                 Some(self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), parse_suffix)?.0)
             };
             let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? };
-            (Ident::empty(), ItemKind::DelegationMac(Box::new(deleg)))
+            ItemKind::DelegationMac(Box::new(deleg))
         } else {
             let rename = rename(self)?;
             let ident = rename.unwrap_or_else(|| path.segments.last().unwrap().ident);
@@ -740,17 +724,18 @@ impl<'a> Parser<'a> {
                 id: DUMMY_NODE_ID,
                 qself,
                 path,
+                ident,
                 rename,
                 body: body(self)?,
                 from_glob: false,
             };
-            (ident, ItemKind::Delegation(Box::new(deleg)))
+            ItemKind::Delegation(Box::new(deleg))
         };
 
         let span = span.to(self.prev_token.span);
         self.psess.gated_spans.gate(sym::fn_delegation, span);
 
-        Ok((ident, item_kind))
+        Ok(item_kind)
     }
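The `reuse` items handled above belong to the unstable `fn_delegation` feature (gated right before the return). A hedged sketch of what such source can look like; the surface syntax is experimental and may differ:

    #![feature(fn_delegation)]
    #![allow(incomplete_features)]

    trait Greet {
        fn greet(&self) -> u32;
    }

    struct Inner;
    impl Greet for Inner {
        fn greet(&self) -> u32 { 1 }
    }

    struct Wrapper(Inner);
    impl Greet for Wrapper {
        // Single delegation item: path, optional rename, body giving the receiver.
        reuse Greet::greet { &self.0 }
    }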
 
     fn parse_item_list<T>(
@@ -885,7 +870,7 @@ impl<'a> Parser<'a> {
             && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
         {
             self.bump(); // `default`
-            Defaultness::Default(self.prev_token.uninterpolated_span())
+            Defaultness::Default(self.prev_token_uninterpolated_span())
         } else {
             Defaultness::Final
         }
@@ -900,7 +885,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
-    fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
+    fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemKind> {
         let safety = self.parse_safety(Case::Sensitive);
         // Parse optional `auto` prefix.
         let is_auto = if self.eat_keyword(exp!(Auto)) {
@@ -941,15 +926,12 @@ impl<'a> Parser<'a> {
 
             self.psess.gated_spans.gate(sym::trait_alias, whole_span);
 
-            Ok((ident, ItemKind::TraitAlias(generics, bounds)))
+            Ok(ItemKind::TraitAlias(ident, generics, bounds))
         } else {
             // It's a normal trait.
             generics.where_clause = self.parse_where_clause()?;
             let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?;
-            Ok((
-                ident,
-                ItemKind::Trait(Box::new(Trait { is_auto, safety, generics, bounds, items })),
-            ))
+            Ok(ItemKind::Trait(Box::new(Trait { is_auto, safety, ident, generics, bounds, items })))
         }
     }
 
@@ -977,23 +959,32 @@ impl<'a> Parser<'a> {
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Option<P<AssocItem>>>> {
         Ok(self.parse_item_(fn_parse_mode, force_collect)?.map(
-            |Item { attrs, id, span, vis, ident, kind, tokens }| {
+            |Item { attrs, id, span, vis, kind, tokens }| {
                 let kind = match AssocItemKind::try_from(kind) {
                     Ok(kind) => kind,
                     Err(kind) => match kind {
-                        ItemKind::Static(box StaticItem { ty, safety: _, mutability: _, expr }) => {
+                        ItemKind::Static(box StaticItem {
+                            ident,
+                            ty,
+                            safety: _,
+                            mutability: _,
+                            expr,
+                            define_opaque,
+                        }) => {
                             self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span });
                             AssocItemKind::Const(Box::new(ConstItem {
                                 defaultness: Defaultness::Final,
+                                ident,
                                 generics: Generics::default(),
                                 ty,
                                 expr,
+                                define_opaque,
                             }))
                         }
                         _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
                     },
                 };
-                Some(P(Item { attrs, id, span, vis, ident, kind, tokens }))
+                Some(P(Item { attrs, id, span, vis, kind, tokens }))
             },
         ))
     }
@@ -1003,7 +994,7 @@ impl<'a> Parser<'a> {
     /// TypeAlias = "type" Ident Generics (":" GenericBounds)? WhereClause ("=" Ty)? WhereClause ";" ;
     /// ```
     /// The `"type"` has already been eaten.
-    fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemInfo> {
+    fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemKind> {
         let ident = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
 
@@ -1038,16 +1029,14 @@ impl<'a> Parser<'a> {
 
         self.expect_semi()?;
 
-        Ok((
+        Ok(ItemKind::TyAlias(Box::new(TyAlias {
+            defaultness,
             ident,
-            ItemKind::TyAlias(Box::new(TyAlias {
-                defaultness,
-                generics,
-                where_clauses,
-                bounds,
-                ty,
-            })),
-        ))
+            generics,
+            where_clauses,
+            bounds,
+            ty,
+        })))
     }
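Matching the `TypeAlias` grammar in the doc comment, for example:

    // Free type alias: `type` Ident Generics `=` Ty `;`
    type Pair<T> = (T, T);

    trait Container {
        // Associated type with a bound and no default; the optional `where`
        // clauses before and after `=` are what `where_clauses` records above.
        type Item: Clone;
    }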
 
     /// Parses a `UseTree`.
@@ -1151,16 +1140,16 @@ impl<'a> Parser<'a> {
     /// extern crate foo;
     /// extern crate bar as foo;
     /// ```
-    fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemInfo> {
+    fn parse_item_extern_crate(&mut self) -> PResult<'a, ItemKind> {
         // Accept `extern crate name-like-this` for better diagnostics
-        let orig_name = self.parse_crate_name_with_dashes()?;
-        let (item_name, orig_name) = if let Some(rename) = self.parse_rename()? {
-            (rename, Some(orig_name.name))
+        let orig_ident = self.parse_crate_name_with_dashes()?;
+        let (orig_name, item_ident) = if let Some(rename) = self.parse_rename()? {
+            (Some(orig_ident.name), rename)
         } else {
-            (orig_name, None)
+            (None, orig_ident)
         };
         self.expect_semi()?;
-        Ok((item_name, ItemKind::ExternCrate(orig_name)))
+        Ok(ItemKind::ExternCrate(orig_name, item_ident))
     }
 
     fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> {
@@ -1211,8 +1200,8 @@ impl<'a> Parser<'a> {
         &mut self,
         attrs: &mut AttrVec,
         mut safety: Safety,
-    ) -> PResult<'a, ItemInfo> {
-        let extern_span = self.prev_token.uninterpolated_span();
+    ) -> PResult<'a, ItemKind> {
+        let extern_span = self.prev_token_uninterpolated_span();
         let abi = self.parse_abi(); // ABI?
         // FIXME: This recovery should be tested better.
         if safety == Safety::Default
@@ -1223,13 +1212,12 @@ impl<'a> Parser<'a> {
             safety = Safety::Unsafe(self.token.span);
             let _ = self.eat_keyword(exp!(Unsafe));
         }
-        let module = ast::ForeignMod {
+        Ok(ItemKind::ForeignMod(ast::ForeignMod {
             extern_span,
             safety,
             abi,
             items: self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?,
-        };
-        Ok((Ident::empty(), ItemKind::ForeignMod(module)))
+        }))
     }
 
     /// Parses a foreign item (one in an `extern { ... }` block).
@@ -1239,11 +1227,11 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
         let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: false };
         Ok(self.parse_item_(fn_parse_mode, force_collect)?.map(
-            |Item { attrs, id, span, vis, ident, kind, tokens }| {
+            |Item { attrs, id, span, vis, kind, tokens }| {
                 let kind = match ForeignItemKind::try_from(kind) {
                     Ok(kind) => kind,
                     Err(kind) => match kind {
-                        ItemKind::Const(box ConstItem { ty, expr, .. }) => {
+                        ItemKind::Const(box ConstItem { ident, ty, expr, .. }) => {
                             let const_span = Some(span.with_hi(ident.span.lo()))
                                 .filter(|span| span.can_be_used_for_suggestions());
                             self.dcx().emit_err(errors::ExternItemCannotBeConst {
@@ -1251,16 +1239,18 @@ impl<'a> Parser<'a> {
                                 const_span,
                             });
                             ForeignItemKind::Static(Box::new(StaticItem {
+                                ident,
                                 ty,
                                 mutability: Mutability::Not,
                                 expr,
                                 safety: Safety::Default,
+                                define_opaque: None,
                             }))
                         }
                         _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
                     },
                 };
-                Some(P(Item { attrs, id, span, vis, ident, kind, tokens }))
+                Some(P(Item { attrs, id, span, vis, kind, tokens }))
             },
         ))
     }
@@ -1277,23 +1267,50 @@ impl<'a> Parser<'a> {
         None
     }
 
+    fn is_use_closure(&self) -> bool {
+        if self.token.is_keyword(kw::Use) {
+            // Check if this could be a closure.
+            self.look_ahead(1, |token| {
+                // Move or Async here would be an error but still we're parsing a closure
+                let dist =
+                    if token.is_keyword(kw::Move) || token.is_keyword(kw::Async) { 2 } else { 1 };
+
+                self.look_ahead(dist, |token| matches!(token.kind, token::Or | token::OrOr))
+            })
+        } else {
+            false
+        }
+    }
+
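`is_use_closure` tells a `use` closure (unstable, ergonomic-clones-style syntax) apart from a `use` import when the parser sees `use` where an item or statement can start. A hedged sketch of the token shapes it distinguishes, assuming the experimental `use ||` closure form on nightly:

    fn demo() -> i32 {
        // `use` followed by `|` / `||` (an intervening `move` or `async` is an
        // error, but it is still parsed as a closure for recovery):
        let make = use || 1;
        make()
    }

    // `use` followed by a path is still an ordinary import item.
    use std::fmt::Debug;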
     fn is_unsafe_foreign_mod(&self) -> bool {
-        self.token.is_keyword(kw::Unsafe)
-            && self.is_keyword_ahead(1, &[kw::Extern])
-            && self.look_ahead(
-                2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize),
-                |t| *t == token::OpenDelim(Delimiter::Brace),
-            )
+        // Look for `unsafe`.
+        if !self.token.is_keyword(kw::Unsafe) {
+            return false;
+        }
+        // Look for `extern`.
+        if !self.is_keyword_ahead(1, &[kw::Extern]) {
+            return false;
+        }
+
+        // Look for the optional ABI string literal.
+        let n = if self.look_ahead(2, |t| t.can_begin_string_literal()) { 3 } else { 2 };
+
+        // Look for the `{`. Use `tree_look_ahead` because the ABI (if present)
+        // might be a metavariable, i.e. an invisible-delimited sequence, and
+        // `tree_look_ahead` will consider that a single element when looking
+        // ahead.
+        self.tree_look_ahead(n, |t| matches!(t, TokenTree::Delimited(_, _, Delimiter::Brace, _)))
+            == Some(true)
     }
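The lookahead above matches the `unsafe extern` block form, with or without an explicit ABI string (which may also arrive as a pasted metavariable, hence `tree_look_ahead`). In plain source, with made-up item names:

    unsafe extern "C" {
        pub fn ffi_add(a: i32, b: i32) -> i32;
    }

    unsafe extern {
        // ABI string omitted; defaults to `"C"`.
        pub fn ffi_reset();
    }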
 
     fn is_static_global(&mut self) -> bool {
         if self.check_keyword(exp!(Static)) {
             // Check if this could be a closure.
             !self.look_ahead(1, |token| {
-                if token.is_keyword(kw::Move) {
+                if token.is_keyword(kw::Move) || token.is_keyword(kw::Use) {
                     return true;
                 }
-                matches!(token.kind, token::BinOp(token::Or) | token::OrOr)
+                matches!(token.kind, token::Or | token::OrOr)
             })
         } else {
             // `$qual static`
@@ -1320,13 +1337,13 @@ impl<'a> Parser<'a> {
         const_span: Span,
         attrs: &mut AttrVec,
         defaultness: Defaultness,
-    ) -> PResult<'a, ItemInfo> {
+    ) -> PResult<'a, ItemKind> {
         let impl_span = self.token.span;
         let err = self.expected_ident_found_err();
 
         // Only try to recover if this is implementing a trait for a type
-        let mut impl_info = match self.parse_item_impl(attrs, defaultness) {
-            Ok(impl_info) => impl_info,
+        let mut item_kind = match self.parse_item_impl(attrs, defaultness) {
+            Ok(item_kind) => item_kind,
             Err(recovery_error) => {
                 // Recovery failed, raise the "expected identifier" error
                 recovery_error.cancel();
@@ -1334,7 +1351,7 @@ impl<'a> Parser<'a> {
             }
         };
 
-        match &mut impl_info.1 {
+        match &mut item_kind {
             ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
                 *constness = Const::Yes(const_span);
 
@@ -1351,10 +1368,11 @@ impl<'a> Parser<'a> {
             _ => unreachable!(),
         }
 
-        Ok(impl_info)
+        Ok(item_kind)
     }
 
-    /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in `mutability`.
+    /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in
+    /// `mutability`.
     ///
     /// ```ebnf
     /// Static = "static" "mut"? $ident ":" $ty (= $expr)? ";" ;
@@ -1363,7 +1381,7 @@ impl<'a> Parser<'a> {
         &mut self,
         safety: Safety,
         mutability: Mutability,
-    ) -> PResult<'a, (Ident, StaticItem)> {
+    ) -> PResult<'a, ItemKind> {
         let ident = self.parse_ident()?;
 
         if self.token == TokenKind::Lt && self.may_recover() {
@@ -1375,7 +1393,8 @@ impl<'a> Parser<'a> {
         // FIXME: This could maybe benefit from `.may_recover()`?
         let ty = match (self.eat(exp!(Colon)), self.check(exp!(Eq)) | self.check(exp!(Semi))) {
             (true, false) => self.parse_ty()?,
-            // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type.
+            // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing
+            // type.
             (colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)),
         };
 
@@ -1383,7 +1402,8 @@ impl<'a> Parser<'a> {
 
         self.expect_semi()?;
 
-        Ok((ident, StaticItem { ty, safety, mutability, expr }))
+        let item = StaticItem { ident, ty, safety, mutability, expr, define_opaque: None };
+        Ok(ItemKind::Static(Box::new(item)))
     }
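Matching the `Static` grammar in the doc comment, for example:

    static GREETING: &str = "hello";
    static mut COUNTER: u32 = 0;

The `(= $expr)?` initializer is optional, and even a missing `":" $ty` annotation is recovered here, so incomplete statics get a targeted diagnostic instead of a generic parse error.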
 
     /// Parse a constant item with the prefix `"const"` already parsed.
@@ -1508,7 +1528,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an enum declaration.
-    fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
+    fn parse_item_enum(&mut self) -> PResult<'a, ItemKind> {
         if self.token.is_keyword(kw::Struct) {
             let span = self.prev_token.span.to(self.token.span);
             let err = errors::EnumStructMutuallyExclusive { span };
@@ -1521,7 +1541,7 @@ impl<'a> Parser<'a> {
         }
 
         let prev_span = self.prev_token.span;
-        let id = self.parse_ident()?;
+        let ident = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
         generics.where_clause = self.parse_where_clause()?;
 
@@ -1532,10 +1552,10 @@ impl<'a> Parser<'a> {
             (thin_vec![], Trailing::No)
         } else {
             self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
-                p.parse_enum_variant(id.span)
+                p.parse_enum_variant(ident.span)
             })
             .map_err(|mut err| {
-                err.span_label(id.span, "while parsing this enum");
+                err.span_label(ident.span, "while parsing this enum");
                 if self.token == token::Colon {
                     let snapshot = self.create_snapshot_for_diagnostic();
                     self.bump();
@@ -1561,7 +1581,7 @@ impl<'a> Parser<'a> {
         };
 
         let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
-        Ok((id, ItemKind::Enum(enum_definition, generics)))
+        Ok(ItemKind::Enum(ident, enum_definition, generics))
     }
 
     fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
@@ -1579,7 +1599,7 @@ impl<'a> Parser<'a> {
             }
             let ident = this.parse_field_ident("enum", vlo)?;
 
-            if this.token == token::Not {
+            if this.token == token::Bang {
                 if let Err(err) = this.unexpected() {
                     err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
                 }
@@ -1653,8 +1673,8 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `struct Foo { ... }`.
-    fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
-        let class_name = self.parse_ident()?;
+    fn parse_item_struct(&mut self) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
 
         let mut generics = self.parse_generics()?;
 
@@ -1675,7 +1695,7 @@ impl<'a> Parser<'a> {
         let vdata = if self.token.is_keyword(kw::Where) {
             let tuple_struct_body;
             (generics.where_clause, tuple_struct_body) =
-                self.parse_struct_where_clause(class_name, generics.span)?;
+                self.parse_struct_where_clause(ident, generics.span)?;
 
             if let Some(body) = tuple_struct_body {
                 // If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);`
@@ -1689,7 +1709,7 @@ impl<'a> Parser<'a> {
                 // If we see: `struct Foo<T> where T: Copy { ... }`
                 let (fields, recovered) = self.parse_record_struct_body(
                     "struct",
-                    class_name.span,
+                    ident.span,
                     generics.where_clause.has_where_token,
                 )?;
                 VariantData::Struct { fields, recovered }
@@ -1701,7 +1721,7 @@ impl<'a> Parser<'a> {
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
             let (fields, recovered) = self.parse_record_struct_body(
                 "struct",
-                class_name.span,
+                ident.span,
                 generics.where_clause.has_where_token,
             )?;
             VariantData::Struct { fields, recovered }
@@ -1712,17 +1732,16 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             body
         } else {
-            let err =
-                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            let err = errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token);
             return Err(self.dcx().create_err(err));
         };
 
-        Ok((class_name, ItemKind::Struct(vdata, generics)))
+        Ok(ItemKind::Struct(ident, vdata, generics))
     }
 
     /// Parses `union Foo { ... }`.
-    fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
-        let class_name = self.parse_ident()?;
+    fn parse_item_union(&mut self) -> PResult<'a, ItemKind> {
+        let ident = self.parse_ident()?;
 
         let mut generics = self.parse_generics()?;
 
@@ -1730,14 +1749,14 @@ impl<'a> Parser<'a> {
             generics.where_clause = self.parse_where_clause()?;
             let (fields, recovered) = self.parse_record_struct_body(
                 "union",
-                class_name.span,
+                ident.span,
                 generics.where_clause.has_where_token,
             )?;
             VariantData::Struct { fields, recovered }
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
             let (fields, recovered) = self.parse_record_struct_body(
                 "union",
-                class_name.span,
+                ident.span,
                 generics.where_clause.has_where_token,
             )?;
             VariantData::Struct { fields, recovered }
@@ -1749,7 +1768,7 @@ impl<'a> Parser<'a> {
             return Err(err);
         };
 
-        Ok((class_name, ItemKind::Union(vdata, generics)))
+        Ok(ItemKind::Union(ident, vdata, generics))
     }
 
     /// This function parses the fields of record structs:
@@ -1814,7 +1833,7 @@ impl<'a> Parser<'a> {
             let attrs = p.parse_outer_attributes()?;
             p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| {
                 let mut snapshot = None;
-                if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+                if p.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
                     // Account for `<<<<<<<` diff markers. We can't proactively error here because
                     // that can be a valid type start, so we snapshot and reparse only if we've
                     // encountered another parse error.
@@ -2034,7 +2053,7 @@ impl<'a> Parser<'a> {
         attrs: AttrVec,
     ) -> PResult<'a, FieldDef> {
         let name = self.parse_field_ident(adt_ty, lo)?;
-        if self.token == token::Not {
+        if self.token == token::Bang {
             if let Err(mut err) = self.unexpected() {
                 // Encounter the macro invocation
                 err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
@@ -2101,15 +2120,17 @@ impl<'a> Parser<'a> {
                 }
             } else if self.eat_keyword(exp!(Struct)) {
                 match self.parse_item_struct() {
-                    Ok((ident, _)) => self
-                        .dcx()
-                        .struct_span_err(
-                            lo.with_hi(ident.span.hi()),
-                            format!("structs are not allowed in {adt_ty} definitions"),
-                        )
-                        .with_help(
-                            "consider creating a new `struct` definition instead of nesting",
-                        ),
+                    Ok(item) => {
+                        let ItemKind::Struct(ident, ..) = item else { unreachable!() };
+                        self.dcx()
+                            .struct_span_err(
+                                lo.with_hi(ident.span.hi()),
+                                format!("structs are not allowed in {adt_ty} definitions"),
+                            )
+                            .with_help(
+                                "consider creating a new `struct` definition instead of nesting",
+                            )
+                    }
                     Err(err) => {
                         err.cancel();
                         self.restore_snapshot(snapshot);
@@ -2154,7 +2175,7 @@ impl<'a> Parser<'a> {
     /// MacParams = "(" TOKEN_STREAM ")" ;
     /// DeclMac = "macro" Ident MacParams? MacBody ;
     /// ```
-    fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
+    fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemKind> {
         let ident = self.parse_ident()?;
         let body = if self.check(exp!(OpenBrace)) {
             self.parse_delim_args()? // `MacBody`
@@ -2176,7 +2197,7 @@ impl<'a> Parser<'a> {
         };
 
         self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span));
-        Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: false })))
+        Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: false }))
     }
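This is the `macro` 2.0 item form gated behind `decl_macro`; a minimal nightly-only sketch of the two accepted shapes, with `MacParams` and with a `macro_rules!`-style rule body:

    #![feature(decl_macro)]

    // `macro` Ident MacParams MacBody
    macro square($e:expr) {
        ($e) * ($e)
    }

    // `macro` Ident MacBody, where the body holds rules.
    macro id {
        ($e:expr) => { $e }
    }

    fn main() {
        let _ = square!(3) + id!(4);
    }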
 
     /// Is this a possibly malformed start of a `macro_rules! foo` item definition?
@@ -2184,7 +2205,7 @@ impl<'a> Parser<'a> {
         if self.check_keyword(exp!(MacroRules)) {
             let macro_rules_span = self.token.span;
 
-            if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
+            if self.look_ahead(1, |t| *t == token::Bang) && self.look_ahead(2, |t| t.is_ident()) {
                 return IsMacroRulesItem::Yes { has_bang: true };
             } else if self.look_ahead(1, |t| (t.is_ident())) {
                 // macro_rules foo
@@ -2205,15 +2226,15 @@ impl<'a> Parser<'a> {
         &mut self,
         vis: &Visibility,
         has_bang: bool,
-    ) -> PResult<'a, ItemInfo> {
+    ) -> PResult<'a, ItemKind> {
         self.expect_keyword(exp!(MacroRules))?; // `macro_rules`
 
         if has_bang {
-            self.expect(exp!(Not))?; // `!`
+            self.expect(exp!(Bang))?; // `!`
         }
         let ident = self.parse_ident()?;
 
-        if self.eat(exp!(Not)) {
+        if self.eat(exp!(Bang)) {
             // Handle macro_rules! foo!
             let span = self.prev_token.span;
             self.dcx().emit_err(errors::MacroNameRemoveBang { span });
@@ -2223,7 +2244,7 @@ impl<'a> Parser<'a> {
         self.eat_semi_for_macro_if_needed(&body);
         self.complain_if_pub_macro(vis, true);
 
-        Ok((ident, ItemKind::MacroDef(ast::MacroDef { body, macro_rules: true })))
+        Ok(ItemKind::MacroDef(ident, ast::MacroDef { body, macro_rules: true }))
     }
 
     /// Item macro invocations or `macro_rules!` definitions need inherited visibility.
@@ -2281,7 +2302,7 @@ impl<'a> Parser<'a> {
             || self.token.is_keyword(kw::Union))
             && self.look_ahead(1, |t| t.is_ident())
         {
-            let kw_token = self.token.clone();
+            let kw_token = self.token;
             let kw_str = pprust::token_to_string(&kw_token);
             let item = self.parse_item(ForceCollect::No)?;
             let mut item = item.unwrap().span;
@@ -2514,8 +2535,8 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             *sig_hi = self.prev_token.span;
             (AttrVec::new(), None)
-        } else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() {
-            self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
+        } else if self.check(exp!(OpenBrace)) || self.token.is_metavar_block() {
+            self.parse_block_common(self.token.span, BlockCheckMode::Default, None)
                 .map(|(attrs, body)| (attrs, Some(body)))?
         } else if self.token == token::Eq {
             // Recover `fn foo() = $expr;`.
@@ -2587,13 +2608,36 @@ impl<'a> Parser<'a> {
                 })
             // `extern ABI fn`
             || self.check_keyword_case(exp!(Extern), case)
+                // Use `tree_look_ahead` because `ABI` might be a metavariable,
+                // i.e. an invisible-delimited sequence, and `tree_look_ahead`
+                // will consider that a single element when looking ahead.
                 && self.look_ahead(1, |t| t.can_begin_string_literal())
-                && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
+                && (self.tree_look_ahead(2, |tt| {
+                    match tt {
+                        TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case),
+                        TokenTree::Delimited(..) => false,
+                    }
+                }) == Some(true) ||
                     // This branch is only for better diagnostics; `pub`, `unsafe`, etc. are not
                     // allowed here.
                     (self.may_recover()
-                        && self.look_ahead(2, |t| ALL_QUALS.iter().any(|exp| t.is_keyword(exp.kw)))
-                        && self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
+                        && self.tree_look_ahead(2, |tt| {
+                            match tt {
+                                TokenTree::Token(t, _) =>
+                                    ALL_QUALS.iter().any(|exp| {
+                                        t.is_keyword(exp.kw)
+                                    }),
+                                TokenTree::Delimited(..) => false,
+                            }
+                        }) == Some(true)
+                        && self.tree_look_ahead(3, |tt| {
+                            match tt {
+                                TokenTree::Token(t, _) => t.is_keyword_case(kw::Fn, case),
+                                TokenTree::Delimited(..) => false,
+                            }
+                        }) == Some(true)
+                    )
+                )
     }
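The `extern ABI fn` branch above recognizes function front matter such as the following, including when a declarative macro pastes the ABI through something like a `$abi:literal` metavariable (which is why `tree_look_ahead` is needed):

    // Optional qualifiers, `extern`, an optional ABI string, then `fn`.
    extern "C" fn callback(code: i32) -> i32 {
        code
    }

    pub unsafe extern "system" fn raw_entry() {}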
 
     /// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
@@ -2729,7 +2773,7 @@ impl<'a> Parser<'a> {
                             .expect("Span extracted directly from keyword should always work");
 
                         err.span_suggestion(
-                            self.token.uninterpolated_span(),
+                            self.token_uninterpolated_span(),
                             format!("`{original_kw}` already used earlier, remove this one"),
                             "",
                             Applicability::MachineApplicable,
@@ -2740,7 +2784,7 @@ impl<'a> Parser<'a> {
                     else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw {
                         let correct_pos_sp = correct_pos_sp.to(self.prev_token.span);
                         if let Ok(current_qual) = self.span_to_snippet(correct_pos_sp) {
-                            let misplaced_qual_sp = self.token.uninterpolated_span();
+                            let misplaced_qual_sp = self.token_uninterpolated_span();
                             let misplaced_qual = self.span_to_snippet(misplaced_qual_sp).unwrap();
 
                             err.span_suggestion(
@@ -2857,7 +2901,7 @@ impl<'a> Parser<'a> {
                 // Skip every token until next possible arg or end.
                 p.eat_to_tokens(&[exp!(Comma), exp!(CloseParen)]);
                 // Create a placeholder argument for proper arg count (issue #34264).
-                Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)), guar))
+                Ok(dummy_arg(Ident::new(sym::dummy, lo.to(p.prev_token.span)), guar))
             });
             // ...now that we've parsed the first argument, `self` is no longer allowed.
             first_param = false;
@@ -2908,23 +2952,39 @@ impl<'a> Parser<'a> {
                 let parser_snapshot_before_ty = this.create_snapshot_for_diagnostic();
                 this.eat_incorrect_doc_comment_for_param_type();
                 let mut ty = this.parse_ty_for_param();
-                if ty.is_ok()
-                    && this.token != token::Comma
-                    && this.token != token::CloseDelim(Delimiter::Parenthesis)
-                {
-                    // This wasn't actually a type, but a pattern looking like a type,
-                    // so we are going to rollback and re-parse for recovery.
-                    ty = this.unexpected_any();
+
+                if let Ok(t) = &ty {
+                    // Check for trailing angle brackets
+                    if let TyKind::Path(_, Path { segments, .. }) = &t.kind {
+                        if let Some(segment) = segments.last() {
+                            if let Some(guar) =
+                                this.check_trailing_angle_brackets(segment, &[exp!(CloseParen)])
+                            {
+                                return Ok((
+                                    dummy_arg(segment.ident, guar),
+                                    Trailing::No,
+                                    UsePreAttrPos::No,
+                                ));
+                            }
+                        }
+                    }
+
+                    if this.token != token::Comma
+                        && this.token != token::CloseDelim(Delimiter::Parenthesis)
+                    {
+                        // This wasn't actually a type, but a pattern looking like a type,
+                        // so we are going to rollback and re-parse for recovery.
+                        ty = this.unexpected_any();
+                    }
                 }
                 match ty {
                     Ok(ty) => {
-                        let ident = Ident::new(kw::Empty, this.prev_token.span);
-                        let bm = BindingMode::NONE;
-                        let pat = this.mk_pat_ident(ty.span, bm, ident);
+                        let pat = this.mk_pat(ty.span, PatKind::Missing);
                         (pat, ty)
                     }
                     // If this is a C-variadic argument and we hit an error, return the error.
                     Err(err) if this.token == token::DotDotDot => return Err(err),
+                    Err(err) if this.unmatched_angle_bracket_count > 0 => return Err(err),
                     // Recover from attempting to parse the argument as a type without pattern.
                     Err(err) => {
                         err.cancel();
@@ -2954,14 +3014,27 @@ impl<'a> Parser<'a> {
             }
             _ => unreachable!(),
         };
+        // Is a lifetime `n` tokens ahead?
+        let is_lifetime = |this: &Self, n| this.look_ahead(n, |t| t.is_lifetime());
         // Is `self` `n` tokens ahead?
         let is_isolated_self = |this: &Self, n| {
             this.is_keyword_ahead(n, &[kw::SelfLower])
                 && this.look_ahead(n + 1, |t| t != &token::PathSep)
         };
+        // Is `pin const self` `n` tokens ahead?
+        let is_isolated_pin_const_self = |this: &Self, n| {
+            this.look_ahead(n, |token| token.is_ident_named(sym::pin))
+                && this.is_keyword_ahead(n + 1, &[kw::Const])
+                && is_isolated_self(this, n + 2)
+        };
         // Is `mut self` `n` tokens ahead?
         let is_isolated_mut_self =
             |this: &Self, n| this.is_keyword_ahead(n, &[kw::Mut]) && is_isolated_self(this, n + 1);
+        // Is `pin mut self` `n` tokens ahead?
+        let is_isolated_pin_mut_self = |this: &Self, n| {
+            this.look_ahead(n, |token| token.is_ident_named(sym::pin))
+                && is_isolated_mut_self(this, n + 1)
+        };
         // Parse `self` or `self: TYPE`. We already know the current token is `self`.
         let parse_self_possibly_typed = |this: &mut Self, m| {
             let eself_ident = expect_self_ident(this);
@@ -3011,27 +3084,36 @@ impl<'a> Parser<'a> {
         // else is parsed as a normal function parameter list, so some lookahead is required.
         let eself_lo = self.token.span;
         let (eself, eself_ident, eself_hi) = match self.token.uninterpolate().kind {
-            token::BinOp(token::And) => {
-                let eself = if is_isolated_self(self, 1) {
-                    // `&self`
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Not)
-                } else if is_isolated_mut_self(self, 1) {
-                    // `&mut self`
-                    self.bump();
-                    self.bump();
-                    SelfKind::Region(None, Mutability::Mut)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_self(self, 2) {
-                    // `&'lt self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    SelfKind::Region(Some(lt), Mutability::Not)
-                } else if self.look_ahead(1, |t| t.is_lifetime()) && is_isolated_mut_self(self, 2) {
-                    // `&'lt mut self`
-                    self.bump();
-                    let lt = self.expect_lifetime();
-                    self.bump();
-                    SelfKind::Region(Some(lt), Mutability::Mut)
+            token::And => {
+                let has_lifetime = is_lifetime(self, 1);
+                let skip_lifetime_count = has_lifetime as usize;
+                let eself = if is_isolated_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    SelfKind::Region(lifetime, Mutability::Not)
+                } else if is_isolated_mut_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} mut self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.bump(); // mut
+                    SelfKind::Region(lifetime, Mutability::Mut)
+                } else if is_isolated_pin_const_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} pin const self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                    self.bump(); // pin
+                    self.bump(); // const
+                    SelfKind::Pinned(lifetime, Mutability::Not)
+                } else if is_isolated_pin_mut_self(self, skip_lifetime_count + 1) {
+                    // `&{'lt} pin mut self`
+                    self.bump(); // &
+                    let lifetime = has_lifetime.then(|| self.expect_lifetime());
+                    self.psess.gated_spans.gate(sym::pin_ergonomics, self.token.span);
+                    self.bump(); // pin
+                    self.bump(); // mut
+                    SelfKind::Pinned(lifetime, Mutability::Mut)
                 } else {
                     // `&not_self`
                     return Ok(None);
@@ -3041,12 +3123,12 @@ impl<'a> Parser<'a> {
                 (eself, self_ident, hi)
             }
             // `*self`
-            token::BinOp(token::Star) if is_isolated_self(self, 1) => {
+            token::Star if is_isolated_self(self, 1) => {
                 self.bump();
                 recover_self_ptr(self)?
             }
             // `*mut self` and `*const self`
-            token::BinOp(token::Star)
+            token::Star
                 if self.look_ahead(1, |t| t.is_mutability()) && is_isolated_self(self, 2) =>
             {
                 self.bump();
@@ -3071,11 +3153,13 @@ impl<'a> Parser<'a> {
 
     fn is_named_param(&self) -> bool {
         let offset = match &self.token.kind {
-            token::Interpolated(nt) => match &**nt {
-                token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
+            token::OpenDelim(Delimiter::Invisible(origin)) => match origin {
+                InvisibleOrigin::MetaVar(MetaVarKind::Pat(_)) => {
+                    return self.check_noexpect_past_close_delim(&token::Colon);
+                }
                 _ => 0,
             },
-            token::BinOp(token::And) | token::AndAnd => 1,
+            token::And | token::AndAnd => 1,
             _ if self.token.is_keyword(kw::Mut) => 1,
             _ => 0,
         };
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 1de452dcf39..2221a261b4c 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -13,7 +13,6 @@ mod ty;
 
 use std::assert_matches::debug_assert_matches;
 use std::ops::Range;
-use std::sync::Arc;
 use std::{fmt, mem, slice};
 
 use attr_wrapper::{AttrWrapper, UsePreAttrPos};
@@ -24,7 +23,8 @@ pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{
-    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
+    self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, NtExprKind, NtPatKind, Token,
+    TokenKind,
 };
 use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
@@ -97,32 +97,21 @@ pub enum ForceCollect {
     No,
 }
 
-#[macro_export]
-macro_rules! maybe_whole {
-    ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
-        if let token::Interpolated(nt) = &$p.token.kind
-            && let token::$constructor(x) = &**nt
-        {
-            #[allow(unused_mut)]
-            let mut $x = x.clone();
-            $p.bump();
-            return Ok($e);
-        }
-    };
-}
-
 /// If the next tokens are ill-formed `$ty::` recover them as `<$ty>::`.
 #[macro_export]
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery
             && $self.may_recover()
-            && $self.look_ahead(1, |t| t == &token::PathSep)
-            && let token::Interpolated(nt) = &$self.token.kind
-            && let token::NtTy(ty) = &**nt
+            && let Some(mv_kind) = $self.token.is_metavar_seq()
+            && let token::MetaVarKind::Ty { .. } = mv_kind
+            && $self.check_noexpect_past_close_delim(&token::PathSep)
         {
-            let ty = ty.clone();
-            $self.bump();
+            // Reparse the type, then move to recovery.
+            let ty = $self
+                .eat_metavar_seq(mv_kind, |this| this.parse_ty_no_question_mark_recover())
+                .expect("metavar seq ty");
+
             return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
         }
     };
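The recovery above fires when a pasted `$ty` metavariable is directly followed by `::`. A hedged sketch of the classic trigger; the code is still rejected, but with a targeted suggestion to write `<$t>::...` rather than a generic parse error:

    macro_rules! make_default {
        ($t:ty) => {
            // Ill-formed `$ty::` path; recovered as `<$t>::default()`.
            $t::default()
        };
    }

    fn main() {
        let _x: u8 = make_default!(u8);
    }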
@@ -295,6 +284,10 @@ impl TokenTreeCursor {
         self.stream.get(self.index)
     }
 
+    fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
+        self.stream.get(self.index + n)
+    }
+
     #[inline]
     fn bump(&mut self) {
         self.index += 1;
@@ -333,12 +326,12 @@ impl TokenCursor {
             // below can be removed.
             if let Some(tree) = self.curr.curr() {
                 match tree {
-                    &TokenTree::Token(ref token, spacing) => {
+                    &TokenTree::Token(token, spacing) => {
                         debug_assert!(!matches!(
                             token.kind,
                             token::OpenDelim(_) | token::CloseDelim(_)
                         ));
-                        let res = (token.clone(), spacing);
+                        let res = (token, spacing);
                         self.curr.bump();
                         return res;
                     }
@@ -450,7 +443,6 @@ pub fn token_descr(token: &Token) -> String {
         (Some(TokenDescription::MetaVar(kind)), _) => format!("`{kind}` metavariable"),
         (None, TokenKind::NtIdent(..)) => format!("identifier `{s}`"),
         (None, TokenKind::NtLifetime(..)) => format!("lifetime `{s}`"),
-        (None, TokenKind::Interpolated(node)) => format!("{} `{s}`", node.descr()),
         (None, _) => format!("`{s}`"),
     }
 }
@@ -503,6 +495,14 @@ impl<'a> Parser<'a> {
         self
     }
 
+    #[inline]
+    fn with_recovery<T>(&mut self, recovery: Recovery, f: impl FnOnce(&mut Self) -> T) -> T {
+        let old = mem::replace(&mut self.recovery, recovery);
+        let res = f(self);
+        self.recovery = old;
+        res
+    }
+
     /// Whether the parser is allowed to recover from broken code.
     ///
     /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
@@ -614,6 +614,24 @@ impl<'a> Parser<'a> {
         self.token == *tok
     }
 
+    // Check whether the first token after the delimiter that closes the
+    // current delimited sequence is `tok`. (Panics if used in the outermost
+    // token stream, which has no delimiters.) It uses a clone of the relevant
+    // tree cursor to skip past the entire `TokenTree::Delimited` in a single
+    // step, avoiding the need for unbounded token lookahead.
+    //
+    // Primarily used when `self.token` matches
+    // `OpenDelim(Delimiter::Invisible(_))`, to look ahead through the current
+    // metavar expansion.
+    fn check_noexpect_past_close_delim(&self, tok: &TokenKind) -> bool {
+        let mut tree_cursor = self.token_cursor.stack.last().unwrap().clone();
+        tree_cursor.bump();
+        matches!(
+            tree_cursor.curr(),
+            Some(TokenTree::Token(token::Token { kind, .. }, _)) if kind == tok
+        )
+    }
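One caller is `is_named_param` in `parser/item.rs`, where a pasted `$pat` metavariable must be checked for a following `:` without unbounded lookahead, e.g. in:

    macro_rules! declare_fn {
        ($p:pat) => {
            // The `:` sits right after the invisible-delimited pasted pattern,
            // so the parameter is recognized as a named one.
            fn takes($p: u32) {}
        };
    }

    declare_fn!(_value);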
+
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     ///
     /// the main purpose of this function is to reduce the cluttering of the suggestions list
@@ -721,6 +739,62 @@ impl<'a> Parser<'a> {
         if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
     }
 
+    /// Consume a sequence produced by a metavar expansion, if present.
+    fn eat_metavar_seq<T>(
+        &mut self,
+        mv_kind: MetaVarKind,
+        f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> Option<T> {
+        self.eat_metavar_seq_with_matcher(|mvk| mvk == mv_kind, f)
+    }
+
+    /// A slightly more general form of `eat_metavar_seq`, for use with the
+    /// `MetaVarKind` variants that have parameters, where an exact match isn't
+    /// desired.
+    fn eat_metavar_seq_with_matcher<T>(
+        &mut self,
+        match_mv_kind: impl Fn(MetaVarKind) -> bool,
+        mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+    ) -> Option<T> {
+        if let token::OpenDelim(delim) = self.token.kind
+            && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+            && match_mv_kind(mv_kind)
+        {
+            self.bump();
+
+            // Recovery is disabled when parsing macro arguments, so it must
+            // also be disabled when reparsing pasted macro arguments,
+            // otherwise we get inconsistent results (e.g. #137874).
+            let res = self.with_recovery(Recovery::Forbidden, |this| f(this));
+
+            let res = match res {
+                Ok(res) => res,
+                Err(err) => {
+                    // This can occur in unusual error cases, e.g. #139445.
+                    err.delay_as_bug();
+                    return None;
+                }
+            };
+
+            if let token::CloseDelim(delim) = self.token.kind
+                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)) = delim
+                && match_mv_kind(mv_kind)
+            {
+                self.bump();
+                Some(res)
+            } else {
+                // This can occur when invalid syntax is passed to a decl macro. E.g. see #139248,
+                // where the reparse attempt of an invalid expr consumed the trailing invisible
+                // delimiter.
+                let msg = format!("no close delim when reparsing {mv_kind:?}");
+                self.dcx().span_delayed_bug(self.token.span, msg);
+                None
+            }
+        } else {
+            None
+        }
+    }
+
     /// Is the given keyword `kw` followed by a non-reserved identifier?
     fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
         self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
@@ -754,9 +828,9 @@ impl<'a> Parser<'a> {
         self.is_keyword_ahead(0, &[kw::Const])
             && self.look_ahead(1, |t| match &t.kind {
                 // async closures do not work with const closures, so we do not parse that here.
-                token::Ident(kw::Move | kw::Static, IdentIsRaw::No)
+                token::Ident(kw::Move | kw::Use | kw::Static, IdentIsRaw::No)
                 | token::OrOr
-                | token::BinOp(token::Or) => true,
+                | token::Or => true,
                 _ => false,
             })
     }
@@ -764,8 +838,10 @@ impl<'a> Parser<'a> {
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match &t.kind {
-                token::Interpolated(nt) => matches!(&**nt, token::NtBlock(..)),
                 token::OpenDelim(Delimiter::Brace) => true,
+                token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+                    MetaVarKind::Block,
+                ))) => true,
                 _ => false,
             })
     }
@@ -1017,10 +1093,12 @@ impl<'a> Parser<'a> {
         let initial_semicolon = self.token.span;
 
         while self.eat(exp!(Semi)) {
-            let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
-                e.cancel();
-                None
-            });
+            let _ = self
+                .parse_stmt_without_recovery(false, ForceCollect::No, false)
+                .unwrap_or_else(|e| {
+                    e.cancel();
+                    None
+                });
         }
 
         expect_err
@@ -1229,6 +1307,17 @@ impl<'a> Parser<'a> {
         looker(&token)
     }
 
+    /// Like `look_ahead`, but skips over token trees rather than tokens. Useful
+    /// when looking past possible metavariable pasting sites.
+    pub fn tree_look_ahead<R>(
+        &self,
+        dist: usize,
+        looker: impl FnOnce(&TokenTree) -> R,
+    ) -> Option<R> {
+        assert_ne!(dist, 0);
+        self.token_cursor.curr.look_ahead(dist - 1).map(looker)
+    }
+
     /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
     pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
         self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
@@ -1236,14 +1325,14 @@ impl<'a> Parser<'a> {
 
     /// Parses asyncness: `async` or nothing.
     fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
-        let span = self.token.uninterpolated_span();
+        let span = self.token_uninterpolated_span();
         if self.eat_keyword_case(exp!(Async), case) {
             // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
             // error if edition <= 2024, like we do with async and edition <= 2018?
-            if self.token.uninterpolated_span().at_least_rust_2024()
+            if self.token_uninterpolated_span().at_least_rust_2024()
                 && self.eat_keyword_case(exp!(Gen), case)
             {
-                let gen_span = self.prev_token.uninterpolated_span();
+                let gen_span = self.prev_token_uninterpolated_span();
                 Some(CoroutineKind::AsyncGen {
                     span: span.to(gen_span),
                     closure_id: DUMMY_NODE_ID,
@@ -1256,7 +1345,7 @@ impl<'a> Parser<'a> {
                     return_impl_trait_id: DUMMY_NODE_ID,
                 })
             }
-        } else if self.token.uninterpolated_span().at_least_rust_2024()
+        } else if self.token_uninterpolated_span().at_least_rust_2024()
             && self.eat_keyword_case(exp!(Gen), case)
         {
             Some(CoroutineKind::Gen {
@@ -1272,9 +1361,9 @@ impl<'a> Parser<'a> {
     /// Parses fn unsafety: `unsafe`, `safe` or nothing.
     fn parse_safety(&mut self, case: Case) -> Safety {
         if self.eat_keyword_case(exp!(Unsafe), case) {
-            Safety::Unsafe(self.prev_token.uninterpolated_span())
+            Safety::Unsafe(self.prev_token_uninterpolated_span())
         } else if self.eat_keyword_case(exp!(Safe), case) {
-            Safety::Safe(self.prev_token.uninterpolated_span())
+            Safety::Safe(self.prev_token_uninterpolated_span())
         } else {
             Safety::Default
         }
@@ -1298,10 +1387,10 @@ impl<'a> Parser<'a> {
         // Avoid const blocks and const closures to be parsed as const items
         if (self.check_const_closure() == is_closure)
             && !self
-                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_metavar_block())
             && self.eat_keyword_case(exp!(Const), case)
         {
-            Const::Yes(self.prev_token.uninterpolated_span())
+            Const::Yes(self.prev_token_uninterpolated_span())
         } else {
             Const::No
         }
@@ -1309,17 +1398,24 @@ impl<'a> Parser<'a> {
 
     /// Parses inline const expressions.
     fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
-        if pat {
-            self.psess.gated_spans.gate(sym::inline_const_pat, span);
-        }
         self.expect_keyword(exp!(Const))?;
-        let (attrs, blk) = self.parse_inner_attrs_and_block()?;
+        let (attrs, blk) = self.parse_inner_attrs_and_block(None)?;
         let anon_const = AnonConst {
             id: DUMMY_NODE_ID,
             value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
         };
         let blk_span = anon_const.value.span;
-        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
+        let kind = if pat {
+            let guar = self
+                .dcx()
+                .struct_span_err(blk_span, "`inline_const_pat` has been removed")
+                .with_help("use a named `const`-item or an `if`-guard instead")
+                .emit();
+            ExprKind::Err(guar)
+        } else {
+            ExprKind::ConstBlock(anon_const)
+        };
+        Ok(self.mk_expr_with_attrs(span.to(blk_span), kind, attrs))
     }
 
     /// Parses mutability (`mut` or nothing).
@@ -1421,7 +1517,7 @@ impl<'a> Parser<'a> {
             _ => {
                 let prev_spacing = self.token_spacing;
                 self.bump();
-                TokenTree::Token(self.prev_token.clone(), prev_spacing)
+                TokenTree::Token(self.prev_token, prev_spacing)
             }
         }
     }
@@ -1455,7 +1551,11 @@ impl<'a> Parser<'a> {
     /// so emit a proper diagnostic.
     // Public for rustfmt usage.
     pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
-        maybe_whole!(self, NtVis, |vis| vis.into_inner());
+        if let Some(vis) = self
+            .eat_metavar_seq(MetaVarKind::Vis, |this| this.parse_visibility(FollowedByType::Yes))
+        {
+            return Ok(vis);
+        }
 
         if !self.eat_keyword(exp!(Pub)) {
             // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
@@ -1588,14 +1688,14 @@ impl<'a> Parser<'a> {
     /// `::{` or `::*`
     fn is_import_coupler(&mut self) -> bool {
         self.check_path_sep_and_look_ahead(|t| {
-            matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::BinOp(token::Star))
+            matches!(t.kind, token::OpenDelim(Delimiter::Brace) | token::Star)
         })
     }
 
     // Debug view of the parser's token stream, up to `{lookahead}` tokens.
     // Only used when debugging.
     #[allow(unused)]
-    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ {
+    pub(crate) fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug {
         fmt::from_fn(move |f| {
             let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of
 
@@ -1603,7 +1703,7 @@ impl<'a> Parser<'a> {
             dbg_fmt.field("prev_token", &self.prev_token);
             let mut tokens = vec![];
             for i in 0..lookahead {
-                let tok = self.look_ahead(i, |tok| tok.kind.clone());
+                let tok = self.look_ahead(i, |tok| tok.kind);
                 let is_eof = tok == TokenKind::Eof;
                 tokens.push(tok);
                 if is_eof {
@@ -1634,6 +1734,33 @@ impl<'a> Parser<'a> {
     pub fn approx_token_stream_pos(&self) -> u32 {
         self.num_bump_calls
     }
+
+    /// For interpolated `self.token`, returns a span of the fragment to which
+    /// the interpolated token refers. For all other tokens this is just a
+    /// regular span. It is particularly important to use this for identifiers
+    /// and lifetimes for which spans affect name resolution and edition
+    /// checks. Note that keywords are also identifiers, so they should use
+    /// this if they keep spans or perform edition checks.
+    pub fn token_uninterpolated_span(&self) -> Span {
+        match &self.token.kind {
+            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
+            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
+                self.look_ahead(1, |t| t.span)
+            }
+            _ => self.token.span,
+        }
+    }
+
+    /// Like `token_uninterpolated_span`, but works on `self.prev_token`.
+    pub fn prev_token_uninterpolated_span(&self) -> Span {
+        match &self.prev_token.kind {
+            token::NtIdent(ident, _) | token::NtLifetime(ident, _) => ident.span,
+            token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => {
+                self.look_ahead(0, |t| t.span)
+            }
+            _ => self.prev_token.span,
+        }
+    }
 }
 
 pub(crate) fn make_unclosed_delims_error(
@@ -1683,7 +1810,14 @@ pub enum ParseNtResult {
     Tt(TokenTree),
     Ident(Ident, IdentIsRaw),
     Lifetime(Ident, IdentIsRaw),
-
-    /// This case will eventually be removed, along with `Token::Interpolate`.
-    Nt(Arc<Nonterminal>),
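+    // Each of the following variants carries the parsed AST fragment for the
+    // corresponding nonterminal kind.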
+    Item(P<ast::Item>),
+    Block(P<ast::Block>),
+    Stmt(P<ast::Stmt>),
+    Pat(P<ast::Pat>, NtPatKind),
+    Expr(P<ast::Expr>, NtExprKind),
+    Literal(P<ast::Expr>),
+    Ty(P<ast::Ty>),
+    Meta(P<ast::AttrItem>),
+    Path(P<ast::Path>),
+    Vis(P<ast::Visibility>),
 }
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index eefdb641da2..b6e89cd7fa4 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,14 +1,7 @@
-use std::sync::Arc;
-
-use rustc_ast::HasTokens;
 use rustc_ast::ptr::P;
-use rustc_ast::token::Nonterminal::*;
 use rustc_ast::token::NtExprKind::*;
 use rustc_ast::token::NtPatKind::*;
-use rustc_ast::token::{
-    self, Delimiter, InvisibleOrigin, MetaVarKind, Nonterminal, NonterminalKind, Token,
-};
-use rustc_ast_pretty::pprust;
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, NonterminalKind, Token};
 use rustc_errors::PResult;
 use rustc_span::{Ident, kw};
 
@@ -30,9 +23,9 @@ impl<'a> Parser<'a> {
                 MetaVarKind::Stmt
                 | MetaVarKind::Pat(_)
                 | MetaVarKind::Expr { .. }
-                | MetaVarKind::Ty
+                | MetaVarKind::Ty { .. }
                 | MetaVarKind::Literal // `true`, `false`
-                | MetaVarKind::Meta
+                | MetaVarKind::Meta { .. }
                 | MetaVarKind::Path => true,
 
                 MetaVarKind::Item
@@ -45,23 +38,6 @@ impl<'a> Parser<'a> {
             }
         }
 
-        /// Old variant of `may_be_ident`. Being phased out.
-        fn nt_may_be_ident(nt: &Nonterminal) -> bool {
-            match nt {
-                NtStmt(_)
-                | NtPat(_)
-                | NtExpr(_)
-                | NtTy(_)
-                | NtLiteral(_) // `true`, `false`
-                | NtMeta(_)
-                | NtPath(_) => true,
-
-                NtItem(_)
-                | NtBlock(_)
-                | NtVis(_) => false,
-            }
-        }
-
         match kind {
             // `expr_2021` and earlier
             NonterminalKind::Expr(Expr2021 { .. }) => {
@@ -88,22 +64,17 @@ impl<'a> Parser<'a> {
             NonterminalKind::Ident => get_macro_ident(token).is_some(),
             NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
             NonterminalKind::Vis => match token.kind {
-                // The follow-set of :vis + "priv" keyword + interpolated
+                // The follow-set of :vis + "priv" keyword + interpolated/metavar-expansion.
                 token::Comma
                 | token::Ident(..)
                 | token::NtIdent(..)
                 | token::NtLifetime(..)
-                | token::Interpolated(_)
                 | token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(_))) => true,
                 _ => token.can_begin_type(),
             },
             NonterminalKind::Block => match &token.kind {
                 token::OpenDelim(Delimiter::Brace) => true,
                 token::NtLifetime(..) => true,
-                token::Interpolated(nt) => match &**nt {
-                    NtBlock(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
-                    NtItem(_) | NtPat(_) | NtTy(_) | NtMeta(_) | NtPath(_) | NtVis(_) => false,
-                },
                 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(k))) => match k {
                     MetaVarKind::Block
                     | MetaVarKind::Stmt
@@ -111,8 +82,8 @@ impl<'a> Parser<'a> {
                     | MetaVarKind::Literal => true,
                     MetaVarKind::Item
                     | MetaVarKind::Pat(_)
-                    | MetaVarKind::Ty
-                    | MetaVarKind::Meta
+                    | MetaVarKind::Ty { .. }
+                    | MetaVarKind::Meta { .. }
                     | MetaVarKind::Path
                     | MetaVarKind::Vis => false,
                     MetaVarKind::Lifetime | MetaVarKind::Ident | MetaVarKind::TT => {
@@ -123,7 +94,6 @@ impl<'a> Parser<'a> {
             },
             NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::PathSep | token::Ident(..) | token::NtIdent(..) => true,
-                token::Interpolated(nt) => nt_may_be_ident(nt),
                 token::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(kind))) => {
                     may_be_ident(*kind)
                 }
@@ -147,33 +117,27 @@ impl<'a> Parser<'a> {
         // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
         // which requires having captured tokens available. Since we cannot determine
         // in advance whether or not a proc-macro will be (transitively) invoked,
-        // we always capture tokens for any `Nonterminal` which needs them.
-        let mut nt = match kind {
+        // we always capture tokens for any nonterminal that needs them.
+        match kind {
             // Note that TT is treated differently to all the others.
-            NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())),
+            NonterminalKind::TT => Ok(ParseNtResult::Tt(self.parse_token_tree())),
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
-                Some(item) => NtItem(item),
-                None => {
-                    return Err(self
-                        .dcx()
-                        .create_err(UnexpectedNonterminal::Item(self.token.span)));
-                }
+                Some(item) => Ok(ParseNtResult::Item(item)),
+                None => Err(self.dcx().create_err(UnexpectedNonterminal::Item(self.token.span))),
             },
             NonterminalKind::Block => {
                 // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
                 // the ':block' matcher does not support them
-                NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
+                Ok(ParseNtResult::Block(self.collect_tokens_no_attrs(|this| this.parse_block())?))
             }
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
-                Some(s) => NtStmt(P(s)),
+                Some(stmt) => Ok(ParseNtResult::Stmt(P(stmt))),
                 None => {
-                    return Err(self
-                        .dcx()
-                        .create_err(UnexpectedNonterminal::Statement(self.token.span)));
+                    Err(self.dcx().create_err(UnexpectedNonterminal::Statement(self.token.span)))
                 }
             },
-            NonterminalKind::Pat(pat_kind) => {
-                NtPat(self.collect_tokens_no_attrs(|this| match pat_kind {
+            NonterminalKind::Pat(pat_kind) => Ok(ParseNtResult::Pat(
+                self.collect_tokens_no_attrs(|this| match pat_kind {
                     PatParam { .. } => this.parse_pat_no_top_alt(None, None),
                     PatWithOr => this.parse_pat_no_top_guard(
                         None,
@@ -181,62 +145,57 @@ impl<'a> Parser<'a> {
                         RecoverColon::No,
                         CommaRecoveryMode::EitherTupleOrPipe,
                     ),
-                })?)
+                })?,
+                pat_kind,
+            )),
+            NonterminalKind::Expr(expr_kind) => {
+                Ok(ParseNtResult::Expr(self.parse_expr_force_collect()?, expr_kind))
             }
-            NonterminalKind::Expr(_) => NtExpr(self.parse_expr_force_collect()?),
             NonterminalKind::Literal => {
-                // The `:literal` matcher does not support attributes
-                NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?)
+                // The `:literal` matcher does not support attributes.
+                Ok(ParseNtResult::Literal(
+                    self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
+                ))
             }
-            NonterminalKind::Ty => {
-                NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?)
-            }
-            // this could be handled like a token, since it is one
+            NonterminalKind::Ty => Ok(ParseNtResult::Ty(
+                self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?,
+            )),
+            // This could be handled like a token, since it is one.
             NonterminalKind::Ident => {
-                return if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
+                if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
                     self.bump();
                     Ok(ParseNtResult::Ident(ident, is_raw))
                 } else {
                     Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
                         span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                     }))
-                };
+                }
             }
-            NonterminalKind::Path => {
-                NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
+            NonterminalKind::Path => Ok(ParseNtResult::Path(P(
+                self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?
+            ))),
+            NonterminalKind::Meta => {
+                Ok(ParseNtResult::Meta(P(self.parse_attr_item(ForceCollect::Yes)?)))
             }
-            NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(ForceCollect::Yes)?)),
             NonterminalKind::Vis => {
-                NtVis(P(self
-                    .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))
+                Ok(ParseNtResult::Vis(P(self
+                    .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?)))
             }
             NonterminalKind::Lifetime => {
                 // We want to keep `'keyword` parsing, just like `keyword` is still
                 // an ident for nonterminal purposes.
-                return if let Some((ident, is_raw)) = self.token.lifetime() {
+                if let Some((ident, is_raw)) = self.token.lifetime() {
                     self.bump();
                     Ok(ParseNtResult::Lifetime(ident, is_raw))
                 } else {
                     Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
                         span: self.token.span,
-                        token: self.token.clone(),
+                        token: self.token,
                     }))
-                };
+                }
             }
-        };
-
-        // If tokens are supported at all, they should be collected.
-        if matches!(nt.tokens_mut(), Some(None)) {
-            panic!(
-                "Missing tokens for nt {:?} at {:?}: {:?}",
-                nt,
-                nt.use_span(),
-                pprust::nonterminal_to_string(&nt)
-            );
         }
-
-        Ok(ParseNtResult::Nt(Arc::new(nt)))
     }
 }
 
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 8ce749ec814..d5f469f9aa9 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -2,7 +2,8 @@ use std::ops::Bound;
 
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, Token};
+use rustc_ast::token::NtPatKind::*;
+use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token};
 use rustc_ast::util::parser::ExprPrecedence;
 use rustc_ast::visit::{self, Visitor};
 use rustc_ast::{
@@ -30,7 +31,7 @@ use crate::errors::{
     UnexpectedVertVertInPattern, WrapInParens,
 };
 use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal};
-use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath};
 
 #[derive(PartialEq, Copy, Clone)]
 pub enum Expected {
@@ -357,12 +358,12 @@ impl<'a> Parser<'a> {
             )
         });
         match (is_end_ahead, &self.token.kind) {
-            (true, token::BinOp(token::Or) | token::OrOr) => {
+            (true, token::Or | token::OrOr) => {
                 // A `|` or possibly `||` token shouldn't be here. Ban it.
                 self.dcx().emit_err(TrailingVertNotAllowed {
                     span: self.token.span,
                     start: lo,
-                    token: self.token.clone(),
+                    token: self.token,
                     note_double_vert: matches!(self.token.kind, token::OrOr),
                 });
                 self.bump();
@@ -431,7 +432,11 @@ impl<'a> Parser<'a> {
         // `[` is included for indexing operations,
         // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`),
         // `as` is included for type casts
-        let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
+        let has_trailing_operator = matches!(
+                self.token.kind,
+                token::Plus | token::Minus | token::Star | token::Slash | token::Percent
+                | token::Caret | token::And | token::Shl | token::Shr // excludes `Or`
+            )
             || self.token == token::Question
             || (self.token == token::OpenDelim(Delimiter::Bracket)
                 && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))) // excludes `[]`
@@ -626,15 +631,6 @@ impl<'a> Parser<'a> {
                             ident,
                             indentation,
                         });
-
-                        // help: wrap the expr in a `const { expr }`
-                        // FIXME(inline_const_pat): once stabilized, remove this check and remove the `(requires #[feature(inline_const_pat)])` note from the message
-                        if self.parser.psess.unstable_features.is_nightly_build() {
-                            err.subdiagnostic(UnexpectedExpressionInPatternSugg::InlineConst {
-                                start_span: expr_span.shrink_to_lo(),
-                                end_span: expr_span.shrink_to_hi(),
-                            });
-                        }
                     },
                 );
             }
@@ -689,6 +685,27 @@ impl<'a> Parser<'a> {
         PatVisitor { parser: self, stmt, arm: None, field: None }.visit_stmt(stmt);
     }
 
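+    /// Consume a pattern produced by a metavar expansion (`$p:pat` or
+    /// `$p:pat_param`), if present.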
+    fn eat_metavar_pat(&mut self) -> Option<P<Pat>> {
+        // Must try both kinds of pattern nonterminals.
+        if let Some(pat) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Pat(PatParam { .. })),
+            |this| this.parse_pat_no_top_alt(None, None),
+        ) {
+            Some(pat)
+        } else if let Some(pat) = self.eat_metavar_seq(MetaVarKind::Pat(PatWithOr), |this| {
+            this.parse_pat_no_top_guard(
+                None,
+                RecoverComma::No,
+                RecoverColon::No,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )
+        }) {
+            Some(pat)
+        } else {
+            None
+        }
+    }
+
     /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
     /// allowed).
     fn parse_pat_with_range_pat(
@@ -698,7 +715,10 @@ impl<'a> Parser<'a> {
         syntax_loc: Option<PatternLocation>,
     ) -> PResult<'a, P<Pat>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
-        maybe_whole!(self, NtPat, |pat| pat);
+
+        if let Some(pat) = self.eat_metavar_pat() {
+            return Ok(pat);
+        }
 
         let mut lo = self.token.span;
 
@@ -738,7 +758,7 @@ impl<'a> Parser<'a> {
             self.recover_dotdotdot_rest_pat(lo)
         } else if let Some(form) = self.parse_range_end() {
             self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             // Parse `!`
             self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
             PatKind::Never
@@ -794,7 +814,7 @@ impl<'a> Parser<'a> {
             };
             let span = lo.to(self.prev_token.span);
 
-            if qself.is_none() && self.check(exp!(Not)) {
+            if qself.is_none() && self.check(exp!(Bang)) {
                 self.parse_pat_mac_invoc(path)?
             } else if let Some(form) = self.parse_range_end() {
                 let begin = self.mk_expr(span, ExprKind::Path(qself, path));
@@ -1043,10 +1063,8 @@ impl<'a> Parser<'a> {
         self.recover_additional_muts();
 
         // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
-        if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtPat(..) = &**nt {
-                self.expected_ident_found_err().emit();
-            }
+        if let Some(MetaVarKind::Pat(_)) = self.token.is_metavar_seq() {
+            self.expected_ident_found_err().emit();
         }
 
         // Parse the pattern we hope to be an identifier.
@@ -1232,9 +1250,9 @@ impl<'a> Parser<'a> {
             || self.look_ahead(dist, |t| {
                 t.is_path_start() // e.g. `MY_CONST`;
                 || *t == token::Dot // e.g. `.5` for recovery;
-                || matches!(t.kind, token::Literal(..) | token::BinOp(token::Minus))
+                || matches!(t.kind, token::Literal(..) | token::Minus)
                 || t.is_bool_lit()
-                || t.is_whole_expr()
+                || t.is_metavar_expr()
                 || t.is_lifetime() // recover `'a` instead of `'a'`
                 || (self.may_recover() // recover leading `(`
                     && *t == token::OpenDelim(Delimiter::Parenthesis)
@@ -1308,7 +1326,7 @@ impl<'a> Parser<'a> {
             | token::OpenDelim(Delimiter::Brace) // A struct pattern.
             | token::DotDotDot | token::DotDotEq | token::DotDot // A range pattern.
             | token::PathSep // A tuple / struct variant pattern.
-            | token::Not)) // A macro expanding to a pattern.
+            | token::Bang)) // A macro expanding to a pattern.
     }
 
     /// Parses `ident` or `ident @ pat`.
@@ -1501,8 +1519,8 @@ impl<'a> Parser<'a> {
                 etc = PatFieldsRest::Rest;
                 let mut etc_sp = self.token.span;
                 if first_etc_and_maybe_comma_span.is_none() {
-                    if let Some(comma_tok) = self
-                        .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+                    if let Some(comma_tok) =
+                        self.look_ahead(1, |&t| if t == token::Comma { Some(t) } else { None })
                     {
                         let nw_span = self
                             .psess
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index b241aa892db..02883655662 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -2,7 +2,7 @@ use std::mem;
 
 use ast::token::IdentIsRaw;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, MetaVarKind, Token, TokenKind};
 use rustc_ast::{
     self as ast, AngleBracketedArg, AngleBracketedArgs, AnonConst, AssocItemConstraint,
     AssocItemConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
@@ -15,9 +15,9 @@ use tracing::debug;
 
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, Restrictions, TokenType};
-use crate::errors::{PathSingleColon, PathTripleColon};
+use crate::errors::{self, PathSingleColon, PathTripleColon};
+use crate::exp;
 use crate::parser::{CommaRecoveryMode, RecoverColon, RecoverComma};
-use crate::{errors, exp, maybe_whole};
 
 /// Specifies how to parse a path.
 #[derive(Copy, Clone, PartialEq)]
@@ -194,16 +194,18 @@ impl<'a> Parser<'a> {
             }
         };
 
-        maybe_whole!(self, NtPath, |path| reject_generics_if_mod_style(self, path.into_inner()));
+        if let Some(path) =
+            self.eat_metavar_seq(MetaVarKind::Path, |this| this.parse_path(PathStyle::Type))
+        {
+            return Ok(reject_generics_if_mod_style(self, path));
+        }
 
-        if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtTy(ty) = &**nt {
-                if let ast::TyKind::Path(None, path) = &ty.kind {
-                    let path = path.clone();
-                    self.bump();
-                    return Ok(reject_generics_if_mod_style(self, path));
-                }
-            }
+        // If we have a `ty` metavar in the form of a path, reparse it directly as a path, instead
+        // of reparsing it as a `ty` and then extracting the path.
+        if let Some(path) = self.eat_metavar_seq(MetaVarKind::Ty { is_path: true }, |this| {
+            this.parse_path(PathStyle::Type)
+        }) {
+            return Ok(reject_generics_if_mod_style(self, path));
         }
 
         let lo = self.token.span;
@@ -271,7 +273,6 @@ impl<'a> Parser<'a> {
                         self.dcx().emit_err(PathSingleColon {
                             span: self.prev_token.span,
                             suggestion: self.prev_token.span.shrink_to_hi(),
-                            type_ascription: self.psess.unstable_features.is_nightly_build(),
                         });
                     }
                     continue;
@@ -303,10 +304,7 @@ impl<'a> Parser<'a> {
         let is_args_start = |token: &Token| {
             matches!(
                 token.kind,
-                token::Lt
-                    | token::BinOp(token::Shl)
-                    | token::OpenDelim(Delimiter::Parenthesis)
-                    | token::LArrow
+                token::Lt | token::Shl | token::OpenDelim(Delimiter::Parenthesis) | token::LArrow
             )
         };
         let check_args_start = |this: &mut Self| {
@@ -349,7 +347,6 @@ impl<'a> Parser<'a> {
                             err = self.dcx().create_err(PathSingleColon {
                                 span: self.token.span,
                                 suggestion: self.prev_token.span.shrink_to_hi(),
-                                type_ascription: self.psess.unstable_features.is_nightly_build(),
                             });
                         }
                         // Attempt to find places where a missing `>` might belong.
@@ -380,19 +377,22 @@ impl<'a> Parser<'a> {
 
                     self.psess.gated_spans.gate(sym::return_type_notation, span);
 
+                    let prev_lo = self.prev_token.span.shrink_to_hi();
                     if self.eat_noexpect(&token::RArrow) {
                         let lo = self.prev_token.span;
                         let ty = self.parse_ty()?;
+                        let span = lo.to(ty.span);
+                        let suggestion = prev_lo.to(ty.span);
                         self.dcx()
-                            .emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
+                            .emit_err(errors::BadReturnTypeNotationOutput { span, suggestion });
                     }
 
                     P(ast::GenericArgs::ParenthesizedElided(span))
                 } else {
                     // `(T, U) -> R`
 
-                    let prev_token_before_parsing = self.prev_token.clone();
-                    let token_before_parsing = self.token.clone();
+                    let prev_token_before_parsing = self.prev_token;
+                    let token_before_parsing = self.token;
                     let mut snapshot = None;
                     if self.may_recover()
                         && prev_token_before_parsing == token::PathSep
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index a2699b077fc..0cc8b605018 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -5,7 +5,7 @@ use std::ops::Bound;
 use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, TokenKind};
+use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
 use rustc_ast::util::classify::{self, TrailingBrace};
 use rustc_ast::{
     AttrStyle, AttrVec, Block, BlockCheckMode, DUMMY_NODE_ID, Expr, ExprKind, HasAttrs, Local,
@@ -23,8 +23,8 @@ use super::{
     AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
     Trailing, UsePreAttrPos,
 };
-use crate::errors::MalformedLoopLabel;
-use crate::{errors, exp, maybe_whole};
+use crate::errors::{self, MalformedLoopLabel};
+use crate::exp;
 
 impl<'a> Parser<'a> {
     /// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -33,8 +33,8 @@ impl<'a> Parser<'a> {
     /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
     /// whether or not we have attributes.
     // Public for rustfmt usage.
-    pub(super) fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
-        Ok(self.parse_stmt_without_recovery(false, force_collect).unwrap_or_else(|e| {
+    pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
+        Ok(self.parse_stmt_without_recovery(false, force_collect, false).unwrap_or_else(|e| {
             e.emit();
             self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
             None
@@ -42,23 +42,27 @@ impl<'a> Parser<'a> {
     }
 
     /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of
-    /// whether or not we have attributes.
-    // Public for `cfg_eval` macro expansion.
+    /// whether or not we have attributes. If `force_full_expr` is true, parses the stmt without
+    /// using `Restrictions::STMT_EXPR`. Public for `cfg_eval` macro expansion.
     pub fn parse_stmt_without_recovery(
         &mut self,
         capture_semi: bool,
         force_collect: ForceCollect,
+        force_full_expr: bool,
     ) -> PResult<'a, Option<Stmt>> {
         let pre_attr_pos = self.collect_pos();
         let attrs = self.parse_outer_attributes()?;
         let lo = self.token.span;
 
-        maybe_whole!(self, NtStmt, |stmt| {
+        if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
+            this.parse_stmt_without_recovery(false, ForceCollect::Yes, false)
+        }) {
+            let mut stmt = stmt.expect("an actual statement");
             stmt.visit_attrs(|stmt_attrs| {
                 attrs.prepend_to_nt_inner(stmt_attrs);
             });
-            Some(stmt.into_inner())
-        });
+            return Ok(Some(stmt));
+        }
 
         if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
             self.bump();
@@ -69,10 +73,24 @@ impl<'a> Parser<'a> {
             });
         }
 
-        let stmt = if self.token.is_keyword(kw::Let) {
+        let stmt = if self.token.is_keyword(kw::Super) && self.is_keyword_ahead(1, &[kw::Let]) {
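+            // A `super let` statement; the `super_let` feature gate is enforced below.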
+            self.collect_tokens(None, attrs, force_collect, |this, attrs| {
+                let super_span = this.token.span;
+                this.expect_keyword(exp!(Super))?;
+                this.expect_keyword(exp!(Let))?;
+                this.psess.gated_spans.gate(sym::super_let, super_span);
+                let local = this.parse_local(Some(super_span), attrs)?;
+                let trailing = Trailing::from(capture_semi && this.token == token::Semi);
+                Ok((
+                    this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
+                    trailing,
+                    UsePreAttrPos::No,
+                ))
+            })?
+        } else if self.token.is_keyword(kw::Let) {
             self.collect_tokens(None, attrs, force_collect, |this, attrs| {
                 this.expect_keyword(exp!(Let))?;
-                let local = this.parse_local(attrs)?;
+                let local = this.parse_local(None, attrs)?;
                 let trailing = Trailing::from(capture_semi && this.token == token::Semi);
                 Ok((
                     this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
@@ -147,12 +165,14 @@ impl<'a> Parser<'a> {
         } else if self.token != token::CloseDelim(Delimiter::Brace) {
             // Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case
             // above.
+            let restrictions =
+                if force_full_expr { Restrictions::empty() } else { Restrictions::STMT_EXPR };
             let e = self.collect_tokens(
                 Some(pre_attr_pos),
                 AttrWrapper::empty(),
                 force_collect,
                 |this, _empty_attrs| {
-                    let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?;
+                    let (expr, _) = this.parse_expr_res(restrictions, attrs)?;
                     Ok((expr, Trailing::No, UsePreAttrPos::Yes))
                 },
             )?;
@@ -176,7 +196,7 @@ impl<'a> Parser<'a> {
         let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
             let path = this.parse_path(PathStyle::Expr)?;
 
-            if this.eat(exp!(Not)) {
+            if this.eat(exp!(Bang)) {
                 let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
                 return Ok((
                     stmt_mac,
@@ -229,11 +249,15 @@ impl<'a> Parser<'a> {
         let mac = P(MacCall { path, args });
 
         let kind = if (style == MacStmtStyle::Braces
-            && self.token != token::Dot
-            && self.token != token::Question)
-            || self.token == token::Semi
-            || self.token == token::Eof
-        {
+            && !matches!(self.token.kind, token::Dot | token::Question))
+            || matches!(
+                self.token.kind,
+                token::Semi
+                    | token::Eof
+                    | token::CloseDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
+                        MetaVarKind::Stmt
+                    )))
+            ) {
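+            // The closing invisible delimiter of a pasted `$s:stmt` is treated
+            // like `;`/EOF here.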
             StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
         } else {
             // Since none of the above applied, this is an expression statement macro.
@@ -271,7 +295,7 @@ impl<'a> Parser<'a> {
         force_collect: ForceCollect,
     ) -> PResult<'a, Stmt> {
         let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| {
-            let local = this.parse_local(attrs)?;
+            let local = this.parse_local(None, attrs)?;
             // FIXME - maybe capture semicolon in recovery?
             Ok((
                 this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)),
@@ -285,8 +309,8 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a local variable declaration.
-    fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
-        let lo = self.prev_token.span;
+    fn parse_local(&mut self, super_: Option<Span>, attrs: AttrVec) -> PResult<'a, P<Local>> {
+        let lo = super_.unwrap_or(self.prev_token.span);
 
         if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
             self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
@@ -388,6 +412,7 @@ impl<'a> Parser<'a> {
         };
         let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span };
         Ok(P(ast::Local {
+            super_,
             ty,
             pat,
             kind,
@@ -442,7 +467,16 @@ impl<'a> Parser<'a> {
     /// Parses the RHS of a local variable declaration (e.g., `= 14;`).
     fn parse_initializer(&mut self, eq_optional: bool) -> PResult<'a, Option<P<Expr>>> {
         let eq_consumed = match self.token.kind {
-            token::BinOpEq(..) => {
+            token::PlusEq
+            | token::MinusEq
+            | token::StarEq
+            | token::SlashEq
+            | token::PercentEq
+            | token::CaretEq
+            | token::AndEq
+            | token::OrEq
+            | token::ShlEq
+            | token::ShrEq => {
                 // Recover `let x <op>= 1` as `let x = 1` We must not use `+ BytePos(1)` here
                 // because `<op>` can be a multi-byte lookalike that was recovered, e.g. `➖=` (the
                 // `➖` is a U+2796 Heavy Minus Sign Unicode Character) that was recovered as a
@@ -463,7 +497,7 @@ impl<'a> Parser<'a> {
 
     /// Parses a block. No inner attributes are allowed.
     pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
-        let (attrs, block) = self.parse_inner_attrs_and_block()?;
+        let (attrs, block) = self.parse_inner_attrs_and_block(None)?;
         if let [.., last] = &*attrs {
             let suggest_to_outer = match &last.kind {
                 ast::AttrKind::Normal(attr) => attr.item.is_valid_for_outer_style(),
@@ -484,7 +518,11 @@ impl<'a> Parser<'a> {
         let prev = self.prev_token.span;
         let sp = self.token.span;
         let mut e = self.dcx().struct_span_err(sp, msg);
-        let do_not_suggest_help = self.token.is_keyword(kw::In) || self.token == token::Colon;
+        self.label_expected_raw_ref(&mut e);
+
+        let do_not_suggest_help = self.token.is_keyword(kw::In)
+            || self.token == token::Colon
+            || self.prev_token.is_keyword(kw::Raw);
 
         // Check to see if the user has written something like
         //
@@ -492,7 +530,7 @@ impl<'a> Parser<'a> {
         //      bar;
         //
         // which is valid in other languages, but not Rust.
-        match self.parse_stmt_without_recovery(false, ForceCollect::No) {
+        match self.parse_stmt_without_recovery(false, ForceCollect::No, false) {
             // If the next token is an open brace, e.g., we have:
             //
             //     if expr other_expr {
@@ -641,33 +679,39 @@ impl<'a> Parser<'a> {
         Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
     }
 
-    /// Parses a block. Inner attributes are allowed.
-    pub(super) fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (AttrVec, P<Block>)> {
-        self.parse_block_common(self.token.span, BlockCheckMode::Default, true)
+    /// Parses a block. Inner attributes are allowed; block labels are not.
+    ///
+    /// If `loop_header` is `Some` and an unexpected block label is encountered,
+    /// the parser suggests moving the label just before `loop_header`; otherwise
+    /// it suggests removing the label.
+    pub(super) fn parse_inner_attrs_and_block(
+        &mut self,
+        loop_header: Option<Span>,
+    ) -> PResult<'a, (AttrVec, P<Block>)> {
+        self.parse_block_common(self.token.span, BlockCheckMode::Default, loop_header)
     }
 
-    /// Parses a block. Inner attributes are allowed.
+    /// Parses a block. Inner attributes are allowed; block labels are not.
+    ///
+    /// If `loop_header` is `Some` and an unexpected block label is encountered,
+    /// the parser suggests moving the label just before `loop_header`; otherwise
+    /// it suggests removing the label.
     pub(super) fn parse_block_common(
         &mut self,
         lo: Span,
         blk_mode: BlockCheckMode,
-        can_be_struct_literal: bool,
+        loop_header: Option<Span>,
     ) -> PResult<'a, (AttrVec, P<Block>)> {
-        maybe_whole!(self, NtBlock, |block| (AttrVec::new(), block));
+        if let Some(block) = self.eat_metavar_seq(MetaVarKind::Block, |this| this.parse_block()) {
+            return Ok((AttrVec::new(), block));
+        }
 
-        let maybe_ident = self.prev_token.clone();
-        self.maybe_recover_unexpected_block_label();
+        let maybe_ident = self.prev_token;
+        self.maybe_recover_unexpected_block_label(loop_header);
         if !self.eat(exp!(OpenBrace)) {
             return self.error_block_no_opening_brace();
         }
 
         let attrs = self.parse_inner_attributes()?;
-        let tail = match self.maybe_suggest_struct_literal(
-            lo,
-            blk_mode,
-            maybe_ident,
-            can_be_struct_literal,
-        ) {
+        let tail = match self.maybe_suggest_struct_literal(lo, blk_mode, maybe_ident) {
             Some(tail) => tail?,
             None => self.parse_block_tail(lo, blk_mode, AttemptLocalParseRecovery::Yes)?,
         };
@@ -688,7 +732,7 @@ impl<'a> Parser<'a> {
             if self.token == token::Eof {
                 break;
             }
-            if self.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+            if self.is_vcs_conflict_marker(&TokenKind::Shl, &TokenKind::Lt) {
                 // Account for `<<<<<<<` diff markers. We can't proactively error here because
                 // that can be a valid path start, so we snapshot and reparse only we've
                 // encountered another parse error.
@@ -727,10 +771,6 @@ impl<'a> Parser<'a> {
                                     Applicability::MaybeIncorrect,
                                 );
                             }
-                            if self.psess.unstable_features.is_nightly_build() {
-                                // FIXME(Nilstrieb): Remove this again after a few months.
-                                err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
-                            }
                         }
                     }
 
@@ -801,10 +841,24 @@ impl<'a> Parser<'a> {
         &mut self,
         recover: AttemptLocalParseRecovery,
     ) -> PResult<'a, Option<Stmt>> {
-        // Skip looking for a trailing semicolon when we have an interpolated statement.
-        maybe_whole!(self, NtStmt, |stmt| Some(stmt.into_inner()));
+        // Skip looking for a trailing semicolon when we have a metavar seq.
+        if let Some(stmt) = self.eat_metavar_seq(MetaVarKind::Stmt, |this| {
+            // Why pass `true` for `force_full_expr`? Statement expressions are less expressive
+            // than "full" expressions, due to the `STMT_EXPR` restriction, and sometimes need
+            // parentheses. E.g. the "full" expression `match paren_around_match {} | true` when
+            // used in statement context must be written `(match paren_around_match {} | true)`.
+            // However, if the expression we are parsing in this statement context was pasted by a
+            // declarative macro, it may have come from a "full" expression context, and lack
+            // these parentheses. So we lift the `STMT_EXPR` restriction to ensure the statement
+            // will reparse successfully.
+            this.parse_stmt_without_recovery(false, ForceCollect::No, true)
+        }) {
+            let stmt = stmt.expect("an actual statement");
+            return Ok(Some(stmt));
+        }
 
-        let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No)? else {
+        let Some(mut stmt) = self.parse_stmt_without_recovery(true, ForceCollect::No, false)?
+        else {
             return Ok(None);
         };
 
@@ -859,7 +913,7 @@ impl<'a> Parser<'a> {
                                 {
                                     if self.token == token::Colon
                                         && self.look_ahead(1, |token| {
-                                            token.is_whole_block()
+                                            token.is_metavar_block()
                                                 || matches!(
                                                     token.kind,
                                                     token::Ident(
@@ -1000,14 +1054,7 @@ impl<'a> Parser<'a> {
         rules: BlockCheckMode,
         span: Span,
     ) -> P<Block> {
-        P(Block {
-            stmts,
-            id: DUMMY_NODE_ID,
-            rules,
-            span,
-            tokens: None,
-            could_be_bare_literal: false,
-        })
+        P(Block { stmts, id: DUMMY_NODE_ID, rules, span, tokens: None })
     }
 
     pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt {
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 8b8c81a77a0..2f958f4d492 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -2291,7 +2291,7 @@ fn string_to_tts_macro() {
                     Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
                     _,
                 ),
-                TokenTree::Token(Token { kind: token::Not, .. }, _),
+                TokenTree::Token(Token { kind: token::Bang, .. }, _),
                 TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
                 TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
@@ -2554,7 +2554,7 @@ fn look(p: &Parser<'_>, dist: usize, kind: rustc_ast::token::TokenKind) {
     // Do the `assert_eq` outside the closure so that `track_caller` works.
     // (`#![feature(closure_track_caller)]` + `#[track_caller]` on the closure
     // doesn't give the line number in the test below if the assertion fails.)
-    let tok = p.look_ahead(dist, |tok| tok.clone());
+    let tok = p.look_ahead(dist, |tok| *tok);
     assert_eq!(kind, tok.kind);
 }
 
@@ -2922,7 +2922,7 @@ fn out_of_line_mod() {
         .unwrap()
         .unwrap();
 
-        let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() };
+        let ast::ItemKind::Mod(_, _, mod_kind) = &item.kind else { panic!() };
         assert_matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2);
     });
 }
diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs
index 40631d9154d..add3c970201 100644
--- a/compiler/rustc_parse/src/parser/token_type.rs
+++ b/compiler/rustc_parse/src/parser/token_type.rs
@@ -25,7 +25,7 @@ pub enum TokenType {
     Gt,
     AndAnd,
     OrOr,
-    Not,
+    Bang,
     Tilde,
 
     // BinOps
@@ -114,6 +114,7 @@ pub enum TokenType {
     KwSelfUpper,
     KwStatic,
     KwStruct,
+    KwSuper,
     KwTrait,
     KwTry,
     KwType,
@@ -172,7 +173,7 @@ impl TokenType {
             Gt,
             AndAnd,
             OrOr,
-            Not,
+            Bang,
             Tilde,
 
             Plus,
@@ -250,6 +251,7 @@ impl TokenType {
             KwSelfUpper,
             KwStatic,
             KwStruct,
+            KwSuper,
             KwTrait,
             KwTry,
             KwType,
@@ -324,6 +326,7 @@ impl TokenType {
             TokenType::KwSelfUpper => Some(kw::SelfUpper),
             TokenType::KwStatic => Some(kw::Static),
             TokenType::KwStruct => Some(kw::Struct),
+            TokenType::KwSuper => Some(kw::Super),
             TokenType::KwTrait => Some(kw::Trait),
             TokenType::KwTry => Some(kw::Try),
             TokenType::KwType => Some(kw::Type),
@@ -366,7 +369,7 @@ impl TokenType {
             TokenType::Gt => "`>`",
             TokenType::AndAnd => "`&&`",
             TokenType::OrOr => "`||`",
-            TokenType::Not => "`!`",
+            TokenType::Bang => "`!`",
             TokenType::Tilde => "`~`",
 
             TokenType::Plus => "`+`",
@@ -445,12 +448,6 @@ macro_rules! exp {
             token_type: $crate::parser::token_type::TokenType::$tok
         }
     };
-    (@binop, $op:ident) => {
-        $crate::parser::token_type::ExpTokenPair {
-            tok: &rustc_ast::token::BinOp(rustc_ast::token::BinOpToken::$op),
-            token_type: $crate::parser::token_type::TokenType::$op,
-        }
-    };
     (@open, $delim:ident, $token_type:ident) => {
         $crate::parser::token_type::ExpTokenPair {
             tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim),
@@ -485,8 +482,13 @@ macro_rules! exp {
     (Gt)             => { exp!(@tok, Gt) };
     (AndAnd)         => { exp!(@tok, AndAnd) };
     (OrOr)           => { exp!(@tok, OrOr) };
-    (Not)            => { exp!(@tok, Not) };
+    (Bang)           => { exp!(@tok, Bang) };
     (Tilde)          => { exp!(@tok, Tilde) };
+    (Plus)           => { exp!(@tok, Plus) };
+    (Minus)          => { exp!(@tok, Minus) };
+    (Star)           => { exp!(@tok, Star) };
+    (And)            => { exp!(@tok, And) };
+    (Or)             => { exp!(@tok, Or) };
     (At)             => { exp!(@tok, At) };
     (Dot)            => { exp!(@tok, Dot) };
     (DotDot)         => { exp!(@tok, DotDot) };
@@ -502,12 +504,6 @@ macro_rules! exp {
     (Question)       => { exp!(@tok, Question) };
     (Eof)            => { exp!(@tok, Eof) };
 
-    (Plus)           => { exp!(@binop, Plus) };
-    (Minus)          => { exp!(@binop, Minus) };
-    (Star)           => { exp!(@binop, Star) };
-    (And)            => { exp!(@binop, And) };
-    (Or)             => { exp!(@binop, Or) };
-
     (OpenParen)      => { exp!(@open,  Parenthesis, OpenParen) };
     (OpenBrace)      => { exp!(@open,  Brace,       OpenBrace) };
     (OpenBracket)    => { exp!(@open,  Bracket,     OpenBracket) };
@@ -556,6 +552,7 @@ macro_rules! exp {
     (SelfUpper)      => { exp!(@kw, SelfUpper,  KwSelfUpper) };
     (Static)         => { exp!(@kw, Static,     KwStatic) };
     (Struct)         => { exp!(@kw, Struct,     KwStruct) };
+    (Super)          => { exp!(@kw, Super,      KwSuper) };
     (Trait)          => { exp!(@kw, Trait,      KwTrait) };
     (Try)            => { exp!(@kw, Try,        KwTry) };
     (Type)           => { exp!(@kw, Type,       KwType) };
@@ -626,7 +623,7 @@ impl Iterator for TokenTypeSetIter {
     type Item = TokenType;
 
     fn next(&mut self) -> Option<TokenType> {
-        let num_bits: u32 = (std::mem::size_of_val(&self.0.0) * 8) as u32;
+        let num_bits: u32 = (size_of_val(&self.0.0) * 8) as u32;
         assert_eq!(num_bits, 128);
         let z = self.0.0.trailing_zeros();
         if z == num_bits {
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index dc5919b3630..42ebf26784d 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,5 +1,5 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw, Token, TokenKind};
+use rustc_ast::token::{self, Delimiter, IdentIsRaw, MetaVarKind, Token, TokenKind};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, DUMMY_NODE_ID, FnRetTy,
@@ -7,7 +7,7 @@ use rustc_ast::{
     Pinnedness, PolyTraitRef, PreciseCapturingArg, TraitBoundModifiers, TraitObjectSyntax, Ty,
     TyKind, UnsafeBinderTy,
 };
-use rustc_errors::{Applicability, PResult};
+use rustc_errors::{Applicability, Diag, PResult};
 use rustc_span::{ErrorGuaranteed, Ident, Span, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 
@@ -18,7 +18,7 @@ use crate::errors::{
     HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
     NestedCVariadicType, ReturnTypesUseThinArrow,
 };
-use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath};
 
 /// Signals whether parsing a type should allow `+`.
 ///
@@ -86,7 +86,7 @@ enum AllowCVariadic {
 /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
 /// that `IDENT` is not the ident of a fn trait.
 fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
-    t == &token::PathSep || t == &token::Lt || t == &token::BinOp(token::Shl)
+    t == &token::PathSep || t == &token::Lt || t == &token::Shl
 }
 
 fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
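For context, the three tokens accepted by `can_continue_type_after_non_fn_ident` correspond to inputs where the identifier is clearly the head of a path or generic type rather than a fn-trait-style `IDENT(...) -> ...` form. Ordinary user code that exercises each case (illustrative only):

    trait Trait { type Assoc; }
    impl Trait for u8 { type Assoc = u16; }

    type A = std::vec::Vec<u8>;         // `::` after the ident continues a path
    type B = Vec<u8>;                   // `<` after the ident begins generic args
    type C = Vec<<u8 as Trait>::Assoc>; // `<<` is lexed as a single `Shl` token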
@@ -183,7 +183,8 @@ impl<'a> Parser<'a> {
         )
     }
 
-    /// Parse a type without recovering `:` as `->` to avoid breaking code such as `where fn() : for<'a>`
+    /// Parse a type without recovering `:` as `->` to avoid breaking code such
+    /// as `where fn() : for<'a>`.
     pub(super) fn parse_ty_for_where_clause(&mut self) -> PResult<'a, P<Ty>> {
         self.parse_ty_common(
             AllowPlus::Yes,
@@ -247,13 +248,19 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Ty>> {
         let allow_qpath_recovery = recover_qpath == RecoverQPath::Yes;
         maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery);
-        maybe_whole!(self, NtTy, |ty| ty);
+
+        if let Some(ty) = self.eat_metavar_seq_with_matcher(
+            |mv_kind| matches!(mv_kind, MetaVarKind::Ty { .. }),
+            |this| this.parse_ty_no_question_mark_recover(),
+        ) {
+            return Ok(ty);
+        }
 
         let lo = self.token.span;
         let mut impl_dyn_multi = false;
         let kind = if self.check(exp!(OpenParen)) {
             self.parse_ty_tuple_or_parens(lo, allow_plus)?
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             // Never type `!`
             TyKind::Never
         } else if self.eat(exp!(Star)) {
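The replaced `maybe_whole!` call handled types arriving as a single `NtTy` nonterminal; the new `eat_metavar_seq_with_matcher` call instead re-parses the token sequence stored behind a `MetaVarKind::Ty { .. }` marker. The sort of user code that sends a type down this path is an ordinary `ty` macro fragment (plain example, nothing compiler-internal):

    macro_rules! make_alias {
        ($t:ty) => {
            type Alias = $t; // `$t` reaches type parsing as a `ty` metavariable
        };
    }

    make_alias!(Vec<u8>);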
@@ -392,7 +399,7 @@ impl<'a> Parser<'a> {
         let mut trailing_plus = false;
         let (ts, trailing) = self.parse_paren_comma_seq(|p| {
             let ty = p.parse_ty()?;
-            trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus);
+            trailing_plus = p.prev_token == TokenKind::Plus;
             Ok(ty)
         })?;
 
@@ -404,6 +411,9 @@ impl<'a> Parser<'a> {
                 TyKind::Path(None, path) if maybe_bounds => {
                     self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
                 }
+                // For `('a) + …`, we know that `'a` in type position already led to an error being
+                // emitted. To reduce output, let's indirectly suppress E0178 (bad `+` in type) and
+                // other irrelevant consequential errors.
                 TyKind::TraitObject(bounds, TraitObjectSyntax::None)
                     if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
                 {
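As a concrete illustration of the case this arm matches: in input like the following, the lifetime in type position has already produced an error by the time the parenthesized "bounds" are reassembled, so this branch avoids stacking E0178 and further follow-on errors on top of it (intentionally erroneous example):

    type T = ('static) + Send; // only the lifetime-in-type-position error should remain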
@@ -418,12 +428,60 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
-        let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
-        let bounds = self.parse_generic_bounds_common(allow_plus)?;
-        if lt_no_plus {
-            self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
+        // A lifetime only begins a bare trait object type if it is followed by `+`!
+        if self.token.is_lifetime() && !self.look_ahead(1, |t| t.is_like_plus()) {
+            // In Rust 2021 and beyond, we assume that the user didn't intend to write a bare trait
+            // object type with a leading lifetime bound since that seems very unlikely given the
+            // fact that `dyn`-less trait objects are *semantically* invalid.
+            if self.psess.edition.at_least_rust_2021() {
+                let lt = self.expect_lifetime();
+                let mut err = self.dcx().struct_span_err(lo, "expected type, found lifetime");
+                err.span_label(lo, "expected type");
+                return Ok(match self.maybe_recover_ref_ty_no_leading_ampersand(lt, lo, err) {
+                    Ok(ref_ty) => ref_ty,
+                    Err(err) => TyKind::Err(err.emit()),
+                });
+            }
+
+            self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime {
+                span: lo,
+                suggestion: lo.shrink_to_hi(),
+            });
+        }
+        Ok(TyKind::TraitObject(
+            self.parse_generic_bounds_common(allow_plus)?,
+            TraitObjectSyntax::None,
+        ))
+    }
+
+    fn maybe_recover_ref_ty_no_leading_ampersand<'cx>(
+        &mut self,
+        lt: Lifetime,
+        lo: Span,
+        mut err: Diag<'cx>,
+    ) -> Result<TyKind, Diag<'cx>> {
+        if !self.may_recover() {
+            return Err(err);
+        }
+        let snapshot = self.create_snapshot_for_diagnostic();
+        let mutbl = self.parse_mutability();
+        match self.parse_ty_no_plus() {
+            Ok(ty) => {
+                err.span_suggestion_verbose(
+                    lo.shrink_to_lo(),
+                    "you might have meant to write a reference type here",
+                    "&",
+                    Applicability::MaybeIncorrect,
+                );
+                err.emit();
+                Ok(TyKind::Ref(Some(lt), MutTy { ty, mutbl }))
+            }
+            Err(diag) => {
+                diag.cancel();
+                self.restore_snapshot(snapshot);
+                Err(err)
+            }
         }
-        Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
     }
 
     fn parse_remaining_bounds_path(
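The new recovery targets code that writes a lifetime where a type is expected, which on edition 2021+ is most plausibly a reference type missing its `&`. Input along these lines should now get the "expected type, found lifetime" error plus the structured `&` suggestion rather than the old lifetime-bound diagnostic (illustrative, intentionally non-compiling; exact rendering may differ):

    struct S;

    fn f(x: 'static S) {} // error: expected type, found lifetime
                          // help: you might have meant to write a reference type here: `&'static S`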
@@ -540,7 +598,7 @@ impl<'a> Parser<'a> {
 
             // Recovery
             mutbl = Mutability::Mut;
-            let (dyn_tok, dyn_tok_sp) = (self.token.clone(), self.token_spacing);
+            let (dyn_tok, dyn_tok_sp) = (self.token, self.token_spacing);
             self.bump();
             self.bump_with((dyn_tok, dyn_tok_sp));
         }
@@ -728,7 +786,7 @@ impl<'a> Parser<'a> {
         // Always parse bounds greedily for better error recovery.
         let bounds = self.parse_generic_bounds()?;
 
-        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
+        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;
 
         Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
     }
@@ -740,11 +798,7 @@ impl<'a> Parser<'a> {
         self.expect_lt()?;
         let (args, _, _) = self.parse_seq_to_before_tokens(
             &[exp!(Gt)],
-            &[
-                &TokenKind::Ge,
-                &TokenKind::BinOp(BinOpToken::Shr),
-                &TokenKind::BinOpEq(BinOpToken::Shr),
-            ],
+            &[&TokenKind::Ge, &TokenKind::Shr, &TokenKind::ShrEq],
             SeqSep::trailing_allowed(exp!(Comma)),
             |self_| {
                 if self_.check_keyword(exp!(SelfUpper)) {
@@ -772,9 +826,9 @@ impl<'a> Parser<'a> {
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
         self.check_keyword(exp!(Dyn))
-            && (self.token.uninterpolated_span().at_least_rust_2018()
+            && (self.token_uninterpolated_span().at_least_rust_2018()
                 || self.look_ahead(1, |t| {
-                    (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
+                    (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::Star)
                         && !can_continue_type_after_non_fn_ident(t)
                 }))
     }
@@ -796,7 +850,7 @@ impl<'a> Parser<'a> {
 
         // Always parse bounds greedily for better error recovery.
         let bounds = self.parse_generic_bounds()?;
-        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus);
+        *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::Plus;
         Ok(TyKind::TraitObject(bounds, syntax))
     }
 
@@ -814,7 +868,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, TyKind> {
         // Simple path
         let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
-        if self.eat(exp!(Not)) {
+        if self.eat(exp!(Bang)) {
             // Macro invocation in type position
             Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {
@@ -867,7 +921,7 @@ impl<'a> Parser<'a> {
     fn can_begin_bound(&mut self) -> bool {
         self.check_path()
             || self.check_lifetime()
-            || self.check(exp!(Not))
+            || self.check(exp!(Bang))
             || self.check(exp!(Question))
             || self.check(exp!(Tilde))
             || self.check_keyword(exp!(For))
@@ -883,7 +937,7 @@ impl<'a> Parser<'a> {
     /// ```
     fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
         let lo = self.token.span;
-        let leading_token = self.prev_token.clone();
+        let leading_token = self.prev_token;
         let has_parens = self.eat(exp!(OpenParen));
 
         let bound = if self.token.is_lifetime() {
@@ -995,13 +1049,13 @@ impl<'a> Parser<'a> {
             BoundConstness::Never
         };
 
-        let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
+        let asyncness = if self.token_uninterpolated_span().at_least_rust_2018()
             && self.eat_keyword(exp!(Async))
         {
             self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
             BoundAsyncness::Async(self.prev_token.span)
         } else if self.may_recover()
-            && self.token.uninterpolated_span().is_rust_2015()
+            && self.token_uninterpolated_span().is_rust_2015()
             && self.is_kw_followed_by_ident(kw::Async)
         {
             self.bump(); // eat `async`
@@ -1018,7 +1072,7 @@ impl<'a> Parser<'a> {
 
         let polarity = if self.eat(exp!(Question)) {
             BoundPolarity::Maybe(self.prev_token.span)
-        } else if self.eat(exp!(Not)) {
+        } else if self.eat(exp!(Bang)) {
             self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
             BoundPolarity::Negative(self.prev_token.span)
         } else {
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 86f673c100c..b518fca7a65 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -16,7 +16,8 @@ use rustc_span::{Span, Symbol, sym};
 use crate::{errors, parse_in};
 
 pub fn check_attr(psess: &ParseSess, attr: &Attribute) {
-    if attr.is_doc_comment() {
+    if attr.is_doc_comment() || attr.has_name(sym::cfg_trace) || attr.has_name(sym::cfg_attr_trace)
+    {
         return;
     }
 
@@ -156,7 +157,7 @@ fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaIte
 pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr: &Attribute) {
     let attr_item = attr.get_normal_item();
 
-    if safety == AttributeSafety::Unsafe {
+    if let AttributeSafety::Unsafe { unsafe_since } = safety {
         if let ast::Safety::Default = attr_item.unsafety {
             let path_span = attr_item.path.span;
 
@@ -166,7 +167,13 @@ pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr:
             // square bracket respectively.
             let diag_span = attr_item.span();
 
-            if attr.span.at_least_rust_2024() {
+            // Attributes can be safe in earlier editions, and become unsafe in later ones.
+            let emit_error = match unsafe_since {
+                None => true,
+                Some(unsafe_since) => attr.span.edition() >= unsafe_since,
+            };
+
+            if emit_error {
                 psess.dcx().emit_err(errors::UnsafeAttrOutsideUnsafe {
                     span: path_span,
                     suggestion: errors::UnsafeAttrOutsideUnsafeSuggestion {
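`unsafe_since` lets an attribute stay safe on older editions and require the `unsafe(...)` wrapper only from a given edition onward. Assuming the relevant built-in attributes are registered with `unsafe_since = Some(Edition2024)` (that registration is not part of this hunk), the user-facing effect looks like this:

    // Edition 2021: still accepted as written.
    #[no_mangle]
    pub extern "C" fn old_style() {}

    // Edition 2024: the same attribute must be wrapped, or the
    // `UnsafeAttrOutsideUnsafe` error above is emitted.
    #[unsafe(no_mangle)]
    pub extern "C" fn new_style() {}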
@@ -187,6 +194,12 @@ pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr:
             }
         }
     } else if let Safety::Unsafe(unsafe_span) = attr_item.unsafety {
+        // Allow (but don't require) `#[unsafe(naked)]` so that compiler-builtins can upgrade to it.
+        // FIXME(#139797): remove this special case when compiler-builtins has upgraded.
+        if attr.has_name(sym::naked) {
+            return;
+        }
+
         psess.dcx().emit_err(errors::InvalidAttrUnsafe {
             span: unsafe_span,
             name: attr_item.path.clone(),
@@ -215,11 +228,7 @@ pub fn check_builtin_meta_item(
     template: AttributeTemplate,
     deny_unsafety: bool,
 ) {
-    // Some special attributes like `cfg` must be checked
-    // before the generic check, so we skip them here.
-    let should_skip = |name| name == sym::cfg;
-
-    if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
+    if !is_attr_template_compatible(&template, &meta.kind) {
         emit_malformed_attribute(psess, style, meta.span, name, template);
     }