Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--   compiler/rustc_parse/src/errors.rs               47
-rw-r--r--   compiler/rustc_parse/src/lexer/mod.rs            14
-rw-r--r--   compiler/rustc_parse/src/lexer/tokentrees.rs    105
-rw-r--r--   compiler/rustc_parse/src/parser/attr.rs           4
-rw-r--r--   compiler/rustc_parse/src/parser/diagnostics.rs   98
-rw-r--r--   compiler/rustc_parse/src/parser/expr.rs          88
-rw-r--r--   compiler/rustc_parse/src/parser/item.rs          83
-rw-r--r--   compiler/rustc_parse/src/parser/mod.rs           61
-rw-r--r--   compiler/rustc_parse/src/parser/nonterminal.rs   14
-rw-r--r--   compiler/rustc_parse/src/parser/pat.rs           49
-rw-r--r--   compiler/rustc_parse/src/parser/path.rs          29
-rw-r--r--   compiler/rustc_parse/src/parser/stmt.rs           4
-rw-r--r--   compiler/rustc_parse/src/parser/ty.rs             5
13 files changed, 468 insertions(+), 133 deletions(-)
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index aeb4fd0a304..99e66fddc70 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -521,6 +521,14 @@ pub(crate) struct CatchAfterTry {
 }
 
 #[derive(Diagnostic)]
+#[diag(parse_gen_fn)]
+#[help]
+pub(crate) struct GenFn {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
 #[diag(parse_comma_after_base_struct)]
 #[note]
 pub(crate) struct CommaAfterBaseStruct {
@@ -1370,6 +1378,34 @@ pub(crate) struct FnPtrWithGenericsSugg {
     pub for_param_list_exists: bool,
 }
 
+pub(crate) struct FnTraitMissingParen {
+    pub span: Span,
+    pub machine_applicable: bool,
+}
+
+impl AddToDiagnostic for FnTraitMissingParen {
+    fn add_to_diagnostic_with<F>(self, diag: &mut rustc_errors::Diagnostic, _: F)
+    where
+        F: Fn(
+            &mut rustc_errors::Diagnostic,
+            rustc_errors::SubdiagnosticMessage,
+        ) -> rustc_errors::SubdiagnosticMessage,
+    {
+        diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren);
+        let applicability = if self.machine_applicable {
+            Applicability::MachineApplicable
+        } else {
+            Applicability::MaybeIncorrect
+        };
+        diag.span_suggestion_short(
+            self.span.shrink_to_hi(),
+            crate::fluent_generated::parse_add_paren,
+            "()",
+            applicability,
+        );
+    }
+}
+
 #[derive(Diagnostic)]
 #[diag(parse_unexpected_if_with_if)]
 pub(crate) struct UnexpectedIfWithIf(
@@ -1544,6 +1580,14 @@ pub(crate) enum AmbiguousMissingKwForItemSub {
 }
 
 #[derive(Diagnostic)]
+#[diag(parse_missing_fn_params)]
+pub(crate) struct MissingFnParams {
+    #[primary_span]
+    #[suggestion(code = "()", applicability = "machine-applicable", style = "short")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
 #[diag(parse_missing_trait_in_trait_impl)]
 pub(crate) struct MissingTraitInTraitImpl {
     #[primary_span]
@@ -2234,9 +2278,8 @@ pub(crate) enum InvalidMutInPattern {
     #[note(parse_note_mut_pattern_usage)]
     NonIdent {
         #[primary_span]
-        #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+        #[suggestion(code = "", applicability = "machine-applicable")]
         span: Span,
-        pat: String,
     },
 }
 
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index a375a1d69cd..f2eed5c9be5 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -64,10 +64,10 @@ pub(crate) fn parse_token_trees<'a>(
         override_span,
         nbsp_is_whitespace: false,
     };
-    let (token_trees, unmatched_delims) =
+    let (stream, res, unmatched_delims) =
         tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
-    match token_trees {
-        Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
+    match res {
+        Ok(()) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
             // Return error if there are unmatched delimiters or unclosed delimiters.
             // We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
@@ -79,9 +79,11 @@ pub(crate) fn parse_token_trees<'a>(
                     err.buffer(&mut buffer);
                 }
             }
-            if let Err(err) = token_trees {
-                // Add unclosing delimiter error
-                err.buffer(&mut buffer);
+            if let Err(errs) = res {
+                // Add unclosing delimiter or diff marker errors
+                for err in errs {
+                    err.buffer(&mut buffer);
+                }
             }
             Err(buffer)
         }
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index 1d9dbfe4b89..db795ce9f72 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -5,7 +5,8 @@ use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
-use rustc_errors::{PErr, PResult};
+use rustc_errors::{Applicability, PErr};
+use rustc_span::symbol::kw;
 
 pub(super) struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
@@ -18,36 +19,42 @@ pub(super) struct TokenTreesReader<'a> {
 impl<'a> TokenTreesReader<'a> {
     pub(super) fn parse_all_token_trees(
         string_reader: StringReader<'a>,
-    ) -> (PResult<'a, TokenStream>, Vec<UnmatchedDelim>) {
+    ) -> (TokenStream, Result<(), Vec<PErr<'a>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
             string_reader,
             token: Token::dummy(),
             diag_info: TokenTreeDiagInfo::default(),
         };
-        let res = tt_reader.parse_token_trees(/* is_delimited */ false);
-        (res, tt_reader.diag_info.unmatched_delims)
+        let (stream, res) = tt_reader.parse_token_trees(/* is_delimited */ false);
+        (stream, res, tt_reader.diag_info.unmatched_delims)
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s.
-    fn parse_token_trees(&mut self, is_delimited: bool) -> PResult<'a, TokenStream> {
+    fn parse_token_trees(
+        &mut self,
+        is_delimited: bool,
+    ) -> (TokenStream, Result<(), Vec<PErr<'a>>>) {
         self.token = self.string_reader.next_token().0;
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
-                token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)?),
+                token::OpenDelim(delim) => {
+                    buf.push(match self.parse_token_tree_open_delim(delim) {
+                        Ok(val) => val,
+                        Err(errs) => return (TokenStream::new(buf), Err(errs)),
+                    })
+                }
                 token::CloseDelim(delim) => {
-                    return if is_delimited {
-                        Ok(TokenStream::new(buf))
-                    } else {
-                        Err(self.close_delim_err(delim))
-                    };
+                    return (
+                        TokenStream::new(buf),
+                        if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
+                    );
                 }
                 token::Eof => {
-                    return if is_delimited {
-                        Err(self.eof_err())
-                    } else {
-                        Ok(TokenStream::new(buf))
-                    };
+                    return (
+                        TokenStream::new(buf),
+                        if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
+                    );
                 }
                 _ => {
                     // Get the next normal token. This might require getting multiple adjacent
@@ -97,7 +104,10 @@ impl<'a> TokenTreesReader<'a> {
         err
     }
 
-    fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> PResult<'a, TokenTree> {
+    fn parse_token_tree_open_delim(
+        &mut self,
+        open_delim: Delimiter,
+    ) -> Result<TokenTree, Vec<PErr<'a>>> {
         // The span for beginning of the delimited section
         let pre_span = self.token.span;
 
@@ -106,7 +116,10 @@ impl<'a> TokenTreesReader<'a> {
         // Parse the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let tts = self.parse_token_trees(/* is_delimited */ true)?;
+        let (tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        if let Err(errs) = res {
+            return Err(self.unclosed_delim_err(tts, errs));
+        }
 
         // Expand to cover the entire delimited token tree
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
@@ -192,6 +205,62 @@ impl<'a> TokenTreesReader<'a> {
         Ok(TokenTree::Delimited(delim_span, open_delim, tts))
     }
 
+    fn unclosed_delim_err(&mut self, tts: TokenStream, mut errs: Vec<PErr<'a>>) -> Vec<PErr<'a>> {
+        // If there are unclosed delims, see if there are diff markers and if so, point them
+        // out instead of complaining about the unclosed delims.
+        let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
+        let mut diff_errs = vec![];
+        // Suggest removing a `{` we think appears in an `if`/`while` condition
+        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
+        // we have no way of tracking this in the lexer itself, so we piggyback on the parser
+        let mut in_cond = false;
+        while parser.token != token::Eof {
+            if let Err(diff_err) = parser.err_diff_marker() {
+                diff_errs.push(diff_err);
+            } else if parser.is_keyword_ahead(0, &[kw::If, kw::While]) {
+                in_cond = true;
+            } else if matches!(
+                parser.token.kind,
+                token::CloseDelim(Delimiter::Brace) | token::FatArrow
+            ) {
+                // end of the `if`/`while` body, or the end of a `match` guard
+                in_cond = false;
+            } else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
+                // Store the `&&` and `let` to use their spans later when creating the diagnostic
+                let maybe_andand = parser.look_ahead(1, |t| t.clone());
+                let maybe_let = parser.look_ahead(2, |t| t.clone());
+                if maybe_andand == token::OpenDelim(Delimiter::Brace) {
+                    // This might be the beginning of the `if`/`while` body (i.e., the end of the condition)
+                    in_cond = false;
+                } else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
+                    let mut err = parser.struct_span_err(
+                        parser.token.span,
+                        "found a `{` in the middle of a let-chain",
+                    );
+                    err.span_suggestion(
+                        parser.token.span,
+                        "consider removing this brace to parse the `let` as part of the same chain",
+                        "",
+                        Applicability::MachineApplicable,
+                    );
+                    err.span_label(
+                        maybe_andand.span.to(maybe_let.span),
+                        "you might have meant to continue the let-chain here",
+                    );
+                    errs.push(err);
+                }
+            }
+            parser.bump();
+        }
+        if !diff_errs.is_empty() {
+            errs.iter_mut().for_each(|err| {
+                err.delay_as_bug();
+            });
+            return diff_errs;
+        }
+        return errs;
+    }
+
     fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
         // An unexpected closing delimiter (i.e., there is no
         // matching opening delimiter).
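
Note: the new `unclosed_delim_err` pass above re-scans the partially built token stream with a throwaway parser, looking for diff markers and for a stray `{` inside an `if`/`while` condition. A sketch of the let-chain case it targets (illustrative input; the stray `{` leaves the braces unbalanced, which is what routes the error through this path):

fn main() {
    let opt = Some(1);
    // The `{` after `opt.is_some()` is the one the new suggestion removes, so the
    // `&& let` parses as a continuation of the same chain.
    if opt.is_some() { && let Some(x) = opt {
        println!("{x}");
    }
}
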
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 104de47b97d..bad7c19cc27 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -249,7 +249,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
         let item = match &self.token.kind {
-            token::Interpolated(nt) => match &**nt {
+            token::Interpolated(nt) => match &nt.0 {
                 Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
                 _ => None,
             },
@@ -369,7 +369,7 @@ impl<'a> Parser<'a> {
     /// ```
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match &self.token.kind {
-            token::Interpolated(nt) => match &**nt {
+            token::Interpolated(nt) => match &nt.0 {
                 token::NtMeta(e) => Some(e.clone()),
                 _ => None,
             },
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 9f8361a4b1e..262e7b899ad 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -24,11 +24,12 @@ use crate::parser;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
+use rustc_ast::tokenstream::AttrTokenTree;
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
     AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingAnnotation, Block,
-    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Param, Pat, PatKind,
-    Path, PathSegment, QSelf, Ty, TyKind,
+    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
+    PatKind, Path, PathSegment, QSelf, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
@@ -504,8 +505,12 @@ impl<'a> Parser<'a> {
 
         // Special-case "expected `;`" errors
         if expected.contains(&TokenType::Token(token::Semi)) {
-            if self.prev_token == token::Question && self.maybe_recover_from_ternary_operator() {
-                return Ok(true);
+            // If the user is trying to write a ternary expression, recover it and
+            // return an Err to prevent a cascade of irrelevant diagnostics
+            if self.prev_token == token::Question
+                && let Err(e) = self.maybe_recover_from_ternary_operator()
+            {
+                return Err(e);
             }
 
             if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
@@ -1428,10 +1433,10 @@ impl<'a> Parser<'a> {
 
     /// Rust has no ternary operator (`cond ? then : else`). Parse it and try
     /// to recover from it if `then` and `else` are valid expressions. Returns
-    /// whether it was a ternary operator.
-    pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> bool {
+    /// an err if this appears to be a ternary expression.
+    pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> PResult<'a, ()> {
         if self.prev_token != token::Question {
-            return false;
+            return PResult::Ok(());
         }
 
         let lo = self.prev_token.span.lo();
@@ -1449,20 +1454,18 @@ impl<'a> Parser<'a> {
             if self.eat_noexpect(&token::Colon) {
                 match self.parse_expr() {
                     Ok(_) => {
-                        self.sess.emit_err(TernaryOperator { span: self.token.span.with_lo(lo) });
-                        return true;
+                        return Err(self
+                            .sess
+                            .create_err(TernaryOperator { span: self.token.span.with_lo(lo) }));
                     }
                     Err(err) => {
                         err.cancel();
-                        self.restore_snapshot(snapshot);
                     }
                 };
             }
-        } else {
-            self.restore_snapshot(snapshot);
-        };
-
-        false
+        }
+        self.restore_snapshot(snapshot);
+        Ok(())
     }
 
     pub(super) fn maybe_recover_from_bad_type_plus(&mut self, ty: &Ty) -> PResult<'a, ()> {
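
Note: `maybe_recover_from_ternary_operator` now returns the `TernaryOperator` error to its caller instead of emitting it and answering `true`, so the caller can stop before producing follow-on "expected `;`" diagnostics. The shape of input being recovered (illustrative):

fn main() {
    let cond = true;
    // Rust has no `?:` operator; the `?` parses as the try operator and the recovery
    // kicks in when `1` shows up where a `;` was expected.
    let _x = cond ? 1 : 2;
}
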
@@ -2250,6 +2253,59 @@ impl<'a> Parser<'a> {
             err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
+
+        // Walk the chain of macro expansions for the current token to point at how the original
+        // code was interpreted. This helps the user realize when a macro argument of one type is
+        // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
+        // in a subsequent macro invocation (#71039).
+        let mut tok = self.token.clone();
+        let mut labels = vec![];
+        while let TokenKind::Interpolated(node) = &tok.kind {
+            let tokens = node.0.tokens();
+            labels.push(node.clone());
+            if let Some(tokens) = tokens
+                && let tokens = tokens.to_attr_token_stream()
+                && let tokens = tokens.0.deref()
+                && let [AttrTokenTree::Token(token, _)] = &tokens[..]
+            {
+                tok = token.clone();
+            } else {
+                break;
+            }
+        }
+        let mut iter = labels.into_iter().peekable();
+        let mut show_link = false;
+        while let Some(node) = iter.next() {
+            let descr = node.0.descr();
+            if let Some(next) = iter.peek() {
+                let next_descr = next.0.descr();
+                if next_descr != descr {
+                    err.span_label(next.1, format!("this macro fragment matcher is {next_descr}"));
+                    err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
+                    err.span_label(
+                        next.0.use_span(),
+                        format!("this is expected to be {next_descr}"),
+                    );
+                    err.span_label(
+                        node.0.use_span(),
+                        format!(
+                            "this is interpreted as {}, but it is expected to be {}",
+                            next_descr, descr,
+                        ),
+                    );
+                    show_link = true;
+                } else {
+                    err.span_label(node.1, "");
+                }
+            }
+        }
+        if show_link {
+            err.note(
+                "when forwarding a matched fragment to another macro-by-example, matchers in the \
+                 second macro will see an opaque AST of the fragment type, not the underlying \
+                 tokens",
+            );
+        }
         err
     }
 
@@ -2808,8 +2864,15 @@ impl<'a> Parser<'a> {
     }
 
     pub fn recover_diff_marker(&mut self) {
+        if let Err(mut err) = self.err_diff_marker() {
+            err.emit();
+            FatalError.raise();
+        }
+    }
+
+    pub fn err_diff_marker(&mut self) -> PResult<'a, ()> {
         let Some(start) = self.diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else {
-            return;
+            return Ok(());
         };
         let mut spans = Vec::with_capacity(3);
         spans.push(start);
@@ -2856,8 +2919,7 @@ impl<'a> Parser<'a> {
             "for an explanation on these markers from the `git` documentation, visit \
              <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
         );
-        err.emit();
-        FatalError.raise()
+        Err(err)
     }
 
     /// Parse and throw away a parenthesized comma separated
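
Note: the new labels in the "expected expression" path above walk the chain of interpolated fragments so that both macro matchers get pointed at. A minimal sketch of the nested macro-by-example forwarding involved (fragment kinds chosen for illustration; the note added in the diff uses `expr`/`pat` as its example):

macro_rules! as_expr {
    ($e:expr) => { $e };
}
macro_rules! forward {
    // `$p` is captured as an opaque `pat` fragment and then handed to a matcher
    // expecting `expr`, which is where the new labels explain the mismatch.
    ($p:pat) => { as_expr!($p) };
}
fn main() {
    forward!(Some(_));
}
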
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 5157106f4e2..d8e99d34016 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -9,7 +9,7 @@ use super::{
 use crate::errors;
 use crate::maybe_recover_from_interpolated_ty_qpath;
 use ast::mut_visit::{noop_visit_expr, MutVisitor};
-use ast::{Path, PathSegment};
+use ast::{GenBlockKind, Path, PathSegment};
 use core::mem;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
@@ -32,10 +32,10 @@ use rustc_macros::Subdiagnostic;
 use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
 use rustc_session::lint::BuiltinLintDiagnostics;
-use rustc_span::source_map::{self, Span, Spanned};
+use rustc_span::source_map::{self, Spanned};
 use rustc_span::symbol::kw::PathRoot;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{BytePos, Pos};
+use rustc_span::{BytePos, Pos, Span};
 use thin_vec::{thin_vec, ThinVec};
 
 /// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
@@ -46,7 +46,7 @@ use thin_vec::{thin_vec, ThinVec};
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = &$p.token.kind {
-            match &**nt {
+            match &nt.0 {
                 token::NtExpr(e) | token::NtLiteral(e) => {
                     let e = e.clone();
                     $p.bump();
@@ -1441,14 +1441,20 @@ impl<'a> Parser<'a> {
             } else if this.token.uninterpolated_span().at_least_rust_2018() {
                 // `Span:.at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
                 if this.check_keyword(kw::Async) {
-                    if this.is_async_block() {
+                    if this.is_gen_block(kw::Async) {
                         // Check for `async {` and `async move {`.
-                        this.parse_async_block()
+                        this.parse_gen_block()
                     } else {
                         this.parse_expr_closure()
                     }
                 } else if this.eat_keyword(kw::Await) {
                     this.recover_incorrect_await_syntax(lo, this.prev_token.span)
+                } else if this.token.uninterpolated_span().at_least_rust_2024() {
+                    if this.is_gen_block(kw::Gen) {
+                        this.parse_gen_block()
+                    } else {
+                        this.parse_expr_lit()
+                    }
                 } else {
                     this.parse_expr_lit()
                 }
@@ -1848,7 +1854,7 @@ impl<'a> Parser<'a> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Yield(self.parse_expr_opt()?);
         let span = lo.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::coroutines, span);
+        self.sess.gated_spans.gate(sym::yield_expr, span);
         let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
@@ -1946,7 +1952,7 @@ impl<'a> Parser<'a> {
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
         if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtExpr(e) | token::NtLiteral(e) = &**nt
+            && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
             && matches!(e.kind, ExprKind::Err)
         {
             let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
@@ -2297,13 +2303,14 @@ impl<'a> Parser<'a> {
     /// Parses an optional `move` prefix to a closure-like construct.
     fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
         if self.eat_keyword(kw::Move) {
+            let move_kw_span = self.prev_token.span;
             // Check for `move async` and recover
             if self.check_keyword(kw::Async) {
                 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
                 Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
                     .into_diagnostic(&self.sess.span_diagnostic))
             } else {
-                Ok(CaptureBy::Value)
+                Ok(CaptureBy::Value { move_kw: move_kw_span })
             }
         } else {
             Ok(CaptureBy::Ref)
@@ -2434,10 +2441,26 @@ impl<'a> Parser<'a> {
                     self.error_on_extra_if(&cond)?;
                     // Parse block, which will always fail, but we can add a nice note to the error
                     self.parse_block().map_err(|mut err| {
-                        err.span_note(
-                            cond_span,
-                            "the `if` expression is missing a block after this condition",
-                        );
+                        if self.prev_token == token::Semi
+                            && self.token == token::AndAnd
+                            && let maybe_let = self.look_ahead(1, |t| t.clone())
+                            && maybe_let.is_keyword(kw::Let)
+                        {
+                            err.span_suggestion(
+                                self.prev_token.span,
+                                "consider removing this semicolon to parse the `let` as part of the same chain",
+                                "",
+                                Applicability::MachineApplicable,
+                            ).span_note(
+                                self.token.span.to(maybe_let.span),
+                                "you likely meant to continue parsing the let-chain starting here",
+                            );
+                        } else {
+                            err.span_note(
+                                cond_span,
+                                "the `if` expression is missing a block after this condition",
+                            );
+                        }
                         err
                     })?
                 }
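
Note: the widened recovery above targets a `;` typed where a let-chain was meant to continue. Illustrative input of the shape described by the new suggestion text (assumed, not taken from the patch's tests):

fn main() {
    let a = Some(1);
    let b = Some(2);
    // The `;` ends the condition early; removing it (as suggested) lets the
    // following `&& let` parse as part of the same chain.
    if let Some(x) = a; && let Some(y) = b {
        println!("{x} {y}");
    }
}
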
@@ -2881,15 +2904,16 @@ impl<'a> Parser<'a> {
                         "=>",
                         Applicability::MachineApplicable,
                     );
-                    err.emit();
-                    this.bump();
-                } else if matches!(
-                    (&this.prev_token.kind, &this.token.kind),
-                    (token::DotDotEq, token::Gt)
-                ) {
-                    // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
-                    // so we suppress the error here
-                    err.delay_as_bug();
+                    if matches!(
+                        (&this.prev_token.kind, &this.token.kind),
+                        (token::DotDotEq, token::Gt)
+                    ) {
+                        // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
+                        // so we suppress the error here
+                        err.delay_as_bug();
+                    } else {
+                        err.emit();
+                    }
                     this.bump();
                 } else {
                     return Err(err);
@@ -3059,18 +3083,24 @@ impl<'a> Parser<'a> {
             && self.token.uninterpolated_span().at_least_rust_2018()
     }
 
-    /// Parses an `async move? {...}` expression.
-    fn parse_async_block(&mut self) -> PResult<'a, P<Expr>> {
+    /// Parses an `async move? {...}` or `gen move? {...}` expression.
+    fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        self.expect_keyword(kw::Async)?;
+        let kind = if self.eat_keyword(kw::Async) {
+            GenBlockKind::Async
+        } else {
+            assert!(self.eat_keyword(kw::Gen));
+            self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.token.span));
+            GenBlockKind::Gen
+        };
         let capture_clause = self.parse_capture_clause()?;
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
-        let kind = ExprKind::Async(capture_clause, body);
+        let kind = ExprKind::Gen(capture_clause, body, kind);
         Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
     }
 
-    fn is_async_block(&self) -> bool {
-        self.token.is_keyword(kw::Async)
+    fn is_gen_block(&self, kw: Symbol) -> bool {
+        self.token.is_keyword(kw)
             && ((
                 // `async move {`
                 self.is_keyword_ahead(1, &[kw::Move])
@@ -3596,7 +3626,7 @@ impl MutVisitor for CondChecker<'_> {
             | ExprKind::Match(_, _)
             | ExprKind::Closure(_)
             | ExprKind::Block(_, _)
-            | ExprKind::Async(_, _)
+            | ExprKind::Gen(_, _, _)
             | ExprKind::TryBlock(_)
             | ExprKind::Underscore
             | ExprKind::Path(_, _)
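
Note: taken together, the expr.rs changes teach the parser `gen` blocks alongside `async` blocks (`is_gen_block`, `parse_gen_block`, `ExprKind::Gen`). A sketch of the surface syntax now accepted, assuming a 2024-edition crate with the unstable `gen_blocks` feature; semantics are outside the scope of this patch:

#![feature(gen_blocks)]

fn main() {
    // Parsed as ExprKind::Gen(CaptureBy::Ref, _, GenBlockKind::Gen).
    // (`yield` may additionally need the `yield_expr` feature gate.)
    let _block = gen {
        yield 1;
        yield 2;
    };
    // A `move` capture clause is accepted too, mirroring `async move { .. }`.
    let _moved = gen move { yield 0 };
}
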
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 982f601c0d5..d124ea571ab 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -22,9 +22,9 @@ use rustc_errors::{
 };
 use rustc_span::edit_distance::edit_distance;
 use rustc_span::edition::Edition;
-use rustc_span::source_map::{self, Span};
+use rustc_span::source_map;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::DUMMY_SP;
+use rustc_span::{Span, DUMMY_SP};
 use std::fmt::Write;
 use std::mem;
 use thin_vec::{thin_vec, ThinVec};
@@ -123,7 +123,7 @@ impl<'a> Parser<'a> {
         // Don't use `maybe_whole` so that we have precise control
         // over when we bump the parser
         if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtItem(item) = &**nt
+            && let token::NtItem(item) = &nt.0
         {
             let mut item = item.clone();
             self.bump();
@@ -2278,6 +2278,18 @@ impl<'a> Parser<'a> {
                     err.span_label(ident.span, "while parsing this `fn`");
                     err.emit();
                 } else {
+                    // check for typo'd Fn* trait bounds such as
+                    // fn foo<F>() where F: FnOnce -> () {}
+                    if self.token.kind == token::RArrow {
+                        let machine_applicable = [sym::FnOnce, sym::FnMut, sym::Fn]
+                            .into_iter()
+                            .any(|s| self.prev_token.is_ident_named(s));
+
+                        err.subdiagnostic(errors::FnTraitMissingParen {
+                            span: self.prev_token.span,
+                            machine_applicable,
+                        });
+                    }
                     return Err(err);
                 }
             }
@@ -2297,9 +2309,9 @@ impl<'a> Parser<'a> {
         // `pub` is added in case users got confused with the ordering like `async pub fn`,
         // only if it wasn't preceded by `default` as `default pub` is invalid.
         let quals: &[Symbol] = if check_pub {
-            &[kw::Pub, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
+            &[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
         } else {
-            &[kw::Const, kw::Async, kw::Unsafe, kw::Extern]
+            &[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
         };
         self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
             // `$qual fn` or `$qual $qual`:
@@ -2353,6 +2365,9 @@ impl<'a> Parser<'a> {
         let async_start_sp = self.token.span;
         let asyncness = self.parse_asyncness(case);
 
+        let _gen_start_sp = self.token.span;
+        let genness = self.parse_genness(case);
+
         let unsafe_start_sp = self.token.span;
         let unsafety = self.parse_unsafety(case);
 
@@ -2368,6 +2383,10 @@ impl<'a> Parser<'a> {
             }
         }
 
+        if let Gen::Yes { span, .. } = genness {
+            self.sess.emit_err(errors::GenFn { span });
+        }
+
         if !self.eat_keyword_case(kw::Fn, case) {
             // It is possible for `expect_one_of` to recover given the contents of
             // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
@@ -2382,22 +2401,39 @@ impl<'a> Parser<'a> {
                         Misplaced(Span),
                     }
 
+                    // We may be able to recover
+                    let mut recover_constness = constness;
+                    let mut recover_asyncness = asyncness;
+                    let mut recover_unsafety = unsafety;
                     // This will allow the machine fix to directly place the keyword in the correct place or to indicate
                     // that the keyword is already present and the second instance should be removed.
                     let wrong_kw = if self.check_keyword(kw::Const) {
                         match constness {
                             Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
-                            Const::No => Some(WrongKw::Misplaced(async_start_sp)),
+                            Const::No => {
+                                recover_constness = Const::Yes(self.token.span);
+                                Some(WrongKw::Misplaced(async_start_sp))
+                            }
                         }
                     } else if self.check_keyword(kw::Async) {
                         match asyncness {
                             Async::Yes { span, .. } => Some(WrongKw::Duplicated(span)),
-                            Async::No => Some(WrongKw::Misplaced(unsafe_start_sp)),
+                            Async::No => {
+                                recover_asyncness = Async::Yes {
+                                    span: self.token.span,
+                                    closure_id: DUMMY_NODE_ID,
+                                    return_impl_trait_id: DUMMY_NODE_ID,
+                                };
+                                Some(WrongKw::Misplaced(unsafe_start_sp))
+                            }
                         }
                     } else if self.check_keyword(kw::Unsafe) {
                         match unsafety {
                             Unsafe::Yes(sp) => Some(WrongKw::Duplicated(sp)),
-                            Unsafe::No => Some(WrongKw::Misplaced(ext_start_sp)),
+                            Unsafe::No => {
+                                recover_unsafety = Unsafe::Yes(self.token.span);
+                                Some(WrongKw::Misplaced(ext_start_sp))
+                            }
                         }
                     } else {
                         None
@@ -2467,6 +2503,23 @@ impl<'a> Parser<'a> {
                             }
                         }
                     }
+
+                    if wrong_kw.is_some()
+                        && self.may_recover()
+                        && self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case))
+                    {
+                        // Advance past the misplaced keyword and `fn`
+                        self.bump();
+                        self.bump();
+                        err.emit();
+                        return Ok(FnHeader {
+                            constness: recover_constness,
+                            unsafety: recover_unsafety,
+                            asyncness: recover_asyncness,
+                            ext,
+                        });
+                    }
+
                     return Err(err);
                 }
             }
@@ -2492,11 +2545,23 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
         let mut first_param = true;
         // Parse the arguments, starting out with `self` being allowed...
+        if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
+        // might be typo'd trait impl, handled elsewhere
+        && !self.token.is_keyword(kw::For)
+        {
+            // recover from missing argument list, e.g. `fn main -> () {}`
+            self.sess
+                .emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() });
+            return Ok(ThinVec::new());
+        }
+
         let (mut params, _) = self.parse_paren_comma_seq(|p| {
             p.recover_diff_marker();
+            let snapshot = p.create_snapshot_for_diagnostic();
             let param = p.parse_param_general(req_name, first_param).or_else(|mut e| {
                 e.emit();
                 let lo = p.prev_token.span;
+                p.restore_snapshot(snapshot);
                 // Skip every token until next possible arg or end.
                 p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
                 // Create a placeholder argument for proper arg count (issue #34264).
@@ -2685,7 +2750,7 @@ impl<'a> Parser<'a> {
 
     fn is_named_param(&self) -> bool {
         let offset = match &self.token.kind {
-            token::Interpolated(nt) => match **nt {
+            token::Interpolated(nt) => match &nt.0 {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             },
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 597303cae73..9bd436f01ac 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -11,6 +11,7 @@ mod stmt;
 mod ty;
 
 use crate::lexer::UnmatchedDelim;
+use ast::Gen;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 pub(crate) use expr::ForbiddenLetReason;
@@ -35,8 +36,8 @@ use rustc_errors::{
     Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
 };
 use rustc_session::parse::ParseSess;
-use rustc_span::source_map::{Span, DUMMY_SP};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use rustc_span::{Span, DUMMY_SP};
 use std::ops::Range;
 use std::{mem, slice};
 use thin_vec::ThinVec;
@@ -92,7 +93,7 @@ pub enum TrailingToken {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = &$p.token.kind {
-            if let token::$constructor(x) = &**nt {
+            if let token::$constructor(x) = &nt.0 {
                 let $x = x.clone();
                 $p.bump();
                 return Ok($e);
@@ -106,15 +107,15 @@ macro_rules! maybe_whole {
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery
-                    && $self.may_recover()
-                    && $self.look_ahead(1, |t| t == &token::ModSep)
-                    && let token::Interpolated(nt) = &$self.token.kind
-                    && let token::NtTy(ty) = &**nt
-                {
-                    let ty = ty.clone();
-                    $self.bump();
-                    return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
-                }
+            && $self.may_recover()
+            && $self.look_ahead(1, |t| t == &token::ModSep)
+            && let token::Interpolated(nt) = &$self.token.kind
+            && let token::NtTy(ty) = &nt.0
+        {
+            let ty = ty.clone();
+            $self.bump();
+            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
+        }
     };
 }
 
@@ -159,8 +160,9 @@ pub struct Parser<'a> {
     /// appropriately.
     ///
     /// See the comments in the `parse_path_segment` function for more details.
-    unmatched_angle_bracket_count: u32,
-    max_angle_bracket_count: u32,
+    unmatched_angle_bracket_count: u16,
+    max_angle_bracket_count: u16,
+    angle_bracket_nesting: u16,
 
     last_unexpected_token_span: Option<Span>,
     /// If present, this `Parser` is not parsing Rust code but rather a macro call.
@@ -365,12 +367,14 @@ impl TokenDescription {
 pub(super) fn token_descr(token: &Token) -> String {
     let name = pprust::token_to_string(token).to_string();
 
-    let kind = TokenDescription::from_token(token).map(|kind| match kind {
-        TokenDescription::ReservedIdentifier => "reserved identifier",
-        TokenDescription::Keyword => "keyword",
-        TokenDescription::ReservedKeyword => "reserved keyword",
-        TokenDescription::DocComment => "doc comment",
-    });
+    let kind = match (TokenDescription::from_token(token), &token.kind) {
+        (Some(TokenDescription::ReservedIdentifier), _) => Some("reserved identifier"),
+        (Some(TokenDescription::Keyword), _) => Some("keyword"),
+        (Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
+        (Some(TokenDescription::DocComment), _) => Some("doc comment"),
+        (None, TokenKind::Interpolated(node)) => Some(node.0.descr()),
+        (None, _) => None,
+    };
 
     if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
 }
@@ -394,6 +398,7 @@ impl<'a> Parser<'a> {
             break_last_token: false,
             unmatched_angle_bracket_count: 0,
             max_angle_bracket_count: 0,
+            angle_bracket_nesting: 0,
             last_unexpected_token_span: None,
             subparser_name,
             capture_state: CaptureState {
@@ -659,7 +664,7 @@ impl<'a> Parser<'a> {
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
             && self.look_ahead(dist + 1, |t| match &t.kind {
-                token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
+                token::Interpolated(nt) => matches!(&nt.0, token::NtBlock(..)),
                 token::OpenDelim(Delimiter::Brace) => true,
                 _ => false,
             })
@@ -827,8 +832,8 @@ impl<'a> Parser<'a> {
                             // https://github.com/rust-lang/rust/issues/72373
                             if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                 let msg = format!(
-                                    "if you meant to bind the contents of \
-                                    the rest of the array pattern into `{}`, use `@`",
+                                    "if you meant to bind the contents of the rest of the array \
+                                     pattern into `{}`, use `@`",
                                     pprust::token_to_string(&self.prev_token)
                                 );
                                 expect_err
@@ -1112,7 +1117,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
-    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
+    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
         self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
     }
 
@@ -1126,6 +1131,16 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Parses genness: `gen` or nothing.
+    fn parse_genness(&mut self, case: Case) -> Gen {
+        if self.token.span.at_least_rust_2024() && self.eat_keyword_case(kw::Gen, case) {
+            let span = self.prev_token.uninterpolated_span();
+            Gen::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
+        } else {
+            Gen::No
+        }
+    }
+
     /// Parses unsafety: `unsafe` or nothing.
     fn parse_unsafety(&mut self, case: Case) -> Unsafe {
         if self.eat_keyword_case(kw::Unsafe, case) {
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 025b0615a7e..06cc39fbb5a 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -50,12 +50,12 @@ impl<'a> Parser<'a> {
             NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
             NonterminalKind::Vis => match token.kind {
                 // The follow-set of :vis + "priv" keyword + interpolated
-                token::Comma | token::Ident(..) | token::Interpolated(..) => true,
+                token::Comma | token::Ident(..) | token::Interpolated(_) => true,
                 _ => token.can_begin_type(),
             },
             NonterminalKind::Block => match &token.kind {
                 token::OpenDelim(Delimiter::Brace) => true,
-                token::Interpolated(nt) => match **nt {
+                token::Interpolated(nt) => match &nt.0 {
                     NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
                     NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_)
                     | NtVis(_) => false,
@@ -64,7 +64,7 @@ impl<'a> Parser<'a> {
             },
             NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::ModSep | token::Ident(..) => true,
-                token::Interpolated(nt) => may_be_ident(nt),
+                token::Interpolated(nt) => may_be_ident(&nt.0),
                 _ => false,
             },
             NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
@@ -75,7 +75,7 @@ impl<'a> Parser<'a> {
                 token::BinOp(token::And) |                  // reference
                 token::BinOp(token::Minus) |                // negative literal
                 token::AndAnd |                             // double reference
-                token::Literal(..) |                        // literal
+                token::Literal(_) |                        // literal
                 token::DotDot |                             // range pattern (future compat)
                 token::DotDotDot |                          // range pattern (future compat)
                 token::ModSep |                             // path
@@ -83,14 +83,14 @@ impl<'a> Parser<'a> {
                 token::BinOp(token::Shl) => true,           // path (double UFCS)
                 // leading vert `|` or-pattern
                 token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
-                token::Interpolated(nt) => may_be_ident(nt),
+                token::Interpolated(nt) => may_be_ident(&nt.0),
                 _ => false,
             }
             }
             NonterminalKind::Lifetime => match &token.kind {
                 token::Lifetime(_) => true,
                 token::Interpolated(nt) => {
-                    matches!(**nt, NtLifetime(_))
+                    matches!(&nt.0, NtLifetime(_))
                 }
                 _ => false,
             },
@@ -191,7 +191,7 @@ impl<'a> Parser<'a> {
             panic!(
                 "Missing tokens for nt {:?} at {:?}: {:?}",
                 nt,
-                nt.span(),
+                nt.use_span(),
                 pprust::nonterminal_to_string(&nt)
             );
         }
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 3e4e9278910..5cec3f5762d 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -20,8 +20,9 @@ use rustc_ast::{
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult};
 use rustc_session::errors::ExprParenthesesNeeded;
-use rustc_span::source_map::{respan, Span, Spanned};
+use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::Span;
 use thin_vec::{thin_vec, ThinVec};
 
 #[derive(PartialEq, Copy, Clone)]
@@ -591,7 +592,7 @@ impl<'a> Parser<'a> {
 
         // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
         if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtPat(_) = **nt {
+            if let token::NtPat(..) = &nt.0 {
                 self.expected_ident_found_err().emit();
             }
         }
@@ -637,13 +638,13 @@ impl<'a> Parser<'a> {
 
     /// Error on `mut $pat` where `$pat` is not an ident.
     fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
-        let span = lo.to(pat.span);
-        let pat = pprust::pat_to_string(&pat);
-
         self.sess.emit_err(if changed_any_binding {
-            InvalidMutInPattern::NestedIdent { span, pat }
+            InvalidMutInPattern::NestedIdent {
+                span: lo.to(pat.span),
+                pat: pprust::pat_to_string(&pat),
+            }
         } else {
-            InvalidMutInPattern::NonIdent { span, pat }
+            InvalidMutInPattern::NonIdent { span: lo.until(pat.span) }
         });
     }
 
@@ -967,11 +968,12 @@ impl<'a> Parser<'a> {
 
             // check that a comma comes after every field
             if !ate_comma {
-                let err = ExpectedCommaAfterPatternField { span: self.token.span }
+                let mut err = ExpectedCommaAfterPatternField { span: self.token.span }
                     .into_diagnostic(&self.sess.span_diagnostic);
                 if let Some(mut delayed) = delayed_err {
                     delayed.emit();
                 }
+                self.recover_misplaced_pattern_modifiers(&fields, &mut err);
                 return Err(err);
             }
             ate_comma = false;
@@ -1109,6 +1111,37 @@ impl<'a> Parser<'a> {
         Ok((fields, etc))
     }
 
+    /// If the user writes `S { ref field: name }` instead of `S { field: ref name }`, we suggest
+    /// the correct code.
+    fn recover_misplaced_pattern_modifiers(
+        &self,
+        fields: &ThinVec<PatField>,
+        err: &mut DiagnosticBuilder<'a, ErrorGuaranteed>,
+    ) {
+        if let Some(last) = fields.iter().last()
+            && last.is_shorthand
+            && let PatKind::Ident(binding, ident, None) = last.pat.kind
+            && binding != BindingAnnotation::NONE
+            && self.token == token::Colon
+            // We found `ref mut? ident:`, try to parse a `name,` or `name }`.
+            && let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span))
+            && self.look_ahead(2, |t| {
+                t == &token::Comma || t == &token::CloseDelim(Delimiter::Brace)
+            })
+        {
+            let span = last.pat.span.with_hi(ident.span.lo());
+            // We have `S { ref field: name }` instead of `S { field: ref name }`
+            err.multipart_suggestion(
+                "the pattern modifiers belong after the `:`",
+                vec![
+                    (span, String::new()),
+                    (name_span.shrink_to_lo(), binding.prefix_str().to_string()),
+                ],
+                Applicability::MachineApplicable,
+            );
+        }
+    }
+
     /// Recover on `...` or `_` as if it were `..` to avoid further errors.
     /// See issue #46718.
     fn recover_bad_dot_dot(&self) {
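
Note: sketches of the two pattern inputs whose suggestions change above (illustrative):

struct S { field: u8 }

fn main() {
    let s = S { field: 0 };
    // `recover_misplaced_pattern_modifiers`: modifiers written before the field name
    // are moved after the `:`, suggesting `S { field: ref name }`.
    let S { ref field: name } = s;

    // `InvalidMutInPattern::NonIdent`: for `mut` on a pattern with nothing to bind,
    // the suggestion now simply deletes the `mut` instead of re-printing the pattern.
    let mut _ = 0u8;
}
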
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 2fcb9a78cfd..0f4ba9617c6 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -10,8 +10,8 @@ use rustc_ast::{
     Path, PathSegment, QSelf,
 };
 use rustc_errors::{Applicability, IntoDiagnostic, PResult};
-use rustc_span::source_map::{BytePos, Span};
 use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{BytePos, Span};
 use std::mem;
 use thin_vec::ThinVec;
 use tracing::debug;
@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
         });
 
         if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtTy(ty) = &**nt {
+            if let token::NtTy(ty) = &nt.0 {
                 if let ast::TyKind::Path(None, path) = &ty.kind {
                     let path = path.clone();
                     self.bump();
@@ -487,10 +487,24 @@ impl<'a> Parser<'a> {
         // Take a snapshot before attempting to parse - we can restore this later.
         let snapshot = is_first_invocation.then(|| self.clone());
 
+        self.angle_bracket_nesting += 1;
         debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
         match self.parse_angle_args(ty_generics) {
-            Ok(args) => Ok(args),
+            Ok(args) => {
+                self.angle_bracket_nesting -= 1;
+                Ok(args)
+            }
+            Err(mut e) if self.angle_bracket_nesting > 10 => {
+                self.angle_bracket_nesting -= 1;
+                // When encountering severely malformed code where there are several levels of
+                // nested unclosed angle args (`f::<f::<f::<f::<...`), we avoid severe O(n^2)
+                // behavior by bailing out earlier (#117080).
+                e.emit();
+                rustc_errors::FatalError.raise();
+            }
             Err(e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
+                self.angle_bracket_nesting -= 1;
+
                 // Swap `self` with our backup of the parser state before attempting to parse
                 // generic arguments.
                 let snapshot = mem::replace(self, snapshot.unwrap());
@@ -520,8 +534,8 @@ impl<'a> Parser<'a> {
                     // Make a span over ${unmatched angle bracket count} characters.
                     // This is safe because `all_angle_brackets` ensures that there are only `<`s,
                     // i.e. no multibyte characters, in this range.
-                    let span =
-                        lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count));
+                    let span = lo
+                        .with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count.into()));
                     self.sess.emit_err(errors::UnmatchedAngle {
                         span,
                         plural: snapshot.unmatched_angle_bracket_count > 1,
@@ -531,7 +545,10 @@ impl<'a> Parser<'a> {
                     self.parse_angle_args(ty_generics)
                 }
             }
-            Err(e) => Err(e),
+            Err(e) => {
+                self.angle_bracket_nesting -= 1;
+                Err(e)
+            }
         }
     }
 
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 8e3c4568694..d1c5ca4b93c 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -20,8 +20,8 @@ use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyl
 use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
 use rustc_ast::{StmtKind, DUMMY_NODE_ID};
 use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
-use rustc_span::source_map::{BytePos, Span};
 use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{BytePos, Span};
 
 use std::borrow::Cow;
 use std::mem;
@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
         // Don't use `maybe_whole` so that we have precise control
         // over when we bump the parser
         if let token::Interpolated(nt) = &self.token.kind
-            && let token::NtStmt(stmt) = &**nt
+            && let token::NtStmt(stmt) = &nt.0
         {
             let mut stmt = stmt.clone();
             self.bump();
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index badea5d6133..dc0f1396523 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -18,9 +18,8 @@ use rustc_ast::{
     TraitObjectSyntax, Ty, TyKind,
 };
 use rustc_errors::{Applicability, PResult};
-use rustc_span::source_map::Span;
 use rustc_span::symbol::{kw, sym, Ident};
-use rustc_span::Symbol;
+use rustc_span::{Span, Symbol};
 use thin_vec::{thin_vec, ThinVec};
 
 /// Any `?`, `!`, or `~const` modifiers that appear at the start of a bound.
@@ -136,7 +135,7 @@ impl<'a> Parser<'a> {
         )
     }
 
-    /// Parse a type suitable for a field defintion.
+    /// Parse a type suitable for a field definition.
     /// The difference from `parse_ty` is that this version
     /// allows anonymous structs and unions.
     pub fn parse_ty_for_field_def(&mut self) -> PResult<'a, P<Ty>> {