Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs | 2973
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs | 120
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs | 916
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs | 521
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs | 358
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs | 705
-rw-r--r--  compiler/rustc_parse/src/lib.rs | 167
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs | 260
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs | 191
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs | 1918
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs | 3454
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs | 304
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs | 1670
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 850
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs | 169
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs | 891
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs | 290
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs | 501
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs | 783
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs | 171
20 files changed, 11369 insertions, 5843 deletions
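Note for readers: most of the new lines land in errors.rs, which defines `#[derive(Diagnostic)]` / `#[derive(Subdiagnostic)]` structs keyed by Fluent slugs (the `#[diag(...)]` argument), with spans and suggestions declared as field attributes. As a minimal sketch of how such a struct is consumed at a call site, assuming the usual `DiagCtxt::emit_err` entry point used elsewhere in the compiler (the surrounding parser method name and field values below are illustrative and are not part of this diff):

    // Illustrative only: a hypothetical call site inside rustc_parse, not part of this diff.
    // `emit_err` accepts any value whose type derives `Diagnostic`; the derive renders the
    // Fluent message named by `#[diag(parse_incorrect_semicolon)]` and attaches the
    // machine-applicable removal suggestion declared on the `span` field.
    impl<'a> Parser<'a> {
        fn report_stray_semicolon(&mut self, span: Span) {
            self.dcx().emit_err(IncorrectSemicolon {
                span,               // primary span; also the suggestion to delete the `;`
                opt_help: Some(()), // `Option<()>` fields toggle optional #[help]/#[note] output
                name: "item",       // interpolated into the message as {$name}
            });
        }
    }
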
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
new file mode 100644
index 00000000000..3c3a8d6fbb9
--- /dev/null
+++ b/compiler/rustc_parse/src/errors.rs
@@ -0,0 +1,2973 @@
+use std::borrow::Cow;
+
+use rustc_ast::token::Token;
+use rustc_ast::{Path, Visibility};
+use rustc_errors::{
+    codes::*, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
+    IntoDiagnostic, Level, SubdiagnosticMessageOp,
+};
+use rustc_macros::{Diagnostic, Subdiagnostic};
+use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::edition::{Edition, LATEST_STABLE_EDITION};
+use rustc_span::symbol::Ident;
+use rustc_span::{Span, Symbol};
+
+use crate::fluent_generated as fluent;
+use crate::parser::{ForbiddenLetReason, TokenDescription};
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_report_ambiguous_plus)]
+pub(crate) struct AmbiguousPlus {
+    pub sum_ty: String,
+    #[primary_span]
+    #[suggestion(code = "({sum_ty})")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_recover_from_bad_type_plus, code = E0178)]
+pub(crate) struct BadTypePlus {
+    pub ty: String,
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: BadTypePlusSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum BadTypePlusSub {
+    #[suggestion(
+        parse_add_paren,
+        code = "{sum_with_parens}",
+        applicability = "machine-applicable"
+    )]
+    AddParen {
+        sum_with_parens: String,
+        #[primary_span]
+        span: Span,
+    },
+    #[label(parse_forgot_paren)]
+    ForgotParen {
+        #[primary_span]
+        span: Span,
+    },
+    #[label(parse_expect_path)]
+    ExpectPath {
+        #[primary_span]
+        span: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_recover_from_bad_qpath_stage_2)]
+pub(crate) struct BadQPathStage2 {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub wrap: WrapType,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WrapType {
+    #[suggestion_part(code = "<")]
+    pub lo: Span,
+    #[suggestion_part(code = ">")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_semicolon)]
+pub(crate) struct IncorrectSemicolon<'a> {
+    #[primary_span]
+    #[suggestion(style = "short", code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    #[help]
+    pub opt_help: Option<()>,
+    pub name: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_use_of_await)]
+pub(crate) struct IncorrectUseOfAwait {
+    #[primary_span]
+    #[suggestion(parse_parentheses_suggestion, code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_use_of_await)]
+pub(crate) struct IncorrectAwait {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(parse_postfix_suggestion, code = "{expr}.await{question_mark}")]
+    pub sugg_span: (Span, Applicability),
+    pub expr: String,
+    pub question_mark: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_in_in_typo)]
+pub(crate) struct InInTypo {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_variable_declaration)]
+pub(crate) struct InvalidVariableDeclaration {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: InvalidVariableDeclarationSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidVariableDeclarationSub {
+    #[suggestion(parse_switch_mut_let_order, applicability = "maybe-incorrect", code = "let mut")]
+    SwitchMutLetOrder(#[primary_span] Span),
+    #[suggestion(
+        parse_missing_let_before_mut,
+        applicability = "machine-applicable",
+        code = "let mut"
+    )]
+    MissingLet(#[primary_span] Span),
+    #[suggestion(parse_use_let_not_auto, applicability = "machine-applicable", code = "let")]
+    UseLetNotAuto(#[primary_span] Span),
+    #[suggestion(parse_use_let_not_var, applicability = "machine-applicable", code = "let")]
+    UseLetNotVar(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_switch_ref_box_order)]
+pub(crate) struct SwitchRefBoxOrder {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "box ref")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_comparison_operator)]
+pub(crate) struct InvalidComparisonOperator {
+    #[primary_span]
+    pub span: Span,
+    pub invalid: String,
+    #[subdiagnostic]
+    pub sub: InvalidComparisonOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidComparisonOperatorSub {
+    #[suggestion(
+        parse_use_instead,
+        style = "short",
+        applicability = "machine-applicable",
+        code = "{correct}"
+    )]
+    Correctable {
+        #[primary_span]
+        span: Span,
+        invalid: String,
+        correct: String,
+    },
+    #[label(parse_spaceship_operator_invalid)]
+    Spaceship(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_logical_operator)]
+#[note]
+pub(crate) struct InvalidLogicalOperator {
+    #[primary_span]
+    pub span: Span,
+    pub incorrect: String,
+    #[subdiagnostic]
+    pub sub: InvalidLogicalOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum InvalidLogicalOperatorSub {
+    #[suggestion(
+        parse_use_amp_amp_for_conjunction,
+        style = "short",
+        applicability = "machine-applicable",
+        code = "&&"
+    )]
+    Conjunction(#[primary_span] Span),
+    #[suggestion(
+        parse_use_pipe_pipe_for_disjunction,
+        style = "short",
+        applicability = "machine-applicable",
+        code = "||"
+    )]
+    Disjunction(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_tilde_is_not_unary_operator)]
+pub(crate) struct TildeAsUnaryOperator(
+    #[primary_span]
+    #[suggestion(style = "short", applicability = "machine-applicable", code = "!")]
+    pub Span,
+);
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_not)]
+pub(crate) struct NotAsNegationOperator {
+    #[primary_span]
+    pub negated: Span,
+    pub negated_desc: String,
+    #[subdiagnostic]
+    pub sub: NotAsNegationOperatorSub,
+}
+
+#[derive(Subdiagnostic)]
+pub enum NotAsNegationOperatorSub {
+    #[suggestion(
+        parse_unexpected_token_after_not_default,
+        style = "short",
+        applicability = "machine-applicable",
+        code = "!"
+    )]
+    SuggestNotDefault(#[primary_span] Span),
+
+    #[suggestion(
+        parse_unexpected_token_after_not_bitwise,
+        style = "short",
+        applicability = "machine-applicable",
+        code = "!"
+    )]
+    SuggestNotBitwise(#[primary_span] Span),
+
+    #[suggestion(
+        parse_unexpected_token_after_not_logical,
+        style = "short",
+        applicability = "machine-applicable",
+        code = "!"
+    )]
+    SuggestNotLogical(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_malformed_loop_label)]
+pub(crate) struct MalformedLoopLabel {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "{correct_label}")]
+    pub span: Span,
+    pub correct_label: Ident,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_in_borrow_expression)]
+pub(crate) struct LifetimeInBorrowExpression {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = "")]
+    #[label]
+    pub lifetime_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_field_expression_with_generic)]
+pub(crate) struct FieldExpressionWithGeneric(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_invocation_with_qualified_path)]
+pub(crate) struct MacroInvocationWithQualifiedPath(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_label)]
+pub(crate) struct UnexpectedTokenAfterLabel {
+    #[primary_span]
+    #[label(parse_unexpected_token_after_label)]
+    pub span: Span,
+    #[suggestion(parse_suggestion_remove_label, style = "verbose", code = "")]
+    pub remove_label: Option<Span>,
+    #[subdiagnostic]
+    pub enclose_in_block: Option<UnexpectedTokenAfterLabelSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion_enclose_in_block, applicability = "machine-applicable")]
+pub(crate) struct UnexpectedTokenAfterLabelSugg {
+    #[suggestion_part(code = "{{ ")]
+    pub left: Span,
+    #[suggestion_part(code = " }}")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_require_colon_after_labeled_expression)]
+#[note]
+pub(crate) struct RequireColonAfterLabeledExpression {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub label: Span,
+    #[suggestion(style = "short", applicability = "machine-applicable", code = ": ")]
+    pub label_end: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_do_catch_syntax_removed)]
+#[note]
+pub(crate) struct DoCatchSyntaxRemoved {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "try")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_float_literal_requires_integer_part)]
+pub(crate) struct FloatLiteralRequiresIntegerPart {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "{correct}")]
+    pub span: Span,
+    pub correct: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_semicolon_before_array)]
+pub(crate) struct MissingSemicolonBeforeArray {
+    #[primary_span]
+    pub open_delim: Span,
+    #[suggestion(style = "verbose", applicability = "maybe-incorrect", code = ";")]
+    pub semicolon: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expect_dotdot_not_dotdotdot)]
+pub(crate) struct MissingDotDot {
+    #[primary_span]
+    pub token_span: Span,
+    #[suggestion(applicability = "maybe-incorrect", code = "..", style = "verbose")]
+    pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_block_macro_segment)]
+pub(crate) struct InvalidBlockMacroSegment {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub context: Span,
+    #[subdiagnostic]
+    pub wrap: WrapInExplicitBlock,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WrapInExplicitBlock {
+    #[suggestion_part(code = "{{ ")]
+    pub lo: Span,
+    #[suggestion_part(code = " }}")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_if_expression_missing_then_block)]
+pub(crate) struct IfExpressionMissingThenBlock {
+    #[primary_span]
+    pub if_span: Span,
+    #[subdiagnostic]
+    pub missing_then_block_sub: IfExpressionMissingThenBlockSub,
+    #[subdiagnostic]
+    pub let_else_sub: Option<IfExpressionLetSomeSub>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum IfExpressionMissingThenBlockSub {
+    #[help(parse_condition_possibly_unfinished)]
+    UnfinishedCondition(#[primary_span] Span),
+    #[help(parse_add_then_block)]
+    AddThenBlock(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ternary_operator)]
+#[help]
+pub struct TernaryOperator {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(parse_extra_if_in_let_else, applicability = "maybe-incorrect", code = "")]
+pub(crate) struct IfExpressionLetSomeSub {
+    #[primary_span]
+    pub if_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_if_expression_missing_condition)]
+pub(crate) struct IfExpressionMissingCondition {
+    #[primary_span]
+    #[label(parse_condition_label)]
+    pub if_span: Span,
+    #[label(parse_block_label)]
+    pub block_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_expression_found_let)]
+#[note]
+pub(crate) struct ExpectedExpressionFoundLet {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub reason: ForbiddenLetReason,
+    #[subdiagnostic]
+    pub missing_let: Option<MaybeMissingLet>,
+    #[subdiagnostic]
+    pub comparison: Option<MaybeComparison>,
+}
+
+#[derive(Subdiagnostic, Clone, Copy)]
+#[multipart_suggestion(
+    parse_maybe_missing_let,
+    applicability = "maybe-incorrect",
+    style = "verbose"
+)]
+pub(crate) struct MaybeMissingLet {
+    #[suggestion_part(code = "let ")]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic, Clone, Copy)]
+#[multipart_suggestion(
+    parse_maybe_comparison,
+    applicability = "maybe-incorrect",
+    style = "verbose"
+)]
+pub(crate) struct MaybeComparison {
+    #[suggestion_part(code = "=")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expect_eq_instead_of_eqeq)]
+pub(crate) struct ExpectedEqForLetExpr {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "maybe-incorrect", code = "=", style = "verbose")]
+    pub sugg_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_else_block)]
+pub(crate) struct ExpectedElseBlock {
+    #[primary_span]
+    pub first_tok_span: Span,
+    pub first_tok: String,
+    #[label]
+    pub else_span: Span,
+    #[suggestion(applicability = "maybe-incorrect", code = "if ")]
+    pub condition_start: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_struct_field)]
+pub(crate) struct ExpectedStructField {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub token: Token,
+    #[label(parse_ident_label)]
+    pub ident_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_outer_attribute_not_allowed_on_if_else)]
+pub(crate) struct OuterAttributeNotAllowedOnIfElse {
+    #[primary_span]
+    pub last: Span,
+
+    #[label(parse_branch_label)]
+    pub branch_span: Span,
+
+    #[label(parse_ctx_label)]
+    pub ctx_span: Span,
+    pub ctx: String,
+
+    #[suggestion(applicability = "machine-applicable", code = "")]
+    pub attributes: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_in_in_for_loop)]
+pub(crate) struct MissingInInForLoop {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: MissingInInForLoopSub,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MissingInInForLoopSub {
+    // Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
+    #[suggestion(
+        parse_use_in_not_of,
+        style = "short",
+        applicability = "maybe-incorrect",
+        code = "in"
+    )]
+    InNotOf(#[primary_span] Span),
+    #[suggestion(parse_add_in, style = "short", applicability = "maybe-incorrect", code = " in ")]
+    AddIn(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_expression_in_for_loop)]
+pub(crate) struct MissingExpressionInForLoop {
+    #[primary_span]
+    #[suggestion(
+        code = "/* expression */ ",
+        applicability = "has-placeholders",
+        style = "verbose"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_loop_else)]
+#[note]
+pub(crate) struct LoopElseNotSupported {
+    #[primary_span]
+    pub span: Span,
+    pub loop_kind: &'static str,
+    #[label(parse_loop_keyword)]
+    pub loop_kw: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_comma_after_match_arm)]
+pub(crate) struct MissingCommaAfterMatchArm {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = ",")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_catch_after_try)]
+#[help]
+pub(crate) struct CatchAfterTry {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_gen_fn)]
+#[help]
+pub(crate) struct GenFn {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_comma_after_base_struct)]
+#[note]
+pub(crate) struct CommaAfterBaseStruct {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "short", applicability = "machine-applicable", code = "")]
+    pub comma: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_eq_field_init)]
+pub(crate) struct EqFieldInit {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(applicability = "machine-applicable", code = ":")]
+    pub eq: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dotdotdot)]
+pub(crate) struct DotDotDot {
+    #[primary_span]
+    #[suggestion(parse_suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
+    #[suggestion(parse_suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_left_arrow_operator)]
+pub(crate) struct LeftArrowOperator {
+    #[primary_span]
+    #[suggestion(applicability = "maybe-incorrect", code = "< -")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_remove_let)]
+pub(crate) struct RemoveLet {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_use_eq_instead)]
+pub(crate) struct UseEqInstead {
+    #[primary_span]
+    #[suggestion(style = "short", applicability = "machine-applicable", code = "=")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_use_empty_block_not_semi)]
+pub(crate) struct UseEmptyBlockNotSemi {
+    #[primary_span]
+    #[suggestion(style = "hidden", applicability = "machine-applicable", code = "{{}}")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_comparison_interpreted_as_generic)]
+pub(crate) struct ComparisonInterpretedAsGeneric {
+    #[primary_span]
+    #[label(parse_label_comparison)]
+    pub comparison: Span,
+    pub r#type: Path,
+    #[label(parse_label_args)]
+    pub args: Span,
+    #[subdiagnostic]
+    pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_shift_interpreted_as_generic)]
+pub(crate) struct ShiftInterpretedAsGeneric {
+    #[primary_span]
+    #[label(parse_label_comparison)]
+    pub shift: Span,
+    pub r#type: Path,
+    #[label(parse_label_args)]
+    pub args: Span,
+    #[subdiagnostic]
+    pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
+    #[suggestion_part(code = "(")]
+    pub left: Span,
+    #[suggestion_part(code = ")")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_found_expr_would_be_stmt)]
+pub(crate) struct FoundExprWouldBeStmt {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub token: Token,
+    #[subdiagnostic]
+    pub suggestion: ExprParenthesesNeeded,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_leading_plus_not_supported)]
+pub(crate) struct LeadingPlusNotSupported {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[suggestion(
+        parse_suggestion_remove_plus,
+        style = "verbose",
+        code = "",
+        applicability = "machine-applicable"
+    )]
+    pub remove_plus: Option<Span>,
+    #[subdiagnostic]
+    pub add_parentheses: Option<ExprParenthesesNeeded>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_parentheses_with_struct_fields)]
+pub(crate) struct ParenthesesWithStructFields {
+    #[primary_span]
+    pub span: Span,
+    pub r#type: Path,
+    #[subdiagnostic]
+    pub braces_for_struct: BracesForStructLiteral,
+    #[subdiagnostic]
+    pub no_fields_for_fn: NoFieldsForFnCall,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion_braces_for_struct, applicability = "maybe-incorrect")]
+pub(crate) struct BracesForStructLiteral {
+    #[suggestion_part(code = " {{ ")]
+    pub first: Span,
+    #[suggestion_part(code = " }}")]
+    pub second: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
+pub(crate) struct NoFieldsForFnCall {
+    #[suggestion_part(code = "")]
+    pub fields: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_labeled_loop_in_break)]
+pub(crate) struct LabeledLoopInBreak {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: WrapInParentheses,
+}
+
+#[derive(Subdiagnostic)]
+
+pub(crate) enum WrapInParentheses {
+    #[multipart_suggestion(
+        parse_sugg_wrap_expression_in_parentheses,
+        applicability = "machine-applicable"
+    )]
+    Expression {
+        #[suggestion_part(code = "(")]
+        left: Span,
+        #[suggestion_part(code = ")")]
+        right: Span,
+    },
+    #[multipart_suggestion(
+        parse_sugg_wrap_macro_in_parentheses,
+        applicability = "machine-applicable"
+    )]
+    MacroArgs {
+        #[suggestion_part(code = "(")]
+        left: Span,
+        #[suggestion_part(code = ")")]
+        right: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_array_brackets_instead_of_braces)]
+pub(crate) struct ArrayBracketsInsteadOfSpaces {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: ArrayBracketsInsteadOfSpacesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
+    #[suggestion_part(code = "[")]
+    pub left: Span,
+    #[suggestion_part(code = "]")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_match_arm_body_without_braces)]
+pub(crate) struct MatchArmBodyWithoutBraces {
+    #[primary_span]
+    #[label(parse_label_statements)]
+    pub statements: Span,
+    #[label(parse_label_arrow)]
+    pub arrow: Span,
+    pub num_statements: usize,
+    #[subdiagnostic]
+    pub sub: MatchArmBodyWithoutBracesSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_extra_equals)]
+#[note]
+pub(crate) struct InclusiveRangeExtraEquals {
+    #[primary_span]
+    #[suggestion(
+        parse_suggestion_remove_eq,
+        style = "short",
+        code = "..=",
+        applicability = "maybe-incorrect"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_match_arrow)]
+pub(crate) struct InclusiveRangeMatchArrow {
+    #[primary_span]
+    pub arrow: Span,
+    #[label]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = " ", applicability = "machine-applicable")]
+    pub after_pat: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_no_end, code = E0586)]
+#[note]
+pub(crate) struct InclusiveRangeNoEnd {
+    #[primary_span]
+    #[suggestion(
+        parse_suggestion_open_range,
+        code = "..",
+        applicability = "machine-applicable",
+        style = "short"
+    )]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum MatchArmBodyWithoutBracesSugg {
+    #[multipart_suggestion(parse_suggestion_add_braces, applicability = "machine-applicable")]
+    AddBraces {
+        #[suggestion_part(code = "{{ ")]
+        left: Span,
+        #[suggestion_part(code = " }}")]
+        right: Span,
+    },
+    #[suggestion(
+        parse_suggestion_use_comma_not_semicolon,
+        code = ",",
+        applicability = "machine-applicable"
+    )]
+    UseComma {
+        #[primary_span]
+        semicolon: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_struct_literal_not_allowed_here)]
+pub(crate) struct StructLiteralNotAllowedHere {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: StructLiteralNotAllowedHereSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct StructLiteralNotAllowedHereSugg {
+    #[suggestion_part(code = "(")]
+    pub left: Span,
+    #[suggestion_part(code = ")")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_interpolated_expression)]
+pub(crate) struct InvalidInterpolatedExpression {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_literal_suffix_on_tuple_index)]
+pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub suffix: Symbol,
+    #[help(parse_tuple_exception_line_1)]
+    #[help(parse_tuple_exception_line_2)]
+    #[help(parse_tuple_exception_line_3)]
+    pub exception: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_non_string_abi_literal)]
+pub(crate) struct NonStringAbiLiteral {
+    #[primary_span]
+    #[suggestion(code = "\"C\"", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_mismatched_closing_delimiter)]
+pub(crate) struct MismatchedClosingDelimiter {
+    #[primary_span]
+    pub spans: Vec<Span>,
+    pub delimiter: String,
+    #[label(parse_label_unmatched)]
+    pub unmatched: Span,
+    #[label(parse_label_opening_candidate)]
+    pub opening_candidate: Option<Span>,
+    #[label(parse_label_unclosed)]
+    pub unclosed: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_visibility_restriction, code = E0704)]
+#[help]
+pub(crate) struct IncorrectVisibilityRestriction {
+    #[primary_span]
+    #[suggestion(code = "in {inner_str}", applicability = "machine-applicable")]
+    pub span: Span,
+    pub inner_str: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_assignment_else_not_allowed)]
+pub(crate) struct AssignmentElseNotAllowed {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_statement_after_outer_attr)]
+pub(crate) struct ExpectedStatementAfterOuterAttr {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_doc_comment_does_not_document_anything, code = E0585)]
+#[help]
+pub(crate) struct DocCommentDoesNotDocumentAnything {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = ",", applicability = "machine-applicable")]
+    pub missing_comma: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_let_mutually_exclusive)]
+pub(crate) struct ConstLetMutuallyExclusive {
+    #[primary_span]
+    #[suggestion(code = "const", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_expression_in_let_else)]
+pub(crate) struct InvalidExpressionInLetElse {
+    #[primary_span]
+    pub span: Span,
+    pub operator: &'static str,
+    #[subdiagnostic]
+    pub sugg: WrapInParentheses,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_curly_in_let_else)]
+pub(crate) struct InvalidCurlyInLetElse {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: WrapInParentheses,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_compound_assignment_expression_in_let)]
+#[help]
+pub(crate) struct CompoundAssignmentExpressionInLet {
+    #[primary_span]
+    #[suggestion(style = "short", code = "=", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_suffixed_literal_in_attribute)]
+#[help]
+pub(crate) struct SuffixedLiteralInAttribute {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_meta_item)]
+pub(crate) struct InvalidMetaItem {
+    #[primary_span]
+    pub span: Span,
+    pub token: Token,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_meta_item_unquoted_ident)]
+pub(crate) struct InvalidMetaItemUnquotedIdent {
+    #[primary_span]
+    pub span: Span,
+    pub token: Token,
+    #[subdiagnostic]
+    pub sugg: InvalidMetaItemSuggQuoteIdent,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct InvalidMetaItemSuggQuoteIdent {
+    #[suggestion_part(code = "\"")]
+    pub before: Span,
+    #[suggestion_part(code = "\"")]
+    pub after: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_sugg_escape_identifier,
+    style = "verbose",
+    applicability = "maybe-incorrect",
+    code = "r#"
+)]
+pub(crate) struct SuggEscapeIdentifier {
+    #[primary_span]
+    pub span: Span,
+    pub ident_name: String,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(parse_sugg_remove_comma, applicability = "machine-applicable", code = "")]
+pub(crate) struct SuggRemoveComma {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_sugg_add_let_for_stmt,
+    style = "verbose",
+    applicability = "maybe-incorrect",
+    code = "let "
+)]
+pub(crate) struct SuggAddMissingLetStmt {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ExpectedIdentifierFound {
+    #[label(parse_expected_identifier_found_reserved_identifier)]
+    ReservedIdentifier(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_keyword)]
+    Keyword(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_reserved_keyword)]
+    ReservedKeyword(#[primary_span] Span),
+    #[label(parse_expected_identifier_found_doc_comment)]
+    DocComment(#[primary_span] Span),
+    #[label(parse_expected_identifier)]
+    Other(#[primary_span] Span),
+}
+
+impl ExpectedIdentifierFound {
+    pub fn new(token_descr: Option<TokenDescription>, span: Span) -> Self {
+        (match token_descr {
+            Some(TokenDescription::ReservedIdentifier) => {
+                ExpectedIdentifierFound::ReservedIdentifier
+            }
+            Some(TokenDescription::Keyword) => ExpectedIdentifierFound::Keyword,
+            Some(TokenDescription::ReservedKeyword) => ExpectedIdentifierFound::ReservedKeyword,
+            Some(TokenDescription::DocComment) => ExpectedIdentifierFound::DocComment,
+            None => ExpectedIdentifierFound::Other,
+        })(span)
+    }
+}
+
+pub(crate) struct ExpectedIdentifier {
+    pub span: Span,
+    pub token: Token,
+    pub suggest_raw: Option<SuggEscapeIdentifier>,
+    pub suggest_remove_comma: Option<SuggRemoveComma>,
+    pub help_cannot_start_number: Option<HelpIdentifierStartsWithNumber>,
+}
+
+impl<'a> IntoDiagnostic<'a> for ExpectedIdentifier {
+    #[track_caller]
+    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a> {
+        let token_descr = TokenDescription::from_token(&self.token);
+
+        let mut diag = DiagnosticBuilder::new(
+            dcx,
+            level,
+            match token_descr {
+                Some(TokenDescription::ReservedIdentifier) => {
+                    fluent::parse_expected_identifier_found_reserved_identifier_str
+                }
+                Some(TokenDescription::Keyword) => {
+                    fluent::parse_expected_identifier_found_keyword_str
+                }
+                Some(TokenDescription::ReservedKeyword) => {
+                    fluent::parse_expected_identifier_found_reserved_keyword_str
+                }
+                Some(TokenDescription::DocComment) => {
+                    fluent::parse_expected_identifier_found_doc_comment_str
+                }
+                None => fluent::parse_expected_identifier_found_str,
+            },
+        );
+        diag.span(self.span);
+        diag.arg("token", self.token);
+
+        if let Some(sugg) = self.suggest_raw {
+            sugg.add_to_diagnostic(&mut diag);
+        }
+
+        ExpectedIdentifierFound::new(token_descr, self.span).add_to_diagnostic(&mut diag);
+
+        if let Some(sugg) = self.suggest_remove_comma {
+            sugg.add_to_diagnostic(&mut diag);
+        }
+
+        if let Some(help) = self.help_cannot_start_number {
+            help.add_to_diagnostic(&mut diag);
+        }
+
+        diag
+    }
+}
+
+#[derive(Subdiagnostic)]
+#[help(parse_invalid_identifier_with_leading_number)]
+pub(crate) struct HelpIdentifierStartsWithNumber {
+    #[primary_span]
+    pub num_span: Span,
+}
+
+pub(crate) struct ExpectedSemi {
+    pub span: Span,
+    pub token: Token,
+
+    pub unexpected_token_label: Option<Span>,
+    pub sugg: ExpectedSemiSugg,
+}
+
+impl<'a> IntoDiagnostic<'a> for ExpectedSemi {
+    #[track_caller]
+    fn into_diagnostic(self, dcx: &'a DiagCtxt, level: Level) -> DiagnosticBuilder<'a> {
+        let token_descr = TokenDescription::from_token(&self.token);
+
+        let mut diag = DiagnosticBuilder::new(
+            dcx,
+            level,
+            match token_descr {
+                Some(TokenDescription::ReservedIdentifier) => {
+                    fluent::parse_expected_semi_found_reserved_identifier_str
+                }
+                Some(TokenDescription::Keyword) => fluent::parse_expected_semi_found_keyword_str,
+                Some(TokenDescription::ReservedKeyword) => {
+                    fluent::parse_expected_semi_found_reserved_keyword_str
+                }
+                Some(TokenDescription::DocComment) => {
+                    fluent::parse_expected_semi_found_doc_comment_str
+                }
+                None => fluent::parse_expected_semi_found_str,
+            },
+        );
+        diag.span(self.span);
+        diag.arg("token", self.token);
+
+        if let Some(unexpected_token_label) = self.unexpected_token_label {
+            diag.span_label(unexpected_token_label, fluent::parse_label_unexpected_token);
+        }
+
+        self.sugg.add_to_diagnostic(&mut diag);
+
+        diag
+    }
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ExpectedSemiSugg {
+    #[suggestion(parse_sugg_change_this_to_semi, code = ";", applicability = "machine-applicable")]
+    ChangeToSemi(#[primary_span] Span),
+    #[suggestion(
+        parse_sugg_add_semi,
+        style = "short",
+        code = ";",
+        applicability = "machine-applicable"
+    )]
+    AddSemi(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_struct_literal_body_without_path)]
+pub(crate) struct StructLiteralBodyWithoutPath {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: StructLiteralBodyWithoutPathSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "has-placeholders")]
+pub(crate) struct StructLiteralBodyWithoutPathSugg {
+    #[suggestion_part(code = "{{ SomeStruct ")]
+    pub before: Span,
+    #[suggestion_part(code = " }}")]
+    pub after: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_struct_literal_needing_parens)]
+pub(crate) struct StructLiteralNeedingParens {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: StructLiteralNeedingParensSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct StructLiteralNeedingParensSugg {
+    #[suggestion_part(code = "(")]
+    pub before: Span,
+    #[suggestion_part(code = ")")]
+    pub after: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unmatched_angle_brackets)]
+pub(crate) struct UnmatchedAngleBrackets {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    pub num_extra_brackets: usize,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_generic_parameters_without_angle_brackets)]
+pub(crate) struct GenericParamsWithoutAngleBrackets {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: GenericParamsWithoutAngleBracketsSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
+    #[suggestion_part(code = "<")]
+    pub left: Span,
+    #[suggestion_part(code = ">")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_comparison_operators_cannot_be_chained)]
+pub(crate) struct ComparisonOperatorsCannotBeChained {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[suggestion(
+        parse_sugg_turbofish_syntax,
+        style = "verbose",
+        code = "::",
+        applicability = "maybe-incorrect"
+    )]
+    pub suggest_turbofish: Option<Span>,
+    #[help(parse_sugg_turbofish_syntax)]
+    #[help(parse_sugg_parentheses_for_function_args)]
+    pub help_turbofish: Option<()>,
+    #[subdiagnostic]
+    pub chaining_sugg: Option<ComparisonOperatorsCannotBeChainedSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
+    #[suggestion(
+        parse_sugg_split_comparison,
+        style = "verbose",
+        code = " && {middle_term}",
+        applicability = "maybe-incorrect"
+    )]
+    SplitComparison {
+        #[primary_span]
+        span: Span,
+        middle_term: String,
+    },
+    #[multipart_suggestion(parse_sugg_parenthesize, applicability = "maybe-incorrect")]
+    Parenthesize {
+        #[suggestion_part(code = "(")]
+        left: Span,
+        #[suggestion_part(code = ")")]
+        right: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_question_mark_in_type)]
+pub(crate) struct QuestionMarkInType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: QuestionMarkInTypeSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct QuestionMarkInTypeSugg {
+    #[suggestion_part(code = "Option<")]
+    pub left: Span,
+    #[suggestion_part(code = ">")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_parentheses_in_for_head)]
+pub(crate) struct ParenthesesInForHead {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: ParenthesesInForHeadSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ParenthesesInForHeadSugg {
+    #[suggestion_part(code = " ")]
+    pub left: Span,
+    #[suggestion_part(code = " ")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_parentheses_in_match_arm_pattern)]
+pub(crate) struct ParenthesesInMatchPat {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: ParenthesesInMatchPatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ParenthesesInMatchPatSugg {
+    #[suggestion_part(code = "")]
+    pub left: Span,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_doc_comment_on_param_type)]
+pub(crate) struct DocCommentOnParamType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attribute_on_param_type)]
+pub(crate) struct AttributeOnParamType {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_pattern_method_param_without_body, code = E0642)]
+pub(crate) struct PatternMethodParamWithoutBody {
+    #[primary_span]
+    #[suggestion(code = "_", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_self_param_not_first)]
+pub(crate) struct SelfParamNotFirst {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_generic_without_braces)]
+pub(crate) struct ConstGenericWithoutBraces {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: ConstGenericWithoutBracesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ConstGenericWithoutBracesSugg {
+    #[suggestion_part(code = "{{ ")]
+    pub left: Span,
+    #[suggestion_part(code = " }}")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_const_param_declaration)]
+pub(crate) struct UnexpectedConstParamDeclaration {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<UnexpectedConstParamDeclarationSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum UnexpectedConstParamDeclarationSugg {
+    #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+    AddParam {
+        #[suggestion_part(code = "<{snippet}>")]
+        impl_generics: Span,
+        #[suggestion_part(code = "{ident}")]
+        incorrect_decl: Span,
+        snippet: String,
+        ident: String,
+    },
+    #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+    AppendParam {
+        #[suggestion_part(code = ", {snippet}")]
+        impl_generics_end: Span,
+        #[suggestion_part(code = "{ident}")]
+        incorrect_decl: Span,
+        snippet: String,
+        ident: String,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_const_in_generic_param)]
+pub(crate) struct UnexpectedConstInGenericParam {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = "", applicability = "maybe-incorrect")]
+    pub to_remove: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_move_order_incorrect)]
+pub(crate) struct AsyncMoveOrderIncorrect {
+    #[primary_span]
+    #[suggestion(style = "verbose", code = "async move", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_double_colon_in_bound)]
+pub(crate) struct DoubleColonInBound {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = ": ", applicability = "machine-applicable")]
+    pub between: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_ptr_with_generics)]
+pub(crate) struct FnPtrWithGenerics {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<FnPtrWithGenericsSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct FnPtrWithGenericsSugg {
+    #[suggestion_part(code = "{snippet}")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+    pub arity: usize,
+    pub for_param_list_exists: bool,
+}
+
+pub(crate) struct FnTraitMissingParen {
+    pub span: Span,
+    pub machine_applicable: bool,
+}
+
+impl AddToDiagnostic for FnTraitMissingParen {
+    fn add_to_diagnostic_with<F: SubdiagnosticMessageOp>(self, diag: &mut Diagnostic, _: F) {
+        diag.span_label(self.span, crate::fluent_generated::parse_fn_trait_missing_paren);
+        let applicability = if self.machine_applicable {
+            Applicability::MachineApplicable
+        } else {
+            Applicability::MaybeIncorrect
+        };
+        diag.span_suggestion_short(
+            self.span.shrink_to_hi(),
+            crate::fluent_generated::parse_add_paren,
+            "()",
+            applicability,
+        );
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_if_with_if)]
+pub(crate) struct UnexpectedIfWithIf(
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = " ", style = "verbose")]
+    pub Span,
+);
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_fn_typo_with_impl)]
+pub(crate) struct FnTypoWithImpl {
+    #[primary_span]
+    #[suggestion(applicability = "maybe-incorrect", code = "impl", style = "verbose")]
+    pub fn_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_fn_path_found_fn_keyword)]
+pub(crate) struct ExpectedFnPathFoundFnKeyword {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "Fn", style = "verbose")]
+    pub fn_token_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_path_single_colon)]
+pub(crate) struct PathSingleColon {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "::")]
+    pub span: Span,
+
+    #[note(parse_type_ascription_removed)]
+    pub type_ascription: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_colon_as_semi)]
+pub(crate) struct ColonAsSemi {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = ";")]
+    pub span: Span,
+
+    #[note(parse_type_ascription_removed)]
+    pub type_ascription: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_clause_before_tuple_struct_body)]
+pub(crate) struct WhereClauseBeforeTupleStructBody {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[label(parse_name_label)]
+    pub name: Span,
+    #[label(parse_body_label)]
+    pub body: Span,
+    #[subdiagnostic]
+    pub sugg: Option<WhereClauseBeforeTupleStructBodySugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WhereClauseBeforeTupleStructBodySugg {
+    #[suggestion_part(code = "{snippet}")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_fn_in_2015, code = E0670)]
+pub(crate) struct AsyncFnIn2015 {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub help: HelpUseLatestEdition,
+}
+
+#[derive(Subdiagnostic)]
+#[label(parse_async_block_in_2015)]
+pub(crate) struct AsyncBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_move_block_in_2015)]
+pub(crate) struct AsyncMoveBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_bound_modifier_in_2015)]
+pub(crate) struct AsyncBoundModifierIn2015 {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub help: HelpUseLatestEdition,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_self_argument_pointer)]
+pub(crate) struct SelfArgumentPointer {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_dot)]
+pub struct UnexpectedTokenAfterDot<'a> {
+    #[primary_span]
+    pub span: Span,
+    pub actual: Cow<'a, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_visibility_not_followed_by_item)]
+#[help]
+pub(crate) struct VisibilityNotFollowedByItem {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub vis: Visibility,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_default_not_followed_by_item)]
+#[note]
+pub(crate) struct DefaultNotFollowedByItem {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum MissingKeywordForItemDefinition {
+    #[diag(parse_missing_struct_for_struct_definition)]
+    Struct {
+        #[primary_span]
+        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " struct ")]
+        span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_for_function_definition)]
+    Function {
+        #[primary_span]
+        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
+        span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_for_method_definition)]
+    Method {
+        #[primary_span]
+        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
+        span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_ambiguous_missing_keyword_for_item_definition)]
+    Ambiguous {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        subdiag: Option<AmbiguousMissingKwForItemSub>,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum AmbiguousMissingKwForItemSub {
+    #[suggestion(parse_suggestion, applicability = "maybe-incorrect", code = "{snippet}!")]
+    SuggestMacro {
+        #[primary_span]
+        span: Span,
+        snippet: String,
+    },
+    #[help(parse_help)]
+    HelpMacro,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_fn_params)]
+pub(crate) struct MissingFnParams {
+    #[primary_span]
+    #[suggestion(code = "()", applicability = "machine-applicable", style = "short")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_trait_in_trait_impl)]
+pub(crate) struct MissingTraitInTraitImpl {
+    #[primary_span]
+    #[suggestion(parse_suggestion_add_trait, code = " Trait ", applicability = "has-placeholders")]
+    pub span: Span,
+    #[suggestion(parse_suggestion_remove_for, code = "", applicability = "maybe-incorrect")]
+    pub for_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_for_in_trait_impl)]
+pub(crate) struct MissingForInTraitImpl {
+    #[primary_span]
+    #[suggestion(style = "short", code = " for ", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_trait_in_trait_impl_found_type)]
+pub(crate) struct ExpectedTraitInTraitImplFoundType {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extra_impl_keyword_in_trait_impl)]
+pub(crate) struct ExtraImplKeywordInTraitImpl {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    pub extra_impl_kw: Span,
+    #[note]
+    pub impl_trait_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bounds_not_allowed_on_trait_aliases)]
+pub(crate) struct BoundsNotAllowedOnTraitAliases {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_auto)]
+pub(crate) struct TraitAliasCannotBeAuto {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_auto)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_unsafe)]
+pub(crate) struct TraitAliasCannotBeUnsafe {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_unsafe)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_associated_static_item_not_allowed)]
+pub(crate) struct AssociatedStaticItemNotAllowed {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_crate_name_with_dashes)]
+pub(crate) struct ExternCrateNameWithDashes {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: ExternCrateNameWithDashesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ExternCrateNameWithDashesSugg {
+    #[suggestion_part(code = "_")]
+    pub dashes: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_item_cannot_be_const)]
+#[note]
+pub(crate) struct ExternItemCannotBeConst {
+    #[primary_span]
+    pub ident_span: Span,
+    #[suggestion(code = "static ", applicability = "machine-applicable")]
+    pub const_span: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_global_cannot_be_mutable)]
+pub(crate) struct ConstGlobalCannotBeMutable {
+    #[primary_span]
+    #[label]
+    pub ident_span: Span,
+    #[suggestion(code = "static", applicability = "maybe-incorrect")]
+    pub const_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_const_type)]
+pub(crate) struct MissingConstType {
+    #[primary_span]
+    #[suggestion(code = "{colon} <type>", applicability = "has-placeholders")]
+    pub span: Span,
+
+    pub kind: &'static str,
+    pub colon: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_struct_mutually_exclusive)]
+pub(crate) struct EnumStructMutuallyExclusive {
+    #[primary_span]
+    #[suggestion(code = "enum", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedTokenAfterStructName {
+    #[diag(parse_unexpected_token_after_struct_name_found_reserved_identifier)]
+    ReservedIdentifier {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_keyword)]
+    Keyword {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_reserved_keyword)]
+    ReservedKeyword {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_doc_comment)]
+    DocComment {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_other)]
+    Other {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+}
+
+impl UnexpectedTokenAfterStructName {
+    pub fn new(span: Span, token: Token) -> Self {
+        match TokenDescription::from_token(&token) {
+            Some(TokenDescription::ReservedIdentifier) => Self::ReservedIdentifier { span, token },
+            Some(TokenDescription::Keyword) => Self::Keyword { span, token },
+            Some(TokenDescription::ReservedKeyword) => Self::ReservedKeyword { span, token },
+            Some(TokenDescription::DocComment) => Self::DocComment { span, token },
+            None => Self::Other { span, token },
+        }
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_self_in_generic_parameters)]
+#[note]
+pub(crate) struct UnexpectedSelfInGenericParameters {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_default_value_for_lifetime_in_generic_parameters)]
+pub(crate) struct UnexpectedDefaultValueForLifetimeInGenericParameters {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_multiple_where_clauses)]
+pub(crate) struct MultipleWhereClauses {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub previous: Span,
+    #[suggestion(style = "verbose", code = ",", applicability = "maybe-incorrect")]
+    pub between: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedNonterminal {
+    #[diag(parse_nonterminal_expected_item_keyword)]
+    Item(#[primary_span] Span),
+    #[diag(parse_nonterminal_expected_statement)]
+    Statement(#[primary_span] Span),
+    #[diag(parse_nonterminal_expected_ident)]
+    Ident {
+        #[primary_span]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_nonterminal_expected_lifetime)]
+    Lifetime {
+        #[primary_span]
+        span: Span,
+        token: Token,
+    },
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowed {
+    #[diag(parse_or_pattern_not_allowed_in_let_binding)]
+    LetBinding {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        sub: Option<TopLevelOrPatternNotAllowedSugg>,
+    },
+    #[diag(parse_or_pattern_not_allowed_in_fn_parameters)]
+    FunctionParameter {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        sub: Option<TopLevelOrPatternNotAllowedSugg>,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cannot_be_raw_ident)]
+pub struct CannotBeRawIdent {
+    #[primary_span]
+    pub span: Span,
+    pub ident: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cr_doc_comment)]
+pub struct CrDocComment {
+    #[primary_span]
+    pub span: Span,
+    pub block: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_no_digits_literal, code = E0768)]
+pub struct NoDigitsLiteral {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_digit_literal)]
+pub struct InvalidDigitLiteral {
+    #[primary_span]
+    pub span: Span,
+    pub base: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_empty_exponent_float)]
+pub struct EmptyExponentFloat {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_float_literal_unsupported_base)]
+pub struct FloatLiteralUnsupportedBase {
+    #[primary_span]
+    pub span: Span,
+    pub base: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_prefix)]
+#[note]
+pub struct UnknownPrefix<'a> {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub prefix: &'a str,
+    #[subdiagnostic]
+    pub sugg: Option<UnknownPrefixSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[note(parse_macro_expands_to_adt_field)]
+pub struct MacroExpandsToAdtField<'a> {
+    pub adt_ty: &'a str,
+}
+
+#[derive(Subdiagnostic)]
+pub enum UnknownPrefixSugg {
+    #[suggestion(
+        parse_suggestion_br,
+        code = "br",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    UseBr(#[primary_span] Span),
+    #[suggestion(
+        parse_suggestion_whitespace,
+        code = " ",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    Whitespace(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_too_many_hashes)]
+pub struct TooManyHashes {
+    #[primary_span]
+    pub span: Span,
+    pub num: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_start_of_token)]
+pub struct UnknownTokenStart {
+    #[primary_span]
+    pub span: Span,
+    pub escaped: String,
+    #[subdiagnostic]
+    pub sugg: Option<TokenSubstitution>,
+    #[subdiagnostic]
+    pub null: Option<UnknownTokenNull>,
+    #[subdiagnostic]
+    pub repeat: Option<UnknownTokenRepeat>,
+}
+
+#[derive(Subdiagnostic)]
+pub enum TokenSubstitution {
+    #[suggestion(parse_sugg_quotes, code = "{suggestion}", applicability = "maybe-incorrect")]
+    DirectedQuotes {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+        ascii_str: &'static str,
+        ascii_name: &'static str,
+    },
+    #[suggestion(parse_sugg_other, code = "{suggestion}", applicability = "maybe-incorrect")]
+    Other {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+        ch: String,
+        u_name: &'static str,
+        ascii_str: &'static str,
+        ascii_name: &'static str,
+    },
+}
+
+#[derive(Subdiagnostic)]
+#[note(parse_note_repeats)]
+pub struct UnknownTokenRepeat {
+    pub repeats: usize,
+}
+
+#[derive(Subdiagnostic)]
+#[help(parse_help_null)]
+pub struct UnknownTokenNull;
+
+#[derive(Diagnostic)]
+pub enum UnescapeError {
+    #[diag(parse_invalid_unicode_escape)]
+    #[help]
+    InvalidUnicodeEscape {
+        #[primary_span]
+        #[label]
+        span: Span,
+        surrogate: bool,
+    },
+    #[diag(parse_escape_only_char)]
+    EscapeOnlyChar {
+        #[primary_span]
+        span: Span,
+        #[suggestion(parse_escape, applicability = "machine-applicable", code = "{escaped_sugg}")]
+        char_span: Span,
+        escaped_sugg: String,
+        escaped_msg: String,
+        byte: bool,
+    },
+    #[diag(parse_bare_cr)]
+    BareCr {
+        #[primary_span]
+        #[suggestion(parse_escape, applicability = "machine-applicable", code = "\\r")]
+        span: Span,
+        double_quotes: bool,
+    },
+    #[diag(parse_bare_cr_in_raw_string)]
+    BareCrRawString(#[primary_span] Span),
+    #[diag(parse_too_short_hex_escape)]
+    TooShortHexEscape(#[primary_span] Span),
+    #[diag(parse_invalid_char_in_escape)]
+    InvalidCharInEscape {
+        #[primary_span]
+        #[label]
+        span: Span,
+        is_hex: bool,
+        ch: String,
+    },
+    #[diag(parse_out_of_range_hex_escape)]
+    OutOfRangeHexEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_leading_underscore_unicode_escape)]
+    LeadingUnderscoreUnicodeEscape {
+        #[primary_span]
+        #[label(parse_leading_underscore_unicode_escape_label)]
+        span: Span,
+        ch: String,
+    },
+    #[diag(parse_overlong_unicode_escape)]
+    OverlongUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_unclosed_unicode_escape)]
+    UnclosedUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+        #[suggestion(
+            parse_terminate,
+            code = "}}",
+            applicability = "maybe-incorrect",
+            style = "verbose"
+        )]
+        Span,
+    ),
+    #[diag(parse_no_brace_unicode_escape)]
+    NoBraceInUnicodeEscape {
+        #[primary_span]
+        span: Span,
+        #[label]
+        label: Option<Span>,
+        #[subdiagnostic]
+        sub: NoBraceUnicodeSub,
+    },
+    #[diag(parse_unicode_escape_in_byte)]
+    #[help]
+    UnicodeEscapeInByte(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_empty_unicode_escape)]
+    EmptyUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_zero_chars)]
+    ZeroChars(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_lone_slash)]
+    LoneSlash(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_unskipped_whitespace)]
+    UnskippedWhitespace {
+        #[primary_span]
+        span: Span,
+        #[label]
+        char_span: Span,
+        ch: String,
+    },
+    #[diag(parse_multiple_skipped_lines)]
+    MultipleSkippedLinesWarning(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_more_than_one_char)]
+    MoreThanOneChar {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        note: Option<MoreThanOneCharNote>,
+        #[subdiagnostic]
+        suggestion: MoreThanOneCharSugg,
+    },
+    #[diag(parse_nul_in_c_str)]
+    NulInCStr {
+        #[primary_span]
+        span: Span,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub enum MoreThanOneCharSugg {
+    #[suggestion(
+        parse_consider_normalized,
+        code = "{normalized}",
+        applicability = "machine-applicable"
+    )]
+    NormalizedForm {
+        #[primary_span]
+        span: Span,
+        ch: String,
+        normalized: String,
+    },
+    #[suggestion(parse_remove_non, code = "{ch}", applicability = "maybe-incorrect")]
+    RemoveNonPrinting {
+        #[primary_span]
+        span: Span,
+        ch: String,
+    },
+    #[suggestion(parse_use_double_quotes, code = "{sugg}", applicability = "machine-applicable")]
+    Quotes {
+        #[primary_span]
+        span: Span,
+        is_byte: bool,
+        sugg: String,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub enum MoreThanOneCharNote {
+    #[note(parse_followed_by)]
+    AllCombining {
+        #[primary_span]
+        span: Span,
+        chr: String,
+        len: usize,
+        escaped_marks: String,
+    },
+    #[note(parse_non_printing)]
+    NonPrinting {
+        #[primary_span]
+        span: Span,
+        escaped: String,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub enum NoBraceUnicodeSub {
+    #[suggestion(parse_use_braces, code = "{suggestion}", applicability = "maybe-incorrect")]
+    Suggestion {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+    },
+    #[help(parse_format_of_unicode)]
+    Help,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowedSugg {
+    #[suggestion(
+        parse_sugg_remove_leading_vert_in_pattern,
+        code = "{pat}",
+        applicability = "machine-applicable"
+    )]
+    RemoveLeadingVert {
+        #[primary_span]
+        span: Span,
+        pat: String,
+    },
+    #[suggestion(
+        parse_sugg_wrap_pattern_in_parens,
+        code = "({pat})",
+        applicability = "machine-applicable"
+    )]
+    WrapInParens {
+        #[primary_span]
+        span: Span,
+        pat: String,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_before_function_parameter)]
+#[note(parse_note_pattern_alternatives_use_single_vert)]
+pub(crate) struct UnexpectedVertVertBeforeFunctionParam {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_in_pattern)]
+pub(crate) struct UnexpectedVertVertInPattern {
+    #[primary_span]
+    #[suggestion(code = "|", applicability = "machine-applicable")]
+    pub span: Span,
+    #[label(parse_label_while_parsing_or_pattern_here)]
+    pub start: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trailing_vert_not_allowed)]
+pub(crate) struct TrailingVertNotAllowed {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    #[label(parse_label_while_parsing_or_pattern_here)]
+    pub start: Option<Span>,
+    pub token: Token,
+    #[note(parse_note_pattern_alternatives_use_single_vert)]
+    pub note_double_vert: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dotdotdot_rest_pattern)]
+pub(crate) struct DotDotDotRestPattern {
+    #[primary_span]
+    #[suggestion(style = "short", code = "..", applicability = "machine-applicable")]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_pattern_on_wrong_side_of_at)]
+pub(crate) struct PatternOnWrongSideOfAt {
+    #[primary_span]
+    #[suggestion(code = "{whole_pat}", applicability = "machine-applicable")]
+    pub whole_span: Span,
+    pub whole_pat: String,
+    #[label(parse_label_pattern)]
+    pub pattern: Span,
+    #[label(parse_label_binding)]
+    pub binding: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_binding_left_of_at)]
+#[note]
+pub(crate) struct ExpectedBindingLeftOfAt {
+    #[primary_span]
+    pub whole_span: Span,
+    #[label(parse_label_lhs)]
+    pub lhs: Span,
+    #[label(parse_label_rhs)]
+    pub rhs: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ambiguous_range_pattern)]
+pub(crate) struct AmbiguousRangePattern {
+    #[primary_span]
+    #[suggestion(code = "({pat})", applicability = "maybe-incorrect")]
+    pub span: Span,
+    pub pat: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_lifetime_in_pattern)]
+pub(crate) struct UnexpectedLifetimeInPattern {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    pub symbol: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ref_mut_order_incorrect)]
+pub(crate) struct RefMutOrderIncorrect {
+    #[primary_span]
+    #[suggestion(code = "ref mut", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum InvalidMutInPattern {
+    #[diag(parse_mut_on_nested_ident_pattern)]
+    #[note(parse_note_mut_pattern_usage)]
+    NestedIdent {
+        #[primary_span]
+        #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+        span: Span,
+        pat: String,
+    },
+    #[diag(parse_mut_on_non_ident_pattern)]
+    #[note(parse_note_mut_pattern_usage)]
+    NonIdent {
+        #[primary_span]
+        #[suggestion(code = "", applicability = "machine-applicable")]
+        span: Span,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_repeated_mut_in_pattern)]
+pub(crate) struct RepeatedMutInPattern {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_range_to_pattern_not_allowed)]
+pub(crate) struct DotDotDotRangeToPatternNotAllowed {
+    #[primary_span]
+    #[suggestion(style = "short", code = "..=", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_pattern_instead_of_identifier)]
+pub(crate) struct EnumPatternInsteadOfIdentifier {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_for_remaining_fields)]
+pub(crate) struct DotDotDotForRemainingFields {
+    #[primary_span]
+    #[suggestion(code = "..", style = "verbose", applicability = "machine-applicable")]
+    pub span: Span,
+    pub token_str: Cow<'static, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_comma_after_pattern_field)]
+pub(crate) struct ExpectedCommaAfterPatternField {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_expr_in_pat)]
+pub(crate) struct UnexpectedExpressionInPattern {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    /// Was a `RangePatternBound` expected?
+    pub is_bound: bool,
+    /// Was the unexpected expression a `MethodCallExpression`?
+    pub is_method_call: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_paren_in_range_pat)]
+pub(crate) struct UnexpectedParenInRangePat {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: UnexpectedParenInRangePatSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_unexpected_paren_in_range_pat_sugg,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct UnexpectedParenInRangePatSugg {
+    #[suggestion_part(code = "")]
+    pub start_span: Span,
+    #[suggestion_part(code = "")]
+    pub end_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_return_types_use_thin_arrow)]
+pub(crate) struct ReturnTypesUseThinArrow {
+    #[primary_span]
+    #[suggestion(style = "short", code = "->", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_need_plus_after_trait_object_lifetime)]
+pub(crate) struct NeedPlusAfterTraitObjectLifetime {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_mut_or_const_in_raw_pointer_type)]
+pub(crate) struct ExpectedMutOrConstInRawPointerType {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code("mut ", "const "), applicability = "has-placeholders")]
+    pub after_asterisk: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_after_mut)]
+pub(crate) struct LifetimeAfterMut {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "&{snippet} mut", applicability = "maybe-incorrect")]
+    pub suggest_lifetime: Option<Span>,
+    pub snippet: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dyn_after_mut)]
+pub(crate) struct DynAfterMut {
+    #[primary_span]
+    #[suggestion(code = "&mut dyn", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_const)]
+pub(crate) struct FnPointerCannotBeConst {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    #[label]
+    pub qualifier: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_async)]
+pub(crate) struct FnPointerCannotBeAsync {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    #[label]
+    pub qualifier: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_nested_c_variadic_type, code = E0743)]
+pub(crate) struct NestedCVariadicType {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_dyn_keyword)]
+#[help]
+pub(crate) struct InvalidDynKeyword {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Subdiagnostic)]
+pub enum HelpUseLatestEdition {
+    #[help(parse_help_set_edition_cargo)]
+    #[note(parse_note_edition_guide)]
+    Cargo { edition: Edition },
+    #[help(parse_help_set_edition_standalone)]
+    #[note(parse_note_edition_guide)]
+    Standalone { edition: Edition },
+}
+
+impl HelpUseLatestEdition {
+    pub fn new() -> Self {
+        let edition = LATEST_STABLE_EDITION;
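+        // Cargo sets the `CARGO` environment variable for the processes it spawns, so its
+        // presence means rustc was most likely invoked through Cargo.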
+        if std::env::var_os("CARGO").is_some() {
+            Self::Cargo { edition }
+        } else {
+            Self::Standalone { edition }
+        }
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_box_syntax_removed)]
+pub struct BoxSyntaxRemoved<'a> {
+    #[primary_span]
+    #[suggestion(
+        code = "Box::new({code})",
+        applicability = "machine-applicable",
+        style = "verbose"
+    )]
+    pub span: Span,
+    pub code: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_return_type_notation_output)]
+pub(crate) struct BadReturnTypeNotationOutput {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_return_type_notation_dotdot)]
+pub(crate) struct BadReturnTypeNotationDotDot {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_assoc_type_bounds)]
+pub(crate) struct BadAssocTypeBounds {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attr_after_generic)]
+pub(crate) struct AttrAfterGeneric {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_attr_without_generics)]
+pub(crate) struct AttrWithoutGenerics {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_generics)]
+pub(crate) struct WhereOnGenerics {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_generics_in_path)]
+pub(crate) struct GenericsInPath {
+    #[primary_span]
+    pub span: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_assoc_lifetime)]
+#[help]
+pub(crate) struct AssocLifetime {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub lifetime: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_modifier_lifetime)]
+pub(crate) struct ModifierLifetime {
+    #[primary_span]
+    #[suggestion(style = "tool-only", applicability = "maybe-incorrect", code = "")]
+    pub span: Span,
+    pub modifier: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_parenthesized_lifetime)]
+pub(crate) struct ParenthesizedLifetime {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(style = "short", applicability = "machine-applicable", code = "{snippet}")]
+    pub sugg: Option<Span>,
+    pub snippet: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_underscore_literal_suffix)]
+pub(crate) struct UnderscoreLiteralSuffix {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expect_label_found_ident)]
+pub(crate) struct ExpectedLabelFoundIdent {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "'", applicability = "machine-applicable", style = "short")]
+    pub start: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inappropriate_default)]
+#[note]
+pub(crate) struct InappropriateDefault {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub article: &'static str,
+    pub descr: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_recover_import_as_use)]
+pub(crate) struct RecoverImportAsUse {
+    #[primary_span]
+    #[suggestion(code = "use", applicability = "machine-applicable", style = "short")]
+    pub span: Span,
+    pub token_name: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_single_colon_import_path)]
+#[note]
+pub(crate) struct SingleColonImportPath {
+    #[primary_span]
+    #[suggestion(code = "::", applicability = "machine-applicable", style = "short")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bad_item_kind)]
+#[help]
+pub(crate) struct BadItemKind {
+    #[primary_span]
+    pub span: Span,
+    pub descr: &'static str,
+    pub ctx: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_single_colon_struct_type)]
+pub(crate) struct SingleColonStructType {
+    #[primary_span]
+    #[suggestion(code = "::", applicability = "maybe-incorrect", style = "verbose")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_equals_struct_default)]
+pub(crate) struct EqualsStructDefault {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_rules_missing_bang)]
+pub(crate) struct MacroRulesMissingBang {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "!", applicability = "machine-applicable", style = "verbose")]
+    pub hi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_name_remove_bang)]
+pub(crate) struct MacroNameRemoveBang {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_rules_visibility)]
+pub(crate) struct MacroRulesVisibility<'a> {
+    #[primary_span]
+    #[suggestion(code = "#[macro_export]", applicability = "maybe-incorrect")]
+    pub span: Span,
+    pub vis: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_macro_invocation_visibility)]
+#[help]
+pub(crate) struct MacroInvocationVisibility<'a> {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    pub vis: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_nested_adt)]
+pub(crate) struct NestedAdt<'a> {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    pub item: Span,
+    pub keyword: &'a str,
+    pub kw_str: Cow<'a, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_function_body_equals_expr)]
+pub(crate) struct FunctionBodyEqualsExpr {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: FunctionBodyEqualsExprSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct FunctionBodyEqualsExprSugg {
+    #[suggestion_part(code = "{{")]
+    pub eq: Span,
+    #[suggestion_part(code = " }}")]
+    pub semi: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_box_not_pat)]
+pub(crate) struct BoxNotPat {
+    #[primary_span]
+    pub span: Span,
+    #[note]
+    pub kw: Span,
+    #[suggestion(code = "r#", applicability = "maybe-incorrect", style = "verbose")]
+    pub lo: Span,
+    pub descr: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unmatched_angle)]
+pub(crate) struct UnmatchedAngle {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    pub plural: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_plus_in_bounds)]
+pub(crate) struct MissingPlusBounds {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = " +", applicability = "maybe-incorrect", style = "verbose")]
+    pub hi: Span,
+    pub sym: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_incorrect_parens_trait_bounds)]
+pub(crate) struct IncorrectParensTraitBounds {
+    #[primary_span]
+    pub span: Vec<Span>,
+    #[subdiagnostic]
+    pub sugg: IncorrectParensTraitBoundsSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    parse_incorrect_parens_trait_bounds_sugg,
+    applicability = "machine-applicable"
+)]
+pub(crate) struct IncorrectParensTraitBoundsSugg {
+    #[suggestion_part(code = " ")]
+    pub wrong_span: Span,
+    #[suggestion_part(code = "(")]
+    pub new_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_kw_bad_case)]
+pub(crate) struct KwBadCase<'a> {
+    #[primary_span]
+    #[suggestion(code = "{kw}", applicability = "machine-applicable")]
+    pub span: Span,
+    pub kw: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_meta_bad_delim)]
+pub(crate) struct MetaBadDelim {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: MetaBadDelimSugg,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cfg_attr_bad_delim)]
+pub(crate) struct CfgAttrBadDelim {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: MetaBadDelimSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_meta_bad_delim_suggestion, applicability = "machine-applicable")]
+pub(crate) struct MetaBadDelimSugg {
+    #[suggestion_part(code = "(")]
+    pub open: Span,
+    #[suggestion_part(code = ")")]
+    pub close: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_malformed_cfg_attr)]
+#[note]
+pub(crate) struct MalformedCfgAttr {
+    #[primary_span]
+    #[suggestion(code = "{sugg}")]
+    pub span: Span,
+    pub sugg: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_builtin_construct)]
+pub(crate) struct UnknownBuiltinConstruct {
+    #[primary_span]
+    pub span: Span,
+    pub name: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_builtin_ident)]
+pub(crate) struct ExpectedBuiltinIdent {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_static_with_generics)]
+pub(crate) struct StaticWithGenerics {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_clause_before_const_body)]
+pub(crate) struct WhereClauseBeforeConstBody {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[label(parse_name_label)]
+    pub name: Span,
+    #[label(parse_body_label)]
+    pub body: Span,
+    #[subdiagnostic]
+    pub sugg: Option<WhereClauseBeforeConstBodySugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WhereClauseBeforeConstBodySugg {
+    #[suggestion_part(code = "= {snippet} ")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_generic_args_in_pat_require_turbofish_syntax)]
+pub(crate) struct GenericArgsInPatRequireTurbofishSyntax {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(
+        parse_sugg_turbofish_syntax,
+        style = "verbose",
+        code = "::",
+        applicability = "maybe-incorrect"
+    )]
+    pub suggest_turbofish: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_transpose_dyn_or_impl)]
+pub(crate) struct TransposeDynOrImpl<'a> {
+    #[primary_span]
+    pub span: Span,
+    pub kw: &'a str,
+    #[subdiagnostic]
+    pub sugg: TransposeDynOrImplSugg<'a>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct TransposeDynOrImplSugg<'a> {
+    #[suggestion_part(code = "")]
+    pub removal_span: Span,
+    #[suggestion_part(code = "{kw} ")]
+    pub insertion_span: Span,
+    pub kw: &'a str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_array_index_offset_of)]
+pub(crate) struct ArrayIndexInOffsetOf(#[primary_span] pub Span);
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_offset_of)]
+pub(crate) struct InvalidOffsetOf(#[primary_span] pub Span);
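
Everything above is a passive data carrier: `#[derive(Diagnostic)]` and `#[derive(Subdiagnostic)]` generate the rendering glue from the `#[diag(...)]`, `#[label]`, and `#[suggestion(...)]` attributes, so the parser and lexer only fill in spans and strings before handing the value to the diagnostic context. A minimal sketch of the emission pattern, mirroring calls that appear later in this diff (the chosen struct and the surrounding method are illustrative):

    // Somewhere in the lexer, where `self.dcx()` returns the `DiagCtxt`:
    let span = self.mk_sp(start, self.pos);
    self.dcx().emit_err(errors::UnderscoreLiteralSuffix { span });
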
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
new file mode 100644
index 00000000000..b1bd4ac75e5
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -0,0 +1,120 @@
+use super::UnmatchedDelim;
+use rustc_ast::token::Delimiter;
+use rustc_errors::Diagnostic;
+use rustc_span::source_map::SourceMap;
+use rustc_span::Span;
+
+#[derive(Default)]
+pub struct TokenTreeDiagInfo {
+    /// Stack of open delimiters and their spans. Used for error messages.
+    pub open_braces: Vec<(Delimiter, Span)>,
+    pub unmatched_delims: Vec<UnmatchedDelim>,
+
+    /// Used only for error recovery when arriving at EOF with mismatched braces.
+    pub last_unclosed_found_span: Option<Span>,
+
+    /// Collect empty block spans that might have been auto-inserted by editors.
+    pub empty_block_spans: Vec<Span>,
+
+    /// Collect the spans of brace pairs (open, close). Used only
+    /// for detecting whether blocks are empty and contain only braces.
+    pub matching_block_spans: Vec<(Span, Span)>,
+}
+
+pub fn same_indentation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+    match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
+        (Some(open_padding), Some(close_padding)) => open_padding == close_padding,
+        _ => false,
+    }
+}
+
+// When we get a `)` or `]` for `{`, we should emit a help message here;
+// it's friendlier than reporting an `unmatched` error in a later phase.
+pub fn report_missing_open_delim(
+    err: &mut Diagnostic,
+    unmatched_delims: &[UnmatchedDelim],
+) -> bool {
+    let mut reported_missing_open = false;
+    for unmatch_brace in unmatched_delims.iter() {
+        if let Some(delim) = unmatch_brace.found_delim
+            && matches!(delim, Delimiter::Parenthesis | Delimiter::Bracket)
+        {
+            let missed_open = match delim {
+                Delimiter::Parenthesis => "(",
+                Delimiter::Bracket => "[",
+                _ => unreachable!(),
+            };
+            err.span_label(
+                unmatch_brace.found_span.shrink_to_lo(),
+                format!("missing open `{missed_open}` for this delimiter"),
+            );
+            reported_missing_open = true;
+        }
+    }
+    reported_missing_open
+}
+
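+/// Adds labels to `err` pointing at the brace pair whose indentation suggests the real
+/// location of a delimiter mismatch, falling back to the last properly closed block.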
+pub fn report_suspicious_mismatch_block(
+    err: &mut Diagnostic,
+    diag_info: &TokenTreeDiagInfo,
+    sm: &SourceMap,
+    delim: Delimiter,
+) {
+    if report_missing_open_delim(err, &diag_info.unmatched_delims) {
+        return;
+    }
+
+    let mut matched_spans: Vec<(Span, bool)> = diag_info
+        .matching_block_spans
+        .iter()
+        .map(|&(open, close)| (open.with_hi(close.lo()), same_indentation_level(sm, open, close)))
+        .collect();
+
+    // Sort by `lo`, so that the larger block spans come first.
+    matched_spans.sort_by_key(|(span, _)| span.lo());
+
+    // Use the larger, correctly indented blocks to cover the inner mismatched blocks.
+    // This is O(N^2), but we are on the error reporting path, so it is fine.
+    for i in 0..matched_spans.len() {
+        let (block_span, same_ident) = matched_spans[i];
+        if same_ident {
+            for j in i + 1..matched_spans.len() {
+                let (inner_block, inner_same_ident) = matched_spans[j];
+                if block_span.contains(inner_block) && !inner_same_ident {
+                    matched_spans[j] = (inner_block, true);
+                }
+            }
+        }
+    }
+
+    // Find the innermost span candidate for the final report.
+    let candidate_span =
+        matched_spans.into_iter().rev().find(|&(_, same_ident)| !same_ident).map(|(span, _)| span);
+
+    if let Some(block_span) = candidate_span {
+        err.span_label(block_span.shrink_to_lo(), "this delimiter might not be properly closed...");
+        err.span_label(
+            block_span.shrink_to_hi(),
+            "...as it matches this but it has different indentation",
+        );
+
+        // If there is an empty block in the mismatched span, note it.
+        if delim == Delimiter::Brace {
+            for span in diag_info.empty_block_spans.iter() {
+                if block_span.contains(*span) {
+                    err.span_label(*span, "block is empty, you might have not meant to close it");
+                    break;
+                }
+            }
+        }
+    } else {
+        // If there is no suspicious span, pointing at the last properly closed block may help.
+        if let Some(parent) = diag_info.matching_block_spans.last()
+            && diag_info.open_braces.last().is_none()
+            && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1))
+        {
+            err.span_label(parent.0, "this opening brace...");
+            err.span_label(parent.1, "...matches this closing brace");
+        }
+    }
+}
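
The heuristics above target inputs like the sketch below (illustrative, not taken from this diff), where a missing closing brace makes a later `}` pair up with an inner `{` at a different indentation level; `report_suspicious_mismatch_block` then labels that suspicious pair rather than leaving only a distant unclosed-delimiter error:

    fn outer() {
        if cond {
            body();
        // the `if` block is never closed here...
    }   // ...so this brace matches the `if`'s `{` at a different indentation level
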
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index e9701ec2d7f..31552452676 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,11 +1,16 @@
+use std::ops::Range;
+
+use crate::errors;
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
+use crate::make_unclosed_delims_error;
 use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{Spacing, TokenStream};
+use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
-use rustc_lexer::unescape::{self, Mode};
+use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
+use rustc_lexer::unescape::{self, EscapeError, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
+use rustc_lexer::{Cursor, LiteralKind};
 use rustc_session::lint::builtin::{
     RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
 };
@@ -14,16 +19,22 @@ use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::{edition::Edition, BytePos, Pos, Span};
 
-use tracing::debug;
-
+mod diagnostics;
 mod tokentrees;
 mod unescape_error_reporting;
 mod unicode_chars;
 
 use unescape_error_reporting::{emit_unescape_error, escaped_char};
 
+// This type is used a lot. Make sure it doesn't unintentionally get bigger.
+//
+// This assertion is in this crate, rather than in `rustc_lexer`, because that
+// crate cannot depend on `rustc_data_structures`.
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);
+
 #[derive(Clone, Debug)]
-pub struct UnmatchedBrace {
+pub struct UnmatchedDelim {
     pub expected_delim: Delimiter,
     pub found_delim: Option<Delimiter>,
     pub found_span: Span,
@@ -31,118 +42,304 @@ pub struct UnmatchedBrace {
     pub candidate_span: Option<Span>,
 }
 
-pub(crate) fn parse_token_trees<'a>(
-    sess: &'a ParseSess,
-    src: &'a str,
-    start_pos: BytePos,
+pub(crate) fn parse_token_trees<'sess, 'src>(
+    sess: &'sess ParseSess,
+    mut src: &'src str,
+    mut start_pos: BytePos,
     override_span: Option<Span>,
-) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
-    StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
-        .into_token_trees()
+) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
+    // Skip `#!`, if present.
+    if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
+        src = &src[shebang_len..];
+        start_pos = start_pos + BytePos::from_usize(shebang_len);
+    }
+
+    let cursor = Cursor::new(src);
+    let string_reader = StringReader {
+        sess,
+        start_pos,
+        pos: start_pos,
+        src,
+        cursor,
+        override_span,
+        nbsp_is_whitespace: false,
+    };
+    let (stream, res, unmatched_delims) =
+        tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
+    match res {
+        Ok(()) if unmatched_delims.is_empty() => Ok(stream),
+        _ => {
+            // Return an error if there are unmatched or unclosed delimiters.
+            // We emit the delimiter mismatch errors first, then the unclosed delimiter errors,
+            // because a delimiter mismatch is more likely to be the root cause of the error.
+
+            let mut buffer = Vec::with_capacity(1);
+            for unmatched in unmatched_delims {
+                if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
+                    buffer.push(err);
+                }
+            }
+            if let Err(errs) = res {
+                // Add unclosed delimiter or diff marker errors.
+                for err in errs {
+                    buffer.push(err);
+                }
+            }
+            Err(buffer)
+        }
+    }
 }
 
-struct StringReader<'a> {
-    sess: &'a ParseSess,
+struct StringReader<'sess, 'src> {
+    sess: &'sess ParseSess,
     /// Initial position, read-only.
     start_pos: BytePos,
     /// The absolute offset within the source_map of the current character.
     pos: BytePos,
-    /// Stop reading src at this index.
-    end_src_index: usize,
     /// Source text to tokenize.
-    src: &'a str,
+    src: &'src str,
+    /// Cursor for getting lexer tokens.
+    cursor: Cursor<'src>,
     override_span: Option<Span>,
+    /// When a "unknown start of token: \u{a0}" has already been emitted earlier
+    /// in this file, it's safe to treat further occurrences of the non-breaking
+    /// space character as whitespace.
+    nbsp_is_whitespace: bool,
 }
 
-impl<'a> StringReader<'a> {
+impl<'sess, 'src> StringReader<'sess, 'src> {
+    pub fn dcx(&self) -> &'sess DiagCtxt {
+        &self.sess.dcx
+    }
+
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
         self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
     }
 
-    /// Returns the next token, and info about preceding whitespace, if any.
-    fn next_token(&mut self) -> (Spacing, Token) {
-        let mut spacing = Spacing::Joint;
-
-        // Skip `#!` at the start of the file
-        let start_src_index = self.src_index(self.pos);
-        let text: &str = &self.src[start_src_index..self.end_src_index];
-        let is_beginning_of_file = self.pos == self.start_pos;
-        if is_beginning_of_file {
-            if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
-                self.pos = self.pos + BytePos::from_usize(shebang_len);
-                spacing = Spacing::Alone;
-            }
-        }
-
+    /// Returns the next token, paired with a bool indicating if the token was
+    /// preceded by whitespace.
+    fn next_token(&mut self) -> (Token, bool) {
+        let mut preceded_by_whitespace = false;
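+        // When an unknown-token error already covers `n` repeated copies of the same
+        // character, this counter makes the loop skip those copies instead of reporting
+        // each one again (see the `UnknownTokenRepeat` handling below).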
+        let mut swallow_next_invalid = 0;
         // Skip trivial (whitespace & comments) tokens
         loop {
-            let start_src_index = self.src_index(self.pos);
-            let text: &str = &self.src[start_src_index..self.end_src_index];
-
-            if text.is_empty() {
-                let span = self.mk_sp(self.pos, self.pos);
-                return (spacing, Token::new(token::Eof, span));
-            }
-
-            let token = rustc_lexer::first_token(text);
-
+            let str_before = self.cursor.as_str();
+            let token = self.cursor.advance_token();
             let start = self.pos;
-            self.pos = self.pos + BytePos::from_usize(token.len);
+            self.pos = self.pos + BytePos(token.len);
 
             debug!("next_token: {:?}({:?})", token.kind, self.str_from(start));
 
-            match self.cook_lexer_token(token.kind, start) {
-                Some(kind) => {
-                    let span = self.mk_sp(start, self.pos);
-                    return (spacing, Token::new(kind, span));
+            // Now "cook" the token, converting the simple `rustc_lexer::TokenKind` enum into a
+            // rich `rustc_ast::TokenKind`. This turns strings into interned symbols and runs
+            // additional validation.
+            let kind = match token.kind {
+                rustc_lexer::TokenKind::LineComment { doc_style } => {
+                    // Skip non-doc comments
+                    let Some(doc_style) = doc_style else {
+                        self.lint_unicode_text_flow(start);
+                        preceded_by_whitespace = true;
+                        continue;
+                    };
+
+                    // The opening delimiter (of length 3) is not included in the symbol.
+                    let content_start = start + BytePos(3);
+                    let content = self.str_from(content_start);
+                    self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
                 }
-                None => spacing = Spacing::Alone,
-            }
-        }
-    }
-
-    /// Report a fatal lexical error with a given span.
-    fn fatal_span(&self, sp: Span, m: &str) -> ! {
-        self.sess.span_diagnostic.span_fatal(sp, m)
-    }
-
-    /// Report a lexical error with a given span.
-    fn err_span(&self, sp: Span, m: &str) {
-        self.sess.span_diagnostic.struct_span_err(sp, m).emit();
-    }
-
-    /// Report a fatal error spanning [`from_pos`, `to_pos`).
-    fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! {
-        self.fatal_span(self.mk_sp(from_pos, to_pos), m)
-    }
+                rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
+                    if !terminated {
+                        self.report_unterminated_block_comment(start, doc_style);
+                    }
 
-    /// Report a lexical error spanning [`from_pos`, `to_pos`).
-    fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
-        self.err_span(self.mk_sp(from_pos, to_pos), m)
-    }
+                    // Skip non-doc comments
+                    let Some(doc_style) = doc_style else {
+                        self.lint_unicode_text_flow(start);
+                        preceded_by_whitespace = true;
+                        continue;
+                    };
+
+                    // The opening delimiter (of length 3) and the closing delimiter (of length 2)
+                    // are not included in the symbol.
+                    let content_start = start + BytePos(3);
+                    let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
+                    let content = self.str_from_to(content_start, content_end);
+                    self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
+                }
+                rustc_lexer::TokenKind::Whitespace => {
+                    preceded_by_whitespace = true;
+                    continue;
+                }
+                rustc_lexer::TokenKind::Ident => {
+                    self.ident(start)
+                }
+                rustc_lexer::TokenKind::RawIdent => {
+                    let sym = nfc_normalize(self.str_from(start + BytePos(2)));
+                    let span = self.mk_sp(start, self.pos);
+                    self.sess.symbol_gallery.insert(sym, span);
+                    if !sym.can_be_raw() {
+                        self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
+                    }
+                    self.sess.raw_identifier_spans.push(span);
+                    token::Ident(sym, true)
+                }
+                rustc_lexer::TokenKind::UnknownPrefix => {
+                    self.report_unknown_prefix(start);
+                    self.ident(start)
+                }
+                rustc_lexer::TokenKind::InvalidIdent
+                    // Do not recover an identifier with emoji if the codepoint is confusable
+                    // with a recoverable substitution token, like `➖`.
+                    if !UNICODE_ARRAY
+                        .iter()
+                        .any(|&(c, _, _)| {
+                            let sym = self.str_from(start);
+                            sym.chars().count() == 1 && c == sym.chars().next().unwrap()
+                        }) =>
+                {
+                    let sym = nfc_normalize(self.str_from(start));
+                    let span = self.mk_sp(start, self.pos);
+                    self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
+                        .push(span);
+                    token::Ident(sym, false)
+                }
+                // Split up (raw) C string literals into an ident and a string literal when the edition is < 2021.
+                rustc_lexer::TokenKind::Literal {
+                    kind: kind @ (LiteralKind::CStr { .. } | LiteralKind::RawCStr { .. }),
+                    suffix_start: _,
+                } if !self.mk_sp(start, self.pos).edition().at_least_rust_2021() => {
+                    let prefix_len = match kind {
+                        LiteralKind::CStr { .. } => 1,
+                        LiteralKind::RawCStr { .. } => 2,
+                        _ => unreachable!(),
+                    };
+
+                    // Reset the state so that only the prefix (`c` or `cr`)
+                    // has been consumed.
+                    let lit_start = start + BytePos(prefix_len);
+                    self.pos = lit_start;
+                    self.cursor = Cursor::new(&str_before[prefix_len as usize..]);
 
-    fn struct_fatal_span_char(
-        &self,
-        from_pos: BytePos,
-        to_pos: BytePos,
-        m: &str,
-        c: char,
-    ) -> DiagnosticBuilder<'a, !> {
-        self.sess
-            .span_diagnostic
-            .struct_span_fatal(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c)))
+                    self.report_unknown_prefix(start);
+                    let prefix_span = self.mk_sp(start, lit_start);
+                    return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
+                }
+                rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
+                    let suffix_start = start + BytePos(suffix_start);
+                    let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
+                    let suffix = if suffix_start < self.pos {
+                        let string = self.str_from(suffix_start);
+                        if string == "_" {
+                            self.sess.dcx.emit_err(errors::UnderscoreLiteralSuffix {
+                                span: self.mk_sp(suffix_start, self.pos),
+                            });
+                            None
+                        } else {
+                            Some(Symbol::intern(string))
+                        }
+                    } else {
+                        None
+                    };
+                    token::Literal(token::Lit { kind, symbol, suffix })
+                }
+                rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+                    // Include the leading `'` in the real identifier, for macro
+                    // expansion purposes. See #12512 for the gory details of why
+                    // this is necessary.
+                    let lifetime_name = self.str_from(start);
+                    if starts_with_number {
+                        let span = self.mk_sp(start, self.pos);
+                        self.dcx().struct_err("lifetimes cannot start with a number")
+                            .with_span(span)
+                            .stash(span, StashKey::LifetimeIsChar);
+                    }
+                    let ident = Symbol::intern(lifetime_name);
+                    token::Lifetime(ident)
+                }
+                rustc_lexer::TokenKind::Semi => token::Semi,
+                rustc_lexer::TokenKind::Comma => token::Comma,
+                rustc_lexer::TokenKind::Dot => token::Dot,
+                rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
+                rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
+                rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
+                rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
+                rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
+                rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
+                rustc_lexer::TokenKind::At => token::At,
+                rustc_lexer::TokenKind::Pound => token::Pound,
+                rustc_lexer::TokenKind::Tilde => token::Tilde,
+                rustc_lexer::TokenKind::Question => token::Question,
+                rustc_lexer::TokenKind::Colon => token::Colon,
+                rustc_lexer::TokenKind::Dollar => token::Dollar,
+                rustc_lexer::TokenKind::Eq => token::Eq,
+                rustc_lexer::TokenKind::Bang => token::Not,
+                rustc_lexer::TokenKind::Lt => token::Lt,
+                rustc_lexer::TokenKind::Gt => token::Gt,
+                rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
+                rustc_lexer::TokenKind::And => token::BinOp(token::And),
+                rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
+                rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
+                rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
+                rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
+                rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
+                rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
+
+                rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
+                    // Don't emit diagnostics for sequences of the same invalid token
+                    if swallow_next_invalid > 0 {
+                        swallow_next_invalid -= 1;
+                        continue;
+                    }
+                    let mut it = self.str_from_to_end(start).chars();
+                    let c = it.next().unwrap();
+                    if c == '\u{00a0}' {
+                        // If an error has already been reported on non-breaking
+                        // space characters earlier in the file, treat all
+                        // subsequent occurrences as whitespace.
+                        if self.nbsp_is_whitespace {
+                            preceded_by_whitespace = true;
+                            continue;
+                        }
+                        self.nbsp_is_whitespace = true;
+                    }
+                    let repeats = it.take_while(|c1| *c1 == c).count();
+                    // FIXME: the lexer could substitute the ASCII version of unicode homoglyphs
+                    // into the token directly, instead of keeping a table in
+                    // `check_for_substitution`. Ideally, this should be inside `rustc_lexer`.
+                    // However, we should first remove compound tokens like `<<` from
+                    // `rustc_lexer`, and then add fancier error recovery to it, as there will
+                    // be less overall work to do this way.
+                    let (token, sugg) =
+                        unicode_chars::check_for_substitution(self, start, c, repeats + 1);
+                    self.dcx().emit_err(errors::UnknownTokenStart {
+                        span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
+                        escaped: escaped_char(c),
+                        sugg,
+                        null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
+                        repeat: if repeats > 0 {
+                            swallow_next_invalid = repeats;
+                            Some(errors::UnknownTokenRepeat { repeats })
+                        } else { None },
+                    });
+
+                    if let Some(token) = token {
+                        token
+                    } else {
+                        preceded_by_whitespace = true;
+                        continue;
+                    }
+                }
+                rustc_lexer::TokenKind::Eof => token::Eof,
+            };
+            let span = self.mk_sp(start, self.pos);
+            return (Token::new(kind, span), preceded_by_whitespace);
+        }
     }
 
-    fn struct_err_span_char(
-        &self,
-        from_pos: BytePos,
-        to_pos: BytePos,
-        m: &str,
-        c: char,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        self.sess
-            .span_diagnostic
-            .struct_span_err(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c)))
+    fn ident(&self, start: BytePos) -> TokenKind {
+        let sym = nfc_normalize(self.str_from(start));
+        let span = self.mk_sp(start, self.pos);
+        self.sess.symbol_gallery.insert(sym, span);
+        token::Ident(sym, false)
     }
 
     /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly
@@ -154,7 +351,7 @@ impl<'a> StringReader<'a> {
         if contains_text_flow_control_chars(content) {
             let span = self.mk_sp(start, self.pos);
             self.sess.buffer_lint_with_diagnostic(
-                &TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
+                TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
                 span,
                 ast::CRATE_NODE_ID,
                 "unicode codepoint changing visible direction of text present in comment",
@@ -163,171 +360,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Turns simple `rustc_lexer::TokenKind` enum into a rich
-    /// `rustc_ast::TokenKind`. This turns strings into interned
-    /// symbols and runs additional validation.
-    fn cook_lexer_token(&self, token: rustc_lexer::TokenKind, start: BytePos) -> Option<TokenKind> {
-        Some(match token {
-            rustc_lexer::TokenKind::LineComment { doc_style } => {
-                // Skip non-doc comments
-                let Some(doc_style) = doc_style else {
-                    self.lint_unicode_text_flow(start);
-                    return None;
-                };
-
-                // Opening delimiter of the length 3 is not included into the symbol.
-                let content_start = start + BytePos(3);
-                let content = self.str_from(content_start);
-                self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
-            }
-            rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
-                if !terminated {
-                    self.report_unterminated_block_comment(start, doc_style);
-                }
-
-                // Skip non-doc comments
-                let Some(doc_style) = doc_style else {
-                    self.lint_unicode_text_flow(start);
-                    return None;
-                };
-
-                // Opening delimiter of the length 3 and closing delimiter of the length 2
-                // are not included into the symbol.
-                let content_start = start + BytePos(3);
-                let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
-                let content = self.str_from_to(content_start, content_end);
-                self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
-            }
-            rustc_lexer::TokenKind::Whitespace => return None,
-            rustc_lexer::TokenKind::Ident
-            | rustc_lexer::TokenKind::RawIdent
-            | rustc_lexer::TokenKind::UnknownPrefix => {
-                let is_raw_ident = token == rustc_lexer::TokenKind::RawIdent;
-                let is_unknown_prefix = token == rustc_lexer::TokenKind::UnknownPrefix;
-                let mut ident_start = start;
-                if is_raw_ident {
-                    ident_start = ident_start + BytePos(2);
-                }
-                if is_unknown_prefix {
-                    self.report_unknown_prefix(start);
-                }
-                let sym = nfc_normalize(self.str_from(ident_start));
-                let span = self.mk_sp(start, self.pos);
-                self.sess.symbol_gallery.insert(sym, span);
-                if is_raw_ident {
-                    if !sym.can_be_raw() {
-                        self.err_span(span, &format!("`{}` cannot be a raw identifier", sym));
-                    }
-                    self.sess.raw_identifier_spans.borrow_mut().push(span);
-                }
-                token::Ident(sym, is_raw_ident)
-            }
-            rustc_lexer::TokenKind::InvalidIdent
-                // Do not recover an identifier with emoji if the codepoint is a confusable
-                // with a recoverable substitution token, like `➖`.
-                if !UNICODE_ARRAY
-                    .iter()
-                    .any(|&(c, _, _)| {
-                        let sym = self.str_from(start);
-                        sym.chars().count() == 1 && c == sym.chars().next().unwrap()
-                    })
-                     =>
-            {
-                let sym = nfc_normalize(self.str_from(start));
-                let span = self.mk_sp(start, self.pos);
-                self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default().push(span);
-                token::Ident(sym, false)
-            }
-            rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
-                let suffix_start = start + BytePos(suffix_start as u32);
-                let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
-                let suffix = if suffix_start < self.pos {
-                    let string = self.str_from(suffix_start);
-                    if string == "_" {
-                        self.sess
-                            .span_diagnostic
-                            .struct_span_warn(
-                                self.mk_sp(suffix_start, self.pos),
-                                "underscore literal suffix is not allowed",
-                            )
-                            .warn(
-                                "this was previously accepted by the compiler but is \
-                                   being phased out; it will become a hard error in \
-                                   a future release!",
-                            )
-                            .note(
-                                "see issue #42326 \
-                                 <https://github.com/rust-lang/rust/issues/42326> \
-                                 for more information",
-                            )
-                            .emit();
-                        None
-                    } else {
-                        Some(Symbol::intern(string))
-                    }
-                } else {
-                    None
-                };
-                token::Literal(token::Lit { kind, symbol, suffix })
-            }
-            rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
-                // Include the leading `'` in the real identifier, for macro
-                // expansion purposes. See #12512 for the gory details of why
-                // this is necessary.
-                let lifetime_name = self.str_from(start);
-                if starts_with_number {
-                    self.err_span_(start, self.pos, "lifetimes cannot start with a number");
-                }
-                let ident = Symbol::intern(lifetime_name);
-                token::Lifetime(ident)
-            }
-            rustc_lexer::TokenKind::Semi => token::Semi,
-            rustc_lexer::TokenKind::Comma => token::Comma,
-            rustc_lexer::TokenKind::Dot => token::Dot,
-            rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
-            rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
-            rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
-            rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
-            rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
-            rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
-            rustc_lexer::TokenKind::At => token::At,
-            rustc_lexer::TokenKind::Pound => token::Pound,
-            rustc_lexer::TokenKind::Tilde => token::Tilde,
-            rustc_lexer::TokenKind::Question => token::Question,
-            rustc_lexer::TokenKind::Colon => token::Colon,
-            rustc_lexer::TokenKind::Dollar => token::Dollar,
-            rustc_lexer::TokenKind::Eq => token::Eq,
-            rustc_lexer::TokenKind::Bang => token::Not,
-            rustc_lexer::TokenKind::Lt => token::Lt,
-            rustc_lexer::TokenKind::Gt => token::Gt,
-            rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
-            rustc_lexer::TokenKind::And => token::BinOp(token::And),
-            rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
-            rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
-            rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
-            rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
-            rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
-            rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
-
-            rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
-                let c = self.str_from(start).chars().next().unwrap();
-                let mut err =
-                    self.struct_err_span_char(start, self.pos, "unknown start of token", c);
-                // FIXME: the lexer could be used to turn unicode homoglyphs into the token for
-                // their ASCII equivalent, instead of keeping a table in `check_for_substitution`.
-                // Ideally, this should be inside `rustc_lexer`. However, we should first remove
-                // compound tokens like `<<` from `rustc_lexer`, and then add fancier error
-                // recovery to it, as there will be less overall work to do this way.
-                let token = unicode_chars::check_for_substitution(self, start, c, &mut err);
-                if c == '\x00' {
-                    err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used");
-                }
-                err.emit();
-                token?
-            }
-        })
-    }
-
     fn cook_doc_comment(
         &self,
         content_start: BytePos,
@@ -337,14 +369,12 @@ impl<'a> StringReader<'a> {
     ) -> TokenKind {
         if content.contains('\r') {
             for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
-                self.err_span_(
+                let span = self.mk_sp(
                     content_start + BytePos(idx as u32),
                     content_start + BytePos(idx as u32 + 1),
-                    match comment_kind {
-                        CommentKind::Line => "bare CR not allowed in doc-comment",
-                        CommentKind::Block => "bare CR not allowed in block doc-comment",
-                    },
                 );
+                let block = matches!(comment_kind, CommentKind::Block);
+                self.dcx().emit_err(errors::CrDocComment { span, block });
             }
         }
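
As a standalone illustration of the bare-CR check above (plain std; `bare_cr_ranges` is a hypothetical helper, not part of this crate): `char_indices` yields the byte offset of every `\r`, and each reported range covers exactly one byte, since `\r` is ASCII.

    fn bare_cr_ranges(content: &str, content_start: usize) -> Vec<std::ops::Range<usize>> {
        content
            .char_indices()
            .filter(|&(_, c)| c == '\r')
            // `\r` is a single byte, so each range is one byte wide.
            .map(|(idx, _)| (content_start + idx)..(content_start + idx + 1))
            .collect()
    }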
 
@@ -359,106 +389,134 @@ impl<'a> StringReader<'a> {
     fn cook_lexer_literal(
         &self,
         start: BytePos,
-        suffix_start: BytePos,
+        end: BytePos,
         kind: rustc_lexer::LiteralKind,
     ) -> (token::LitKind, Symbol) {
-        // prefix means `"` or `br"` or `r###"`, ...
-        let (lit_kind, mode, prefix_len, postfix_len) = match kind {
+        match kind {
             rustc_lexer::LiteralKind::Char { terminated } => {
                 if !terminated {
-                    self.sess.span_diagnostic.span_fatal_with_code(
-                        self.mk_sp(start, suffix_start),
-                        "unterminated character literal",
-                        error_code!(E0762),
-                    )
+                    self.dcx()
+                        .struct_span_fatal(self.mk_sp(start, end), "unterminated character literal")
+                        .with_code(E0762)
+                        .emit()
                 }
-                (token::Char, Mode::Char, 1, 1) // ' '
+                self.cook_unicode(token::Char, Mode::Char, start, end, 1, 1) // ' '
             }
             rustc_lexer::LiteralKind::Byte { terminated } => {
                 if !terminated {
-                    self.sess.span_diagnostic.span_fatal_with_code(
-                        self.mk_sp(start + BytePos(1), suffix_start),
-                        "unterminated byte constant",
-                        error_code!(E0763),
-                    )
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start + BytePos(1), end),
+                            "unterminated byte constant",
+                        )
+                        .with_code(E0763)
+                        .emit()
                 }
-                (token::Byte, Mode::Byte, 2, 1) // b' '
+                self.cook_unicode(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
             }
             rustc_lexer::LiteralKind::Str { terminated } => {
                 if !terminated {
-                    self.sess.span_diagnostic.span_fatal_with_code(
-                        self.mk_sp(start, suffix_start),
-                        "unterminated double quote string",
-                        error_code!(E0765),
-                    )
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start, end),
+                            "unterminated double quote string",
+                        )
+                        .with_code(E0765)
+                        .emit()
                 }
-                (token::Str, Mode::Str, 1, 1) // " "
+                self.cook_unicode(token::Str, Mode::Str, start, end, 1, 1) // " "
             }
             rustc_lexer::LiteralKind::ByteStr { terminated } => {
                 if !terminated {
-                    self.sess.span_diagnostic.span_fatal_with_code(
-                        self.mk_sp(start + BytePos(1), suffix_start),
-                        "unterminated double quote byte string",
-                        error_code!(E0766),
-                    )
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start + BytePos(1), end),
+                            "unterminated double quote byte string",
+                        )
+                        .with_code(E0766)
+                        .emit()
                 }
-                (token::ByteStr, Mode::ByteStr, 2, 1) // b" "
+                self.cook_unicode(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
             }
-            rustc_lexer::LiteralKind::RawStr { n_hashes, err } => {
-                self.report_raw_str_error(start, err);
-                let n = u32::from(n_hashes);
-                (token::StrRaw(n_hashes), Mode::RawStr, 2 + n, 1 + n) // r##" "##
+            rustc_lexer::LiteralKind::CStr { terminated } => {
+                if !terminated {
+                    self.dcx()
+                        .struct_span_fatal(
+                            self.mk_sp(start + BytePos(1), end),
+                            "unterminated C string",
+                        )
+                        .with_code(E0767)
+                        .emit()
+                }
+                self.cook_mixed(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
+            }
+            rustc_lexer::LiteralKind::RawStr { n_hashes } => {
+                if let Some(n_hashes) = n_hashes {
+                    let n = u32::from(n_hashes);
+                    let kind = token::StrRaw(n_hashes);
+                    self.cook_unicode(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
+                } else {
+                    self.report_raw_str_error(start, 1);
+                }
+            }
+            rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
+                if let Some(n_hashes) = n_hashes {
+                    let n = u32::from(n_hashes);
+                    let kind = token::ByteStrRaw(n_hashes);
+                    self.cook_unicode(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
+                } else {
+                    self.report_raw_str_error(start, 2);
+                }
             }
-            rustc_lexer::LiteralKind::RawByteStr { n_hashes, err } => {
-                self.report_raw_str_error(start, err);
-                let n = u32::from(n_hashes);
-                (token::ByteStrRaw(n_hashes), Mode::RawByteStr, 3 + n, 1 + n) // br##" "##
+            rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
+                if let Some(n_hashes) = n_hashes {
+                    let n = u32::from(n_hashes);
+                    let kind = token::CStrRaw(n_hashes);
+                    self.cook_unicode(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
+                } else {
+                    self.report_raw_str_error(start, 2);
+                }
             }
             rustc_lexer::LiteralKind::Int { base, empty_int } => {
-                return if empty_int {
-                    self.sess
-                        .span_diagnostic
-                        .struct_span_err_with_code(
-                            self.mk_sp(start, suffix_start),
-                            "no valid digits found for number",
-                            error_code!(E0768),
-                        )
-                        .emit();
+                if empty_int {
+                    let span = self.mk_sp(start, end);
+                    self.dcx().emit_err(errors::NoDigitsLiteral { span });
                     (token::Integer, sym::integer(0))
                 } else {
-                    self.validate_int_literal(base, start, suffix_start);
-                    (token::Integer, self.symbol_from_to(start, suffix_start))
-                };
+                    if matches!(base, Base::Binary | Base::Octal) {
+                        let base = base as u32;
+                        let s = self.str_from_to(start + BytePos(2), end);
+                        for (idx, c) in s.char_indices() {
+                            let span = self.mk_sp(
+                                start + BytePos::from_usize(2 + idx),
+                                start + BytePos::from_usize(2 + idx + c.len_utf8()),
+                            );
+                            if c != '_' && c.to_digit(base).is_none() {
+                                self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
+                            }
+                        }
+                    }
+                    (token::Integer, self.symbol_from_to(start, end))
+                }
             }
             rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
                 if empty_exponent {
-                    self.err_span_(start, self.pos, "expected at least one digit in exponent");
+                    let span = self.mk_sp(start, self.pos);
+                    self.dcx().emit_err(errors::EmptyExponentFloat { span });
                 }
-
-                match base {
-                    Base::Hexadecimal => self.err_span_(
-                        start,
-                        suffix_start,
-                        "hexadecimal float literal is not supported",
-                    ),
-                    Base::Octal => {
-                        self.err_span_(start, suffix_start, "octal float literal is not supported")
-                    }
-                    Base::Binary => {
-                        self.err_span_(start, suffix_start, "binary float literal is not supported")
-                    }
-                    _ => (),
+                let base = match base {
+                    Base::Hexadecimal => Some("hexadecimal"),
+                    Base::Octal => Some("octal"),
+                    Base::Binary => Some("binary"),
+                    _ => None,
+                };
+                if let Some(base) = base {
+                    let span = self.mk_sp(start, end);
+                    self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
                 }
-
-                let id = self.symbol_from_to(start, suffix_start);
-                return (token::Float, id);
+                (token::Float, self.symbol_from_to(start, end))
             }
-        };
-        let content_start = start + BytePos(prefix_len);
-        let content_end = suffix_start - BytePos(postfix_len);
-        let id = self.symbol_from_to(content_start, content_end);
-        self.validate_literal_escape(mode, content_start, content_end, prefix_len, postfix_len);
-        (lit_kind, id)
+        }
     }
 
     #[inline]
@@ -468,7 +526,7 @@ impl<'a> StringReader<'a> {
 
     /// Slice of the source text from `start` up to but excluding `self.pos`,
     /// meaning the slice does not include the character `self.ch`.
-    fn str_from(&self, start: BytePos) -> &str {
+    fn str_from(&self, start: BytePos) -> &'src str {
         self.str_from_to(start, self.pos)
     }
 
@@ -479,64 +537,69 @@ impl<'a> StringReader<'a> {
     }
 
     /// Slice of the source text spanning from `start` up to but excluding `end`.
-    fn str_from_to(&self, start: BytePos, end: BytePos) -> &str {
+    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
         &self.src[self.src_index(start)..self.src_index(end)]
     }
 
-    fn report_raw_str_error(&self, start: BytePos, opt_err: Option<RawStrError>) {
-        match opt_err {
-            Some(RawStrError::InvalidStarter { bad_char }) => {
+    /// Slice of the source text spanning from `start` until the end
+    fn str_from_to_end(&self, start: BytePos) -> &'src str {
+        &self.src[self.src_index(start)..]
+    }
+
+    fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! {
+        match rustc_lexer::validate_raw_str(self.str_from(start), prefix_len) {
+            Err(RawStrError::InvalidStarter { bad_char }) => {
                 self.report_non_started_raw_string(start, bad_char)
             }
-            Some(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self
+            Err(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self
                 .report_unterminated_raw_string(start, expected, possible_terminator_offset, found),
-            Some(RawStrError::TooManyDelimiters { found }) => {
+            Err(RawStrError::TooManyDelimiters { found }) => {
                 self.report_too_many_hashes(start, found)
             }
-            None => (),
+            Ok(()) => panic!("no error found for supposedly invalid raw string literal"),
         }
     }
 
     fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
-        self.struct_fatal_span_char(
-            start,
-            self.pos,
-            "found invalid character; only `#` is allowed in raw string delimitation",
-            bad_char,
-        )
-        .emit()
+        self.sess
+            .dcx
+            .struct_span_fatal(
+                self.mk_sp(start, self.pos),
+                format!(
+                    "found invalid character; only `#` is allowed in raw string delimitation: {}",
+                    escaped_char(bad_char)
+                ),
+            )
+            .emit()
     }
 
     fn report_unterminated_raw_string(
         &self,
         start: BytePos,
-        n_hashes: usize,
-        possible_offset: Option<usize>,
-        found_terminators: usize,
+        n_hashes: u32,
+        possible_offset: Option<u32>,
+        found_terminators: u32,
     ) -> ! {
-        let mut err = self.sess.span_diagnostic.struct_span_fatal_with_code(
-            self.mk_sp(start, start),
-            "unterminated raw string",
-            error_code!(E0748),
-        );
-
+        let mut err =
+            self.dcx().struct_span_fatal(self.mk_sp(start, start), "unterminated raw string");
+        err.code(E0748);
         err.span_label(self.mk_sp(start, start), "unterminated raw string");
 
         if n_hashes > 0 {
-            err.note(&format!(
+            err.note(format!(
                 "this raw string should be terminated with `\"{}`",
-                "#".repeat(n_hashes)
+                "#".repeat(n_hashes as usize)
             ));
         }
 
         if let Some(possible_offset) = possible_offset {
-            let lo = start + BytePos(possible_offset as u32);
-            let hi = lo + BytePos(found_terminators as u32);
+            let lo = start + BytePos(possible_offset);
+            let hi = lo + BytePos(found_terminators);
             let span = self.mk_sp(lo, hi);
             err.span_suggestion(
                 span,
                 "consider terminating the string here",
-                "#".repeat(n_hashes),
+                "#".repeat(n_hashes as usize),
                 Applicability::MaybeIncorrect,
             );
         }
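
For reference, the suggestion arithmetic above amounts to this small sketch (plain `u32` offsets standing in for `BytePos`; the function name is hypothetical): the annotated span covers the terminator-like hashes that were actually found, and the suggested text supplies the number of hashes the opening delimiter requires.

    fn terminator_suggestion(
        start: u32,
        possible_offset: u32,
        found_terminators: u32,
        n_hashes: u32,
    ) -> ((u32, u32), String) {
        let lo = start + possible_offset;          // where the candidate terminator begins
        let hi = lo + found_terminators;           // covers the hashes that were present
        ((lo, hi), "#".repeat(n_hashes as usize))  // text of a complete terminator
    }
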
@@ -550,11 +613,8 @@ impl<'a> StringReader<'a> {
             None => "unterminated block comment",
         };
         let last_bpos = self.pos;
-        let mut err = self.sess.span_diagnostic.struct_span_fatal_with_code(
-            self.mk_sp(start, last_bpos),
-            msg,
-            error_code!(E0758),
-        );
+        let mut err = self.dcx().struct_span_fatal(self.mk_sp(start, last_bpos), msg);
+        err.code(E0758);
         let mut nested_block_comment_open_idxs = vec![];
         let mut last_nested_block_comment_idxs = None;
         let mut content_chars = self.str_from(start).char_indices().peekable();
@@ -599,76 +659,63 @@ impl<'a> StringReader<'a> {
     // identifier tokens.
     fn report_unknown_prefix(&self, start: BytePos) {
         let prefix_span = self.mk_sp(start, self.pos);
-        let prefix_str = self.str_from_to(start, self.pos);
-        let msg = format!("prefix `{}` is unknown", prefix_str);
+        let prefix = self.str_from_to(start, self.pos);
 
         let expn_data = prefix_span.ctxt().outer_expn_data();
 
         if expn_data.edition >= Edition::Edition2021 {
             // In Rust 2021, this is a hard error.
-            let mut err = self.sess.span_diagnostic.struct_span_err(prefix_span, &msg);
-            err.span_label(prefix_span, "unknown prefix");
-            if prefix_str == "rb" {
-                err.span_suggestion_verbose(
-                    prefix_span,
-                    "use `br` for a raw byte string",
-                    "br",
-                    Applicability::MaybeIncorrect,
-                );
+            let sugg = if prefix == "rb" {
+                Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
             } else if expn_data.is_root() {
-                err.span_suggestion_verbose(
-                    prefix_span.shrink_to_hi(),
-                    "consider inserting whitespace here",
-                    " ",
-                    Applicability::MaybeIncorrect,
-                );
-            }
-            err.note("prefixed identifiers and literals are reserved since Rust 2021");
-            err.emit();
+                Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
+            } else {
+                None
+            };
+            self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
         } else {
             // Before Rust 2021, only emit a lint for migration.
             self.sess.buffer_lint_with_diagnostic(
-                &RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
+                RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                 prefix_span,
                 ast::CRATE_NODE_ID,
-                &msg,
+                format!("prefix `{prefix}` is unknown"),
                 BuiltinLintDiagnostics::ReservedPrefix(prefix_span),
             );
         }
     }
 
-    fn report_too_many_hashes(&self, start: BytePos, found: usize) -> ! {
-        self.fatal_span_(
-            start,
-            self.pos,
-            &format!(
-                "too many `#` symbols: raw strings may be delimited \
-                by up to 255 `#` symbols, but found {}",
-                found
-            ),
-        )
+    fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
+        self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
     }
 
-    fn validate_literal_escape(
+    fn cook_common(
         &self,
+        kind: token::LitKind,
         mode: Mode,
-        content_start: BytePos,
-        content_end: BytePos,
+        start: BytePos,
+        end: BytePos,
         prefix_len: u32,
         postfix_len: u32,
-    ) {
+        unescape: fn(&str, Mode, &mut dyn FnMut(Range<usize>, Result<(), EscapeError>)),
+    ) -> (token::LitKind, Symbol) {
+        let mut has_fatal_err = false;
+        let content_start = start + BytePos(prefix_len);
+        let content_end = end - BytePos(postfix_len);
         let lit_content = self.str_from_to(content_start, content_end);
-        unescape::unescape_literal(lit_content, mode, &mut |range, result| {
+        unescape(lit_content, mode, &mut |range, result| {
             // Here we only check for errors. The actual unescaping is done later.
             if let Err(err) = result {
-                let span_with_quotes = self
-                    .mk_sp(content_start - BytePos(prefix_len), content_end + BytePos(postfix_len));
+                let span_with_quotes = self.mk_sp(start, end);
                 let (start, end) = (range.start as u32, range.end as u32);
                 let lo = content_start + BytePos(start);
                 let hi = lo + BytePos(end - start);
                 let span = self.mk_sp(lo, hi);
+                if err.is_fatal() {
+                    has_fatal_err = true;
+                }
                 emit_unescape_error(
-                    &self.sess.span_diagnostic,
+                    self.dcx(),
                     lit_content,
                     span_with_quotes,
                     span,
@@ -678,24 +725,47 @@ impl<'a> StringReader<'a> {
                 );
             }
         });
-    }
 
-    fn validate_int_literal(&self, base: Base, content_start: BytePos, content_end: BytePos) {
-        let base = match base {
-            Base::Binary => 2,
-            Base::Octal => 8,
-            _ => return,
-        };
-        let s = self.str_from_to(content_start + BytePos(2), content_end);
-        for (idx, c) in s.char_indices() {
-            let idx = idx as u32;
-            if c != '_' && c.to_digit(base).is_none() {
-                let lo = content_start + BytePos(2 + idx);
-                let hi = content_start + BytePos(2 + idx + c.len_utf8() as u32);
-                self.err_span_(lo, hi, &format!("invalid digit for a base {} literal", base));
-            }
+        // We normally exclude the quotes for the symbol, but for errors we
+        // include them because it results in clearer error messages.
+        if !has_fatal_err {
+            (kind, Symbol::intern(lit_content))
+        } else {
+            (token::Err, self.symbol_from_to(start, end))
         }
     }
+
+    fn cook_unicode(
+        &self,
+        kind: token::LitKind,
+        mode: Mode,
+        start: BytePos,
+        end: BytePos,
+        prefix_len: u32,
+        postfix_len: u32,
+    ) -> (token::LitKind, Symbol) {
+        self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
+            unescape::unescape_unicode(src, mode, &mut |span, result| {
+                callback(span, result.map(drop))
+            })
+        })
+    }
+
+    fn cook_mixed(
+        &self,
+        kind: token::LitKind,
+        mode: Mode,
+        start: BytePos,
+        end: BytePos,
+        prefix_len: u32,
+        postfix_len: u32,
+    ) -> (token::LitKind, Symbol) {
+        self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
+            unescape::unescape_mixed(src, mode, &mut |span, result| {
+                callback(span, result.map(drop))
+            })
+        })
+    }
 }
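
As a rough, standalone sketch of the `cook_common`/`cook_unicode` flow above (simplified types and a hypothetical name, not the compiler's API): the content is sliced out between the prefix and postfix, a checking callback runs over it, and a fatal escape error keeps the quoted text so later diagnostics stay readable.

    fn cook_sketch(
        full: &str,                                  // literal text including prefix and quotes
        prefix_len: usize,                           // e.g. 2 for `b"`
        postfix_len: usize,                          // e.g. 1 for the closing `"`
        check: impl Fn(&str) -> Result<(), String>,  // stands in for the unescape callback
    ) -> (bool, String) {
        let content = &full[prefix_len..full.len() - postfix_len];
        match check(content) {
            // No fatal error: keep only the content between the delimiters.
            Ok(()) => (true, content.to_string()),
            // Fatal error: keep the full text, quotes included, for clearer errors.
            Err(_) => (false, full.to_string()),
        }
    }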
 
 pub fn nfc_normalize(string: &str) -> Symbol {
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index ef84f95ec83..c9ff2d58e2c 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,299 +1,308 @@
-use super::{StringReader, UnmatchedBrace};
-
+use super::diagnostics::report_suspicious_mismatch_block;
+use super::diagnostics::same_indentation_level;
+use super::diagnostics::TokenTreeDiagInfo;
+use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
-use rustc_ast::tokenstream::{
-    DelimSpan,
-    Spacing::{self, *},
-    TokenStream, TokenTree, TreeAndSpacing,
-};
+use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
-use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::PResult;
-use rustc_span::Span;
+use rustc_errors::{Applicability, PErr};
+use rustc_span::symbol::kw;
+
+pub(super) struct TokenTreesReader<'sess, 'src> {
+    string_reader: StringReader<'sess, 'src>,
+    /// The "next" token, which has been obtained from the `StringReader` but
+    /// not yet handled by the `TokenTreesReader`.
+    token: Token,
+    diag_info: TokenTreeDiagInfo,
+}
 
-impl<'a> StringReader<'a> {
-    pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+impl<'sess, 'src> TokenTreesReader<'sess, 'src> {
+    pub(super) fn parse_all_token_trees(
+        string_reader: StringReader<'sess, 'src>,
+    ) -> (TokenStream, Result<(), Vec<PErr<'sess>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
-            string_reader: self,
+            string_reader,
             token: Token::dummy(),
-            open_braces: Vec::new(),
-            unmatched_braces: Vec::new(),
-            matching_delim_spans: Vec::new(),
-            last_unclosed_found_span: None,
-            last_delim_empty_block_spans: FxHashMap::default(),
-            matching_block_spans: Vec::new(),
+            diag_info: TokenTreeDiagInfo::default(),
         };
-        let res = tt_reader.parse_all_token_trees();
-        (res, tt_reader.unmatched_braces)
+        let (_open_spacing, stream, res) =
+            tt_reader.parse_token_trees(/* is_delimited */ false);
+        (stream, res, tt_reader.diag_info.unmatched_delims)
     }
-}
-
-struct TokenTreesReader<'a> {
-    string_reader: StringReader<'a>,
-    token: Token,
-    /// Stack of open delimiters and their spans. Used for error message.
-    open_braces: Vec<(Delimiter, Span)>,
-    unmatched_braces: Vec<UnmatchedBrace>,
-    /// The type and spans for all braces
-    ///
-    /// Used only for error recovery when arriving to EOF with mismatched braces.
-    matching_delim_spans: Vec<(Delimiter, Span, Span)>,
-    last_unclosed_found_span: Option<Span>,
-    /// Collect empty block spans that might have been auto-inserted by editors.
-    last_delim_empty_block_spans: FxHashMap<Delimiter, Span>,
-    /// Collect the spans of braces (Open, Close). Used only
-    /// for detecting if blocks are empty and only braces.
-    matching_block_spans: Vec<(Span, Span)>,
-}
 
-impl<'a> TokenTreesReader<'a> {
-    // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-    fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
-        let mut buf = TokenStreamBuilder::default();
+    // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
+    // the result is that of the opening delimiter.
+    fn parse_token_trees(
+        &mut self,
+        is_delimited: bool,
+    ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'sess>>>) {
+        // Move past the opening delimiter.
+        let (_, open_spacing) = self.bump(false);
 
-        self.bump();
-        while self.token != token::Eof {
-            buf.push(self.parse_token_tree()?);
-        }
-
-        Ok(buf.into_token_stream())
-    }
-
-    // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
-    fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
-        let mut buf = TokenStreamBuilder::default();
+        let mut buf = Vec::new();
         loop {
-            if let token::CloseDelim(..) = self.token.kind {
-                return buf.into_token_stream();
-            }
-
-            match self.parse_token_tree() {
-                Ok(tree) => buf.push(tree),
-                Err(mut e) => {
-                    e.emit();
-                    return buf.into_token_stream();
+            match self.token.kind {
+                token::OpenDelim(delim) => {
+                    buf.push(match self.parse_token_tree_open_delim(delim) {
+                        Ok(val) => val,
+                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
+                    })
+                }
+                token::CloseDelim(delim) => {
+                    return (
+                        open_spacing,
+                        TokenStream::new(buf),
+                        if is_delimited { Ok(()) } else { Err(vec![self.close_delim_err(delim)]) },
+                    );
+                }
+                token::Eof => {
+                    return (
+                        open_spacing,
+                        TokenStream::new(buf),
+                        if is_delimited { Err(vec![self.eof_err()]) } else { Ok(()) },
+                    );
+                }
+                _ => {
+                    // Get the next normal token.
+                    let (this_tok, this_spacing) = self.bump(true);
+                    buf.push(TokenTree::Token(this_tok, this_spacing));
                 }
             }
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> {
-        let sm = self.string_reader.sess.source_map();
-
-        match self.token.kind {
-            token::Eof => {
-                let msg = "this file contains an unclosed delimiter";
-                let mut err =
-                    self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
-                for &(_, sp) in &self.open_braces {
-                    err.span_label(sp, "unclosed delimiter");
-                    self.unmatched_braces.push(UnmatchedBrace {
-                        expected_delim: Delimiter::Brace,
-                        found_delim: None,
-                        found_span: self.token.span,
-                        unclosed_span: Some(sp),
-                        candidate_span: None,
-                    });
-                }
-
-                if let Some((delim, _)) = self.open_braces.last() {
-                    if let Some((_, open_sp, close_sp)) =
-                        self.matching_delim_spans.iter().find(|(d, open_sp, close_sp)| {
-                            if let Some(close_padding) = sm.span_to_margin(*close_sp) {
-                                if let Some(open_padding) = sm.span_to_margin(*open_sp) {
-                                    return delim == d && close_padding != open_padding;
-                                }
-                            }
-                            false
-                        })
-                    // these are in reverse order as they get inserted on close, but
-                    {
-                        // we want the last open/first close
-                        err.span_label(*open_sp, "this delimiter might not be properly closed...");
-                        err.span_label(
-                            *close_sp,
-                            "...as it matches this but it has different indentation",
-                        );
-                    }
-                }
-                Err(err)
-            }
-            token::OpenDelim(delim) => {
-                // The span for beginning of the delimited section
-                let pre_span = self.token.span;
+    fn eof_err(&mut self) -> PErr<'sess> {
+        let msg = "this file contains an unclosed delimiter";
+        let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
+        for &(_, sp) in &self.diag_info.open_braces {
+            err.span_label(sp, "unclosed delimiter");
+            self.diag_info.unmatched_delims.push(UnmatchedDelim {
+                expected_delim: Delimiter::Brace,
+                found_delim: None,
+                found_span: self.token.span,
+                unclosed_span: Some(sp),
+                candidate_span: None,
+            });
+        }
 
-                // Parse the open delimiter.
-                self.open_braces.push((delim, self.token.span));
-                self.bump();
+        if let Some((delim, _)) = self.diag_info.open_braces.last() {
+            report_suspicious_mismatch_block(
+                &mut err,
+                &self.diag_info,
+                self.string_reader.sess.source_map(),
+                *delim,
+            )
+        }
+        err
+    }
 
-                // Parse the token trees within the delimiters.
-                // We stop at any delimiter so we can try to recover if the user
-                // uses an incorrect delimiter.
-                let tts = self.parse_token_trees_until_close_delim();
+    fn parse_token_tree_open_delim(
+        &mut self,
+        open_delim: Delimiter,
+    ) -> Result<TokenTree, Vec<PErr<'sess>>> {
+        // The span for beginning of the delimited section
+        let pre_span = self.token.span;
 
-                // Expand to cover the entire delimited token tree
-                let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
+        self.diag_info.open_braces.push((open_delim, self.token.span));
 
-                match self.token.kind {
-                    // Correct delimiter.
-                    token::CloseDelim(d) if d == delim => {
-                        let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
-                        let close_brace_span = self.token.span;
+        // Parse the token trees within the delimiters.
+        // We stop at any delimiter so we can try to recover if the user
+        // uses an incorrect delimiter.
+        let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        if let Err(errs) = res {
+            return Err(self.unclosed_delim_err(tts, errs));
+        }
 
-                        if tts.is_empty() {
-                            let empty_block_span = open_brace_span.to(close_brace_span);
-                            if !sm.is_multiline(empty_block_span) {
-                                // Only track if the block is in the form of `{}`, otherwise it is
-                                // likely that it was written on purpose.
-                                self.last_delim_empty_block_spans.insert(delim, empty_block_span);
-                            }
-                        }
+        // Expand to cover the entire delimited token tree
+        let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
+        let sm = self.string_reader.sess.source_map();
 
-                        //only add braces
-                        if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, delim) {
-                            self.matching_block_spans.push((open_brace_span, close_brace_span));
-                        }
+        let close_spacing = match self.token.kind {
+            // Correct delimiter.
+            token::CloseDelim(close_delim) if close_delim == open_delim => {
+                let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
+                let close_brace_span = self.token.span;
 
-                        if self.open_braces.is_empty() {
-                            // Clear up these spans to avoid suggesting them as we've found
-                            // properly matched delimiters so far for an entire block.
-                            self.matching_delim_spans.clear();
-                        } else {
-                            self.matching_delim_spans.push((
-                                open_brace,
-                                open_brace_span,
-                                close_brace_span,
-                            ));
-                        }
-                        // Parse the closing delimiter.
-                        self.bump();
+                if tts.is_empty() && close_delim == Delimiter::Brace {
+                    let empty_block_span = open_brace_span.to(close_brace_span);
+                    if !sm.is_multiline(empty_block_span) {
+                        // Only track if the block is in the form of `{}`, otherwise it is
+                        // likely that it was written on purpose.
+                        self.diag_info.empty_block_spans.push(empty_block_span);
                     }
-                    // Incorrect delimiter.
-                    token::CloseDelim(other) => {
-                        let mut unclosed_delimiter = None;
-                        let mut candidate = None;
-
-                        if self.last_unclosed_found_span != Some(self.token.span) {
-                            // do not complain about the same unclosed delimiter multiple times
-                            self.last_unclosed_found_span = Some(self.token.span);
-                            // This is a conservative error: only report the last unclosed
-                            // delimiter. The previous unclosed delimiters could actually be
-                            // closed! The parser just hasn't gotten to them yet.
-                            if let Some(&(_, sp)) = self.open_braces.last() {
-                                unclosed_delimiter = Some(sp);
-                            };
-                            if let Some(current_padding) = sm.span_to_margin(self.token.span) {
-                                for (brace, brace_span) in &self.open_braces {
-                                    if let Some(padding) = sm.span_to_margin(*brace_span) {
-                                        // high likelihood of these two corresponding
-                                        if current_padding == padding && brace == &other {
-                                            candidate = Some(*brace_span);
-                                        }
-                                    }
-                                }
-                            }
-                            let (tok, _) = self.open_braces.pop().unwrap();
-                            self.unmatched_braces.push(UnmatchedBrace {
-                                expected_delim: tok,
-                                found_delim: Some(other),
-                                found_span: self.token.span,
-                                unclosed_span: unclosed_delimiter,
-                                candidate_span: candidate,
-                            });
-                        } else {
-                            self.open_braces.pop();
-                        }
+                }
 
-                        // If the incorrect delimiter matches an earlier opening
-                        // delimiter, then don't consume it (it can be used to
-                        // close the earlier one). Otherwise, consume it.
-                        // E.g., we try to recover from:
-                        // fn foo() {
-                        //     bar(baz(
-                        // }  // Incorrect delimiter but matches the earlier `{`
-                        if !self.open_braces.iter().any(|&(b, _)| b == other) {
-                            self.bump();
-                        }
-                    }
-                    token::Eof => {
-                        // Silently recover, the EOF token will be seen again
-                        // and an error emitted then. Thus we don't pop from
-                        // self.open_braces here.
-                    }
-                    _ => {}
+                // only add braces
+                if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) {
+                    // Add all the matching spans, we will sort by span later
+                    self.diag_info.matching_block_spans.push((open_brace_span, close_brace_span));
                 }
 
-                Ok(TokenTree::Delimited(delim_span, delim, tts).into())
+                // Move past the closing delimiter.
+                self.bump(false).1
             }
-            token::CloseDelim(delim) => {
-                // An unexpected closing delimiter (i.e., there is no
-                // matching opening delimiter).
-                let token_str = token_to_string(&self.token);
-                let msg = format!("unexpected closing delimiter: `{}`", token_str);
-                let mut err =
-                    self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
-
-                // Braces are added at the end, so the last element is the biggest block
-                if let Some(parent) = self.matching_block_spans.last() {
-                    if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
-                        // Check if the (empty block) is in the last properly closed block
-                        if (parent.0.to(parent.1)).contains(span) {
-                            err.span_label(
-                                span,
-                                "block is empty, you might have not meant to close it",
-                            );
-                        } else {
-                            err.span_label(parent.0, "this opening brace...");
+            // Incorrect delimiter.
+            token::CloseDelim(close_delim) => {
+                let mut unclosed_delimiter = None;
+                let mut candidate = None;
 
-                            err.span_label(parent.1, "...matches this closing brace");
+                if self.diag_info.last_unclosed_found_span != Some(self.token.span) {
+                    // do not complain about the same unclosed delimiter multiple times
+                    self.diag_info.last_unclosed_found_span = Some(self.token.span);
+                    // This is a conservative error: only report the last unclosed
+                    // delimiter. The previous unclosed delimiters could actually be
+                    // closed! The parser just hasn't gotten to them yet.
+                    if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
+                        unclosed_delimiter = Some(sp);
+                    };
+                    for (brace, brace_span) in &self.diag_info.open_braces {
+                        if same_indentation_level(sm, self.token.span, *brace_span)
+                            && brace == &close_delim
+                        {
+                            // high likelihood of these two corresponding
+                            candidate = Some(*brace_span);
                         }
-                    } else {
-                        err.span_label(parent.0, "this opening brace...");
-
-                        err.span_label(parent.1, "...matches this closing brace");
                     }
+                    let (tok, _) = self.diag_info.open_braces.pop().unwrap();
+                    self.diag_info.unmatched_delims.push(UnmatchedDelim {
+                        expected_delim: tok,
+                        found_delim: Some(close_delim),
+                        found_span: self.token.span,
+                        unclosed_span: unclosed_delimiter,
+                        candidate_span: candidate,
+                    });
+                } else {
+                    self.diag_info.open_braces.pop();
                 }
 
-                err.span_label(self.token.span, "unexpected closing delimiter");
-                Err(err)
-            }
-            _ => {
-                let tt = TokenTree::Token(self.token.take());
-                let mut spacing = self.bump();
-                if !self.token.is_op() {
-                    spacing = Alone;
+                // If the incorrect delimiter matches an earlier opening
+                // delimiter, then don't consume it (it can be used to
+                // close the earlier one). Otherwise, consume it.
+                // E.g., we try to recover from:
+                // fn foo() {
+                //     bar(baz(
+                // }  // Incorrect delimiter but matches the earlier `{`
+                if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
+                    self.bump(false).1
+                } else {
+                    // The choice of value here doesn't matter.
+                    Spacing::Alone
                 }
-                Ok((tt, spacing))
             }
-        }
-    }
+            token::Eof => {
+                // Silently recover, the EOF token will be seen again
+                // and an error emitted then. Thus we don't pop from
+                // self.open_braces here. The choice of spacing value here
+                // doesn't matter.
+                Spacing::Alone
+            }
+            _ => unreachable!(),
+        };
+
+        let spacing = DelimSpacing::new(open_spacing, close_spacing);
 
-    fn bump(&mut self) -> Spacing {
-        let (spacing, token) = self.string_reader.next_token();
-        self.token = token;
-        spacing
+        Ok(TokenTree::Delimited(delim_span, spacing, open_delim, tts))
     }
-}
 
-#[derive(Default)]
-struct TokenStreamBuilder {
-    buf: Vec<TreeAndSpacing>,
-}
+    // Move on to the next token, returning the current token and its spacing.
+    // Will glue adjacent single-char tokens together if `glue` is set.
+    fn bump(&mut self, glue: bool) -> (Token, Spacing) {
+        let (this_spacing, next_tok) = loop {
+            let (next_tok, is_next_tok_preceded_by_whitespace) = self.string_reader.next_token();
 
-impl TokenStreamBuilder {
-    fn push(&mut self, (tree, joint): TreeAndSpacing) {
-        if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last()
-            && let TokenTree::Token(token) = &tree
-            && let Some(glued) = prev_token.glue(token)
-        {
-            self.buf.pop();
-            self.buf.push((TokenTree::Token(glued), joint));
-            return;
+            if is_next_tok_preceded_by_whitespace {
+                break (Spacing::Alone, next_tok);
+            } else if glue && let Some(glued) = self.token.glue(&next_tok) {
+                self.token = glued;
+            } else {
+                let this_spacing = if next_tok.is_punct() {
+                    Spacing::Joint
+                } else if next_tok.kind == token::Eof {
+                    Spacing::Alone
+                } else {
+                    Spacing::JointHidden
+                };
+                break (this_spacing, next_tok);
+            }
+        };
+        let this_tok = std::mem::replace(&mut self.token, next_tok);
+        (this_tok, this_spacing)
+    }
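
    // A minimal, self-contained sketch of the spacing decision in `bump` above
    // (an assumed simplification: plain booleans instead of real `Token`s, and a
    // local `Spacing` enum standing in for rustc_ast's type).
    #[derive(Debug, PartialEq)]
    enum Spacing {
        Alone,
        Joint,
        JointHidden,
    }

    fn spacing_for(preceded_by_whitespace: bool, next_is_punct: bool, next_is_eof: bool) -> Spacing {
        if preceded_by_whitespace {
            Spacing::Alone // whitespace always breaks joining
        } else if next_is_punct {
            Spacing::Joint // e.g. `<` directly followed by another `<`
        } else if next_is_eof {
            Spacing::Alone
        } else {
            Spacing::JointHidden // adjacent, but the next token is not punctuation
        }
    }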
+
+    fn unclosed_delim_err(
+        &mut self,
+        tts: TokenStream,
+        mut errs: Vec<PErr<'sess>>,
+    ) -> Vec<PErr<'sess>> {
+        // If there are unclosed delims, see if there are diff markers and if so, point them
+        // out instead of complaining about the unclosed delims.
+        let mut parser = crate::stream_to_parser(self.string_reader.sess, tts, None);
+        let mut diff_errs = vec![];
+        // Suggest removing a `{` we think appears in an `if`/`while` condition
+        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
+        // we have no way of tracking this in the lexer itself, so we piggyback on the parser
+        let mut in_cond = false;
+        while parser.token != token::Eof {
+            if let Err(diff_err) = parser.err_diff_marker() {
+                diff_errs.push(diff_err);
+            } else if parser.is_keyword_ahead(0, &[kw::If, kw::While]) {
+                in_cond = true;
+            } else if matches!(
+                parser.token.kind,
+                token::CloseDelim(Delimiter::Brace) | token::FatArrow
+            ) {
+                // end of the `if`/`while` body, or the end of a `match` guard
+                in_cond = false;
+            } else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
+                // Store the `&&` and `let` to use their spans later when creating the diagnostic
+                let maybe_andand = parser.look_ahead(1, |t| t.clone());
+                let maybe_let = parser.look_ahead(2, |t| t.clone());
+                if maybe_andand == token::OpenDelim(Delimiter::Brace) {
+                    // This might be the beginning of the `if`/`while` body (i.e., the end of the condition)
+                    in_cond = false;
+                } else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
+                    let mut err = parser.dcx().struct_span_err(
+                        parser.token.span,
+                        "found a `{` in the middle of a let-chain",
+                    );
+                    err.span_suggestion(
+                        parser.token.span,
+                        "consider removing this brace to parse the `let` as part of the same chain",
+                        "",
+                        Applicability::MachineApplicable,
+                    );
+                    err.span_label(
+                        maybe_andand.span.to(maybe_let.span),
+                        "you might have meant to continue the let-chain here",
+                    );
+                    errs.push(err);
+                }
+            }
+            parser.bump();
         }
-        self.buf.push((tree, joint))
+        if !diff_errs.is_empty() {
+            for err in errs {
+                err.cancel();
+            }
+            return diff_errs;
+        }
+        return errs;
     }
 
-    fn into_token_stream(self) -> TokenStream {
-        TokenStream::new(self.buf)
+    fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'sess> {
+        // An unexpected closing delimiter (i.e., there is no
+        // matching opening delimiter).
+        let token_str = token_to_string(&self.token);
+        let msg = format!("unexpected closing delimiter: `{token_str}`");
+        let mut err = self.string_reader.sess.dcx.struct_span_err(self.token.span, msg);
+
+        report_suspicious_mismatch_block(
+            &mut err,
+            &self.diag_info,
+            self.string_reader.sess.source_map(),
+            delim,
+        );
+        err.span_label(self.token.span, "unexpected closing delimiter");
+        err
     }
 }
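
The delimiter bookkeeping above boils down to a standalone sketch (plain std and hypothetical types; the real reader also records spans, indentation candidates, and empty-block hints): opening delimiters are pushed onto a stack, a matching close pops its partner, and anything mismatched or still open at end of input is recorded for recovery instead of aborting.

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Delim { Paren, Bracket, Brace }

    #[derive(Debug)]
    struct Unmatched { expected: Option<Delim>, found: Option<Delim> }

    fn check_delims(tokens: &[(bool, Delim)]) -> Vec<Unmatched> {
        // `true` marks an opening delimiter, `false` a closing one.
        let mut open: Vec<Delim> = Vec::new();
        let mut unmatched = Vec::new();
        for &(is_open, d) in tokens {
            if is_open {
                open.push(d);
            } else {
                match open.pop() {
                    Some(expected) if expected == d => {} // correctly matched pair
                    Some(expected) => unmatched.push(Unmatched { expected: Some(expected), found: Some(d) }),
                    None => unmatched.push(Unmatched { expected: None, found: Some(d) }), // stray close
                }
            }
        }
        // Anything still open at end of input has no closing delimiter at all.
        unmatched.extend(open.into_iter().map(|d| Unmatched { expected: Some(d), found: None }));
        unmatched
    }
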
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 273827864f1..3238f8e23bb 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -3,85 +3,66 @@
 use std::iter::once;
 use std::ops::Range;
 
-use rustc_errors::{pluralize, Applicability, Handler};
+use rustc_errors::{Applicability, DiagCtxt};
 use rustc_lexer::unescape::{EscapeError, Mode};
 use rustc_span::{BytePos, Span};
 
+use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError};
+
 pub(crate) fn emit_unescape_error(
-    handler: &Handler,
-    // interior part of the literal, without quotes
+    dcx: &DiagCtxt,
+    // interior part of the literal, between quotes
     lit: &str,
-    // full span of the literal, including quotes
-    span_with_quotes: Span,
-    // interior span of the literal, without quotes
-    span: Span,
+    // full span of the literal, including quotes and any prefix
+    full_lit_span: Span,
+    // span of the error part of the literal
+    err_span: Span,
     mode: Mode,
     // range of the error inside `lit`
     range: Range<usize>,
     error: EscapeError,
 ) {
-    tracing::debug!(
+    debug!(
         "emit_unescape_error: {:?}, {:?}, {:?}, {:?}, {:?}",
-        lit,
-        span_with_quotes,
-        mode,
-        range,
-        error
+        lit, full_lit_span, mode, range, error
     );
     let last_char = || {
-        let c = lit[range.clone()].chars().rev().next().unwrap();
-        let span = span.with_lo(span.hi() - BytePos(c.len_utf8() as u32));
+        let c = lit[range.clone()].chars().next_back().unwrap();
+        let span = err_span.with_lo(err_span.hi() - BytePos(c.len_utf8() as u32));
         (c, span)
     };
     match error {
         EscapeError::LoneSurrogateUnicodeEscape => {
-            handler
-                .struct_span_err(span, "invalid unicode character escape")
-                .span_label(span, "invalid escape")
-                .help("unicode escape must not be a surrogate")
-                .emit();
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: true });
         }
         EscapeError::OutOfRangeUnicodeEscape => {
-            handler
-                .struct_span_err(span, "invalid unicode character escape")
-                .span_label(span, "invalid escape")
-                .help("unicode escape must be at most 10FFFF")
-                .emit();
+            dcx.emit_err(UnescapeError::InvalidUnicodeEscape { span: err_span, surrogate: false });
         }
         EscapeError::MoreThanOneChar => {
             use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};
+            let mut sugg = None;
+            let mut note = None;
 
-            let mut has_help = false;
-            let mut handler = handler.struct_span_err(
-                span_with_quotes,
-                "character literal may only contain one codepoint",
-            );
-
-            if lit.chars().skip(1).all(|c| is_combining_mark(c)) {
-                let escaped_marks =
-                    lit.chars().skip(1).map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
-                handler.span_note(
-                    span,
-                    &format!(
-                        "this `{}` is followed by the combining mark{} `{}`",
-                        lit.chars().next().unwrap(),
-                        pluralize!(escaped_marks.len()),
-                        escaped_marks.join(""),
-                    ),
-                );
+            let lit_chars = lit.chars().collect::<Vec<_>>();
+            let (first, rest) = lit_chars.split_first().unwrap();
+            if rest.iter().copied().all(is_combining_mark) {
                 let normalized = lit.nfc().to_string();
                 if normalized.chars().count() == 1 {
-                    has_help = true;
-                    handler.span_suggestion(
-                        span,
-                        &format!(
-                            "consider using the normalized form `{}` of this character",
-                            normalized.chars().next().unwrap().escape_default()
-                        ),
+                    let ch = normalized.chars().next().unwrap().escape_default().to_string();
+                    sugg = Some(MoreThanOneCharSugg::NormalizedForm {
+                        span: err_span,
+                        ch,
                         normalized,
-                        Applicability::MachineApplicable,
-                    );
+                    });
                 }
+                let escaped_marks =
+                    rest.iter().map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
+                note = Some(MoreThanOneCharNote::AllCombining {
+                    span: err_span,
+                    chr: format!("{first}"),
+                    len: escaped_marks.len(),
+                    escaped_marks: escaped_marks.join(""),
+                });
             } else {
                 let printable: Vec<char> = lit
                     .chars()
@@ -91,91 +72,72 @@ pub(crate) fn emit_unescape_error(
                     })
                     .collect();
 
-                if let [ch] = printable.as_slice() {
-                    has_help = true;
-
-                    handler.span_note(
-                        span,
-                        &format!(
-                            "there are non-printing characters, the full sequence is `{}`",
-                            lit.escape_default(),
-                        ),
-                    );
-
-                    handler.span_suggestion(
-                        span,
-                        "consider removing the non-printing characters",
-                        ch,
-                        Applicability::MaybeIncorrect,
-                    );
+                if let &[ch] = printable.as_slice() {
+                    sugg = Some(MoreThanOneCharSugg::RemoveNonPrinting {
+                        span: err_span,
+                        ch: ch.to_string(),
+                    });
+                    note = Some(MoreThanOneCharNote::NonPrinting {
+                        span: err_span,
+                        escaped: lit.escape_default().to_string(),
+                    });
                 }
-            }
-
-            if !has_help {
-                let (prefix, msg) = if mode.is_bytes() {
-                    ("b", "if you meant to write a byte string literal, use double quotes")
-                } else {
-                    ("", "if you meant to write a `str` literal, use double quotes")
-                };
-
-                handler.span_suggestion(
-                    span_with_quotes,
-                    msg,
-                    format!("{}\"{}\"", prefix, lit),
-                    Applicability::MachineApplicable,
-                );
-            }
-
-            handler.emit();
+            };
+            let sugg = sugg.unwrap_or_else(|| {
+                let prefix = mode.prefix_noraw();
+                let mut escaped = String::with_capacity(lit.len());
+                let mut in_escape = false;
+                for c in lit.chars() {
+                    match c {
+                        '\\' => in_escape = !in_escape,
+                        '"' if !in_escape => escaped.push('\\'),
+                        _ => in_escape = false,
+                    }
+                    escaped.push(c);
+                }
+                let sugg = format!("{prefix}\"{escaped}\"");
+                MoreThanOneCharSugg::Quotes {
+                    span: full_lit_span,
+                    is_byte: mode == Mode::Byte,
+                    sugg,
+                }
+            });
+            dcx.emit_err(UnescapeError::MoreThanOneChar {
+                span: full_lit_span,
+                note,
+                suggestion: sugg,
+            });
         }
         EscapeError::EscapeOnlyChar => {
             let (c, char_span) = last_char();
-
-            let msg = if mode.is_bytes() {
-                "byte constant must be escaped"
-            } else {
-                "character constant must be escaped"
-            };
-            handler
-                .struct_span_err(span, &format!("{}: `{}`", msg, escaped_char(c)))
-                .span_suggestion(
-                    char_span,
-                    "escape the character",
-                    c.escape_default(),
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            dcx.emit_err(UnescapeError::EscapeOnlyChar {
+                span: err_span,
+                char_span,
+                escaped_sugg: c.escape_default().to_string(),
+                escaped_msg: escaped_char(c),
+                byte: mode == Mode::Byte,
+            });
         }
         EscapeError::BareCarriageReturn => {
-            let msg = if mode.in_double_quotes() {
-                "bare CR not allowed in string, use `\\r` instead"
-            } else {
-                "character constant must be escaped: `\\r`"
-            };
-            handler
-                .struct_span_err(span, msg)
-                .span_suggestion(
-                    span,
-                    "escape the character",
-                    "\\r",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            let double_quotes = mode.in_double_quotes();
+            dcx.emit_err(UnescapeError::BareCr { span: err_span, double_quotes });
         }
         EscapeError::BareCarriageReturnInRawString => {
             assert!(mode.in_double_quotes());
-            let msg = "bare CR not allowed in raw string";
-            handler.span_err(span, msg);
+            dcx.emit_err(UnescapeError::BareCrRawString(err_span));
         }
         EscapeError::InvalidEscape => {
             let (c, span) = last_char();
 
-            let label =
-                if mode.is_bytes() { "unknown byte escape" } else { "unknown character escape" };
+            let label = if mode == Mode::Byte || mode == Mode::ByteStr {
+                "unknown byte escape"
+            } else {
+                "unknown character escape"
+            };
             let ec = escaped_char(c);
-            let mut diag = handler.struct_span_err(span, &format!("{}: `{}`", label, ec));
+            let mut diag = dcx.struct_span_err(span, format!("{label}: `{ec}`"));
             diag.span_label(span, label);
-            if c == '{' || c == '}' && !mode.is_bytes() {
+            if c == '{' || c == '}' && matches!(mode, Mode::Str | Mode::RawStr) {
                 diag.help(
                     "if used in a formatting string, curly braces are escaped with `{{` and `}}`",
                 );
@@ -185,71 +147,65 @@ pub(crate) fn emit_unescape_error(
                      version control settings",
                 );
             } else {
-                if !mode.is_bytes() {
+                if mode == Mode::Str || mode == Mode::Char {
                     diag.span_suggestion(
-                        span_with_quotes,
+                        full_lit_span,
                         "if you meant to write a literal backslash (perhaps escaping in a regular expression), consider a raw string literal",
-                        format!("r\"{}\"", lit),
+                        format!("r\"{lit}\""),
                         Applicability::MaybeIncorrect,
                     );
                 }
 
                 diag.help(
                     "for more information, visit \
-                     <https://static.rust-lang.org/doc/master/reference.html#literals>",
+                     <https://doc.rust-lang.org/reference/tokens.html#literals>",
                 );
             }
             diag.emit();
         }
         EscapeError::TooShortHexEscape => {
-            handler.span_err(span, "numeric character escape is too short");
+            dcx.emit_err(UnescapeError::TooShortHexEscape(err_span));
         }
         EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
             let (c, span) = last_char();
-
-            let msg = if error == EscapeError::InvalidCharInHexEscape {
-                "invalid character in numeric character escape"
-            } else {
-                "invalid character in unicode escape"
-            };
-            let c = escaped_char(c);
-
-            handler
-                .struct_span_err(span, &format!("{}: `{}`", msg, c))
-                .span_label(span, msg)
-                .emit();
+            let is_hex = error == EscapeError::InvalidCharInHexEscape;
+            let ch = escaped_char(c);
+            dcx.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
         }
         EscapeError::NonAsciiCharInByte => {
-            assert!(mode.is_bytes());
             let (c, span) = last_char();
-            let mut err = handler.struct_span_err(span, "non-ASCII character in byte constant");
+            let desc = match mode {
+                Mode::Byte => "byte literal",
+                Mode::ByteStr => "byte string literal",
+                Mode::RawByteStr => "raw byte string literal",
+                _ => panic!("non-is_byte literal paired with NonAsciiCharInByte"),
+            };
+            let mut err = dcx.struct_span_err(span, format!("non-ASCII character in {desc}"));
             let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
-                format!(" but is {:?}", c)
+                format!(" but is {c:?}")
             } else {
                 String::new()
             };
-            err.span_label(span, &format!("byte constant must be ASCII{}", postfix));
-            if (c as u32) <= 0xFF {
+            err.span_label(span, format!("must be ASCII{postfix}"));
+            // Note: the \\xHH suggestions are not given for raw byte string
+            // literals, because they are raw and so cannot use any escapes.
+            if (c as u32) <= 0xFF && mode != Mode::RawByteStr {
                 err.span_suggestion(
                     span,
-                    &format!(
-                        "if you meant to use the unicode code point for {:?}, use a \\xHH escape",
-                        c
+                    format!(
+                        "if you meant to use the unicode code point for {c:?}, use a \\xHH escape"
                     ),
                     format!("\\x{:X}", c as u32),
                     Applicability::MaybeIncorrect,
                 );
-            } else if matches!(mode, Mode::Byte) {
+            } else if mode == Mode::Byte {
                 err.span_label(span, "this multibyte character does not fit into a single byte");
-            } else if matches!(mode, Mode::ByteStr) {
+            } else if mode != Mode::RawByteStr {
                 let mut utf8 = String::new();
                 utf8.push(c);
                 err.span_suggestion(
                     span,
-                    &format!(
-                        "if you meant to use the UTF-8 encoding of {:?}, use \\xHH escapes",
-                        c
-                    ),
+                    format!("if you meant to use the UTF-8 encoding of {c:?}, use \\xHH escapes"),
                     utf8.as_bytes()
                         .iter()
                         .map(|b: &u8| format!("\\x{:X}", *b))
@@ -259,55 +215,23 @@ pub(crate) fn emit_unescape_error(
             }
             err.emit();
         }
-        EscapeError::NonAsciiCharInByteString => {
-            assert!(mode.is_bytes());
-            let (c, span) = last_char();
-            let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
-                format!(" but is {:?}", c)
-            } else {
-                String::new()
-            };
-            handler
-                .struct_span_err(span, "raw byte string must be ASCII")
-                .span_label(span, &format!("must be ASCII{}", postfix))
-                .emit();
-        }
         EscapeError::OutOfRangeHexEscape => {
-            handler
-                .struct_span_err(span, "out of range hex escape")
-                .span_label(span, "must be a character in the range [\\x00-\\x7f]")
-                .emit();
+            dcx.emit_err(UnescapeError::OutOfRangeHexEscape(err_span));
         }
         EscapeError::LeadingUnderscoreUnicodeEscape => {
             let (c, span) = last_char();
-            let msg = "invalid start of unicode escape";
-            handler
-                .struct_span_err(span, &format!("{}: `{}`", msg, c))
-                .span_label(span, msg)
-                .emit();
+            dcx.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
+                span,
+                ch: escaped_char(c),
+            });
         }
         EscapeError::OverlongUnicodeEscape => {
-            handler
-                .struct_span_err(span, "overlong unicode escape")
-                .span_label(span, "must have at most 6 hex digits")
-                .emit();
+            dcx.emit_err(UnescapeError::OverlongUnicodeEscape(err_span));
         }
         EscapeError::UnclosedUnicodeEscape => {
-            handler
-                .struct_span_err(span, "unterminated unicode escape")
-                .span_label(span, "missing a closing `}`")
-                .span_suggestion_verbose(
-                    span.shrink_to_hi(),
-                    "terminate the unicode escape",
-                    "}",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            dcx.emit_err(UnescapeError::UnclosedUnicodeEscape(err_span, err_span.shrink_to_hi()));
         }
         EscapeError::NoBraceInUnicodeEscape => {
-            let msg = "incorrect unicode escape sequence";
-            let mut diag = handler.struct_span_err(span, msg);
-
             let mut suggestion = "\\u{".to_owned();
             let mut suggestion_len = 0;
             let (c, char_span) = last_char();
@@ -317,54 +241,40 @@ pub(crate) fn emit_unescape_error(
                 suggestion_len += c.len_utf8();
             }
 
-            if suggestion_len > 0 {
+            let (label, sub) = if suggestion_len > 0 {
                 suggestion.push('}');
                 let hi = char_span.lo() + BytePos(suggestion_len as u32);
-                diag.span_suggestion(
-                    span.with_hi(hi),
-                    "format of unicode escape sequences uses braces",
-                    suggestion,
-                    Applicability::MaybeIncorrect,
-                );
+                (None, NoBraceUnicodeSub::Suggestion { span: err_span.with_hi(hi), suggestion })
             } else {
-                diag.span_label(span, msg);
-                diag.help("format of unicode escape sequences is `\\u{...}`");
-            }
-
-            diag.emit();
+                (Some(err_span), NoBraceUnicodeSub::Help)
+            };
+            dcx.emit_err(UnescapeError::NoBraceInUnicodeEscape { span: err_span, label, sub });
         }
         EscapeError::UnicodeEscapeInByte => {
-            let msg = "unicode escape in byte string";
-            handler
-                .struct_span_err(span, msg)
-                .span_label(span, msg)
-                .help("unicode escape sequences cannot be used as a byte or in a byte string")
-                .emit();
+            dcx.emit_err(UnescapeError::UnicodeEscapeInByte(err_span));
         }
         EscapeError::EmptyUnicodeEscape => {
-            handler
-                .struct_span_err(span, "empty unicode escape")
-                .span_label(span, "this escape must have at least 1 hex digit")
-                .emit();
+            dcx.emit_err(UnescapeError::EmptyUnicodeEscape(err_span));
         }
         EscapeError::ZeroChars => {
-            let msg = "empty character literal";
-            handler.struct_span_err(span, msg).span_label(span, msg).emit();
+            dcx.emit_err(UnescapeError::ZeroChars(err_span));
         }
         EscapeError::LoneSlash => {
-            let msg = "invalid trailing slash in literal";
-            handler.struct_span_err(span, msg).span_label(span, msg).emit();
+            dcx.emit_err(UnescapeError::LoneSlash(err_span));
+        }
+        EscapeError::NulInCStr => {
+            dcx.emit_err(UnescapeError::NulInCStr { span: err_span });
         }
         EscapeError::UnskippedWhitespaceWarning => {
             let (c, char_span) = last_char();
-            let msg =
-                format!("non-ASCII whitespace symbol '{}' is not skipped", c.escape_unicode());
-            handler.struct_span_warn(span, &msg).span_label(char_span, &msg).emit();
+            dcx.emit_warn(UnescapeError::UnskippedWhitespace {
+                span: err_span,
+                ch: escaped_char(c),
+                char_span,
+            });
         }
         EscapeError::MultipleSkippedLinesWarning => {
-            let msg = "multiple lines skipped by escaped newline";
-            let bottom_msg = "skipping everything up to and including this point";
-            handler.struct_span_warn(span, msg).span_label(span, bottom_msg).emit();
+            dcx.emit_warn(UnescapeError::MultipleSkippedLinesWarning(err_span));
         }
     }
 }
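// Editor's sketch (not part of this commit): the `MoreThanOneCharSugg::Quotes` fallback
// above wraps the literal body in double quotes, escaping any `"` that is not already part
// of an escape sequence. A standalone version of that loop; the helper name
// `escape_interior` is hypothetical, and the real code additionally prepends
// `mode.prefix_noraw()` to the suggestion.
fn escape_interior(lit: &str) -> String {
    let mut escaped = String::with_capacity(lit.len() + 2);
    escaped.push('"');
    let mut in_escape = false;
    for c in lit.chars() {
        match c {
            // A backslash toggles whether the following character is escaped.
            '\\' => in_escape = !in_escape,
            // A bare `"` must itself be escaped before the body is re-quoted.
            '"' if !in_escape => escaped.push('\\'),
            _ => in_escape = false,
        }
        escaped.push(c);
    }
    escaped.push('"');
    escaped
}

fn main() {
    assert_eq!(escape_interior(r#"ab"c"#), r#""ab\"c""#);
    assert_eq!(escape_interior(r#"a\"b"#), r#""a\"b""#); // an already-escaped quote is left alone
}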
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 2c68cc5895c..a136abaa28b 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -1,377 +1,382 @@
-// Characters and their corresponding confusables were collected from
-// https://www.unicode.org/Public/security/10.0.0/confusables.txt
+//! Characters and their corresponding confusables were collected from
+//! <https://www.unicode.org/Public/security/10.0.0/confusables.txt>
 
 use super::StringReader;
-use crate::token::{self, Delimiter};
-use rustc_errors::{Applicability, Diagnostic};
+use crate::{
+    errors::TokenSubstitution,
+    token::{self, Delimiter},
+};
 use rustc_span::{symbol::kw, BytePos, Pos, Span};
 
 #[rustfmt::skip] // for line breaks
-pub(crate) const UNICODE_ARRAY: &[(char, &str, char)] = &[
-    ('
', "Line Separator", ' '),
-    ('
', "Paragraph Separator", ' '),
-    (' ', "Ogham Space mark", ' '),
-    (' ', "En Quad", ' '),
-    (' ', "Em Quad", ' '),
-    (' ', "En Space", ' '),
-    (' ', "Em Space", ' '),
-    (' ', "Three-Per-Em Space", ' '),
-    (' ', "Four-Per-Em Space", ' '),
-    (' ', "Six-Per-Em Space", ' '),
-    (' ', "Punctuation Space", ' '),
-    (' ', "Thin Space", ' '),
-    (' ', "Hair Space", ' '),
-    (' ', "Medium Mathematical Space", ' '),
-    (' ', "No-Break Space", ' '),
-    (' ', "Figure Space", ' '),
-    (' ', "Narrow No-Break Space", ' '),
-    (' ', "Ideographic Space", ' '),
-
-    ('ߺ', "Nko Lajanyalan", '_'),
-    ('﹍', "Dashed Low Line", '_'),
-    ('﹎', "Centreline Low Line", '_'),
-    ('﹏', "Wavy Low Line", '_'),
-    ('_', "Fullwidth Low Line", '_'),
-
-    ('‐', "Hyphen", '-'),
-    ('‑', "Non-Breaking Hyphen", '-'),
-    ('‒', "Figure Dash", '-'),
-    ('–', "En Dash", '-'),
-    ('—', "Em Dash", '-'),
-    ('﹘', "Small Em Dash", '-'),
-    ('۔', "Arabic Full Stop", '-'),
-    ('⁃', "Hyphen Bullet", '-'),
-    ('˗', "Modifier Letter Minus Sign", '-'),
-    ('−', "Minus Sign", '-'),
-    ('➖', "Heavy Minus Sign", '-'),
-    ('Ⲻ', "Coptic Letter Dialect-P Ni", '-'),
-    ('ー', "Katakana-Hiragana Prolonged Sound Mark", '-'),
-    ('-', "Fullwidth Hyphen-Minus", '-'),
-    ('―', "Horizontal Bar", '-'),
-    ('─', "Box Drawings Light Horizontal", '-'),
-    ('━', "Box Drawings Heavy Horizontal", '-'),
-    ('㇐', "CJK Stroke H", '-'),
-    ('ꟷ', "Latin Epigraphic Letter Sideways I", '-'),
-    ('ᅳ', "Hangul Jungseong Eu", '-'),
-    ('ㅡ', "Hangul Letter Eu", '-'),
-    ('一', "CJK Unified Ideograph-4E00", '-'),
-    ('⼀', "Kangxi Radical One", '-'),
-
-    ('؍', "Arabic Date Separator", ','),
-    ('٫', "Arabic Decimal Separator", ','),
-    ('‚', "Single Low-9 Quotation Mark", ','),
-    ('¸', "Cedilla", ','),
-    ('ꓹ', "Lisu Letter Tone Na Po", ','),
-    (',', "Fullwidth Comma", ','),
-
-    (';', "Greek Question Mark", ';'),
-    (';', "Fullwidth Semicolon", ';'),
-    ('︔', "Presentation Form For Vertical Semicolon", ';'),
-
-    ('ः', "Devanagari Sign Visarga", ':'),
-    ('ઃ', "Gujarati Sign Visarga", ':'),
-    (':', "Fullwidth Colon", ':'),
-    ('։', "Armenian Full Stop", ':'),
-    ('܃', "Syriac Supralinear Colon", ':'),
-    ('܄', "Syriac Sublinear Colon", ':'),
-    ('᛬', "Runic Multiple Punctuation", ':'),
-    ('︰', "Presentation Form For Vertical Two Dot Leader", ':'),
-    ('᠃', "Mongolian Full Stop", ':'),
-    ('᠉', "Mongolian Manchu Full Stop", ':'),
-    ('⁚', "Two Dot Punctuation", ':'),
-    ('׃', "Hebrew Punctuation Sof Pasuq", ':'),
-    ('˸', "Modifier Letter Raised Colon", ':'),
-    ('꞉', "Modifier Letter Colon", ':'),
-    ('∶', "Ratio", ':'),
-    ('ː', "Modifier Letter Triangular Colon", ':'),
-    ('ꓽ', "Lisu Letter Tone Mya Jeu", ':'),
-    ('︓', "Presentation Form For Vertical Colon", ':'),
-
-    ('!', "Fullwidth Exclamation Mark", '!'),
-    ('ǃ', "Latin Letter Retroflex Click", '!'),
-    ('ⵑ', "Tifinagh Letter Tuareg Yang", '!'),
-    ('︕', "Presentation Form For Vertical Exclamation Mark", '!'),
-
-    ('ʔ', "Latin Letter Glottal Stop", '?'),
-    ('Ɂ', "Latin Capital Letter Glottal Stop", '?'),
-    ('ॽ', "Devanagari Letter Glottal Stop", '?'),
-    ('Ꭾ', "Cherokee Letter He", '?'),
-    ('ꛫ', "Bamum Letter Ntuu", '?'),
-    ('?', "Fullwidth Question Mark", '?'),
-    ('︖', "Presentation Form For Vertical Question Mark", '?'),
-
-    ('𝅭', "Musical Symbol Combining Augmentation Dot", '.'),
-    ('․', "One Dot Leader", '.'),
-    ('܁', "Syriac Supralinear Full Stop", '.'),
-    ('܂', "Syriac Sublinear Full Stop", '.'),
-    ('꘎', "Vai Full Stop", '.'),
-    ('𐩐', "Kharoshthi Punctuation Dot", '.'),
-    ('٠', "Arabic-Indic Digit Zero", '.'),
-    ('۰', "Extended Arabic-Indic Digit Zero", '.'),
-    ('ꓸ', "Lisu Letter Tone Mya Ti", '.'),
-    ('·', "Middle Dot", '.'),
-    ('・', "Katakana Middle Dot", '.'),
-    ('・', "Halfwidth Katakana Middle Dot", '.'),
-    ('᛫', "Runic Single Punctuation", '.'),
-    ('·', "Greek Ano Teleia", '.'),
-    ('⸱', "Word Separator Middle Dot", '.'),
-    ('𐄁', "Aegean Word Separator Dot", '.'),
-    ('•', "Bullet", '.'),
-    ('‧', "Hyphenation Point", '.'),
-    ('∙', "Bullet Operator", '.'),
-    ('⋅', "Dot Operator", '.'),
-    ('ꞏ', "Latin Letter Sinological Dot", '.'),
-    ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'),
-    ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'),
-    ('.', "Fullwidth Full Stop", '.'),
-    ('。', "Ideographic Full Stop", '.'),
-    ('︒', "Presentation Form For Vertical Ideographic Full Stop", '.'),
-
-    ('՝', "Armenian Comma", '\''),
-    (''', "Fullwidth Apostrophe", '\''),
-    ('‘', "Left Single Quotation Mark", '\''),
-    ('’', "Right Single Quotation Mark", '\''),
-    ('‛', "Single High-Reversed-9 Quotation Mark", '\''),
-    ('′', "Prime", '\''),
-    ('‵', "Reversed Prime", '\''),
-    ('՚', "Armenian Apostrophe", '\''),
-    ('׳', "Hebrew Punctuation Geresh", '\''),
-    ('`', "Grave Accent", '\''),
-    ('`', "Greek Varia", '\''),
-    ('`', "Fullwidth Grave Accent", '\''),
-    ('´', "Acute Accent", '\''),
-    ('΄', "Greek Tonos", '\''),
-    ('´', "Greek Oxia", '\''),
-    ('᾽', "Greek Koronis", '\''),
-    ('᾿', "Greek Psili", '\''),
-    ('῾', "Greek Dasia", '\''),
-    ('ʹ', "Modifier Letter Prime", '\''),
-    ('ʹ', "Greek Numeral Sign", '\''),
-    ('ˈ', "Modifier Letter Vertical Line", '\''),
-    ('ˊ', "Modifier Letter Acute Accent", '\''),
-    ('ˋ', "Modifier Letter Grave Accent", '\''),
-    ('˴', "Modifier Letter Middle Grave Accent", '\''),
-    ('ʻ', "Modifier Letter Turned Comma", '\''),
-    ('ʽ', "Modifier Letter Reversed Comma", '\''),
-    ('ʼ', "Modifier Letter Apostrophe", '\''),
-    ('ʾ', "Modifier Letter Right Half Ring", '\''),
-    ('ꞌ', "Latin Small Letter Saltillo", '\''),
-    ('י', "Hebrew Letter Yod", '\''),
-    ('ߴ', "Nko High Tone Apostrophe", '\''),
-    ('ߵ', "Nko Low Tone Apostrophe", '\''),
-    ('ᑊ', "Canadian Syllabics West-Cree P", '\''),
-    ('ᛌ', "Runic Letter Short-Twig-Sol S", '\''),
-    ('𖽑', "Miao Sign Aspiration", '\''),
-    ('𖽒', "Miao Sign Reformed Voicing", '\''),
-
-    ('᳓', "Vedic Sign Nihshvasa", '"'),
-    ('"', "Fullwidth Quotation Mark", '"'),
-    ('“', "Left Double Quotation Mark", '"'),
-    ('”', "Right Double Quotation Mark", '"'),
-    ('‟', "Double High-Reversed-9 Quotation Mark", '"'),
-    ('″', "Double Prime", '"'),
-    ('‶', "Reversed Double Prime", '"'),
-    ('〃', "Ditto Mark", '"'),
-    ('״', "Hebrew Punctuation Gershayim", '"'),
-    ('˝', "Double Acute Accent", '"'),
-    ('ʺ', "Modifier Letter Double Prime", '"'),
-    ('˶', "Modifier Letter Middle Double Acute Accent", '"'),
-    ('˵', "Modifier Letter Middle Double Grave Accent", '"'),
-    ('ˮ', "Modifier Letter Double Apostrophe", '"'),
-    ('ײ', "Hebrew Ligature Yiddish Double Yod", '"'),
-    ('❞', "Heavy Double Comma Quotation Mark Ornament", '"'),
-    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", '"'),
-
-    ('(', "Fullwidth Left Parenthesis", '('),
-    ('❨', "Medium Left Parenthesis Ornament", '('),
-    ('﴾', "Ornate Left Parenthesis", '('),
-
-    (')', "Fullwidth Right Parenthesis", ')'),
-    ('❩', "Medium Right Parenthesis Ornament", ')'),
-    ('﴿', "Ornate Right Parenthesis", ')'),
-
-    ('[', "Fullwidth Left Square Bracket", '['),
-    ('❲', "Light Left Tortoise Shell Bracket Ornament", '['),
-    ('「', "Left Corner Bracket", '['),
-    ('『', "Left White Corner Bracket", '['),
-    ('【', "Left Black Lenticular Bracket", '['),
-    ('〔', "Left Tortoise Shell Bracket", '['),
-    ('〖', "Left White Lenticular Bracket", '['),
-    ('〘', "Left White Tortoise Shell Bracket", '['),
-    ('〚', "Left White Square Bracket", '['),
-
-    (']', "Fullwidth Right Square Bracket", ']'),
-    ('❳', "Light Right Tortoise Shell Bracket Ornament", ']'),
-    ('」', "Right Corner Bracket", ']'),
-    ('』', "Right White Corner Bracket", ']'),
-    ('】', "Right Black Lenticular Bracket", ']'),
-    ('〕', "Right Tortoise Shell Bracket", ']'),
-    ('〗', "Right White Lenticular Bracket", ']'),
-    ('〙', "Right White Tortoise Shell Bracket", ']'),
-    ('〛', "Right White Square Bracket", ']'),
-
-    ('❴', "Medium Left Curly Bracket Ornament", '{'),
-    ('𝄔', "Musical Symbol Brace", '{'),
-    ('{', "Fullwidth Left Curly Bracket", '{'),
-
-    ('❵', "Medium Right Curly Bracket Ornament", '}'),
-    ('}', "Fullwidth Right Curly Bracket", '}'),
-
-    ('⁎', "Low Asterisk", '*'),
-    ('٭', "Arabic Five Pointed Star", '*'),
-    ('∗', "Asterisk Operator", '*'),
-    ('𐌟', "Old Italic Letter Ess", '*'),
-    ('*', "Fullwidth Asterisk", '*'),
-
-    ('᜵', "Philippine Single Punctuation", '/'),
-    ('⁁', "Caret Insertion Point", '/'),
-    ('∕', "Division Slash", '/'),
-    ('⁄', "Fraction Slash", '/'),
-    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", '/'),
-    ('⟋', "Mathematical Rising Diagonal", '/'),
-    ('⧸', "Big Solidus", '/'),
-    ('𝈺', "Greek Instrumental Notation Symbol-47", '/'),
-    ('㇓', "CJK Stroke Sp", '/'),
-    ('〳', "Vertical Kana Repeat Mark Upper Half", '/'),
-    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", '/'),
-    ('ノ', "Katakana Letter No", '/'),
-    ('丿', "CJK Unified Ideograph-4E3F", '/'),
-    ('⼃', "Kangxi Radical Slash", '/'),
-    ('/', "Fullwidth Solidus", '/'),
-
-    ('\', "Fullwidth Reverse Solidus", '\\'),
-    ('﹨', "Small Reverse Solidus", '\\'),
-    ('∖', "Set Minus", '\\'),
-    ('⟍', "Mathematical Falling Diagonal", '\\'),
-    ('⧵', "Reverse Solidus Operator", '\\'),
-    ('⧹', "Big Reverse Solidus", '\\'),
-    ('⧹', "Greek Vocal Notation Symbol-16", '\\'),
-    ('⧹', "Greek Instrumental Symbol-48", '\\'),
-    ('㇔', "CJK Stroke D", '\\'),
-    ('丶', "CJK Unified Ideograph-4E36", '\\'),
-    ('⼂', "Kangxi Radical Dot", '\\'),
-    ('、', "Ideographic Comma", '\\'),
-    ('ヽ', "Katakana Iteration Mark", '\\'),
-
-    ('ꝸ', "Latin Small Letter Um", '&'),
-    ('&', "Fullwidth Ampersand", '&'),
-
-    ('᛭', "Runic Cross Punctuation", '+'),
-    ('➕', "Heavy Plus Sign", '+'),
-    ('𐊛', "Lycian Letter H", '+'),
-    ('﬩', "Hebrew Letter Alternative Plus Sign", '+'),
-    ('+', "Fullwidth Plus Sign", '+'),
-
-    ('‹', "Single Left-Pointing Angle Quotation Mark", '<'),
-    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", '<'),
-    ('˂', "Modifier Letter Left Arrowhead", '<'),
-    ('𝈶', "Greek Instrumental Symbol-40", '<'),
-    ('ᐸ', "Canadian Syllabics Pa", '<'),
-    ('ᚲ', "Runic Letter Kauna", '<'),
-    ('❬', "Medium Left-Pointing Angle Bracket Ornament", '<'),
-    ('⟨', "Mathematical Left Angle Bracket", '<'),
-    ('〈', "Left-Pointing Angle Bracket", '<'),
-    ('〈', "Left Angle Bracket", '<'),
-    ('㇛', "CJK Stroke Pd", '<'),
-    ('く', "Hiragana Letter Ku", '<'),
-    ('𡿨', "CJK Unified Ideograph-21FE8", '<'),
-    ('《', "Left Double Angle Bracket", '<'),
-    ('<', "Fullwidth Less-Than Sign", '<'),
-
-    ('᐀', "Canadian Syllabics Hyphen", '='),
-    ('⹀', "Double Hyphen", '='),
-    ('゠', "Katakana-Hiragana Double Hyphen", '='),
-    ('꓿', "Lisu Punctuation Full Stop", '='),
-    ('=', "Fullwidth Equals Sign", '='),
-
-    ('›', "Single Right-Pointing Angle Quotation Mark", '>'),
-    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", '>'),
-    ('˃', "Modifier Letter Right Arrowhead", '>'),
-    ('𝈷', "Greek Instrumental Symbol-42", '>'),
-    ('ᐳ', "Canadian Syllabics Po", '>'),
-    ('𖼿', "Miao Letter Archaic Zza", '>'),
-    ('❭', "Medium Right-Pointing Angle Bracket Ornament", '>'),
-    ('⟩', "Mathematical Right Angle Bracket", '>'),
-    ('〉', "Right-Pointing Angle Bracket", '>'),
-    ('〉', "Right Angle Bracket", '>'),
-    ('》', "Right Double Angle Bracket", '>'),
-    ('>', "Fullwidth Greater-Than Sign", '>'),
+pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
+    ('
', "Line Separator", " "),
+    ('
', "Paragraph Separator", " "),
+    (' ', "Ogham Space mark", " "),
+    (' ', "En Quad", " "),
+    (' ', "Em Quad", " "),
+    (' ', "En Space", " "),
+    (' ', "Em Space", " "),
+    (' ', "Three-Per-Em Space", " "),
+    (' ', "Four-Per-Em Space", " "),
+    (' ', "Six-Per-Em Space", " "),
+    (' ', "Punctuation Space", " "),
+    (' ', "Thin Space", " "),
+    (' ', "Hair Space", " "),
+    (' ', "Medium Mathematical Space", " "),
+    (' ', "No-Break Space", " "),
+    (' ', "Figure Space", " "),
+    (' ', "Narrow No-Break Space", " "),
+    (' ', "Ideographic Space", " "),
+
+    ('ߺ', "Nko Lajanyalan", "_"),
+    ('﹍', "Dashed Low Line", "_"),
+    ('﹎', "Centreline Low Line", "_"),
+    ('﹏', "Wavy Low Line", "_"),
+    ('_', "Fullwidth Low Line", "_"),
+
+    ('‐', "Hyphen", "-"),
+    ('‑', "Non-Breaking Hyphen", "-"),
+    ('‒', "Figure Dash", "-"),
+    ('–', "En Dash", "-"),
+    ('—', "Em Dash", "-"),
+    ('﹘', "Small Em Dash", "-"),
+    ('۔', "Arabic Full Stop", "-"),
+    ('⁃', "Hyphen Bullet", "-"),
+    ('˗', "Modifier Letter Minus Sign", "-"),
+    ('−', "Minus Sign", "-"),
+    ('➖', "Heavy Minus Sign", "-"),
+    ('Ⲻ', "Coptic Letter Dialect-P Ni", "-"),
+    ('ー', "Katakana-Hiragana Prolonged Sound Mark", "-"),
+    ('-', "Fullwidth Hyphen-Minus", "-"),
+    ('―', "Horizontal Bar", "-"),
+    ('─', "Box Drawings Light Horizontal", "-"),
+    ('━', "Box Drawings Heavy Horizontal", "-"),
+    ('㇐', "CJK Stroke H", "-"),
+    ('ꟷ', "Latin Epigraphic Letter Sideways I", "-"),
+    ('ᅳ', "Hangul Jungseong Eu", "-"),
+    ('ㅡ', "Hangul Letter Eu", "-"),
+    ('一', "CJK Unified Ideograph-4E00", "-"),
+    ('⼀', "Kangxi Radical One", "-"),
+
+    ('؍', "Arabic Date Separator", ","),
+    ('٫', "Arabic Decimal Separator", ","),
+    ('‚', "Single Low-9 Quotation Mark", ","),
+    ('¸', "Cedilla", ","),
+    ('ꓹ', "Lisu Letter Tone Na Po", ","),
+    (',', "Fullwidth Comma", ","),
+
+    (';', "Greek Question Mark", ";"),
+    (';', "Fullwidth Semicolon", ";"),
+    ('︔', "Presentation Form For Vertical Semicolon", ";"),
+
+    ('ः', "Devanagari Sign Visarga", ":"),
+    ('ઃ', "Gujarati Sign Visarga", ":"),
+    (':', "Fullwidth Colon", ":"),
+    ('։', "Armenian Full Stop", ":"),
+    ('܃', "Syriac Supralinear Colon", ":"),
+    ('܄', "Syriac Sublinear Colon", ":"),
+    ('᛬', "Runic Multiple Punctuation", ":"),
+    ('︰', "Presentation Form For Vertical Two Dot Leader", ":"),
+    ('᠃', "Mongolian Full Stop", ":"),
+    ('᠉', "Mongolian Manchu Full Stop", ":"),
+    ('⁚', "Two Dot Punctuation", ":"),
+    ('׃', "Hebrew Punctuation Sof Pasuq", ":"),
+    ('˸', "Modifier Letter Raised Colon", ":"),
+    ('꞉', "Modifier Letter Colon", ":"),
+    ('∶', "Ratio", ":"),
+    ('ː', "Modifier Letter Triangular Colon", ":"),
+    ('ꓽ', "Lisu Letter Tone Mya Jeu", ":"),
+    ('︓', "Presentation Form For Vertical Colon", ":"),
+
+    ('!', "Fullwidth Exclamation Mark", "!"),
+    ('ǃ', "Latin Letter Retroflex Click", "!"),
+    ('ⵑ', "Tifinagh Letter Tuareg Yang", "!"),
+    ('︕', "Presentation Form For Vertical Exclamation Mark", "!"),
+
+    ('ʔ', "Latin Letter Glottal Stop", "?"),
+    ('Ɂ', "Latin Capital Letter Glottal Stop", "?"),
+    ('ॽ', "Devanagari Letter Glottal Stop", "?"),
+    ('Ꭾ', "Cherokee Letter He", "?"),
+    ('ꛫ', "Bamum Letter Ntuu", "?"),
+    ('?', "Fullwidth Question Mark", "?"),
+    ('︖', "Presentation Form For Vertical Question Mark", "?"),
+
+    ('𝅭', "Musical Symbol Combining Augmentation Dot", "."),
+    ('․', "One Dot Leader", "."),
+    ('܁', "Syriac Supralinear Full Stop", "."),
+    ('܂', "Syriac Sublinear Full Stop", "."),
+    ('꘎', "Vai Full Stop", "."),
+    ('𐩐', "Kharoshthi Punctuation Dot", "."),
+    ('٠', "Arabic-Indic Digit Zero", "."),
+    ('۰', "Extended Arabic-Indic Digit Zero", "."),
+    ('ꓸ', "Lisu Letter Tone Mya Ti", "."),
+    ('·', "Middle Dot", "."),
+    ('・', "Katakana Middle Dot", "."),
+    ('・', "Halfwidth Katakana Middle Dot", "."),
+    ('᛫', "Runic Single Punctuation", "."),
+    ('·', "Greek Ano Teleia", "."),
+    ('⸱', "Word Separator Middle Dot", "."),
+    ('𐄁', "Aegean Word Separator Dot", "."),
+    ('•', "Bullet", "."),
+    ('‧', "Hyphenation Point", "."),
+    ('∙', "Bullet Operator", "."),
+    ('⋅', "Dot Operator", "."),
+    ('ꞏ', "Latin Letter Sinological Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('.', "Fullwidth Full Stop", "."),
+    ('。', "Ideographic Full Stop", "."),
+    ('︒', "Presentation Form For Vertical Ideographic Full Stop", "."),
+
+    ('՝', "Armenian Comma", "\'"),
+    (''', "Fullwidth Apostrophe", "\'"),
+    ('‘', "Left Single Quotation Mark", "\'"),
+    ('’', "Right Single Quotation Mark", "\'"),
+    ('‛', "Single High-Reversed-9 Quotation Mark", "\'"),
+    ('′', "Prime", "\'"),
+    ('‵', "Reversed Prime", "\'"),
+    ('՚', "Armenian Apostrophe", "\'"),
+    ('׳', "Hebrew Punctuation Geresh", "\'"),
+    ('`', "Grave Accent", "\'"),
+    ('`', "Greek Varia", "\'"),
+    ('`', "Fullwidth Grave Accent", "\'"),
+    ('´', "Acute Accent", "\'"),
+    ('΄', "Greek Tonos", "\'"),
+    ('´', "Greek Oxia", "\'"),
+    ('᾽', "Greek Koronis", "\'"),
+    ('᾿', "Greek Psili", "\'"),
+    ('῾', "Greek Dasia", "\'"),
+    ('ʹ', "Modifier Letter Prime", "\'"),
+    ('ʹ', "Greek Numeral Sign", "\'"),
+    ('ˈ', "Modifier Letter Vertical Line", "\'"),
+    ('ˊ', "Modifier Letter Acute Accent", "\'"),
+    ('ˋ', "Modifier Letter Grave Accent", "\'"),
+    ('˴', "Modifier Letter Middle Grave Accent", "\'"),
+    ('ʻ', "Modifier Letter Turned Comma", "\'"),
+    ('ʽ', "Modifier Letter Reversed Comma", "\'"),
+    ('ʼ', "Modifier Letter Apostrophe", "\'"),
+    ('ʾ', "Modifier Letter Right Half Ring", "\'"),
+    ('ꞌ', "Latin Small Letter Saltillo", "\'"),
+    ('י', "Hebrew Letter Yod", "\'"),
+    ('ߴ', "Nko High Tone Apostrophe", "\'"),
+    ('ߵ', "Nko Low Tone Apostrophe", "\'"),
+    ('ᑊ', "Canadian Syllabics West-Cree P", "\'"),
+    ('ᛌ', "Runic Letter Short-Twig-Sol S", "\'"),
+    ('𖽑', "Miao Sign Aspiration", "\'"),
+    ('𖽒', "Miao Sign Reformed Voicing", "\'"),
+
+    ('᳓', "Vedic Sign Nihshvasa", "\""),
+    ('"', "Fullwidth Quotation Mark", "\""),
+    ('“', "Left Double Quotation Mark", "\""),
+    ('”', "Right Double Quotation Mark", "\""),
+    ('‟', "Double High-Reversed-9 Quotation Mark", "\""),
+    ('″', "Double Prime", "\""),
+    ('‶', "Reversed Double Prime", "\""),
+    ('〃', "Ditto Mark", "\""),
+    ('״', "Hebrew Punctuation Gershayim", "\""),
+    ('˝', "Double Acute Accent", "\""),
+    ('ʺ', "Modifier Letter Double Prime", "\""),
+    ('˶', "Modifier Letter Middle Double Acute Accent", "\""),
+    ('˵', "Modifier Letter Middle Double Grave Accent", "\""),
+    ('ˮ', "Modifier Letter Double Apostrophe", "\""),
+    ('ײ', "Hebrew Ligature Yiddish Double Yod", "\""),
+    ('❞', "Heavy Double Comma Quotation Mark Ornament", "\""),
+    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", "\""),
+
+    ('(', "Fullwidth Left Parenthesis", "("),
+    ('❨', "Medium Left Parenthesis Ornament", "("),
+    ('﴾', "Ornate Left Parenthesis", "("),
+
+    (')', "Fullwidth Right Parenthesis", ")"),
+    ('❩', "Medium Right Parenthesis Ornament", ")"),
+    ('﴿', "Ornate Right Parenthesis", ")"),
+
+    ('[', "Fullwidth Left Square Bracket", "["),
+    ('❲', "Light Left Tortoise Shell Bracket Ornament", "["),
+    ('「', "Left Corner Bracket", "["),
+    ('『', "Left White Corner Bracket", "["),
+    ('【', "Left Black Lenticular Bracket", "["),
+    ('〔', "Left Tortoise Shell Bracket", "["),
+    ('〖', "Left White Lenticular Bracket", "["),
+    ('〘', "Left White Tortoise Shell Bracket", "["),
+    ('〚', "Left White Square Bracket", "["),
+
+    (']', "Fullwidth Right Square Bracket", "]"),
+    ('❳', "Light Right Tortoise Shell Bracket Ornament", "]"),
+    ('」', "Right Corner Bracket", "]"),
+    ('』', "Right White Corner Bracket", "]"),
+    ('】', "Right Black Lenticular Bracket", "]"),
+    ('〕', "Right Tortoise Shell Bracket", "]"),
+    ('〗', "Right White Lenticular Bracket", "]"),
+    ('〙', "Right White Tortoise Shell Bracket", "]"),
+    ('〛', "Right White Square Bracket", "]"),
+
+    ('❴', "Medium Left Curly Bracket Ornament", "{"),
+    ('𝄔', "Musical Symbol Brace", "{"),
+    ('{', "Fullwidth Left Curly Bracket", "{"),
+
+    ('❵', "Medium Right Curly Bracket Ornament", "}"),
+    ('}', "Fullwidth Right Curly Bracket", "}"),
+
+    ('⁎', "Low Asterisk", "*"),
+    ('٭', "Arabic Five Pointed Star", "*"),
+    ('∗', "Asterisk Operator", "*"),
+    ('𐌟', "Old Italic Letter Ess", "*"),
+    ('*', "Fullwidth Asterisk", "*"),
+
+    ('᜵', "Philippine Single Punctuation", "/"),
+    ('⁁', "Caret Insertion Point", "/"),
+    ('∕', "Division Slash", "/"),
+    ('⁄', "Fraction Slash", "/"),
+    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", "/"),
+    ('⟋', "Mathematical Rising Diagonal", "/"),
+    ('⧸', "Big Solidus", "/"),
+    ('𝈺', "Greek Instrumental Notation Symbol-47", "/"),
+    ('㇓', "CJK Stroke Sp", "/"),
+    ('〳', "Vertical Kana Repeat Mark Upper Half", "/"),
+    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", "/"),
+    ('ノ', "Katakana Letter No", "/"),
+    ('丿', "CJK Unified Ideograph-4E3F", "/"),
+    ('⼃', "Kangxi Radical Slash", "/"),
+    ('/', "Fullwidth Solidus", "/"),
+
+    ('\', "Fullwidth Reverse Solidus", "\\"),
+    ('﹨', "Small Reverse Solidus", "\\"),
+    ('∖', "Set Minus", "\\"),
+    ('⟍', "Mathematical Falling Diagonal", "\\"),
+    ('⧵', "Reverse Solidus Operator", "\\"),
+    ('⧹', "Big Reverse Solidus", "\\"),
+    ('⧹', "Greek Vocal Notation Symbol-16", "\\"),
+    ('⧹', "Greek Instrumental Symbol-48", "\\"),
+    ('㇔', "CJK Stroke D", "\\"),
+    ('丶', "CJK Unified Ideograph-4E36", "\\"),
+    ('⼂', "Kangxi Radical Dot", "\\"),
+    ('、', "Ideographic Comma", "\\"),
+    ('ヽ', "Katakana Iteration Mark", "\\"),
+
+    ('ꝸ', "Latin Small Letter Um", "&"),
+    ('&', "Fullwidth Ampersand", "&"),
+
+    ('᛭', "Runic Cross Punctuation", "+"),
+    ('➕', "Heavy Plus Sign", "+"),
+    ('𐊛', "Lycian Letter H", "+"),
+    ('﬩', "Hebrew Letter Alternative Plus Sign", "+"),
+    ('+', "Fullwidth Plus Sign", "+"),
+
+    ('‹', "Single Left-Pointing Angle Quotation Mark", "<"),
+    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", "<"),
+    ('˂', "Modifier Letter Left Arrowhead", "<"),
+    ('𝈶', "Greek Instrumental Symbol-40", "<"),
+    ('ᐸ', "Canadian Syllabics Pa", "<"),
+    ('ᚲ', "Runic Letter Kauna", "<"),
+    ('❬', "Medium Left-Pointing Angle Bracket Ornament", "<"),
+    ('⟨', "Mathematical Left Angle Bracket", "<"),
+    ('〈', "Left-Pointing Angle Bracket", "<"),
+    ('〈', "Left Angle Bracket", "<"),
+    ('㇛', "CJK Stroke Pd", "<"),
+    ('く', "Hiragana Letter Ku", "<"),
+    ('𡿨', "CJK Unified Ideograph-21FE8", "<"),
+    ('《', "Left Double Angle Bracket", "<"),
+    ('<', "Fullwidth Less-Than Sign", "<"),
+
+    ('᐀', "Canadian Syllabics Hyphen", "="),
+    ('⹀', "Double Hyphen", "="),
+    ('゠', "Katakana-Hiragana Double Hyphen", "="),
+    ('꓿', "Lisu Punctuation Full Stop", "="),
+    ('=', "Fullwidth Equals Sign", "="),
+
+    ('›', "Single Right-Pointing Angle Quotation Mark", ">"),
+    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", ">"),
+    ('˃', "Modifier Letter Right Arrowhead", ">"),
+    ('𝈷', "Greek Instrumental Symbol-42", ">"),
+    ('ᐳ', "Canadian Syllabics Po", ">"),
+    ('𖼿', "Miao Letter Archaic Zza", ">"),
+    ('❭', "Medium Right-Pointing Angle Bracket Ornament", ">"),
+    ('⟩', "Mathematical Right Angle Bracket", ">"),
+    ('〉', "Right-Pointing Angle Bracket", ">"),
+    ('〉', "Right Angle Bracket", ">"),
+    ('》', "Right Double Angle Bracket", ">"),
+    ('>', "Fullwidth Greater-Than Sign", ">"),
+    ('⩵', "Two Consecutive Equals Signs", "==")
 ];
 
 // FIXME: the lexer could be used to turn unicode homoglyphs into their ASCII versions, instead of
 // keeping the substitution token in this table. Ideally, this should be inside `rustc_lexer`.
 // However, we should first remove compound tokens like `<<` from `rustc_lexer`, and then add
 // fancier error recovery to it, as there will be less overall work to do this way.
-const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
-    (' ', "Space", None),
-    ('_', "Underscore", Some(token::Ident(kw::Underscore, false))),
-    ('-', "Minus/Hyphen", Some(token::BinOp(token::Minus))),
-    (',', "Comma", Some(token::Comma)),
-    (';', "Semicolon", Some(token::Semi)),
-    (':', "Colon", Some(token::Colon)),
-    ('!', "Exclamation Mark", Some(token::Not)),
-    ('?', "Question Mark", Some(token::Question)),
-    ('.', "Period", Some(token::Dot)),
-    ('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
-    (')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
-    ('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
-    (']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
-    ('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
-    ('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
-    ('*', "Asterisk", Some(token::BinOp(token::Star))),
-    ('/', "Slash", Some(token::BinOp(token::Slash))),
-    ('\\', "Backslash", None),
-    ('&', "Ampersand", Some(token::BinOp(token::And))),
-    ('+', "Plus Sign", Some(token::BinOp(token::Plus))),
-    ('<', "Less-Than Sign", Some(token::Lt)),
-    ('=', "Equals Sign", Some(token::Eq)),
-    ('>', "Greater-Than Sign", Some(token::Gt)),
+const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
+    (" ", "Space", None),
+    ("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
+    ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
+    (",", "Comma", Some(token::Comma)),
+    (";", "Semicolon", Some(token::Semi)),
+    (":", "Colon", Some(token::Colon)),
+    ("!", "Exclamation Mark", Some(token::Not)),
+    ("?", "Question Mark", Some(token::Question)),
+    (".", "Period", Some(token::Dot)),
+    ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
+    (")", "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
+    ("[", "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
+    ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
+    ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
+    ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
+    ("*", "Asterisk", Some(token::BinOp(token::Star))),
+    ("/", "Slash", Some(token::BinOp(token::Slash))),
+    ("\\", "Backslash", None),
+    ("&", "Ampersand", Some(token::BinOp(token::And))),
+    ("+", "Plus Sign", Some(token::BinOp(token::Plus))),
+    ("<", "Less-Than Sign", Some(token::Lt)),
+    ("=", "Equals Sign", Some(token::Eq)),
+    ("==", "Double Equals Sign", Some(token::EqEq)),
+    (">", "Greater-Than Sign", Some(token::Gt)),
     // FIXME: Literals are already lexed by this point, so we can't recover gracefully just by
     // spitting the correct token out.
-    ('\'', "Single Quote", None),
-    ('"', "Quotation Mark", None),
+    ("\'", "Single Quote", None),
+    ("\"", "Quotation Mark", None),
 ];
 
-pub(super) fn check_for_substitution<'a>(
-    reader: &StringReader<'a>,
+pub(super) fn check_for_substitution(
+    reader: &StringReader<'_, '_>,
     pos: BytePos,
     ch: char,
-    err: &mut Diagnostic,
-) -> Option<token::TokenKind> {
-    let &(_u_char, u_name, ascii_char) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?;
+    count: usize,
+) -> (Option<token::TokenKind>, Option<TokenSubstitution>) {
+    let Some(&(_, u_name, ascii_str)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else {
+        return (None, None);
+    };
 
-    let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));
+    let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
 
-    let Some((_ascii_char, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) else {
-        let msg = format!("substitution character not found for '{}'", ch);
-        reader.sess.span_diagnostic.span_bug_no_panic(span, &msg);
-        return None;
+    let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
+        let msg = format!("substitution character not found for '{ch}'");
+        reader.sess.dcx.span_bug(span, msg);
     };
 
     // special help suggestion for "directed" double quotes
-    if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') {
-        let msg = format!(
-            "Unicode characters '“' (Left Double Quotation Mark) and \
-             '”' (Right Double Quotation Mark) look like '{}' ({}), but are not",
-            ascii_char, ascii_name
-        );
-        err.span_suggestion(
-            Span::with_root_ctxt(
-                pos,
-                pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
-            ),
-            &msg,
-            format!("\"{}\"", s),
-            Applicability::MaybeIncorrect,
+    let sugg = if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') {
+        let span = Span::with_root_ctxt(
+            pos,
+            pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
         );
+        Some(TokenSubstitution::DirectedQuotes {
+            span,
+            suggestion: format!("\"{s}\""),
+            ascii_str,
+            ascii_name,
+        })
     } else {
-        let msg = format!(
-            "Unicode character '{}' ({}) looks like '{}' ({}), but it is not",
-            ch, u_name, ascii_char, ascii_name
-        );
-        err.span_suggestion(span, &msg, ascii_char, Applicability::MaybeIncorrect);
-    }
-    token.clone()
+        let suggestion = ascii_str.to_string().repeat(count);
+        Some(TokenSubstitution::Other {
+            span,
+            suggestion,
+            ch: ch.to_string(),
+            u_name,
+            ascii_str,
+            ascii_name,
+        })
+    };
+    (token.clone(), sugg)
 }
 
 /// Extracts a string if found at the current position with the given delimiters
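// Editor's sketch (not part of this commit): `UNICODE_ARRAY` now maps each confusable
// character to an ASCII *string* rather than a single char, which allows multi-character
// substitutions such as "==" for '⩵', and `check_for_substitution` repeats the replacement
// `count` times. A standalone version of that lookup with a tiny stand-in table; the
// helper name `ascii_substitution` is hypothetical.
const CONFUSABLES: &[(char, &str, &str)] = &[
    ('‘', "Left Single Quotation Mark", "'"),
    ('⩵', "Two Consecutive Equals Signs", "=="),
];

fn ascii_substitution(ch: char, count: usize) -> Option<String> {
    // Find the confusable entry for `ch`, if any, and repeat its ASCII replacement.
    let &(_, _name, ascii_str) = CONFUSABLES.iter().find(|&&(c, _, _)| c == ch)?;
    Some(ascii_str.repeat(count))
}

fn main() {
    assert_eq!(ascii_substitution('⩵', 1).as_deref(), Some("=="));
    assert_eq!(ascii_substitution('‘', 2).as_deref(), Some("''"));
    assert_eq!(ascii_substitution('=', 1), None); // plain ASCII is not in the table
}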
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index d56e3773dc7..5bd8bb72bd6 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -1,13 +1,13 @@
 //! The main parser interface.
 
+#![allow(internal_features)]
+#![allow(rustc::diagnostic_outside_of_impl)]
+#![allow(rustc::untranslatable_diagnostic)]
 #![feature(array_windows)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
-#![cfg_attr(bootstrap, feature(let_chains))]
-#![feature(let_else)]
-#![feature(never_type)]
-#![feature(rustc_attrs)]
-#![recursion_limit = "256"]
+#![feature(iter_intersperse)]
+#![feature(let_chains)]
 
 #[macro_use]
 extern crate tracing;
@@ -15,11 +15,10 @@ extern crate tracing;
 use rustc_ast as ast;
 use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
-use rustc_ast::Attribute;
-use rustc_ast::{AttrItem, MetaItem};
+use rustc_ast::{AttrItem, Attribute, MetaItem};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
-use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
+use rustc_errors::{DiagnosticBuilder, FatalError, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::{FileName, SourceFile, Span};
 
@@ -29,25 +28,29 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
-use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
+use parser::{make_unclosed_delims_error, Parser};
 pub mod lexer;
 pub mod validate_attr;
 
+mod errors;
+
+rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
+
 // A bunch of utility functions of the form `parse_<thing>_from_<source>`
 // where <thing> includes crate, expr, item, stmt, tts, and one that
 // uses a HOF to parse anything, and <source> includes file and
 // `source_str`.
 
-/// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
+/// A variant of 'panictry!' that works on a `Vec<DiagnosticBuilder>` instead of a single
+/// `DiagnosticBuilder`.
 macro_rules! panictry_buffer {
-    ($handler:expr, $e:expr) => {{
-        use rustc_errors::FatalError;
+    ($e:expr) => {{
         use std::result::Result::{Err, Ok};
         match $e {
             Ok(e) => e,
             Err(errs) => {
-                for mut e in errs {
-                    $handler.emit_diagnostic(&mut e);
+                for e in errs {
+                    e.emit();
                 }
                 FatalError.raise()
             }
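// Editor's sketch (not part of this commit): callers of the `maybe_*` constructors now get
// back a `Vec<DiagnosticBuilder<'_>>` and must emit or cancel every builder themselves,
// because an unconsumed `DiagnosticBuilder` panics when dropped. A hypothetical caller
// mirroring what `panictry_buffer!` expands to; it only compiles inside the rustc tree.
fn parser_or_abort<'a>(sess: &'a ParseSess, name: FileName, source: String) -> Parser<'a> {
    match maybe_new_parser_from_source_str(sess, name, source) {
        Ok(parser) => parser,
        Err(errs) => {
            for err in errs {
                err.emit(); // consume each buffered lexer error
            }
            FatalError.raise()
        }
    }
}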
@@ -63,7 +66,7 @@ pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'
 pub fn parse_crate_attrs_from_file<'a>(
     input: &Path,
     sess: &'a ParseSess,
-) -> PResult<'a, Vec<ast::Attribute>> {
+) -> PResult<'a, ast::AttrVec> {
     let mut parser = new_parser_from_file(sess, input, None);
     parser.parse_inner_attributes()
 }
@@ -80,7 +83,7 @@ pub fn parse_crate_attrs_from_source_str(
     name: FileName,
     source: String,
     sess: &ParseSess,
-) -> PResult<'_, Vec<ast::Attribute>> {
+) -> PResult<'_, ast::AttrVec> {
     new_parser_from_source_str(sess, name, source).parse_inner_attributes()
 }
 
@@ -90,48 +93,49 @@ pub fn parse_stream_from_source_str(
     sess: &ParseSess,
     override_span: Option<Span>,
 ) -> TokenStream {
-    let (stream, mut errors) =
-        source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
-    emit_unclosed_delims(&mut errors, &sess);
-    stream
+    source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Creates a new parser from a source string.
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
-    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
+    panictry_buffer!(maybe_new_parser_from_source_str(sess, name, source))
 }
 
 /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
-/// token stream.
+/// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur
+/// when they are dropped.
 pub fn maybe_new_parser_from_source_str(
     sess: &ParseSess,
     name: FileName,
     source: String,
-) -> Result<Parser<'_>, Vec<Diagnostic>> {
+) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
     maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
 }
 
-/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
-/// If a span is given, that is used on an error as the source of the problem.
+/// Creates a new parser, aborting if the file doesn't exist. If a span is given, that is used on
+/// an error as the source of the problem.
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
-    source_file_to_parser(sess, file_to_source_file(sess, path, sp))
-}
+    let source_file = sess.source_map().load_file(path).unwrap_or_else(|e| {
+        let msg = format!("couldn't read {}: {}", path.display(), e);
+        let mut err = sess.dcx.struct_fatal(msg);
+        if let Some(sp) = sp {
+            err.span(sp);
+        }
+        err.emit();
+    });
 
-/// Given a `source_file` and config, returns a parser.
-fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
-    panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file))
+    panictry_buffer!(maybe_source_file_to_parser(sess, source_file))
 }
 
-/// Given a `source_file` and config, return a parser. Returns any buffered errors from lexing the
-/// initial token stream.
+/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
+/// the initial token stream.
 fn maybe_source_file_to_parser(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
-) -> Result<Parser<'_>, Vec<Diagnostic>> {
-    let end_pos = source_file.end_pos;
-    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
+    let end_pos = source_file.end_position();
+    let stream = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);
-    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
     }
@@ -141,76 +145,30 @@ fn maybe_source_file_to_parser(
 
 // Base abstractions
 
-/// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's source_map and return the new source_file or
-/// error when a file can't be read.
-fn try_file_to_source_file(
-    sess: &ParseSess,
-    path: &Path,
-    spanopt: Option<Span>,
-) -> Result<Lrc<SourceFile>, Diagnostic> {
-    sess.source_map().load_file(path).map_err(|e| {
-        let msg = format!("couldn't read {}: {}", path.display(), e);
-        let mut diag = Diagnostic::new(Level::Fatal, &msg);
-        if let Some(sp) = spanopt {
-            diag.set_span(sp);
-        }
-        diag
-    })
-}
-
-/// Given a session and a path and an optional span (for error reporting),
-/// adds the path to the session's `source_map` and returns the new `source_file`.
-fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> {
-    match try_file_to_source_file(sess, path, spanopt) {
-        Ok(source_file) => source_file,
-        Err(mut d) => {
-            sess.span_diagnostic.emit_diagnostic(&mut d);
-            FatalError.raise();
-        }
-    }
-}
-
 /// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
-    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
+) -> TokenStream {
+    panictry_buffer!(maybe_file_to_stream(sess, source_file, override_span))
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
 /// parsing the token stream.
-pub fn maybe_file_to_stream(
-    sess: &ParseSess,
+fn maybe_file_to_stream<'sess>(
+    sess: &'sess ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
-        sess.span_diagnostic.bug(&format!(
+        sess.dcx.bug(format!(
             "cannot lex `source_file` without source: {}",
             sess.source_map().filename_for_diagnostics(&source_file.name)
         ));
     });
 
-    let (token_trees, unmatched_braces) =
-        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
-
-    match token_trees {
-        Ok(stream) => Ok((stream, unmatched_braces)),
-        Err(err) => {
-            let mut buffer = Vec::with_capacity(1);
-            err.buffer(&mut buffer);
-            // Not using `emit_unclosed_delims` to use `db.buffer`
-            for unmatched in unmatched_braces {
-                if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
-                    err.buffer(&mut buffer);
-                }
-            }
-            Err(buffer)
-        }
-    }
+    lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span)
 }
 
 /// Given a stream and the `ParseSess`, produces a parser.
@@ -219,7 +177,7 @@ pub fn stream_to_parser<'a>(
     stream: TokenStream,
     subparser_name: Option<&'static str>,
 ) -> Parser<'a> {
-    Parser::new(sess, stream, false, subparser_name)
+    Parser::new(sess, stream, subparser_name)
 }
 
 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
@@ -229,7 +187,7 @@ pub fn parse_in<'a, T>(
     name: &'static str,
     mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
 ) -> PResult<'a, T> {
-    let mut parser = Parser::new(sess, tts, false, Some(name));
+    let mut parser = Parser::new(sess, tts, Some(name));
     let result = f(&mut parser)?;
     if parser.token != token::Eof {
         parser.unexpected()?;
@@ -254,14 +212,15 @@ pub fn parse_cfg_attr(
     parse_sess: &ParseSess,
 ) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
     match attr.get_normal_item().args {
-        ast::MacArgs::Delimited(dspan, delim, ref tts) if !tts.is_empty() => {
-            let msg = "wrong `cfg_attr` delimiters";
-            crate::validate_attr::check_meta_bad_delim(parse_sess, dspan, delim, msg);
-            match parse_in(parse_sess, tts.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
+        ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
+            if !tokens.is_empty() =>
+        {
+            crate::validate_attr::check_cfg_attr_bad_delim(parse_sess, dspan, delim);
+            match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
                 Ok(r) => return Some(r),
-                Err(mut e) => {
-                    e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
-                        .note(CFG_ATTR_NOTE_REF)
+                Err(e) => {
+                    e.with_help(format!("the valid syntax is `{CFG_ATTR_GRAMMAR_HELP}`"))
+                        .with_note(CFG_ATTR_NOTE_REF)
                         .emit();
                 }
             }
@@ -277,15 +236,5 @@ const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
     #the-cfg_attr-attribute>";
 
 fn error_malformed_cfg_attr_missing(span: Span, parse_sess: &ParseSess) {
-    parse_sess
-        .span_diagnostic
-        .struct_span_err(span, "malformed `cfg_attr` attribute input")
-        .span_suggestion(
-            span,
-            "missing condition and attribute",
-            CFG_ATTR_GRAMMAR_HELP,
-            Applicability::HasPlaceholders,
-        )
-        .note(CFG_ATTR_NOTE_REF)
-        .emit();
+    parse_sess.dcx.emit_err(errors::MalformedCfgAttr { span, sugg: CFG_ATTR_GRAMMAR_HELP });
 }
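
A note on the buffered-error signatures above: the `maybe_*` entry points now return `Vec<DiagnosticBuilder<'_>>` rather than `Vec<Diagnostic>`, so callers can emit the buffered lexer errors directly. Below is a minimal sketch of a hypothetical caller, assuming the crate's public `maybe_new_parser_from_source_str` and the usual `rustc_span`/`rustc_session` imports; the helper name `parse_or_report` is invented for illustration and is not part of this change.

use rustc_parse::{maybe_new_parser_from_source_str, parser::Parser};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;

// Hypothetical caller: parse a source string, emitting any buffered lexer
// errors through the session's diagnostic context and returning `None` on failure.
fn parse_or_report(sess: &ParseSess, source: String) -> Option<Parser<'_>> {
    match maybe_new_parser_from_source_str(sess, FileName::anon_source_code(&source), source) {
        Ok(parser) => Some(parser),
        Err(errs) => {
            for mut err in errs {
                err.emit(); // each builder is already tied to the session's `DiagCtxt`
            }
            None
        }
    }
}
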
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index acdbddf4099..98e062dd784 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,29 +1,31 @@
+use crate::errors::{
+    InvalidMetaItem, InvalidMetaItemSuggQuoteIdent, InvalidMetaItemUnquotedIdent,
+    SuffixedLiteralInAttribute,
+};
+use crate::fluent_generated as fluent;
+
 use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_ast_pretty::pprust;
-use rustc_errors::{error_code, Diagnostic, PResult};
+use rustc_errors::{codes::*, Diagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
-use std::convert::TryInto;
-
+use thin_vec::ThinVec;
 use tracing::debug;
 
 // Public for rustfmt usage
 #[derive(Debug)]
-pub enum InnerAttrPolicy<'a> {
+pub enum InnerAttrPolicy {
     Permitted,
-    Forbidden { reason: &'a str, saw_doc_comment: bool, prev_outer_attr_sp: Option<Span> },
+    Forbidden(Option<InnerAttrForbiddenReason>),
 }
 
-const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \
-                                                     permitted in this context";
-
-pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPolicy::Forbidden {
-    reason: DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG,
-    saw_doc_comment: false,
-    prev_outer_attr_sp: None,
-};
+#[derive(Clone, Copy, Debug)]
+pub enum InnerAttrForbiddenReason {
+    InCodeBlock,
+    AfterOuterDocComment { prev_doc_comment_span: Span },
+    AfterOuterAttribute { prev_outer_attr_sp: Span },
+}
 
 enum OuterAttributeType {
     DocComment,
@@ -34,35 +36,32 @@ enum OuterAttributeType {
 impl<'a> Parser<'a> {
     /// Parses attributes that appear before an item.
     pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
-        let mut outer_attrs: Vec<ast::Attribute> = Vec::new();
+        let mut outer_attrs = ast::AttrVec::new();
         let mut just_parsed_doc_comment = false;
-        let start_pos = self.token_cursor.num_next_calls;
+        let start_pos = self.num_bump_calls;
         loop {
             let attr = if self.check(&token::Pound) {
                 let prev_outer_attr_sp = outer_attrs.last().map(|attr| attr.span);
 
                 let inner_error_reason = if just_parsed_doc_comment {
-                    "an inner attribute is not permitted following an outer doc comment"
-                } else if prev_outer_attr_sp.is_some() {
-                    "an inner attribute is not permitted following an outer attribute"
+                    Some(InnerAttrForbiddenReason::AfterOuterDocComment {
+                        prev_doc_comment_span: prev_outer_attr_sp.unwrap(),
+                    })
                 } else {
-                    DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
-                };
-                let inner_parse_policy = InnerAttrPolicy::Forbidden {
-                    reason: inner_error_reason,
-                    saw_doc_comment: just_parsed_doc_comment,
-                    prev_outer_attr_sp,
+                    prev_outer_attr_sp.map(|prev_outer_attr_sp| {
+                        InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }
+                    })
                 };
+                let inner_parse_policy = InnerAttrPolicy::Forbidden(inner_error_reason);
                 just_parsed_doc_comment = false;
                 Some(self.parse_attribute(inner_parse_policy)?)
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
                 if attr_style != ast::AttrStyle::Outer {
                     let span = self.token.span;
-                    let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
-                        span,
-                        "expected outer doc comment",
-                        error_code!(E0753),
-                    );
+                    let mut err = self
+                        .dcx()
+                        .struct_span_err(span, fluent::parse_inner_doc_comment_not_permitted);
+                    err.code(E0753);
                     if let Some(replacement_span) = self.annotate_following_item_if_applicable(
                         &mut err,
                         span,
@@ -71,13 +70,10 @@ impl<'a> Parser<'a> {
                             token::CommentKind::Block => OuterAttributeType::DocBlockComment,
                         },
                     ) {
-                        err.note(
-                            "inner doc comments like this (starting with `//!` or `/*!`) can \
-                            only appear before items",
-                        );
+                        err.note(fluent::parse_note);
                         err.span_suggestion_verbose(
                             replacement_span,
-                            "you might have meant to write a regular comment",
+                            fluent::parse_suggestion,
                             "",
                             rustc_errors::Applicability::MachineApplicable,
                         );
@@ -89,6 +85,7 @@ impl<'a> Parser<'a> {
                 // Always make an outer attribute - this allows us to recover from a misplaced
                 // inner attribute.
                 Some(attr::mk_doc_comment(
+                    &self.sess.attr_id_generator,
                     comment_kind,
                     ast::AttrStyle::Outer,
                     data,
@@ -106,7 +103,7 @@ impl<'a> Parser<'a> {
                 break;
             }
         }
-        Ok(AttrWrapper::new(outer_attrs.into(), start_pos))
+        Ok(AttrWrapper::new(outer_attrs, start_pos))
     }
 
     /// Matches `attribute = # ! [ meta_item ]`.
@@ -114,7 +111,7 @@ impl<'a> Parser<'a> {
     // Public for rustfmt usage.
     pub fn parse_attribute(
         &mut self,
-        inner_parse_policy: InnerAttrPolicy<'_>,
+        inner_parse_policy: InnerAttrPolicy,
     ) -> PResult<'a, ast::Attribute> {
         debug!(
             "parse_attribute: inner_parse_policy={:?} self.token={:?}",
@@ -123,29 +120,22 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         // Attributes can't have attributes of their own [Editor's note: not with that attitude]
         self.collect_tokens_no_attrs(|this| {
-            if this.eat(&token::Pound) {
-                let style = if this.eat(&token::Not) {
-                    ast::AttrStyle::Inner
-                } else {
-                    ast::AttrStyle::Outer
-                };
+            assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position");
 
-                this.expect(&token::OpenDelim(Delimiter::Bracket))?;
-                let item = this.parse_attr_item(false)?;
-                this.expect(&token::CloseDelim(Delimiter::Bracket))?;
-                let attr_sp = lo.to(this.prev_token.span);
+            let style =
+                if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
 
-                // Emit error if inner attribute is encountered and forbidden.
-                if style == ast::AttrStyle::Inner {
-                    this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
-                }
+            this.expect(&token::OpenDelim(Delimiter::Bracket))?;
+            let item = this.parse_attr_item(false)?;
+            this.expect(&token::CloseDelim(Delimiter::Bracket))?;
+            let attr_sp = lo.to(this.prev_token.span);
 
-                Ok(attr::mk_attr_from_item(item, None, style, attr_sp))
-            } else {
-                let token_str = pprust::token_to_string(&this.token);
-                let msg = &format!("expected `#`, found `{token_str}`");
-                Err(this.struct_span_err(this.token.span, msg))
+            // Emit error if inner attribute is encountered and forbidden.
+            if style == ast::AttrStyle::Inner {
+                this.error_on_forbidden_inner_attr(attr_sp, inner_parse_policy);
             }
+
+            Ok(attr::mk_attr_from_item(&self.sess.attr_id_generator, item, None, style, attr_sp))
         })
     }
 
@@ -185,21 +175,12 @@ impl<'a> Parser<'a> {
             ForceCollect::No,
         ) {
             Ok(Some(item)) => {
-                let attr_name = match attr_type {
-                    OuterAttributeType::Attribute => "attribute",
-                    _ => "doc comment",
-                };
-                err.span_label(
-                    item.span,
-                    &format!("the inner {} doesn't annotate this {}", attr_name, item.kind.descr()),
-                );
+                // FIXME(#100717)
+                err.arg("item", item.kind.descr());
+                err.span_label(item.span, fluent::parse_label_does_not_annotate_this);
                 err.span_suggestion_verbose(
                     replacement_span,
-                    &format!(
-                        "to annotate the {}, change the {} from inner to outer style",
-                        item.kind.descr(),
-                        attr_name
-                    ),
+                    fluent::parse_sugg_change_inner_to_outer,
                     match attr_type {
                         OuterAttributeType::Attribute => "",
                         OuterAttributeType::DocBlockComment => "*",
@@ -217,22 +198,35 @@ impl<'a> Parser<'a> {
         Some(replacement_span)
     }
 
-    pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy<'_>) {
-        if let InnerAttrPolicy::Forbidden { reason, saw_doc_comment, prev_outer_attr_sp } = policy {
-            let prev_outer_attr_note =
-                if saw_doc_comment { "previous doc comment" } else { "previous outer attribute" };
-
-            let mut diag = self.struct_span_err(attr_sp, reason);
-
-            if let Some(prev_outer_attr_sp) = prev_outer_attr_sp {
-                diag.span_label(attr_sp, "not permitted following an outer attribute")
-                    .span_label(prev_outer_attr_sp, prev_outer_attr_note);
-            }
+    pub(super) fn error_on_forbidden_inner_attr(&self, attr_sp: Span, policy: InnerAttrPolicy) {
+        if let InnerAttrPolicy::Forbidden(reason) = policy {
+            let mut diag = match reason.as_ref().copied() {
+                Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
+                    self.dcx()
+                        .struct_span_err(
+                            attr_sp,
+                            fluent::parse_inner_attr_not_permitted_after_outer_doc_comment,
+                        )
+                        .with_span_label(attr_sp, fluent::parse_label_attr)
+                        .with_span_label(
+                            prev_doc_comment_span,
+                            fluent::parse_label_prev_doc_comment,
+                        )
+                }
+                Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => self
+                    .dcx()
+                    .struct_span_err(
+                        attr_sp,
+                        fluent::parse_inner_attr_not_permitted_after_outer_attr,
+                    )
+                    .with_span_label(attr_sp, fluent::parse_label_attr)
+                    .with_span_label(prev_outer_attr_sp, fluent::parse_label_prev_attr),
+                Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
+                    self.dcx().struct_span_err(attr_sp, fluent::parse_inner_attr_not_permitted)
+                }
+            };
 
-            diag.note(
-                "inner attributes, like `#![no_std]`, annotate the item enclosing them, and \
-                are usually found at the beginning of source files",
-            );
+            diag.note(fluent::parse_inner_attr_explanation);
             if self
                 .annotate_following_item_if_applicable(
                     &mut diag,
@@ -241,7 +235,7 @@ impl<'a> Parser<'a> {
                 )
                 .is_some()
             {
-                diag.note("outer attributes, like `#[test]`, annotate the item following them");
+                diag.note(fluent::parse_outer_attr_explanation);
             };
             diag.emit();
         }
@@ -257,9 +251,9 @@ impl<'a> Parser<'a> {
     ///     PATH `=` UNSUFFIXED_LIT
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
-        let item = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
+        let item = match &self.token.kind {
+            token::Interpolated(nt) => match &nt.0 {
+                Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
                 _ => None,
             },
             _ => None,
@@ -283,17 +277,23 @@ impl<'a> Parser<'a> {
     /// terminated by a semicolon.
     ///
     /// Matches `inner_attrs*`.
-    pub(crate) fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
-        let mut attrs: Vec<ast::Attribute> = vec![];
+    pub(crate) fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
+        let mut attrs = ast::AttrVec::new();
         loop {
-            let start_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
+            let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
             // Only try to parse if it is an inner attribute (has `!`).
             let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
                 if attr_style == ast::AttrStyle::Inner {
                     self.bump();
-                    Some(attr::mk_doc_comment(comment_kind, attr_style, data, self.prev_token.span))
+                    Some(attr::mk_doc_comment(
+                        &self.sess.attr_id_generator,
+                        comment_kind,
+                        attr_style,
+                        data,
+                        self.prev_token.span,
+                    ))
                 } else {
                     None
                 }
@@ -301,11 +301,11 @@ impl<'a> Parser<'a> {
                 None
             };
             if let Some(attr) = attr {
-                let end_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
+                let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
                 // If we are currently capturing tokens, mark the location of this inner attribute.
-                // If capturing ends up creating a `LazyTokenStream`, we will include
+                // If capturing ends up creating a `LazyAttrTokenStream`, we will include
                 // this replace range with it, removing the inner attribute from the final
-                // `AttrAnnotatedTokenStream`. Inner attributes are stored in the parsed AST note.
+                // `AttrTokenStream`. Inner attributes are stored in the parsed AST node.
                 // During macro expansion, they are selectively inserted back into the
                 // token stream (the first inner attribute is removed each time we invoke the
                 // corresponding macro).
@@ -321,17 +321,13 @@ impl<'a> Parser<'a> {
         Ok(attrs)
     }
 
-    pub(crate) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
-        let lit = self.parse_lit()?;
-        debug!("checking if {:?} is unusuffixed", lit);
+    // Note: must be unsuffixed.
+    pub(crate) fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'a, ast::MetaItemLit> {
+        let lit = self.parse_meta_item_lit()?;
+        debug!("checking if {:?} is unsuffixed", lit);
 
         if !lit.kind.is_unsuffixed() {
-            self.struct_span_err(lit.span, "suffixed literals are not allowed in attributes")
-                .help(
-                    "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
-                    use an unsuffixed version (`1`, `1.0`, etc.)",
-                )
-                .emit();
+            self.dcx().emit_err(SuffixedLiteralInAttribute { span: lit.span });
         }
 
         Ok(lit)
@@ -357,9 +353,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Matches `COMMASEP(meta_item_inner)`.
-    pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
+    pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMetaItem>> {
         // Presumably, the majority of the time there will only be one attr.
-        let mut nmis = Vec::with_capacity(1);
+        let mut nmis = ThinVec::with_capacity(1);
         while self.token.kind != token::Eof {
             nmis.push(self.parse_meta_item_inner()?);
             if !self.eat(&token::Comma) {
@@ -375,9 +371,9 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     /// ```
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
-        let nt_meta = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtMeta(ref e) => Some(e.clone()),
+        let nt_meta = match &self.token.kind {
+            token::Interpolated(nt) => match &nt.0 {
+                token::NtMeta(e) => Some(e.clone()),
                 _ => None,
             },
             _ => None,
@@ -402,7 +398,7 @@ impl<'a> Parser<'a> {
 
     pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
         Ok(if self.eat(&token::Eq) {
-            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+            ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             // Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
             let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
@@ -414,8 +410,8 @@ impl<'a> Parser<'a> {
 
     /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
     fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
-        match self.parse_unsuffixed_lit() {
-            Ok(lit) => return Ok(ast::NestedMetaItem::Literal(lit)),
+        match self.parse_unsuffixed_meta_item_lit() {
+            Ok(lit) => return Ok(ast::NestedMetaItem::Lit(lit)),
             Err(err) => err.cancel(),
         }
 
@@ -424,21 +420,35 @@ impl<'a> Parser<'a> {
             Err(err) => err.cancel(),
         }
 
-        let found = pprust::token_to_string(&self.token);
-        let msg = format!("expected unsuffixed literal or identifier, found `{found}`");
-        Err(self.struct_span_err(self.token.span, &msg))
+        let token = self.token.clone();
+
+        // Check for unquoted idents in meta items, e.g.: #[cfg(key = foo)]
+        // `from_expansion()` ensures we don't suggest for cases such as
+        // `#[cfg(feature = $expr)]` in macros
+        if self.prev_token == token::Eq && !self.token.span.from_expansion() {
+            let before = self.token.span.shrink_to_lo();
+            while matches!(self.token.kind, token::Ident(..)) {
+                self.bump();
+            }
+            let after = self.prev_token.span.shrink_to_hi();
+            let sugg = InvalidMetaItemSuggQuoteIdent { before, after };
+            return Err(self.dcx().create_err(InvalidMetaItemUnquotedIdent {
+                span: token.span,
+                token,
+                sugg,
+            }));
+        }
+
+        Err(self.dcx().create_err(InvalidMetaItem { span: token.span, token }))
     }
 }
 
-pub fn maybe_needs_tokens(attrs: &[ast::Attribute]) -> bool {
-    // One of the attributes may either itself be a macro,
-    // or expand to macro attributes (`cfg_attr`).
-    attrs.iter().any(|attr| {
-        if attr.is_doc_comment() {
-            return false;
-        }
-        attr.ident().map_or(true, |ident| {
-            ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
-        })
+/// The attributes are complete if every attribute is either a doc comment or a builtin attribute other than `cfg_attr`.
+pub fn is_complete(attrs: &[ast::Attribute]) -> bool {
+    attrs.iter().all(|attr| {
+        attr.is_doc_comment()
+            || attr.ident().is_some_and(|ident| {
+                ident.name != sym::cfg_attr && rustc_feature::is_builtin_attr_name(ident.name)
+            })
     })
 }
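
To make the renamed `is_complete` predicate concrete: token collection can be skipped only when every attribute is a doc comment or a builtin attribute other than `cfg_attr`, i.e. nothing that is, or can expand to, a macro attribute. The following is a standalone sketch of the same check on a simplified model; the `Attr` enum and the `is_builtin` list are invented stand-ins, not the rustc types.

#[derive(Clone, Copy)]
enum Attr {
    DocComment,
    Named(&'static str),
}

// Stand-in for `rustc_feature::is_builtin_attr_name`.
fn is_builtin(name: &str) -> bool {
    matches!(name, "inline" | "derive" | "cfg" | "cfg_attr" | "allow")
}

// Mirrors the shape of `is_complete` above: true iff no attribute could be a
// macro attribute or expand to one (`cfg_attr`), so captured tokens are never needed.
fn is_complete(attrs: &[Attr]) -> bool {
    attrs.iter().all(|attr| match *attr {
        Attr::DocComment => true,
        Attr::Named(name) => name != "cfg_attr" && is_builtin(name),
    })
}

fn main() {
    assert!(is_complete(&[Attr::DocComment, Attr::Named("inline")]));
    assert!(!is_complete(&[Attr::Named("cfg_attr")])); // may expand to more attributes
    assert!(!is_complete(&[Attr::Named("my_proc_macro")])); // not builtin
}
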
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 6c750ff428f..2307f4cfffa 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -1,13 +1,13 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
-use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing};
+use rustc_ast::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, ToAttrTokenStream};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;
-use rustc_span::{sym, Span};
+use rustc_session::parse::ParseSess;
+use rustc_span::{sym, Span, DUMMY_SP};
 
-use std::convert::TryInto;
 use std::ops::Range;
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
@@ -15,11 +15,11 @@ use std::ops::Range;
 /// for the attribute target. This allows us to perform cfg-expansion on
 /// a token stream before we invoke a derive proc-macro.
 ///
-/// This wrapper prevents direct access to the underlying `Vec<ast::Attribute>`.
+/// This wrapper prevents direct access to the underlying `ast::AttrVec`.
 /// Parsing code can only get access to the underlying attributes
 /// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
 /// This makes it difficult to accidentally construct an AST node
-/// (which stores a `Vec<ast::Attribute>`) without first collecting tokens.
+/// (which stores an `ast::AttrVec`) without first collecting tokens.
 ///
 /// This struct has its own module, to ensure that the parser code
 /// cannot directly access the `attrs` field
@@ -32,11 +32,6 @@ pub struct AttrWrapper {
     start_pos: usize,
 }
 
-// This struct is passed around very frequently,
-// so make sure it doesn't accidentally get larger
-#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(AttrWrapper, 16);
-
 impl AttrWrapper {
     pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
         AttrWrapper { attrs, start_pos }
@@ -44,14 +39,20 @@ impl AttrWrapper {
     pub fn empty() -> AttrWrapper {
         AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
     }
-    // FIXME: Delay span bug here?
-    pub(crate) fn take_for_recovery(self) -> AttrVec {
+
+    pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec {
+        sess.dcx.span_delayed_bug(
+            self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
+            "AttrVec is taken for recovery but no error is produced",
+        );
+
         self.attrs
     }
 
+    /// Prepend `self.attrs` to `attrs`.
     // FIXME: require passing an NT to prevent misuse of this method
-    pub(crate) fn prepend_to_nt_inner(self, attrs: &mut Vec<Attribute>) {
-        let mut self_attrs: Vec<_> = self.attrs.into();
+    pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
+        let mut self_attrs = self.attrs;
         std::mem::swap(attrs, &mut self_attrs);
         attrs.extend(self_attrs);
     }
@@ -60,8 +61,8 @@ impl AttrWrapper {
         self.attrs.is_empty()
     }
 
-    pub fn maybe_needs_tokens(&self) -> bool {
-        crate::parser::attr::maybe_needs_tokens(&self.attrs)
+    pub fn is_complete(&self) -> bool {
+        crate::parser::attr::is_complete(&self.attrs)
     }
 }
 
@@ -71,7 +72,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
     // Therefore, the absence of a literal `cfg` or `cfg_attr` guarantees that
     // we don't need to do any eager expansion.
     attrs.iter().any(|attr| {
-        attr.ident().map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr)
+        attr.ident().is_some_and(|ident| ident.name == sym::cfg || ident.name == sym::cfg_attr)
     })
 }
 
@@ -87,7 +88,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 // This also makes `Parser` very cheap to clone, since
 // there is no intermediate collection buffer to clone.
 #[derive(Clone)]
-struct LazyTokenStreamImpl {
+struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
     num_calls: usize,
@@ -95,11 +96,8 @@ struct LazyTokenStreamImpl {
     replace_ranges: Box<[ReplaceRange]>,
 }
 
-#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
-
-impl CreateTokenStream for LazyTokenStreamImpl {
-    fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
+impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
+    fn to_attr_token_stream(&self) -> AttrTokenStream {
         // The token produced by the final call to `{,inlined_}next` was not
         // actually consumed by the callback. The combination of chaining the
         // initial token and using `take` produces the desired result - we
@@ -108,15 +106,15 @@ impl CreateTokenStream for LazyTokenStreamImpl {
         let mut cursor_snapshot = self.cursor_snapshot.clone();
         let tokens =
             std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
-                .chain((0..self.num_calls).map(|_| {
-                    let token = cursor_snapshot.next(cursor_snapshot.desugar_doc_comments);
+                .chain(std::iter::repeat_with(|| {
+                    let token = cursor_snapshot.next();
                     (FlatToken::Token(token.0), token.1)
                 }))
                 .take(self.num_calls);
 
         if !self.replace_ranges.is_empty() {
             let mut tokens: Vec<_> = tokens.collect();
-            let mut replace_ranges = self.replace_ranges.clone();
+            let mut replace_ranges = self.replace_ranges.to_vec();
             replace_ranges.sort_by_key(|(range, _)| range.start);
 
             #[cfg(debug_assertions)]
@@ -136,24 +134,22 @@ impl CreateTokenStream for LazyTokenStreamImpl {
             // Process the replace ranges, starting from the highest start
             // position and working our way back. If we have tokens like:
             //
-            // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
             //
             // Then we will generate replace ranges for both
             // the `#[cfg(FALSE)] field: bool` and the entire
-            // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
             //
             // By starting processing from the replace range with the greatest
             // start position, we ensure that any replace range which encloses
             // another replace range will capture the *replaced* tokens for the inner
             // range, not the original tokens.
-            for (range, new_tokens) in replace_ranges.iter().rev() {
-                assert!(!range.is_empty(), "Cannot replace an empty range: {:?}", range);
+            for (range, new_tokens) in replace_ranges.into_iter().rev() {
+                assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
                 // Replace ranges are only allowed to decrease the number of tokens.
                 assert!(
                     range.len() >= new_tokens.len(),
-                    "Range {:?} has greater len than {:?}",
-                    range,
-                    new_tokens
+                    "Range {range:?} has greater len than {new_tokens:?}"
                 );
 
                 // Replace any removed tokens with `FlatToken::Empty`.
@@ -165,7 +161,7 @@ impl CreateTokenStream for LazyTokenStreamImpl {
 
                 tokens.splice(
                     (range.start as usize)..(range.end as usize),
-                    new_tokens.clone().into_iter().chain(filler),
+                    new_tokens.into_iter().chain(filler),
                 );
             }
             make_token_stream(tokens.into_iter(), self.break_last_token)
@@ -178,7 +174,7 @@ impl CreateTokenStream for LazyTokenStreamImpl {
 impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `LazyTokenStream`, and returned along with the result
+    /// into a `LazyAttrTokenStream`, and returned along with the result
     /// of the callback.
     ///
     /// Note: If your callback consumes an opening delimiter
@@ -196,14 +192,14 @@ impl<'a> Parser<'a> {
         &mut self,
         attrs: AttrWrapper,
         force_collect: ForceCollect,
-        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
+        f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, TrailingToken)>,
     ) -> PResult<'a, R> {
         // We only bail out when nothing could possibly observe the collected tokens:
         // 1. We cannot be force collecting tokens (since force-collecting requires tokens
         //    by definition)
         if matches!(force_collect, ForceCollect::No)
             // None of our outer attributes can require tokens (e.g. a proc-macro)
-            && !attrs.maybe_needs_tokens()
+            && attrs.is_complete()
             // If our target supports custom inner attributes, then we cannot bail
             // out early, since we may need to capture tokens for a custom inner attribute
             // invocation.
@@ -212,17 +208,18 @@ impl<'a> Parser<'a> {
             // or `#[cfg_attr]` attributes.
             && !self.capture_cfg
         {
-            return Ok(f(self, attrs.attrs.into())?.0);
+            return Ok(f(self, attrs.attrs)?.0);
         }
 
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = self.token_cursor.clone();
+        let start_pos = self.num_bump_calls;
 
         let has_outer_attrs = !attrs.attrs.is_empty();
         let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
         let replace_ranges_start = self.capture_state.replace_ranges.len();
 
-        let ret = f(self, attrs.attrs.into());
+        let ret = f(self, attrs.attrs);
 
         self.capture_state.capturing = prev_capturing;
 
@@ -246,9 +243,9 @@ impl<'a> Parser<'a> {
         // Now that we've parsed an AST node, we have more information available.
         if matches!(force_collect, ForceCollect::No)
             // We now have inner attributes available, so this check is more precise
-            // than `attrs.maybe_needs_tokens()` at the start of the function.
+            // than `attrs.is_complete()` at the start of the function.
             // As a result, we don't need to check `R::SUPPORTS_CUSTOM_INNER_ATTRS`
-            && !crate::parser::attr::maybe_needs_tokens(ret.attrs())
+            && crate::parser::attr::is_complete(ret.attrs())
             // Subtle: We call `has_cfg_or_cfg_attr` with the attrs from `ret`.
             // This ensures that we consider inner attributes (e.g. `#![cfg]`),
             // which require us to have tokens available
@@ -269,45 +266,45 @@ impl<'a> Parser<'a> {
             if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&inner_attr.id) {
                 inner_attr_replace_ranges.push(attr_range);
             } else {
-                self.sess
-                    .span_diagnostic
-                    .delay_span_bug(inner_attr.span, "Missing token range for attribute");
+                self.dcx().span_delayed_bug(inner_attr.span, "Missing token range for attribute");
             }
         }
 
         let replace_ranges_end = self.capture_state.replace_ranges.len();
 
-        let cursor_snapshot_next_calls = cursor_snapshot.num_next_calls;
-        let mut end_pos = self.token_cursor.num_next_calls;
+        let mut end_pos = self.num_bump_calls;
+
+        let mut captured_trailing = false;
 
         // Capture a trailing token if requested by the callback 'f'
         match trailing {
             TrailingToken::None => {}
+            TrailingToken::Gt => {
+                assert_eq!(self.token.kind, token::Gt);
+            }
             TrailingToken::Semi => {
                 assert_eq!(self.token.kind, token::Semi);
                 end_pos += 1;
+                captured_trailing = true;
             }
             TrailingToken::MaybeComma => {
                 if self.token.kind == token::Comma {
                     end_pos += 1;
+                    captured_trailing = true;
                 }
             }
         }
 
         // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
         // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyTokenStream` gets converted
-        // into an `AttrAnnotatedTokenStream`, we will create the proper token.
-        if self.token_cursor.break_last_token {
-            assert_eq!(
-                trailing,
-                TrailingToken::None,
-                "Cannot set `break_last_token` and have trailing token"
-            );
+        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
+        // into an `AttrTokenStream`, we will create the proper token.
+        if self.break_last_token {
+            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
             end_pos += 1;
         }
 
-        let num_calls = end_pos - cursor_snapshot_next_calls;
+        let num_calls = end_pos - start_pos;
 
         // If we have no attributes, then we will never need to
         // use any replace ranges.
@@ -315,24 +312,24 @@ impl<'a> Parser<'a> {
             Box::new([])
         } else {
             // Grab any replace ranges that occur *inside* the current AST node.
-            // We will perform the actual replacement when we convert the `LazyTokenStream`
-            // to an `AttrAnnotatedTokenStream`
-            let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
+            // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
+            // to an `AttrTokenStream`.
+            let start_calls: u32 = start_pos.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                 .iter()
                 .cloned()
-                .chain(inner_attr_replace_ranges.clone().into_iter())
+                .chain(inner_attr_replace_ranges.iter().cloned())
                 .map(|(range, tokens)| {
                     ((range.start - start_calls)..(range.end - start_calls), tokens)
                 })
                 .collect()
         };
 
-        let tokens = LazyTokenStream::new(LazyTokenStreamImpl {
+        let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
             start_token,
             num_calls,
             cursor_snapshot,
-            break_last_token: self.token_cursor.break_last_token,
+            break_last_token: self.break_last_token,
             replace_ranges,
         });
 
@@ -352,22 +349,18 @@ impl<'a> Parser<'a> {
         // on the captured token stream.
         if self.capture_cfg
             && matches!(self.capture_state.capturing, Capturing::Yes)
-            && has_cfg_or_cfg_attr(&final_attrs)
+            && has_cfg_or_cfg_attr(final_attrs)
         {
-            let attr_data = AttributesData { attrs: final_attrs.to_vec().into(), tokens };
+            let attr_data = AttributesData { attrs: final_attrs.iter().cloned().collect(), tokens };
 
             // Replace the entire AST node that we just parsed, including attributes,
             // with a `FlatToken::AttrTarget`. If this AST node is inside an item
             // that has `#[derive]`, then this will allow us to cfg-expand this
             // AST node.
-            let start_pos =
-                if has_outer_attrs { attrs.start_pos } else { cursor_snapshot_next_calls };
+            let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
             let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
 
-            assert!(
-                !self.token_cursor.break_last_token,
-                "Should not have unglued last token with cfg attr"
-            );
+            assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
             let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
             self.capture_state.replace_ranges.push((range, new_tokens));
             self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
@@ -391,74 +384,82 @@ impl<'a> Parser<'a> {
 fn make_token_stream(
     mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
     break_last_token: bool,
-) -> AttrAnnotatedTokenStream {
+) -> AttrTokenStream {
     #[derive(Debug)]
     struct FrameData {
         // This is `None` for the first frame, `Some` for all others.
-        open_delim_sp: Option<(Delimiter, Span)>,
-        inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
+        open_delim_sp: Option<(Delimiter, Span, Spacing)>,
+        inner: Vec<AttrTokenTree>,
     }
     let mut stack = vec![FrameData { open_delim_sp: None, inner: vec![] }];
     let mut token_and_spacing = iter.next();
     while let Some((token, spacing)) = token_and_spacing {
         match token {
             FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
-                stack.push(FrameData { open_delim_sp: Some((delim, span)), inner: vec![] });
+                stack
+                    .push(FrameData { open_delim_sp: Some((delim, span, spacing)), inner: vec![] });
             }
             FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
                 let frame_data = stack
                     .pop()
-                    .unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));
+                    .unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}"));
 
-                let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
+                let (open_delim, open_sp, open_spacing) = frame_data.open_delim_sp.unwrap();
                 assert_eq!(
                     open_delim, delim,
-                    "Mismatched open/close delims: open={:?} close={:?}",
-                    open_delim, span
+                    "Mismatched open/close delims: open={open_delim:?} close={span:?}"
                 );
                 let dspan = DelimSpan::from_pair(open_sp, span);
-                let stream = AttrAnnotatedTokenStream::new(frame_data.inner);
-                let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream);
+                let dspacing = DelimSpacing::new(open_spacing, spacing);
+                let stream = AttrTokenStream::new(frame_data.inner);
+                let delimited = AttrTokenTree::Delimited(dspan, dspacing, delim, stream);
                 stack
                     .last_mut()
-                    .unwrap_or_else(|| {
-                        panic!("Bottom token frame is missing for token: {:?}", token)
-                    })
+                    .unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}"))
                     .inner
-                    .push((delimited, Spacing::Alone));
+                    .push(delimited);
             }
             FlatToken::Token(token) => stack
                 .last_mut()
                 .expect("Bottom token frame is missing!")
                 .inner
-                .push((AttrAnnotatedTokenTree::Token(token), spacing)),
+                .push(AttrTokenTree::Token(token, spacing)),
             FlatToken::AttrTarget(data) => stack
                 .last_mut()
                 .expect("Bottom token frame is missing!")
                 .inner
-                .push((AttrAnnotatedTokenTree::Attributes(data), spacing)),
+                .push(AttrTokenTree::Attributes(data)),
             FlatToken::Empty => {}
         }
         token_and_spacing = iter.next();
     }
     let mut final_buf = stack.pop().expect("Missing final buf!");
     if break_last_token {
-        let (last_token, spacing) = final_buf.inner.pop().unwrap();
-        if let AttrAnnotatedTokenTree::Token(last_token) = last_token {
+        let last_token = final_buf.inner.pop().unwrap();
+        if let AttrTokenTree::Token(last_token, spacing) = last_token {
             let unglued_first = last_token.kind.break_two_token_op().unwrap().0;
 
             // An 'unglued' token is always two ASCII characters
             let mut first_span = last_token.span.shrink_to_lo();
             first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));
 
-            final_buf.inner.push((
-                AttrAnnotatedTokenTree::Token(Token::new(unglued_first, first_span)),
-                spacing,
-            ));
+            final_buf
+                .inner
+                .push(AttrTokenTree::Token(Token::new(unglued_first, first_span), spacing));
         } else {
-            panic!("Unexpected last token {:?}", last_token)
+            panic!("Unexpected last token {last_token:?}")
         }
     }
-    assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
-    AttrAnnotatedTokenStream::new(final_buf.inner)
+    AttrTokenStream::new(final_buf.inner)
+}
+
+// Some types are used a lot. Make sure they don't unintentionally get bigger.
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+mod size_asserts {
+    use super::*;
+    use rustc_data_structures::static_assert_size;
+    // tidy-alphabetical-start
+    static_assert_size!(AttrWrapper, 16);
+    static_assert_size!(LazyAttrTokenStreamImpl, 104);
+    // tidy-alphabetical-end
 }
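
For intuition on the replace-range handling in `to_attr_token_stream` above, here is a standalone sketch of the same splicing strategy on a plain `Vec<String>`; the token and placeholder names are invented and this is not the rustc data model. Applying ranges from the highest start position backwards keeps the indices of enclosing ranges valid, and padding with placeholders preserves the buffer length so outer ranges still line up.

use std::ops::Range;

// Apply replace ranges to a flat token buffer, highest start position first, so
// that splicing an inner range never shifts the indices of an enclosing one.
fn apply_replace_ranges(tokens: &mut Vec<String>, mut ranges: Vec<(Range<usize>, Vec<String>)>) {
    ranges.sort_by_key(|(range, _)| range.start);
    for (range, new_tokens) in ranges.into_iter().rev() {
        assert!(range.len() >= new_tokens.len(), "ranges may only shrink the stream");
        // Pad with placeholders so outer ranges keep their original length.
        let filler =
            std::iter::repeat(String::from("<empty>")).take(range.len() - new_tokens.len());
        tokens.splice(range, new_tokens.into_iter().chain(filler));
    }
}

fn main() {
    let mut tokens: Vec<String> =
        ["struct", "Foo", "{", "field", ":", "bool", "}"].map(String::from).to_vec();
    // Pretend `field: bool` was cfg-stripped and replaced by a single attr target.
    apply_replace_ranges(&mut tokens, vec![(3..6, vec![String::from("<attr-target>")])]);
    assert_eq!(tokens.len(), 7); // overall length preserved by the filler
    assert_eq!(tokens[3], "<attr-target>");
    assert_eq!(tokens[4], "<empty>");
}
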
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 63055c56c5c..445d5b2ce79 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -3,42 +3,53 @@ use super::{
     BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep,
     TokenExpectType, TokenType,
 };
-
-use crate::lexer::UnmatchedBrace;
+use crate::errors::{
+    AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, BadQPathStage2, BadTypePlus,
+    BadTypePlusSub, ColonAsSemi, ComparisonOperatorsCannotBeChained,
+    ComparisonOperatorsCannotBeChainedSugg, ConstGenericWithoutBraces,
+    ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything, DocCommentOnParamType,
+    DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
+    GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
+    HelpIdentifierStartsWithNumber, HelpUseLatestEdition, InInTypo, IncorrectAwait,
+    IncorrectSemicolon, IncorrectUseOfAwait, PatternMethodParamWithoutBody, QuestionMarkInType,
+    QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
+    StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens, StructLiteralNeedingParensSugg,
+    SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma, TernaryOperator,
+    UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
+    UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType,
+};
+use crate::fluent_generated as fluent;
+use crate::parser;
+use crate::parser::attr::InnerAttrPolicy;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
+use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
+use rustc_ast::tokenstream::AttrTokenTree;
 use rustc_ast::util::parser::AssocOp;
 use rustc_ast::{
-    AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingMode, Block,
-    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, Item, ItemKind, Mutability, Param, Pat,
+    AngleBracketedArg, AngleBracketedArgs, AnonConst, AttrVec, BinOpKind, BindingAnnotation, Block,
+    BlockCheckMode, Expr, ExprKind, GenericArg, Generics, HasTokens, Item, ItemKind, Param, Pat,
     PatKind, Path, PathSegment, QSelf, Ty, TyKind,
 };
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
-    fluent, Applicability, DiagnosticBuilder, DiagnosticMessage, Handler, MultiSpan, PResult,
+    pluralize, AddToDiagnostic, Applicability, DiagCtxt, Diagnostic, DiagnosticBuilder,
+    ErrorGuaranteed, FatalError, PErr, PResult,
 };
-use rustc_errors::{pluralize, struct_span_err, Diagnostic, EmissionGuarantee, ErrorGuaranteed};
-use rustc_macros::{SessionDiagnostic, SessionSubdiagnostic};
+use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::Spanned;
-use rustc_span::symbol::{kw, Ident};
-use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
-use std::ops::{Deref, DerefMut};
-
+use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{BytePos, Span, SpanSnippetError, Symbol, DUMMY_SP};
 use std::mem::take;
-
-use crate::parser;
-use tracing::{debug, trace};
-
-const TURBOFISH_SUGGESTION_STR: &str =
-    "use `::<...>` instead of `<...>` to specify lifetime, type, or const arguments";
+use std::ops::{Deref, DerefMut};
+use thin_vec::{thin_vec, ThinVec};
 
 /// Creates a placeholder argument.
 pub(super) fn dummy_arg(ident: Ident) -> Param {
     let pat = P(Pat {
         id: ast::DUMMY_NODE_ID,
-        kind: PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None),
+        kind: PatKind::Ident(BindingAnnotation::NONE, ident, None),
         span: ident.span,
         tokens: None,
     });
@@ -53,38 +64,10 @@ pub(super) fn dummy_arg(ident: Ident) -> Param {
     }
 }
 
-pub enum Error {
-    UselessDocComment,
-}
-
-impl Error {
-    fn span_err(
-        self,
-        sp: impl Into<MultiSpan>,
-        handler: &Handler,
-    ) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
-        match self {
-            Error::UselessDocComment => {
-                let mut err = struct_span_err!(
-                    handler,
-                    sp,
-                    E0585,
-                    "found a documentation comment that doesn't document anything",
-                );
-                err.help(
-                    "doc comments must come before what they document, maybe a comment was \
-                          intended with `//`?",
-                );
-                err
-            }
-        }
-    }
-}
-
 pub(super) trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;
     fn to_ty(&self) -> Option<P<Ty>>;
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self;
 }
 
 impl RecoverQPath for Ty {
@@ -92,7 +75,7 @@ impl RecoverQPath for Ty {
     fn to_ty(&self) -> Option<P<Ty>> {
         Some(P(self.clone()))
     }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
         Self {
             span: path.span,
             kind: TyKind::Path(qself, path),
@@ -103,10 +86,11 @@ impl RecoverQPath for Ty {
 }
 
 impl RecoverQPath for Pat {
+    const PATH_STYLE: PathStyle = PathStyle::Pat;
     fn to_ty(&self) -> Option<P<Ty>> {
         self.to_ty()
     }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
         Self {
             span: path.span,
             kind: PatKind::Path(qself, path),
@@ -120,7 +104,7 @@ impl RecoverQPath for Expr {
     fn to_ty(&self) -> Option<P<Ty>> {
         self.to_ty()
     }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
         Self {
             span: path.span,
             kind: ExprKind::Path(qself, path),
@@ -178,15 +162,11 @@ enum IsStandalone {
     Standalone,
     /// It's a subexpression, i.e., *not* standalone.
     Subexpr,
-    /// It's maybe standalone; we're not sure.
-    Maybe,
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 enum IncOrDec {
     Inc,
-    // FIXME: `i--` recovery isn't implemented yet
-    #[allow(dead_code)]
     Dec,
 }
 
@@ -228,118 +208,20 @@ struct MultiSugg {
 }
 
 impl MultiSugg {
-    fn emit<G: EmissionGuarantee>(self, err: &mut DiagnosticBuilder<'_, G>) {
-        err.multipart_suggestion(&self.msg, self.patches, self.applicability);
+    fn emit(self, err: &mut Diagnostic) {
+        err.multipart_suggestion(self.msg, self.patches, self.applicability);
     }
 
-    /// Overrides individual messages and applicabilities.
-    fn emit_many<G: EmissionGuarantee>(
-        err: &mut DiagnosticBuilder<'_, G>,
-        msg: &str,
-        applicability: Applicability,
-        suggestions: impl Iterator<Item = Self>,
-    ) {
-        err.multipart_suggestions(msg, suggestions.map(|s| s.patches), applicability);
+    fn emit_verbose(self, err: &mut Diagnostic) {
+        err.multipart_suggestion_verbose(self.msg, self.patches, self.applicability);
     }
 }
 
-#[derive(SessionDiagnostic)]
-#[error(parser::maybe_report_ambiguous_plus)]
-struct AmbiguousPlus {
-    pub sum_ty: String,
-    #[primary_span]
-    #[suggestion(code = "({sum_ty})")]
-    pub span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[error(parser::maybe_recover_from_bad_type_plus, code = "E0178")]
-struct BadTypePlus {
-    pub ty: String,
-    #[primary_span]
-    pub span: Span,
-    #[subdiagnostic]
-    pub sub: BadTypePlusSub,
-}
-
-#[derive(SessionSubdiagnostic)]
-pub enum BadTypePlusSub {
-    #[suggestion(
-        parser::add_paren,
-        code = "{sum_with_parens}",
-        applicability = "machine-applicable"
-    )]
-    AddParen {
-        sum_with_parens: String,
-        #[primary_span]
-        span: Span,
-    },
-    #[label(parser::forgot_paren)]
-    ForgotParen {
-        #[primary_span]
-        span: Span,
-    },
-    #[label(parser::expect_path)]
-    ExpectPath {
-        #[primary_span]
-        span: Span,
-    },
-}
-
-#[derive(SessionDiagnostic)]
-#[error(parser::maybe_recover_from_bad_qpath_stage_2)]
-struct BadQPathStage2 {
-    #[primary_span]
-    #[suggestion(applicability = "maybe-incorrect")]
-    span: Span,
-    ty: String,
-}
-
-#[derive(SessionDiagnostic)]
-#[error(parser::incorrect_semicolon)]
-struct IncorrectSemicolon<'a> {
-    #[primary_span]
-    #[suggestion_short(applicability = "machine-applicable")]
-    span: Span,
-    #[help]
-    opt_help: Option<()>,
-    name: &'a str,
-}
-
-#[derive(SessionDiagnostic)]
-#[error(parser::incorrect_use_of_await)]
-struct IncorrectUseOfAwait {
-    #[primary_span]
-    #[suggestion(parser::parentheses_suggestion, applicability = "machine-applicable")]
-    span: Span,
-}
-
-#[derive(SessionDiagnostic)]
-#[error(parser::incorrect_use_of_await)]
-struct IncorrectAwait {
-    #[primary_span]
-    span: Span,
-    #[suggestion(parser::postfix_suggestion, code = "{expr}.await{question_mark}")]
-    sugg_span: (Span, Applicability),
-    expr: String,
-    question_mark: &'static str,
-}
-
-#[derive(SessionDiagnostic)]
-#[error(parser::in_in_typo)]
-struct InInTypo {
-    #[primary_span]
-    span: Span,
-    #[suggestion(applicability = "machine-applicable")]
-    sugg_span: Span,
-}
-
-// SnapshotParser is used to create a snapshot of the parser
-// without causing duplicate errors being emitted when the `Parser`
-// is dropped.
+/// `SnapshotParser` is used to create a snapshot of the parser
+/// without causing duplicate errors to be emitted when the `Parser`
+/// is dropped.
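+/// Typical usage is to call `create_snapshot_for_diagnostic`, speculatively parse
+/// with the snapshot, and then either `restore_snapshot` or discard it.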
 pub struct SnapshotParser<'a> {
     parser: Parser<'a>,
-    unclosed_delims: Vec<UnmatchedBrace>,
 }
 
 impl<'a> Deref for SnapshotParser<'a> {
@@ -357,64 +239,39 @@ impl<'a> DerefMut for SnapshotParser<'a> {
 }
 
 impl<'a> Parser<'a> {
-    #[rustc_lint_diagnostics]
-    pub(super) fn span_err<S: Into<MultiSpan>>(
-        &self,
-        sp: S,
-        err: Error,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        err.span_err(sp, self.diagnostic())
-    }
-
-    #[rustc_lint_diagnostics]
-    pub fn struct_span_err<S: Into<MultiSpan>>(
-        &self,
-        sp: S,
-        m: impl Into<DiagnosticMessage>,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        self.sess.span_diagnostic.struct_span_err(sp, m)
+    pub fn dcx(&self) -> &'a DiagCtxt {
+        &self.sess.dcx
     }
 
-    pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: impl Into<DiagnosticMessage>) -> ! {
-        self.sess.span_diagnostic.span_bug(sp, m)
-    }
-
-    pub(super) fn diagnostic(&self) -> &'a Handler {
-        &self.sess.span_diagnostic
-    }
-
-    /// Replace `self` with `snapshot.parser` and extend `unclosed_delims` with `snapshot.unclosed_delims`.
-    /// This is to avoid losing unclosed delims errors `create_snapshot_for_diagnostic` clears.
+    /// Replace `self` with `snapshot.parser`.
     pub(super) fn restore_snapshot(&mut self, snapshot: SnapshotParser<'a>) {
         *self = snapshot.parser;
-        self.unclosed_delims.extend(snapshot.unclosed_delims.clone());
-    }
-
-    pub fn unclosed_delims(&self) -> &[UnmatchedBrace] {
-        &self.unclosed_delims
     }
 
     /// Create a snapshot of the `Parser`.
     pub fn create_snapshot_for_diagnostic(&self) -> SnapshotParser<'a> {
-        let mut snapshot = self.clone();
-        let unclosed_delims = self.unclosed_delims.clone();
-        // Clear `unclosed_delims` in snapshot to avoid
-        // duplicate errors being emitted when the `Parser`
-        // is dropped (which may or may not happen, depending
-        // if the parsing the snapshot is created for is successful)
-        snapshot.unclosed_delims.clear();
-        SnapshotParser { parser: snapshot, unclosed_delims }
+        let snapshot = self.clone();
+        SnapshotParser { parser: snapshot }
     }
 
     pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
         self.sess.source_map().span_to_snippet(span)
     }
 
-    pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        let mut err = self.struct_span_err(
-            self.token.span,
-            &format!("expected identifier, found {}", super::token_descr(&self.token)),
-        );
+    /// Emits an error with suggestions if an identifier was expected but not found.
+    ///
+    /// Returns a possibly recovered identifier.
+    pub(super) fn expected_ident_found(
+        &mut self,
+        recover: bool,
+    ) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+        if let TokenKind::DocComment(..) = self.prev_token.kind {
+            return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
+                span: self.prev_token.span,
+                missing_comma: None,
+            }));
+        }
+
         let valid_follow = &[
             TokenKind::Eq,
             TokenKind::Colon,
@@ -426,34 +283,146 @@ impl<'a> Parser<'a> {
             TokenKind::CloseDelim(Delimiter::Brace),
             TokenKind::CloseDelim(Delimiter::Parenthesis),
         ];
-        match self.token.ident() {
-            Some((ident, false))
-                if ident.is_raw_guess()
-                    && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) =>
-            {
-                err.span_suggestion_verbose(
-                    ident.span.shrink_to_lo(),
-                    &format!("escape `{}` to use it as an identifier", ident.name),
-                    "r#",
-                    Applicability::MaybeIncorrect,
-                );
-            }
-            _ => {}
-        }
-        if let Some(token_descr) = super::token_descr_opt(&self.token) {
-            err.span_label(self.token.span, format!("expected identifier, found {}", token_descr));
+
+        let mut recovered_ident = None;
+        // Clone the token now so that the diagnostic reports the correct original
+        // token, regardless of any eager recovery below.
+        let bad_token = self.token.clone();
+
+        // Suggest prefixing a keyword in identifier position with `r#`.
+        let suggest_raw = if let Some((ident, false)) = self.token.ident()
+            && ident.is_raw_guess()
+            && self.look_ahead(1, |t| valid_follow.contains(&t.kind))
+        {
+            recovered_ident = Some((ident, true));
+
+            // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
+            // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
+            let ident_name = ident.name.to_string();
+
+            Some(SuggEscapeIdentifier { span: ident.span.shrink_to_lo(), ident_name })
         } else {
-            err.span_label(self.token.span, "expected identifier");
+            None
+        };
+
+        let suggest_remove_comma =
             if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
-                err.span_suggestion(
-                    self.token.span,
-                    "remove this comma",
-                    "",
-                    Applicability::MachineApplicable,
-                );
+                if recover {
+                    self.bump();
+                    recovered_ident = self.ident_or_err(false).ok();
+                };
+
+                Some(SuggRemoveComma { span: bad_token.span })
+            } else {
+                None
+            };
+
+        let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
+            let (invalid, valid) = self.token.span.split_at(len as u32);
+
+            recovered_ident = Some((Ident::new(valid_portion, valid), false));
+
+            HelpIdentifierStartsWithNumber { num_span: invalid }
+        });
+
+        let err = ExpectedIdentifier {
+            span: bad_token.span,
+            token: bad_token,
+            suggest_raw,
+            suggest_remove_comma,
+            help_cannot_start_number,
+        };
+        let mut err = self.dcx().create_err(err);
+
+        // If the token we have is a `<`, it *might* be a misplaced generic.
+        // FIXME: could we recover with this?
+        if self.token == token::Lt {
+            // All keywords that can be followed by generic parameters.
+            let valid_prev_keywords =
+                [kw::Fn, kw::Type, kw::Struct, kw::Enum, kw::Union, kw::Trait];
+
+            // We expected an identifier and found `<`. If the previous token is a
+            // keyword that can take generics, suggest the correct generic placement
+            // (further below).
+            let maybe_keyword = self.prev_token.clone();
+            if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
+                // If we have a valid keyword, attempt to parse the generics
+                // and obtain the keyword's symbol.
+                match self.parse_generics() {
+                    Ok(generic) => {
+                        if let TokenKind::Ident(symbol, _) = maybe_keyword.kind {
+                            let ident_name = symbol;
+                            // At this point, we've found something like `fn <T>id`,
+                            // and the current token should be the `Ident` holding the
+                            // item name (i.e. the function name). If there is another
+                            // `<` after the name, show a help message instead of a
+                            // suggestion.
+
+                            if !self.look_ahead(1, |t| *t == token::Lt)
+                                && let Ok(snippet) =
+                                    self.sess.source_map().span_to_snippet(generic.span)
+                            {
+                                err.multipart_suggestion_verbose(
+                                        format!("place the generic parameter name after the {ident_name} name"),
+                                        vec![
+                                            (self.token.span.shrink_to_hi(), snippet),
+                                            (generic.span, String::new())
+                                        ],
+                                        Applicability::MaybeIncorrect,
+                                    );
+                            } else {
+                                err.help(format!(
+                                    "place the generic parameter name after the {ident_name} name"
+                                ));
+                            }
+                        }
+                    }
+                    Err(err) => {
+                        // If there's an error while parsing the generics, skip the
+                        // misplaced-generics suggestion and emit the expected-identifier
+                        // error instead.
+                        err.cancel();
+                    }
+                }
             }
         }
-        err
+
+        if let Some(recovered_ident) = recovered_ident
+            && recover
+        {
+            err.emit();
+            Ok(recovered_ident)
+        } else {
+            Err(err)
+        }
+    }
+
+    pub(super) fn expected_ident_found_err(&mut self) -> DiagnosticBuilder<'a> {
+        self.expected_ident_found(false).unwrap_err()
+    }
+
+    /// Checks if the current token is an integer or float literal and looks like
+    /// it could be an invalid identifier with digits at the start.
+    ///
+    /// Returns the number of characters (bytes) composing the invalid portion
+    /// of the identifier and the valid portion of the identifier.
+    pub(super) fn is_lit_bad_ident(&mut self) -> Option<(usize, Symbol)> {
+        // Ensure that the integer literal is followed by an *invalid*
+        // suffix: this is how we know that it is an identifier with an
+        // invalid beginning.
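+        // For example, `1x` lexes as the integer literal `1` with suffix `x`;
+        // `x` is not a valid literal suffix, so we treat it as the valid portion
+        // of an identifier that starts with a digit.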
+        if let token::Literal(Lit {
+            kind: token::LitKind::Integer | token::LitKind::Float,
+            symbol,
+            suffix: Some(suffix), // no suffix makes it a valid literal
+        }) = self.token.kind
+            && rustc_ast::MetaItemLit::from_token(&self.token).is_none()
+        {
+            Some((symbol.as_str().len(), suffix))
+        } else {
+            None
+        }
     }
 
     pub(super) fn expected_one_of_not_found(
@@ -479,48 +448,57 @@ impl<'a> Parser<'a> {
             })
         }
 
-        let mut expected = edible
+        self.expected_tokens.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token));
+        let mut expected = self
+            .expected_tokens
             .iter()
-            .map(|x| TokenType::Token(x.clone()))
-            .chain(inedible.iter().map(|x| TokenType::Token(x.clone())))
-            .chain(self.expected_tokens.iter().cloned())
-            .filter_map(|token| {
-                // filter out suggestions which suggest the same token which was found and deemed incorrect
+            .cloned()
+            .filter(|token| {
+                // Filter out suggestions that suggest the same token which was found and deemed incorrect.
                 fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool {
-                    if let TokenKind::Ident(current_sym, _) = found {
-                        if let TokenType::Keyword(suggested_sym) = expected {
-                            return current_sym == suggested_sym;
-                        }
+                    if let TokenKind::Ident(current_sym, _) = found
+                        && let TokenType::Keyword(suggested_sym) = expected
+                    {
+                        return current_sym == suggested_sym;
                     }
                     false
                 }
-                if token != parser::TokenType::Token(self.token.kind.clone()) {
+
+                if *token != parser::TokenType::Token(self.token.kind.clone()) {
                     let eq = is_ident_eq_keyword(&self.token.kind, &token);
-                    // if the suggestion is a keyword and the found token is an ident,
+                    // If the suggestion is a keyword and the found token is an ident,
                     // the content of which are equal to the suggestion's content,
-                    // we can remove that suggestion (see the return None statement below)
+                    // we can remove that suggestion (see the `return false` below).
 
-                    // if this isn't the case however, and the suggestion is a token the
-                    // content of which is the same as the found token's, we remove it as well
+                    // If this isn't the case however, and the suggestion is a token the
+                    // content of which is the same as the found token's, we remove it as well.
                     if !eq {
                         if let TokenType::Token(kind) = &token {
                             if kind == &self.token.kind {
-                                return None;
+                                return false;
                             }
                         }
-                        return Some(token);
+                        return true;
                     }
                 }
-                return None;
+                false
             })
             .collect::<Vec<_>>();
         expected.sort_by_cached_key(|x| x.to_string());
         expected.dedup();
 
         let sm = self.sess.source_map();
-        let msg = format!("expected `;`, found {}", super::token_descr(&self.token));
-        let appl = Applicability::MachineApplicable;
+
+        // Special-case "expected `;`" errors.
         if expected.contains(&TokenType::Token(token::Semi)) {
+            // If the user is trying to write a ternary expression, recover it and
+            // return an Err to prevent a cascade of irrelevant diagnostics.
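+            // (e.g. a C-style ternary such as `let x = cond ? a : b;`).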
+            if self.prev_token == token::Question
+                && let Err(e) = self.maybe_recover_from_ternary_operator()
+            {
+                return Err(e);
+            }
+
             if self.token.span == DUMMY_SP || self.prev_token.span == DUMMY_SP {
                 // Likely inside a macro, can't provide meaningful suggestions.
             } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) {
@@ -547,49 +525,68 @@ impl<'a> Parser<'a> {
                 //
                 //   let x = 32:
                 //   let y = 42;
+                self.dcx().emit_err(ExpectedSemi {
+                    span: self.token.span,
+                    token: self.token.clone(),
+                    unexpected_token_label: None,
+                    sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span),
+                });
                 self.bump();
-                let sp = self.prev_token.span;
-                self.struct_span_err(sp, &msg)
-                    .span_suggestion_short(sp, "change this to `;`", ";", appl)
-                    .emit();
                 return Ok(true);
             } else if self.look_ahead(0, |t| {
                 t == &token::CloseDelim(Delimiter::Brace)
-                    || (t.can_begin_expr() && t != &token::Semi && t != &token::Pound)
+                    || ((t.can_begin_expr() || t.can_begin_item())
+                        && t != &token::Semi
+                        && t != &token::Pound)
                     // Avoid triggering with too many trailing `#` in raw string.
                     || (sm.is_multiline(
-                        self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo())
+                        self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()),
                     ) && t == &token::Pound)
-            }) {
+            }) && !expected.contains(&TokenType::Token(token::Comma))
+            {
                 // Missing semicolon typo. This is triggered if the next token could either start a
                 // new statement or is a block close. For example:
                 //
                 //   let x = 32
                 //   let y = 42;
-                let sp = self.prev_token.span.shrink_to_hi();
-                self.struct_span_err(sp, &msg)
-                    .span_label(self.token.span, "unexpected token")
-                    .span_suggestion_short(sp, "add `;` here", ";", appl)
-                    .emit();
+                let span = self.prev_token.span.shrink_to_hi();
+                self.dcx().emit_err(ExpectedSemi {
+                    span,
+                    token: self.token.clone(),
+                    unexpected_token_label: Some(self.token.span),
+                    sugg: ExpectedSemiSugg::AddSemi(span),
+                });
                 return Ok(true);
             }
         }
 
+        if self.token.kind == TokenKind::EqEq
+            && self.prev_token.is_ident()
+            && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
+        {
+            // Likely typo: `=` → `==` in let expr or enum item
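+            // (e.g. `let x == 2;` or `enum E { A == 1 }`).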
+            return Err(self.dcx().create_err(UseEqInstead { span: self.token.span }));
+        }
+
+        if self.token.is_keyword(kw::Move) && self.prev_token.is_keyword(kw::Async) {
+            // Parsing of `async move` has failed, so the 2015 edition must be in use
+            // (where `async` blocks are not available).
+            let span = self.prev_token.span.to(self.token.span);
+            return Err(self.dcx().create_err(AsyncMoveBlockIn2015 { span }));
+        }
+
         let expect = tokens_to_string(&expected);
         let actual = super::token_descr(&self.token);
         let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
+            let fmt = format!("expected one of {expect}, found {actual}");
             let short_expect = if expected.len() > 6 {
                 format!("{} possible tokens", expected.len())
             } else {
-                expect.clone()
+                expect
             };
-            (
-                format!("expected one of {expect}, found {actual}"),
-                (self.prev_token.span.shrink_to_hi(), format!("expected one of {short_expect}")),
-            )
+            (fmt, (self.prev_token.span.shrink_to_hi(), format!("expected one of {short_expect}")))
         } else if expected.is_empty() {
             (
-                format!("unexpected token: {}", actual),
+                format!("unexpected token: {actual}"),
                 (self.prev_token.span, "unexpected token after this".to_string()),
             )
         } else {
@@ -599,15 +596,109 @@ impl<'a> Parser<'a> {
             )
         };
         self.last_unexpected_token_span = Some(self.token.span);
-        let mut err = self.struct_span_err(self.token.span, &msg_exp);
+        // FIXME: translation requires list formatting (for `expect`)
+        let mut err = self.dcx().struct_span_err(self.token.span, msg_exp);
+
+        // Look for usages of '=>' where '>=' was probably intended
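+        // (i.e. the user probably wrote something like `x => y` intending `x >= y`).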
+        if self.token == token::FatArrow
+            && expected
+                .iter()
+                .any(|tok| matches!(tok, TokenType::Operator | TokenType::Token(TokenKind::Le)))
+            && !expected.iter().any(|tok| {
+                matches!(
+                    tok,
+                    TokenType::Token(TokenKind::FatArrow) | TokenType::Token(TokenKind::Comma)
+                )
+            })
+        {
+            err.span_suggestion(
+                self.token.span,
+                "you might have meant to write a \"greater than or equal to\" comparison",
+                ">=",
+                Applicability::MaybeIncorrect,
+            );
+        }
+
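+        // A function declared with a keyword from another language
+        // (e.g. `function foo() {}`) was likely meant to be a Rust `fn`.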
+        if let TokenKind::Ident(symbol, _) = &self.prev_token.kind {
+            if ["def", "fun", "func", "function"].contains(&symbol.as_str()) {
+                err.span_suggestion_short(
+                    self.prev_token.span,
+                    format!("write `fn` instead of `{symbol}` to declare a function"),
+                    "fn",
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+
+        if let TokenKind::Ident(prev, _) = &self.prev_token.kind
+            && let TokenKind::Ident(cur, _) = &self.token.kind
+        {
+            let concat = Symbol::intern(&format!("{prev}{cur}"));
+            let ident = Ident::new(concat, DUMMY_SP);
+            if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
+                let span = self.prev_token.span.to(self.token.span);
+                err.span_suggestion_verbose(
+                    span,
+                    format!("consider removing the space to spell keyword `{concat}`"),
+                    concat,
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
 
-        // Add suggestion for a missing closing angle bracket if '>' is included in expected_tokens
-        // there are unclosed angle brackets
-        if self.unmatched_angle_bracket_count > 0
-            && self.token.kind == TokenKind::Eq
-            && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Gt)))
+        // Try to detect an intended c-string literal while using a pre-2021 edition. The heuristic
+        // here is to identify a cooked, uninterpolated `c` id immediately followed by a string, or
+        // a cooked, uninterpolated `cr` id immediately followed by a string or a `#`, in an edition
+        // where c-string literals are not allowed. There is the very slight possibility of a false
+        // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
+        // that in the parser requires unbounded lookahead, so we only add a hint to the existing
+        // error rather than replacing it entirely.
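+        // For instance, in edition 2018 `c"text"` lexes as the identifier `c`
+        // immediately followed by the string literal `"text"`.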
+        if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
+            && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
+            || (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
+                && matches!(
+                    &self.token.kind,
+                    TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound
+                )))
+            && self.prev_token.span.hi() == self.token.span.lo()
+            && !self.token.span.at_least_rust_2021()
         {
-            err.span_label(self.prev_token.span, "maybe try to close unmatched angle bracket");
+            err.note("you may be trying to write a c-string literal");
+            err.note("c-string literals require Rust 2021 or later");
+            HelpUseLatestEdition::new().add_to_diagnostic(&mut err);
+        }
+
+        // `pub` may be used for an item or `pub(crate)`
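+        // (e.g. `public fn foo() {}` or `public(crate) struct S;`).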
+        if self.prev_token.is_ident_named(sym::public)
+            && (self.token.can_begin_item()
+                || self.token.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
+        {
+            err.span_suggestion_short(
+                self.prev_token.span,
+                "write `pub` instead of `public` to make the item public",
+                "pub",
+                Applicability::MachineApplicable,
+            );
+        }
+
+        if let token::DocComment(kind, style, _) = self.token.kind {
+            // We have something like `expr //!val` where the user likely meant `expr // !val`
+            let pos = self.token.span.lo() + BytePos(2);
+            let span = self.token.span.with_lo(pos).with_hi(pos);
+            err.span_suggestion_verbose(
+                span,
+                format!(
+                    "add a space before {} to write a regular comment",
+                    match (kind, style) {
+                        (token::CommentKind::Line, ast::AttrStyle::Inner) => "`!`",
+                        (token::CommentKind::Block, ast::AttrStyle::Inner) => "`!`",
+                        (token::CommentKind::Line, ast::AttrStyle::Outer) => "the last `/`",
+                        (token::CommentKind::Block, ast::AttrStyle::Outer) => "the last `*`",
+                    },
+                ),
+                " ".to_string(),
+                Applicability::MachineApplicable,
+            );
         }
 
         let sp = if self.token == token::Eof {
@@ -616,21 +707,6 @@ impl<'a> Parser<'a> {
         } else {
             label_sp
         };
-        match self.recover_closing_delimiter(
-            &expected
-                .iter()
-                .filter_map(|tt| match tt {
-                    TokenType::Token(t) => Some(t.clone()),
-                    _ => None,
-                })
-                .collect::<Vec<_>>(),
-            err,
-        ) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
 
         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
@@ -663,10 +739,113 @@ impl<'a> Parser<'a> {
             err.span_label(sp, label_exp);
             err.span_label(self.token.span, "unexpected token");
         }
-        self.maybe_annotate_with_ascription(&mut err, false);
         Err(err)
     }
 
+    pub(super) fn attr_on_non_tail_expr(&self, expr: &Expr) {
+        // Missing semicolon typo error.
+        let span = self.prev_token.span.shrink_to_hi();
+        let mut err = self.dcx().create_err(ExpectedSemi {
+            span,
+            token: self.token.clone(),
+            unexpected_token_label: Some(self.token.span),
+            sugg: ExpectedSemiSugg::AddSemi(span),
+        });
+        let attr_span = match &expr.attrs[..] {
+            [] => unreachable!(),
+            [only] => only.span,
+            [first, rest @ ..] => {
+                for attr in rest {
+                    err.span_label(attr.span, "");
+                }
+                first.span
+            }
+        };
+        err.span_label(
+            attr_span,
+            format!(
+                "only `;` terminated statements or tail expressions are allowed after {}",
+                if expr.attrs.len() == 1 { "this attribute" } else { "these attributes" },
+            ),
+        );
+        if self.token == token::Pound
+            && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Bracket))
+        {
+            // We have
+            // #[attr]
+            // expr
+            // #[not_attr]
+            // other_expr
+            err.span_label(span, "expected `;` here");
+            err.multipart_suggestion(
+                "alternatively, consider surrounding the expression with a block",
+                vec![
+                    (expr.span.shrink_to_lo(), "{ ".to_string()),
+                    (expr.span.shrink_to_hi(), " }".to_string()),
+                ],
+                Applicability::MachineApplicable,
+            );
+            let mut snapshot = self.create_snapshot_for_diagnostic();
+            if let [attr] = &expr.attrs[..]
+                && let ast::AttrKind::Normal(attr_kind) = &attr.kind
+                && let [segment] = &attr_kind.item.path.segments[..]
+                && segment.ident.name == sym::cfg
+                && let Some(args_span) = attr_kind.item.args.span()
+                && let next_attr = match snapshot.parse_attribute(InnerAttrPolicy::Forbidden(None))
+                {
+                    Ok(next_attr) => next_attr,
+                    Err(inner_err) => {
+                        err.cancel();
+                        inner_err.cancel();
+                        return;
+                    }
+                }
+                && let ast::AttrKind::Normal(next_attr_kind) = next_attr.kind
+                && let Some(next_attr_args_span) = next_attr_kind.item.args.span()
+                && let [next_segment] = &next_attr_kind.item.path.segments[..]
+                && next_segment.ident.name == sym::cfg
+            {
+                let next_expr = match snapshot.parse_expr() {
+                    Ok(next_expr) => next_expr,
+                    Err(inner_err) => {
+                        err.cancel();
+                        inner_err.cancel();
+                        return;
+                    }
+                };
+                // We have for sure
+                // #[cfg(..)]
+                // expr
+                // #[cfg(..)]
+                // other_expr
+                // So we suggest using `if cfg!(..) { expr } else if cfg!(..) { other_expr }`.
+                let margin = self.sess.source_map().span_to_margin(next_expr.span).unwrap_or(0);
+                let sugg = vec![
+                    (attr.span.with_hi(segment.span().hi()), "if cfg!".to_string()),
+                    (args_span.shrink_to_hi().with_hi(attr.span.hi()), " {".to_string()),
+                    (expr.span.shrink_to_lo(), "    ".to_string()),
+                    (
+                        next_attr.span.with_hi(next_segment.span().hi()),
+                        "} else if cfg!".to_string(),
+                    ),
+                    (
+                        next_attr_args_span.shrink_to_hi().with_hi(next_attr.span.hi()),
+                        " {".to_string(),
+                    ),
+                    (next_expr.span.shrink_to_lo(), "    ".to_string()),
+                    (next_expr.span.shrink_to_hi(), format!("\n{}}}", " ".repeat(margin))),
+                ];
+                err.multipart_suggestion(
+                    "it seems like you are trying to provide different expressions depending on \
+                     `cfg`, consider using `if cfg!(..)`",
+                    sugg,
+                    Applicability::MachineApplicable,
+                );
+            }
+        }
+        err.emit();
+    }
+
     fn check_too_many_raw_str_terminators(&mut self, err: &mut Diagnostic) -> bool {
         let sm = self.sess.source_map();
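+        // e.g. `r#"abc"##` lexes as the raw string `r#"abc"#` followed by a stray `#`.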
         match (&self.prev_token.kind, &self.token.kind) {
@@ -681,7 +860,7 @@ impl<'a> Parser<'a> {
             ) =>
             {
                 let n_hashes: u8 = *n_hashes;
-                err.set_primary_message("too many `#` when terminating raw string");
+                err.primary_message("too many `#` when terminating raw string");
                 let str_span = self.prev_token.span;
                 let mut span = self.token.span;
                 let mut count = 0;
@@ -692,16 +871,16 @@ impl<'a> Parser<'a> {
                     self.bump();
                     count += 1;
                 }
-                err.set_span(span);
+                err.span(span);
                 err.span_suggestion(
                     span,
-                    &format!("remove the extra `#`{}", pluralize!(count)),
+                    format!("remove the extra `#`{}", pluralize!(count)),
                     "",
                     Applicability::MachineApplicable,
                 );
                 err.span_label(
                     str_span,
-                    &format!("this raw string started with {n_hashes} `#`{}", pluralize!(n_hashes)),
+                    format!("this raw string started with {n_hashes} `#`{}", pluralize!(n_hashes)),
                 );
                 true
             }
@@ -713,19 +892,25 @@ impl<'a> Parser<'a> {
         &mut self,
         lo: Span,
         s: BlockCheckMode,
+        maybe_struct_name: token::Token,
+        can_be_struct_literal: bool,
     ) -> Option<PResult<'a, P<Block>>> {
         if self.token.is_ident() && self.look_ahead(1, |t| t == &token::Colon) {
             // We might be having a struct literal where people forgot to include the path:
             // fn foo() -> Foo {
             //     field: value,
             // }
+            debug!(?maybe_struct_name, ?self.token);
             let mut snapshot = self.create_snapshot_for_diagnostic();
-            let path =
-                Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
-            let struct_expr = snapshot.parse_struct_expr(None, path, AttrVec::new(), false);
+            let path = Path {
+                segments: ThinVec::new(),
+                span: self.prev_token.span.shrink_to_lo(),
+                tokens: None,
+            };
+            let struct_expr = snapshot.parse_expr_struct(None, path, false);
             let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
             return Some(match (struct_expr, block_tail) {
-                (Ok(expr), Err(mut err)) => {
+                (Ok(expr), Err(err)) => {
                     // We have encountered the following:
                     // fn foo() -> Foo {
                     //     field: value,
@@ -735,27 +920,43 @@ impl<'a> Parser<'a> {
                     //     field: value,
                     // } }
                     err.delay_as_bug();
-                    self.struct_span_err(
-                        expr.span,
-                        fluent::parser::struct_literal_body_without_path,
-                    )
-                    .multipart_suggestion(
-                        fluent::parser::suggestion,
-                        vec![
-                            (expr.span.shrink_to_lo(), "{ SomeStruct ".to_string()),
-                            (expr.span.shrink_to_hi(), " }".to_string()),
-                        ],
-                        Applicability::MaybeIncorrect,
-                    )
-                    .emit();
                     self.restore_snapshot(snapshot);
                     let mut tail = self.mk_block(
-                        vec![self.mk_stmt_err(expr.span)],
+                        thin_vec![self.mk_stmt_err(expr.span)],
                         s,
                         lo.to(self.prev_token.span),
                     );
                     tail.could_be_bare_literal = true;
-                    Ok(tail)
+                    if maybe_struct_name.is_ident() && can_be_struct_literal {
+                        // Account for `if Example { a: one(), }.is_pos() {}`.
+                        // Expand `before` so that we also take care of module paths such as
+                        // `foo::Bar { ... }`: we want to suggest `(foo::Bar { ... })`
+                        // instead of `foo::(Bar { ... })`.
+                        let sm = self.sess.source_map();
+                        let before = maybe_struct_name.span.shrink_to_lo();
+                        if let Ok(extend_before) = sm.span_extend_prev_while(before, |t| {
+                            t.is_alphanumeric() || t == ':' || t == '_'
+                        }) {
+                            Err(self.dcx().create_err(StructLiteralNeedingParens {
+                                span: maybe_struct_name.span.to(expr.span),
+                                sugg: StructLiteralNeedingParensSugg {
+                                    before: extend_before.shrink_to_lo(),
+                                    after: expr.span.shrink_to_hi(),
+                                },
+                            }))
+                        } else {
+                            return None;
+                        }
+                    } else {
+                        self.dcx().emit_err(StructLiteralBodyWithoutPath {
+                            span: expr.span,
+                            sugg: StructLiteralBodyWithoutPathSugg {
+                                before: expr.span.shrink_to_lo(),
+                                after: expr.span.shrink_to_hi(),
+                            },
+                        });
+                        Ok(tail)
+                    }
                 }
                 (Err(err), Ok(tail)) => {
                     // We have a block tail that contains a somehow valid type ascription expr.
@@ -777,57 +978,63 @@ impl<'a> Parser<'a> {
         None
     }
 
-    pub fn maybe_annotate_with_ascription(
+    pub(super) fn recover_closure_body(
         &mut self,
-        err: &mut Diagnostic,
-        maybe_expected_semicolon: bool,
-    ) {
-        if let Some((sp, likely_path)) = self.last_type_ascription.take() {
-            let sm = self.sess.source_map();
-            let next_pos = sm.lookup_char_pos(self.token.span.lo());
-            let op_pos = sm.lookup_char_pos(sp.hi());
-
-            let allow_unstable = self.sess.unstable_features.is_nightly_build();
-
-            if likely_path {
-                err.span_suggestion(
-                    sp,
-                    "maybe write a path separator here",
-                    "::",
-                    if allow_unstable {
-                        Applicability::MaybeIncorrect
-                    } else {
-                        Applicability::MachineApplicable
-                    },
+        mut err: DiagnosticBuilder<'a>,
+        before: token::Token,
+        prev: token::Token,
+        token: token::Token,
+        lo: Span,
+        decl_hi: Span,
+    ) -> PResult<'a, P<Expr>> {
+        err.span_label(lo.to(decl_hi), "while parsing the body of this closure");
+        match before.kind {
+            token::OpenDelim(Delimiter::Brace)
+                if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
+            {
+                // `{ || () }` should have been `|| { () }`
+                err.multipart_suggestion(
+                    "you might have meant to open the body of the closure, instead of enclosing \
+                     the closure in a block",
+                    vec![
+                        (before.span, String::new()),
+                        (prev.span.shrink_to_hi(), " {".to_string()),
+                    ],
+                    Applicability::MaybeIncorrect,
                 );
-                self.sess.type_ascription_path_suggestions.borrow_mut().insert(sp);
-            } else if op_pos.line != next_pos.line && maybe_expected_semicolon {
-                err.span_suggestion(
-                    sp,
-                    "try using a semicolon",
-                    ";",
+                err.emit();
+                self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+            }
+            token::OpenDelim(Delimiter::Parenthesis)
+                if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) =>
+            {
+                // We are within a function call or tuple, so we can emit the error
+                // and recover.
+                self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis), &token::Comma]);
+
+                err.multipart_suggestion_verbose(
+                    "you might have meant to open the body of the closure",
+                    vec![
+                        (prev.span.shrink_to_hi(), " {".to_string()),
+                        (self.token.span.shrink_to_lo(), "}".to_string()),
+                    ],
                     Applicability::MaybeIncorrect,
                 );
-            } else if allow_unstable {
-                err.span_label(sp, "tried to parse a type due to this type ascription");
-            } else {
-                err.span_label(sp, "tried to parse a type due to this");
+                err.emit();
             }
-            if allow_unstable {
-                // Give extra information about type ascription only if it's a nightly compiler.
-                err.note(
-                    "`#![feature(type_ascription)]` lets you annotate an expression with a type: \
-                     `<expr>: <type>`",
+            _ if !matches!(token.kind, token::OpenDelim(Delimiter::Brace)) => {
+                // We don't have a heuristic to correctly identify where the block
+                // should be closed.
+                err.multipart_suggestion_verbose(
+                    "you might have meant to open the body of the closure",
+                    vec![(prev.span.shrink_to_hi(), " {".to_string())],
+                    Applicability::HasPlaceholders,
                 );
-                if !likely_path {
-                    // Avoid giving too much info when it was likely an unrelated typo.
-                    err.note(
-                        "see issue #23416 <https://github.com/rust-lang/rust/issues/23416> \
-                        for more information",
-                    );
-                }
+                return Err(err);
             }
+            _ => return Err(err),
         }
+        Ok(self.mk_expr_err(lo.to(self.token.span)))
     }
 
     /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
@@ -856,7 +1063,11 @@ impl<'a> Parser<'a> {
         &mut self,
         segment: &PathSegment,
         end: &[&TokenKind],
-    ) -> bool {
+    ) -> Option<ErrorGuaranteed> {
+        if !self.may_recover() {
+            return None;
+        }
+
         // This function is intended to be invoked after parsing a path segment where there are two
         // cases:
         //
@@ -882,14 +1093,14 @@ impl<'a> Parser<'a> {
         //
         // `x.foo::<u32>>>(3)`
         let parsed_angle_bracket_args =
-            segment.args.as_ref().map_or(false, |args| args.is_angle_bracketed());
+            segment.args.as_ref().is_some_and(|args| args.is_angle_bracketed());
 
         debug!(
             "check_trailing_angle_brackets: parsed_angle_bracket_args={:?}",
             parsed_angle_bracket_args,
         );
         if !parsed_angle_bracket_args {
-            return false;
+            return None;
         }
 
         // Keep the span at the start so we can highlight the sequence of `>` characters to be
@@ -927,7 +1138,7 @@ impl<'a> Parser<'a> {
             number_of_gt, number_of_shr,
         );
         if number_of_gt < 1 && number_of_shr < 1 {
-            return false;
+            return None;
         }
 
         // Finally, double check that we have our end token as otherwise this is the
@@ -941,26 +1152,19 @@ impl<'a> Parser<'a> {
             self.eat_to_tokens(end);
             let span = lo.until(self.token.span);
 
-            let total_num_of_gt = number_of_gt + number_of_shr * 2;
-            self.struct_span_err(
-                span,
-                &format!("unmatched angle bracket{}", pluralize!(total_num_of_gt)),
-            )
-            .span_suggestion(
-                span,
-                &format!("remove extra angle bracket{}", pluralize!(total_num_of_gt)),
-                "",
-                Applicability::MachineApplicable,
-            )
-            .emit();
-            return true;
+            let num_extra_brackets = number_of_gt + number_of_shr * 2;
+            return Some(self.dcx().emit_err(UnmatchedAngleBrackets { span, num_extra_brackets }));
         }
-        false
+        None
     }
 
     /// Check if a method call with an intended turbofish has been written without surrounding
     /// angle brackets.
     pub(super) fn check_turbofish_missing_angle_brackets(&mut self, segment: &mut PathSegment) {
+        if !self.may_recover() {
+            return;
+        }
+
         if token::ModSep == self.token.kind && segment.args.is_none() {
             let snapshot = self.create_snapshot_for_diagnostic();
             self.bump();
@@ -978,22 +1182,15 @@ impl<'a> Parser<'a> {
                     }
                     if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
                         // Recover from bad turbofish: `foo.collect::Vec<_>()`.
-                        let args = AngleBracketedArgs { args, span }.into();
-                        segment.args = args;
+                        segment.args = Some(AngleBracketedArgs { args, span }.into());
 
-                        self.struct_span_err(
+                        self.dcx().emit_err(GenericParamsWithoutAngleBrackets {
                             span,
-                            "generic parameters without surrounding angle brackets",
-                        )
-                        .multipart_suggestion(
-                            "surround the type parameters with angle brackets",
-                            vec![
-                                (span.shrink_to_lo(), "<".to_string()),
-                                (trailing_span, ">".to_string()),
-                            ],
-                            Applicability::MachineApplicable,
-                        )
-                        .emit();
+                            sugg: GenericParamsWithoutAngleBracketsSugg {
+                                left: span.shrink_to_lo(),
+                                right: trailing_span,
+                            },
+                        });
                     } else {
                         // This doesn't look like an invalid turbofish, can't recover parse state.
                         self.restore_snapshot(snapshot);
@@ -1013,7 +1210,7 @@ impl<'a> Parser<'a> {
     /// encounter a parse error when encountering the first `,`.
     pub(super) fn check_mistyped_turbofish_with_multiple_type_params(
         &mut self,
-        mut e: DiagnosticBuilder<'a, ErrorGuaranteed>,
+        mut e: DiagnosticBuilder<'a>,
         expr: &mut P<Expr>,
     ) -> PResult<'a, ()> {
         if let ExprKind::Binary(binop, _, _) = &expr.kind
@@ -1028,45 +1225,69 @@ impl<'a> Parser<'a> {
             match x {
                 Ok((_, _, false)) => {
                     if self.eat(&token::Gt) {
+                        // We made sense of it. Improve the error message.
                         e.span_suggestion_verbose(
                             binop.span.shrink_to_lo(),
-                            TURBOFISH_SUGGESTION_STR,
+                            fluent::parse_sugg_turbofish_syntax,
                             "::",
                             Applicability::MaybeIncorrect,
-                        )
-                        .emit();
+                        );
                         match self.parse_expr() {
                             Ok(_) => {
-                                *expr =
-                                    self.mk_expr_err(expr.span.to(self.prev_token.span));
+                                // The subsequent expression is valid. Mark
+                                // `expr` as erroneous and emit `e` now, but
+                                // return `Ok` so parsing can continue.
+                                e.emit();
+                                *expr = self.mk_expr_err(expr.span.to(self.prev_token.span));
                                 return Ok(());
                             }
                             Err(err) => {
-                                *expr = self.mk_expr_err(expr.span);
                                 err.cancel();
                             }
                         }
                     }
                 }
+                Ok((_, _, true)) => {}
                 Err(err) => {
                     err.cancel();
                 }
-                _ => {}
             }
         }
         Err(e)
     }
 
+    /// Suggests adding a missing `let` before the identifier in a statement,
+    /// e.g. `a: Ty = 1` -> `let a: Ty = 1`.
+    pub(super) fn suggest_add_missing_let_for_stmt(&mut self, err: &mut DiagnosticBuilder<'a>) {
+        if self.token == token::Colon {
+            let prev_span = self.prev_token.span.shrink_to_lo();
+            let snapshot = self.create_snapshot_for_diagnostic();
+            self.bump();
+            match self.parse_ty() {
+                Ok(_) => {
+                    if self.token == token::Eq {
+                        let sugg = SuggAddMissingLetStmt { span: prev_span };
+                        sugg.add_to_diagnostic(err);
+                    }
+                }
+                Err(e) => {
+                    e.cancel();
+                }
+            }
+            self.restore_snapshot(snapshot);
+        }
+    }
+
     /// Check to see if a pair of chained operators looks like an attempt at chained comparison,
     /// e.g. `1 < x <= 3`. If so, suggest either splitting the comparison into two, or
     /// parenthesising the leftmost comparison.
     fn attempt_chained_comparison_suggestion(
         &mut self,
-        err: &mut Diagnostic,
+        err: &mut ComparisonOperatorsCannotBeChained,
         inner_op: &Expr,
         outer_op: &Spanned<AssocOp>,
     ) -> bool /* advanced the cursor */ {
-        if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
+        if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
             if let ExprKind::Field(_, ident) = l1.kind
                 && ident.as_str().parse::<i32>().is_err()
                 && !matches!(r1.kind, ExprKind::Lit(_))
@@ -1075,16 +1296,6 @@ impl<'a> Parser<'a> {
                 // suggestion being the only one to apply is high.
                 return false;
             }
-            let mut enclose = |left: Span, right: Span| {
-                err.multipart_suggestion(
-                    "parenthesize the comparison",
-                    vec![
-                        (left.shrink_to_lo(), "(".to_string()),
-                        (right.shrink_to_hi(), ")".to_string()),
-                    ],
-                    Applicability::MaybeIncorrect,
-                );
-            };
             return match (op.node, &outer_op.node) {
                 // `x == y == z`
                 (BinOpKind::Eq, AssocOp::Equal) |
@@ -1096,14 +1307,12 @@ impl<'a> Parser<'a> {
                 (BinOpKind::Ge, AssocOp::GreaterEqual | AssocOp::Greater) => {
                     let expr_to_str = |e: &Expr| {
                         self.span_to_snippet(e.span)
-                            .unwrap_or_else(|_| pprust::expr_to_string(&e))
+                            .unwrap_or_else(|_| pprust::expr_to_string(e))
                     };
-                    err.span_suggestion_verbose(
-                        inner_op.span.shrink_to_hi(),
-                        "split the comparison into two",
-                        format!(" && {}", expr_to_str(&r1)),
-                        Applicability::MaybeIncorrect,
-                    );
+                    err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::SplitComparison {
+                        span: inner_op.span.shrink_to_hi(),
+                        middle_term: expr_to_str(r1),
+                    });
                     false // Keep the current parse behavior, where the AST is `(x < y) < z`.
                 }
                 // `x == y < z`
@@ -1114,7 +1323,10 @@ impl<'a> Parser<'a> {
                         Ok(r2) => {
                             // We are sure that outer-op-rhs could be consumed, the suggestion is
                             // likely correct.
-                            enclose(r1.span, r2.span);
+                            err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::Parenthesize {
+                                left: r1.span.shrink_to_lo(),
+                                right: r2.span.shrink_to_hi(),
+                            });
                             true
                         }
                         Err(expr_err) => {
@@ -1131,7 +1343,10 @@ impl<'a> Parser<'a> {
                     // further checks are necessary.
                     match self.parse_expr() {
                         Ok(_) => {
-                            enclose(l1.span, r1.span);
+                            err.chaining_sugg = Some(ComparisonOperatorsCannotBeChainedSugg::Parenthesize {
+                                left: l1.span.shrink_to_lo(),
+                                right: r1.span.shrink_to_hi(),
+                            });
                             true
                         }
                         Err(expr_err) => {
@@ -1176,23 +1391,15 @@ impl<'a> Parser<'a> {
             outer_op.node,
         );
 
-        let mk_err_expr =
-            |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err, AttrVec::new())));
+        let mk_err_expr = |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err)));
 
-        match inner_op.kind {
-            ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
-                let mut err = self.struct_span_err(
-                    vec![op.span, self.prev_token.span],
-                    "comparison operators cannot be chained",
-                );
-
-                let suggest = |err: &mut Diagnostic| {
-                    err.span_suggestion_verbose(
-                        op.span.shrink_to_lo(),
-                        TURBOFISH_SUGGESTION_STR,
-                        "::",
-                        Applicability::MaybeIncorrect,
-                    );
+        match &inner_op.kind {
+            ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
+                let mut err = ComparisonOperatorsCannotBeChained {
+                    span: vec![op.span, self.prev_token.span],
+                    suggest_turbofish: None,
+                    help_turbofish: None,
+                    chaining_sugg: None,
                 };
 
                 // Include `<` to provide this recommendation even in a case like
@@ -1219,7 +1426,13 @@ impl<'a> Parser<'a> {
                     return if token::ModSep == self.token.kind {
                         // We have some certainty that this was a bad turbofish at this point.
                         // `foo< bar >::`
-                        suggest(&mut err);
+                        if let ExprKind::Binary(o, ..) = inner_op.kind
+                            && o.node == BinOpKind::Lt
+                        {
+                            err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        } else {
+                            err.help_turbofish = Some(());
+                        }
 
                         let snapshot = self.create_snapshot_for_diagnostic();
                         self.bump(); // `::`
@@ -1228,7 +1441,7 @@ impl<'a> Parser<'a> {
                         match self.parse_expr() {
                             Ok(_) => {
                                 // 99% certain that the suggestion is correct, continue parsing.
-                                err.emit();
+                                self.dcx().emit_err(err);
                                 // FIXME: actually check that the two expressions in the binop are
                                 // paths and resynthesize new fn call expression instead of using
                                 // `ExprKind::Err` placeholder.
@@ -1239,18 +1452,24 @@ impl<'a> Parser<'a> {
                                 // Not entirely sure now, but we bubble the error up with the
                                 // suggestion.
                                 self.restore_snapshot(snapshot);
-                                Err(err)
+                                Err(self.dcx().create_err(err))
                             }
                         }
                     } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
                         // We have high certainty that this was a bad turbofish at this point.
                         // `foo< bar >(`
-                        suggest(&mut err);
+                        if let ExprKind::Binary(o, ..) = inner_op.kind
+                            && o.node == BinOpKind::Lt
+                        {
+                            err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        } else {
+                            err.help_turbofish = Some(());
+                        }
                         // Consume the fn call arguments.
                         match self.consume_fn_args() {
-                            Err(()) => Err(err),
+                            Err(()) => Err(self.dcx().create_err(err)),
                             Ok(()) => {
-                                err.emit();
+                                self.dcx().emit_err(err);
                                 // FIXME: actually check that the two expressions in the binop are
                                 // paths and resynthesize new fn call expression instead of using
                                 // `ExprKind::Err` placeholder.
@@ -1263,25 +1482,24 @@ impl<'a> Parser<'a> {
                         {
                             // All we know is that this is `foo < bar >` and *nothing* else. Try to
                             // be helpful, but don't attempt to recover.
-                            err.help(TURBOFISH_SUGGESTION_STR);
-                            err.help("or use `(...)` if you meant to specify fn arguments");
+                            err.help_turbofish = Some(());
                         }
 
                         // If it looks like a genuine attempt to chain operators (as opposed to a
                         // misformatted turbofish, for instance), suggest a correct form.
                         if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op)
                         {
-                            err.emit();
+                            self.dcx().emit_err(err);
                             mk_err_expr(self, inner_op.span.to(self.prev_token.span))
                         } else {
                             // These cases cause too many knock-down errors, bail out (#61329).
-                            Err(err)
+                            Err(self.dcx().create_err(err))
                         }
                     };
                 }
                 let recover =
                     self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op);
-                err.emit();
+                self.dcx().emit_err(err);
                 if recover {
                     return mk_err_expr(self, inner_op.span.to(self.prev_token.span));
                 }
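As a rough illustration of the inputs this recovery targets (a sketch, not taken from the rustc test suite): chained comparisons are rejected with the new ComparisonOperatorsCannotBeChained diagnostic, and a mis-spelled turbofish such as `Vec<u8>::new()` gets the `::<` suggestion. The accepted forms:

    fn main() {
        let (a, b, c) = (1, 2, 3);

        // `a < b < c` cannot be chained; split it into two comparisons:
        let _ordered = a < b && b < c;

        // `Vec<u8>::new()` in expression position parses as a comparison chain
        // (`Vec < u8 > ::new()`); the suggested fix is the turbofish form:
        let _v = Vec::<u8>::new();
    }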
@@ -1314,7 +1532,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn maybe_report_ambiguous_plus(&mut self, impl_dyn_multi: bool, ty: &Ty) {
         if impl_dyn_multi {
-            self.sess.emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(&ty), span: ty.span });
+            self.dcx().emit_err(AmbiguousPlus { sum_ty: pprust::ty_to_string(ty), span: ty.span });
         }
     }
 
@@ -1322,23 +1540,56 @@ impl<'a> Parser<'a> {
     pub(super) fn maybe_recover_from_question_mark(&mut self, ty: P<Ty>) -> P<Ty> {
         if self.token == token::Question {
             self.bump();
-            self.struct_span_err(self.prev_token.span, "invalid `?` in type")
-                .span_label(self.prev_token.span, "`?` is only allowed on expressions, not types")
-                .multipart_suggestion(
-                    "if you meant to express that the type might not contain a value, use the `Option` wrapper type",
-                    vec![
-                        (ty.span.shrink_to_lo(), "Option<".to_string()),
-                        (self.prev_token.span, ">".to_string()),
-                    ],
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            self.dcx().emit_err(QuestionMarkInType {
+                span: self.prev_token.span,
+                sugg: QuestionMarkInTypeSugg {
+                    left: ty.span.shrink_to_lo(),
+                    right: self.prev_token.span,
+                },
+            });
             self.mk_ty(ty.span.to(self.prev_token.span), TyKind::Err)
         } else {
             ty
         }
     }
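The QuestionMarkInType diagnostic above replaces the hand-written error for a `?` applied to a type. A minimal, illustrative sketch of the rejected input and the suggested `Option` wrapper:

    // Rejected: `fn parse_number(s: &str) -> i32?` ("invalid `?` in type");
    // the machine-applicable suggestion wraps the type instead:
    fn parse_number(s: &str) -> Option<i32> {
        s.parse().ok()
    }

    fn main() {
        assert_eq!(parse_number("42"), Some(42));
    }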
 
+    /// Rust has no ternary operator (`cond ? then : else`). Parse it and try
+    /// to recover from it if `then` and `else` are valid expressions. Returns
+    /// an error if this does appear to be a ternary expression.
+    pub(super) fn maybe_recover_from_ternary_operator(&mut self) -> PResult<'a, ()> {
+        if self.prev_token != token::Question {
+            return PResult::Ok(());
+        }
+
+        let lo = self.prev_token.span.lo();
+        let snapshot = self.create_snapshot_for_diagnostic();
+
+        if match self.parse_expr() {
+            Ok(_) => true,
+            Err(err) => {
+                err.cancel();
+                // The colon can sometimes be mistaken for type
+                // ascription. Catch when this happens and continue.
+                self.token == token::Colon
+            }
+        } {
+            if self.eat_noexpect(&token::Colon) {
+                match self.parse_expr() {
+                    Ok(_) => {
+                        return Err(self
+                            .dcx()
+                            .create_err(TernaryOperator { span: self.token.span.with_lo(lo) }));
+                    }
+                    Err(err) => {
+                        err.cancel();
+                    }
+                };
+            }
+        }
+        self.restore_snapshot(snapshot);
+        Ok(())
+    }
+
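For the new `maybe_recover_from_ternary_operator` above, a sketch of the C-style input it detects and the Rust form the user is pointed toward (hypothetical example, not from the test suite):

    fn main() {
        let cond = true;
        // `let x = cond ? 1 : 2;` is rejected with the `TernaryOperator` error;
        // the equivalent Rust is an `if` expression:
        let x = if cond { 1 } else { 2 };
        assert_eq!(x, 1);
    }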
     pub(super) fn maybe_recover_from_bad_type_plus(&mut self, ty: &Ty) -> PResult<'a, ()> {
         // Do not add `+` to expected tokens.
         if !self.token.is_like_plus() {
@@ -1346,11 +1597,11 @@ impl<'a> Parser<'a> {
         }
 
         self.bump(); // `+`
-        let bounds = self.parse_generic_bounds(None)?;
+        let bounds = self.parse_generic_bounds()?;
         let sum_span = ty.span.to(self.prev_token.span);
 
-        let sub = match ty.kind {
-            TyKind::Rptr(ref lifetime, ref mut_ty) => {
+        let sub = match &ty.kind {
+            TyKind::Ref(lifetime, mut_ty) => {
                 let sum_with_parens = pprust::to_string(|s| {
                     s.s.word("&");
                     s.print_opt_lifetime(lifetime);
@@ -1370,7 +1621,7 @@ impl<'a> Parser<'a> {
             _ => BadTypePlusSub::ExpectPath { span: sum_span },
         };
 
-        self.sess.emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub });
+        self.dcx().emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub });
 
         Ok(())
     }
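`maybe_recover_from_bad_type_plus` fires on an unparenthesized trait-object sum behind a reference. A small illustrative example of the shape the `BadTypePlusSub::AddParen` suggestion produces (assumed example, not a test from this patch):

    use std::fmt::Debug;

    // Rejected: `fn takes_obj(x: &dyn Debug + Send)` (E0178, ambiguous `+`);
    // the suggestion parenthesizes the sum:
    fn takes_obj(_x: &(dyn Debug + Send)) {}

    fn main() {
        takes_obj(&1_i32);
    }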
@@ -1379,12 +1630,10 @@ impl<'a> Parser<'a> {
         &mut self,
         operand_expr: P<Expr>,
         op_span: Span,
-        prev_is_semi: bool,
+        start_stmt: bool,
     ) -> PResult<'a, P<Expr>> {
-        let standalone =
-            if prev_is_semi { IsStandalone::Standalone } else { IsStandalone::Subexpr };
+        let standalone = if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr };
         let kind = IncDecRecovery { standalone, op: IncOrDec::Inc, fixity: UnaryFixity::Pre };
-
         self.recover_from_inc_dec(operand_expr, kind, op_span)
     }
 
@@ -1392,13 +1641,27 @@ impl<'a> Parser<'a> {
         &mut self,
         operand_expr: P<Expr>,
         op_span: Span,
+        start_stmt: bool,
     ) -> PResult<'a, P<Expr>> {
         let kind = IncDecRecovery {
-            standalone: IsStandalone::Maybe,
+            standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
             op: IncOrDec::Inc,
             fixity: UnaryFixity::Post,
         };
+        self.recover_from_inc_dec(operand_expr, kind, op_span)
+    }
 
+    pub(super) fn recover_from_postfix_decrement(
+        &mut self,
+        operand_expr: P<Expr>,
+        op_span: Span,
+        start_stmt: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let kind = IncDecRecovery {
+            standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
+            op: IncOrDec::Dec,
+            fixity: UnaryFixity::Post,
+        };
         self.recover_from_inc_dec(operand_expr, kind, op_span)
     }
 
@@ -1408,14 +1671,14 @@ impl<'a> Parser<'a> {
         kind: IncDecRecovery,
         op_span: Span,
     ) -> PResult<'a, P<Expr>> {
-        let mut err = self.struct_span_err(
+        let mut err = self.dcx().struct_span_err(
             op_span,
-            &format!("Rust has no {} {} operator", kind.fixity, kind.op.name()),
+            format!("Rust has no {} {} operator", kind.fixity, kind.op.name()),
         );
-        err.span_label(op_span, &format!("not a valid {} operator", kind.fixity));
+        err.span_label(op_span, format!("not a valid {} operator", kind.fixity));
 
         let help_base_case = |mut err: DiagnosticBuilder<'_, _>, base| {
-            err.help(&format!("use `{}= 1` instead", kind.op.chr()));
+            err.help(format!("use `{}= 1` instead", kind.op.chr()));
             err.emit();
             Ok(base)
         };
@@ -1427,34 +1690,26 @@ impl<'a> Parser<'a> {
         };
 
         match kind.standalone {
-            IsStandalone::Standalone => self.inc_dec_standalone_suggest(kind, spans).emit(&mut err),
+            IsStandalone::Standalone => {
+                self.inc_dec_standalone_suggest(kind, spans).emit_verbose(&mut err)
+            }
             IsStandalone::Subexpr => {
-                let Ok(base_src) = self.span_to_snippet(base.span)
-                    else { return help_base_case(err, base) };
+                let Ok(base_src) = self.span_to_snippet(base.span) else {
+                    return help_base_case(err, base);
+                };
                 match kind.fixity {
                     UnaryFixity::Pre => {
                         self.prefix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
                     }
                     UnaryFixity::Post => {
-                        self.postfix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
+                        // Won't suggest, since we cannot handle the precedences here.
+                        // For example, `a + b++` has already been parsed as `(a + b)++`, so no fix can be offered.
+                        if !matches!(base.kind, ExprKind::Binary(_, _, _)) {
+                            self.postfix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
+                        }
                     }
                 }
             }
-            IsStandalone::Maybe => {
-                let Ok(base_src) = self.span_to_snippet(base.span)
-                    else { return help_base_case(err, base) };
-                let sugg1 = match kind.fixity {
-                    UnaryFixity::Pre => self.prefix_inc_dec_suggest(base_src, kind, spans),
-                    UnaryFixity::Post => self.postfix_inc_dec_suggest(base_src, kind, spans),
-                };
-                let sugg2 = self.inc_dec_standalone_suggest(kind, spans);
-                MultiSugg::emit_many(
-                    &mut err,
-                    "use `+= 1` instead",
-                    Applicability::Unspecified,
-                    [sugg1, sugg2].into_iter(),
-                )
-            }
         }
         Err(err)
     }
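With the `Maybe` variant gone, `recover_from_inc_dec` suggests exactly one rewrite per case: standalone uses get the compound assignment, and postfix subexpressions get the temporary-variable rewrite. An illustrative sketch:

    fn main() {
        let mut i = 0;
        // `i++;` and `++i;` are rejected ("Rust has no postfix/prefix increment
        // operator"); the standalone suggestion is the compound assignment:
        i += 1;
        // For a subexpression such as `let x = i++;`, the postfix suggestion
        // introduces a temporary:
        let x = { let tmp = i; i += 1; tmp };
        assert_eq!((x, i), (1, 2));
    }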
@@ -1485,7 +1740,7 @@ impl<'a> Parser<'a> {
         MultiSugg {
             msg: format!("use `{}= 1` instead", kind.op.chr()),
             patches: vec![
-                (pre_span, format!("{{ let {} = ", tmp_var)),
+                (pre_span, format!("{{ let {tmp_var} = ")),
                 (post_span, format!("; {} {}= 1; {} }}", base_src, kind.op.chr(), tmp_var)),
             ],
             applicability: Applicability::HasPlaceholders,
@@ -1497,9 +1752,16 @@ impl<'a> Parser<'a> {
         kind: IncDecRecovery,
         (pre_span, post_span): (Span, Span),
     ) -> MultiSugg {
+        let mut patches = Vec::new();
+
+        if !pre_span.is_empty() {
+            patches.push((pre_span, String::new()));
+        }
+
+        patches.push((post_span, format!(" {}= 1", kind.op.chr())));
         MultiSugg {
             msg: format!("use `{}= 1` instead", kind.op.chr()),
-            patches: vec![(pre_span, String::new()), (post_span, format!(" {}= 1", kind.op.chr()))],
+            patches,
             applicability: Applicability::MachineApplicable,
         }
     }
@@ -1511,6 +1773,10 @@ impl<'a> Parser<'a> {
         &mut self,
         base: P<T>,
     ) -> PResult<'a, P<T>> {
+        if !self.may_recover() {
+            return Ok(base);
+        }
+
         // Do not add `::` to expected tokens.
         if self.token == token::ModSep {
             if let Some(ty) = base.to_ty() {
@@ -1529,18 +1795,17 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<T>> {
         self.expect(&token::ModSep)?;
 
-        let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
+        let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
         path.span = ty_span.to(self.prev_token.span);
 
-        let ty_str = self.span_to_snippet(ty_span).unwrap_or_else(|_| pprust::ty_to_string(&ty));
-        self.sess.emit_err(BadQPathStage2 {
-            span: path.span,
-            ty: format!("<{}>::{}", ty_str, pprust::path_to_string(&path)),
+        self.dcx().emit_err(BadQPathStage2 {
+            span: ty_span,
+            wrap: WrapType { lo: ty_span.shrink_to_lo(), hi: ty_span.shrink_to_hi() },
         });
 
         let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
-        Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
+        Ok(P(T::recovered(Some(P(QSelf { ty, path_span, position: 0 })), path)))
     }
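The reworked BadQPathStage2 now carries a WrapType suggestion that wraps the offending type in angle brackets rather than printing the rebuilt path. A sketch of the kind of input this recovery is aimed at (assumed example):

    fn main() {
        // Rejected: `[u8; 2]::default()`; the suggestion wraps the type so the
        // call becomes a qualified path:
        let x = <[u8; 2]>::default();
        assert_eq!(x, [0, 0]);
    }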
 
     pub fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
@@ -1566,7 +1831,7 @@ impl<'a> Parser<'a> {
                     err.name = name;
                 }
             }
-            self.sess.emit_err(err);
+            self.dcx().emit_err(err);
             true
         } else {
             false
@@ -1584,7 +1849,7 @@ impl<'a> Parser<'a> {
         let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
             // Point at the end of the macro call when reaching end of macro arguments.
             (token::Eof, Some(_)) => {
-                let sp = self.sess.source_map().next_point(self.prev_token.span);
+                let sp = self.prev_token.span.shrink_to_hi();
                 (sp, sp)
             }
             // We don't want to point at the following span after DUMMY_SP.
@@ -1602,14 +1867,8 @@ impl<'a> Parser<'a> {
                 _ => this_token_str,
             },
         );
-        let mut err = self.struct_span_err(sp, &msg);
+        let mut err = self.dcx().struct_span_err(sp, msg);
         let label_exp = format!("expected `{token_str}`");
-        match self.recover_closing_delimiter(&[t.clone()], err) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
         let sm = self.sess.source_map();
         if !sm.is_multiline(prev_sp.until(sp)) {
             // When the spans are in the same line, it means that the only content
@@ -1623,19 +1882,42 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
-        if self.eat(&token::Semi) {
+        if self.eat(&token::Semi) || self.recover_colon_as_semi() {
             return Ok(());
         }
         self.expect(&token::Semi).map(drop) // Error unconditionally
     }
 
+    pub(super) fn recover_colon_as_semi(&mut self) -> bool {
+        let line_idx = |span: Span| {
+            self.sess
+                .source_map()
+                .span_to_lines(span)
+                .ok()
+                .and_then(|lines| Some(lines.lines.get(0)?.line_index))
+        };
+
+        if self.may_recover()
+            && self.token == token::Colon
+            && self.look_ahead(1, |next| line_idx(self.token.span) < line_idx(next.span))
+        {
+            self.dcx().emit_err(ColonAsSemi {
+                span: self.token.span,
+                type_ascription: self.sess.unstable_features.is_nightly_build().then_some(()),
+            });
+            self.bump();
+            return true;
+        }
+
+        false
+    }
+
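`recover_colon_as_semi` only fires when the `:` is the last token on its line (the look-ahead token starts on a later line), which is what the line-index comparison above checks. An illustrative before/after:

    fn main() {
        // Recovered input:
        //     let x = 3:
        //     let y = 4;
        // The `:` at the end of the first line is reported via `ColonAsSemi`
        // and consumed as if it were `;`. The intended code:
        let x = 3;
        let y = 4;
        assert_eq!(x + y, 7);
    }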
     /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`,
     /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`.
     pub(super) fn recover_incorrect_await_syntax(
         &mut self,
         lo: Span,
         await_sp: Span,
-        attrs: AttrVec,
     ) -> PResult<'a, P<Expr>> {
         let (hi, expr, is_question) = if self.token == token::Not {
             // Handle `await!(<expr>)`.
@@ -1644,13 +1926,7 @@ impl<'a> Parser<'a> {
             self.recover_await_prefix(await_sp)?
         };
         let sp = self.error_on_incorrect_await(lo, hi, &expr, is_question);
-        let kind = match expr.kind {
-            // Avoid knock-down errors as we don't know whether to interpret this as `foo().await?`
-            // or `foo()?.await` (the very reason we went with postfix syntax 😅).
-            ExprKind::Try(_) => ExprKind::Err,
-            _ => ExprKind::Await(expr),
-        };
-        let expr = self.mk_expr(lo.to(sp), kind, attrs);
+        let expr = self.mk_expr(lo.to(sp), ExprKind::Err);
         self.maybe_recover_from_bad_qpath(expr)
     }
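The alternative `await` spellings consumed above all funnel into the same recovery. A sketch of the rejected prefix forms and the accepted postfix form (driving the future is left out; any executor such as `futures::executor::block_on` would do):

    async fn get() -> u8 {
        7
    }

    // Rejected: `await get()`, `await!(get())`, `await? get()`, `await { get() }`;
    // the accepted syntax is postfix:
    async fn caller() -> u8 {
        get().await
    }

    fn main() {
        // Building the future is enough for this sketch; it is never polled here.
        let _fut = caller();
    }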
 
@@ -1668,7 +1944,7 @@ impl<'a> Parser<'a> {
             // Handle `await { <expr> }`.
             // This needs to be handled separately from the next arm to avoid
             // interpreting `await { <expr> }?` as `<expr>?.await`.
-            self.parse_block_expr(None, self.token.span, BlockCheckMode::Default, AttrVec::new())
+            self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)
         } else {
             self.parse_expr()
         }
@@ -1686,10 +1962,10 @@ impl<'a> Parser<'a> {
             _ => Applicability::MachineApplicable,
         };
 
-        self.sess.emit_err(IncorrectAwait {
+        self.dcx().emit_err(IncorrectAwait {
             span,
             sugg_span: (span, applicability),
-            expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(&expr)),
+            expr: self.span_to_snippet(expr.span).unwrap_or_else(|_| pprust::expr_to_string(expr)),
             question_mark: if is_question { "?" } else { "" },
         });
 
@@ -1707,7 +1983,7 @@ impl<'a> Parser<'a> {
             let span = lo.to(self.token.span);
             self.bump(); // )
 
-            self.sess.emit_err(IncorrectUseOfAwait { span });
+            self.dcx().emit_err(IncorrectUseOfAwait { span });
         }
     }
 
@@ -1726,7 +2002,7 @@ impl<'a> Parser<'a> {
             self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block
             let hi = self.token.span;
             self.bump(); //remove )
-            let mut err = self.struct_span_err(lo.to(hi), "use of deprecated `try` macro");
+            let mut err = self.dcx().struct_span_err(lo.to(hi), "use of deprecated `try` macro");
             err.note("in the 2018 edition `try` is a reserved keyword, and the `try!()` macro is deprecated");
             let prefix = if is_empty { "" } else { "alternatively, " };
             if !is_empty {
@@ -1736,7 +2012,7 @@ impl<'a> Parser<'a> {
                     Applicability::MachineApplicable,
                 );
             }
-            err.span_suggestion(lo.shrink_to_lo(), &format!("{prefix}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax"), "r#", Applicability::MachineApplicable);
+            err.span_suggestion(lo.shrink_to_lo(), format!("{prefix}you can still access the deprecated `try!()` macro using the \"raw identifier\" syntax"), "r#", Applicability::MachineApplicable);
             err.emit();
             Ok(self.mk_expr_err(lo.to(hi)))
         } else {
@@ -1744,151 +2020,49 @@ impl<'a> Parser<'a> {
         }
     }
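For the deprecated-`try!` recovery above, a sketch of the two ways out the diagnostic offers on edition 2018 and later: the `?` operator, or calling the old macro through raw-identifier syntax (`r#try!`). Only the `?` form is shown compiling here:

    fn parse(s: &str) -> Result<i32, std::num::ParseIntError> {
        // Rejected on 2018+: `let n = try!(s.parse());` ("use of deprecated
        // `try` macro"); the suggested replacement:
        let n = s.parse::<i32>()?;
        Ok(n)
    }

    fn main() {
        assert_eq!(parse("5"), Ok(5));
    }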
 
-    /// Recovers a situation like `for ( $pat in $expr )`
-    /// and suggest writing `for $pat in $expr` instead.
-    ///
-    /// This should be called before parsing the `$block`.
-    pub(super) fn recover_parens_around_for_head(
+    /// When trying to close a generics list and encountering code like
+    /// ```text
+    /// impl<S: Into<std::borrow::Cow<'static, str>> From<S> for Canonical {}
+    ///                                          // ^ missing > here
+    /// ```
+    /// we provide a structured suggestion on the error from `expect_gt`.
+    pub(super) fn expect_gt_or_maybe_suggest_closing_generics(
         &mut self,
-        pat: P<Pat>,
-        begin_paren: Option<Span>,
-    ) -> P<Pat> {
-        match (&self.token.kind, begin_paren) {
-            (token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
-                self.bump();
-
-                self.struct_span_err(
-                    MultiSpan::from_spans(vec![begin_par_sp, self.prev_token.span]),
-                    "unexpected parentheses surrounding `for` loop head",
-                )
-                .multipart_suggestion(
-                    "remove parentheses in `for` loop",
-                    vec![(begin_par_sp, String::new()), (self.prev_token.span, String::new())],
-                    // With e.g. `for (x) in y)` this would replace `(x) in y)`
-                    // with `x) in y)` which is syntactically invalid.
-                    // However, this is prevented before we get here.
-                    Applicability::MachineApplicable,
-                )
-                .emit();
-
-                // Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
-                pat.and_then(|pat| match pat.kind {
-                    PatKind::Paren(pat) => pat,
-                    _ => P(pat),
+        params: &[ast::GenericParam],
+    ) -> PResult<'a, ()> {
+        let Err(mut err) = self.expect_gt() else {
+            return Ok(());
+        };
+        // Attempt to find places where a missing `>` might belong.
+        if let [.., ast::GenericParam { bounds, .. }] = params
+            && let Some(poly) = bounds
+                .iter()
+                .filter_map(|bound| match bound {
+                    ast::GenericBound::Trait(poly, _) => Some(poly),
+                    _ => None,
                 })
-            }
-            _ => pat,
+                .last()
+        {
+            err.span_suggestion_verbose(
+                poly.span.shrink_to_hi(),
+                "you might have meant to end the type parameters here",
+                ">",
+                Applicability::MaybeIncorrect,
+            );
         }
-    }
-
-    pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
-        (self.token == token::Lt && // `foo:<bar`, likely a typoed turbofish.
-            self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()))
-            || self.token.is_ident() &&
-            matches!(node, ast::ExprKind::Path(..) | ast::ExprKind::Field(..)) &&
-            !self.token.is_reserved_ident() &&           // v `foo:bar(baz)`
-            self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Parenthesis))
-            || self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace)) // `foo:bar {`
-            || self.look_ahead(1, |t| t == &token::Colon) &&     // `foo:bar::<baz`
-            self.look_ahead(2, |t| t == &token::Lt) &&
-            self.look_ahead(3, |t| t.is_ident())
-            || self.look_ahead(1, |t| t == &token::Colon) &&  // `foo:bar:baz`
-            self.look_ahead(2, |t| t.is_ident())
-            || self.look_ahead(1, |t| t == &token::ModSep)
-                && (self.look_ahead(2, |t| t.is_ident()) ||   // `foo:bar::baz`
-            self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>`
+        Err(err)
     }
 
     pub(super) fn recover_seq_parse_error(
         &mut self,
         delim: Delimiter,
         lo: Span,
-        result: PResult<'a, P<Expr>>,
+        err: PErr<'a>,
     ) -> P<Expr> {
-        match result {
-            Ok(x) => x,
-            Err(mut err) => {
-                err.emit();
-                // Recover from parse error, callers expect the closing delim to be consumed.
-                self.consume_block(delim, ConsumeClosingDelim::Yes);
-                self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err, AttrVec::new())
-            }
-        }
-    }
-
-    pub(super) fn recover_closing_delimiter(
-        &mut self,
-        tokens: &[TokenKind],
-        mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
-    ) -> PResult<'a, bool> {
-        let mut pos = None;
-        // We want to use the last closing delim that would apply.
-        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
-            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
-                && Some(self.token.span) > unmatched.unclosed_span
-            {
-                pos = Some(i);
-            }
-        }
-        match pos {
-            Some(pos) => {
-                // Recover and assume that the detected unclosed delimiter was meant for
-                // this location. Emit the diagnostic and act as if the delimiter was
-                // present for the parser's sake.
-
-                // Don't attempt to recover from this unclosed delimiter more than once.
-                let unmatched = self.unclosed_delims.remove(pos);
-                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
-                if unmatched.found_delim.is_none() {
-                    // We encountered `Eof`, set this fact here to avoid complaining about missing
-                    // `fn main()` when we found place to suggest the closing brace.
-                    *self.sess.reached_eof.borrow_mut() = true;
-                }
-
-                // We want to suggest the inclusion of the closing delimiter where it makes
-                // the most sense, which is immediately after the last token:
-                //
-                //  {foo(bar {}}
-                //      ^      ^
-                //      |      |
-                //      |      help: `)` may belong here
-                //      |
-                //      unclosed delimiter
-                if let Some(sp) = unmatched.unclosed_span {
-                    let mut primary_span: Vec<Span> =
-                        err.span.primary_spans().iter().cloned().collect();
-                    primary_span.push(sp);
-                    let mut primary_span: MultiSpan = primary_span.into();
-                    for span_label in err.span.span_labels() {
-                        if let Some(label) = span_label.label {
-                            primary_span.push_span_label(span_label.span, label);
-                        }
-                    }
-                    err.set_span(primary_span);
-                    err.span_label(sp, "unclosed delimiter");
-                }
-                // Backticks should be removed to apply suggestions.
-                let mut delim = delim.to_string();
-                delim.retain(|c| c != '`');
-                err.span_suggestion_short(
-                    self.prev_token.span.shrink_to_hi(),
-                    &format!("`{delim}` may belong here"),
-                    delim,
-                    Applicability::MaybeIncorrect,
-                );
-                if unmatched.found_delim.is_none() {
-                    // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
-                    // errors which would be emitted elsewhere in the parser and let other error
-                    // recovery consume the rest of the file.
-                    Err(err)
-                } else {
-                    err.emit();
-                    self.expected_tokens.clear(); // Reduce the number of errors.
-                    Ok(true)
-                }
-            }
-            _ => Err(err),
-        }
+        err.emit();
+        // Recover from parse error, callers expect the closing delim to be consumed.
+        self.consume_block(delim, ConsumeClosingDelim::Yes);
+        self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err)
     }
 
     /// Eats tokens until we can be relatively sure we reached the end of the
@@ -1968,7 +2142,6 @@ impl<'a> Parser<'a> {
                         && brace_depth == 0
                         && bracket_depth == 0 =>
                 {
-                    debug!("recover_stmt_ return - Semi");
                     break;
                 }
                 _ => self.bump(),
@@ -1979,7 +2152,7 @@ impl<'a> Parser<'a> {
     pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
         if self.eat_keyword(kw::In) {
             // a common typo: `for _ in in bar {}`
-            self.sess.emit_err(InInTypo {
+            self.dcx().emit_err(InInTypo {
                 span: self.prev_token.span,
                 sugg_span: in_span.until(self.prev_token.span),
             });
@@ -1988,12 +2161,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) {
         if let token::DocComment(..) = self.token.kind {
-            self.struct_span_err(
-                self.token.span,
-                "documentation comments cannot be applied to a function parameter's type",
-            )
-            .span_label(self.token.span, "doc comments are not allowed here")
-            .emit();
+            self.dcx().emit_err(DocCommentOnParamType { span: self.token.span });
             self.bump();
         } else if self.token == token::Pound
             && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket))
@@ -2005,9 +2173,7 @@ impl<'a> Parser<'a> {
             }
             let sp = lo.to(self.token.span);
             self.bump();
-            self.struct_span_err(sp, "attributes cannot be applied to a function parameter's type")
-                .span_label(sp, "attributes are not allowed here")
-                .emit();
+            self.dcx().emit_err(AttributeOnParamType { span: sp });
         }
     }
 
@@ -2124,23 +2290,11 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
-        let pat = self.parse_pat_no_top_alt(Some("argument name"))?;
+        let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName), None)?;
         self.expect(&token::Colon)?;
         let ty = self.parse_ty()?;
 
-        struct_span_err!(
-            self.diagnostic(),
-            pat.span,
-            E0642,
-            "patterns aren't allowed in methods without bodies",
-        )
-        .span_suggestion_short(
-            pat.span,
-            "give this argument a name or use an underscore to ignore it",
-            "_",
-            Applicability::MachineApplicable,
-        )
-        .emit();
+        self.dcx().emit_err(PatternMethodParamWithoutBody { span: pat.span });
 
         // Pretend the pattern is `_`, to avoid duplicate errors from AST validation.
         let pat =
@@ -2149,11 +2303,9 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn recover_bad_self_param(&mut self, mut param: Param) -> PResult<'a, Param> {
-        let sp = param.pat.span;
+        let span = param.pat.span;
         param.ty.kind = TyKind::Err;
-        self.struct_span_err(sp, "unexpected `self` parameter in function")
-            .span_label(sp, "must be the first parameter of an associated function")
-            .emit();
+        self.dcx().emit_err(SelfParamNotFirst { span });
         Ok(param)
     }
 
@@ -2184,23 +2336,76 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+    pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
         let (span, msg) = match (&self.token.kind, self.subparser_name) {
             (&token::Eof, Some(origin)) => {
-                let sp = self.sess.source_map().next_point(self.prev_token.span);
+                let sp = self.prev_token.span.shrink_to_hi();
                 (sp, format!("expected expression, found end of {origin}"))
             }
             _ => (
                 self.token.span,
-                format!("expected expression, found {}", super::token_descr(&self.token),),
+                format!("expected expression, found {}", super::token_descr(&self.token)),
             ),
         };
-        let mut err = self.struct_span_err(span, &msg);
+        let mut err = self.dcx().struct_span_err(span, msg);
         let sp = self.sess.source_map().start_point(self.token.span);
         if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            self.sess.expr_parentheses_needed(&mut err, *sp);
+            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
         err.span_label(span, "expected expression");
+
+        // Walk the chain of macro expansions for the current token to point at how the original
+        // code was interpreted. This helps the user realize when a macro argument of one type is
+        // later reinterpreted as a different type, like `$x:expr` being reinterpreted as `$x:pat`
+        // in a subsequent macro invocation (#71039).
+        let mut tok = self.token.clone();
+        let mut labels = vec![];
+        while let TokenKind::Interpolated(node) = &tok.kind {
+            let tokens = node.0.tokens();
+            labels.push(node.clone());
+            if let Some(tokens) = tokens
+                && let tokens = tokens.to_attr_token_stream()
+                && let tokens = tokens.0.deref()
+                && let [AttrTokenTree::Token(token, _)] = &tokens[..]
+            {
+                tok = token.clone();
+            } else {
+                break;
+            }
+        }
+        let mut iter = labels.into_iter().peekable();
+        let mut show_link = false;
+        while let Some(node) = iter.next() {
+            let descr = node.0.descr();
+            if let Some(next) = iter.peek() {
+                let next_descr = next.0.descr();
+                if next_descr != descr {
+                    err.span_label(next.1, format!("this macro fragment matcher is {next_descr}"));
+                    err.span_label(node.1, format!("this macro fragment matcher is {descr}"));
+                    err.span_label(
+                        next.0.use_span(),
+                        format!("this is expected to be {next_descr}"),
+                    );
+                    err.span_label(
+                        node.0.use_span(),
+                        format!(
+                            "this is interpreted as {}, but it is expected to be {}",
+                            next_descr, descr,
+                        ),
+                    );
+                    show_link = true;
+                } else {
+                    err.span_label(node.1, "");
+                }
+            }
+        }
+        if show_link {
+            err.note(
+                "when forwarding a matched fragment to another macro-by-example, matchers in the \
+                 second macro will see an opaque AST of the fragment type, not the underlying \
+                 tokens",
+            );
+        }
         err
     }
 
@@ -2229,7 +2434,7 @@ impl<'a> Parser<'a> {
     /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
     /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
     /// we deduplicate them to not complain about duplicated parameter names.
-    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
+    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut ThinVec<Param>) {
         let mut seen_inputs = FxHashSet::default();
         for input in fn_inputs.iter_mut() {
             let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
@@ -2253,7 +2458,7 @@ impl<'a> Parser<'a> {
     /// like the user has forgotten them.
     pub fn handle_ambiguous_unbraced_const_arg(
         &mut self,
-        args: &mut Vec<AngleBracketedArg>,
+        args: &mut ThinVec<AngleBracketedArg>,
     ) -> PResult<'a, bool> {
         // If we haven't encountered a closing `>`, then the argument is malformed.
         // It's likely that the user has written a const expression without enclosing it
@@ -2264,9 +2469,9 @@ impl<'a> Parser<'a> {
         // We are causing this error here exclusively in case that a `const` expression
         // could be recovered from the current parser state, even if followed by more
         // arguments after a comma.
-        let mut err = self.struct_span_err(
+        let mut err = self.dcx().struct_span_err(
             self.token.span,
-            &format!("expected one of `,` or `>`, found {}", super::token_descr(&self.token)),
+            format!("expected one of `,` or `>`, found {}", super::token_descr(&self.token)),
         );
         err.span_label(self.token.span, "expected one of `,` or `>`");
         match self.recover_const_arg(arg.span(), err) {
@@ -2276,7 +2481,7 @@ impl<'a> Parser<'a> {
                     return Ok(true); // Continue
                 }
             }
-            Err(mut err) => {
+            Err(err) => {
                 args.push(arg);
                 // We will emit a more generic error later.
                 err.delay_as_bug();
@@ -2302,27 +2507,20 @@ impl<'a> Parser<'a> {
             err
         })?;
         if !self.expr_is_valid_const_arg(&expr) {
-            self.struct_span_err(
-                expr.span,
-                "expressions must be enclosed in braces to be used as const generic \
-                    arguments",
-            )
-            .multipart_suggestion(
-                "enclose the `const` expression in braces",
-                vec![
-                    (expr.span.shrink_to_lo(), "{ ".to_string()),
-                    (expr.span.shrink_to_hi(), " }".to_string()),
-                ],
-                Applicability::MachineApplicable,
-            )
-            .emit();
+            self.dcx().emit_err(ConstGenericWithoutBraces {
+                span: expr.span,
+                sugg: ConstGenericWithoutBracesSugg {
+                    left: expr.span.shrink_to_lo(),
+                    right: expr.span.shrink_to_hi(),
+                },
+            });
         }
         Ok(expr)
     }
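The structured ConstGenericWithoutBraces error above targets unbraced const expressions in generic argument position. A sketch of the rejected spelling and the braced form it suggests (hypothetical names):

    fn first<const N: usize>(xs: &[u8]) -> &[u8] {
        &xs[..N]
    }

    fn main() {
        const K: usize = 1;
        // Rejected: `first::<K + 1>(&[1, 2, 3])` ("expressions must be enclosed
        // in braces to be used as const generic arguments"); the suggestion:
        let a = first::<{ K + 1 }>(&[1, 2, 3]);
        assert_eq!(a, [1, 2]);
    }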
 
     fn recover_const_param_decl(&mut self, ty_generics: Option<&Generics>) -> Option<GenericArg> {
         let snapshot = self.create_snapshot_for_diagnostic();
-        let param = match self.parse_const_param(vec![]) {
+        let param = match self.parse_const_param(AttrVec::new()) {
             Ok(param) => param,
             Err(err) => {
                 err.cancel();
@@ -2330,24 +2528,30 @@ impl<'a> Parser<'a> {
                 return None;
             }
         };
-        let mut err =
-            self.struct_span_err(param.span(), "unexpected `const` parameter declaration");
-        err.span_label(param.span(), "expected a `const` expression, not a parameter declaration");
-        if let (Some(generics), Ok(snippet)) =
-            (ty_generics, self.sess.source_map().span_to_snippet(param.span()))
-        {
-            let (span, sugg) = match &generics.params[..] {
-                [] => (generics.span, format!("<{snippet}>")),
-                [.., generic] => (generic.span().shrink_to_hi(), format!(", {snippet}")),
-            };
-            err.multipart_suggestion(
-                "`const` parameters must be declared for the `impl`",
-                vec![(span, sugg), (param.span(), param.ident.to_string())],
-                Applicability::MachineApplicable,
-            );
-        }
+
+        let ident = param.ident.to_string();
+        let sugg = match (ty_generics, self.sess.source_map().span_to_snippet(param.span())) {
+            (Some(Generics { params, span: impl_generics, .. }), Ok(snippet)) => {
+                Some(match &params[..] {
+                    [] => UnexpectedConstParamDeclarationSugg::AddParam {
+                        impl_generics: *impl_generics,
+                        incorrect_decl: param.span(),
+                        snippet,
+                        ident,
+                    },
+                    [.., generic] => UnexpectedConstParamDeclarationSugg::AppendParam {
+                        impl_generics_end: generic.span().shrink_to_hi(),
+                        incorrect_decl: param.span(),
+                        snippet,
+                        ident,
+                    },
+                })
+            }
+            _ => None,
+        };
+        self.dcx().emit_err(UnexpectedConstParamDeclaration { span: param.span(), sugg });
+
         let value = self.mk_expr_err(param.span());
-        err.emit();
         Some(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value }))
     }
 
@@ -2365,20 +2569,14 @@ impl<'a> Parser<'a> {
         self.bump(); // `const`
 
         // Detect and recover from the old, pre-RFC2000 syntax for const generics.
-        let mut err = self
-            .struct_span_err(start, "expected lifetime, type, or constant, found keyword `const`");
+        let mut err = UnexpectedConstInGenericParam { span: start, to_remove: None };
         if self.check_const_arg() {
-            err.span_suggestion_verbose(
-                start.until(self.token.span),
-                "the `const` keyword is only needed in the definition of the type",
-                "",
-                Applicability::MaybeIncorrect,
-            );
-            err.emit();
+            err.to_remove = Some(start.until(self.token.span));
+            self.dcx().emit_err(err);
             Ok(Some(GenericArg::Const(self.parse_const_arg()?)))
         } else {
             let after_kw_const = self.token.span;
-            self.recover_const_arg(after_kw_const, err).map(Some)
+            self.recover_const_arg(after_kw_const, self.dcx().create_err(err)).map(Some)
         }
     }
 
@@ -2386,11 +2584,11 @@ impl<'a> Parser<'a> {
     ///
     /// When encountering code like `foo::< bar + 3 >` or `foo::< bar - baz >` we suggest
     /// `foo::<{ bar + 3 }>` and `foo::<{ bar - baz }>`, respectively. We only provide a suggestion
-    /// if we think that that the resulting expression would be well formed.
+    /// if we think that the resulting expression would be well formed.
     pub fn recover_const_arg(
         &mut self,
         start: Span,
-        mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
+        mut err: DiagnosticBuilder<'a>,
     ) -> PResult<'a, GenericArg> {
         let is_op_or_dot = AssocOp::from_token(&self.token)
             .and_then(|op| {
@@ -2467,10 +2665,32 @@ impl<'a> Parser<'a> {
         Err(err)
     }
 
+    /// Try to recover from an unbraced const argument whose first token [could begin a type][ty].
+    ///
+    /// [ty]: token::Token::can_begin_type
+    pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty(
+        &mut self,
+        mut snapshot: SnapshotParser<'a>,
+    ) -> Option<P<ast::Expr>> {
+        match snapshot.parse_expr_res(Restrictions::CONST_EXPR, None) {
+            // Since we don't know the exact reason why we failed to parse the type or the
+            // expression, employ a simple heuristic to weed out some pathological cases.
+            Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
+                self.restore_snapshot(snapshot);
+                Some(expr)
+            }
+            Ok(_) => None,
+            Err(err) => {
+                err.cancel();
+                None
+            }
+        }
+    }
+
     /// Creates a dummy const argument, and reports that the expression must be enclosed in braces
     pub fn dummy_const_arg_needs_braces(
         &self,
-        mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
+        mut err: DiagnosticBuilder<'a>,
         span: Span,
     ) -> GenericArg {
         err.multipart_suggestion(
@@ -2484,30 +2704,12 @@ impl<'a> Parser<'a> {
         GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })
     }
 
-    /// Get the diagnostics for the cases where `move async` is found.
-    ///
-    /// `move_async_span` starts at the 'm' of the move keyword and ends with the 'c' of the async keyword
-    pub(super) fn incorrect_move_async_order_found(
-        &self,
-        move_async_span: Span,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        let mut err =
-            self.struct_span_err(move_async_span, "the order of `move` and `async` is incorrect");
-        err.span_suggestion_verbose(
-            move_async_span,
-            "try switching the order",
-            "async move",
-            Applicability::MaybeIncorrect,
-        );
-        err
-    }
-
     /// Some special error handling for the "top-level" patterns in a match arm,
     /// `for` loop, `let`, &c. (in contrast to subpatterns within such).
     pub(crate) fn maybe_recover_colon_colon_in_pat_typo(
         &mut self,
         mut first_pat: P<Pat>,
-        expected: Expected,
+        expected: Option<Expected>,
     ) -> P<Pat> {
         if token::Colon != self.token.kind {
             return first_pat;
@@ -2515,26 +2717,42 @@ impl<'a> Parser<'a> {
         if !matches!(first_pat.kind, PatKind::Ident(_, _, None) | PatKind::Path(..))
             || !self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
         {
+            let mut snapshot_type = self.create_snapshot_for_diagnostic();
+            snapshot_type.bump(); // `:`
+            match snapshot_type.parse_ty() {
+                Err(inner_err) => {
+                    inner_err.cancel();
+                }
+                Ok(ty) => {
+                    let Err(mut err) = self.expected_one_of_not_found(&[], &[]) else {
+                        return first_pat;
+                    };
+                    err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+                    self.restore_snapshot(snapshot_type);
+                    let span = first_pat.span.to(ty.span);
+                    first_pat = self.mk_pat(span, PatKind::Wild);
+                    err.emit();
+                }
+            }
             return first_pat;
         }
         // The pattern looks like it might be a path with a `::` -> `:` typo:
         // `match foo { bar:baz => {} }`
-        let span = self.token.span;
+        let colon_span = self.token.span;
         // We only emit "unexpected `:`" error here if we can successfully parse the
         // whole pattern correctly in that case.
-        let snapshot = self.create_snapshot_for_diagnostic();
+        let mut snapshot_pat = self.create_snapshot_for_diagnostic();
+        let mut snapshot_type = self.create_snapshot_for_diagnostic();
 
         // Create error for "unexpected `:`".
         match self.expected_one_of_not_found(&[], &[]) {
             Err(mut err) => {
-                self.bump(); // Skip the `:`.
-                match self.parse_pat_no_top_alt(expected) {
+                // Skip the `:`.
+                snapshot_pat.bump();
+                snapshot_type.bump();
+                match snapshot_pat.parse_pat_no_top_alt(expected, None) {
                     Err(inner_err) => {
-                        // Carry on as if we had not done anything, callers will emit a
-                        // reasonable error.
                         inner_err.cancel();
-                        err.cancel();
-                        self.restore_snapshot(snapshot);
                     }
                     Ok(mut pat) => {
                         // We've parsed the rest of the pattern.
@@ -2565,14 +2783,14 @@ impl<'a> Parser<'a> {
                                 }
                                 _ => {}
                             },
-                            PatKind::Ident(BindingMode::ByValue(Mutability::Not), ident, None) => {
+                            PatKind::Ident(BindingAnnotation::NONE, ident, None) => {
                                 match &first_pat.kind {
                                     PatKind::Ident(_, old_ident, _) => {
                                         let path = PatKind::Path(
                                             None,
                                             Path {
                                                 span: new_span,
-                                                segments: vec![
+                                                segments: thin_vec![
                                                     PathSegment::from_ident(*old_ident),
                                                     PathSegment::from_ident(*ident),
                                                 ],
@@ -2598,8 +2816,8 @@ impl<'a> Parser<'a> {
                             _ => {}
                         }
                         if show_sugg {
-                            err.span_suggestion(
-                                span,
+                            err.span_suggestion_verbose(
+                                colon_span.until(self.look_ahead(1, |t| t.span)),
                                 "maybe write a path separator here",
                                 "::",
                                 Applicability::MaybeIncorrect,
@@ -2607,34 +2825,50 @@ impl<'a> Parser<'a> {
                         } else {
                             first_pat = self.mk_pat(new_span, PatKind::Wild);
                         }
-                        err.emit();
+                        self.restore_snapshot(snapshot_pat);
                     }
                 }
+                match snapshot_type.parse_ty() {
+                    Err(inner_err) => {
+                        inner_err.cancel();
+                    }
+                    Ok(ty) => {
+                        err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+                        self.restore_snapshot(snapshot_type);
+                        let new_span = first_pat.span.to(ty.span);
+                        first_pat = self.mk_pat(new_span, PatKind::Wild);
+                    }
+                }
+                err.emit();
             }
             _ => {
                 // Carry on as if we had not done anything. This should be unreachable.
-                self.restore_snapshot(snapshot);
             }
         };
         first_pat
     }
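`maybe_recover_colon_colon_in_pat_typo` now also tries to parse a type after the `:` (to catch attempted type annotations on patterns) in addition to the `::` typo. A sketch of the typo case and the suggested fix:

    enum Foo {
        Bar,
        Baz,
    }

    fn main() {
        let foo = Foo::Bar;
        // Rejected: `match foo { Foo:Bar => ... }`; the parser suggests
        // "maybe write a path separator here" (`::`):
        match foo {
            Foo::Bar => println!("bar"),
            Foo::Baz => println!("baz"),
        }
    }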
 
     pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool {
-        let Some(label) = self.eat_label().filter(|_| {
-            self.eat(&token::Colon) && self.token.kind == token::OpenDelim(Delimiter::Brace)
-        }) else {
+        // Check for `'a : {`
+        if !(self.check_lifetime()
+            && self.look_ahead(1, |tok| tok.kind == token::Colon)
+            && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Brace)))
+        {
             return false;
-        };
+        }
+        let label = self.eat_label().expect("just checked if a label exists");
+        self.bump(); // eat `:`
         let span = label.ident.span.to(self.prev_token.span);
-        let mut err = self.struct_span_err(span, "block label not supported here");
-        err.span_label(span, "not supported here");
-        err.tool_only_span_suggestion(
-            label.ident.span.until(self.token.span),
-            "remove this block label",
-            "",
-            Applicability::MachineApplicable,
-        );
-        err.emit();
+        self.dcx()
+            .struct_span_err(span, "block label not supported here")
+            .with_span_label(span, "not supported here")
+            .with_tool_only_span_suggestion(
+                label.ident.span.until(self.token.span),
+                "remove this block label",
+                "",
+                Applicability::MachineApplicable,
+            )
+            .emit();
         true
     }
 
@@ -2661,10 +2895,10 @@ impl<'a> Parser<'a> {
             err.cancel();
         }
         let seq_span = lo.to(self.prev_token.span);
-        let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern");
+        let mut err = self.dcx().struct_span_err(comma_span, "unexpected `,` in pattern");
         if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
             err.multipart_suggestion(
-                &format!(
+                format!(
                     "try adding parentheses to match on a tuple{}",
                     if let CommaRecoveryMode::LikelyTuple = rt { "" } else { "..." },
                 ),
@@ -2690,35 +2924,119 @@ impl<'a> Parser<'a> {
         let TyKind::Path(qself, path) = &ty.kind else { return Ok(()) };
         let qself_position = qself.as_ref().map(|qself| qself.position);
         for (i, segments) in path.segments.windows(2).enumerate() {
-            if qself_position.map(|pos| i < pos).unwrap_or(false) {
+            if qself_position.is_some_and(|pos| i < pos) {
                 continue;
             }
             if let [a, b] = segments {
                 let (a_span, b_span) = (a.span(), b.span());
                 let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
-                if self.span_to_snippet(between_span).as_ref().map(|a| &a[..]) == Ok(":: ") {
-                    let mut err = self.struct_span_err(
-                        path.span.shrink_to_hi(),
-                        "expected `:` followed by trait or lifetime",
-                    );
-                    err.span_suggestion(
-                        between_span,
-                        "use single colon",
-                        ": ",
-                        Applicability::MachineApplicable,
-                    );
-                    return Err(err);
+                if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
+                    return Err(self.dcx().create_err(DoubleColonInBound {
+                        span: path.span.shrink_to_hi(),
+                        between: between_span,
+                    }));
                 }
             }
         }
         Ok(())
     }
 
+    /// Check for exclusive ranges written as `..<`
+    pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: PErr<'a>) -> PErr<'a> {
+        if maybe_lt == token::Lt
+            && (self.expected_tokens.contains(&TokenType::Token(token::Gt))
+                || matches!(self.token.kind, token::Literal(..)))
+        {
+            err.span_suggestion(
+                maybe_lt.span,
+                "remove the `<` to write an exclusive range",
+                "",
+                Applicability::MachineApplicable,
+            );
+        }
+        err
+    }
+
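`maybe_err_dotdotlt_syntax` adds a suggestion for Swift-style half-open ranges. An illustrative example of the rejected spelling and the Rust equivalent:

    fn main() {
        // Rejected: `0..<5`; the suggestion removes the `<`, since Rust's `..`
        // range is already exclusive on the right:
        let v: Vec<u32> = (0..5).collect();
        assert_eq!(v, vec![0, 1, 2, 3, 4]);
    }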
+    pub fn is_diff_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> bool {
+        (0..3).all(|i| self.look_ahead(i, |tok| tok == long_kind))
+            && self.look_ahead(3, |tok| tok == short_kind)
+    }
+
+    fn diff_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> Option<Span> {
+        if self.is_diff_marker(long_kind, short_kind) {
+            let lo = self.token.span;
+            for _ in 0..4 {
+                self.bump();
+            }
+            return Some(lo.to(self.prev_token.span));
+        }
+        None
+    }
+
+    pub fn recover_diff_marker(&mut self) {
+        if let Err(err) = self.err_diff_marker() {
+            err.emit();
+            FatalError.raise();
+        }
+    }
+
+    pub fn err_diff_marker(&mut self) -> PResult<'a, ()> {
+        let Some(start) = self.diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else {
+            return Ok(());
+        };
+        let mut spans = Vec::with_capacity(3);
+        spans.push(start);
+        let mut middlediff3 = None;
+        let mut middle = None;
+        let mut end = None;
+        loop {
+            if self.token.kind == TokenKind::Eof {
+                break;
+            }
+            if let Some(span) = self.diff_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or)) {
+                middlediff3 = Some(span);
+            }
+            if let Some(span) = self.diff_marker(&TokenKind::EqEq, &TokenKind::Eq) {
+                middle = Some(span);
+            }
+            if let Some(span) = self.diff_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt) {
+                spans.push(span);
+                end = Some(span);
+                break;
+            }
+            self.bump();
+        }
+        let mut err = self.dcx().struct_span_err(spans, "encountered diff marker");
+        err.span_label(start, "after this is the code before the merge");
+        if let Some(middle) = middlediff3 {
+            err.span_label(middle, "");
+        }
+        if let Some(middle) = middle {
+            err.span_label(middle, "");
+        }
+        if let Some(end) = end {
+            err.span_label(end, "above this are the incoming code changes");
+        }
+        err.help(
+            "if you're having merge conflicts after pulling new code, the top section is the code \
+             you already had and the bottom section is the remote code",
+        );
+        err.help(
+            "if you're in the middle of a rebase, the top section is the code being rebased onto \
+             and the bottom section is the code coming from the current commit being rebased",
+        );
+        err.note(
+            "for an explanation on these markers from the `git` documentation, visit \
+             <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
+        );
+        Err(err)
+    }
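
The four helpers above detect unresolved merge-conflict markers (`<<<<<<<`, `|||||||`, `=======`, `>>>>>>>`) left in a source file and abort with the labeled error. A standalone sketch of the same detection idea (the `find_conflict_markers` helper is hypothetical and line-based, much simpler than the token-based `diff_marker` scan):

    // Standalone sketch (hypothetical helper, not rustc code): report git
    // conflict markers in a source string, mirroring the layout that
    // `err_diff_marker` labels in its diagnostic.
    fn find_conflict_markers(src: &str) -> Vec<(usize, &'static str)> {
        let mut found = Vec::new();
        for (idx, line) in src.lines().enumerate() {
            let kind = if line.starts_with("<<<<<<<") {
                "start of the local side"
            } else if line.starts_with("|||||||") {
                "base section of a diff3-style conflict"
            } else if line.starts_with("=======") {
                "separator between the two sides"
            } else if line.starts_with(">>>>>>>") {
                "end of the incoming side"
            } else {
                continue;
            };
            found.push((idx + 1, kind));
        }
        found
    }

    fn main() {
        let src = "fn f() -> u32 {\n<<<<<<< HEAD\n    1\n=======\n    2\n>>>>>>> feature\n}\n";
        for (line, kind) in find_conflict_markers(src) {
            println!("line {line}: {kind}");
        }
    }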
+
     /// Parse and throw away a parenthesized comma separated
     /// sequence of patterns until `)` is reached.
     fn skip_pat_list(&mut self) -> PResult<'a, ()> {
         while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) {
-            self.parse_pat_no_top_alt(None)?;
+            self.parse_pat_no_top_alt(None, None)?;
             if !self.eat(&token::Comma) {
                 return Ok(());
             }
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 8e1b279d9b6..1a57474bac2 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,32 +1,42 @@
+// ignore-tidy-filelength
 use super::diagnostics::SnapshotParser;
-use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
+use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
     AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
     SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
 };
-use crate::maybe_recover_from_interpolated_ty_qpath;
 
+use crate::errors;
+use crate::maybe_recover_from_interpolated_ty_qpath;
+use ast::mut_visit::{noop_visit_expr, MutVisitor};
+use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment};
 use core::mem;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::Spacing;
+use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
-use rustc_ast::util::literal::LitError;
 use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
 use rustc_ast::visit::Visitor;
-use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
+use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
 use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
-use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
-use rustc_ast::{ClosureBinder, StmtKind};
+use rustc_ast::{Arm, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
+use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
 use rustc_ast_pretty::pprust;
-use rustc_data_structures::thin_vec::ThinVec;
-use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, PResult};
+use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_errors::{
+    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, PResult, StashKey,
+};
+use rustc_lexer::unescape::unescape_char;
+use rustc_macros::Subdiagnostic;
+use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
 use rustc_session::lint::BuiltinLintDiagnostics;
-use rustc_span::source_map::{self, Span, Spanned};
+use rustc_span::source_map::{self, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::{BytePos, Pos};
+use rustc_span::{BytePos, Pos, Span};
+use thin_vec::{thin_vec, ThinVec};
 
 /// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
 /// dropped into the token stream, which happens while parsing the result of
@@ -36,7 +46,7 @@ use rustc_span::{BytePos, Pos};
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = &$p.token.kind {
-            match &**nt {
+            match &nt.0 {
                 token::NtExpr(e) | token::NtLiteral(e) => {
                     let e = e.clone();
                     $p.bump();
@@ -45,20 +55,12 @@ macro_rules! maybe_whole_expr {
                 token::NtPath(path) => {
                     let path = (**path).clone();
                     $p.bump();
-                    return Ok($p.mk_expr(
-                        $p.prev_token.span,
-                        ExprKind::Path(None, path),
-                        AttrVec::new(),
-                    ));
+                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Path(None, path)));
                 }
                 token::NtBlock(block) => {
                     let block = block.clone();
                     $p.bump();
-                    return Ok($p.mk_expr(
-                        $p.prev_token.span,
-                        ExprKind::Block(block, None),
-                        AttrVec::new(),
-                    ));
+                    return Ok($p.mk_expr($p.prev_token.span, ExprKind::Block(block, None)));
                 }
                 _ => {}
             };
@@ -70,7 +72,7 @@ macro_rules! maybe_whole_expr {
 pub(super) enum LhsExpr {
     NotYetParsed,
     AttributesParsed(AttrWrapper),
-    AlreadyParsed(P<Expr>),
+    AlreadyParsed { expr: P<Expr>, starts_statement: bool },
 }
 
 impl From<Option<AttrWrapper>> for LhsExpr {
@@ -84,14 +86,26 @@ impl From<Option<AttrWrapper>> for LhsExpr {
 }
 
 impl From<P<Expr>> for LhsExpr {
-    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed(expr)`.
+    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed { expr, starts_statement: false }`.
     ///
     /// This conversion does not allocate.
     fn from(expr: P<Expr>) -> Self {
-        LhsExpr::AlreadyParsed(expr)
+        LhsExpr::AlreadyParsed { expr, starts_statement: false }
     }
 }
 
+#[derive(Debug)]
+enum DestructuredFloat {
+    /// 1e2
+    Single(Symbol, Span),
+    /// 1.
+    TrailingDot(Symbol, Span, Span),
+    /// 1.2 | 1.2e3
+    MiddleDot(Symbol, Span, Span, Symbol, Span),
+    /// Invalid
+    Error,
+}
+
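
`DestructuredFloat` records how a float-shaped token is split when it actually spells a tuple-field access such as `x.1.2`. A standalone sketch of the same classification over the literal's text (the `classify` helper is hypothetical; the real `break_up_float` later in this file also tracks spans and the recovered `1e+`/`1e-` forms):

    // Hypothetical sketch: bucket a float-shaped literal into the same three
    // shapes the `DestructuredFloat` variants describe.
    #[derive(Debug, PartialEq)]
    enum FloatShape {
        Single,      // "1e2": a single identifier-like chunk
        TrailingDot, // "1.": a chunk followed by a lone dot
        MiddleDot,   // "1.2" or "1.2e3": two chunks around one dot
        Other,
    }

    fn classify(text: &str) -> FloatShape {
        let parts: Vec<&str> = text.split('.').collect();
        match parts.as_slice() {
            [_single] => FloatShape::Single,
            [_first, ""] => FloatShape::TrailingDot,
            [_first, second] if !second.is_empty() => FloatShape::MiddleDot,
            _ => FloatShape::Other,
        }
    }

    fn main() {
        assert_eq!(classify("1e2"), FloatShape::Single);
        assert_eq!(classify("1."), FloatShape::TrailingDot);
        assert_eq!(classify("1.2e3"), FloatShape::MiddleDot);
        println!("all shapes classified as expected");
    }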
 impl<'a> Parser<'a> {
     /// Parses an expression.
     #[inline]
@@ -106,21 +120,21 @@ impl<'a> Parser<'a> {
         self.collect_tokens_no_attrs(|this| this.parse_expr())
     }
 
-    pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
+    pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
         self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
     }
 
-    fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
-        match self.parse_expr() {
+    fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
+        match self.parse_expr_res(restrictions, None) {
             Ok(expr) => Ok(expr),
-            Err(mut err) => match self.token.ident() {
+            Err(err) => match self.token.ident() {
                 Some((Ident { name: kw::Underscore, .. }, false))
-                    if self.look_ahead(1, |t| t == &token::Comma) =>
+                    if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
                     err.emit();
                     self.bump();
-                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new()))
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err))
                 }
                 _ => Err(err),
             },
@@ -128,8 +142,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a sequence of expressions delimited by parentheses.
-    fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
-        self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r)
+    fn parse_expr_paren_seq(&mut self) -> PResult<'a, ThinVec<P<Expr>>> {
+        self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore(Restrictions::empty()))
+            .map(|(r, _)| r)
     }
 
     /// Parses an expression, subject to the given restrictions.
@@ -139,7 +154,7 @@ impl<'a> Parser<'a> {
         r: Restrictions,
         already_parsed_attrs: Option<AttrWrapper>,
     ) -> PResult<'a, P<Expr>> {
-        self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
+        self.with_res(r, |this| this.parse_expr_assoc(already_parsed_attrs))
     }
 
     /// Parses an associative expression.
@@ -147,48 +162,42 @@ impl<'a> Parser<'a> {
     /// This parses an expression accounting for associativity and precedence of the operators in
     /// the expression.
     #[inline]
-    fn parse_assoc_expr(
+    fn parse_expr_assoc(
         &mut self,
         already_parsed_attrs: Option<AttrWrapper>,
     ) -> PResult<'a, P<Expr>> {
-        self.parse_assoc_expr_with(0, already_parsed_attrs.into())
+        self.parse_expr_assoc_with(0, already_parsed_attrs.into())
     }
 
     /// Parses an associative expression with operators of at least `min_prec` precedence.
-    pub(super) fn parse_assoc_expr_with(
+    pub(super) fn parse_expr_assoc_with(
         &mut self,
         min_prec: usize,
         lhs: LhsExpr,
     ) -> PResult<'a, P<Expr>> {
-        let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
+        let mut starts_stmt = false;
+        let mut lhs = if let LhsExpr::AlreadyParsed { expr, starts_statement } = lhs {
+            starts_stmt = starts_statement;
             expr
         } else {
             let attrs = match lhs {
                 LhsExpr::AttributesParsed(attrs) => Some(attrs),
                 _ => None,
             };
-            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
-                return self.parse_prefix_range_expr(attrs);
+            if self.token.is_range_separator() {
+                return self.parse_expr_prefix_range(attrs);
             } else {
-                self.parse_prefix_expr(attrs)?
+                self.parse_expr_prefix(attrs)?
             }
         };
-        let last_type_ascription_set = self.last_type_ascription.is_some();
 
         if !self.should_continue_as_assoc_expr(&lhs) {
-            self.last_type_ascription = None;
             return Ok(lhs);
         }
 
         self.expected_tokens.push(TokenType::Operator);
         while let Some(op) = self.check_assoc_op() {
-            // Adjust the span for interpolated LHS to point to the `$lhs` token
-            // and not to what it refers to.
-            let lhs_span = match self.prev_token.kind {
-                TokenKind::Interpolated(..) => self.prev_token.span,
-                _ => lhs.span,
-            };
-
+            let lhs_span = self.interpolated_or_expr_span(&lhs);
             let cur_op_span = self.token.span;
             let restrictions = if op.node.is_assign_like() {
                 self.restrictions & Restrictions::NO_STRUCT_LITERAL
@@ -225,15 +234,18 @@ impl<'a> Parser<'a> {
                     AssocOp::Equal => "==",
                     AssocOp::NotEqual => "!=",
                     _ => unreachable!(),
-                };
-                self.struct_span_err(sp, &format!("invalid comparison operator `{sugg}=`"))
-                    .span_suggestion_short(
-                        sp,
-                        &format!("`{s}=` is not a valid comparison operator, use `{s}`", s = sugg),
-                        sugg,
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                }
+                .into();
+                let invalid = format!("{sugg}=");
+                self.dcx().emit_err(errors::InvalidComparisonOperator {
+                    span: sp,
+                    invalid: invalid.clone(),
+                    sub: errors::InvalidComparisonOperatorSub::Correctable {
+                        span: sp,
+                        invalid,
+                        correct: sugg,
+                    },
+                });
                 self.bump();
             }
 
@@ -243,14 +255,15 @@ impl<'a> Parser<'a> {
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
                 let sp = op.span.to(self.token.span);
-                self.struct_span_err(sp, "invalid comparison operator `<>`")
-                    .span_suggestion_short(
-                        sp,
-                        "`<>` is not a valid comparison operator, use `!=`",
-                        "!=",
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                self.dcx().emit_err(errors::InvalidComparisonOperator {
+                    span: sp,
+                    invalid: "<>".into(),
+                    sub: errors::InvalidComparisonOperatorSub::Correctable {
+                        span: sp,
+                        invalid: "<>".into(),
+                        correct: "!=".into(),
+                    },
+                });
                 self.bump();
             }
 
@@ -260,12 +273,11 @@ impl<'a> Parser<'a> {
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
                 let sp = op.span.to(self.token.span);
-                self.struct_span_err(sp, "invalid comparison operator `<=>`")
-                    .span_label(
-                        sp,
-                        "`<=>` is not a valid comparison operator, use `std::cmp::Ordering`",
-                    )
-                    .emit();
+                self.dcx().emit_err(errors::InvalidComparisonOperator {
+                    span: sp,
+                    invalid: "<=>".into(),
+                    sub: errors::InvalidComparisonOperatorSub::Spaceship(sp),
+                });
                 self.bump();
             }
 
@@ -276,7 +288,19 @@ impl<'a> Parser<'a> {
                 let op_span = self.prev_token.span.to(self.token.span);
                 // Eat the second `+`
                 self.bump();
-                lhs = self.recover_from_postfix_increment(lhs, op_span)?;
+                lhs = self.recover_from_postfix_increment(lhs, op_span, starts_stmt)?;
+                continue;
+            }
+
+            if self.prev_token == token::BinOp(token::Minus)
+                && self.token == token::BinOp(token::Minus)
+                && self.prev_token.span.between(self.token.span).is_empty()
+                && !self.look_ahead(1, |tok| tok.can_begin_expr())
+            {
+                let op_span = self.prev_token.span.to(self.token.span);
+                // Eat the second `-`
+                self.bump();
+                lhs = self.recover_from_postfix_decrement(lhs, op_span, starts_stmt)?;
                 continue;
             }
 
@@ -285,13 +309,10 @@ impl<'a> Parser<'a> {
             if op == AssocOp::As {
                 lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?;
                 continue;
-            } else if op == AssocOp::Colon {
-                lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?;
-                continue;
             } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
                 // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
                 // generalise it to the Fixity::None code.
-                lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?;
+                lhs = self.parse_expr_range(prec, lhs, op, cur_op_span)?;
                 break;
             }
 
@@ -304,7 +325,7 @@ impl<'a> Parser<'a> {
                 Fixity::None => 1,
             };
             let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
-                this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+                this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
             })?;
 
             let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
@@ -329,11 +350,9 @@ impl<'a> Parser<'a> {
                 | AssocOp::GreaterEqual => {
                     let ast_op = op.to_ast_binop().unwrap();
                     let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
-                    self.mk_expr(span, binary, AttrVec::new())
-                }
-                AssocOp::Assign => {
-                    self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span), AttrVec::new())
+                    self.mk_expr(span, binary)
                 }
+                AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span)),
                 AssocOp::AssignOp(k) => {
                     let aop = match k {
                         token::Plus => BinOpKind::Add,
@@ -348,10 +367,10 @@ impl<'a> Parser<'a> {
                         token::Shr => BinOpKind::Shr,
                     };
                     let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs);
-                    self.mk_expr(span, aopexpr, AttrVec::new())
+                    self.mk_expr(span, aopexpr)
                 }
-                AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => {
-                    self.span_bug(span, "AssocOp should have been handled by special case")
+                AssocOp::As | AssocOp::DotDot | AssocOp::DotDotEq => {
+                    self.dcx().span_bug(span, "AssocOp should have been handled by special case")
                 }
             };
 
@@ -359,9 +378,7 @@ impl<'a> Parser<'a> {
                 break;
             }
         }
-        if last_type_ascription_set {
-            self.last_type_ascription = None;
-        }
+
         Ok(lhs)
     }
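
`parse_expr_assoc_with` is a precedence-climbing loop: parse a prefix expression, then keep consuming operators whose precedence is at least `min_prec`, recursing with a higher minimum so left-associative operators group to the left. A minimal standalone sketch of that shape over `+` and `*` (illustrative only; `Tok`, `prec`, and `parse` are made-up names unrelated to the rustc AST):

    // Precedence climbing over `+` and `*`, mirroring the loop above: parse a
    // primary, then bind operators whose precedence is >= `min_prec`,
    // recursing with `p + 1` so left-associative operators nest to the left.
    #[derive(Clone, Copy, Debug)]
    enum Tok {
        Num(i64),
        Plus,
        Star,
    }

    fn prec(t: Tok) -> Option<usize> {
        match t {
            Tok::Plus => Some(1),
            Tok::Star => Some(2),
            Tok::Num(_) => None,
        }
    }

    fn parse(tokens: &[Tok], pos: &mut usize, min_prec: usize) -> i64 {
        // "Primary" expression: a single number in this sketch.
        let Tok::Num(mut lhs) = tokens[*pos] else { panic!("expected a number") };
        *pos += 1;
        while let Some(&op) = tokens.get(*pos) {
            let Some(p) = prec(op) else { break };
            if p < min_prec {
                break;
            }
            *pos += 1;
            let rhs = parse(tokens, pos, p + 1);
            lhs = match op {
                Tok::Plus => lhs + rhs,
                Tok::Star => lhs * rhs,
                Tok::Num(_) => unreachable!(),
            };
        }
        lhs
    }

    fn main() {
        // 1 + 2 * 3 parses as 1 + (2 * 3) = 7.
        let tokens = [Tok::Num(1), Tok::Plus, Tok::Num(2), Tok::Star, Tok::Num(3)];
        assert_eq!(parse(&tokens, &mut 0, 0), 7);
        println!("1 + 2 * 3 = 7");
    }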
 
@@ -376,20 +393,11 @@ impl<'a> Parser<'a> {
             // want to keep their span info to improve diagnostics in these cases in a later stage.
             (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
             (true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
-            (true, Some(AssocOp::Add)) // `{ 42 } + 42
-            // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
-            // `if x { a } else { b } && if y { c } else { d }`
-            if !self.look_ahead(1, |t| t.is_used_keyword()) => {
-                // These cases are ambiguous and can't be identified in the parser alone.
-                let sp = self.sess.source_map().start_point(self.token.span);
-                self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
-                false
-            }
-            (true, Some(AssocOp::LAnd)) |
-            (true, Some(AssocOp::LOr)) |
-            (true, Some(AssocOp::BitOr)) => {
-                // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
-                // above due to #74233.
+            (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus)
+            (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
+            (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure)
+            (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42`
+            => {
                 // These cases are ambiguous and can't be identified in the parser alone.
                 //
                 // Bitwise AND is left out because guessing intent is hard. We can make
@@ -400,7 +408,7 @@ impl<'a> Parser<'a> {
                 self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
                 false
             }
-            (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false,
+            (true, Some(op)) if !op.can_continue_expr_unambiguously() => false,
             (true, Some(_)) => {
                 self.error_found_expr_would_be_stmt(lhs);
                 true
@@ -412,13 +420,11 @@ impl<'a> Parser<'a> {
     /// but the next token implies this should be parsed as an expression.
     /// For example: `if let Some(x) = x { x } else { 0 } / 2`.
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
-        let mut err = self.struct_span_err(
-            self.token.span,
-            &format!("expected expression, found `{}`", pprust::token_to_string(&self.token),),
-        );
-        err.span_label(self.token.span, "expected expression");
-        self.sess.expr_parentheses_needed(&mut err, lhs.span);
-        err.emit();
+        self.dcx().emit_err(errors::FoundExprWouldBeStmt {
+            span: self.token.span,
+            token: self.token.clone(),
+            suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
+        });
     }
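
As the doc comment notes, this fires for statements like `if let Some(x) = x { x } else { 0 } / 2;`, where the block-like expression completes the statement and the trailing operator is then unexpected. A small user-facing illustration of the suggested fix (assuming the usual parentheses suggestion attached above):

    fn main() {
        let x: Option<i32> = Some(4);
        // Written as a bare statement, `if let Some(x) = x { x } else { 0 } / 2;`
        // is rejected with "expected expression, found `/`" plus the
        // parentheses suggestion, which is applied here.
        let half = (if let Some(x) = x { x } else { 0 }) / 2;
        println!("{half}");
    }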
 
     /// Possibly translate the current token to an associative operator.
@@ -439,13 +445,34 @@ impl<'a> Parser<'a> {
             ) if self.restrictions.contains(Restrictions::CONST_EXPR) => {
                 return None;
             }
+            // When recovering patterns as expressions, stop parsing when encountering an
+            // assignment `=`, an alternative `|`, or a range `..`.
+            (
+                Some(
+                    AssocOp::Assign
+                    | AssocOp::AssignOp(_)
+                    | AssocOp::BitOr
+                    | AssocOp::DotDot
+                    | AssocOp::DotDotEq,
+                ),
+                _,
+            ) if self.restrictions.contains(Restrictions::IS_PAT) => {
+                return None;
+            }
             (Some(op), _) => (op, self.token.span),
-            (None, Some((Ident { name: sym::and, span }, false))) => {
-                self.error_bad_logical_op("and", "&&", "conjunction");
+            (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
+                self.dcx().emit_err(errors::InvalidLogicalOperator {
+                    span: self.token.span,
+                    incorrect: "and".into(),
+                    sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span),
+                });
                 (AssocOp::LAnd, span)
             }
-            (None, Some((Ident { name: sym::or, span }, false))) => {
-                self.error_bad_logical_op("or", "||", "disjunction");
+            (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
+                self.dcx().emit_err(errors::InvalidLogicalOperator {
+                    span: self.token.span,
+                    incorrect: "or".into(),
+                    sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span),
+                });
                 (AssocOp::LOr, span)
             }
             _ => return None,
@@ -453,19 +480,6 @@ impl<'a> Parser<'a> {
         Some(source_map::respan(span, op))
     }
 
-    /// Error on `and` and `or` suggesting `&&` and `||` respectively.
-    fn error_bad_logical_op(&self, bad: &str, good: &str, english: &str) {
-        self.struct_span_err(self.token.span, &format!("`{bad}` is not a logical operator"))
-            .span_suggestion_short(
-                self.token.span,
-                &format!("use `{good}` to perform logical {english}"),
-                good,
-                Applicability::MachineApplicable,
-            )
-            .note("unlike in e.g., python and PHP, `&&` and `||` are used for logical operators")
-            .emit();
-    }
-
     /// Checks if this expression is a successfully parsed statement.
     fn expr_is_complete(&self, e: &Expr) -> bool {
         self.restrictions.contains(Restrictions::STMT_EXPR)
@@ -474,7 +488,7 @@ impl<'a> Parser<'a> {
 
     /// Parses `x..y`, `x..=y`, and `x..`/`x..=`.
     /// The other two variants are handled in `parse_prefix_range_expr` below.
-    fn parse_range_expr(
+    fn parse_expr_range(
         &mut self,
         prec: usize,
         lhs: P<Expr>,
@@ -482,7 +496,11 @@ impl<'a> Parser<'a> {
         cur_op_span: Span,
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
-            Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
+            let maybe_lt = self.token.clone();
+            Some(
+                self.parse_expr_assoc_with(prec + 1, LhsExpr::NotYetParsed)
+                    .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
+            )
         } else {
             None
         };
@@ -491,7 +509,7 @@ impl<'a> Parser<'a> {
         let limits =
             if op == AssocOp::DotDot { RangeLimits::HalfOpen } else { RangeLimits::Closed };
         let range = self.mk_range(Some(lhs), rhs, limits);
-        Ok(self.mk_expr(span, range, AttrVec::new()))
+        Ok(self.mk_expr(span, range))
     }
 
     fn is_at_start_of_range_notation_rhs(&self) -> bool {
@@ -507,14 +525,14 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
-    fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_prefix_range(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         // Check for deprecated `...` syntax.
         if self.token == token::DotDotDot {
             self.err_dotdotdot_syntax(self.token.span);
         }
 
         debug_assert!(
-            [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
+            self.token.is_range_separator(),
             "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
             self.token
         );
@@ -531,21 +549,23 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         self.collect_tokens_for_expr(attrs, |this, attrs| {
             let lo = this.token.span;
+            let maybe_lt = this.look_ahead(1, |t| t.clone());
             this.bump();
             let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
                 // RHS must be parsed with more associativity than the dots.
-                this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
-                    .map(|x| (lo.to(x.span), Some(x)))?
+                this.parse_expr_assoc_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
+                    .map(|x| (lo.to(x.span), Some(x)))
+                    .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
             } else {
                 (lo, None)
             };
             let range = this.mk_range(None, opt_end, limits);
-            Ok(this.mk_expr(span, range, attrs.into()))
+            Ok(this.mk_expr_with_attrs(span, range, attrs))
         })
     }
 
     /// Parses a prefix-unary-operator expr.
-    fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_prefix(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         let lo = self.token.span;
 
@@ -553,7 +573,7 @@ impl<'a> Parser<'a> {
             ($this:ident, $attrs:expr, |this, _| $body:expr) => {
                 $this.collect_tokens_for_expr($attrs, |$this, attrs| {
                     let (hi, ex) = $body?;
-                    Ok($this.mk_expr(lo.to(hi), ex, attrs.into()))
+                    Ok($this.mk_expr_with_attrs(lo.to(hi), ex, attrs))
                 })
             };
         }
@@ -562,91 +582,92 @@ impl<'a> Parser<'a> {
 
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
         match this.token.uninterpolate().kind {
-            token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr`
-            token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr`
+            // `!expr`
+            token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
+            // `~expr`
+            token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
+            // `-expr`
             token::BinOp(token::Minus) => {
-                make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
-            } // `-expr`
+                make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
+            }
+            // `*expr`
             token::BinOp(token::Star) => {
-                make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
-            } // `*expr`
+                make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
+            }
+            // `&expr` and `&&expr`
             token::BinOp(token::And) | token::AndAnd => {
-                make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
+                make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
             }
+            // `+lit`
             token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
-                let mut err = this.struct_span_err(lo, "leading `+` is not supported");
-                err.span_label(lo, "unexpected `+`");
+                let mut err = errors::LeadingPlusNotSupported {
+                    span: lo,
+                    remove_plus: None,
+                    add_parentheses: None,
+                };
 
                 // a block on the LHS might have been intended to be an expression instead
                 if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
-                    this.sess.expr_parentheses_needed(&mut err, *sp);
+                    err.add_parentheses = Some(ExprParenthesesNeeded::surrounding(*sp));
                 } else {
-                    err.span_suggestion_verbose(
-                        lo,
-                        "try removing the `+`",
-                        "",
-                        Applicability::MachineApplicable,
-                    );
+                    err.remove_plus = Some(lo);
                 }
-                err.emit();
+                this.dcx().emit_err(err);
 
                 this.bump();
-                this.parse_prefix_expr(None)
-            } // `+expr`
+                this.parse_expr_prefix(None)
+            }
             // Recover from `++x`:
             token::BinOp(token::Plus)
                 if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
             {
-                let prev_is_semi = this.prev_token == token::Semi;
+                let starts_stmt = this.prev_token == token::Semi
+                    || this.prev_token == token::CloseDelim(Delimiter::Brace);
                 let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
                 // Eat both `+`s.
                 this.bump();
                 this.bump();
 
-                let operand_expr = this.parse_dot_or_call_expr(Default::default())?;
-                this.recover_from_prefix_increment(operand_expr, pre_span, prev_is_semi)
+                let operand_expr = this.parse_expr_dot_or_call(Default::default())?;
+                this.recover_from_prefix_increment(operand_expr, pre_span, starts_stmt)
             }
             token::Ident(..) if this.token.is_keyword(kw::Box) => {
-                make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
+                make_it!(this, attrs, |this, _| this.parse_expr_box(lo))
             }
-            token::Ident(..) if this.is_mistaken_not_ident_negation() => {
+            token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
                 make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
             }
-            _ => return this.parse_dot_or_call_expr(Some(attrs)),
+            _ => return this.parse_expr_dot_or_call(Some(attrs)),
         }
     }
 
-    fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
+    fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
         self.bump();
-        let expr = self.parse_prefix_expr(None);
-        let (span, expr) = self.interpolated_or_expr_span(expr)?;
+        let expr = self.parse_expr_prefix(None)?;
+        let span = self.interpolated_or_expr_span(&expr);
         Ok((lo.to(span), expr))
     }
 
-    fn parse_unary_expr(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
-        let (span, expr) = self.parse_prefix_expr_common(lo)?;
+    fn parse_expr_unary(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
+        let (span, expr) = self.parse_expr_prefix_common(lo)?;
         Ok((span, self.mk_unary(op, expr)))
     }
 
-    // Recover on `!` suggesting for bitwise negation instead.
+    /// Recover on `~expr` in favor of `!expr`.
     fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        self.struct_span_err(lo, "`~` cannot be used as a unary operator")
-            .span_suggestion_short(
-                lo,
-                "use `!` to perform bitwise not",
-                "!",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.dcx().emit_err(errors::TildeAsUnaryOperator(lo));
 
-        self.parse_unary_expr(lo, UnOp::Not)
+        self.parse_expr_unary(lo, UnOp::Not)
     }
 
-    /// Parse `box expr`.
-    fn parse_box_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let (span, expr) = self.parse_prefix_expr_common(lo)?;
-        self.sess.gated_spans.gate(sym::box_syntax, span);
-        Ok((span, ExprKind::Box(expr)))
+    /// Parse `box expr` - this syntax has been removed, but we still parse it
+    /// for now to provide a more useful error.
+    fn parse_expr_box(&mut self, box_kw: Span) -> PResult<'a, (Span, ExprKind)> {
+        let (span, _) = self.parse_expr_prefix_common(box_kw)?;
+        let inner_span = span.with_lo(box_kw.hi());
+        let code = self.sess.source_map().span_to_snippet(inner_span).unwrap();
+        self.dcx().emit_err(errors::BoxSyntaxRemoved { span, code: code.trim() });
+        Ok((span, ExprKind::Err))
     }
 
     fn is_mistaken_not_ident_negation(&self) -> bool {
@@ -662,40 +683,35 @@ impl<'a> Parser<'a> {
 
     /// Recover on `not expr` in favor of `!expr`.
     fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        // Emit the error...
-        let not_token = self.look_ahead(1, |t| t.clone());
-        self.struct_span_err(
-            not_token.span,
-            &format!("unexpected {} after identifier", super::token_descr(&not_token)),
-        )
-        .span_suggestion_short(
+        let negated_token = self.look_ahead(1, |t| t.clone());
+
+        let sub_diag = if negated_token.is_numeric_lit() {
+            errors::NotAsNegationOperatorSub::SuggestNotBitwise
+        } else if negated_token.is_bool_lit() {
+            errors::NotAsNegationOperatorSub::SuggestNotLogical
+        } else {
+            errors::NotAsNegationOperatorSub::SuggestNotDefault
+        };
+
+        self.dcx().emit_err(errors::NotAsNegationOperator {
+            negated: negated_token.span,
+            negated_desc: super::token_descr(&negated_token),
             // Span the `not` plus trailing whitespace to avoid
             // trailing whitespace after the `!` in our suggestion
-            self.sess.source_map().span_until_non_whitespace(lo.to(not_token.span)),
-            "use `!` to perform logical negation",
-            "!",
-            Applicability::MachineApplicable,
-        )
-        .emit();
+            sub: sub_diag(
+                self.sess.source_map().span_until_non_whitespace(lo.to(negated_token.span)),
+            ),
+        });
 
-        // ...and recover!
-        self.parse_unary_expr(lo, UnOp::Not)
+        self.parse_expr_unary(lo, UnOp::Not)
     }
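
This recovery handles `not` used as a negation keyword (as in Python), and the subdiagnostic picks between the logical and bitwise `!` suggestions based on the operand. A small user-facing illustration of both suggested forms (plain Rust, not part of this patch):

    fn main() {
        let enabled = false;
        // Writing `if not enabled { ... }` parses `not` as an identifier; the
        // recovery above suggests `!` for logical negation, as used here.
        if !enabled {
            println!("feature disabled");
        }
        // For a numeric operand the same keyword is suggested as bitwise not:
        let mask: u8 = !0b0000_1111;
        println!("{mask:#010b}");
    }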
 
-    /// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
-    fn interpolated_or_expr_span(
-        &self,
-        expr: PResult<'a, P<Expr>>,
-    ) -> PResult<'a, (Span, P<Expr>)> {
-        expr.map(|e| {
-            (
-                match self.prev_token.kind {
-                    TokenKind::Interpolated(..) => self.prev_token.span,
-                    _ => e.span,
-                },
-                e,
-            )
-        })
+    /// Returns the span of expr if it was not interpolated, or the span of the interpolated token.
+    fn interpolated_or_expr_span(&self, expr: &Expr) -> Span {
+        match self.prev_token.kind {
+            TokenKind::Interpolated(..) => self.prev_token.span,
+            _ => expr.span,
+        }
     }
 
     fn parse_assoc_op_cast(
@@ -705,11 +721,7 @@ impl<'a> Parser<'a> {
         expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind,
     ) -> PResult<'a, P<Expr>> {
         let mk_expr = |this: &mut Self, lhs: P<Expr>, rhs: P<Ty>| {
-            this.mk_expr(
-                this.mk_expr_sp(&lhs, lhs_span, rhs.span),
-                expr_kind(lhs, rhs),
-                AttrVec::new(),
-            )
+            this.mk_expr(this.mk_expr_sp(&lhs, lhs_span, rhs.span), expr_kind(lhs, rhs))
         };
 
         // Save the state of the parser before parsing type normally, in case there is a
@@ -718,6 +730,10 @@ impl<'a> Parser<'a> {
         let cast_expr = match self.parse_as_cast_ty() {
             Ok(rhs) => mk_expr(self, lhs, rhs),
             Err(type_err) => {
+                if !self.may_recover() {
+                    return Err(type_err);
+                }
+
                 // Rewind to before attempting to parse the type with generics, to recover
                 // from situations like `x as usize < y` in which we first tried to parse
                 // `usize < y` as a type with generic arguments.
@@ -728,7 +744,7 @@ impl<'a> Parser<'a> {
                     (
                         // `foo: `
                         ExprKind::Path(None, ast::Path { segments, .. }),
-                        TokenKind::Ident(kw::For | kw::Loop | kw::While, false),
+                        token::Ident(kw::For | kw::Loop | kw::While, false),
                     ) if segments.len() == 1 => {
                         let snapshot = self.create_snapshot_for_diagnostic();
                         let label = Label {
@@ -737,17 +753,13 @@ impl<'a> Parser<'a> {
                                 segments[0].ident.span,
                             ),
                         };
-                        match self.parse_labeled_expr(label, AttrVec::new(), false) {
+                        match self.parse_expr_labeled(label, false) {
                             Ok(expr) => {
                                 type_err.cancel();
-                                self.struct_span_err(label.ident.span, "malformed loop label")
-                                    .span_suggestion(
-                                        label.ident.span,
-                                        "use the correct loop label format",
-                                        label.ident,
-                                        Applicability::MachineApplicable,
-                                    )
-                                    .emit();
+                                self.dcx().emit_err(errors::MalformedLoopLabel {
+                                    span: label.ident.span,
+                                    correct_label: label.ident,
+                                });
                                 return Ok(expr);
                             }
                             Err(err) => {
@@ -761,9 +773,36 @@ impl<'a> Parser<'a> {
 
                 match self.parse_path(PathStyle::Expr) {
                     Ok(path) => {
-                        let (op_noun, op_verb) = match self.token.kind {
-                            token::Lt => ("comparison", "comparing"),
-                            token::BinOp(token::Shl) => ("shift", "shifting"),
+                        let span_after_type = parser_snapshot_after_type.token.span;
+                        let expr = mk_expr(
+                            self,
+                            lhs,
+                            self.mk_ty(path.span, TyKind::Path(None, path.clone())),
+                        );
+
+                        let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
+                        let suggestion = errors::ComparisonOrShiftInterpretedAsGenericSugg {
+                            left: expr.span.shrink_to_lo(),
+                            right: expr.span.shrink_to_hi(),
+                        };
+
+                        match self.token.kind {
+                            token::Lt => {
+                                self.dcx().emit_err(errors::ComparisonInterpretedAsGeneric {
+                                    comparison: self.token.span,
+                                    r#type: path,
+                                    args: args_span,
+                                    suggestion,
+                                })
+                            }
+                            token::BinOp(token::Shl) => {
+                                self.dcx().emit_err(errors::ShiftInterpretedAsGeneric {
+                                    shift: self.token.span,
+                                    r#type: path,
+                                    args: args_span,
+                                    suggestion,
+                                })
+                            }
                             _ => {
                                 // We can end up here even without `<` being the next token, for
                                 // example because `parse_ty_no_plus` returns `Err` on keywords,
@@ -777,33 +816,7 @@ impl<'a> Parser<'a> {
                         // Successfully parsed the type path leaving a `<` yet to parse.
                         type_err.cancel();
 
-                        // Report non-fatal diagnostics, keep `x as usize` as an expression
-                        // in AST and continue parsing.
-                        let msg = format!(
-                            "`<` is interpreted as a start of generic arguments for `{}`, not a {}",
-                            pprust::path_to_string(&path),
-                            op_noun,
-                        );
-                        let span_after_type = parser_snapshot_after_type.token.span;
-                        let expr =
-                            mk_expr(self, lhs, self.mk_ty(path.span, TyKind::Path(None, path)));
-
-                        self.struct_span_err(self.token.span, &msg)
-                            .span_label(
-                                self.look_ahead(1, |t| t.span).to(span_after_type),
-                                "interpreted as generic arguments",
-                            )
-                            .span_label(self.token.span, format!("not interpreted as {op_noun}"))
-                            .multipart_suggestion(
-                                &format!("try {op_verb} the cast value"),
-                                vec![
-                                    (expr.span.shrink_to_lo(), "(".to_string()),
-                                    (expr.span.shrink_to_hi(), ")".to_string()),
-                                ],
-                                Applicability::MachineApplicable,
-                            )
-                            .emit();
-
+                        // Keep `x as usize` as an expression in AST and continue parsing.
                         expr
                     }
                     Err(path_err) => {
@@ -826,39 +839,33 @@ impl<'a> Parser<'a> {
         &mut self,
         cast_expr: P<Expr>,
     ) -> PResult<'a, P<Expr>> {
+        if let ExprKind::Type(_, _) = cast_expr.kind {
+            panic!("ExprKind::Type must not be parsed");
+        }
+
         let span = cast_expr.span;
-        let maybe_ascription_span = if let ExprKind::Type(ascripted_expr, _) = &cast_expr.kind {
-            Some(ascripted_expr.span.shrink_to_hi().with_hi(span.hi()))
-        } else {
-            None
-        };
 
-        // Save the memory location of expr before parsing any following postfix operators.
-        // This will be compared with the memory location of the output expression.
-        // If they different we can assume we parsed another expression because the existing expression is not reallocated.
-        let addr_before = &*cast_expr as *const _ as usize;
-        let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
-        let changed = addr_before != &*with_postfix as *const _ as usize;
+        let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?;
 
         // Check if an illegal postfix operator has been added after the cast.
-        // If the resulting expression is not a cast, or has a different memory location, it is an illegal postfix operator.
-        if !matches!(with_postfix.kind, ExprKind::Cast(_, _) | ExprKind::Type(_, _)) || changed {
+        // If the resulting expression is not a cast, it is an illegal postfix operator.
+        if !matches!(with_postfix.kind, ExprKind::Cast(_, _)) {
             let msg = format!(
-                "casts cannot be followed by {}",
+                "cast cannot be followed by {}",
                 match with_postfix.kind {
-                    ExprKind::Index(_, _) => "indexing",
+                    ExprKind::Index(..) => "indexing",
                     ExprKind::Try(_) => "`?`",
                     ExprKind::Field(_, _) => "a field access",
-                    ExprKind::MethodCall(_, _, _) => "a method call",
+                    ExprKind::MethodCall(_) => "a method call",
                     ExprKind::Call(_, _) => "a function call",
-                    ExprKind::Await(_) => "`.await`",
+                    ExprKind::Await(_, _) => "`.await`",
                     ExprKind::Err => return Ok(with_postfix),
                     _ => unreachable!("parse_dot_or_call_expr_with_ shouldn't produce this"),
                 }
             );
-            let mut err = self.struct_span_err(span, &msg);
+            let mut err = self.dcx().struct_span_err(span, msg);
 
-            let suggest_parens = |err: &mut DiagnosticBuilder<'_, _>| {
+            let suggest_parens = |err: &mut Diagnostic| {
                 let suggestions = vec![
                     (span.shrink_to_lo(), "(".to_string()),
                     (span.shrink_to_hi(), ")".to_string()),
@@ -870,52 +877,25 @@ impl<'a> Parser<'a> {
                 );
             };
 
-            // If type ascription is "likely an error", the user will already be getting a useful
-            // help message, and doesn't need a second.
-            if self.last_type_ascription.map_or(false, |last_ascription| last_ascription.1) {
-                self.maybe_annotate_with_ascription(&mut err, false);
-            } else if let Some(ascription_span) = maybe_ascription_span {
-                let is_nightly = self.sess.unstable_features.is_nightly_build();
-                if is_nightly {
-                    suggest_parens(&mut err);
-                }
-                err.span_suggestion(
-                    ascription_span,
-                    &format!(
-                        "{}remove the type ascription",
-                        if is_nightly { "alternatively, " } else { "" }
-                    ),
-                    "",
-                    if is_nightly {
-                        Applicability::MaybeIncorrect
-                    } else {
-                        Applicability::MachineApplicable
-                    },
-                );
-            } else {
-                suggest_parens(&mut err);
-            }
+            suggest_parens(&mut err);
+
             err.emit();
         };
         Ok(with_postfix)
     }
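
The check above rejects a postfix operator applied directly to a cast, e.g. the method call in `x as u32.count_ones()`, and suggests wrapping the cast in parentheses. A minimal user-facing illustration of the suggested form:

    fn main() {
        let x: i64 = -1;
        // `x as u32.count_ones()` hits "cast cannot be followed by a method
        // call"; the suggested fix is to parenthesize the cast first.
        let ones = (x as u32).count_ones();
        println!("{ones}"); // prints 32
    }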
 
-    fn parse_assoc_op_ascribe(&mut self, lhs: P<Expr>, lhs_span: Span) -> PResult<'a, P<Expr>> {
-        let maybe_path = self.could_ascription_be_path(&lhs.kind);
-        self.last_type_ascription = Some((self.prev_token.span, maybe_path));
-        let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?;
-        self.sess.gated_spans.gate(sym::type_ascription, lhs.span);
-        Ok(lhs)
-    }
-
     /// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
-    fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+    fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
         self.expect_and()?;
         let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
         let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
         let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo);
-        let expr = self.parse_prefix_expr(None);
-        let (hi, expr) = self.interpolated_or_expr_span(expr)?;
+        let expr = if self.token.is_range_separator() {
+            self.parse_expr_prefix_range(None)
+        } else {
+            self.parse_expr_prefix(None)
+        }?;
+        let hi = self.interpolated_or_expr_span(&expr);
         let span = lo.to(hi);
         if let Some(lt) = lifetime {
             self.error_remove_borrow_lifetime(span, lt.ident.span);
@@ -924,15 +904,7 @@ impl<'a> Parser<'a> {
     }
 
     fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
-        self.struct_span_err(span, "borrow expressions cannot be annotated with lifetimes")
-            .span_label(lt_span, "annotated with lifetime here")
-            .span_suggestion(
-                lt_span,
-                "remove the lifetime annotation",
-                "",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.dcx().emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span });
     }
 
     /// Parse `mut?` or `raw [ const | mut ]`.
@@ -951,34 +923,39 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
-    fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_dot_or_call(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         self.collect_tokens_for_expr(attrs, |this, attrs| {
-            let base = this.parse_bottom_expr();
-            let (span, base) = this.interpolated_or_expr_span(base)?;
-            this.parse_dot_or_call_expr_with(base, span, attrs)
+            let base = this.parse_expr_bottom()?;
+            let span = this.interpolated_or_expr_span(&base);
+            this.parse_expr_dot_or_call_with(base, span, attrs)
         })
     }
 
-    pub(super) fn parse_dot_or_call_expr_with(
+    pub(super) fn parse_expr_dot_or_call_with(
         &mut self,
         e0: P<Expr>,
         lo: Span,
-        mut attrs: Vec<ast::Attribute>,
+        mut attrs: ast::AttrVec,
     ) -> PResult<'a, P<Expr>> {
         // Stitch the list of outer attributes onto the return value.
         // A little bit ugly, but the best way given the current code
         // structure
-        self.parse_dot_or_call_expr_with_(e0, lo).map(|expr| {
-            expr.map(|mut expr| {
-                attrs.extend::<Vec<_>>(expr.attrs.into());
-                expr.attrs = attrs.into();
-                expr
+        let res = self.parse_expr_dot_or_call_with_(e0, lo);
+        if attrs.is_empty() {
+            res
+        } else {
+            res.map(|expr| {
+                expr.map(|mut expr| {
+                    attrs.extend(expr.attrs);
+                    expr.attrs = attrs;
+                    expr
+                })
             })
-        })
+        }
     }
 
-    fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
+    fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
         loop {
             let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
                 // we are using noexpect here because we don't expect a `?` directly after a `return`
@@ -989,7 +966,7 @@ impl<'a> Parser<'a> {
             };
             if has_question {
                 // `expr?`
-                e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e), AttrVec::new());
+                e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
                 continue;
             }
             let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
@@ -1008,27 +985,21 @@ impl<'a> Parser<'a> {
                 return Ok(e);
             }
             e = match self.token.kind {
-                token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
-                token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
+                token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e),
+                token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?,
                 _ => return Ok(e),
             }
         }
     }
 
-    fn look_ahead_type_ascription_as_field(&mut self) -> bool {
-        self.look_ahead(1, |t| t.is_ident())
-            && self.look_ahead(2, |t| t == &token::Colon)
-            && self.look_ahead(3, |t| t.can_begin_expr())
-    }
-
     fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
         match self.token.uninterpolate().kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
-                Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None))
+                Ok(self.parse_expr_tuple_field_access(lo, base, symbol, suffix, None))
             }
             token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
-                Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix))
+                Ok(self.parse_expr_tuple_field_access_float(lo, base, symbol, suffix))
             }
             _ => {
                 self.error_unexpected_after_dot();
@@ -1038,9 +1009,16 @@ impl<'a> Parser<'a> {
     }
 
     fn error_unexpected_after_dot(&self) {
-        // FIXME Could factor this out into non_fatal_unexpected or something.
         let actual = pprust::token_to_string(&self.token);
-        self.struct_span_err(self.token.span, &format!("unexpected token: `{actual}`")).emit();
+        let span = self.token.span;
+        let sm = self.sess.source_map();
+        let (span, actual) = match (&self.token.kind, self.subparser_name) {
+            (token::Eof, Some(_)) if let Ok(actual) = sm.span_to_snippet(sm.next_point(span)) => {
+                (span.shrink_to_hi(), actual.into())
+            }
+            _ => (span, actual),
+        };
+        self.dcx().emit_err(errors::UnexpectedTokenAfterDot { span, actual });
     }
 
     // We need an identifier or integer, but the next token is a float.
@@ -1050,13 +1028,8 @@ impl<'a> Parser<'a> {
     // support pushing "future tokens" (would be also helpful to `break_and_eat`), or
     // we should break everything including floats into more basic proc-macro style
     // tokens in the lexer (probably preferable).
-    fn parse_tuple_field_access_expr_float(
-        &mut self,
-        lo: Span,
-        base: P<Expr>,
-        float: Symbol,
-        suffix: Option<Symbol>,
-    ) -> P<Expr> {
+    // See also `TokenKind::break_two_token_op` which does similar splitting of `>>` into `>`.
+    fn break_up_float(&self, float: Symbol, span: Span) -> DestructuredFloat {
         #[derive(Debug)]
         enum FloatComponent {
             IdentLike(String),
@@ -1076,7 +1049,7 @@ impl<'a> Parser<'a> {
                 }
                 components.push(Punct(c));
             } else {
-                panic!("unexpected character in a float token: {:?}", c)
+                panic!("unexpected character in a float token: {c:?}")
             }
         }
         if !ident_like.is_empty() {
@@ -1086,14 +1059,13 @@ impl<'a> Parser<'a> {
         // With proc macros the span can refer to anything, the source may be too short,
         // or too long, or non-ASCII. It only makes sense to break our span into components
         // if its underlying text is identical to our float literal.
-        let span = self.token.span;
         let can_take_span_apart =
             || self.span_to_snippet(span).as_deref() == Ok(float_str).as_deref();
 
         match &*components {
             // 1e2
             [IdentLike(i)] => {
-                self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None)
+                DestructuredFloat::Single(Symbol::intern(i), span)
             }
             // 1.
             [IdentLike(i), Punct('.')] => {
@@ -1105,11 +1077,8 @@ impl<'a> Parser<'a> {
                 } else {
                     (span, span)
                 };
-                assert!(suffix.is_none());
-                let symbol = Symbol::intern(&i);
-                self.token = Token::new(token::Ident(symbol, false), ident_span);
-                let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
-                self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token))
+                let symbol = Symbol::intern(i);
+                DestructuredFloat::TrailingDot(symbol, ident_span, dot_span)
             }
             // 1.2 | 1.2e3
             [IdentLike(i1), Punct('.'), IdentLike(i2)] => {
@@ -1124,17 +1093,9 @@ impl<'a> Parser<'a> {
                 } else {
                     (span, span, span)
                 };
-                let symbol1 = Symbol::intern(&i1);
-                self.token = Token::new(token::Ident(symbol1, false), ident1_span);
-                // This needs to be `Spacing::Alone` to prevent regressions.
-                // See issue #76399 and PR #76285 for more details
-                let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
-                let base1 =
-                    self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1));
-                let symbol2 = Symbol::intern(&i2);
-                let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
-                self.bump_with((next_token2, self.token_spacing)); // `.`
-                self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None)
+                let symbol1 = Symbol::intern(i1);
+                let symbol2 = Symbol::intern(i2);
+                DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span)
             }
             // 1e+ | 1e- (recovered)
             [IdentLike(_), Punct('+' | '-')] |
@@ -1146,13 +1107,149 @@ impl<'a> Parser<'a> {
             [IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
                 // See the FIXME about `TokenCursor` above.
                 self.error_unexpected_after_dot();
-                base
+                DestructuredFloat::Error
+            }
+            _ => panic!("unexpected components in a float token: {components:?}"),
+        }
+    }
+
+    fn parse_expr_tuple_field_access_float(
+        &mut self,
+        lo: Span,
+        base: P<Expr>,
+        float: Symbol,
+        suffix: Option<Symbol>,
+    ) -> P<Expr> {
+        match self.break_up_float(float, self.token.span) {
+            // 1e2
+            DestructuredFloat::Single(sym, _sp) => {
+                self.parse_expr_tuple_field_access(lo, base, sym, suffix, None)
+            }
+            // 1.
+            DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
+                assert!(suffix.is_none());
+                self.token = Token::new(token::Ident(sym, false), ident_span);
+                let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
+                self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
+            }
+            // 1.2 | 1.2e3
+            DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
+                self.token = Token::new(token::Ident(symbol1, false), ident1_span);
+                // This needs to be `Spacing::Alone` to prevent regressions.
+                // See issue #76399 and PR #76285 for more details
+                let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
+                let base1 =
+                    self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
+                let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
+                self.bump_with((next_token2, self.token_spacing)); // `.`
+                self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
             }
-            _ => panic!("unexpected components in a float token: {:?}", components),
+            DestructuredFloat::Error => base,
         }
     }
 
-    fn parse_tuple_field_access_expr(
+    /// Parse the field access used in `offset_of`, matched by `$(e:expr)+`.
+    /// Currently returns a list of idents. However, it should be possible in
+    /// the future to also support array indices, which might be arbitrary expressions.
+    fn parse_floating_field_access(&mut self) -> PResult<'a, P<[Ident]>> {
+        let mut fields = Vec::new();
+        let mut trailing_dot = None;
+
+        loop {
+            // This is expected to use a metavariable `$(args:expr)+`, but the builtin syntax
+            // could be called directly. Calling `parse_expr` allows this function to only
+            // consider `Expr`s.
+            let expr = self.parse_expr()?;
+            let mut current = &expr;
+            let start_idx = fields.len();
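+            // Fields within a single parsed expression are discovered outermost-first
+            // (`a.b.c` parses as `Field(Field(a, b), c)`), so each one is inserted at
+            // `start_idx` to keep the overall list in source order.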
+            loop {
+                match current.kind {
+                    ExprKind::Field(ref left, right) => {
+                        // Field access is read right-to-left.
+                        fields.insert(start_idx, right);
+                        trailing_dot = None;
+                        current = left;
+                    }
+                    // Parse this both to give helpful error messages and to
+                    // verify it can be done with this parser setup.
+                    ExprKind::Index(ref left, ref _right, span) => {
+                        self.dcx().emit_err(errors::ArrayIndexInOffsetOf(span));
+                        current = left;
+                    }
+                    ExprKind::Lit(token::Lit {
+                        kind: token::Float | token::Integer,
+                        symbol,
+                        suffix,
+                    }) => {
+                        if let Some(suffix) = suffix {
+                            self.expect_no_tuple_index_suffix(current.span, suffix);
+                        }
+                        match self.break_up_float(symbol, current.span) {
+                            // 1e2
+                            DestructuredFloat::Single(sym, sp) => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(sym, sp));
+                            }
+                            // 1.
+                            DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => {
+                                assert!(suffix.is_none());
+                                trailing_dot = Some(dot_span);
+                                fields.insert(start_idx, Ident::new(sym, sym_span));
+                            }
+                            // 1.2 | 1.2e3
+                            DestructuredFloat::MiddleDot(
+                                symbol1,
+                                span1,
+                                _dot_span,
+                                symbol2,
+                                span2,
+                            ) => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(symbol2, span2));
+                                fields.insert(start_idx, Ident::new(symbol1, span1));
+                            }
+                            DestructuredFloat::Error => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, Ident::new(symbol, self.prev_token.span));
+                            }
+                        }
+                        break;
+                    }
+                    ExprKind::Path(None, Path { ref segments, .. }) => {
+                        match &segments[..] {
+                            [PathSegment { ident, args: None, .. }] => {
+                                trailing_dot = None;
+                                fields.insert(start_idx, *ident)
+                            }
+                            _ => {
+                                self.dcx().emit_err(errors::InvalidOffsetOf(current.span));
+                                break;
+                            }
+                        }
+                        break;
+                    }
+                    _ => {
+                        self.dcx().emit_err(errors::InvalidOffsetOf(current.span));
+                        break;
+                    }
+                }
+            }
+
+            if matches!(self.token.kind, token::CloseDelim(..) | token::Comma) {
+                break;
+            } else if trailing_dot.is_none() {
+                // This loop should only repeat if there is a trailing dot.
+                self.dcx().emit_err(errors::InvalidOffsetOf(self.token.span));
+                break;
+            }
+        }
+        if let Some(dot) = trailing_dot {
+            self.dcx().emit_err(errors::InvalidOffsetOf(dot));
+        }
+        Ok(fields.into_iter().collect())
+    }
+
+    fn parse_expr_tuple_field_access(
         &mut self,
         lo: Span,
         base: P<Expr>,
@@ -1166,30 +1263,28 @@ impl<'a> Parser<'a> {
         }
         let span = self.prev_token.span;
         let field = ExprKind::Field(base, Ident::new(field, span));
-        self.expect_no_suffix(span, "a tuple index", suffix);
-        self.mk_expr(lo.to(span), field, AttrVec::new())
+        if let Some(suffix) = suffix {
+            self.expect_no_tuple_index_suffix(span, suffix);
+        }
+        self.mk_expr(lo.to(span), field)
     }
 
     /// Parse a function call expression, `expr(...)`.
-    fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
-        let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
-            && self.look_ahead_type_ascription_as_field()
-        {
+    fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
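+        // Snapshot the parser before the argument list: if the arguments fail to parse and
+        // the callee is a plain path, `maybe_recover_struct_lit_bad_delims` below rewinds to
+        // this point and re-parses the input as struct fields to suggest brace delimiters.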
+        let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
             Some((self.create_snapshot_for_diagnostic(), fun.kind.clone()))
         } else {
             None
         };
         let open_paren = self.token.span;
 
-        let mut seq = self.parse_paren_expr_seq().map(|args| {
-            self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args), AttrVec::new())
-        });
-        if let Some(expr) =
-            self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot)
-        {
-            return expr;
+        let seq = self
+            .parse_expr_paren_seq()
+            .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
+        match self.maybe_recover_struct_lit_bad_delims(lo, open_paren, seq, snapshot) {
+            Ok(expr) => expr,
+            Err(err) => self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err),
         }
-        self.recover_seq_parse_error(Delimiter::Parenthesis, lo, seq)
     }
 
     /// If we encounter a parser state that looks like the user has written a `struct` literal with
@@ -1199,14 +1294,13 @@ impl<'a> Parser<'a> {
         &mut self,
         lo: Span,
         open_paren: Span,
-        seq: &mut PResult<'a, P<Expr>>,
+        seq: PResult<'a, P<Expr>>,
         snapshot: Option<(SnapshotParser<'a>, ExprKind)>,
-    ) -> Option<P<Expr>> {
-        match (seq.as_mut(), snapshot) {
-            (Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
-                let name = pprust::path_to_string(&path);
+    ) -> PResult<'a, P<Expr>> {
+        match (self.may_recover(), seq, snapshot) {
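+            // Only attempt recovery when it is allowed, the argument list failed to parse,
+            // and the callee was a plain path that could name a struct or enum variant.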
+            (true, Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
                 snapshot.bump(); // `(`
-                match snapshot.parse_struct_fields(path, false, Delimiter::Parenthesis) {
+                match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) {
                     Ok((fields, ..))
                         if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
                     {
@@ -1214,86 +1308,98 @@ impl<'a> Parser<'a> {
                         // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
                         self.restore_snapshot(snapshot);
                         let close_paren = self.prev_token.span;
-                        let span = lo.to(self.prev_token.span);
-                        if !fields.is_empty() {
-                            let replacement_err = self.struct_span_err(
-                                span,
-                                "invalid `struct` delimiters or `fn` call arguments",
-                            );
-                            mem::replace(err, replacement_err).cancel();
-
-                            err.multipart_suggestion(
-                                &format!("if `{name}` is a struct, use braces as delimiters"),
-                                vec![
-                                    (open_paren, " { ".to_string()),
-                                    (close_paren, " }".to_string()),
-                                ],
-                                Applicability::MaybeIncorrect,
-                            );
-                            err.multipart_suggestion(
-                                &format!("if `{name}` is a function, use the arguments directly"),
-                                fields
-                                    .into_iter()
-                                    .map(|field| (field.span.until(field.expr.span), String::new()))
-                                    .collect(),
-                                Applicability::MaybeIncorrect,
-                            );
-                            err.emit();
+                        let span = lo.to(close_paren);
+                        // filter shorthand fields
+                        let fields: Vec<_> =
+                            fields.into_iter().filter(|field| !field.is_shorthand).collect();
+
+                        if !fields.is_empty() &&
+                            // `token.kind` should not be compared here.
+                            // This is because the `snapshot.token.kind` is treated as the same as
+                            // that of the open delim in `TokenTreesReader::parse_token_tree`, even
+                            // if they are different.
+                            self.span_to_snippet(close_paren).is_ok_and(|snippet| snippet == ")")
+                        {
+                            err.cancel();
+                            self.dcx()
+                                .create_err(errors::ParenthesesWithStructFields {
+                                    span,
+                                    r#type: path,
+                                    braces_for_struct: errors::BracesForStructLiteral {
+                                        first: open_paren,
+                                        second: close_paren,
+                                    },
+                                    no_fields_for_fn: errors::NoFieldsForFnCall {
+                                        fields: fields
+                                            .into_iter()
+                                            .map(|field| field.span.until(field.expr.span))
+                                            .collect(),
+                                    },
+                                })
+                                .emit();
                         } else {
                             err.emit();
                         }
-                        return Some(self.mk_expr_err(span));
+                        Ok(self.mk_expr_err(span))
                     }
-                    Ok(_) => {}
-                    Err(mut err) => {
-                        err.emit();
+                    Ok(_) => Err(err),
+                    Err(err2) => {
+                        err2.cancel();
+                        Err(err)
                     }
                 }
             }
-            _ => {}
+            (_, seq, _) => seq,
         }
-        None
     }
 
     /// Parse an indexing expression `expr[...]`.
-    fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_index(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+        let prev_span = self.prev_token.span;
+        let open_delim_span = self.token.span;
         self.bump(); // `[`
         let index = self.parse_expr()?;
+        self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?;
         self.expect(&token::CloseDelim(Delimiter::Bracket))?;
-        Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new()))
+        Ok(self.mk_expr(
+            lo.to(self.prev_token.span),
+            self.mk_index(base, index, open_delim_span.to(self.prev_token.span)),
+        ))
     }
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.uninterpolated_span().rust_2018() && self.eat_keyword(kw::Await) {
+        if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await) {
             return Ok(self.mk_await_expr(self_arg, lo));
         }
 
         let fn_span_lo = self.token.span;
-        let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
-        self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
-        self.check_turbofish_missing_angle_brackets(&mut segment);
+        let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
+        self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]);
+        self.check_turbofish_missing_angle_brackets(&mut seg);
 
         if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             // Method call `expr.f()`
-            let mut args = self.parse_paren_expr_seq()?;
-            args.insert(0, self_arg);
-
+            let args = self.parse_expr_paren_seq()?;
             let fn_span = fn_span_lo.to(self.prev_token.span);
             let span = lo.to(self.prev_token.span);
-            Ok(self.mk_expr(span, ExprKind::MethodCall(segment, args, fn_span), AttrVec::new()))
+            Ok(self.mk_expr(
+                span,
+                ExprKind::MethodCall(Box::new(ast::MethodCall {
+                    seg,
+                    receiver: self_arg,
+                    args,
+                    span: fn_span,
+                })),
+            ))
         } else {
             // Field access `expr.f`
-            if let Some(args) = segment.args {
-                self.struct_span_err(
-                    args.span(),
-                    "field expressions cannot have generic arguments",
-                )
-                .emit();
+            if let Some(args) = seg.args {
+                self.dcx().emit_err(errors::FieldExpressionWithGeneric(args.span()));
             }
 
             let span = lo.to(self.prev_token.span);
-            Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), AttrVec::new()))
+            Ok(self.mk_expr(span, ExprKind::Field(self_arg, seg.ident)))
         }
     }
 
@@ -1302,153 +1408,153 @@ impl<'a> Parser<'a> {
     ///
     /// N.B., this does not parse outer attributes, and is private because it only works
     /// correctly if called from `parse_dot_or_call_expr()`.
-    fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
         maybe_whole_expr!(self);
 
         // Outer attributes are already parsed and will be
         // added to the return value after the fact.
-        //
-        // Therefore, prevent sub-parser from parsing
-        // attributes by giving them an empty "already-parsed" list.
-        let attrs = AttrVec::new();
 
-        // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
-        let lo = self.token.span;
-        if let token::Literal(_) = self.token.kind {
-            // This match arm is a special-case of the `_` match arm below and
-            // could be removed without changing functionality, but it's faster
-            // to have it here, especially for programs with large constants.
-            self.parse_lit_expr(attrs)
-        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
-            self.parse_tuple_parens_expr(attrs)
-        } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-            self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs)
-        } else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
-            self.parse_closure_expr(attrs).map_err(|mut err| {
-                // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
-                // then suggest parens around the lhs.
-                if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
-                    self.sess.expr_parentheses_needed(&mut err, *sp);
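+        // Parse the leaf expression with `ALLOW_LET` removed from the restrictions; the
+        // caller's original set is passed on to `parse_expr_tuple_parens` and
+        // `parse_expr_let` below.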
+        let restrictions = self.restrictions;
+        self.with_res(restrictions - Restrictions::ALLOW_LET, |this| {
+            // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
+            let lo = this.token.span;
+            if let token::Literal(_) = this.token.kind {
+                // This match arm is a special-case of the `_` match arm below and
+                // could be removed without changing functionality, but it's faster
+                // to have it here, especially for programs with large constants.
+                this.parse_expr_lit()
+            } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+                this.parse_expr_tuple_parens(restrictions)
+            } else if this.check(&token::OpenDelim(Delimiter::Brace)) {
+                this.parse_expr_block(None, lo, BlockCheckMode::Default)
+            } else if this.check(&token::BinOp(token::Or)) || this.check(&token::OrOr) {
+                this.parse_expr_closure().map_err(|mut err| {
+                    // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
+                    // then suggest parens around the lhs.
+                    if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
+                        err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
+                    }
+                    err
+                })
+            } else if this.check(&token::OpenDelim(Delimiter::Bracket)) {
+                this.parse_expr_array_or_repeat(Delimiter::Bracket)
+            } else if this.is_builtin() {
+                this.parse_expr_builtin()
+            } else if this.check_path() {
+                this.parse_expr_path_start()
+            } else if this.check_keyword(kw::Move)
+                || this.check_keyword(kw::Static)
+                || this.check_const_closure()
+            {
+                this.parse_expr_closure()
+            } else if this.eat_keyword(kw::If) {
+                this.parse_expr_if()
+            } else if this.check_keyword(kw::For) {
+                if this.choose_generics_over_qpath(1) {
+                    this.parse_expr_closure()
+                } else {
+                    assert!(this.eat_keyword(kw::For));
+                    this.parse_expr_for(None, this.prev_token.span)
                 }
-                err
-            })
-        } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
-            self.parse_array_or_repeat_expr(attrs, Delimiter::Bracket)
-        } else if self.check_path() {
-            self.parse_path_start_expr(attrs)
-        } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
-            self.parse_closure_expr(attrs)
-        } else if self.eat_keyword(kw::If) {
-            self.parse_if_expr(attrs)
-        } else if self.check_keyword(kw::For) {
-            if self.choose_generics_over_qpath(1) {
-                self.parse_closure_expr(attrs)
-            } else {
-                assert!(self.eat_keyword(kw::For));
-                self.parse_for_expr(None, self.prev_token.span, attrs)
-            }
-        } else if self.eat_keyword(kw::While) {
-            self.parse_while_expr(None, self.prev_token.span, attrs)
-        } else if let Some(label) = self.eat_label() {
-            self.parse_labeled_expr(label, attrs, true)
-        } else if self.eat_keyword(kw::Loop) {
-            let sp = self.prev_token.span;
-            self.parse_loop_expr(None, self.prev_token.span, attrs).map_err(|mut err| {
-                err.span_label(sp, "while parsing this `loop` expression");
-                err
-            })
-        } else if self.eat_keyword(kw::Continue) {
-            let kind = ExprKind::Continue(self.eat_label());
-            Ok(self.mk_expr(lo.to(self.prev_token.span), kind, attrs))
-        } else if self.eat_keyword(kw::Match) {
-            let match_sp = self.prev_token.span;
-            self.parse_match_expr(attrs).map_err(|mut err| {
-                err.span_label(match_sp, "while parsing this `match` expression");
-                err
-            })
-        } else if self.eat_keyword(kw::Unsafe) {
-            let sp = self.prev_token.span;
-            self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided), attrs)
-                .map_err(|mut err| {
-                    err.span_label(sp, "while parsing this `unsafe` expression");
+            } else if this.eat_keyword(kw::While) {
+                this.parse_expr_while(None, this.prev_token.span)
+            } else if let Some(label) = this.eat_label() {
+                this.parse_expr_labeled(label, true)
+            } else if this.eat_keyword(kw::Loop) {
+                let sp = this.prev_token.span;
+                this.parse_expr_loop(None, this.prev_token.span).map_err(|mut err| {
+                    err.span_label(sp, "while parsing this `loop` expression");
                     err
                 })
-        } else if self.check_inline_const(0) {
-            self.parse_const_block(lo.to(self.token.span), false)
-        } else if self.is_do_catch_block() {
-            self.recover_do_catch(attrs)
-        } else if self.is_try_block() {
-            self.expect_keyword(kw::Try)?;
-            self.parse_try_block(lo, attrs)
-        } else if self.eat_keyword(kw::Return) {
-            self.parse_return_expr(attrs)
-        } else if self.eat_keyword(kw::Break) {
-            self.parse_break_expr(attrs)
-        } else if self.eat_keyword(kw::Yield) {
-            self.parse_yield_expr(attrs)
-        } else if self.is_do_yeet() {
-            self.parse_yeet_expr(attrs)
-        } else if self.check_keyword(kw::Let) {
-            self.manage_let_chains_context();
-            self.bump();
-            self.parse_let_expr(attrs)
-        } else if self.eat_keyword(kw::Underscore) {
-            Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore, attrs))
-        } else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
-            // Don't complain about bare semicolons after unclosed braces
-            // recovery in order to keep the error count down. Fixing the
-            // delimiters will possibly also fix the bare semicolon found in
-            // expression context. For example, silence the following error:
-            //
-            //     error: expected expression, found `;`
-            //      --> file.rs:2:13
-            //       |
-            //     2 |     foo(bar(;
-            //       |             ^ expected expression
-            self.bump();
-            Ok(self.mk_expr_err(self.token.span))
-        } else if self.token.uninterpolated_span().rust_2018() {
-            // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
-            if self.check_keyword(kw::Async) {
-                if self.is_async_block() {
-                    // Check for `async {` and `async move {`.
-                    self.parse_async_block(attrs)
+            } else if this.eat_keyword(kw::Match) {
+                let match_sp = this.prev_token.span;
+                this.parse_expr_match().map_err(|mut err| {
+                    err.span_label(match_sp, "while parsing this `match` expression");
+                    err
+                })
+            } else if this.eat_keyword(kw::Unsafe) {
+                let sp = this.prev_token.span;
+                this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
+                    |mut err| {
+                        err.span_label(sp, "while parsing this `unsafe` expression");
+                        err
+                    },
+                )
+            } else if this.check_inline_const(0) {
+                this.parse_const_block(lo.to(this.token.span), false)
+            } else if this.may_recover() && this.is_do_catch_block() {
+                this.recover_do_catch()
+            } else if this.is_try_block() {
+                this.expect_keyword(kw::Try)?;
+                this.parse_try_block(lo)
+            } else if this.eat_keyword(kw::Return) {
+                this.parse_expr_return()
+            } else if this.eat_keyword(kw::Continue) {
+                this.parse_expr_continue(lo)
+            } else if this.eat_keyword(kw::Break) {
+                this.parse_expr_break()
+            } else if this.eat_keyword(kw::Yield) {
+                this.parse_expr_yield()
+            } else if this.is_do_yeet() {
+                this.parse_expr_yeet()
+            } else if this.eat_keyword(kw::Become) {
+                this.parse_expr_become()
+            } else if this.check_keyword(kw::Let) {
+                this.parse_expr_let(restrictions)
+            } else if this.eat_keyword(kw::Underscore) {
+                Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
+            } else if this.token.uninterpolated_span().at_least_rust_2018() {
+                // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
+                if this.token.uninterpolated_span().at_least_rust_2024()
+                    // check for `gen {}` and `gen move {}`
+                    // or `async gen {}` and `async gen move {}`
+                    && (this.is_gen_block(kw::Gen, 0)
+                        || (this.check_keyword(kw::Async) && this.is_gen_block(kw::Gen, 1)))
+                {
+                    // FIXME: (async) gen closures aren't yet parsed.
+                    this.parse_gen_block()
+                } else if this.check_keyword(kw::Async) {
+                    // FIXME(gen_blocks): Parse `gen async` and suggest swap
+                    if this.is_gen_block(kw::Async, 0) {
+                        // Check for `async {` and `async move {`.
+                        this.parse_gen_block()
+                    } else {
+                        this.parse_expr_closure()
+                    }
+                } else if this.eat_keyword_noexpect(kw::Await) {
+                    this.recover_incorrect_await_syntax(lo, this.prev_token.span)
                 } else {
-                    self.parse_closure_expr(attrs)
+                    this.parse_expr_lit()
                 }
-            } else if self.eat_keyword(kw::Await) {
-                self.recover_incorrect_await_syntax(lo, self.prev_token.span, attrs)
             } else {
-                self.parse_lit_expr(attrs)
+                this.parse_expr_lit()
             }
-        } else {
-            self.parse_lit_expr(attrs)
-        }
+        })
     }
 
-    fn parse_lit_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_lit(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        match self.parse_opt_lit() {
-            Some(literal) => {
-                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal), attrs);
+        match self.parse_opt_token_lit() {
+            Some((token_lit, _)) => {
+                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
                 self.maybe_recover_from_bad_qpath(expr)
             }
             None => self.try_macro_suggestion(),
         }
     }
 
-    fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
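+        // The element parser keeps only `ALLOW_LET` from the caller's restrictions;
+        // everything else is dropped for the contents of the parentheses.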
         let (es, trailing_comma) = match self.parse_seq_to_end(
             &token::CloseDelim(Delimiter::Parenthesis),
             SeqSep::trailing_allowed(token::Comma),
-            |p| p.parse_expr_catch_underscore(),
+            |p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)),
         ) {
             Ok(x) => x,
             Err(err) => {
-                return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, Err(err)));
+                return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
             }
         };
         let kind = if es.len() == 1 && !trailing_comma {
@@ -1458,118 +1564,137 @@ impl<'a> Parser<'a> {
             // `(e,)` is a tuple with only one field, `e`.
             ExprKind::Tup(es)
         };
-        let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs);
+        let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
-    fn parse_array_or_repeat_expr(
-        &mut self,
-        attrs: AttrVec,
-        close_delim: Delimiter,
-    ) -> PResult<'a, P<Expr>> {
+    fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         self.bump(); // `[` or other open delim
 
         let close = &token::CloseDelim(close_delim);
         let kind = if self.eat(close) {
             // Empty vector
-            ExprKind::Array(Vec::new())
+            ExprKind::Array(ThinVec::new())
         } else {
             // Non-empty vector
             let first_expr = self.parse_expr()?;
             if self.eat(&token::Semi) {
                 // Repeating array syntax: `[ 0; 512 ]`
-                let count = self.parse_anon_const_expr()?;
+                let count = self.parse_expr_anon_const()?;
                 self.expect(close)?;
                 ExprKind::Repeat(first_expr, count)
             } else if self.eat(&token::Comma) {
                 // Vector with two or more elements.
                 let sep = SeqSep::trailing_allowed(token::Comma);
-                let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
-                let mut exprs = vec![first_expr];
-                exprs.extend(remaining_exprs);
+                let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
+                exprs.insert(0, first_expr);
                 ExprKind::Array(exprs)
             } else {
                 // Vector with one element
                 self.expect(close)?;
-                ExprKind::Array(vec![first_expr])
+                ExprKind::Array(thin_vec![first_expr])
             }
         };
-        let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs);
+        let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
-    fn parse_path_start_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
+        let maybe_eq_tok = self.prev_token.clone();
         let (qself, path) = if self.eat_lt() {
-            let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+            let lt_span = self.prev_token.span;
+            let (qself, path) = self.parse_qpath(PathStyle::Expr).map_err(|mut err| {
+                // Suggests using '<=' if there is an error parsing the qpath when the previous
+                // token is an '=' token. Only emits the suggestion if the '<' token and '=' token
+                // are directly adjacent (i.e. '=<').
+                if maybe_eq_tok.kind == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() {
+                    let eq_lt = maybe_eq_tok.span.to(lt_span);
+                    err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified);
+                }
+                err
+            })?;
             (Some(qself), path)
         } else {
             (None, self.parse_path(PathStyle::Expr)?)
         };
-        let lo = path.span;
 
         // `!`, as an operator, is prefix, so we know this isn't that.
-        let (hi, kind) = if self.eat(&token::Not) {
+        let (span, kind) = if self.eat(&token::Not) {
             // MACRO INVOCATION expression
             if qself.is_some() {
-                self.struct_span_err(path.span, "macros cannot use qualified paths").emit();
+                self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
             }
-            let mac = MacCall {
-                path,
-                args: self.parse_mac_args()?,
-                prior_type_ascription: self.last_type_ascription,
-            };
-            (self.prev_token.span, ExprKind::MacCall(mac))
-        } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-            if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) {
-                if qself.is_some() {
-                    self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
-                }
-                return expr;
-            } else {
-                (path.span, ExprKind::Path(qself, path))
+            let lo = path.span;
+            let mac = P(MacCall { path, args: self.parse_delim_args()? });
+            (lo.to(self.prev_token.span), ExprKind::MacCall(mac))
+        } else if self.check(&token::OpenDelim(Delimiter::Brace))
+            && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
+        {
+            if qself.is_some() {
+                self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
             }
+            return expr;
         } else {
             (path.span, ExprKind::Path(qself, path))
         };
 
-        let expr = self.mk_expr(lo.to(hi), kind, attrs);
+        let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
     /// Parse `'label: $expr`. The label is already parsed.
-    fn parse_labeled_expr(
+    pub(super) fn parse_expr_labeled(
         &mut self,
-        label: Label,
-        attrs: AttrVec,
+        label_: Label,
         mut consume_colon: bool,
     ) -> PResult<'a, P<Expr>> {
-        let lo = label.ident.span;
-        let label = Some(label);
+        let lo = label_.ident.span;
+        let label = Some(label_);
         let ate_colon = self.eat(&token::Colon);
         let expr = if self.eat_keyword(kw::While) {
-            self.parse_while_expr(label, lo, attrs)
+            self.parse_expr_while(label, lo)
         } else if self.eat_keyword(kw::For) {
-            self.parse_for_expr(label, lo, attrs)
+            self.parse_expr_for(label, lo)
         } else if self.eat_keyword(kw::Loop) {
-            self.parse_loop_expr(label, lo, attrs)
+            self.parse_expr_loop(label, lo)
         } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
             || self.token.is_whole_block()
         {
-            self.parse_block_expr(label, lo, BlockCheckMode::Default, attrs)
+            self.parse_expr_block(label, lo, BlockCheckMode::Default)
+        } else if !ate_colon
+            && self.may_recover()
+            && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
+                || self.token.is_punct())
+            && could_be_unclosed_char_literal(label_.ident)
+        {
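+            // The "label" is more likely an unclosed char literal (e.g. `'a,` or `'a]`), so
+            // recover it as a char expression and don't require the usual trailing `:`.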
+            let (lit, _) =
+                self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| {
+                    self_.dcx().create_err(errors::UnexpectedTokenAfterLabel {
+                        span: self_.token.span,
+                        remove_label: None,
+                        enclose_in_block: None,
+                    })
+                });
+            consume_colon = false;
+            Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
         } else if !ate_colon
             && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
         {
             // We're probably inside of a `Path<'a>` that needs a turbofish
-            let msg = "expected `while`, `for`, `loop` or `{` after a label";
-            self.struct_span_err(self.token.span, msg).span_label(self.token.span, msg).emit();
+            self.dcx().emit_err(errors::UnexpectedTokenAfterLabel {
+                span: self.token.span,
+                remove_label: None,
+                enclose_in_block: None,
+            });
             consume_colon = false;
             Ok(self.mk_expr_err(lo))
         } else {
-            let msg = "expected `while`, `for`, `loop` or `{` after a label";
-
-            let mut err = self.struct_span_err(self.token.span, msg);
-            err.span_label(self.token.span, msg);
+            let mut err = errors::UnexpectedTokenAfterLabel {
+                span: self.token.span,
+                remove_label: None,
+                enclose_in_block: None,
+            };
 
             // Continue as an expression in an effort to recover on `'label: non_block_expr`.
             let expr = self.parse_expr().map(|expr| {
@@ -1591,83 +1716,83 @@ impl<'a> Parser<'a> {
                     vis.0
                 };
 
-                // Suggestion involves adding a (as of time of writing this, unstable) labeled block.
+                // Suggestion involves adding a labeled block.
                 //
                 // If there are no breaks that may use this label, suggest removing the label and
                 // recover to the unmodified expression.
                 if !found_labeled_breaks {
-                    let msg = "consider removing the label";
-                    err.span_suggestion_verbose(
-                        lo.until(span),
-                        msg,
-                        "",
-                        Applicability::MachineApplicable,
-                    );
+                    err.remove_label = Some(lo.until(span));
 
                     return expr;
                 }
 
-                let sugg_msg = "consider enclosing expression in a block";
-                let suggestions = vec![
-                    (span.shrink_to_lo(), "{ ".to_owned()),
-                    (span.shrink_to_hi(), " }".to_owned()),
-                ];
+                err.enclose_in_block = Some(errors::UnexpectedTokenAfterLabelSugg {
+                    left: span.shrink_to_lo(),
+                    right: span.shrink_to_hi(),
+                });
 
-                err.multipart_suggestion_verbose(
-                    sugg_msg,
-                    suggestions,
-                    Applicability::MachineApplicable,
-                );
-
-                // Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to supress future errors about `break 'label`.
+                // Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
                 let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
-                let blk = self.mk_block(vec![stmt], BlockCheckMode::Default, span);
-                self.mk_expr(span, ExprKind::Block(blk, label), ThinVec::new())
+                let blk = self.mk_block(thin_vec![stmt], BlockCheckMode::Default, span);
+                self.mk_expr(span, ExprKind::Block(blk, label))
             });
 
-            err.emit();
+            self.dcx().emit_err(err);
             expr
         }?;
 
         if !ate_colon && consume_colon {
-            self.error_labeled_expr_must_be_followed_by_colon(lo, expr.span);
+            self.dcx().emit_err(errors::RequireColonAfterLabeledExpression {
+                span: expr.span,
+                label: lo,
+                label_end: lo.shrink_to_hi(),
+            });
         }
 
         Ok(expr)
     }
 
-    fn error_labeled_expr_must_be_followed_by_colon(&self, lo: Span, span: Span) {
-        self.struct_span_err(span, "labeled expression must be followed by `:`")
-            .span_label(lo, "the label")
-            .span_suggestion_short(
-                lo.shrink_to_hi(),
-                "add `:` after the label",
-                ": ",
-                Applicability::MachineApplicable,
+    /// Emit an error when a char is parsed as a lifetime or label because of a missing quote.
+    pub(super) fn recover_unclosed_char<L>(
+        &self,
+        ident: Ident,
+        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
+        err: impl FnOnce(&Self) -> DiagnosticBuilder<'a>,
+    ) -> L {
+        assert!(could_be_unclosed_char_literal(ident));
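+        // If a diagnostic was stashed for this span under `StashKey::LifetimeIsChar`, reuse
+        // it; otherwise build the caller-provided error. Either way, suggest adding the
+        // closing `'`.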
+        if let Some(diag) = self.dcx().steal_diagnostic(ident.span, StashKey::LifetimeIsChar) {
+            diag.with_span_suggestion_verbose(
+                ident.span.shrink_to_hi(),
+                "add `'` to close the char literal",
+                "'",
+                Applicability::MaybeIncorrect,
             )
-            .note("labels are used before loops and blocks, allowing e.g., `break 'label` to them")
             .emit();
+        } else {
+            err(self)
+                .with_span_suggestion_verbose(
+                    ident.span.shrink_to_hi(),
+                    "add `'` to close the char literal",
+                    "'",
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+        }
+        let name = ident.without_first_quote().name;
+        mk_lit_char(name, ident.span)
     }
 
     /// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead.
-    fn recover_do_catch(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn recover_do_catch(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
         self.bump(); // `do`
         self.bump(); // `catch`
 
-        let span_dc = lo.to(self.prev_token.span);
-        self.struct_span_err(span_dc, "found removed `do catch` syntax")
-            .span_suggestion(
-                span_dc,
-                "replace with the new syntax",
-                "try",
-                Applicability::MachineApplicable,
-            )
-            .note("following RFC #2388, the new non-placeholder syntax is `try`")
-            .emit();
+        let span = lo.to(self.prev_token.span);
+        self.dcx().emit_err(errors::DoCatchSyntaxRemoved { span });
 
-        self.parse_try_block(lo, attrs)
+        self.parse_try_block(lo)
     }
 
     /// Parse an expression if the token can begin one.
@@ -1676,15 +1801,15 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `"return" expr?`.
-    fn parse_return_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_return(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Ret(self.parse_expr_opt()?);
-        let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs);
+        let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
     /// Parse `"do" "yeet" expr?`.
-    fn parse_yeet_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_yeet(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
         self.bump(); // `do`
@@ -1694,7 +1819,17 @@ impl<'a> Parser<'a> {
 
         let span = lo.to(self.prev_token.span);
         self.sess.gated_spans.gate(sym::yeet_expr, span);
-        let expr = self.mk_expr(span, kind, attrs);
+        let expr = self.mk_expr(span, kind);
+        self.maybe_recover_from_bad_qpath(expr)
+    }
+
+    /// Parse `"become" expr`, with `"become"` token already eaten.
+    fn parse_expr_become(&mut self) -> PResult<'a, P<Expr>> {
+        let lo = self.prev_token.span;
+        let kind = ExprKind::Become(self.parse_expr()?);
+        let span = lo.to(self.prev_token.span);
+        self.sess.gated_spans.gate(sym::explicit_tail_calls, span);
+        let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
@@ -1706,38 +1841,34 @@ impl<'a> Parser<'a> {
     /// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
     /// expression only gets a warning for compatibility reasons; and a labeled break
     /// with a labeled loop does not even get a warning because there is no ambiguity.
-    fn parse_break_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_break(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let mut label = self.eat_label();
-        let kind = if label.is_some() && self.token == token::Colon {
+        let kind = if self.token == token::Colon
+            && let Some(label) = label.take()
+        {
             // The value expression can be a labeled loop, see issue #86948, e.g.:
             // `loop { break 'label: loop { break 'label 42; }; }`
-            let lexpr = self.parse_labeled_expr(label.take().unwrap(), AttrVec::new(), true)?;
-            self.struct_span_err(
-                lexpr.span,
-                "parentheses are required around this expression to avoid confusion with a labeled break expression",
-            )
-            .multipart_suggestion(
-                "wrap the expression in parentheses",
-                vec![
-                    (lexpr.span.shrink_to_lo(), "(".to_string()),
-                    (lexpr.span.shrink_to_hi(), ")".to_string()),
-                ],
-                Applicability::MachineApplicable,
-            )
-            .emit();
+            let lexpr = self.parse_expr_labeled(label, true)?;
+            self.dcx().emit_err(errors::LabeledLoopInBreak {
+                span: lexpr.span,
+                sub: errors::WrapInParentheses::Expression {
+                    left: lexpr.span.shrink_to_lo(),
+                    right: lexpr.span.shrink_to_hi(),
+                },
+            });
             Some(lexpr)
         } else if self.token != token::OpenDelim(Delimiter::Brace)
             || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
         {
-            let expr = self.parse_expr_opt()?;
-            if let Some(ref expr) = expr {
+            let mut expr = self.parse_expr_opt()?;
+            if let Some(expr) = &mut expr {
                 if label.is_some()
                     && matches!(
                         expr.kind,
                         ExprKind::While(_, _, None)
-                            | ExprKind::ForLoop(_, _, _, None)
-                            | ExprKind::Loop(_, None)
+                            | ExprKind::ForLoop { label: None, .. }
+                            | ExprKind::Loop(_, None, _)
                             | ExprKind::Block(_, None)
                     )
                 {
@@ -1749,35 +1880,135 @@ impl<'a> Parser<'a> {
                         BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
                     );
                 }
+
+                // Recover `break label aaaaa`
+                if self.may_recover()
+                    && let ExprKind::Path(None, p) = &expr.kind
+                    && let [segment] = &*p.segments
+                    && let &ast::PathSegment { ident, args: None, .. } = segment
+                    && let Some(next) = self.parse_expr_opt()?
+                {
+                    label = Some(self.recover_ident_into_label(ident));
+                    *expr = next;
+                }
             }
+
             expr
         } else {
             None
         };
-        let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind), attrs);
+        let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Break(label, kind));
         self.maybe_recover_from_bad_qpath(expr)
     }
 
+    /// Parse `"continue" label?`.
+    fn parse_expr_continue(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let mut label = self.eat_label();
+
+        // Recover `continue label` -> `continue 'label`
+        if self.may_recover()
+            && label.is_none()
+            && let Some((ident, _)) = self.token.ident()
+        {
+            self.bump();
+            label = Some(self.recover_ident_into_label(ident));
+        }
+
+        let kind = ExprKind::Continue(label);
+        Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
+    }
+
     /// Parse `"yield" expr?`.
-    fn parse_yield_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_yield(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Yield(self.parse_expr_opt()?);
         let span = lo.to(self.prev_token.span);
-        self.sess.gated_spans.gate(sym::generators, span);
-        let expr = self.mk_expr(span, kind, attrs);
+        self.sess.gated_spans.gate(sym::yield_expr, span);
+        let expr = self.mk_expr(span, kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
+    /// Parse `builtin # ident(args,*)`.
+    fn parse_expr_builtin(&mut self) -> PResult<'a, P<Expr>> {
+        self.parse_builtin(|this, lo, ident| {
+            if ident.name == sym::offset_of {
+                return Ok(Some(this.parse_expr_offset_of(lo)?));
+            }
+
+            Ok(None)
+        })
+    }
+
+    pub(crate) fn parse_builtin<T>(
+        &mut self,
+        parse: impl FnOnce(&mut Parser<'a>, Span, Ident) -> PResult<'a, Option<T>>,
+    ) -> PResult<'a, T> {
+        let lo = self.token.span;
+
+        self.bump(); // `builtin`
+        self.bump(); // `#`
+
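+        // After `builtin #`, the next token must be a non-raw identifier naming the construct.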
+        let Some((ident, false)) = self.token.ident() else {
+            let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
+            return Err(err);
+        };
+        self.sess.gated_spans.gate(sym::builtin_syntax, ident.span);
+        self.bump();
+
+        self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?;
+        let ret = if let Some(res) = parse(self, lo, ident)? {
+            Ok(res)
+        } else {
+            let err = self.dcx().create_err(errors::UnknownBuiltinConstruct {
+                span: lo.to(ident.span),
+                name: ident.name,
+            });
+            return Err(err);
+        };
+        self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
+
+        ret
+    }
+
+    pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
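+        // Parses the container type, requires a comma, then collects the dot-separated field
+        // chain; anything left before the closing parenthesis is reported and, when recovery
+        // is enabled, skipped.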
+        let container = self.parse_ty()?;
+        self.expect(&TokenKind::Comma)?;
+
+        let fields = self.parse_floating_field_access()?;
+        let trailing_comma = self.eat_noexpect(&TokenKind::Comma);
+
+        if let Err(mut e) =
+            self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)])
+        {
+            if trailing_comma {
+                e.note("unexpected third argument to offset_of");
+            } else {
+                e.note("offset_of expects dot-separated field and variant names");
+            }
+            e.emit();
+        }
+
+        // Eat tokens until the macro call ends.
+        if self.may_recover() {
+            while !matches!(self.token.kind, token::CloseDelim(..) | token::Eof) {
+                self.bump();
+            }
+        }
+
+        let span = lo.to(self.token.span);
+        Ok(self.mk_expr(span, ExprKind::OffsetOf(container, fields)))
+    }
+
     /// Returns a string literal if the next token is a string literal.
     /// In case of error, returns `Some(lit)` if the next token is a literal of the wrong kind,
     /// and returns `None` if the next token is not a literal at all.
-    pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
-        match self.parse_opt_lit() {
+    pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
+        match self.parse_opt_meta_item_lit() {
             Some(lit) => match lit.kind {
                 ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
                     style,
-                    symbol: lit.token.symbol,
-                    suffix: lit.token.suffix,
+                    symbol: lit.symbol,
+                    suffix: lit.suffix,
                     span: lit.span,
                     symbol_unescaped,
                 }),
@@ -1787,32 +2018,63 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        self.parse_opt_lit().ok_or_else(|| {
-            if let token::Interpolated(inner) = &self.token.kind {
-                let expr = match inner.as_ref() {
-                    token::NtExpr(expr) => Some(expr),
-                    token::NtLiteral(expr) => Some(expr),
-                    _ => None,
-                };
-                if let Some(expr) = expr {
-                    if matches!(expr.kind, ExprKind::Err) {
-                        let mut err = self
-                            .diagnostic()
-                            .struct_span_err(self.token.span, "invalid interpolated expression");
-                        err.downgrade_to_delayed_bug();
-                        return err;
-                    }
-                }
-            }
-            let msg = format!("unexpected token: {}", super::token_descr(&self.token));
-            self.struct_span_err(self.token.span, &msg)
-        })
+    pub(crate) fn mk_token_lit_char(name: Symbol, span: Span) -> (token::Lit, Span) {
+        (token::Lit { symbol: name, suffix: None, kind: token::Char }, span)
     }
 
-    /// Matches `lit = true | false | token_lit`.
-    /// Returns `None` if the next token is not a literal.
-    pub(super) fn parse_opt_lit(&mut self) -> Option<Lit> {
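+    /// Builds a char `MetaItemLit` from a recovered symbol, e.g. turning the `a` of an unclosed
+    /// `'a` into the char literal `'a'`.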
+    fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit {
+        ast::MetaItemLit {
+            symbol: name,
+            suffix: None,
+            kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
+            span,
+        }
+    }
+
+    fn handle_missing_lit<L>(
+        &mut self,
+        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
+    ) -> PResult<'a, L> {
+        if let token::Interpolated(nt) = &self.token.kind
+            && let token::NtExpr(e) | token::NtLiteral(e) = &nt.0
+            && matches!(e.kind, ExprKind::Err)
+        {
+            let mut err = self
+                .dcx()
+                .create_err(errors::InvalidInterpolatedExpression { span: self.token.span });
+            err.downgrade_to_delayed_bug();
+            return Err(err);
+        }
+        let token = self.token.clone();
+        let err = |self_: &Self| {
+            let msg = format!("unexpected token: {}", super::token_descr(&token));
+            self_.dcx().struct_span_err(token.span, msg)
+        };
+        // On an error path, eagerly consider a lifetime to be an unclosed character lit, if that
+        // makes sense.
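+        // (e.g. a stray `'a` where the char literal `'a'` was most likely intended).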
+        if let Some(ident) = self.token.lifetime()
+            && could_be_unclosed_char_literal(ident)
+        {
+            let lt = self.expect_lifetime();
+            Ok(self.recover_unclosed_char(lt.ident, mk_lit_char, err))
+        } else {
+            Err(err(self))
+        }
+    }
+
+    pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
+        self.parse_opt_token_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit(Parser::mk_token_lit_char))
+    }
+
+    pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
+        self.parse_opt_meta_item_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char))
+    }
+
+    fn recover_after_dot(&mut self) -> Option<Token> {
         let mut recovered = None;
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
@@ -1821,7 +2083,16 @@ impl<'a> Parser<'a> {
                 if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                     next_token.kind
                 {
-                    if self.token.span.hi() == next_token.span.lo() {
+                    // If this integer looks like a float, then recover as such.
+                    //
+                    // We will never encounter the exponent part of a floating
+                    // point literal here, since there's no use of the exponent
+                    // syntax that also constitutes a valid integer, so we need
+                    // not check for that.
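+                    // (e.g. `.4` is recovered as `0.4` and `.4f32` as `0.4f32`, while `.4u32`
+                    // is left alone because `u32` is not a float suffix).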
+                    if suffix.map_or(true, |s| s == sym::f32 || s == sym::f64)
+                        && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
+                        && self.token.span.hi() == next_token.span.lo()
+                    {
                         let s = String::from("0.") + symbol.as_str();
                         let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                         return Some(Token::new(kind, self.token.span.to(next_token.span)));
@@ -1831,172 +2102,76 @@ impl<'a> Parser<'a> {
             });
             if let Some(token) = &recovered {
                 self.bump();
-                self.error_float_lits_must_have_int_part(&token);
-            }
-        }
-
-        let token = recovered.as_ref().unwrap_or(&self.token);
-        match Lit::from_token(token) {
-            Ok(lit) => {
-                self.bump();
-                Some(lit)
-            }
-            Err(LitError::NotLiteral) => None,
-            Err(err) => {
-                let span = token.span;
-                let token::Literal(lit) = token.kind else {
-                    unreachable!();
-                };
-                self.bump();
-                self.report_lit_error(err, lit, span);
-                // Pack possible quotes and prefixes from the original literal into
-                // the error literal's symbol so they can be pretty-printed faithfully.
-                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
-                let symbol = Symbol::intern(&suffixless_lit.to_string());
-                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
-                Some(Lit::from_lit_token(lit, span).unwrap_or_else(|_| unreachable!()))
+                self.dcx().emit_err(errors::FloatLiteralRequiresIntegerPart {
+                    span: token.span,
+                    correct: pprust::token_to_string(token).into_owned(),
+                });
             }
         }
-    }
 
-    fn error_float_lits_must_have_int_part(&self, token: &Token) {
-        self.struct_span_err(token.span, "float literals must have an integer part")
-            .span_suggestion(
-                token.span,
-                "must have an integer part",
-                pprust::token_to_string(token),
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        recovered
     }
 
-    fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
-        // Checks if `s` looks like i32 or u1234 etc.
-        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-            s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
-        }
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+        let recovered = self.recover_after_dot();
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        let span = token.span;
 
-        // Try to lowercase the prefix if it's a valid base prefix.
-        fn fix_base_capitalisation(s: &str) -> Option<String> {
-            if let Some(stripped) = s.strip_prefix('B') {
-                Some(format!("0b{stripped}"))
-            } else if let Some(stripped) = s.strip_prefix('O') {
-                Some(format!("0o{stripped}"))
-            } else if let Some(stripped) = s.strip_prefix('X') {
-                Some(format!("0x{stripped}"))
-            } else {
-                None
-            }
-        }
+        token::Lit::from_token(token).map(|token_lit| {
+            self.bump();
+            (token_lit, span)
+        })
+    }
 
-        let token::Lit { kind, suffix, .. } = lit;
-        match err {
-            // `NotLiteral` is not an error by itself, so we don't report
-            // it and give the parser opportunity to try something else.
-            LitError::NotLiteral => {}
-            // `LexerError` *is* an error, but it was already reported
-            // by lexer, so here we don't report it the second time.
-            LitError::LexerError => {}
-            LitError::InvalidSuffix => {
-                self.expect_no_suffix(
-                    span,
-                    &format!("{} {} literal", kind.article(), kind.descr()),
-                    suffix,
-                );
-            }
-            LitError::InvalidIntSuffix => {
-                let suf = suffix.expect("suffix error with no suffix");
-                let suf = suf.as_str();
-                if looks_like_width_suffix(&['i', 'u'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
-                    self.struct_span_err(span, &msg)
-                        .help("valid widths are 8, 16, 32, 64 and 128")
-                        .emit();
-                } else if let Some(fixed) = fix_base_capitalisation(suf) {
-                    let msg = "invalid base prefix for number literal";
-
-                    self.struct_span_err(span, msg)
-                        .note("base prefixes (`0xff`, `0b1010`, `0o755`) are lowercase")
-                        .span_suggestion(
-                            span,
-                            "try making the prefix lowercase",
-                            fixed,
-                            Applicability::MaybeIncorrect,
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
+        let recovered = self.recover_after_dot();
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match token::Lit::from_token(token) {
+            Some(lit) => {
+                match MetaItemLit::from_token_lit(lit, token.span) {
+                    Ok(lit) => {
+                        self.bump();
+                        Some(lit)
+                    }
+                    Err(err) => {
+                        let span = token.uninterpolated_span();
+                        self.bump();
+                        report_lit_error(self.sess, err, lit, span);
+                        // Pack possible quotes and prefixes from the original literal into
+                        // the error literal's symbol so they can be pretty-printed faithfully.
+                        let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+                        let symbol = Symbol::intern(&suffixless_lit.to_string());
+                        let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+                        Some(
+                            MetaItemLit::from_token_lit(lit, span)
+                                .unwrap_or_else(|_| unreachable!()),
                         )
-                        .emit();
-                } else {
-                    let msg = format!("invalid suffix `{suf}` for number literal");
-                    self.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{suf}`"))
-                        .help("the suffix must be one of the numeric types (`u32`, `isize`, `f32`, etc.)")
-                        .emit();
-                }
-            }
-            LitError::InvalidFloatSuffix => {
-                let suf = suffix.expect("suffix error with no suffix");
-                let suf = suf.as_str();
-                if looks_like_width_suffix(&['f'], suf) {
-                    // If it looks like a width, try to be helpful.
-                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
-                    self.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit();
-                } else {
-                    let msg = format!("invalid suffix `{suf}` for float literal");
-                    self.struct_span_err(span, &msg)
-                        .span_label(span, format!("invalid suffix `{suf}`"))
-                        .help("valid suffixes are `f32` and `f64`")
-                        .emit();
+                    }
                 }
             }
-            LitError::NonDecimalFloat(base) => {
-                let descr = match base {
-                    16 => "hexadecimal",
-                    8 => "octal",
-                    2 => "binary",
-                    _ => unreachable!(),
-                };
-                self.struct_span_err(span, &format!("{descr} float literal is not supported"))
-                    .span_label(span, "not supported")
-                    .emit();
-            }
-            LitError::IntTooLarge => {
-                self.struct_span_err(span, "integer literal is too large").emit();
-            }
+            None => None,
         }
     }
 
-    pub(super) fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<Symbol>) {
-        if let Some(suf) = suffix {
-            let mut err = if kind == "a tuple index"
-                && [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf)
-            {
-                // #59553: warn instead of reject out of hand to allow the fix to percolate
-                // through the ecosystem when people fix their macros
-                let mut err = self
-                    .sess
-                    .span_diagnostic
-                    .struct_span_warn(sp, &format!("suffixes on {kind} are invalid"));
-                err.note(&format!(
-                    "`{}` is *temporarily* accepted on tuple index fields as it was \
-                        incorrectly accepted on stable for a few releases",
-                    suf,
-                ));
-                err.help(
-                    "on proc macros, you'll want to use `syn::Index::from` or \
-                        `proc_macro::Literal::*_unsuffixed` for code that will desugar \
-                        to tuple field access",
-                );
-                err.note(
-                    "see issue #60210 <https://github.com/rust-lang/rust/issues/60210> \
-                     for more information",
-                );
-                err
-            } else {
-                self.struct_span_err(sp, &format!("suffixes on {kind} are invalid"))
-                    .forget_guarantee()
-            };
-            err.span_label(sp, format!("invalid suffix `{suf}`"));
-            err.emit();
+    pub(super) fn expect_no_tuple_index_suffix(&self, span: Span, suffix: Symbol) {
+        if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
+            // #59553: warn instead of reject out of hand to allow the fix to percolate
+            // through the ecosystem when people fix their macros
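+            // (e.g. `tuple.0u32`, which some macro expansions used to produce).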
+            self.dcx().emit_warn(errors::InvalidLiteralSuffixOnTupleIndex {
+                span,
+                suffix,
+                exception: Some(()),
+            });
+        } else {
+            self.dcx().emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
+                span,
+                suffix,
+                exception: None,
+            });
         }
     }
 
@@ -2007,15 +2182,11 @@ impl<'a> Parser<'a> {
 
         let lo = self.token.span;
         let minus_present = self.eat(&token::BinOp(token::Minus));
-        let lit = self.parse_lit()?;
-        let expr = self.mk_expr(lit.span, ExprKind::Lit(lit), AttrVec::new());
+        let (token_lit, span) = self.parse_token_lit()?;
+        let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
 
         if minus_present {
-            Ok(self.mk_expr(
-                lo.to(self.prev_token.span),
-                self.mk_unary(UnOp::Neg, expr),
-                AttrVec::new(),
-            ))
+            Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
         } else {
             Ok(expr)
         }
@@ -2030,22 +2201,17 @@ impl<'a> Parser<'a> {
     /// Emits a suggestion if it looks like the user meant an array but
     /// accidentally used braces, causing the code to be interpreted as a block
     /// expression.
-    fn maybe_suggest_brackets_instead_of_braces(
-        &mut self,
-        lo: Span,
-        attrs: AttrVec,
-    ) -> Option<P<Expr>> {
+    fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
         let mut snapshot = self.create_snapshot_for_diagnostic();
-        match snapshot.parse_array_or_repeat_expr(attrs, Delimiter::Brace) {
+        match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
             Ok(arr) => {
-                let hi = snapshot.prev_token.span;
-                self.struct_span_err(arr.span, "this is a block expression, not an array")
-                    .multipart_suggestion(
-                        "to make an array, use square brackets instead of curly braces",
-                        vec![(lo, "[".to_owned()), (hi, "]".to_owned())],
-                        Applicability::MaybeIncorrect,
-                    )
-                    .emit();
+                self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
+                    span: arr.span,
+                    sub: errors::ArrayBracketsInsteadOfSpacesSugg {
+                        left: lo,
+                        right: snapshot.prev_token.span,
+                    },
+                });
 
                 self.restore_snapshot(snapshot);
                 Some(self.mk_expr_err(arr.span))
@@ -2057,45 +2223,87 @@ impl<'a> Parser<'a> {
         }
     }
 
+    fn suggest_missing_semicolon_before_array(
+        &self,
+        prev_span: Span,
+        open_delim_span: Span,
+    ) -> PResult<'a, ()> {
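+        // Detects the case where the expression before a `[` and the bracketed list after it
+        // were written on separate lines and were most likely meant to be two statements,
+        // suggesting the missing `;` between them.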
+        if !self.may_recover() {
+            return Ok(());
+        }
+
+        if self.token.kind == token::Comma {
+            if !self.sess.source_map().is_multiline(prev_span.until(self.token.span)) {
+                return Ok(());
+            }
+            let mut snapshot = self.create_snapshot_for_diagnostic();
+            snapshot.bump();
+            match snapshot.parse_seq_to_before_end(
+                &token::CloseDelim(Delimiter::Bracket),
+                SeqSep::trailing_allowed(token::Comma),
+                |p| p.parse_expr(),
+            ) {
+                Ok(_)
+                    // When the close delim is `)`, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
+                    // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`.
+                    // This is because the `token.kind` of the closing delimiter is treated as the same as
+                    // that of the opening delimiter in `TokenTreesReader::parse_token_tree`, even when the two delimiters differ.
+                    // Therefore, `token.kind` should not be compared here.
+                    if snapshot
+                        .span_to_snippet(snapshot.token.span)
+                        .is_ok_and(|snippet| snippet == "]") =>
+                {
+                    return Err(self.dcx().create_err(errors::MissingSemicolonBeforeArray {
+                        open_delim: open_delim_span,
+                        semicolon: prev_span.shrink_to_hi(),
+                    }));
+                }
+                Ok(_) => (),
+                Err(err) => err.cancel(),
+            }
+        }
+        Ok(())
+    }
+
     /// Parses a block or unsafe block.
-    pub(super) fn parse_block_expr(
+    pub(super) fn parse_expr_block(
         &mut self,
         opt_label: Option<Label>,
         lo: Span,
         blk_mode: BlockCheckMode,
-        mut attrs: AttrVec,
     ) -> PResult<'a, P<Expr>> {
-        if self.is_array_like_block() {
-            if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo, attrs.clone()) {
+        if self.may_recover() && self.is_array_like_block() {
+            if let Some(arr) = self.maybe_suggest_brackets_instead_of_braces(lo) {
                 return Ok(arr);
             }
         }
 
-        if let Some(label) = opt_label {
-            self.sess.gated_spans.gate(sym::label_break_value, label.ident.span);
-        }
-
         if self.token.is_whole_block() {
-            self.struct_span_err(self.token.span, "cannot use a `block` macro fragment here")
-                .span_label(lo.to(self.token.span), "the `block` fragment is within this context")
-                .emit();
+            self.dcx().emit_err(errors::InvalidBlockMacroSegment {
+                span: self.token.span,
+                context: lo.to(self.token.span),
+                wrap: errors::WrapInExplicitBlock {
+                    lo: self.token.span.shrink_to_lo(),
+                    hi: self.token.span.shrink_to_hi(),
+                },
+            });
         }
 
-        let (inner_attrs, blk) = self.parse_block_common(lo, blk_mode)?;
-        attrs.extend(inner_attrs);
-        Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs))
+        let (attrs, blk) = self.parse_block_common(lo, blk_mode, true)?;
+        Ok(self.mk_expr_with_attrs(blk.span, ExprKind::Block(blk, opt_label), attrs))
     }
 
     /// Parse a block which takes no attributes and has no label
     fn parse_simple_block(&mut self) -> PResult<'a, P<Expr>> {
         let blk = self.parse_block()?;
-        Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()))
+        Ok(self.mk_expr(blk.span, ExprKind::Block(blk, None)))
     }
 
     /// Parses a closure expression (e.g., `move |args| expr`).
-    fn parse_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
+        let before = self.prev_token.clone();
         let binder = if self.check_keyword(kw::For) {
             let lo = self.token.span;
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
@@ -2103,42 +2311,59 @@ impl<'a> Parser<'a> {
 
             self.sess.gated_spans.gate(sym::closure_lifetime_binder, span);
 
-            ClosureBinder::For { span, generic_params: P::from_vec(lifetime_defs) }
+            ClosureBinder::For { span, generic_params: lifetime_defs }
         } else {
             ClosureBinder::NotPresent
         };
 
+        let constness = self.parse_closure_constness();
+
         let movability =
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
 
-        let asyncness = if self.token.uninterpolated_span().rust_2018() {
-            self.parse_asyncness()
+        let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() {
+            self.parse_coroutine_kind(Case::Sensitive)
         } else {
-            Async::No
+            None
         };
 
         let capture_clause = self.parse_capture_clause()?;
-        let decl = self.parse_fn_block_decl()?;
+        let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?;
         let decl_hi = self.prev_token.span;
-        let mut body = match decl.output {
+        let mut body = match fn_decl.output {
             FnRetTy::Default(_) => {
-                let restrictions = self.restrictions - Restrictions::STMT_EXPR;
-                self.parse_expr_res(restrictions, None)?
+                let restrictions =
+                    self.restrictions - Restrictions::STMT_EXPR - Restrictions::ALLOW_LET;
+                let prev = self.prev_token.clone();
+                let token = self.token.clone();
+                match self.parse_expr_res(restrictions, None) {
+                    Ok(expr) => expr,
+                    Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?,
+                }
             }
             _ => {
                 // If an explicit return type is given, require a block to appear (RFC 968).
                 let body_lo = self.token.span;
-                self.parse_block_expr(None, body_lo, BlockCheckMode::Default, AttrVec::new())?
+                self.parse_expr_block(None, body_lo, BlockCheckMode::Default)?
             }
         };
 
-        if let Async::Yes { span, .. } = asyncness {
-            // Feature-gate `async ||` closures.
-            self.sess.gated_spans.gate(sym::async_closure, span);
+        match coroutine_kind {
+            Some(CoroutineKind::Async { span, .. }) => {
+                // Feature-gate `async ||` closures.
+                self.sess.gated_spans.gate(sym::async_closure, span);
+            }
+            Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
+                // Feature-gate `gen ||` and `async gen ||` closures.
+                // FIXME(gen_blocks): This perhaps should be a different gate.
+                self.sess.gated_spans.gate(sym::gen_blocks, span);
+            }
+            None => {}
         }
 
         if self.token.kind == TokenKind::Semi
-            && matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
+            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
+            && self.may_recover()
         {
             // It is likely that the closure body is a block whose braces have
             // been removed. We will recover and eat the next
@@ -2150,16 +2375,17 @@ impl<'a> Parser<'a> {
 
         let closure = self.mk_expr(
             lo.to(body.span),
-            ExprKind::Closure(
+            ExprKind::Closure(Box::new(ast::Closure {
                 binder,
                 capture_clause,
-                asyncness,
+                constness,
+                coroutine_kind,
                 movability,
-                decl,
+                fn_decl,
                 body,
-                lo.to(decl_hi),
-            ),
-            attrs,
+                fn_decl_span: lo.to(decl_hi),
+                fn_arg_span,
+            })),
         );
 
         // Disable recovery for closure body
@@ -2173,12 +2399,15 @@ impl<'a> Parser<'a> {
     /// Parses an optional `move` prefix to a closure-like construct.
     fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
         if self.eat_keyword(kw::Move) {
+            let move_kw_span = self.prev_token.span;
             // Check for `move async` and recover
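+            // (e.g. `move async || {}`, where the user meant `async move || {}`).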
             if self.check_keyword(kw::Async) {
                 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
-                Err(self.incorrect_move_async_order_found(move_async_span))
+                Err(self
+                    .dcx()
+                    .create_err(errors::AsyncMoveOrderIncorrect { span: move_async_span }))
             } else {
-                Ok(CaptureBy::Value)
+                Ok(CaptureBy::Value { move_kw: move_kw_span })
             }
         } else {
             Ok(CaptureBy::Ref)
@@ -2186,9 +2415,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the `|arg, arg|` header of a closure.
-    fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
+    fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
+        let arg_start = self.token.span.lo();
+
         let inputs = if self.eat(&token::OrOr) {
-            Vec::new()
+            ThinVec::new()
         } else {
             self.expect(&token::BinOp(token::Or))?;
             let args = self
@@ -2202,10 +2433,11 @@ impl<'a> Parser<'a> {
             self.expect_or()?;
             args
         };
+        let arg_span = self.prev_token.span.with_lo(arg_start);
         let output =
             self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
 
-        Ok(P(FnDecl { inputs, output }))
+        Ok((P(FnDecl { inputs, output }), arg_span))
     }
 
     /// Parses a parameter in a closure header (e.g., `|arg, arg|`).
@@ -2213,19 +2445,19 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         let attrs = self.parse_outer_attributes()?;
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
-            let pat = this.parse_pat_no_top_alt(PARAM_EXPECTED)?;
+            let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
             let ty = if this.eat(&token::Colon) {
                 this.parse_ty()?
             } else {
-                this.mk_ty(this.prev_token.span, TyKind::Infer)
+                this.mk_ty(pat.span, TyKind::Infer)
             };
 
             Ok((
                 Param {
-                    attrs: attrs.into(),
+                    attrs,
                     ty,
                     pat,
-                    span: lo.to(this.token.span),
+                    span: lo.to(this.prev_token.span),
                     id: DUMMY_NODE_ID,
                     is_placeholder: false,
                 },
@@ -2235,19 +2467,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an `if` expression (`if` token already eaten).
-    fn parse_if_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_if(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let cond = self.parse_cond_expr()?;
-
-        self.parse_if_after_cond(attrs, lo, cond)
+        let cond = self.parse_expr_cond()?;
+        self.parse_if_after_cond(lo, cond)
     }
 
-    fn parse_if_after_cond(
-        &mut self,
-        attrs: AttrVec,
-        lo: Span,
-        mut cond: P<Expr>,
-    ) -> PResult<'a, P<Expr>> {
+    fn parse_if_after_cond(&mut self, lo: Span, mut cond: P<Expr>) -> PResult<'a, P<Expr>> {
         let cond_span = cond.span;
         // Tries to interpret `cond` as either a missing expression if it's a block,
         // or as an unfinished expression if it's a binop and the RHS is a block.
@@ -2255,12 +2481,23 @@ impl<'a> Parser<'a> {
         let mut recover_block_from_condition = |this: &mut Self| {
             let block = match &mut cond.kind {
                 ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
-                    if let ExprKind::Block(_, None) = right.kind => {
-                        this.error_missing_if_then_block(lo, cond_span.shrink_to_lo().to(*binop_span), true).emit();
-                        std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
-                    },
+                    if let ExprKind::Block(_, None) = right.kind =>
+                {
+                    this.dcx().emit_err(errors::IfExpressionMissingThenBlock {
+                        if_span: lo,
+                        missing_then_block_sub:
+                            errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(
+                                cond_span.shrink_to_lo().to(*binop_span),
+                            ),
+                        let_else_sub: None,
+                    });
+                    std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
+                }
                 ExprKind::Block(_, None) => {
-                    this.error_missing_if_cond(lo, cond_span).emit();
+                    this.dcx().emit_err(errors::IfExpressionMissingCondition {
+                        if_span: lo.with_neighbor(cond.span).shrink_to_hi(),
+                        block_span: self.sess.source_map().start_point(cond_span),
+                    });
                     std::mem::replace(&mut cond, this.mk_expr_err(cond_span.shrink_to_hi()))
                 }
                 _ => {
@@ -2278,98 +2515,104 @@ impl<'a> Parser<'a> {
             if let Some(block) = recover_block_from_condition(self) {
                 block
             } else {
-                self.error_missing_if_then_block(lo, cond_span, false).emit();
+                let let_else_sub = matches!(cond.kind, ExprKind::Let(..))
+                    .then(|| errors::IfExpressionLetSomeSub { if_span: lo.until(cond_span) });
+
+                self.dcx().emit_err(errors::IfExpressionMissingThenBlock {
+                    if_span: lo,
+                    missing_then_block_sub: errors::IfExpressionMissingThenBlockSub::AddThenBlock(
+                        cond_span.shrink_to_hi(),
+                    ),
+                    let_else_sub,
+                });
                 self.mk_block_err(cond_span.shrink_to_hi())
             }
         } else {
-            let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+            let attrs = self.parse_outer_attributes()?; // For recovery.
+            let maybe_fatarrow = self.token.clone();
             let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
                 self.parse_block()?
             } else {
                 if let Some(block) = recover_block_from_condition(self) {
                     block
                 } else {
+                    self.error_on_extra_if(&cond)?;
                     // Parse block, which will always fail, but we can add a nice note to the error
                     self.parse_block().map_err(|mut err| {
-                        err.span_note(
-                            cond_span,
-                            "the `if` expression is missing a block after this condition",
-                        );
+                        if self.prev_token == token::Semi
+                            && self.token == token::AndAnd
+                            && let maybe_let = self.look_ahead(1, |t| t.clone())
+                            && maybe_let.is_keyword(kw::Let)
+                        {
+                            err.span_suggestion(
+                                self.prev_token.span,
+                                "consider removing this semicolon to parse the `let` as part of the same chain",
+                                "",
+                                Applicability::MachineApplicable,
+                            ).span_note(
+                                self.token.span.to(maybe_let.span),
+                                "you likely meant to continue parsing the let-chain starting here",
+                            );
+                        } else {
+                            // Look for usages of '=>' where '>=' might be intended
+                            if maybe_fatarrow.kind == token::FatArrow {
+                                err.span_suggestion(
+                                    maybe_fatarrow.span,
+                                    "you might have meant to write a \"greater than or equal to\" comparison",
+                                    ">=",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                            err.span_note(
+                                cond_span,
+                                "the `if` expression is missing a block after this condition",
+                            );
+                        }
                         err
                     })?
                 }
             };
-            self.error_on_if_block_attrs(lo, false, block.span, &attrs);
+            self.error_on_if_block_attrs(lo, false, block.span, attrs);
             block
         };
-        let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
-        Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els), attrs))
-    }
-
-    fn error_missing_if_then_block(
-        &self,
-        if_span: Span,
-        cond_span: Span,
-        is_unfinished: bool,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        let mut err = self.struct_span_err(
-            if_span,
-            "this `if` expression is missing a block after the condition",
-        );
-        if is_unfinished {
-            err.span_help(cond_span, "this binary operation is possibly unfinished");
-        } else {
-            err.span_help(cond_span.shrink_to_hi(), "add a block here");
-        }
-        err
-    }
-
-    fn error_missing_if_cond(
-        &self,
-        lo: Span,
-        span: Span,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        let next_span = self.sess.source_map().next_point(lo);
-        let mut err = self.struct_span_err(next_span, "missing condition for `if` expression");
-        err.span_label(next_span, "expected condition here");
-        err.span_label(
-            self.sess.source_map().start_point(span),
-            "if this block is the condition of the `if` expression, then it must be followed by another block"
-        );
-        err
+        let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None };
+        Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
     }
 
     /// Parses the condition of an `if` or `while` expression.
-    fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
-        self.with_let_management(true, |local_self| {
-            local_self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)
-        })
-    }
+    fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
+        let mut cond =
+            self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;
 
-    // Checks if `let` is in an invalid position like `let x = let y = 1;` or
-    // if the current `let` is in a let_chains context but nested in another
-    // expression like `if let Some(_) = _opt && [1, 2, 3][let _ = ()] = 1`.
-    //
-    // This method expects that the current token is `let`.
-    fn manage_let_chains_context(&mut self) {
-        debug_assert!(matches!(self.token.kind, TokenKind::Ident(kw::Let, _)));
-        let is_in_a_let_chains_context_but_nested_in_other_expr = self.let_expr_allowed
-            && !matches!(
-                self.prev_token.kind,
-                TokenKind::AndAnd
-                    | TokenKind::CloseDelim(Delimiter::Brace)
-                    | TokenKind::Ident(kw::If, _)
-                    | TokenKind::Ident(kw::While, _)
-            );
-        if !self.let_expr_allowed || is_in_a_let_chains_context_but_nested_in_other_expr {
-            self.struct_span_err(self.token.span, "expected expression, found `let` statement")
-                .emit();
+        CondChecker::new(self).visit_expr(&mut cond);
+
+        if let ExprKind::Let(_, _, _, None) = cond.kind {
+            // Remove the last feature gating of a `let` expression since it's stable.
+            self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
         }
+
+        Ok(cond)
     }
 
     /// Parses a `let $pat = $expr` pseudo-expression.
-    /// The `let` token has already been eaten.
-    fn parse_let_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_let(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
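+        // Outside of a context that allows `let` (`Restrictions::ALLOW_LET`, set e.g. while
+        // parsing an `if`/`while` condition), report "expected expression, found `let`
+        // statement" but keep parsing the `let` for recovery.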
+        let is_recovered = if !restrictions.contains(Restrictions::ALLOW_LET) {
+            let err = errors::ExpectedExpressionFoundLet {
+                span: self.token.span,
+                reason: ForbiddenLetReason::OtherForbidden,
+                missing_let: None,
+                comparison: None,
+            };
+            if self.prev_token.kind == token::BinOp(token::Or) {
+                // This was part of a closure; let that part of the parser recover.
+                return Err(self.dcx().create_err(err));
+            } else {
+                Some(self.dcx().emit_err(err))
+            }
+        } else {
+            None
+        };
+        self.bump(); // Eat `let` token
         let lo = self.prev_token.span;
         let pat = self.parse_pat_allow_top_alt(
             None,
@@ -2377,20 +2620,26 @@ impl<'a> Parser<'a> {
             RecoverColon::Yes,
             CommaRecoveryMode::LikelyTuple,
         )?;
-        self.expect(&token::Eq)?;
-        let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
-            this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
-        })?;
+        if self.token == token::EqEq {
+            self.dcx().emit_err(errors::ExpectedEqForLetExpr {
+                span: self.token.span,
+                sugg_span: self.token.span,
+            });
+            self.bump();
+        } else {
+            self.expect(&token::Eq)?;
+        }
+        let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())?;
         let span = lo.to(expr.span);
-        Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span), attrs))
+        Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, is_recovered)))
     }
 
     /// Parses an `else { ... }` expression (`else` token already eaten).
-    fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> {
         let else_span = self.prev_token.span; // `else`
-        let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+        let attrs = self.parse_outer_attributes()?; // For recovery.
         let expr = if self.eat_keyword(kw::If) {
-            self.parse_if_expr(AttrVec::new())?
+            ensure_sufficient_stack(|| self.parse_expr_if())?
         } else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
             self.parse_simple_block()?
         } else {
@@ -2404,16 +2653,13 @@ impl<'a> Parser<'a> {
                     if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
                         && classify::expr_requires_semi_to_be_stmt(&cond) =>
                 {
-                    self.struct_span_err(first_tok_span, format!("expected `{{`, found {first_tok}"))
-                        .span_label(else_span, "expected an `if` or a block after this `else`")
-                        .span_suggestion(
-                            cond.span.shrink_to_lo(),
-                            "add an `if` if this is the condition of a chained `else if` statement",
-                            "if ",
-                            Applicability::MaybeIncorrect,
-                        )
-                        .emit();
-                    self.parse_if_after_cond(AttrVec::new(), cond.span.shrink_to_lo(), cond)?
+                    self.dcx().emit_err(errors::ExpectedElseBlock {
+                        first_tok_span,
+                        first_tok,
+                        else_span,
+                        condition_start: cond.span.shrink_to_lo(),
+                    });
+                    self.parse_if_after_cond(cond.span.shrink_to_lo(), cond)?
                 }
                 Err(e) => {
                     e.cancel();
@@ -2426,7 +2672,7 @@ impl<'a> Parser<'a> {
                 },
             }
         };
-        self.error_on_if_block_attrs(else_span, true, expr.span, &attrs);
+        self.error_on_if_block_attrs(else_span, true, expr.span, attrs);
         Ok(expr)
     }
 
@@ -2435,106 +2681,198 @@ impl<'a> Parser<'a> {
         ctx_span: Span,
         is_ctx_else: bool,
         branch_span: Span,
-        attrs: &[ast::Attribute],
+        attrs: AttrWrapper,
     ) {
-        let (span, last) = match attrs {
+        if attrs.is_empty() {
+            return;
+        }
+
+        let attrs: &[ast::Attribute] = &attrs.take_for_recovery(self.sess);
+        let (attributes, last) = match attrs {
             [] => return,
             [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
         };
         let ctx = if is_ctx_else { "else" } else { "if" };
-        self.struct_span_err(last, "outer attributes are not allowed on `if` and `else` branches")
-            .span_label(branch_span, "the attributes are attached to this branch")
-            .span_label(ctx_span, format!("the branch belongs to this `{ctx}`"))
-            .span_suggestion(span, "remove the attributes", "", Applicability::MachineApplicable)
-            .emit();
+        self.dcx().emit_err(errors::OuterAttributeNotAllowedOnIfElse {
+            last,
+            branch_span,
+            ctx_span,
+            ctx: ctx.to_string(),
+            attributes,
+        });
     }
 
-    /// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
-    fn parse_for_expr(
-        &mut self,
-        opt_label: Option<Label>,
-        lo: Span,
-        mut attrs: AttrVec,
-    ) -> PResult<'a, P<Expr>> {
-        // Record whether we are about to parse `for (`.
-        // This is used below for recovery in case of `for ( $stuff ) $block`
-        // in which case we will suggest `for $stuff $block`.
-        let begin_paren = match self.token.kind {
-            token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
-            _ => None,
-        };
+    fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
+        if let ExprKind::Binary(Spanned { span: binop_span, node: binop }, _, right) = &cond.kind
+            && let BinOpKind::And = binop
+            && let ExprKind::If(cond, ..) = &right.kind
+        {
+            Err(self.dcx().create_err(errors::UnexpectedIfWithIf(
+                binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()),
+            )))
+        } else {
+            Ok(())
+        }
+    }
 
-        let pat = self.parse_pat_allow_top_alt(
-            None,
-            RecoverComma::Yes,
-            RecoverColon::Yes,
-            CommaRecoveryMode::LikelyTuple,
-        )?;
+    fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
+        let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+            // Record whether we are about to parse `for (`.
+            // This is used below for recovery in case of `for ( $stuff ) $block`
+            // in which case we will suggest `for $stuff $block`.
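+            // (e.g. `for (x in 0..3) {}` gets a suggestion to drop the parentheses:
+            // `for x in 0..3 {}`).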
+            let start_span = self.token.span;
+            let left = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
+            Some((start_span, left))
+        } else {
+            None
+        };
+        // Try to parse the pattern `for ($PAT) in $EXPR`.
+        let pat = match (
+            self.parse_pat_allow_top_alt(
+                None,
+                RecoverComma::Yes,
+                RecoverColon::Yes,
+                CommaRecoveryMode::LikelyTuple,
+            ),
+            begin_paren,
+        ) {
+            (Ok(pat), _) => pat, // Happy path.
+            (Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => {
+                // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
+                // happen right before the return of this method.
+                let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None) {
+                    Ok(expr) => expr,
+                    Err(expr_err) => {
+                        // We don't know what followed the `in`, so cancel and bubble up the
+                        // original error.
+                        expr_err.cancel();
+                        return Err(err);
+                    }
+                };
+                return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) {
+                    // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
+                    // parser state and emit a targeted suggestion.
+                    let span = vec![start_span, self.token.span];
+                    let right = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
+                    self.bump(); // )
+                    err.cancel();
+                    self.dcx().emit_err(errors::ParenthesesInForHead {
+                        span,
+                        // With e.g. `for (x) in y)` this would replace `(x) in y)`
+                        // with `x) in y)` which is syntactically invalid.
+                        // However, this is prevented before we get here.
+                        sugg: errors::ParenthesesInForHeadSugg { left, right },
+                    });
+                    Ok((self.mk_pat(start_span.to(right), ast::PatKind::Wild), expr))
+                } else {
+                    Err(err) // Some other error, bubble up.
+                };
+            }
+            (Err(err), _) => return Err(err), // Some other error, bubble up.
+        };
         if !self.eat_keyword(kw::In) {
             self.error_missing_in_for_loop();
         }
         self.check_for_for_in_in_typo(self.prev_token.span);
         let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
+        Ok((pat, expr))
+    }
+
+    /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
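+    /// (e.g. `for await x in stream { .. }`, gated behind the `async_for_loop` feature).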
+    fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let is_await =
+            self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await);
 
-        let pat = self.recover_parens_around_for_head(pat, begin_paren);
+        if is_await {
+            self.sess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
+        }
+
+        let kind = if is_await { ForLoopKind::ForAwait } else { ForLoopKind::For };
+
+        let (pat, expr) = self.parse_for_head()?;
+        // Recover from missing expression in `for` loop
+        if matches!(expr.kind, ExprKind::Block(..))
+            && !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
+            && self.may_recover()
+        {
+            self.dcx()
+                .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
+            let err_expr = self.mk_expr(expr.span, ExprKind::Err);
+            let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
+            return Ok(self.mk_expr(
+                lo.to(self.prev_token.span),
+                ExprKind::ForLoop { pat, iter: err_expr, body: block, label: opt_label, kind },
+            ));
+        }
 
-        let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
-        attrs.extend(iattrs);
+        let (attrs, loop_block) = self.parse_inner_attrs_and_block()?;
 
-        let kind = ExprKind::ForLoop(pat, expr, loop_block, opt_label);
-        Ok(self.mk_expr(lo.to(self.prev_token.span), kind, attrs))
+        let kind = ExprKind::ForLoop { pat, iter: expr, body: loop_block, label: opt_label, kind };
+
+        self.recover_loop_else("for", lo)?;
+
+        Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
+    }
+
+    /// Recovers from an `else` clause after a loop (`for...else`, `while...else`)
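+    /// (e.g. `while cond { .. } else { .. }`, which is not valid Rust).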
+    fn recover_loop_else(&mut self, loop_kind: &'static str, loop_kw: Span) -> PResult<'a, ()> {
+        if self.token.is_keyword(kw::Else) && self.may_recover() {
+            let else_span = self.token.span;
+            self.bump();
+            let else_clause = self.parse_expr_else()?;
+            self.dcx().emit_err(errors::LoopElseNotSupported {
+                span: else_span.to(else_clause.span),
+                loop_kind,
+                loop_kw,
+            });
+        }
+        Ok(())
     }
 
     fn error_missing_in_for_loop(&mut self) {
-        let (span, msg, sugg) = if self.token.is_ident_named(sym::of) {
+        let (span, sub): (_, fn(_) -> _) = if self.token.is_ident_named(sym::of) {
             // Possibly using JS syntax (#75311).
             let span = self.token.span;
             self.bump();
-            (span, "try using `in` here instead", "in")
+            (span, errors::MissingInInForLoopSub::InNotOf)
         } else {
-            (self.prev_token.span.between(self.token.span), "try adding `in` here", " in ")
+            (self.prev_token.span.between(self.token.span), errors::MissingInInForLoopSub::AddIn)
         };
-        self.struct_span_err(span, "missing `in` in `for` loop")
-            .span_suggestion_short(
-                span,
-                msg,
-                sugg,
-                // Has been misleading, at least in the past (closed Issue #48492).
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+
+        self.dcx().emit_err(errors::MissingInInForLoop { span, sub: sub(span) });
     }
 
     /// Parses a `while` or `while let` expression (`while` token already eaten).
-    fn parse_while_expr(
-        &mut self,
-        opt_label: Option<Label>,
-        lo: Span,
-        mut attrs: AttrVec,
-    ) -> PResult<'a, P<Expr>> {
-        let cond = self.parse_cond_expr().map_err(|mut err| {
+    fn parse_expr_while(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let cond = self.parse_expr_cond().map_err(|mut err| {
             err.span_label(lo, "while parsing the condition of this `while` expression");
             err
         })?;
-        let (iattrs, body) = self.parse_inner_attrs_and_block().map_err(|mut err| {
+        let (attrs, body) = self.parse_inner_attrs_and_block().map_err(|mut err| {
             err.span_label(lo, "while parsing the body of this `while` expression");
             err.span_label(cond.span, "this `while` condition successfully parsed");
             err
         })?;
-        attrs.extend(iattrs);
-        Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::While(cond, body, opt_label), attrs))
+
+        self.recover_loop_else("while", lo)?;
+
+        Ok(self.mk_expr_with_attrs(
+            lo.to(self.prev_token.span),
+            ExprKind::While(cond, body, opt_label),
+            attrs,
+        ))
     }
 
     /// Parses `loop { ... }` (`loop` token already eaten).
-    fn parse_loop_expr(
-        &mut self,
-        opt_label: Option<Label>,
-        lo: Span,
-        mut attrs: AttrVec,
-    ) -> PResult<'a, P<Expr>> {
-        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
-        attrs.extend(iattrs);
-        Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::Loop(body, opt_label), attrs))
+    fn parse_expr_loop(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let loop_span = self.prev_token.span;
+        let (attrs, body) = self.parse_inner_attrs_and_block()?;
+        self.recover_loop_else("loop", lo)?;
+        Ok(self.mk_expr_with_attrs(
+            lo.to(self.prev_token.span),
+            ExprKind::Loop(body, opt_label, loop_span),
+            attrs,
+        ))
     }
 
     pub(crate) fn eat_label(&mut self) -> Option<Label> {
@@ -2545,7 +2883,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a `match ... { ... }` expression (`match` token already eaten).
-    fn parse_match_expr(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
         let match_span = self.prev_token.span;
         let lo = self.prev_token.span;
         let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
@@ -2565,27 +2903,41 @@ impl<'a> Parser<'a> {
                 return Err(e);
             }
         }
-        attrs.extend(self.parse_inner_attributes()?);
+        let attrs = self.parse_inner_attributes()?;
 
-        let mut arms: Vec<Arm> = Vec::new();
+        let mut arms = ThinVec::new();
         while self.token != token::CloseDelim(Delimiter::Brace) {
             match self.parse_arm() {
                 Ok(arm) => arms.push(arm),
-                Err(mut e) => {
+                Err(e) => {
                     // Recover by skipping to the end of the block.
-                    e.emit();
+                    let guar = e.emit();
                     self.recover_stmt();
                     let span = lo.to(self.token.span);
                     if self.token == token::CloseDelim(Delimiter::Brace) {
                         self.bump();
                     }
-                    return Ok(self.mk_expr(span, ExprKind::Match(scrutinee, arms), attrs));
+                    // Always push at least one arm to make the match non-empty
+                    arms.push(Arm {
+                        attrs: Default::default(),
+                        pat: self.mk_pat(span, ast::PatKind::Err(guar)),
+                        guard: None,
+                        body: Some(self.mk_expr_err(span)),
+                        span,
+                        id: DUMMY_NODE_ID,
+                        is_placeholder: false,
+                    });
+                    return Ok(self.mk_expr_with_attrs(
+                        span,
+                        ExprKind::Match(scrutinee, arms),
+                        attrs,
+                    ));
                 }
             }
         }
         let hi = self.token.span;
         self.bump();
-        Ok(self.mk_expr(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs))
+        Ok(self.mk_expr_with_attrs(lo.to(hi), ExprKind::Match(scrutinee, arms), attrs))
     }
 
     /// Attempt to recover from match arm body with statements and no surrounding braces.
@@ -2602,39 +2954,22 @@ impl<'a> Parser<'a> {
         self.bump(); // `;`
         let mut stmts =
             vec![self.mk_stmt(first_expr.span, ast::StmtKind::Expr(first_expr.clone()))];
-        let err = |this: &mut Parser<'_>, stmts: Vec<ast::Stmt>| {
+        let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
             let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
-            let mut err = this.struct_span_err(span, "`match` arm body without braces");
-            let (these, s, are) =
-                if stmts.len() > 1 { ("these", "s", "are") } else { ("this", "", "is") };
-            err.span_label(
-                span,
-                &format!(
-                    "{these} statement{s} {are} not surrounded by a body",
-                    these = these,
-                    s = s,
-                    are = are
-                ),
-            );
-            err.span_label(arrow_span, "while parsing the `match` arm starting here");
-            if stmts.len() > 1 {
-                err.multipart_suggestion(
-                    &format!("surround the statement{s} with a body"),
-                    vec![
-                        (span.shrink_to_lo(), "{ ".to_string()),
-                        (span.shrink_to_hi(), " }".to_string()),
-                    ],
-                    Applicability::MachineApplicable,
-                );
-            } else {
-                err.span_suggestion(
-                    semi_sp,
-                    "use a comma to end a `match` arm expression",
-                    ",",
-                    Applicability::MachineApplicable,
-                );
-            }
-            err.emit();
+
+            this.dcx().emit_err(errors::MatchArmBodyWithoutBraces {
+                statements: span,
+                arrow: arrow_span,
+                num_statements: stmts.len(),
+                sub: if stmts.len() > 1 {
+                    errors::MatchArmBodyWithoutBracesSugg::AddBraces {
+                        left: span.shrink_to_lo(),
+                        right: span.shrink_to_hi(),
+                    }
+                } else {
+                    errors::MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
+                },
+            });
             this.mk_expr_err(span)
         };
         // We might have either a `,` -> `;` typo, or a block without braces. We need
@@ -2649,7 +2984,7 @@ impl<'a> Parser<'a> {
                 return None;
             }
             let pre_pat_snapshot = self.create_snapshot_for_diagnostic();
-            match self.parse_pat_no_top_alt(None) {
+            match self.parse_pat_no_top_alt(None, None) {
                 Ok(_pat) => {
                     if self.token.kind == token::FatArrow {
                         // Reached arm end.
@@ -2685,156 +3020,173 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
-        // Used to check the `let_chains` and `if_let_guard` features mostly by scaning
-        // `&&` tokens.
-        fn check_let_expr(expr: &Expr) -> bool {
-            match expr.kind {
-                ExprKind::Binary(_, ref lhs, ref rhs) => check_let_expr(lhs) || check_let_expr(rhs),
-                ExprKind::Let(..) => true,
-                _ => false,
-            }
-        }
         let attrs = self.parse_outer_attributes()?;
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
-            let pat = this.parse_pat_allow_top_alt(
-                None,
-                RecoverComma::Yes,
-                RecoverColon::Yes,
-                CommaRecoveryMode::EitherTupleOrPipe,
-            )?;
-            let guard = if this.eat_keyword(kw::If) {
-                let if_span = this.prev_token.span;
-                let cond = this.with_let_management(true, |local_this| local_this.parse_expr())?;
-                let has_let_expr = check_let_expr(&cond);
-                if has_let_expr {
-                    let span = if_span.to(cond.span);
-                    this.sess.gated_spans.gate(sym::if_let_guard, span);
-                }
-                Some(cond)
+            let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
+
+            let span_before_body = this.prev_token.span;
+            let arm_body;
+            let is_fat_arrow = this.check(&token::FatArrow);
+            let is_almost_fat_arrow = TokenKind::FatArrow
+                .similar_tokens()
+                .is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind));
+
+            // This avoids the compiler saying that a `,` or `}` was expected even though
+            // the pattern isn't a never pattern (and thus an arm body is required).
+            let armless = (!is_fat_arrow && !is_almost_fat_arrow && pat.could_be_never_pattern())
+                || matches!(this.token.kind, token::Comma | token::CloseDelim(Delimiter::Brace));
+
+            let mut result = if armless {
+                // A pattern without a body, allowed for never patterns.
+                arm_body = None;
+                this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]).map(
+                    |x| {
+                        // Don't gate twice
+                        if !pat.contains_never_pattern() {
+                            this.sess.gated_spans.gate(sym::never_patterns, pat.span);
+                        }
+                        x
+                    },
+                )
             } else {
-                None
-            };
-            let arrow_span = this.token.span;
-            if let Err(mut err) = this.expect(&token::FatArrow) {
-                // We might have a `=>` -> `=` or `->` typo (issue #89396).
-                if TokenKind::FatArrow
-                    .similar_tokens()
-                    .map_or(false, |similar_tokens| similar_tokens.contains(&this.token.kind))
-                {
-                    err.span_suggestion(
-                        this.token.span,
-                        "try using a fat arrow here",
-                        "=>",
-                        Applicability::MaybeIncorrect,
-                    );
-                    err.emit();
-                    this.bump();
-                } else {
-                    return Err(err);
+                if let Err(mut err) = this.expect(&token::FatArrow) {
+                    // We might have a `=>` -> `=` or `->` typo (issue #89396).
+                    if is_almost_fat_arrow {
+                        err.span_suggestion(
+                            this.token.span,
+                            "use a fat arrow to start a match arm",
+                            "=>",
+                            Applicability::MachineApplicable,
+                        );
+                        if matches!(
+                            (&this.prev_token.kind, &this.token.kind),
+                            (token::DotDotEq, token::Gt)
+                        ) {
+                            // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
+                            // so we suppress the error here
+                            err.delay_as_bug();
+                        } else {
+                            err.emit();
+                        }
+                        this.bump();
+                    } else {
+                        return Err(err);
+                    }
                 }
-            }
-            let arm_start_span = this.token.span;
+                let arrow_span = this.prev_token.span;
+                let arm_start_span = this.token.span;
 
-            let expr = this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
-                err.span_label(arrow_span, "while parsing the `match` arm starting here");
-                err
-            })?;
+                let expr =
+                    this.parse_expr_res(Restrictions::STMT_EXPR, None).map_err(|mut err| {
+                        err.span_label(arrow_span, "while parsing the `match` arm starting here");
+                        err
+                    })?;
 
-            let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
-                && this.token != token::CloseDelim(Delimiter::Brace);
-
-            let hi = this.prev_token.span;
-
-            if require_comma {
-                let sm = this.sess.source_map();
-                if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
-                    let span = body.span;
-                    return Ok((
-                        ast::Arm {
-                            attrs: attrs.into(),
-                            pat,
-                            guard,
-                            body,
-                            span,
-                            id: DUMMY_NODE_ID,
-                            is_placeholder: false,
-                        },
-                        TrailingToken::None,
-                    ));
-                }
-                this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
-                    .or_else(|mut err| {
-                        if this.token == token::FatArrow {
-                            if let Ok(expr_lines) = sm.span_to_lines(expr.span)
-                            && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
-                            && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
-                            && expr_lines.lines.len() == 2
-                            {
-                                // We check whether there's any trailing code in the parse span,
-                                // if there isn't, we very likely have the following:
-                                //
-                                // X |     &Y => "y"
-                                //   |        --    - missing comma
-                                //   |        |
-                                //   |        arrow_span
-                                // X |     &X => "x"
-                                //   |      - ^^ self.token.span
-                                //   |      |
-                                //   |      parsed until here as `"y" & X`
-                                err.span_suggestion_short(
-                                    arm_start_span.shrink_to_hi(),
-                                    "missing a comma here to end this `match` arm",
-                                    ",",
-                                    Applicability::MachineApplicable,
+                let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
+                    && this.token != token::CloseDelim(Delimiter::Brace);
+
+                if !require_comma {
+                    arm_body = Some(expr);
+                    this.eat(&token::Comma);
+                    Ok(false)
+                } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) {
+                    arm_body = Some(body);
+                    Ok(true)
+                } else {
+                    let expr_span = expr.span;
+                    arm_body = Some(expr);
+                    this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
+                        .map_err(|mut err| {
+                            if this.token == token::FatArrow {
+                                let sm = this.sess.source_map();
+                                if let Ok(expr_lines) = sm.span_to_lines(expr_span)
+                                    && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
+                                    && arm_start_lines.lines[0].end_col
+                                        == expr_lines.lines[0].end_col
+                                    && expr_lines.lines.len() == 2
+                                {
+                                    // We check whether there's any trailing code in the parse span,
+                                    // if there isn't, we very likely have the following:
+                                    //
+                                    // X |     &Y => "y"
+                                    //   |        --    - missing comma
+                                    //   |        |
+                                    //   |        arrow_span
+                                    // X |     &X => "x"
+                                    //   |      - ^^ self.token.span
+                                    //   |      |
+                                    //   |      parsed until here as `"y" & X`
+                                    err.span_suggestion_short(
+                                        arm_start_span.shrink_to_hi(),
+                                        "missing a comma here to end this `match` arm",
+                                        ",",
+                                        Applicability::MachineApplicable,
+                                    );
+                                }
+                            } else {
+                                err.span_label(
+                                    arrow_span,
+                                    "while parsing the `match` arm starting here",
                                 );
-                                return Err(err);
-                            }
-                        } else {
-                            // FIXME(compiler-errors): We could also recover `; PAT =>` here
-
-                            // Try to parse a following `PAT =>`, if successful
-                            // then we should recover.
-                            let mut snapshot = this.create_snapshot_for_diagnostic();
-                            let pattern_follows = snapshot
-                                .parse_pat_allow_top_alt(
-                                    None,
-                                    RecoverComma::Yes,
-                                    RecoverColon::Yes,
-                                    CommaRecoveryMode::EitherTupleOrPipe,
-                                )
-                                .map_err(|err| err.cancel())
-                                .is_ok();
-                            if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
-                                err.cancel();
-                                this.struct_span_err(
-                                    hi.shrink_to_hi(),
-                                    "expected `,` following `match` arm",
-                                )
-                                .span_suggestion(
-                                    hi.shrink_to_hi(),
-                                    "missing a comma here to end this `match` arm",
-                                    ",",
-                                    Applicability::MachineApplicable,
-                                )
-                                .emit();
-                                return Ok(true);
                             }
-                        }
-                        err.span_label(arrow_span, "while parsing the `match` arm starting here");
-                        Err(err)
-                    })?;
-            } else {
-                this.eat(&token::Comma);
+                            err
+                        })
+                }
+            };
+
+            let hi_span = arm_body.as_ref().map_or(span_before_body, |body| body.span);
+            let arm_span = lo.to(hi_span);
+
+            // We want to recover:
+            // X |     Some(_) => foo()
+            //   |                     - missing comma
+            // X |     None => "x"
+            //   |     ^^^^ self.token.span
+            // as well as:
+            // X |     Some(!)
+            //   |            - missing comma
+            // X |     None => "x"
+            //   |     ^^^^ self.token.span
+            // But we mustn't recover
+            // X |     pat[0] => {}
+            //   |        ^ self.token.span
+            let recover_missing_comma = arm_body.is_some() || pat.could_be_never_pattern();
+            if recover_missing_comma {
+                result = result.or_else(|err| {
+                    // FIXME(compiler-errors): We could also recover `; PAT =>` here
+
+                    // Try to parse a following `PAT =>`, if successful
+                    // then we should recover.
+                    let mut snapshot = this.create_snapshot_for_diagnostic();
+                    let pattern_follows = snapshot
+                        .parse_pat_allow_top_alt(
+                            None,
+                            RecoverComma::Yes,
+                            RecoverColon::Yes,
+                            CommaRecoveryMode::EitherTupleOrPipe,
+                        )
+                        .map_err(|err| err.cancel())
+                        .is_ok();
+                    if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
+                        err.cancel();
+                        this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
+                            span: arm_span.shrink_to_hi(),
+                        });
+                        return Ok(true);
+                    }
+                    Err(err)
+                });
             }
+            result?;
 
             Ok((
                 ast::Arm {
-                    attrs: attrs.into(),
+                    attrs,
                     pat,
                     guard,
-                    body: expr,
-                    span: lo.to(hi),
+                    body: arm_body,
+                    span: arm_span,
                     id: DUMMY_NODE_ID,
                     is_placeholder: false,
                 },
@@ -2843,29 +3195,138 @@ impl<'a> Parser<'a> {
         })
     }
 
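+    /// Parses an optional match arm guard (`if EXPR`), feature-gating `if let` guards as needed.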
+    fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<P<Expr>>> {
+        // Used to check the `let_chains` and `if_let_guard` features mostly by scanning
+        // `&&` tokens.
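+        // Returns `(has_let_expr, does_not_have_bin_op)`: whether a `let` appears anywhere
+        // in the `&&` chain, and whether the top level is not itself an `&&`.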
+        fn check_let_expr(expr: &Expr) -> (bool, bool) {
+            match &expr.kind {
+                ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
+                    let lhs_rslt = check_let_expr(lhs);
+                    let rhs_rslt = check_let_expr(rhs);
+                    (lhs_rslt.0 || rhs_rslt.0, false)
+                }
+                ExprKind::Let(..) => (true, true),
+                _ => (false, true),
+            }
+        }
+        if !self.eat_keyword(kw::If) {
+            // No match arm guard present.
+            return Ok(None);
+        }
+
+        let if_span = self.prev_token.span;
+        let mut cond = self.parse_match_guard_condition()?;
+
+        CondChecker::new(self).visit_expr(&mut cond);
+
+        let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
+        if has_let_expr {
+            if does_not_have_bin_op {
+                // Remove the last feature gating of a `let` expression since it's stable.
+                self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
+            }
+            let span = if_span.to(cond.span);
+            self.sess.gated_spans.gate(sym::if_let_guard, span);
+        }
+        Ok(Some(cond))
+    }
+
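+    /// Parses a match arm's pattern and optional guard, recovering from `($pat if $cond) => $arm`.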
+    fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
+        if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
+            // Detect and recover from `($pat if $cond) => $arm`.
+            let left = self.token.span;
+            match self.parse_pat_allow_top_alt(
+                None,
+                RecoverComma::Yes,
+                RecoverColon::Yes,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            ) {
+                Ok(pat) => Ok((pat, self.parse_match_arm_guard()?)),
+                Err(err)
+                    if let prev_sp = self.prev_token.span
+                        && let true = self.eat_keyword(kw::If) =>
+                {
+                    // We know for certain we've found `($pat if` so far.
+                    let mut cond = match self.parse_match_guard_condition() {
+                        Ok(cond) => cond,
+                        Err(cond_err) => {
+                            cond_err.cancel();
+                            return Err(err);
+                        }
+                    };
+                    err.cancel();
+                    CondChecker::new(self).visit_expr(&mut cond);
+                    self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
+                    self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+                    let right = self.prev_token.span;
+                    self.dcx().emit_err(errors::ParenthesesInMatchPat {
+                        span: vec![left, right],
+                        sugg: errors::ParenthesesInMatchPatSugg { left, right },
+                    });
+                    Ok((self.mk_pat(left.to(prev_sp), ast::PatKind::Wild), Some(cond)))
+                }
+                Err(err) => Err(err),
+            }
+        } else {
+            // Regular parser flow:
+            let pat = self.parse_pat_allow_top_alt(
+                None,
+                RecoverComma::Yes,
+                RecoverColon::Yes,
+                CommaRecoveryMode::EitherTupleOrPipe,
+            )?;
+            Ok((pat, self.parse_match_arm_guard()?))
+        }
+    }
+
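+    /// Parses a match guard's condition, suggesting `=>` when a block likely starts
+    /// the arm body instead of finishing the guard expression.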
+    fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
+        self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err(
+            |mut err| {
+                if self.prev_token == token::OpenDelim(Delimiter::Brace) {
+                    let sugg_sp = self.prev_token.span.shrink_to_lo();
+                    // Consume everything within the braces, let's avoid further parse
+                    // errors.
+                    self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
+                    let msg = "you might have meant to start a match arm after the match guard";
+                    if self.eat(&token::CloseDelim(Delimiter::Brace)) {
+                        let applicability = if self.token.kind != token::FatArrow {
+                            // We have high confidence that we indeed didn't have a struct
+                            // literal in the match guard, but rather we had some operation
+                            // that ended in a path, immediately followed by a block that was
+                            // meant to be the match arm.
+                            Applicability::MachineApplicable
+                        } else {
+                            Applicability::MaybeIncorrect
+                        };
+                        err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability);
+                    }
+                }
+                err
+            },
+        )
+    }
+
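+    /// Returns whether the current token begins a `builtin #` expression.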
+    pub(crate) fn is_builtin(&self) -> bool {
+        self.token.is_keyword(kw::Builtin) && self.look_ahead(1, |t| *t == token::Pound)
+    }
+
     /// Parses a `try {...}` expression (`try` token already eaten).
-    fn parse_try_block(&mut self, span_lo: Span, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
-        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
-        attrs.extend(iattrs);
+    fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
+        let (attrs, body) = self.parse_inner_attrs_and_block()?;
         if self.eat_keyword(kw::Catch) {
-            let mut error = self.struct_span_err(
-                self.prev_token.span,
-                "keyword `catch` cannot follow a `try` block",
-            );
-            error.help("try using `match` on the result of the `try` block instead");
-            error.emit();
-            Err(error)
+            Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
         } else {
             let span = span_lo.to(body.span);
             self.sess.gated_spans.gate(sym::try_blocks, span);
-            Ok(self.mk_expr(span, ExprKind::TryBlock(body), attrs))
+            Ok(self.mk_expr_with_attrs(span, ExprKind::TryBlock(body), attrs))
         }
     }
 
     fn is_do_catch_block(&self) -> bool {
         self.token.is_keyword(kw::Do)
             && self.is_keyword_ahead(1, &[kw::Catch])
-            && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
+            && self
+                .look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
             && !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
 
@@ -2875,33 +3336,54 @@ impl<'a> Parser<'a> {
 
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try)
-            && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
-            && self.token.uninterpolated_span().rust_2018()
+            && self
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+            && self.token.uninterpolated_span().at_least_rust_2018()
     }
 
-    /// Parses an `async move? {...}` expression.
-    fn parse_async_block(&mut self, mut attrs: AttrVec) -> PResult<'a, P<Expr>> {
+    /// Parses an `async move? {...}` or `gen move? {...}` expression.
+    fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        self.expect_keyword(kw::Async)?;
+        let kind = if self.eat_keyword(kw::Async) {
+            if self.eat_keyword(kw::Gen) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
+        } else {
+            assert!(self.eat_keyword(kw::Gen));
+            GenBlockKind::Gen
+        };
+        match kind {
+            GenBlockKind::Async => {
+                // `async` blocks are stable
+            }
+            GenBlockKind::Gen | GenBlockKind::AsyncGen => {
+                self.sess.gated_spans.gate(sym::gen_blocks, lo.to(self.prev_token.span));
+            }
+        }
         let capture_clause = self.parse_capture_clause()?;
-        let (iattrs, body) = self.parse_inner_attrs_and_block()?;
-        attrs.extend(iattrs);
-        let kind = ExprKind::Async(capture_clause, DUMMY_NODE_ID, body);
-        Ok(self.mk_expr(lo.to(self.prev_token.span), kind, attrs))
+        let (attrs, body) = self.parse_inner_attrs_and_block()?;
+        let kind = ExprKind::Gen(capture_clause, body, kind);
+        Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
     }
 
-    fn is_async_block(&self) -> bool {
-        self.token.is_keyword(kw::Async)
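+    /// Returns whether `kw` (optionally followed by `move`) and a block start
+    /// `lookahead` tokens ahead.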
+    fn is_gen_block(&self, kw: Symbol, lookahead: usize) -> bool {
+        self.is_keyword_ahead(lookahead, &[kw])
             && ((
                 // `async move {`
-                self.is_keyword_ahead(1, &[kw::Move])
-                    && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))
+                self.is_keyword_ahead(lookahead + 1, &[kw::Move])
+                    && self.look_ahead(lookahead + 2, |t| {
+                        *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                    })
             ) || (
                 // `async {`
-                self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
+                self.look_ahead(lookahead + 1, |t| {
+                    *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block()
+                })
             ))
     }
 
+    pub(super) fn is_async_gen_block(&self) -> bool {
+        self.token.is_keyword(kw::Async) && self.is_gen_block(kw::Gen, 1)
+    }
+
     fn is_certainly_not_a_block(&self) -> bool {
         self.look_ahead(1, |t| t.is_ident())
             && (
@@ -2910,62 +3392,59 @@ impl<'a> Parser<'a> {
                     || self.look_ahead(2, |t| t == &token::Colon)
                         && (
                             // `{ ident: token, ` cannot start a block.
-                            self.look_ahead(4, |t| t == &token::Comma) ||
-                // `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`.
-                self.look_ahead(3, |t| !t.can_begin_type())
+                            self.look_ahead(4, |t| t == &token::Comma)
+                                // `{ ident: ` cannot start a block unless it's a type ascription
+                                // `ident: Type`.
+                                || self.look_ahead(3, |t| !t.can_begin_type())
                         )
             )
     }
 
     fn maybe_parse_struct_expr(
         &mut self,
-        qself: Option<&ast::QSelf>,
+        qself: &Option<P<ast::QSelf>>,
         path: &ast::Path,
-        attrs: &AttrVec,
     ) -> Option<PResult<'a, P<Expr>>> {
         let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
         if struct_allowed || self.is_certainly_not_a_block() {
             if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
                 return Some(Err(err));
             }
-            let expr = self.parse_struct_expr(qself.cloned(), path.clone(), attrs.clone(), true);
+            let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
             if let (Ok(expr), false) = (&expr, struct_allowed) {
                 // This is a struct literal, but we can't accept them here.
-                self.error_struct_lit_not_allowed_here(path.span, expr.span);
+                self.dcx().emit_err(errors::StructLiteralNotAllowedHere {
+                    span: expr.span,
+                    sub: errors::StructLiteralNotAllowedHereSugg {
+                        left: path.span.shrink_to_lo(),
+                        right: expr.span.shrink_to_hi(),
+                    },
+                });
             }
             return Some(expr);
         }
         None
     }
 
-    fn error_struct_lit_not_allowed_here(&self, lo: Span, sp: Span) {
-        self.struct_span_err(sp, "struct literals are not allowed here")
-            .multipart_suggestion(
-                "surround the struct literal with parentheses",
-                vec![(lo.shrink_to_lo(), "(".to_string()), (sp.shrink_to_hi(), ")".to_string())],
-                Applicability::MachineApplicable,
-            )
-            .emit();
-    }
-
     pub(super) fn parse_struct_fields(
         &mut self,
         pth: ast::Path,
         recover: bool,
         close_delim: Delimiter,
-    ) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
-        let mut fields = Vec::new();
+    ) -> PResult<'a, (ThinVec<ExprField>, ast::StructRest, bool)> {
+        let mut fields = ThinVec::new();
         let mut base = ast::StructRest::None;
         let mut recover_async = false;
+        let in_if_guard = self.restrictions.contains(Restrictions::IN_IF_GUARD);
 
         let mut async_block_err = |e: &mut Diagnostic, span: Span| {
             recover_async = true;
-            e.span_label(span, "`async` blocks are only allowed in Rust 2018 or later");
-            e.help_use_latest_edition();
+            errors::AsyncBlockIn2015 { span }.add_to_diagnostic(e);
+            errors::HelpUseLatestEdition::new().add_to_diagnostic(e);
         };
 
         while self.token != token::CloseDelim(close_delim) {
-            if self.eat(&token::DotDot) {
+            if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) {
                 let exp_span = self.prev_token.span;
                 // We permit `.. }` on the left-hand side of a destructuring assignment.
                 if self.check(&token::CloseDelim(close_delim)) {
@@ -2974,7 +3453,7 @@ impl<'a> Parser<'a> {
                 }
                 match self.parse_expr() {
                     Ok(e) => base = ast::StructRest::Base(e),
-                    Err(mut e) if recover => {
+                    Err(e) if recover => {
                         e.emit();
                         self.recover_stmt();
                     }
@@ -2993,6 +3472,31 @@ impl<'a> Parser<'a> {
                     } else {
                         e.span_label(pth.span, "while parsing this struct");
                     }
+
+                    if let Some((ident, _)) = self.token.ident()
+                        && !self.token.is_reserved_ident()
+                        && self.look_ahead(1, |t| {
+                            AssocOp::from_token(t).is_some()
+                                || matches!(t.kind, token::OpenDelim(_))
+                                || t.kind == token::Dot
+                        })
+                    {
+                        // Looks like they tried to write a shorthand, complex expression.
+                        e.span_suggestion_verbose(
+                            self.token.span.shrink_to_lo(),
+                            "try naming a field",
+                            &format!("{ident}: "),
+                            Applicability::MaybeIncorrect,
+                        );
+                    }
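+                    // Inside a match guard, bail out instead of recovering: the braces likely
+                    // belong to the arm body rather than a struct literal.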
+                    if in_if_guard && close_delim == Delimiter::Brace {
+                        return Err(e);
+                    }
+
+                    if !recover {
+                        return Err(e);
+                    }
+
                     e.emit();
 
                     // If the next token is a comma, then try to parse
@@ -3004,11 +3508,12 @@ impl<'a> Parser<'a> {
                             break;
                         }
                     }
+
                     None
                 }
             };
 
-            let is_shorthand = parsed_field.as_ref().map_or(false, |f| f.is_shorthand);
+            let is_shorthand = parsed_field.as_ref().is_some_and(|f| f.is_shorthand);
             // A shorthand field can be turned into a full field with `:`.
             // We should point this out.
             self.check_or_expected(!is_shorthand, TokenType::Token(token::Colon));
@@ -3033,19 +3538,6 @@ impl<'a> Parser<'a> {
                                 ",",
                                 Applicability::MachineApplicable,
                             );
-                        } else if is_shorthand
-                            && (AssocOp::from_token(&self.token).is_some()
-                                || matches!(&self.token.kind, token::OpenDelim(_))
-                                || self.token.kind == token::Dot)
-                        {
-                            // Looks like they tried to write a shorthand, complex expression.
-                            let ident = parsed_field.expect("is_shorthand implies Some").ident;
-                            e.span_suggestion(
-                                ident.span.shrink_to_lo(),
-                                "try naming a field",
-                                &format!("{ident}: "),
-                                Applicability::HasPlaceholders,
-                            );
                         }
                     }
                     if !recover {
@@ -3061,11 +3553,10 @@ impl<'a> Parser<'a> {
     }
 
     /// Precondition: already parsed the '{'.
-    pub(super) fn parse_struct_expr(
+    pub(super) fn parse_expr_struct(
         &mut self,
-        qself: Option<ast::QSelf>,
+        qself: Option<P<ast::QSelf>>,
         pth: ast::Path,
-        attrs: AttrVec,
         recover: bool,
     ) -> PResult<'a, P<Expr>> {
         let lo = pth.span;
@@ -3078,7 +3569,7 @@ impl<'a> Parser<'a> {
         } else {
             ExprKind::Struct(P(ast::StructExpr { qself, path: pth, fields, rest: base }))
         };
-        Ok(self.mk_expr(span, expr, attrs))
+        Ok(self.mk_expr(span, expr))
     }
 
     /// Use in case of error after field-looking code: `S { foo: () with a }`.
@@ -3106,34 +3597,71 @@ impl<'a> Parser<'a> {
         if self.token != token::Comma {
             return;
         }
-        self.struct_span_err(
-            span.to(self.prev_token.span),
-            "cannot use a comma after the base struct",
-        )
-        .span_suggestion_short(
-            self.token.span,
-            "remove this comma",
-            "",
-            Applicability::MachineApplicable,
-        )
-        .note("the base struct must always be the last field")
-        .emit();
+        self.dcx().emit_err(errors::CommaAfterBaseStruct {
+            span: span.to(self.prev_token.span),
+            comma: self.token.span,
+        });
         self.recover_stmt();
     }
 
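+    /// Recovers `...` mistyped for `..` before a struct base expression, emitting an error.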
+    fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
+        if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim))
+            && self.eat(&token::DotDotDot)
+        {
+            // recover from typo of `...`, suggest `..`
+            let span = self.prev_token.span;
+            self.dcx().emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
+            return true;
+        }
+        false
+    }
+
+    /// Converts an ident into 'label and emits an "expected a label, found an identifier" error.
+    fn recover_ident_into_label(&mut self, ident: Ident) -> Label {
+        // Convert `label` -> `'label`,
+        // so that nameres doesn't complain about a nonexistent label
+        let label = format!("'{}", ident.name);
+        let ident = Ident { name: Symbol::intern(&label), span: ident.span };
+
+        self.dcx().emit_err(errors::ExpectedLabelFoundIdent {
+            span: ident.span,
+            start: ident.span.shrink_to_lo(),
+        });
+
+        Label { ident }
+    }
+
     /// Parses `ident (COLON expr)?`.
     fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
         let attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
 
             // Check if a colon exists one ahead. This means we're parsing a fieldname.
             let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
+            // Proactively check whether parsing the field will be incorrect.
+            let is_wrong = this.token.is_ident()
+                && !this.token.is_reserved_ident()
+                && !this.look_ahead(1, |t| {
+                    t == &token::Colon
+                        || t == &token::Eq
+                        || t == &token::Comma
+                        || t == &token::CloseDelim(Delimiter::Brace)
+                        || t == &token::CloseDelim(Delimiter::Parenthesis)
+                });
+            if is_wrong {
+                return Err(this.dcx().create_err(errors::ExpectedStructField {
+                    span: this.look_ahead(1, |t| t.span),
+                    ident_span: this.token.span,
+                    token: this.look_ahead(1, |t| t.clone()),
+                }));
+            }
             let (ident, expr) = if is_shorthand {
                 // Mimic `x: x` for the `x` field shorthand.
                 let ident = this.parse_ident_common(false)?;
                 let path = ast::Path::from_ident(ident);
-                (ident, this.mk_expr(ident.span, ExprKind::Path(None, path), AttrVec::new()))
+                (ident, this.mk_expr(ident.span, ExprKind::Path(None, path)))
             } else {
                 let ident = this.parse_field_name()?;
                 this.error_on_eq_field_init(ident);
@@ -3147,7 +3675,7 @@ impl<'a> Parser<'a> {
                     span: lo.to(expr.span),
                     expr,
                     is_shorthand,
-                    attrs: attrs.into(),
+                    attrs,
                     id: DUMMY_NODE_ID,
                     is_placeholder: false,
                 },
@@ -3163,43 +3691,18 @@ impl<'a> Parser<'a> {
             return;
         }
 
-        self.struct_span_err(self.token.span, "expected `:`, found `=`")
-            .span_suggestion(
-                field_name.span.shrink_to_hi().to(self.token.span),
-                "replace equals symbol with a colon",
-                ":",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.dcx().emit_err(errors::EqFieldInit {
+            span: self.token.span,
+            eq: field_name.span.shrink_to_hi().to(self.token.span),
+        });
     }
 
     fn err_dotdotdot_syntax(&self, span: Span) {
-        self.struct_span_err(span, "unexpected token: `...`")
-            .span_suggestion(
-                span,
-                "use `..` for an exclusive range",
-                "..",
-                Applicability::MaybeIncorrect,
-            )
-            .span_suggestion(
-                span,
-                "or `..=` for an inclusive range",
-                "..=",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+        self.dcx().emit_err(errors::DotDotDot { span });
     }
 
     fn err_larrow_operator(&self, span: Span) {
-        self.struct_span_err(span, "unexpected token: `<-`")
-            .span_suggestion(
-                span,
-                "if you meant to write a comparison against a negative value, add a \
-             space in between `<` and `-`",
-                "< -",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+        self.dcx().emit_err(errors::LeftArrowOperator { span });
     }
 
     fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
@@ -3213,7 +3716,7 @@ impl<'a> Parser<'a> {
         limits: RangeLimits,
     ) -> ExprKind {
         if end.is_none() && limits == RangeLimits::Closed {
-            self.inclusive_range_with_incorrect_end(self.prev_token.span);
+            self.inclusive_range_with_incorrect_end();
             ExprKind::Err
         } else {
             ExprKind::Range(start, end, limits)
@@ -3228,27 +3731,31 @@ impl<'a> Parser<'a> {
         ExprKind::Binary(binop, lhs, rhs)
     }
 
-    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>) -> ExprKind {
-        ExprKind::Index(expr, idx)
+    fn mk_index(&self, expr: P<Expr>, idx: P<Expr>, brackets_span: Span) -> ExprKind {
+        ExprKind::Index(expr, idx, brackets_span)
     }
 
-    fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ExprKind {
+    fn mk_call(&self, f: P<Expr>, args: ThinVec<P<Expr>>) -> ExprKind {
         ExprKind::Call(f, args)
     }
 
     fn mk_await_expr(&mut self, self_arg: P<Expr>, lo: Span) -> P<Expr> {
         let span = lo.to(self.prev_token.span);
-        let await_expr = self.mk_expr(span, ExprKind::Await(self_arg), AttrVec::new());
+        let await_expr = self.mk_expr(span, ExprKind::Await(self_arg, self.prev_token.span));
         self.recover_from_await_method_call();
         await_expr
     }
 
-    pub(crate) fn mk_expr(&self, span: Span, kind: ExprKind, attrs: AttrVec) -> P<Expr> {
+    pub(crate) fn mk_expr_with_attrs(&self, span: Span, kind: ExprKind, attrs: AttrVec) -> P<Expr> {
         P(Expr { kind, span, attrs, id: DUMMY_NODE_ID, tokens: None })
     }
 
+    pub(crate) fn mk_expr(&self, span: Span, kind: ExprKind) -> P<Expr> {
+        self.mk_expr_with_attrs(span, kind, AttrVec::new())
+    }
+
     pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> {
-        self.mk_expr(span, ExprKind::Err, AttrVec::new())
+        self.mk_expr(span, ExprKind::Err)
     }
 
     /// Create expression span ensuring the span of the parent node
@@ -3264,7 +3771,7 @@ impl<'a> Parser<'a> {
     fn collect_tokens_for_expr(
         &mut self,
         attrs: AttrWrapper,
-        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, P<Expr>>,
+        f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P<Expr>>,
     ) -> PResult<'a, P<Expr>> {
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let res = f(this, attrs)?;
@@ -3272,6 +3779,8 @@ impl<'a> Parser<'a> {
                 && this.token.kind == token::Semi
             {
                 TrailingToken::Semi
+            } else if this.token.kind == token::Gt {
+                TrailingToken::Gt
             } else {
                 // FIXME - pass this through from the place where we know
                 // we need a comma, rather than assuming that `#[attr] expr,`
@@ -3281,17 +3790,166 @@ impl<'a> Parser<'a> {
             Ok((res, trailing))
         })
     }
+}
 
-    // Calls `f` with the internal `let_expr_allowed` set to `let_expr_allowed` and then
-    // sets the internal `let_expr_allowed` back to its original value.
-    fn with_let_management<T>(
-        &mut self,
-        let_expr_allowed: bool,
-        f: impl FnOnce(&mut Self) -> T,
-    ) -> T {
-        let last_let_expr_allowed = mem::replace(&mut self.let_expr_allowed, let_expr_allowed);
-        let rslt = f(self);
-        self.let_expr_allowed = last_let_expr_allowed;
-        rslt
+/// Could this lifetime/label be an unclosed char literal? For example, `'a`
+/// could be, but `'abc` could not.
+pub(crate) fn could_be_unclosed_char_literal(ident: Ident) -> bool {
+    ident.name.as_str().starts_with('\'')
+        && unescape_char(ident.without_first_quote().name.as_str()).is_ok()
+}
+
+/// Used to forbid `let` expressions in certain syntactic locations.
+#[derive(Clone, Copy, Subdiagnostic)]
+pub(crate) enum ForbiddenLetReason {
+    /// `let` is not valid and the source environment is not important
+    OtherForbidden,
+    /// A let chain with the `||` operator
+    #[note(parse_not_supported_or)]
+    NotSupportedOr(#[primary_span] Span),
+    /// A let chain with invalid parentheses
+    ///
+    /// For example, `let 1 = 1 && (expr && expr)` is allowed
+    /// but `(let 1 = 1 && (let 1 = 1 && (let 1 = 1))) && let a = 1` is not
+    #[note(parse_not_supported_parentheses)]
+    NotSupportedParentheses(#[primary_span] Span),
+}
+
+/// Visitor to check for invalid/unstable use of `ExprKind::Let` that can't
+/// easily be caught in parsing. For example:
+///
+/// ```rust,ignore (example)
+/// // Only know that the let isn't allowed once the `||` token is reached
+/// if let Some(x) = y || true {}
+/// // Only know that the let isn't allowed once the second `=` token is reached.
+/// if let Some(x) = y && z = 1 {}
+/// ```
+struct CondChecker<'a> {
+    parser: &'a Parser<'a>,
+    forbid_let_reason: Option<ForbiddenLetReason>,
+    missing_let: Option<errors::MaybeMissingLet>,
+    comparison: Option<errors::MaybeComparison>,
+}
+
+impl<'a> CondChecker<'a> {
+    fn new(parser: &'a Parser<'a>) -> Self {
+        CondChecker { parser, forbid_let_reason: None, missing_let: None, comparison: None }
+    }
+}
+
+impl MutVisitor for CondChecker<'_> {
+    fn visit_expr(&mut self, e: &mut P<Expr>) {
+        use ForbiddenLetReason::*;
+
+        let span = e.span;
+        match e.kind {
+            ExprKind::Let(_, _, _, ref mut is_recovered @ None) => {
+                if let Some(reason) = self.forbid_let_reason {
+                    *is_recovered =
+                        Some(self.parser.dcx().emit_err(errors::ExpectedExpressionFoundLet {
+                            span,
+                            reason,
+                            missing_let: self.missing_let,
+                            comparison: self.comparison,
+                        }));
+                } else {
+                    self.parser.sess.gated_spans.gate(sym::let_chains, span);
+                }
+            }
+            ExprKind::Binary(Spanned { node: BinOpKind::And, .. }, _, _) => {
+                noop_visit_expr(e, self);
+            }
+            ExprKind::Binary(Spanned { node: BinOpKind::Or, span: or_span }, _, _)
+                if let None | Some(NotSupportedOr(_)) = self.forbid_let_reason =>
+            {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(NotSupportedOr(or_span));
+                noop_visit_expr(e, self);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Paren(ref inner)
+                if let None | Some(NotSupportedParentheses(_)) = self.forbid_let_reason =>
+            {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(NotSupportedParentheses(inner.span));
+                noop_visit_expr(e, self);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Assign(ref lhs, _, span) => {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(OtherForbidden);
+                let missing_let = self.missing_let;
+                if let ExprKind::Binary(_, _, rhs) = &lhs.kind
+                    && let ExprKind::Path(_, _)
+                    | ExprKind::Struct(_)
+                    | ExprKind::Call(_, _)
+                    | ExprKind::Array(_) = rhs.kind
+                {
+                    self.missing_let =
+                        Some(errors::MaybeMissingLet { span: rhs.span.shrink_to_lo() });
+                }
+                let comparison = self.comparison;
+                self.comparison = Some(errors::MaybeComparison { span: span.shrink_to_hi() });
+                noop_visit_expr(e, self);
+                self.forbid_let_reason = forbid_let_reason;
+                self.missing_let = missing_let;
+                self.comparison = comparison;
+            }
+            ExprKind::Unary(_, _)
+            | ExprKind::Await(_, _)
+            | ExprKind::AssignOp(_, _, _)
+            | ExprKind::Range(_, _, _)
+            | ExprKind::Try(_)
+            | ExprKind::AddrOf(_, _, _)
+            | ExprKind::Binary(_, _, _)
+            | ExprKind::Field(_, _)
+            | ExprKind::Index(_, _, _)
+            | ExprKind::Call(_, _)
+            | ExprKind::MethodCall(_)
+            | ExprKind::Tup(_)
+            | ExprKind::Paren(_) => {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(OtherForbidden);
+                noop_visit_expr(e, self);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Cast(ref mut op, _) | ExprKind::Type(ref mut op, _) => {
+                let forbid_let_reason = self.forbid_let_reason;
+                self.forbid_let_reason = Some(OtherForbidden);
+                self.visit_expr(op);
+                self.forbid_let_reason = forbid_let_reason;
+            }
+            ExprKind::Let(_, _, _, Some(_))
+            | ExprKind::Array(_)
+            | ExprKind::ConstBlock(_)
+            | ExprKind::Lit(_)
+            | ExprKind::If(_, _, _)
+            | ExprKind::While(_, _, _)
+            | ExprKind::ForLoop { .. }
+            | ExprKind::Loop(_, _, _)
+            | ExprKind::Match(_, _)
+            | ExprKind::Closure(_)
+            | ExprKind::Block(_, _)
+            | ExprKind::Gen(_, _, _)
+            | ExprKind::TryBlock(_)
+            | ExprKind::Underscore
+            | ExprKind::Path(_, _)
+            | ExprKind::Break(_, _)
+            | ExprKind::Continue(_)
+            | ExprKind::Ret(_)
+            | ExprKind::InlineAsm(_)
+            | ExprKind::OffsetOf(_, _)
+            | ExprKind::MacCall(_)
+            | ExprKind::Struct(_)
+            | ExprKind::Repeat(_, _)
+            | ExprKind::Yield(_)
+            | ExprKind::Yeet(_)
+            | ExprKind::Become(_)
+            | ExprKind::IncludedBytes(_)
+            | ExprKind::FormatArgs(_)
+            | ExprKind::Err => {
+                // These would forbid any let expressions they contain already.
+            }
+        }
     }
 }
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 1acfd93d86f..e059e707491 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -1,11 +1,25 @@
+use crate::errors::{
+    self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
+    UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
+    WhereClauseBeforeTupleStructBodySugg,
+};
+
 use super::{ForceCollect, Parser, TrailingToken};
 
+use ast::token::Delimiter;
 use rustc_ast::token;
 use rustc_ast::{
-    self as ast, Attribute, GenericBounds, GenericParam, GenericParamKind, WhereClause,
+    self as ast, AttrVec, GenericBounds, GenericParam, GenericParamKind, TyKind, WhereClause,
 };
 use rustc_errors::{Applicability, PResult};
-use rustc_span::symbol::kw;
+use rustc_span::symbol::{kw, Ident};
+use rustc_span::Span;
+use thin_vec::ThinVec;
+
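+/// Either a regular `where`-clause predicate or a tuple struct body, used when recovering
+/// a `where` clause written before the tuple struct body (see `WhereClauseBeforeTupleStructBody`).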
+enum PredicateOrStructBody {
+    Predicate(ast::WherePredicate),
+    StructBody(ThinVec<ast::FieldDef>),
+}
 
 impl<'a> Parser<'a> {
     /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
@@ -26,24 +40,62 @@ impl<'a> Parser<'a> {
     }
 
     /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`.
-    fn parse_ty_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
+    fn parse_ty_param(&mut self, preceding_attrs: AttrVec) -> PResult<'a, GenericParam> {
         let ident = self.parse_ident()?;
 
+        // We might have a typo'd `Const` that was parsed as a type parameter.
+        if self.may_recover()
+            && ident.name.as_str().to_ascii_lowercase() == kw::Const.as_str()
+            && self.check_ident()
+        // `Const` followed by IDENT
+        {
+            return self.recover_const_param_with_mistyped_const(preceding_attrs, ident);
+        }
+
         // Parse optional colon and param bounds.
         let mut colon_span = None;
         let bounds = if self.eat(&token::Colon) {
             colon_span = Some(self.prev_token.span);
-            self.parse_generic_bounds(colon_span)?
+            // recover from `impl Trait` in type param bound
+            if self.token.is_keyword(kw::Impl) {
+                let impl_span = self.token.span;
+                let snapshot = self.create_snapshot_for_diagnostic();
+                match self.parse_ty() {
+                    Ok(p) => {
+                        if let TyKind::ImplTrait(_, bounds) = &p.kind {
+                            let span = impl_span.to(self.token.span.shrink_to_lo());
+                            let mut err = self.dcx().struct_span_err(
+                                span,
+                                "expected trait bound, found `impl Trait` type",
+                            );
+                            err.span_label(span, "not a trait");
+                            if let [bound, ..] = &bounds[..] {
+                                err.span_suggestion_verbose(
+                                    impl_span.until(bound.span()),
+                                    "use the trait bounds directly",
+                                    String::new(),
+                                    Applicability::MachineApplicable,
+                                );
+                            }
+                            return Err(err);
+                        }
+                    }
+                    Err(err) => {
+                        err.cancel();
+                    }
+                }
+                self.restore_snapshot(snapshot);
+            }
+            self.parse_generic_bounds()?
         } else {
             Vec::new()
         };
 
         let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
-
         Ok(GenericParam {
             ident,
             id: ast::DUMMY_NODE_ID,
-            attrs: preceding_attrs.into(),
+            attrs: preceding_attrs,
             bounds,
             kind: GenericParamKind::Type { default },
             is_placeholder: false,
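
As a rough illustration of the `impl Trait`-in-bound recovery in this hunk, a minimal invented input and the form the machine-applicable suggestion produces (all names are placeholders):

    // Rejected: `impl Trait` is a type, not a trait bound.
    fn takes<T: impl Clone + Send>(value: T) {}
    // After the suggested fix, the bounds are used directly:
    fn takes_fixed<T: Clone + Send>(value: T) {}
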
@@ -53,7 +105,7 @@ impl<'a> Parser<'a> {
 
     pub(crate) fn parse_const_param(
         &mut self,
-        preceding_attrs: Vec<Attribute>,
+        preceding_attrs: AttrVec,
     ) -> PResult<'a, GenericParam> {
         let const_span = self.token.span;
 
@@ -68,7 +120,7 @@ impl<'a> Parser<'a> {
         Ok(GenericParam {
             ident,
             id: ast::DUMMY_NODE_ID,
-            attrs: preceding_attrs.into(),
+            attrs: preceding_attrs,
             bounds: Vec::new(),
             kind: GenericParamKind::Const { ty, kw_span: const_span, default },
             is_placeholder: false,
@@ -76,10 +128,46 @@ impl<'a> Parser<'a> {
         })
     }
 
+    pub(crate) fn recover_const_param_with_mistyped_const(
+        &mut self,
+        preceding_attrs: AttrVec,
+        mistyped_const_ident: Ident,
+    ) -> PResult<'a, GenericParam> {
+        let ident = self.parse_ident()?;
+        self.expect(&token::Colon)?;
+        let ty = self.parse_ty()?;
+
+        // Parse optional const generics default value.
+        let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
+
+        self.dcx()
+            .struct_span_err(
+                mistyped_const_ident.span,
+                format!("`const` keyword was mistyped as `{}`", mistyped_const_ident.as_str()),
+            )
+            .with_span_suggestion_verbose(
+                mistyped_const_ident.span,
+                "use the `const` keyword",
+                kw::Const,
+                Applicability::MachineApplicable,
+            )
+            .emit();
+
+        Ok(GenericParam {
+            ident,
+            id: ast::DUMMY_NODE_ID,
+            attrs: preceding_attrs,
+            bounds: Vec::new(),
+            kind: GenericParamKind::Const { ty, kw_span: mistyped_const_ident.span, default },
+            is_placeholder: false,
+            colon_span: None,
+        })
+    }
+
     /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
     /// a trailing comma and erroneous trailing attributes.
-    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
-        let mut params = Vec::new();
+    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> {
+        let mut params = ThinVec::new();
         let mut done = false;
         while !done {
             let attrs = self.parse_outer_attributes()?;
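
A minimal sketch of the typo `recover_const_param_with_mistyped_const` handles; the parameter and function names are invented:

    // `Const` is parsed as a type parameter name followed by another identifier,
    // which triggers the recovery and the "use the `const` keyword" suggestion.
    fn first_n<Const N: usize>(bytes: [u8; N]) {}
    // Intended form:
    fn first_n_fixed<const N: usize>(bytes: [u8; N]) {}
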
@@ -88,12 +176,9 @@ impl<'a> Parser<'a> {
                     if this.eat_keyword_noexpect(kw::SelfUpper) {
                         // `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
                         // as if `Self` never existed.
-                        this.struct_span_err(
-                            this.prev_token.span,
-                            "unexpected keyword `Self` in generic parameters",
-                        )
-                        .note("you cannot use `Self` as a generic parameter because it is reserved for associated items")
-                        .emit();
+                        this.dcx().emit_err(UnexpectedSelfInGenericParameters {
+                            span: this.prev_token.span,
+                        });
 
                         this.eat(&token::Comma);
                     }
@@ -106,10 +191,24 @@ impl<'a> Parser<'a> {
                         } else {
                             (None, Vec::new())
                         };
+
+                        if this.check_noexpect(&token::Eq)
+                            && this.look_ahead(1, |t| t.is_lifetime())
+                        {
+                            let lo = this.token.span;
+                            // Parse `= 'lifetime`.
+                            this.bump(); // `=`
+                            this.bump(); // `'lifetime`
+                            let span = lo.to(this.prev_token.span);
+                            this.dcx().emit_err(
+                                UnexpectedDefaultValueForLifetimeInGenericParameters { span },
+                            );
+                        }
+
                         Some(ast::GenericParam {
                             ident: lifetime.ident,
                             id: lifetime.id,
-                            attrs: attrs.into(),
+                            attrs,
                             bounds,
                             kind: ast::GenericParamKind::Lifetime,
                             is_placeholder: false,
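
A small invented example of the `= 'lifetime` form that the new `UnexpectedDefaultValueForLifetimeInGenericParameters` error rejects:

    // Error: lifetime parameters cannot take default values.
    struct Borrowed<'a = 'static> {
        text: &'a str,
    }
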
@@ -126,12 +225,9 @@ impl<'a> Parser<'a> {
                         let snapshot = this.create_snapshot_for_diagnostic();
                         match this.parse_ty_where_predicate() {
                             Ok(where_predicate) => {
-                                this.struct_span_err(
-                                    where_predicate.span(),
-                                    "bounds on associated types do not belong here",
-                                )
-                                .span_label(where_predicate.span(), "belongs in `where` clause")
-                                .emit();
+                                this.dcx().emit_err(errors::BadAssocTypeBounds {
+                                    span: where_predicate.span(),
+                                });
                                 // FIXME - try to continue parsing other generics?
                                 return Ok((None, TrailingToken::None));
                             }
@@ -146,22 +242,11 @@ impl<'a> Parser<'a> {
                         // Check for trailing attributes and stop parsing.
                         if !attrs.is_empty() {
                             if !params.is_empty() {
-                                this.struct_span_err(
-                                    attrs[0].span,
-                                    "trailing attribute after generic parameter",
-                                )
-                                .span_label(attrs[0].span, "attributes must go before parameters")
-                                .emit();
+                                this.dcx()
+                                    .emit_err(errors::AttrAfterGeneric { span: attrs[0].span });
                             } else {
-                                this.struct_span_err(
-                                    attrs[0].span,
-                                    "attribute without generic parameters",
-                                )
-                                .span_label(
-                                    attrs[0].span,
-                                    "attributes are only permitted when preceding parameters",
-                                )
-                                .emit();
+                                this.dcx()
+                                    .emit_err(errors::AttrWithoutGenerics { span: attrs[0].span });
                             }
                         }
                         return Ok((None, TrailingToken::None));
@@ -194,55 +279,67 @@ impl<'a> Parser<'a> {
         let span_lo = self.token.span;
         let (params, span) = if self.eat_lt() {
             let params = self.parse_generic_params()?;
-            self.expect_gt()?;
+            self.expect_gt_or_maybe_suggest_closing_generics(&params)?;
             (params, span_lo.to(self.prev_token.span))
         } else {
-            (vec![], self.prev_token.span.shrink_to_hi())
+            (ThinVec::new(), self.prev_token.span.shrink_to_hi())
         };
         Ok(ast::Generics {
             params,
             where_clause: WhereClause {
                 has_where_token: false,
-                predicates: Vec::new(),
+                predicates: ThinVec::new(),
                 span: self.prev_token.span.shrink_to_hi(),
             },
             span,
         })
     }
 
-    /// Parses an optional where-clause and places it in `generics`.
+    /// Parses an optional where-clause.
     ///
     /// ```ignore (only-for-syntax-highlight)
     /// where T : Trait<U, V> + 'b, 'a : 'b
     /// ```
     pub(super) fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
+        self.parse_where_clause_common(None).map(|(clause, _)| clause)
+    }
+
+    pub(super) fn parse_struct_where_clause(
+        &mut self,
+        struct_name: Ident,
+        body_insertion_point: Span,
+    ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
+        self.parse_where_clause_common(Some((struct_name, body_insertion_point)))
+    }
+
+    fn parse_where_clause_common(
+        &mut self,
+        struct_: Option<(Ident, Span)>,
+    ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
         let mut where_clause = WhereClause {
             has_where_token: false,
-            predicates: Vec::new(),
+            predicates: ThinVec::new(),
             span: self.prev_token.span.shrink_to_hi(),
         };
+        let mut tuple_struct_body = None;
 
         if !self.eat_keyword(kw::Where) {
-            return Ok(where_clause);
+            return Ok((where_clause, None));
         }
         where_clause.has_where_token = true;
-        let lo = self.prev_token.span;
+        let where_lo = self.prev_token.span;
 
         // We are considering adding generics to the `where` keyword as an alternative higher-rank
         // parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
         // change, we parse those generics now but report an error.
         if self.choose_generics_over_qpath(0) {
             let generics = self.parse_generics()?;
-            self.struct_span_err(
-                generics.span,
-                "generic parameters on `where` clauses are reserved for future use",
-            )
-            .span_label(generics.span, "currently unsupported")
-            .emit();
+            self.dcx().emit_err(errors::WhereOnGenerics { span: generics.span });
         }
 
         loop {
-            let lo = self.token.span;
+            let where_sp = where_lo.to(self.prev_token.span);
+            let pred_lo = self.token.span;
             if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                 let lifetime = self.expect_lifetime();
                 // Bounds starting with a colon are mandatory, but possibly empty.
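
Two invented inputs for the diagnostics referenced in this hunk: generics on `where` (reserved syntax) and a duplicate `where` clause:

    // `WhereOnGenerics`: parsed for forward compatibility, then rejected.
    fn apply<F>(callback: F) where<'a> F: Fn(&'a str) {}
    // `MultipleWhereClauses`: the suggestion joins the two clauses with a comma.
    struct Config<T> where T: Clone, where T: Send { value: T }
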
@@ -250,13 +347,21 @@ impl<'a> Parser<'a> {
                 let bounds = self.parse_lt_param_bounds();
                 where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
                     ast::WhereRegionPredicate {
-                        span: lo.to(self.prev_token.span),
+                        span: pred_lo.to(self.prev_token.span),
                         lifetime,
                         bounds,
                     },
                 ));
             } else if self.check_type() {
-                where_clause.predicates.push(self.parse_ty_where_predicate()?);
+                match self.parse_ty_where_predicate_or_recover_tuple_struct_body(
+                    struct_, pred_lo, where_sp,
+                )? {
+                    PredicateOrStructBody::Predicate(pred) => where_clause.predicates.push(pred),
+                    PredicateOrStructBody::StructBody(body) => {
+                        tuple_struct_body = Some(body);
+                        break;
+                    }
+                }
             } else {
                 break;
             }
@@ -265,23 +370,82 @@ impl<'a> Parser<'a> {
             let ate_comma = self.eat(&token::Comma);
 
             if self.eat_keyword_noexpect(kw::Where) {
-                let msg = "cannot define duplicate `where` clauses on an item";
-                let mut err = self.struct_span_err(self.token.span, msg);
-                err.span_label(lo, "previous `where` clause starts here");
-                err.span_suggestion_verbose(
-                    prev_token.shrink_to_hi().to(self.prev_token.span),
-                    "consider joining the two `where` clauses into one",
-                    ",",
-                    Applicability::MaybeIncorrect,
-                );
-                err.emit();
+                self.dcx().emit_err(MultipleWhereClauses {
+                    span: self.token.span,
+                    previous: pred_lo,
+                    between: prev_token.shrink_to_hi().to(self.prev_token.span),
+                });
             } else if !ate_comma {
                 break;
             }
         }
 
-        where_clause.span = lo.to(self.prev_token.span);
-        Ok(where_clause)
+        where_clause.span = where_lo.to(self.prev_token.span);
+        Ok((where_clause, tuple_struct_body))
+    }
+
+    fn parse_ty_where_predicate_or_recover_tuple_struct_body(
+        &mut self,
+        struct_: Option<(Ident, Span)>,
+        pred_lo: Span,
+        where_sp: Span,
+    ) -> PResult<'a, PredicateOrStructBody> {
+        let mut snapshot = None;
+
+        if let Some(struct_) = struct_
+            && self.may_recover()
+            && self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+        {
+            snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
+        };
+
+        match self.parse_ty_where_predicate() {
+            Ok(pred) => Ok(PredicateOrStructBody::Predicate(pred)),
+            Err(type_err) => {
+                let Some(((struct_name, body_insertion_point), mut snapshot)) = snapshot else {
+                    return Err(type_err);
+                };
+
+                // Check if we might have encountered an out of place tuple struct body.
+                match snapshot.parse_tuple_struct_body() {
+                    // Since we don't know the exact reason why we failed to parse the
+                    // predicate (we might have stumbled upon something bogus like `(T): ?`),
+                    // employ a simple heuristic to weed out some pathological cases:
+                    // Look for a semicolon (strong indicator) or anything that might mark
+                    // the end of the item (weak indicator) following the body.
+                    Ok(body)
+                        if matches!(snapshot.token.kind, token::Semi | token::Eof)
+                            || snapshot.token.can_begin_item() =>
+                    {
+                        type_err.cancel();
+
+                        let body_sp = pred_lo.to(snapshot.prev_token.span);
+                        let map = self.sess.source_map();
+
+                        self.dcx().emit_err(WhereClauseBeforeTupleStructBody {
+                            span: where_sp,
+                            name: struct_name.span,
+                            body: body_sp,
+                            sugg: map.span_to_snippet(body_sp).ok().map(|body| {
+                                WhereClauseBeforeTupleStructBodySugg {
+                                    left: body_insertion_point.shrink_to_hi(),
+                                    snippet: body,
+                                    right: map.end_point(where_sp).to(body_sp),
+                                }
+                            }),
+                        });
+
+                        self.restore_snapshot(snapshot);
+                        Ok(PredicateOrStructBody::StructBody(body))
+                    }
+                    Ok(_) => Err(type_err),
+                    Err(body_err) => {
+                        body_err.cancel();
+                        Err(type_err)
+                    }
+                }
+            }
+        }
     }
 
     fn parse_ty_where_predicate(&mut self) -> PResult<'a, ast::WherePredicate> {
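
A sketch of the out-of-place tuple struct body detected by the recovery above, with an invented struct name; the suggestion moves the parenthesized body before the `where` clause:

    // Recovered: the body follows the `where` clause and ends in `;`.
    struct Pair<T> where T: Copy (T, T);
    // Suggested rewrite:
    struct Pair<T>(T, T) where T: Copy;
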
@@ -299,7 +463,7 @@ impl<'a> Parser<'a> {
         // or with mandatory equality sign and the second type.
         let ty = self.parse_ty_for_where_clause()?;
         if self.eat(&token::Colon) {
-            let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?;
+            let bounds = self.parse_generic_bounds()?;
             Ok(ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
                 span: lo.to(self.prev_token.span),
                 bound_generic_params: lifetime_defs,
@@ -314,7 +478,6 @@ impl<'a> Parser<'a> {
                 span: lo.to(self.prev_token.span),
                 lhs_ty: ty,
                 rhs_ty,
-                id: ast::DUMMY_NODE_ID,
             }))
         } else {
             self.maybe_recover_bounds_doubled_colon(&ty)?;
@@ -334,6 +497,8 @@ impl<'a> Parser<'a> {
         //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
         //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
         //     `<` const                - generic const parameter
+        //     `<` IDENT `?`            - RECOVERY for `impl<T ?Bound` missing a `:`, meant to
+        //                                avoid the `T?` to `Option<T>` recovery for types.
         // The only truly ambiguous case is
         //     `<` IDENT `>` `::` IDENT ...
         // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
@@ -344,6 +509,9 @@ impl<'a> Parser<'a> {
                 || self.look_ahead(start + 1, |t| t.is_lifetime() || t.is_ident())
                     && self.look_ahead(start + 2, |t| {
                         matches!(t.kind, token::Gt | token::Comma | token::Colon | token::Eq)
+                        // Recovery-only branch -- this could be removed,
+                        // since it only affects diagnostics currently.
+                            || matches!(t.kind, token::Question)
                     })
                 || self.is_keyword_ahead(start + 1, &[kw::Const]))
     }
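
Illustrations of the disambiguation described in the comments above; `Wrapper` is a placeholder name:

    // The ambiguous `<` IDENT `>` `::` case is resolved in favor of generic parameters:
    impl<T> ::absolute::Path<T> {}
    // Recovery-only lookahead: a missing `:` before `?Sized` is still treated as generics,
    // so `T?` is not recovered as `Option<T>` and the missing colon can be diagnosed.
    impl<T ?Sized> Wrapper<T> {}
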
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 87bc0d9762e..8050b34956c 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,28 +1,24 @@
-use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
+use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
-
+use crate::errors::{self, MacroExpandsToAdtField};
+use crate::fluent_generated as fluent;
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
-use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
-use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
-use rustc_ast::{BindingMode, Block, FnDecl, FnSig, Param, SelfKind};
-use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData};
-use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
-use rustc_ast::{MacArgs, MacCall, MacDelimiter};
+use rustc_ast::util::case::Case;
+use rustc_ast::{self as ast};
 use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, PResult, StashKey};
+use rustc_errors::{codes::*, struct_span_code_err, Applicability, PResult, StashKey};
+use rustc_span::edit_distance::edit_distance;
 use rustc_span::edition::Edition;
-use rustc_span::lev_distance::lev_distance;
-use rustc_span::source_map::{self, Span};
+use rustc_span::source_map;
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-use rustc_span::DUMMY_SP;
-
-use std::convert::TryFrom;
+use rustc_span::{Span, DUMMY_SP};
+use std::fmt::Write;
 use std::mem;
-use tracing::debug;
+use thin_vec::{thin_vec, ThinVec};
 
 impl<'a> Parser<'a> {
     /// Parses a source module as a crate. This is the main entry point for the parser.
@@ -32,17 +28,17 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
-    fn parse_item_mod(&mut self, attrs: &mut Vec<Attribute>) -> PResult<'a, ItemInfo> {
-        let unsafety = self.parse_unsafety();
+    fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
+        let unsafety = self.parse_unsafety(Case::Sensitive);
         self.expect_keyword(kw::Mod)?;
         let id = self.parse_ident()?;
         let mod_kind = if self.eat(&token::Semi) {
             ModKind::Unloaded
         } else {
             self.expect(&token::OpenDelim(Delimiter::Brace))?;
-            let (mut inner_attrs, items, inner_span) =
+            let (inner_attrs, items, inner_span) =
                 self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
-            attrs.append(&mut inner_attrs);
+            attrs.extend(inner_attrs);
             ModKind::Loaded(items, Inline::Yes, inner_span)
         };
         Ok((id, ItemKind::Mod(unsafety, mod_kind)))
@@ -52,12 +48,12 @@ impl<'a> Parser<'a> {
     pub fn parse_mod(
         &mut self,
         term: &TokenKind,
-    ) -> PResult<'a, (Vec<Attribute>, Vec<P<Item>>, ModSpans)> {
+    ) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
         let lo = self.token.span;
         let attrs = self.parse_inner_attributes()?;
 
         let post_attr_lo = self.token.span;
-        let mut items = vec![];
+        let mut items = ThinVec::new();
         while let Some(item) = self.parse_item(ForceCollect::No)? {
             items.push(item);
             self.maybe_consume_incorrect_semicolon(&items);
@@ -66,9 +62,18 @@ impl<'a> Parser<'a> {
         if !self.eat(term) {
             let token_str = super::token_descr(&self.token);
             if !self.maybe_consume_incorrect_semicolon(&items) {
-                let msg = &format!("expected item, found {token_str}");
-                let mut err = self.struct_span_err(self.token.span, msg);
-                err.span_label(self.token.span, "expected item");
+                let msg = format!("expected item, found {token_str}");
+                let mut err = self.dcx().struct_span_err(self.token.span, msg);
+                let span = self.token.span;
+                if self.is_kw_followed_by_ident(kw::Let) {
+                    err.span_label(
+                        span,
+                        "consider using `const` or `static` instead of `let` for global variables",
+                    );
+                } else {
+                    err.span_label(span, "expected item")
+                        .note("for a full list of items that can appear in modules, see <https://doc.rust-lang.org/reference/items.html>");
+                };
                 return Err(err);
             }
         }
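
An invented example of the new `let`-at-item-position hint added above:

    // "expected item, found keyword `let`": the label points to `const`/`static` instead.
    let counter: u32 = 0;
    // Valid at module scope:
    static COUNTER: u32 = 0;
    const LIMIT: u32 = 100;
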
@@ -92,7 +97,9 @@ impl<'a> Parser<'a> {
         fn_parse_mode: FnParseMode,
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Item>> {
+        self.recover_diff_marker();
         let attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.parse_item_common(attrs, true, false, fn_parse_mode, force_collect)
     }
 
@@ -106,7 +113,9 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, Option<Item>> {
         // Don't use `maybe_whole` so that we have precise control
         // over when we bump the parser
-        if let token::Interpolated(nt) = &self.token.kind && let token::NtItem(item) = &**nt {
+        if let token::Interpolated(nt) = &self.token.kind
+            && let token::NtItem(item) = &nt.0
+        {
             let mut item = item.clone();
             self.bump();
 
@@ -114,22 +123,19 @@ impl<'a> Parser<'a> {
             return Ok(Some(item.into_inner()));
         };
 
-        let mut unclosed_delims = vec![];
         let item =
             self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
                 let item =
                     this.parse_item_common_(attrs, mac_allowed, attrs_allowed, fn_parse_mode);
-                unclosed_delims.append(&mut this.unclosed_delims);
                 Ok((item?, TrailingToken::None))
             })?;
 
-        self.unclosed_delims.append(&mut unclosed_delims);
         Ok(item)
     }
 
     fn parse_item_common_(
         &mut self,
-        mut attrs: Vec<Attribute>,
+        mut attrs: AttrVec,
         mac_allowed: bool,
         attrs_allowed: bool,
         fn_parse_mode: FnParseMode,
@@ -137,8 +143,15 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         let vis = self.parse_visibility(FollowedByType::No)?;
         let mut def = self.parse_defaultness();
-        let kind =
-            self.parse_item_kind(&mut attrs, mac_allowed, lo, &vis, &mut def, fn_parse_mode)?;
+        let kind = self.parse_item_kind(
+            &mut attrs,
+            mac_allowed,
+            lo,
+            &vis,
+            &mut def,
+            fn_parse_mode,
+            Case::Sensitive,
+        )?;
         if let Some((ident, kind)) = kind {
             self.error_on_unconsumed_default(def, &kind);
             let span = lo.to(self.prev_token.span);
@@ -148,67 +161,52 @@ impl<'a> Parser<'a> {
         }
 
         // At this point, we have failed to parse an item.
-        self.error_on_unmatched_vis(&vis);
-        self.error_on_unmatched_defaultness(def);
-        if !attrs_allowed {
-            self.recover_attrs_no_item(&attrs)?;
+        if !matches!(vis.kind, VisibilityKind::Inherited) {
+            self.dcx().emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
         }
-        Ok(None)
-    }
 
-    /// Error in-case a non-inherited visibility was parsed but no item followed.
-    fn error_on_unmatched_vis(&self, vis: &Visibility) {
-        if let VisibilityKind::Inherited = vis.kind {
-            return;
+        if let Defaultness::Default(span) = def {
+            self.dcx().emit_err(errors::DefaultNotFollowedByItem { span });
         }
-        let vs = pprust::vis_to_string(&vis);
-        let vs = vs.trim_end();
-        self.struct_span_err(vis.span, &format!("visibility `{vs}` is not followed by an item"))
-            .span_label(vis.span, "the visibility")
-            .help(&format!("you likely meant to define an item, e.g., `{vs} fn foo() {{}}`"))
-            .emit();
-    }
-
-    /// Error in-case a `default` was parsed but no item followed.
-    fn error_on_unmatched_defaultness(&self, def: Defaultness) {
-        if let Defaultness::Default(sp) = def {
-            self.struct_span_err(sp, "`default` is not followed by an item")
-                .span_label(sp, "the `default` qualifier")
-                .note("only `fn`, `const`, `type`, or `impl` items may be prefixed by `default`")
-                .emit();
+
+        if !attrs_allowed {
+            self.recover_attrs_no_item(&attrs)?;
         }
+        Ok(None)
     }
 
     /// Error in case `default` was parsed in an inappropriate context.
     fn error_on_unconsumed_default(&self, def: Defaultness, kind: &ItemKind) {
         if let Defaultness::Default(span) = def {
-            let msg = format!("{} {} cannot be `default`", kind.article(), kind.descr());
-            self.struct_span_err(span, &msg)
-                .span_label(span, "`default` because of this")
-                .note("only associated `fn`, `const`, and `type` items can be `default`")
-                .emit();
+            self.dcx().emit_err(errors::InappropriateDefault {
+                span,
+                article: kind.article(),
+                descr: kind.descr(),
+            });
         }
     }
 
     /// Parses one of the items allowed by the flags.
     fn parse_item_kind(
         &mut self,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         macros_allowed: bool,
         lo: Span,
         vis: &Visibility,
         def: &mut Defaultness,
         fn_parse_mode: FnParseMode,
+        case: Case,
     ) -> PResult<'a, Option<ItemInfo>> {
         let def_final = def == &Defaultness::Final;
-        let mut def = || mem::replace(def, Defaultness::Final);
+        let mut def_ = || mem::replace(def, Defaultness::Final);
 
-        let info = if self.eat_keyword(kw::Use) {
+        let info = if self.eat_keyword_case(kw::Use, case) {
             self.parse_use_item()?
-        } else if self.check_fn_front_matter(def_final) {
+        } else if self.check_fn_front_matter(def_final, case) {
             // FUNCTION ITEM
-            let (ident, sig, generics, body) = self.parse_fn(attrs, fn_parse_mode, lo, vis)?;
-            (ident, ItemKind::Fn(Box::new(Fn { defaultness: def(), sig, generics, body })))
+            let (ident, sig, generics, body) =
+                self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
+            (ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
         } else if self.eat_keyword(kw::Extern) {
             if self.eat_keyword(kw::Crate) {
                 // EXTERN CRATE
@@ -219,24 +217,32 @@ impl<'a> Parser<'a> {
             }
         } else if self.is_unsafe_foreign_mod() {
             // EXTERN BLOCK
-            let unsafety = self.parse_unsafety();
+            let unsafety = self.parse_unsafety(Case::Sensitive);
             self.expect_keyword(kw::Extern)?;
             self.parse_item_foreign_mod(attrs, unsafety)?
         } else if self.is_static_global() {
             // STATIC ITEM
             self.bump(); // `static`
-            let m = self.parse_mutability();
-            let (ident, ty, expr) = self.parse_item_global(Some(m))?;
-            (ident, ItemKind::Static(ty, m, expr))
-        } else if let Const::Yes(const_span) = self.parse_constness() {
+            let mutability = self.parse_mutability();
+            let (ident, item) = self.parse_static_item(mutability)?;
+            (ident, ItemKind::Static(Box::new(item)))
+        } else if let Const::Yes(const_span) = self.parse_constness(Case::Sensitive) {
             // CONST ITEM
             if self.token.is_keyword(kw::Impl) {
                 // recover from `const impl`, suggest `impl const`
-                self.recover_const_impl(const_span, attrs, def())?
+                self.recover_const_impl(const_span, attrs, def_())?
             } else {
                 self.recover_const_mut(const_span);
-                let (ident, ty, expr) = self.parse_item_global(None)?;
-                (ident, ItemKind::Const(def(), ty, expr))
+                let (ident, generics, ty, expr) = self.parse_const_item()?;
+                (
+                    ident,
+                    ItemKind::Const(Box::new(ConstItem {
+                        defaultness: def_(),
+                        generics,
+                        ty,
+                        expr,
+                    })),
+                )
             }
         } else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
             // TRAIT ITEM
@@ -245,7 +251,9 @@ impl<'a> Parser<'a> {
             || self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Impl])
         {
             // IMPL ITEM
-            self.parse_item_impl(attrs, def())?
+            self.parse_item_impl(attrs, def_())?
+        } else if self.is_reuse_path_item() {
+            self.parse_item_delegation()?
         } else if self.check_keyword(kw::Mod)
             || self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Mod])
         {
@@ -253,7 +261,7 @@ impl<'a> Parser<'a> {
             self.parse_item_mod(attrs)?
         } else if self.eat_keyword(kw::Type) {
             // TYPE ITEM
-            self.parse_type_alias(def())?
+            self.parse_type_alias(def_())?
         } else if self.eat_keyword(kw::Enum) {
             // ENUM ITEM
             self.parse_item_enum()?
@@ -264,6 +272,9 @@ impl<'a> Parser<'a> {
             // UNION ITEM
             self.bump(); // `union`
             self.parse_item_union()?
+        } else if self.is_builtin() {
+            // BUILTIN# ITEM
+            return self.parse_item_builtin();
         } else if self.eat_keyword(kw::Macro) {
             // MACROS 2.0 ITEM
             self.parse_item_decl_macro(lo)?
@@ -271,15 +282,31 @@ impl<'a> Parser<'a> {
             // MACRO_RULES ITEM
             self.parse_item_macro_rules(vis, has_bang)?
         } else if self.isnt_macro_invocation()
-            && (self.token.is_ident_named(sym::import) || self.token.is_ident_named(sym::using))
+            && (self.token.is_ident_named(sym::import)
+                || self.token.is_ident_named(sym::using)
+                || self.token.is_ident_named(sym::include)
+                || self.token.is_ident_named(sym::require))
         {
             return self.recover_import_as_use();
         } else if self.isnt_macro_invocation() && vis.kind.is_pub() {
             self.recover_missing_kw_before_item()?;
             return Ok(None);
+        } else if self.isnt_macro_invocation() && case == Case::Sensitive {
+            _ = def_;
+
+            // Recover wrong cased keywords
+            return self.parse_item_kind(
+                attrs,
+                macros_allowed,
+                lo,
+                vis,
+                def,
+                fn_parse_mode,
+                Case::Insensitive,
+            );
         } else if macros_allowed && self.check_path() {
             // MACRO INVOCATION ITEM
-            (Ident::empty(), ItemKind::MacCall(self.parse_item_macro(vis)?))
+            (Ident::empty(), ItemKind::MacCall(P(self.parse_item_macro(vis)?)))
         } else {
             return Ok(None);
         };
@@ -293,14 +320,7 @@ impl<'a> Parser<'a> {
         self.bump();
         match self.parse_use_item() {
             Ok(u) => {
-                self.struct_span_err(span, format!("expected item, found {token_name}"))
-                    .span_suggestion_short(
-                        span,
-                        "items are imported using the `use` keyword",
-                        "use",
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                self.dcx().emit_err(errors::RecoverImportAsUse { span, token_name });
                 Ok(Some(u))
             }
             Err(e) => {
@@ -331,11 +351,18 @@ impl<'a> Parser<'a> {
     /// When parsing a statement, would the start of a path be an item?
     pub(super) fn is_path_start_item(&mut self) -> bool {
         self.is_kw_followed_by_ident(kw::Union) // no: `union::b`, yes: `union U { .. }`
+        || self.is_reuse_path_item()
         || self.check_auto_or_unsafe_trait_item() // no: `auto::b`, yes: `auto trait X { .. }`
         || self.is_async_fn() // no(2015): `async::b`, yes: `async fn`
         || matches!(self.is_macro_rules_item(), IsMacroRulesItem::Yes{..}) // no: `macro_rules::b`, yes: `macro_rules! mac`
     }
 
+    fn is_reuse_path_item(&mut self) -> bool {
+        // no: `reuse ::path` for compatibility reasons with macro invocations
+        self.token.is_keyword(kw::Reuse)
+            && self.look_ahead(1, |t| t.is_path_start() && t.kind != token::ModSep)
+    }
+
     /// Are we sure this could not possibly be a macro invocation?
     fn isnt_macro_invocation(&mut self) -> bool {
         self.check_ident() && self.look_ahead(1, |t| *t != token::Not && *t != token::ModSep)
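
A rough sketch of what `is_reuse_path_item` accepts (the delegation syntax itself is parsed later in this patch and gated behind `fn_delegation`); the paths are placeholders:

    // Recognized as a delegation item:
    reuse helpers::compute;
    // An optional block body may replace the semicolon:
    reuse helpers::compute_default { /* body */ }
    // `reuse ::path` (leading `::`) is deliberately not treated as an item start,
    // keeping macro invocations working, per the comment above.
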
@@ -351,108 +378,98 @@ impl<'a> Parser<'a> {
         let sp = self.prev_token.span.between(self.token.span);
         let full_sp = self.prev_token.span.to(self.token.span);
         let ident_sp = self.token.span;
-        if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) {
+
+        let ident = if self.look_ahead(1, |t| {
+            [
+                token::Lt,
+                token::OpenDelim(Delimiter::Brace),
+                token::OpenDelim(Delimiter::Parenthesis),
+            ]
+            .contains(&t.kind)
+        }) {
+            self.parse_ident().unwrap()
+        } else {
+            return Ok(());
+        };
+
+        let mut found_generics = false;
+        if self.check(&token::Lt) {
+            found_generics = true;
+            self.eat_to_tokens(&[&token::Gt]);
+            self.bump(); // `>`
+        }
+
+        let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
             // possible public struct definition where `struct` was forgotten
-            let ident = self.parse_ident().unwrap();
-            let msg = format!("add `struct` here to parse `{ident}` as a public struct");
-            let mut err = self.struct_span_err(sp, "missing `struct` for struct definition");
-            err.span_suggestion_short(
-                sp,
-                &msg,
-                " struct ",
-                Applicability::MaybeIncorrect, // speculative
-            );
-            Err(err)
-        } else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) {
-            let ident = self.parse_ident().unwrap();
+            Some(errors::MissingKeywordForItemDefinition::Struct { span: sp, ident })
+        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+            // possible public function or tuple struct definition where `fn`/`struct` was
+            // forgotten
             self.bump(); // `(`
-            let kw_name = self.recover_first_param();
+            let is_method = self.recover_self_param();
+
             self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
-            let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
-                self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
-                self.bump(); // `{`
-                ("fn", kw_name, false)
-            } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-                self.bump(); // `{`
-                ("fn", kw_name, false)
-            } else if self.check(&token::Colon) {
-                let kw = "struct";
-                (kw, kw, false)
-            } else {
-                ("fn` or `struct", "function or struct", true)
-            };
 
-            let msg = format!("missing `{kw}` for {kw_name} definition");
-            let mut err = self.struct_span_err(sp, &msg);
-            if !ambiguous {
-                self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
-                let suggestion =
-                    format!("add `{kw}` here to parse `{ident}` as a public {kw_name}");
-                err.span_suggestion_short(
-                    sp,
-                    &suggestion,
-                    format!(" {kw} "),
-                    Applicability::MachineApplicable,
-                );
-            } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
-                err.span_suggestion(
-                    full_sp,
-                    "if you meant to call a macro, try",
-                    format!("{}!", snippet),
-                    // this is the `ambiguous` conditional branch
-                    Applicability::MaybeIncorrect,
-                );
-            } else {
-                err.help(
-                    "if you meant to call a macro, remove the `pub` \
-                              and add a trailing `!` after the identifier",
-                );
-            }
-            Err(err)
-        } else if self.look_ahead(1, |t| *t == token::Lt) {
-            let ident = self.parse_ident().unwrap();
-            self.eat_to_tokens(&[&token::Gt]);
-            self.bump(); // `>`
-            let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) {
-                ("fn", self.recover_first_param(), false)
-            } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-                ("struct", "struct", false)
-            } else {
-                ("fn` or `struct", "function or struct", true)
-            };
-            let msg = format!("missing `{kw}` for {kw_name} definition");
-            let mut err = self.struct_span_err(sp, &msg);
-            if !ambiguous {
-                err.span_suggestion_short(
-                    sp,
-                    &format!("add `{kw}` here to parse `{ident}` as a public {kw_name}"),
-                    format!(" {} ", kw),
-                    Applicability::MachineApplicable,
-                );
-            }
-            Err(err)
+            let err =
+                if self.check(&token::RArrow) || self.check(&token::OpenDelim(Delimiter::Brace)) {
+                    self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
+                    self.bump(); // `{`
+                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                    if is_method {
+                        errors::MissingKeywordForItemDefinition::Method { span: sp, ident }
+                    } else {
+                        errors::MissingKeywordForItemDefinition::Function { span: sp, ident }
+                    }
+                } else if self.check(&token::Semi) {
+                    errors::MissingKeywordForItemDefinition::Struct { span: sp, ident }
+                } else {
+                    errors::MissingKeywordForItemDefinition::Ambiguous {
+                        span: sp,
+                        subdiag: if found_generics {
+                            None
+                        } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
+                            Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
+                                span: full_sp,
+                                snippet,
+                            })
+                        } else {
+                            Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
+                        },
+                    }
+                };
+            Some(err)
+        } else if found_generics {
+            Some(errors::MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None })
         } else {
-            Ok(())
-        }
+            None
+        };
+
+        if let Some(err) = err { Err(self.dcx().create_err(err)) } else { Ok(()) }
+    }
+
+    fn parse_item_builtin(&mut self) -> PResult<'a, Option<ItemInfo>> {
+        // To be expanded
+        return Ok(None);
     }
 
     /// Parses an item macro, e.g., `item!();`.
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
         self.expect(&token::Not)?; // `!`
-        match self.parse_mac_args() {
+        match self.parse_delim_args() {
             // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
             Ok(args) => {
                 self.eat_semi_for_macro_if_needed(&args);
                 self.complain_if_pub_macro(vis, false);
-                Ok(MacCall { path, args, prior_type_ascription: self.last_type_ascription })
+                Ok(MacCall { path, args })
             }
 
             Err(mut err) => {
                 // Maybe the user misspelled `macro_rules` (issue #91227)
                 if self.token.is_ident()
                     && path.segments.len() == 1
-                    && lev_distance("macro_rules", &path.segments[0].ident.to_string(), 3).is_some()
+                    && edit_distance("macro_rules", &path.segments[0].ident.to_string(), 2)
+                        .is_some()
                 {
                     err.span_suggestion(
                         path.span,
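
Invented `pub` items exercising the reworked `recover_missing_kw_before_item` branches earlier in this hunk:

    // Missing `struct` (identifier followed by `{ ... }` or by `( ... );`):
    pub Point { x: i32, y: i32 }
    pub Pair(i32, i32);
    // Missing `fn` (the parameter list is followed by `->` or a block):
    pub area(width: i32, height: i32) -> i32 { width * height }
    // A `self` parameter selects the `Method` variant of the diagnostic:
    pub len(&self) -> usize { 0 }
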
@@ -476,19 +493,16 @@ impl<'a> Parser<'a> {
         } else {
             "expected item after attributes"
         };
-        let mut err = self.struct_span_err(end.span, msg);
+        let mut err = self.dcx().struct_span_err(end.span, msg);
         if end.is_doc_comment() {
             err.span_label(end.span, "this doc comment doesn't document anything");
-        }
-        if end.meta_kind().is_some() {
-            if self.token.kind == TokenKind::Semi {
-                err.span_suggestion_verbose(
-                    self.token.span,
-                    "consider removing this semicolon",
-                    "",
-                    Applicability::MaybeIncorrect,
-                );
-            }
+        } else if self.token.kind == TokenKind::Semi {
+            err.span_suggestion_verbose(
+                self.token.span,
+                "consider removing this semicolon",
+                "",
+                Applicability::MaybeIncorrect,
+            );
         }
         if let [.., penultimate, _] = attrs {
             err.span_label(start.span.to(penultimate.span), "other attributes here");
@@ -526,10 +540,10 @@ impl<'a> Parser<'a> {
     /// ```
     fn parse_item_impl(
         &mut self,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         defaultness: Defaultness,
     ) -> PResult<'a, ItemInfo> {
-        let unsafety = self.parse_unsafety();
+        let unsafety = self.parse_unsafety(Case::Sensitive);
         self.expect_keyword(kw::Impl)?;
 
         // First, parse generic parameters if necessary.
@@ -543,7 +557,7 @@ impl<'a> Parser<'a> {
             generics
         };
 
-        let constness = self.parse_constness();
+        let constness = self.parse_constness(Case::Sensitive);
         if let Const::Yes(span) = constness {
             self.sess.gated_spans.gate(sym::const_trait_impl, span);
         }
@@ -555,20 +569,11 @@ impl<'a> Parser<'a> {
         let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
         {
             let span = self.prev_token.span.between(self.token.span);
-            self.struct_span_err(span, "missing trait in a trait impl")
-                .span_suggestion(
-                    span,
-                    "add a trait here",
-                    " Trait ",
-                    Applicability::HasPlaceholders,
-                )
-                .span_suggestion(
-                    span.to(self.token.span),
-                    "for an inherent impl, drop this `for`",
-                    "",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            self.dcx().emit_err(errors::MissingTraitInTraitImpl {
+                span,
+                for_span: span.to(self.token.span),
+            });
+
             P(Ty {
                 kind: TyKind::Path(None, err_path(span)),
                 span,
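
An invented input for the `MissingTraitInTraitImpl` error emitted above, together with the two directions the suggestions point in:

    // Error: missing trait in a trait impl.
    impl for Widget {}
    // Either name the trait...
    impl Default for Widget { /* ... */ }
    // ...or drop `for` to make it an inherent impl.
    impl Widget {}
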
@@ -601,22 +606,30 @@ impl<'a> Parser<'a> {
             Some(ty_second) => {
                 // impl Trait for Type
                 if !has_for {
-                    self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
-                        .span_suggestion_short(
-                            missing_for_span,
-                            "add `for` here",
-                            " for ",
-                            Applicability::MachineApplicable,
-                        )
-                        .emit();
+                    self.dcx().emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
                 }
 
                 let ty_first = ty_first.into_inner();
                 let path = match ty_first.kind {
                     // This notably includes paths passed through `ty` macro fragments (#46438).
                     TyKind::Path(None, path) => path,
-                    _ => {
-                        self.struct_span_err(ty_first.span, "expected a trait, found type").emit();
+                    other => {
+                        if let TyKind::ImplTrait(_, bounds) = other
+                            && let [bound] = bounds.as_slice()
+                        {
+                            // Suggest removing extra `impl` keyword:
+                            // `impl<T: Default> impl Default for Wrapper<T>`
+                            //                   ^^^^^
+                            let extra_impl_kw = ty_first.span.until(bound.span());
+                            self.dcx().emit_err(errors::ExtraImplKeywordInTraitImpl {
+                                extra_impl_kw,
+                                impl_trait_span: ty_first.span,
+                            });
+                        } else {
+                            self.dcx().emit_err(errors::ExpectedTraitInTraitImplFoundType {
+                                span: ty_first.span,
+                            });
+                        }
                         err_path(ty_first.span)
                     }
                 };
@@ -651,38 +664,125 @@ impl<'a> Parser<'a> {
         Ok((Ident::empty(), item_kind))
     }
 
+    fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> {
+        let span = self.token.span;
+        self.expect_keyword(kw::Reuse)?;
+
+        let (qself, path) = if self.eat_lt() {
+            let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+            (Some(qself), path)
+        } else {
+            (None, self.parse_path(PathStyle::Expr)?)
+        };
+
+        let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
+            Some(self.parse_block()?)
+        } else {
+            self.expect(&token::Semi)?;
+            None
+        };
+        let span = span.to(self.prev_token.span);
+        self.sess.gated_spans.gate(sym::fn_delegation, span);
+
+        let ident = path.segments.last().map(|seg| seg.ident).unwrap_or(Ident::empty());
+        Ok((
+            ident,
+            ItemKind::Delegation(Box::new(Delegation { id: DUMMY_NODE_ID, qself, path, body })),
+        ))
+    }
+
     fn parse_item_list<T>(
         &mut self,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
-    ) -> PResult<'a, Vec<T>> {
+    ) -> PResult<'a, ThinVec<T>> {
         let open_brace_span = self.token.span;
+
+        // Recover `impl Ty;` instead of `impl Ty {}`
+        if self.token == TokenKind::Semi {
+            self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
+            self.bump();
+            return Ok(ThinVec::new());
+        }
+
         self.expect(&token::OpenDelim(Delimiter::Brace))?;
-        attrs.append(&mut self.parse_inner_attributes()?);
+        attrs.extend(self.parse_inner_attributes()?);
 
-        let mut items = Vec::new();
+        let mut items = ThinVec::new();
         while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
             if self.recover_doc_comment_before_brace() {
                 continue;
             }
+            self.recover_diff_marker();
             match parse_item(self) {
                 Ok(None) => {
+                    let mut is_unnecessary_semicolon = !items.is_empty()
+                        // When the close delim is `)` in a case like the following, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
+                        // but the actual `token.kind` is `token::CloseDelim(Delimiter::Brace)`.
+                        // This is because the `token.kind` of the close delim is treated as the same as
+                        // that of the open delim in `TokenTreesReader::parse_token_tree`, even if their delimiters are different.
+                        // Therefore, `token.kind` should not be compared here.
+                        //
+                        // issue-60075.rs
+                        // ```
+                        // trait T {
+                        //     fn qux() -> Option<usize> {
+                        //         let _ = if true {
+                        //         });
+                        //          ^ this close delim
+                        //         Some(4)
+                        //     }
+                        // ```
+                        && self
+                            .span_to_snippet(self.prev_token.span)
+                            .is_ok_and(|snippet| snippet == "}")
+                        && self.token.kind == token::Semi;
+                    let mut semicolon_span = self.token.span;
+                    if !is_unnecessary_semicolon {
+                        // #105369, Detect spurious `;` before assoc fn body
+                        is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
+                            && self.prev_token.kind == token::Semi;
+                        semicolon_span = self.prev_token.span;
+                    }
                     // We have to bail or we'll potentially never make progress.
                     let non_item_span = self.token.span;
+                    let is_let = self.token.is_keyword(kw::Let);
+
+                    let mut err =
+                        self.dcx().struct_span_err(non_item_span, "non-item in item list");
                     self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
-                    self.struct_span_err(non_item_span, "non-item in item list")
-                        .span_label(open_brace_span, "item list starts here")
-                        .span_label(non_item_span, "non-item starts here")
-                        .span_label(self.prev_token.span, "item list ends here")
-                        .emit();
+                    if is_let {
+                        err.span_suggestion(
+                            non_item_span,
+                            "consider using `const` instead of `let` for associated const",
+                            "const",
+                            Applicability::MachineApplicable,
+                        );
+                    } else {
+                        err.span_label(open_brace_span, "item list starts here")
+                            .span_label(non_item_span, "non-item starts here")
+                            .span_label(self.prev_token.span, "item list ends here");
+                    }
+                    if is_unnecessary_semicolon {
+                        err.span_suggestion(
+                            semicolon_span,
+                            "consider removing this semicolon",
+                            "",
+                            Applicability::MaybeIncorrect,
+                        );
+                    }
+                    err.emit();
                     break;
                 }
                 Ok(Some(item)) => items.extend(item),
-                Err(mut err) => {
+                Err(err) => {
                     self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
-                    err.span_label(open_brace_span, "while parsing this item list starting here")
-                        .span_label(self.prev_token.span, "the item list ends here")
-                        .emit();
+                    err.with_span_label(
+                        open_brace_span,
+                        "while parsing this item list starting here",
+                    )
+                    .with_span_label(self.prev_token.span, "the item list ends here")
+                    .emit();
                     break;
                 }
             }
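
Two invented snippets for the recoveries added to `parse_item_list`: `impl Ty;` instead of an empty block, and a stray `let` where an associated `const` was meant:

    // `UseEmptyBlockNotSemi`: recovered as if `impl Config {}` had been written.
    struct Config;
    impl Config;

    impl Config {
        // Non-item in item list; the suggestion replaces `let` with `const`.
        let DEFAULT: u32 = 8;
    }
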
@@ -694,16 +794,17 @@ impl<'a> Parser<'a> {
     fn recover_doc_comment_before_brace(&mut self) -> bool {
         if let token::DocComment(..) = self.token.kind {
             if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
-                struct_span_err!(
-                    self.diagnostic(),
+                // FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
+                struct_span_code_err!(
+                    self.dcx(),
                     self.token.span,
                     E0584,
                     "found a documentation comment that doesn't document anything",
                 )
-                .span_label(self.token.span, "this doc comment doesn't document anything")
-                .help(
-                    "doc comments must come before what they document, maybe a \
-                    comment was intended with `//`?",
+                .with_span_label(self.token.span, "this doc comment doesn't document anything")
+                .with_help(
+                    "doc comments must come before what they document, if a comment was \
+                    intended use `//`",
                 )
                 .emit();
                 self.bump();
@@ -737,10 +838,15 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
-    fn parse_item_trait(&mut self, attrs: &mut Vec<Attribute>, lo: Span) -> PResult<'a, ItemInfo> {
-        let unsafety = self.parse_unsafety();
+    fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
+        let unsafety = self.parse_unsafety(Case::Sensitive);
         // Parse optional `auto` prefix.
-        let is_auto = if self.eat_keyword(kw::Auto) { IsAuto::Yes } else { IsAuto::No };
+        let is_auto = if self.eat_keyword(kw::Auto) {
+            self.sess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
+            IsAuto::Yes
+        } else {
+            IsAuto::No
+        };
 
         self.expect_keyword(kw::Trait)?;
         let ident = self.parse_ident()?;
@@ -749,32 +855,26 @@ impl<'a> Parser<'a> {
         // Parse optional colon and supertrait bounds.
         let had_colon = self.eat(&token::Colon);
         let span_at_colon = self.prev_token.span;
-        let bounds = if had_colon {
-            self.parse_generic_bounds(Some(self.prev_token.span))?
-        } else {
-            Vec::new()
-        };
+        let bounds = if had_colon { self.parse_generic_bounds()? } else { Vec::new() };
 
         let span_before_eq = self.prev_token.span;
         if self.eat(&token::Eq) {
             // It's a trait alias.
             if had_colon {
                 let span = span_at_colon.to(span_before_eq);
-                self.struct_span_err(span, "bounds are not allowed on trait aliases").emit();
+                self.dcx().emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
             }
 
-            let bounds = self.parse_generic_bounds(None)?;
+            let bounds = self.parse_generic_bounds()?;
             generics.where_clause = self.parse_where_clause()?;
             self.expect_semi()?;
 
             let whole_span = lo.to(self.prev_token.span);
             if is_auto == IsAuto::Yes {
-                let msg = "trait aliases cannot be `auto`";
-                self.struct_span_err(whole_span, msg).span_label(whole_span, msg).emit();
+                self.dcx().emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
             }
             if let Unsafe::Yes(_) = unsafety {
-                let msg = "trait aliases cannot be `unsafe`";
-                self.struct_span_err(whole_span, msg).span_label(whole_span, msg).emit();
+                self.dcx().emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
             }
 
             self.sess.gated_spans.gate(sym::trait_alias, whole_span);
@@ -819,10 +919,14 @@ impl<'a> Parser<'a> {
                 let kind = match AssocItemKind::try_from(kind) {
                     Ok(kind) => kind,
                     Err(kind) => match kind {
-                        ItemKind::Static(a, _, b) => {
-                            self.struct_span_err(span, "associated `static` items are not allowed")
-                                .emit();
-                            AssocItemKind::Const(Defaultness::Final, a, b)
+                        ItemKind::Static(box StaticItem { ty, mutability: _, expr }) => {
+                            self.dcx().emit_err(errors::AssociatedStaticItemNotAllowed { span });
+                            AssocItemKind::Const(Box::new(ConstItem {
+                                defaultness: Defaultness::Final,
+                                generics: Generics::default(),
+                                ty,
+                                expr,
+                            }))
                         }
                         _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
                     },
@@ -834,7 +938,7 @@ impl<'a> Parser<'a> {
 
     /// Parses a `type` alias with the following grammar:
     /// ```ebnf
-    /// TypeAlias = "type" Ident Generics {":" GenericBounds}? {"=" Ty}? ";" ;
+    /// TypeAlias = "type" Ident Generics (":" GenericBounds)? WhereClause ("=" Ty)? WhereClause ";" ;
     /// ```
     /// The `"type"` has already been eaten.
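+    ///
+    /// For example (illustrative), this accepts `type Alias<T> where T: Clone = Option<T>;`,
+    /// with bounds and both where-clause positions allowed syntactically.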
     fn parse_type_alias(&mut self, defaultness: Defaultness) -> PResult<'a, ItemInfo> {
@@ -843,7 +947,7 @@ impl<'a> Parser<'a> {
 
         // Parse optional colon and param bounds.
         let bounds =
-            if self.eat(&token::Colon) { self.parse_generic_bounds(None)? } else { Vec::new() };
+            if self.eat(&token::Colon) { self.parse_generic_bounds()? } else { Vec::new() };
         let before_where_clause = self.parse_where_clause()?;
 
         let ty = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
@@ -856,7 +960,7 @@ impl<'a> Parser<'a> {
         );
         let where_predicates_split = before_where_clause.predicates.len();
         let mut predicates = before_where_clause.predicates;
-        predicates.extend(after_where_clause.predicates.into_iter());
+        predicates.extend(after_where_clause.predicates);
         let where_clause = WhereClause {
             has_where_token: before_where_clause.has_where_token
                 || after_where_clause.has_where_token,
@@ -892,7 +996,8 @@ impl<'a> Parser<'a> {
     fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
         let lo = self.token.span;
 
-        let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
+        let mut prefix =
+            ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
         let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
             || self.check(&token::BinOp(token::Star))
             || self.is_import_coupler()
@@ -913,7 +1018,17 @@ impl<'a> Parser<'a> {
             if self.eat(&token::ModSep) {
                 self.parse_use_tree_glob_or_nested()?
             } else {
-                UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
+                // Recover from using a colon as path separator.
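+                // e.g. `use std:fmt;` (illustrative) is accepted here and then reported as an error.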
+                while self.eat_noexpect(&token::Colon) {
+                    self.dcx()
+                        .emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
+
+                    // We parse the rest of the path and append it to the original prefix.
+                    self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
+                    prefix.span = lo.to(self.prev_token.span);
+                }
+
+                UseTreeKind::Simple(self.parse_rename()?)
             }
         };
 
@@ -934,9 +1049,12 @@ impl<'a> Parser<'a> {
     /// ```text
     /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
     /// ```
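+    /// For example (illustrative), the braced lists in `use a::{};`, `use a::{b, c::d};`,
+    /// and `use a::{b, c::d,};` all match this grammar.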
-    fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
-        self.parse_delim_comma_seq(Delimiter::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
-            .map(|(r, _)| r)
+    fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
+        self.parse_delim_comma_seq(Delimiter::Brace, |p| {
+            p.recover_diff_marker();
+            Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
+        })
+        .map(|(r, _)| r)
     }
 
     fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
@@ -974,41 +1092,37 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> {
-        let error_msg = "crate name using dashes are not valid in `extern crate` statements";
-        let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
-                              in the code";
-        let mut ident = if self.token.is_keyword(kw::SelfLower) {
+        let ident = if self.token.is_keyword(kw::SelfLower) {
             self.parse_path_segment_ident()
         } else {
             self.parse_ident()
         }?;
-        let mut idents = vec![];
-        let mut replacement = vec![];
-        let mut fixed_crate_name = false;
-        // Accept `extern crate name-like-this` for better diagnostics.
+
         let dash = token::BinOp(token::BinOpToken::Minus);
-        if self.token == dash {
-            // Do not include `-` as part of the expected tokens list.
-            while self.eat(&dash) {
-                fixed_crate_name = true;
-                replacement.push((self.prev_token.span, "_".to_string()));
-                idents.push(self.parse_ident()?);
-            }
+        if self.token != dash {
+            return Ok(ident);
         }
-        if fixed_crate_name {
-            let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
-            let mut fixed_name = ident.name.to_string();
-            for part in idents {
-                fixed_name.push_str(&format!("_{}", part.name));
-            }
-            ident = Ident::from_str_and_span(&fixed_name, fixed_name_sp);
 
-            self.struct_span_err(fixed_name_sp, error_msg)
-                .span_label(fixed_name_sp, "dash-separated idents are not valid")
-                .multipart_suggestion(suggestion_msg, replacement, Applicability::MachineApplicable)
-                .emit();
+        // Accept `extern crate name-like-this` for better diagnostics.
+        let mut dashes = vec![];
+        let mut idents = vec![];
+        while self.eat(&dash) {
+            dashes.push(self.prev_token.span);
+            idents.push(self.parse_ident()?);
         }
-        Ok(ident)
+
+        let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
+        let mut fixed_name = ident.name.to_string();
+        for part in idents {
+            write!(fixed_name, "_{}", part.name).unwrap();
+        }
+
+        self.dcx().emit_err(errors::ExternCrateNameWithDashes {
+            span: fixed_name_sp,
+            sugg: errors::ExternCrateNameWithDashesSugg { dashes },
+        });
+
+        Ok(Ident::from_str_and_span(&fixed_name, fixed_name_sp))
     }
 
     /// Parses `extern` for foreign ABIs modules.
@@ -1023,7 +1137,7 @@ impl<'a> Parser<'a> {
     /// ```
     fn parse_item_foreign_mod(
         &mut self,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         mut unsafety: Unsafe,
     ) -> PResult<'a, ItemInfo> {
         let abi = self.parse_abi(); // ABI?
@@ -1031,8 +1145,7 @@ impl<'a> Parser<'a> {
             && self.token.is_keyword(kw::Unsafe)
             && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace))
         {
-            let mut err = self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err();
-            err.emit();
+            self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
             unsafety = Unsafe::Yes(self.token.span);
             self.eat_keyword(kw::Unsafe);
         }
@@ -1055,9 +1168,14 @@ impl<'a> Parser<'a> {
                 let kind = match ForeignItemKind::try_from(kind) {
                     Ok(kind) => kind,
                     Err(kind) => match kind {
-                        ItemKind::Const(_, a, b) => {
-                            self.error_on_foreign_const(span, ident);
-                            ForeignItemKind::Static(a, Mutability::Not, b)
+                        ItemKind::Const(box ConstItem { ty, expr, .. }) => {
+                            let const_span = Some(span.with_hi(ident.span.lo()))
+                                .filter(|span| span.can_be_used_for_suggestions());
+                            self.dcx().emit_err(errors::ExternItemCannotBeConst {
+                                ident_span: ident.span,
+                                const_span,
+                            });
+                            ForeignItemKind::Static(ty, Mutability::Not, expr)
                         }
                         _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
                     },
@@ -1067,27 +1185,14 @@ impl<'a> Parser<'a> {
         ))
     }
 
-    fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &str) -> Option<T> {
+    fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &'static str) -> Option<T> {
+        // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
         let span = self.sess.source_map().guess_head_span(span);
         let descr = kind.descr();
-        self.struct_span_err(span, &format!("{descr} is not supported in {ctx}"))
-            .help(&format!("consider moving the {descr} out to a nearby module scope"))
-            .emit();
+        self.dcx().emit_err(errors::BadItemKind { span, descr, ctx });
         None
     }
 
-    fn error_on_foreign_const(&self, span: Span, ident: Ident) {
-        self.struct_span_err(ident.span, "extern items cannot be `const`")
-            .span_suggestion(
-                span.with_hi(ident.span.lo()),
-                "try using a static value",
-                "static ",
-                Applicability::MachineApplicable,
-            )
-            .note("for more information, visit https://doc.rust-lang.org/std/keyword.extern.html")
-            .emit();
-    }
-
     fn is_unsafe_foreign_mod(&self) -> bool {
         self.token.is_keyword(kw::Unsafe)
             && self.is_keyword_ahead(1, &[kw::Extern])
@@ -1115,15 +1220,11 @@ impl<'a> Parser<'a> {
     fn recover_const_mut(&mut self, const_span: Span) {
         if self.eat_keyword(kw::Mut) {
             let span = self.prev_token.span;
-            self.struct_span_err(span, "const globals cannot be mutable")
-                .span_label(span, "cannot be mutable")
-                .span_suggestion(
-                    const_span,
-                    "you might want to declare a static instead",
-                    "static",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            self.dcx()
+                .emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
+        } else if self.eat_keyword(kw::Let) {
+            let span = self.prev_token.span;
+            self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
         }
     }
 
@@ -1131,11 +1232,11 @@ impl<'a> Parser<'a> {
     fn recover_const_impl(
         &mut self,
         const_span: Span,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         defaultness: Defaultness,
     ) -> PResult<'a, ItemInfo> {
         let impl_span = self.token.span;
-        let mut err = self.expected_ident_found();
+        let err = self.expected_ident_found_err();
 
         // Only try to recover if this is implementing a trait for a type
         let mut impl_info = match self.parse_item_impl(attrs, defaultness) {
@@ -1147,13 +1248,13 @@ impl<'a> Parser<'a> {
             }
         };
 
-        match impl_info.1 {
-            ItemKind::Impl(box Impl { of_trait: Some(ref trai), ref mut constness, .. }) => {
+        match &mut impl_info.1 {
+            ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
                 *constness = Const::Yes(const_span);
 
                 let before_trait = trai.path.span.shrink_to_lo();
                 let const_up_to_impl = const_span.with_hi(impl_span.lo());
-                err.multipart_suggestion(
+                err.with_multipart_suggestion(
                     "you might have meant to write a const trait impl",
                     vec![(const_up_to_impl, "".to_owned()), (before_trait, "const ".to_owned())],
                     Applicability::MaybeIncorrect,
@@ -1167,32 +1268,134 @@ impl<'a> Parser<'a> {
         Ok(impl_info)
     }
 
-    /// Parse `["const" | ("static" "mut"?)] $ident ":" $ty (= $expr)?` with
-    /// `["const" | ("static" "mut"?)]` already parsed and stored in `m`.
+    /// Parse a static item with the prefix `"static" "mut"?` already parsed and stored in `mutability`.
     ///
-    /// When `m` is `"const"`, `$ident` may also be `"_"`.
-    fn parse_item_global(
-        &mut self,
-        m: Option<Mutability>,
-    ) -> PResult<'a, (Ident, P<Ty>, Option<P<ast::Expr>>)> {
-        let id = if m.is_none() { self.parse_ident_or_underscore() } else { self.parse_ident() }?;
+    /// ```ebnf
+    /// Static = "static" "mut"? $ident ":" $ty (= $expr)? ";" ;
+    /// ```
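+    ///
+    /// For example (illustrative): `static FOO: u32 = 42;` or `static mut BAR: bool;`
+    /// (the initializer is optional at parse time, per `(= $expr)?`).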
+    fn parse_static_item(&mut self, mutability: Mutability) -> PResult<'a, (Ident, StaticItem)> {
+        let ident = self.parse_ident()?;
 
-        // Parse the type of a `const` or `static mut?` item.
-        // That is, the `":" $ty` fragment.
-        let ty = if self.eat(&token::Colon) {
-            self.parse_ty()?
-        } else {
-            self.recover_missing_const_type(id, m)
+        if self.token.kind == TokenKind::Lt && self.may_recover() {
+            let generics = self.parse_generics()?;
+            self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span });
+        }
+
+        // Parse the type of a static item. That is, the `":" $ty` fragment.
+        // FIXME: This could maybe benefit from `.may_recover()`?
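+        // e.g. `static FOO = 42;` or `static FOO;` (illustrative) take the recovery arm below.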
+        let ty = match (self.eat(&token::Colon), self.check(&token::Eq) | self.check(&token::Semi))
+        {
+            (true, false) => self.parse_ty()?,
+            // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type.
+            (colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)),
         };
 
         let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
+
         self.expect_semi()?;
-        Ok((id, ty, expr))
+
+        Ok((ident, StaticItem { ty, mutability, expr }))
     }
 
-    /// We were supposed to parse `:` but the `:` was missing.
+    /// Parse a constant item with the prefix `"const"` already parsed.
+    ///
+    /// ```ebnf
+    /// Const = "const" ($ident | "_") Generics ":" $ty (= $expr)? WhereClause ";" ;
+    /// ```
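+    ///
+    /// For example (illustrative): `const LEN: usize = 3;`, `const _: () = ();`, or, behind the
+    /// `generic_const_items` gate, `const NONE<T>: Option<T> = None;`.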
+    fn parse_const_item(&mut self) -> PResult<'a, (Ident, Generics, P<Ty>, Option<P<ast::Expr>>)> {
+        let ident = self.parse_ident_or_underscore()?;
+
+        let mut generics = self.parse_generics()?;
+
+        // Check the span for emptiness instead of the list of parameters in order to correctly
+        // recognize and subsequently flag empty parameter lists (`<>`) as unstable.
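+        // e.g. `const C<>: u8 = 0;` (illustrative) has no parameters but a non-empty generics span.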
+        if !generics.span.is_empty() {
+            self.sess.gated_spans.gate(sym::generic_const_items, generics.span);
+        }
+
+        // Parse the type of a constant item. That is, the `":" $ty` fragment.
+        // FIXME: This could maybe benefit from `.may_recover()`?
+        let ty = match (
+            self.eat(&token::Colon),
+            self.check(&token::Eq) | self.check(&token::Semi) | self.check_keyword(kw::Where),
+        ) {
+            (true, false) => self.parse_ty()?,
+            // If there wasn't a `:` or the colon was followed by a `=`, `;` or `where`, recover a missing type.
+            (colon, _) => self.recover_missing_global_item_type(colon, None),
+        };
+
+        // Proactively parse a where-clause to be able to provide a good error message in case we
+        // encounter the item body following it.
+        let before_where_clause =
+            if self.may_recover() { self.parse_where_clause()? } else { WhereClause::default() };
+
+        let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
+
+        let after_where_clause = self.parse_where_clause()?;
+
+        // Provide a nice error message if the user placed a where-clause before the item body.
+        // Users may be tempted to write such code if they are still used to the deprecated
+        // where-clause location on type aliases and associated types. See also #89122.
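+        // e.g. `const C: u8 where u8: Copy = 0;` (illustrative) triggers this diagnostic.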
+        if before_where_clause.has_where_token
+            && let Some(expr) = &expr
+        {
+            self.dcx().emit_err(errors::WhereClauseBeforeConstBody {
+                span: before_where_clause.span,
+                name: ident.span,
+                body: expr.span,
+                sugg: if !after_where_clause.has_where_token {
+                    self.sess.source_map().span_to_snippet(expr.span).ok().map(|body| {
+                        errors::WhereClauseBeforeConstBodySugg {
+                            left: before_where_clause.span.shrink_to_lo(),
+                            snippet: body,
+                            right: before_where_clause.span.shrink_to_hi().to(expr.span),
+                        }
+                    })
+                } else {
+                    // FIXME(generic_const_items): Provide a structured suggestion to merge the first
+                    // where-clause into the second one.
+                    None
+                },
+            });
+        }
+
+        // Merge the predicates of both where-clauses since either one can be relevant.
+        // If we didn't parse a body (which is valid for associated consts in traits) and we were
+        // allowed to recover, `before_where_clause` contains the predicates, otherwise they are
+        // in `after_where_clause`. Further, both of them might contain predicates iff two
+        // where-clauses were provided which is syntactically ill-formed but we want to recover from
+        // it and treat them as one large where-clause.
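+        // e.g. `const C: u8 where u8: Copy = 0 where u8: Sized;` (ill-formed, illustrative)
+        // ends up with both predicates in the single merged where-clause.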
+        let mut predicates = before_where_clause.predicates;
+        predicates.extend(after_where_clause.predicates);
+        let where_clause = WhereClause {
+            has_where_token: before_where_clause.has_where_token
+                || after_where_clause.has_where_token,
+            predicates,
+            span: if after_where_clause.has_where_token {
+                after_where_clause.span
+            } else {
+                before_where_clause.span
+            },
+        };
+
+        if where_clause.has_where_token {
+            self.sess.gated_spans.gate(sym::generic_const_items, where_clause.span);
+        }
+
+        generics.where_clause = where_clause;
+
+        self.expect_semi()?;
+
+        Ok((ident, generics, ty, expr))
+    }
+
+    /// We were supposed to parse `":" $ty` but the `:` or the type was missing.
     /// This means that the type is missing.
-    fn recover_missing_const_type(&mut self, id: Ident, m: Option<Mutability>) -> P<Ty> {
+    fn recover_missing_global_item_type(
+        &mut self,
+        colon_present: bool,
+        m: Option<Mutability>,
+    ) -> P<Ty> {
         // Construct the error and stash it away with the hope
         // that typeck will later enrich the error with a type.
         let kind = match m {
@@ -1200,39 +1403,82 @@ impl<'a> Parser<'a> {
             Some(Mutability::Not) => "static",
             None => "const",
         };
-        let mut err = self.struct_span_err(id.span, &format!("missing type for `{kind}` item"));
-        err.span_suggestion(
-            id.span,
-            "provide a type for the item",
-            format!("{id}: <type>"),
-            Applicability::HasPlaceholders,
-        );
-        err.stash(id.span, StashKey::ItemNoType);
+
+        let colon = match colon_present {
+            true => "",
+            false => ":",
+        };
+
+        let span = self.prev_token.span.shrink_to_hi();
+        let err = self.dcx().create_err(errors::MissingConstType { span, colon, kind });
+        err.stash(span, StashKey::ItemNoType);
 
         // The user intended that the type be inferred,
         // so treat this as if the user wrote e.g. `const A: _ = expr;`.
-        P(Ty { kind: TyKind::Infer, span: id.span, id: ast::DUMMY_NODE_ID, tokens: None })
+        P(Ty { kind: TyKind::Infer, span, id: ast::DUMMY_NODE_ID, tokens: None })
     }
 
     /// Parses an enum declaration.
     fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
+        if self.token.is_keyword(kw::Struct) {
+            let span = self.prev_token.span.to(self.token.span);
+            let err = errors::EnumStructMutuallyExclusive { span };
+            if self.look_ahead(1, |t| t.is_ident()) {
+                self.bump();
+                self.dcx().emit_err(err);
+            } else {
+                return Err(self.dcx().create_err(err));
+            }
+        }
+
+        let prev_span = self.prev_token.span;
         let id = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
         generics.where_clause = self.parse_where_clause()?;
 
-        let (variants, _) = self
-            .parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant())
-            .map_err(|e| {
-                self.recover_stmt();
-                e
-            })?;
+        // Possibly recover `enum Foo;` instead of `enum Foo {}`
+        let (variants, _) = if self.token == TokenKind::Semi {
+            self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
+            self.bump();
+            (thin_vec![], false)
+        } else {
+            self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
+                .map_err(|mut err| {
+                    err.span_label(id.span, "while parsing this enum");
+                    if self.token == token::Colon {
+                        let snapshot = self.create_snapshot_for_diagnostic();
+                        self.bump();
+                        match self.parse_ty() {
+                            Ok(_) => {
+                                err.span_suggestion_verbose(
+                                    prev_span,
+                                    "perhaps you meant to use `struct` here",
+                                    "struct",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                            Err(e) => {
+                                e.cancel();
+                            }
+                        }
+                        self.restore_snapshot(snapshot);
+                    }
+                    self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+                    self.bump(); // }
+                    err
+                })?
+        };
 
         let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
         Ok((id, ItemKind::Enum(enum_definition, generics)))
     }
 
-    fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
+    fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
+        self.recover_diff_marker();
         let variant_attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
+        let help = "enum variants can be `Variant`, `Variant = <integer>`, \
+                    `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
         self.collect_tokens_trailing_token(
             variant_attrs,
             ForceCollect::No,
@@ -1245,24 +1491,65 @@ impl<'a> Parser<'a> {
                 }
                 let ident = this.parse_field_ident("enum", vlo)?;
 
+                if this.token == token::Not {
+                    if let Err(err) = this.unexpected::<()>() {
+                        err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
+                    }
+
+                    this.bump();
+                    this.parse_delim_args()?;
+
+                    return Ok((None, TrailingToken::MaybeComma));
+                }
+
                 let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
                     // Parse a struct variant.
-                    let (fields, recovered) = this.parse_record_struct_body("struct", false)?;
-                    VariantData::Struct(fields, recovered)
+                    let (fields, recovered) =
+                        match this.parse_record_struct_body("struct", ident.span, false) {
+                            Ok((fields, recovered)) => (fields, recovered),
+                            Err(mut err) => {
+                                if this.token == token::Colon {
+                                    // We handle `enum` to `struct` suggestion in the caller.
+                                    return Err(err);
+                                }
+                                this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+                                this.bump(); // }
+                                err.span_label(span, "while parsing this enum");
+                                err.help(help);
+                                err.emit();
+                                (thin_vec![], true)
+                            }
+                        };
+                    VariantData::Struct { fields, recovered }
                 } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
-                    VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
+                    let body = match this.parse_tuple_struct_body() {
+                        Ok(body) => body,
+                        Err(mut err) => {
+                            if this.token == token::Colon {
+                                // We handle `enum` to `struct` suggestion in the caller.
+                                return Err(err);
+                            }
+                            this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
+                            this.bump(); // )
+                            err.span_label(span, "while parsing this enum");
+                            err.help(help);
+                            err.emit();
+                            thin_vec![]
+                        }
+                    };
+                    VariantData::Tuple(body, DUMMY_NODE_ID)
                 } else {
                     VariantData::Unit(DUMMY_NODE_ID)
                 };
 
                 let disr_expr =
-                    if this.eat(&token::Eq) { Some(this.parse_anon_const_expr()?) } else { None };
+                    if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
 
                 let vr = ast::Variant {
                     ident,
                     vis,
                     id: DUMMY_NODE_ID,
-                    attrs: variant_attrs.into(),
+                    attrs: variant_attrs,
                     data: struct_def,
                     disr_expr,
                     span: vlo.to(this.prev_token.span),
@@ -1272,6 +1559,10 @@ impl<'a> Parser<'a> {
                 Ok((Some(vr), TrailingToken::MaybeComma))
             },
         )
+        .map_err(|mut err| {
+            err.help(help);
+            err
+        })
     }
 
     /// Parses `struct Foo { ... }`.
@@ -1295,24 +1586,38 @@ impl<'a> Parser<'a> {
         // struct.
 
         let vdata = if self.token.is_keyword(kw::Where) {
-            generics.where_clause = self.parse_where_clause()?;
-            if self.eat(&token::Semi) {
+            let tuple_struct_body;
+            (generics.where_clause, tuple_struct_body) =
+                self.parse_struct_where_clause(class_name, generics.span)?;
+
+            if let Some(body) = tuple_struct_body {
+                // If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);`
+                let body = VariantData::Tuple(body, DUMMY_NODE_ID);
+                self.expect_semi()?;
+                body
+            } else if self.eat(&token::Semi) {
                 // If we see a: `struct Foo<T> where T: Copy;` style decl.
                 VariantData::Unit(DUMMY_NODE_ID)
             } else {
                 // If we see: `struct Foo<T> where T: Copy { ... }`
-                let (fields, recovered) =
-                    self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
-                VariantData::Struct(fields, recovered)
+                let (fields, recovered) = self.parse_record_struct_body(
+                    "struct",
+                    class_name.span,
+                    generics.where_clause.has_where_token,
+                )?;
+                VariantData::Struct { fields, recovered }
             }
         // No `where` so: `struct Foo<T>;`
         } else if self.eat(&token::Semi) {
             VariantData::Unit(DUMMY_NODE_ID)
         // Record-style struct definition
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
-            let (fields, recovered) =
-                self.parse_record_struct_body("struct", generics.where_clause.has_where_token)?;
-            VariantData::Struct(fields, recovered)
+            let (fields, recovered) = self.parse_record_struct_body(
+                "struct",
+                class_name.span,
+                generics.where_clause.has_where_token,
+            )?;
+            VariantData::Struct { fields, recovered }
         // Tuple-style struct definition with optional where-clause.
         } else if self.token == token::OpenDelim(Delimiter::Parenthesis) {
             let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID);
@@ -1320,13 +1625,9 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             body
         } else {
-            let token_str = super::token_descr(&self.token);
-            let msg = &format!(
-                "expected `where`, `{{`, `(`, or `;` after struct name, found {token_str}"
-            );
-            let mut err = self.struct_span_err(self.token.span, msg);
-            err.span_label(self.token.span, "expected `where`, `{`, `(`, or `;` after struct name");
-            return Err(err);
+            let err =
+                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            return Err(self.dcx().create_err(err));
         };
 
         Ok((class_name, ItemKind::Struct(vdata, generics)))
@@ -1340,17 +1641,23 @@ impl<'a> Parser<'a> {
 
         let vdata = if self.token.is_keyword(kw::Where) {
             generics.where_clause = self.parse_where_clause()?;
-            let (fields, recovered) =
-                self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
-            VariantData::Struct(fields, recovered)
+            let (fields, recovered) = self.parse_record_struct_body(
+                "union",
+                class_name.span,
+                generics.where_clause.has_where_token,
+            )?;
+            VariantData::Struct { fields, recovered }
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
-            let (fields, recovered) =
-                self.parse_record_struct_body("union", generics.where_clause.has_where_token)?;
-            VariantData::Struct(fields, recovered)
+            let (fields, recovered) = self.parse_record_struct_body(
+                "union",
+                class_name.span,
+                generics.where_clause.has_where_token,
+            )?;
+            VariantData::Struct { fields, recovered }
         } else {
             let token_str = super::token_descr(&self.token);
-            let msg = &format!("expected `where` or `{{` after union name, found {token_str}");
-            let mut err = self.struct_span_err(self.token.span, msg);
+            let msg = format!("expected `where` or `{{` after union name, found {token_str}");
+            let mut err = self.dcx().struct_span_err(self.token.span, msg);
             err.span_label(self.token.span, "expected `where` or `{` after union name");
             return Err(err);
         };
@@ -1358,12 +1665,13 @@ impl<'a> Parser<'a> {
         Ok((class_name, ItemKind::Union(vdata, generics)))
     }
 
-    fn parse_record_struct_body(
+    pub(crate) fn parse_record_struct_body(
         &mut self,
         adt_ty: &str,
+        ident_span: Span,
         parsed_where: bool,
-    ) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
-        let mut fields = Vec::new();
+    ) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
+        let mut fields = ThinVec::new();
         let mut recovered = false;
         if self.eat(&token::OpenDelim(Delimiter::Brace)) {
             while self.token != token::CloseDelim(Delimiter::Brace) {
@@ -1375,6 +1683,7 @@ impl<'a> Parser<'a> {
                 match field {
                     Ok(field) => fields.push(field),
                     Err(mut err) => {
+                        err.span_label(ident_span, format!("while parsing this {adt_ty}"));
                         err.emit();
                         break;
                     }
@@ -1383,12 +1692,12 @@ impl<'a> Parser<'a> {
             self.eat(&token::CloseDelim(Delimiter::Brace));
         } else {
             let token_str = super::token_descr(&self.token);
-            let msg = &format!(
+            let msg = format!(
                 "expected {}`{{` after struct name, found {}",
                 if parsed_where { "" } else { "`where`, or " },
                 token_str
             );
-            let mut err = self.struct_span_err(self.token.span, msg);
+            let mut err = self.dcx().struct_span_err(self.token.span, msg);
             err.span_label(
                 self.token.span,
                 format!(
@@ -1402,15 +1711,38 @@ impl<'a> Parser<'a> {
         Ok((fields, recovered))
     }
 
-    fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<FieldDef>> {
+    pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef>> {
         // This is the case where we find `struct Foo<T>(T) where T: Copy;`.
         // Unit-like structs are handled in the `parse_item_struct` function.
         self.parse_paren_comma_seq(|p| {
             let attrs = p.parse_outer_attributes()?;
             p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
+                let mut snapshot = None;
+                if p.is_diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+                    // Account for `<<<<<<<` diff markers. We can't proactively error here because
+                    // that can be a valid type start, so we snapshot and reparse only if we've
+                    // encountered another parse error.
+                    snapshot = Some(p.create_snapshot_for_diagnostic());
+                }
                 let lo = p.token.span;
-                let vis = p.parse_visibility(FollowedByType::Yes)?;
-                let ty = p.parse_ty()?;
+                let vis = match p.parse_visibility(FollowedByType::Yes) {
+                    Ok(vis) => vis,
+                    Err(err) => {
+                        if let Some(ref mut snapshot) = snapshot {
+                            snapshot.recover_diff_marker();
+                        }
+                        return Err(err);
+                    }
+                };
+                let ty = match p.parse_ty() {
+                    Ok(ty) => ty,
+                    Err(err) => {
+                        if let Some(ref mut snapshot) = snapshot {
+                            snapshot.recover_diff_marker();
+                        }
+                        return Err(err);
+                    }
+                };
 
                 Ok((
                     FieldDef {
@@ -1419,7 +1751,7 @@ impl<'a> Parser<'a> {
                         ident: None,
                         id: DUMMY_NODE_ID,
                         ty,
-                        attrs: attrs.into(),
+                        attrs,
                         is_placeholder: false,
                     },
                     TrailingToken::MaybeComma,
@@ -1431,11 +1763,14 @@ impl<'a> Parser<'a> {
 
     /// Parses an element of a struct declaration.
     fn parse_field_def(&mut self, adt_ty: &str) -> PResult<'a, FieldDef> {
+        self.recover_diff_marker();
         let attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
             let vis = this.parse_visibility(FollowedByType::No)?;
-            Ok((this.parse_single_struct_field(adt_ty, lo, vis, attrs)?, TrailingToken::None))
+            this.parse_single_struct_field(adt_ty, lo, vis, attrs)
+                .map(|field| (field, TrailingToken::None))
         })
     }
 
@@ -1445,13 +1780,25 @@ impl<'a> Parser<'a> {
         adt_ty: &str,
         lo: Span,
         vis: Visibility,
-        attrs: Vec<Attribute>,
+        attrs: AttrVec,
     ) -> PResult<'a, FieldDef> {
         let mut seen_comma: bool = false;
         let a_var = self.parse_name_and_ty(adt_ty, lo, vis, attrs)?;
         if self.token == token::Comma {
             seen_comma = true;
         }
+        if self.eat(&token::Semi) {
+            let sp = self.prev_token.span;
+            let mut err =
+                self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`"));
+            err.span_suggestion_short(
+                sp,
+                "replace `;` with `,`",
+                ",",
+                Applicability::MachineApplicable,
+            );
+            return Err(err);
+        }
         match self.token.kind {
             token::Comma => {
                 self.bump();
@@ -1459,7 +1806,10 @@ impl<'a> Parser<'a> {
             token::CloseDelim(Delimiter::Brace) => {}
             token::DocComment(..) => {
                 let previous_span = self.prev_token.span;
-                let mut err = self.span_err(self.token.span, Error::UselessDocComment);
+                let mut err = errors::DocCommentDoesNotDocumentAnything {
+                    span: self.token.span,
+                    missing_comma: None,
+                };
                 self.bump(); // consume the doc comment
                 let comma_after_doc_seen = self.eat(&token::Comma);
                 // `seen_comma` is always false here, because we are inside the doc block;
                 // a comma after the doc comment separates the previous variant from the next one.
                 if !seen_comma && comma_after_doc_seen {
                     seen_comma = true;
                     seen_comma = true;
                 }
                 if comma_after_doc_seen || self.token == token::CloseDelim(Delimiter::Brace) {
-                    err.emit();
+                    self.dcx().emit_err(err);
                 } else {
                     if !seen_comma {
-                        let sp = self.sess.source_map().next_point(previous_span);
-                        err.span_suggestion(
-                            sp,
-                            "missing comma here",
-                            ",",
-                            Applicability::MachineApplicable,
-                        );
+                        let sp = previous_span.shrink_to_hi();
+                        err.missing_comma = Some(sp);
                     }
-                    return Err(err);
+                    return Err(self.dcx().create_err(err));
                 }
             }
             _ => {
                 let sp = self.prev_token.span.shrink_to_hi();
-                let mut err = self.struct_span_err(
-                    sp,
-                    &format!("expected `,`, or `}}`, found {}", super::token_descr(&self.token)),
-                );
+                let msg =
+                    format!("expected `,`, or `}}`, found {}", super::token_descr(&self.token));
 
                 // Try to recover extra trailing angle brackets
-                let mut recovered = false;
                 if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind {
                     if let Some(last_segment) = segments.last() {
-                        recovered = self.check_trailing_angle_brackets(
+                        let guar = self.check_trailing_angle_brackets(
                             last_segment,
                             &[&token::Comma, &token::CloseDelim(Delimiter::Brace)],
                         );
-                        if recovered {
+                        if let Some(_guar) = guar {
                             // Handle a case like `Vec<u8>>,` where we can continue parsing fields
                             // after the comma
                             self.eat(&token::Comma);
-                            // `check_trailing_angle_brackets` already emitted a nicer error
-                            // NOTE(eddyb) this was `.cancel()`, but `err`
-                            // gets returned, so we can't fully defuse it.
-                            err.delay_as_bug();
+
+                            // `check_trailing_angle_brackets` already emitted a nicer error, as
+                            // proven by the presence of `_guar`. We can continue parsing.
+                            return Ok(a_var);
                         }
                     }
                 }
 
-                if self.token.is_ident() {
-                    // This is likely another field; emit the diagnostic and keep going
+                let mut err = self.dcx().struct_span_err(sp, msg);
+
+                if self.token.is_ident()
+                    || (self.token.kind == TokenKind::Pound
+                        && (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket))))
+                {
+                    // This is likely another field; `TokenKind::Pound` indicates a `#[..]`
+                    // attribute on the next field. Emit the diagnostic and continue parsing.
                     err.span_suggestion(
                         sp,
                         "try adding a comma",
@@ -1518,14 +1866,6 @@ impl<'a> Parser<'a> {
                         Applicability::MachineApplicable,
                     );
                     err.emit();
-                    recovered = true;
-                }
-
-                if recovered {
-                    // Make sure an error was emitted (either by recovering an angle bracket,
-                    // or by finding an identifier as the next token), since we're
-                    // going to continue parsing
-                    assert!(self.sess.span_diagnostic.has_errors().is_some());
                 } else {
                     return Err(err);
                 }
@@ -1535,7 +1875,7 @@ impl<'a> Parser<'a> {
     }
 
     fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
-        if let Err(mut err) = self.expect(&token::Colon) {
+        if let Err(err) = self.expect(&token::Colon) {
             let sm = self.sess.source_map();
             let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start());
             let semi_typo = self.token.kind == token::Semi
@@ -1551,13 +1891,13 @@ impl<'a> Parser<'a> {
             if eq_typo || semi_typo {
                 self.bump();
                 // Gracefully handle small typos.
-                err.span_suggestion_short(
+                err.with_span_suggestion_short(
                     self.prev_token.span,
                     "field names and their types are separated with `:`",
                     ":",
                     Applicability::MachineApplicable,
-                );
-                err.emit();
+                )
+                .emit();
             } else {
                 return Err(err);
             }
@@ -1571,33 +1911,26 @@ impl<'a> Parser<'a> {
         adt_ty: &str,
         lo: Span,
         vis: Visibility,
-        attrs: Vec<Attribute>,
+        attrs: AttrVec,
     ) -> PResult<'a, FieldDef> {
         let name = self.parse_field_ident(adt_ty, lo)?;
+        if self.token.kind == token::Not {
+            if let Err(mut err) = self.unexpected::<FieldDef>() {
+                // We've encountered a macro invocation where a field was expected.
+                err.subdiagnostic(MacroExpandsToAdtField { adt_ty });
+                return Err(err);
+            }
+        }
         self.expect_field_ty_separator()?;
-        let ty = self.parse_ty()?;
+        let ty = self.parse_ty_for_field_def()?;
         if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
-            self.struct_span_err(self.token.span, "found single colon in a struct field type path")
-                .span_suggestion_verbose(
-                    self.token.span,
-                    "write a path separator here",
-                    "::",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span });
         }
         if self.token.kind == token::Eq {
             self.bump();
-            let const_expr = self.parse_anon_const_expr()?;
+            let const_expr = self.parse_expr_anon_const()?;
             let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
-            self.struct_span_err(sp, "default values on `struct` fields aren't supported")
-                .span_suggestion(
-                    sp,
-                    "remove this unsupported default value",
-                    "",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            self.dcx().emit_err(errors::EqualsStructDefault { span: sp });
         }
         Ok(FieldDef {
             span: lo.to(self.prev_token.span),
@@ -1605,7 +1938,7 @@ impl<'a> Parser<'a> {
             vis,
             id: DUMMY_NODE_ID,
             ty,
-            attrs: attrs.into(),
+            attrs,
             is_placeholder: false,
         })
     }
@@ -1613,9 +1946,12 @@ impl<'a> Parser<'a> {
     /// Parses a field identifier. Specialized version of `parse_ident_common`
     /// for better diagnostics and suggestions.
     fn parse_field_ident(&mut self, adt_ty: &str, lo: Span) -> PResult<'a, Ident> {
-        let (ident, is_raw) = self.ident_or_err()?;
-        if !is_raw && ident.is_reserved() {
-            let err = if self.check_fn_front_matter(false) {
+        let (ident, is_raw) = self.ident_or_err(true)?;
+        if ident.name == kw::Underscore {
+            self.sess.gated_spans.gate(sym::unnamed_fields, lo);
+        } else if !is_raw && ident.is_reserved() {
+            let snapshot = self.create_snapshot_for_diagnostic();
+            let err = if self.check_fn_front_matter(false, Case::Sensitive) {
                 let inherited_vis = Visibility {
                     span: rustc_span::DUMMY_SP,
                     kind: VisibilityKind::Inherited,
@@ -1623,20 +1959,70 @@ impl<'a> Parser<'a> {
                 };
                 // We use `parse_fn` to get a span for the function
                 let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
-                if let Err(mut db) =
-                    self.parse_fn(&mut Vec::new(), fn_parse_mode, lo, &inherited_vis)
+                match self.parse_fn(
+                    &mut AttrVec::new(),
+                    fn_parse_mode,
+                    lo,
+                    &inherited_vis,
+                    Case::Insensitive,
+                ) {
+                    Ok(_) => {
+                        self.dcx().struct_span_err(
+                            lo.to(self.prev_token.span),
+                            format!("functions are not allowed in {adt_ty} definitions"),
+                        )
+                        .with_help(
+                            "unlike in C++, Java, and C#, functions are declared in `impl` blocks",
+                        )
+                        .with_help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information")
+                    }
+                    Err(err) => {
+                        err.cancel();
+                        self.restore_snapshot(snapshot);
+                        self.expected_ident_found_err()
+                    }
+                }
+            } else if self.eat_keyword(kw::Struct) {
+                match self.parse_item_struct() {
+                    Ok((ident, _)) => self
+                        .dcx()
+                        .struct_span_err(
+                            lo.with_hi(ident.span.hi()),
+                            format!("structs are not allowed in {adt_ty} definitions"),
+                        )
+                        .with_help(
+                            "consider creating a new `struct` definition instead of nesting",
+                        ),
+                    Err(err) => {
+                        err.cancel();
+                        self.restore_snapshot(snapshot);
+                        self.expected_ident_found_err()
+                    }
+                }
+            } else {
+                let mut err = self.expected_ident_found_err();
+                if self.eat_keyword_noexpect(kw::Let)
+                    && let removal_span = self.prev_token.span.until(self.token.span)
+                    && let Ok(ident) = self
+                        .parse_ident_common(false)
+                        // Cancel this error, we don't need it.
+                        .map_err(|err| err.cancel())
+                    && self.token.kind == TokenKind::Colon
                 {
-                    db.delay_as_bug();
+                    err.span_suggestion(
+                        removal_span,
+                        "remove this `let` keyword",
+                        String::new(),
+                        Applicability::MachineApplicable,
+                    );
+                    err.note("the `let` keyword is not allowed in `struct` fields");
+                    err.note("see <https://doc.rust-lang.org/book/ch05-01-defining-structs.html> for more information");
+                    err.emit();
+                    return Ok(ident);
+                } else {
+                    self.restore_snapshot(snapshot);
                 }
-                let mut err = self.struct_span_err(
-                    lo.to(self.prev_token.span),
-                    &format!("functions are not allowed in {adt_ty} definitions"),
-                );
-                err.help("unlike in C++, Java, and C#, functions are declared in `impl` blocks");
-                err.help("see https://doc.rust-lang.org/book/ch05-03-method-syntax.html for more information");
                 err
-            } else {
-                self.expected_ident_found()
             };
             return Err(err);
         }
@@ -1654,7 +2040,7 @@ impl<'a> Parser<'a> {
     fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
         let ident = self.parse_ident()?;
         let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
-            self.parse_mac_args()? // `MacBody`
+            self.parse_delim_args()? // `MacBody`
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             let params = self.parse_token_tree(); // `MacParams`
             let pspan = params.span();
@@ -1664,10 +2050,10 @@ impl<'a> Parser<'a> {
             let body = self.parse_token_tree(); // `MacBody`
             // Convert `MacParams MacBody` into `{ MacParams => MacBody }`.
             let bspan = body.span();
-            let arrow = TokenTree::token(token::FatArrow, pspan.between(bspan)); // `=>`
-            let tokens = TokenStream::new(vec![params.into(), arrow.into(), body.into()]);
+            let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
+            let tokens = TokenStream::new(vec![params, arrow, body]);
             let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
-            P(MacArgs::Delimited(dspan, MacDelimiter::Brace, tokens))
+            P(DelimArgs { dspan, delim: Delimiter::Brace, tokens })
         } else {
             return self.unexpected();
         };
@@ -1677,7 +2063,6 @@ impl<'a> Parser<'a> {
     }
 
     /// Is this a possibly malformed start of a `macro_rules! foo` item definition?
-
     fn is_macro_rules_item(&mut self) -> IsMacroRulesItem {
         if self.check_keyword(kw::MacroRules) {
             let macro_rules_span = self.token.span;
@@ -1686,14 +2071,10 @@ impl<'a> Parser<'a> {
                 return IsMacroRulesItem::Yes { has_bang: true };
             } else if self.look_ahead(1, |t| (t.is_ident())) {
                 // macro_rules foo
-                self.struct_span_err(macro_rules_span, "expected `!` after `macro_rules`")
-                    .span_suggestion(
-                        macro_rules_span,
-                        "add a `!`",
-                        "macro_rules!",
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                self.dcx().emit_err(errors::MacroRulesMissingBang {
+                    span: macro_rules_span,
+                    hi: macro_rules_span.shrink_to_hi(),
+                });
 
                 return IsMacroRulesItem::Yes { has_bang: false };
             }
@@ -1718,12 +2099,10 @@ impl<'a> Parser<'a> {
         if self.eat(&token::Not) {
             // Handle macro_rules! foo!
             let span = self.prev_token.span;
-            self.struct_span_err(span, "macro names aren't followed by a `!`")
-                .span_suggestion(span, "remove the `!`", "", Applicability::MachineApplicable)
-                .emit();
+            self.dcx().emit_err(errors::MacroNameRemoveBang { span });
         }
 
-        let body = self.parse_mac_args()?;
+        let body = self.parse_delim_args()?;
         self.eat_semi_for_macro_if_needed(&body);
         self.complain_if_pub_macro(vis, true);
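
A small hypothetical example of the two misspellings these recoveries report, next to the accepted definition:

// macro_rules answer { () => { 42 }; }     // error: expected `!` after `macro_rules`
// macro_rules! answer! { () => { 42 }; }   // error: macro names aren't followed by a `!`
macro_rules! answer {
    () => {
        42
    };
}

fn main() {
    assert_eq!(answer!(), 42);
}
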
 
@@ -1740,63 +2119,35 @@ impl<'a> Parser<'a> {
         let vstr = pprust::vis_to_string(vis);
         let vstr = vstr.trim_end();
         if macro_rules {
-            let msg = format!("can't qualify macro_rules invocation with `{vstr}`");
-            self.struct_span_err(vis.span, &msg)
-                .span_suggestion(
-                    vis.span,
-                    "try exporting the macro",
-                    "#[macro_export]",
-                    Applicability::MaybeIncorrect, // speculative
-                )
-                .emit();
+            self.dcx().emit_err(errors::MacroRulesVisibility { span: vis.span, vis: vstr });
         } else {
-            self.struct_span_err(vis.span, "can't qualify macro invocation with `pub`")
-                .span_suggestion(
-                    vis.span,
-                    "remove the visibility",
-                    "",
-                    Applicability::MachineApplicable,
-                )
-                .help(&format!("try adjusting the macro to put `{vstr}` inside the invocation"))
-                .emit();
+            self.dcx().emit_err(errors::MacroInvocationVisibility { span: vis.span, vis: vstr });
         }
     }
 
-    fn eat_semi_for_macro_if_needed(&mut self, args: &MacArgs) {
+    fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
         if args.need_semicolon() && !self.eat(&token::Semi) {
             self.report_invalid_macro_expansion_item(args);
         }
     }
 
-    fn report_invalid_macro_expansion_item(&self, args: &MacArgs) {
-        let span = args.span().expect("undelimited macro call");
-        let mut err = self.struct_span_err(
+    fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) {
+        let span = args.dspan.entire();
+        let mut err = self.dcx().struct_span_err(
             span,
             "macros that expand to items must be delimited with braces or followed by a semicolon",
         );
         // FIXME: This will make us not emit the help even for declarative
         // macros within the same crate (that we can fix), which is sad.
         if !span.from_expansion() {
-            if self.unclosed_delims.is_empty() {
-                let DelimSpan { open, close } = match args {
-                    MacArgs::Empty | MacArgs::Eq(..) => unreachable!(),
-                    MacArgs::Delimited(dspan, ..) => *dspan,
-                };
-                err.multipart_suggestion(
-                    "change the delimiters to curly braces",
-                    vec![(open, "{".to_string()), (close, '}'.to_string())],
-                    Applicability::MaybeIncorrect,
-                );
-            } else {
-                err.span_suggestion(
-                    span,
-                    "change the delimiters to curly braces",
-                    " { /* items */ }",
-                    Applicability::HasPlaceholders,
-                );
-            }
+            let DelimSpan { open, close } = args.dspan;
+            err.multipart_suggestion(
+                "change the delimiters to curly braces",
+                vec![(open, "{".to_string()), (close, '}'.to_string())],
+                Applicability::MaybeIncorrect,
+            );
             err.span_suggestion(
-                span.shrink_to_hi(),
+                span.with_neighbor(self.token.span).shrink_to_hi(),
                 "add a semicolon",
                 ';',
                 Applicability::MaybeIncorrect,
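
To make the diagnostic concrete, a hypothetical example of a macro invoked in item position: with parentheses (or brackets) the trailing semicolon is required, while braces need none, which is exactly what the two suggestions above offer.

macro_rules! make_fn {
    () => {
        fn generated() -> u8 {
            7
        }
    };
}

// Parenthesized item-position invocation: the trailing `;` is mandatory.
make_fn!();
// A braced invocation, `make_fn! {}`, would need no semicolon.

fn main() {
    assert_eq!(generated(), 7);
}
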
@@ -1816,18 +2167,12 @@ impl<'a> Parser<'a> {
             let kw_token = self.token.clone();
             let kw_str = pprust::token_to_string(&kw_token);
             let item = self.parse_item(ForceCollect::No)?;
-
-            self.struct_span_err(
-                kw_token.span,
-                &format!("`{kw_str}` definition cannot be nested inside `{keyword}`"),
-            )
-            .span_suggestion(
-                item.unwrap().span,
-                &format!("consider creating a new `{kw_str}` definition instead of nesting"),
-                "",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+            self.dcx().emit_err(errors::NestedAdt {
+                span: kw_token.span,
+                item: item.unwrap().span,
+                kw_str,
+                keyword: keyword.as_str(),
+            });
             // We successfully parsed the item but we must inform the caller about nested problem.
             return Ok(false);
         }
@@ -1901,16 +2246,32 @@ impl<'a> Parser<'a> {
     /// Parse a function starting from the front matter (`const ...`) to the body `{ ... }` or `;`.
     fn parse_fn(
         &mut self,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         fn_parse_mode: FnParseMode,
         sig_lo: Span,
         vis: &Visibility,
+        case: Case,
     ) -> PResult<'a, (Ident, FnSig, Generics, Option<P<Block>>)> {
-        let header = self.parse_fn_front_matter(vis)?; // `const ... fn`
+        let fn_span = self.token.span;
+        let header = self.parse_fn_front_matter(vis, case)?; // `const ... fn`
         let ident = self.parse_ident()?; // `foo`
         let mut generics = self.parse_generics()?; // `<'a, T, ...>`
-        let decl =
-            self.parse_fn_decl(fn_parse_mode.req_name, AllowPlus::Yes, RecoverReturnSign::Yes)?; // `(p: u8, ...)`
+        let decl = match self.parse_fn_decl(
+            fn_parse_mode.req_name,
+            AllowPlus::Yes,
+            RecoverReturnSign::Yes,
+        ) {
+            Ok(decl) => decl,
+            Err(old_err) => {
+                // If we see `for Ty ...` then the user probably meant an `impl` item.
+                if self.token.is_keyword(kw::For) {
+                    old_err.cancel();
+                    return Err(self.dcx().create_err(errors::FnTypoWithImpl { fn_span }));
+                } else {
+                    return Err(old_err);
+                }
+            }
+        };
         generics.where_clause = self.parse_where_clause()?; // `where T: Ord`
 
         let mut sig_hi = self.prev_token.span;
@@ -1924,7 +2285,7 @@ impl<'a> Parser<'a> {
     /// or e.g. a block when the function is a provided one.
     fn parse_fn_body(
         &mut self,
-        attrs: &mut Vec<Attribute>,
+        attrs: &mut AttrVec,
         ident: &Ident,
         sig_hi: &mut Span,
         req_body: bool,
@@ -1939,9 +2300,10 @@ impl<'a> Parser<'a> {
             // Include the trailing semicolon in the span of the signature
             self.expect_semi()?;
             *sig_hi = self.prev_token.span;
-            (Vec::new(), None)
+            (AttrVec::new(), None)
         } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
-            self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))?
+            self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
+                .map(|(attrs, body)| (attrs, Some(body)))?
         } else if self.token.kind == token::Eq {
             // Recover `fn foo() = $expr;`.
             self.bump(); // `=`
@@ -1949,31 +2311,40 @@ impl<'a> Parser<'a> {
             let _ = self.parse_expr()?;
             self.expect_semi()?; // `;`
             let span = eq_sp.to(self.prev_token.span);
-            self.struct_span_err(span, "function body cannot be `= expression;`")
-                .multipart_suggestion(
-                    "surround the expression with `{` and `}` instead of `=` and `;`",
-                    vec![(eq_sp, "{".to_string()), (self.prev_token.span, " }".to_string())],
-                    Applicability::MachineApplicable,
-                )
-                .emit();
-            (Vec::new(), Some(self.mk_block_err(span)))
+            self.dcx().emit_err(errors::FunctionBodyEqualsExpr {
+                span,
+                sugg: errors::FunctionBodyEqualsExprSugg { eq: eq_sp, semi: self.prev_token.span },
+            });
+            (AttrVec::new(), Some(self.mk_block_err(span)))
         } else {
             let expected = if req_body {
                 &[token::OpenDelim(Delimiter::Brace)][..]
             } else {
                 &[token::Semi, token::OpenDelim(Delimiter::Brace)]
             };
-            if let Err(mut err) = self.expected_one_of_not_found(&[], &expected) {
+            if let Err(mut err) = self.expected_one_of_not_found(&[], expected) {
                 if self.token.kind == token::CloseDelim(Delimiter::Brace) {
                     // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in
                     // the AST for typechecking.
                     err.span_label(ident.span, "while parsing this `fn`");
                     err.emit();
                 } else {
+                    // check for typo'd Fn* trait bounds such as
+                    // fn foo<F>() where F: FnOnce -> () {}
+                    if self.token.kind == token::RArrow {
+                        let machine_applicable = [sym::FnOnce, sym::FnMut, sym::Fn]
+                            .into_iter()
+                            .any(|s| self.prev_token.is_ident_named(s));
+
+                        err.subdiagnostic(errors::FnTraitMissingParen {
+                            span: self.prev_token.span,
+                            machine_applicable,
+                        });
+                    }
                     return Err(err);
                 }
             }
-            (Vec::new(), None)
+            (AttrVec::new(), None)
         };
         attrs.extend(inner_attrs);
         Ok(body)
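
A brief illustration (invented names) of the typo the new `FnTraitMissingParen` subdiagnostic targets, together with the accepted spelling that parenthesizes the `Fn*` bound:

// The commented signature is the rejected shape; the bound needs `FnOnce()`.
// fn call_it<F>(f: F) -> u8 where F: FnOnce -> u8 { f() }
fn call_it<F>(f: F) -> u8
where
    F: FnOnce() -> u8,
{
    f()
}

fn main() {
    assert_eq!(call_it(|| 9), 9);
}
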
@@ -1983,33 +2354,46 @@ impl<'a> Parser<'a> {
     ///
     /// `check_pub` adds additional `pub` to the checks in case users place it
     /// wrongly; it can be used to ensure `pub` never comes after `default`.
-    pub(super) fn check_fn_front_matter(&mut self, check_pub: bool) -> bool {
+    pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
         // We use an over-approximation here.
         // `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
         // `pub` is added in case users got confused with the ordering like `async pub fn`,
         // only if it wasn't preceded by `default` as `default pub` is invalid.
         let quals: &[Symbol] = if check_pub {
-            &[kw::Pub, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
+            &[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
         } else {
-            &[kw::Const, kw::Async, kw::Unsafe, kw::Extern]
+            &[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Extern]
         };
-        self.check_keyword(kw::Fn) // Definitely an `fn`.
+        self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
             // `$qual fn` or `$qual $qual`:
-            || quals.iter().any(|&kw| self.check_keyword(kw))
+            || quals.iter().any(|&kw| self.check_keyword_case(kw, case))
                 && self.look_ahead(1, |t| {
                     // `$qual fn`, e.g. `const fn` or `async fn`.
-                    t.is_keyword(kw::Fn)
+                    t.is_keyword_case(kw::Fn, case)
                     // Two qualifiers `$qual $qual` is enough, e.g. `async unsafe`.
-                    || t.is_non_raw_ident_where(|i| quals.contains(&i.name)
-                        // Rule out 2015 `const async: T = val`.
-                        && i.is_reserved()
-                        // Rule out unsafe extern block.
-                        && !self.is_unsafe_foreign_mod())
+                    || (
+                        (
+                            t.is_non_raw_ident_where(|i|
+                                quals.contains(&i.name)
+                                    // Rule out 2015 `const async: T = val`.
+                                    && i.is_reserved()
+                            )
+                            || case == Case::Insensitive
+                                && t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
+                        )
+                        // Rule out `unsafe extern {`.
+                        && !self.is_unsafe_foreign_mod()
+                        // Rule out `async gen {` and `async gen move {`
+                        && !self.is_async_gen_block())
                 })
             // `extern ABI fn`
-            || self.check_keyword(kw::Extern)
+            || self.check_keyword_case(kw::Extern, case)
                 && self.look_ahead(1, |t| t.can_begin_literal_maybe_minus())
-                && self.look_ahead(2, |t| t.is_keyword(kw::Fn))
+                && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
+                    // this branch only exists for a better diagnostic later; `pub` is not allowed here
+                    (self.may_recover()
+                        && self.look_ahead(2, |t| t.is_keyword(kw::Pub))
+                        && self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
     }
 
     /// Parses all the "front matter" (or "qualifiers") for a `fn` declaration,
@@ -2023,24 +2407,40 @@ impl<'a> Parser<'a> {
     ///
     /// `vis` represents the visibility that was already parsed, if any. Use
     /// `Visibility::Inherited` when no visibility is known.
-    pub(super) fn parse_fn_front_matter(&mut self, orig_vis: &Visibility) -> PResult<'a, FnHeader> {
+    pub(super) fn parse_fn_front_matter(
+        &mut self,
+        orig_vis: &Visibility,
+        case: Case,
+    ) -> PResult<'a, FnHeader> {
         let sp_start = self.token.span;
-        let constness = self.parse_constness();
+        let constness = self.parse_constness(case);
 
         let async_start_sp = self.token.span;
-        let asyncness = self.parse_asyncness();
+        let coroutine_kind = self.parse_coroutine_kind(case);
 
         let unsafe_start_sp = self.token.span;
-        let unsafety = self.parse_unsafety();
+        let unsafety = self.parse_unsafety(case);
 
         let ext_start_sp = self.token.span;
-        let ext = self.parse_extern();
+        let ext = self.parse_extern(case);
 
-        if let Async::Yes { span, .. } = asyncness {
-            self.ban_async_in_2015(span);
+        if let Some(CoroutineKind::Async { span, .. }) = coroutine_kind {
+            if span.is_rust_2015() {
+                self.dcx().emit_err(errors::AsyncFnIn2015 {
+                    span,
+                    help: errors::HelpUseLatestEdition::new(),
+                });
+            }
         }
 
-        if !self.eat_keyword(kw::Fn) {
+        match coroutine_kind {
+            Some(CoroutineKind::Gen { span, .. }) | Some(CoroutineKind::AsyncGen { span, .. }) => {
+                self.sess.gated_spans.gate(sym::gen_blocks, span);
+            }
+            Some(CoroutineKind::Async { .. }) | None => {}
+        }
+
+        if !self.eat_keyword_case(kw::Fn, case) {
             // It is possible for `expect_one_of` to recover given the contents of
             // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
             // account for this.
@@ -2054,22 +2454,53 @@ impl<'a> Parser<'a> {
                         Misplaced(Span),
                     }
 
+                    // We may be able to recover
+                    let mut recover_constness = constness;
+                    let mut recover_coroutine_kind = coroutine_kind;
+                    let mut recover_unsafety = unsafety;
                     // This will allow the machine fix to directly place the keyword in the correct place or to indicate
                     // that the keyword is already present and the second instance should be removed.
                     let wrong_kw = if self.check_keyword(kw::Const) {
                         match constness {
                             Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
-                            Const::No => Some(WrongKw::Misplaced(async_start_sp)),
+                            Const::No => {
+                                recover_constness = Const::Yes(self.token.span);
+                                Some(WrongKw::Misplaced(async_start_sp))
+                            }
                         }
                     } else if self.check_keyword(kw::Async) {
-                        match asyncness {
-                            Async::Yes { span, .. } => Some(WrongKw::Duplicated(span)),
-                            Async::No => Some(WrongKw::Misplaced(unsafe_start_sp)),
+                        match coroutine_kind {
+                            Some(CoroutineKind::Async { span, .. }) => {
+                                Some(WrongKw::Duplicated(span))
+                            }
+                            Some(CoroutineKind::AsyncGen { span, .. }) => {
+                                Some(WrongKw::Duplicated(span))
+                            }
+                            Some(CoroutineKind::Gen { .. }) => {
+                                recover_coroutine_kind = Some(CoroutineKind::AsyncGen {
+                                    span: self.token.span,
+                                    closure_id: DUMMY_NODE_ID,
+                                    return_impl_trait_id: DUMMY_NODE_ID,
+                                });
+                                // FIXME(gen_blocks): This span is wrong, didn't want to think about it.
+                                Some(WrongKw::Misplaced(unsafe_start_sp))
+                            }
+                            None => {
+                                recover_coroutine_kind = Some(CoroutineKind::Async {
+                                    span: self.token.span,
+                                    closure_id: DUMMY_NODE_ID,
+                                    return_impl_trait_id: DUMMY_NODE_ID,
+                                });
+                                Some(WrongKw::Misplaced(unsafe_start_sp))
+                            }
                         }
                     } else if self.check_keyword(kw::Unsafe) {
                         match unsafety {
                             Unsafe::Yes(sp) => Some(WrongKw::Duplicated(sp)),
-                            Unsafe::No => Some(WrongKw::Misplaced(ext_start_sp)),
+                            Unsafe::No => {
+                                recover_unsafety = Unsafe::Yes(self.token.span);
+                                Some(WrongKw::Misplaced(ext_start_sp))
+                            }
                         }
                     } else {
                         None
@@ -2083,11 +2514,11 @@ impl<'a> Parser<'a> {
 
                         err.span_suggestion(
                             self.token.uninterpolated_span(),
-                            &format!("`{original_kw}` already used earlier, remove this one"),
+                            format!("`{original_kw}` already used earlier, remove this one"),
                             "",
                             Applicability::MachineApplicable,
                         )
-                        .span_note(original_sp, &format!("`{original_kw}` first seen here"));
+                        .span_note(original_sp, format!("`{original_kw}` first seen here"));
                     }
                     // The keyword has not been seen yet, suggest correct placement in the function front matter
                     else if let Some(WrongKw::Misplaced(correct_pos_sp)) = wrong_kw {
@@ -2098,7 +2529,7 @@ impl<'a> Parser<'a> {
 
                             err.span_suggestion(
                                     correct_pos_sp.to(misplaced_qual_sp),
-                                    &format!("`{misplaced_qual}` must come before `{current_qual}`"),
+                                    format!("`{misplaced_qual}` must come before `{current_qual}`"),
                                     format!("{misplaced_qual} {current_qual}"),
                                     Applicability::MachineApplicable,
                                 ).note("keyword order for function declarations is `pub`, `default`, `const`, `async`, `unsafe`, `extern`");
@@ -2122,7 +2553,7 @@ impl<'a> Parser<'a> {
                             if matches!(orig_vis.kind, VisibilityKind::Inherited) {
                                 err.span_suggestion(
                                     sp_start.to(self.prev_token.span),
-                                    &format!("visibility `{vs}` must come before `{snippet}`"),
+                                    format!("visibility `{vs}` must come before `{snippet}`"),
                                     format!("{vs} {snippet}"),
                                     Applicability::MachineApplicable,
                                 );
@@ -2139,23 +2570,31 @@ impl<'a> Parser<'a> {
                             }
                         }
                     }
+
+                    // FIXME(gen_blocks): add keyword recovery logic for genness
+
+                    if wrong_kw.is_some()
+                        && self.may_recover()
+                        && self.look_ahead(1, |tok| tok.is_keyword_case(kw::Fn, case))
+                    {
+                        // Advance past the misplaced keyword and `fn`
+                        self.bump();
+                        self.bump();
+                        err.emit();
+                        return Ok(FnHeader {
+                            constness: recover_constness,
+                            unsafety: recover_unsafety,
+                            coroutine_kind: recover_coroutine_kind,
+                            ext,
+                        });
+                    }
+
                     return Err(err);
                 }
             }
         }
 
-        Ok(FnHeader { constness, unsafety, asyncness, ext })
-    }
-
-    /// We are parsing `async fn`. If we are on Rust 2015, emit an error.
-    fn ban_async_in_2015(&self, span: Span) {
-        if span.rust_2015() {
-            let diag = self.diagnostic();
-            struct_span_err!(diag, span, E0670, "`async fn` is not permitted in Rust 2015")
-                .span_label(span, "to use `async fn`, switch to Rust 2018 or later")
-                .help_use_latest_edition()
-                .emit();
-        }
+        Ok(FnHeader { constness, unsafety, coroutine_kind, ext })
     }
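
For reference, a hypothetical example of function front matter: the note above gives the canonical qualifier order as `pub`, `default`, `const`, `async`, `unsafe`, `extern`, and the new recovery lets parsing continue with a corrected header when a single keyword is misplaced.

// The commented line misplaces `async`; the parser suggests the canonical order.
// unsafe async fn fetch() {}      // error: `async` must come before `unsafe`
async unsafe fn fetch() {}

pub const fn double(x: u8) -> u8 {
    x * 2
}

pub unsafe extern "C" fn raw_id(x: u8) -> u8 {
    x
}

fn main() {
    assert_eq!(double(3), 6);
    assert_eq!(unsafe { raw_id(5) }, 5);
}
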
 
     /// Parses the parameter list and result type of a function declaration.
@@ -2172,13 +2611,26 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the parameter list of a function, including the `(` and `)` delimiters.
-    fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, Vec<Param>> {
+    pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
         let mut first_param = true;
         // Parse the arguments, starting out with `self` being allowed...
+        if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis)
+        // might be typo'd trait impl, handled elsewhere
+        && !self.token.is_keyword(kw::For)
+        {
+            // recover from missing argument list, e.g. `fn main -> () {}`
+            self.dcx()
+                .emit_err(errors::MissingFnParams { span: self.prev_token.span.shrink_to_hi() });
+            return Ok(ThinVec::new());
+        }
+
         let (mut params, _) = self.parse_paren_comma_seq(|p| {
-            let param = p.parse_param_general(req_name, first_param).or_else(|mut e| {
+            p.recover_diff_marker();
+            let snapshot = p.create_snapshot_for_diagnostic();
+            let param = p.parse_param_general(req_name, first_param).or_else(|e| {
                 e.emit();
                 let lo = p.prev_token.span;
+                p.restore_snapshot(snapshot);
                 // Skip every token until next possible arg or end.
                 p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
                 // Create a placeholder argument for proper arg count (issue #34264).
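
A tiny hypothetical example of the missing-parameter-list shape recovered above, next to the corrected spelling:

// The commented line has no parameter list at all; the recovery reports the
// missing `()` right after the function name and continues parsing.
// fn answer -> u8 { 42 }
fn answer() -> u8 {
    42
}

fn main() {
    assert_eq!(answer(), 42);
}
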
@@ -2202,18 +2654,17 @@ impl<'a> Parser<'a> {
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
             if let Some(mut param) = this.parse_self_param()? {
-                param.attrs = attrs.into();
+                param.attrs = attrs;
                 let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
                 return Ok((res?, TrailingToken::None));
             }
 
             let is_name_required = match this.token.kind {
                 token::DotDotDot => false,
-                _ => req_name(this.token.span.edition()),
+                _ => req_name(this.token.span.with_neighbor(this.prev_token.span).edition()),
             };
             let (pat, ty) = if is_name_required || this.is_named_param() {
                 debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
-
                 let (pat, colon) = this.parse_fn_param_pat_colon()?;
                 if !colon {
                     let mut err = this.unexpected::<()>().unwrap_err();
@@ -2231,7 +2682,7 @@ impl<'a> Parser<'a> {
                 (pat, this.parse_ty_for_param()?)
             } else {
                 debug!("parse_param_general ident_to_pat");
-                let parser_snapshot_before_ty = this.clone();
+                let parser_snapshot_before_ty = this.create_snapshot_for_diagnostic();
                 this.eat_incorrect_doc_comment_for_param_type();
                 let mut ty = this.parse_ty_for_param();
                 if ty.is_ok()
@@ -2245,7 +2696,7 @@ impl<'a> Parser<'a> {
                 match ty {
                     Ok(ty) => {
                         let ident = Ident::new(kw::Empty, this.prev_token.span);
-                        let bm = BindingMode::ByValue(Mutability::Not);
+                        let bm = BindingAnnotation::NONE;
                         let pat = this.mk_pat_ident(ty.span, bm, ident);
                         (pat, ty)
                     }
@@ -2254,23 +2705,16 @@ impl<'a> Parser<'a> {
                     // Recover from attempting to parse the argument as a type without pattern.
                     Err(err) => {
                         err.cancel();
-                        *this = parser_snapshot_before_ty;
+                        this.restore_snapshot(parser_snapshot_before_ty);
                         this.recover_arg_parse()?
                     }
                 }
             };
 
-            let span = lo.until(this.token.span);
+            let span = lo.to(this.prev_token.span);
 
             Ok((
-                Param {
-                    attrs: attrs.into(),
-                    id: ast::DUMMY_NODE_ID,
-                    is_placeholder: false,
-                    pat,
-                    span,
-                    ty,
-                },
+                Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty },
                 TrailingToken::None,
             ))
         })
@@ -2307,9 +2751,7 @@ impl<'a> Parser<'a> {
         };
         // Recover for the grammar `*self`, `*const self`, and `*mut self`.
         let recover_self_ptr = |this: &mut Self| {
-            let msg = "cannot pass `self` by raw pointer";
-            let span = this.token.span;
-            this.struct_span_err(span, msg).span_label(span, msg).emit();
+            this.dcx().emit_err(errors::SelfArgumentPointer { span: this.token.span });
 
             Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span))
         };
@@ -2376,8 +2818,8 @@ impl<'a> Parser<'a> {
     }
 
     fn is_named_param(&self) -> bool {
-        let offset = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
+        let offset = match &self.token.kind {
+            token::Interpolated(nt) => match &nt.0 {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             },
@@ -2390,15 +2832,13 @@ impl<'a> Parser<'a> {
             && self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
-    fn recover_first_param(&mut self) -> &'static str {
-        match self
-            .parse_outer_attributes()
-            .and_then(|_| self.parse_self_param())
-            .map_err(|e| e.cancel())
-        {
-            Ok(Some(_)) => "method",
-            _ => "function",
-        }
+    fn recover_self_param(&mut self) -> bool {
+        matches!(
+            self.parse_outer_attributes()
+                .and_then(|_| self.parse_self_param())
+                .map_err(|e| e.cancel()),
+            Ok(Some(_))
+        )
     }
 }
 
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 67e6402c0ae..623407eb380 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -10,43 +10,50 @@ mod path;
 mod stmt;
 mod ty;
 
-use crate::lexer::UnmatchedBrace;
+use crate::lexer::UnmatchedDelim;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
-use diagnostics::Error;
+pub(crate) use expr::ForbiddenLetReason;
 pub(crate) use item::FnParseMode;
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::AttributesData;
-use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
+use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
+use rustc_ast::CoroutineKind;
 use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, Extern};
-use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacArgsEq, MacDelimiter, Mutability, StrLit};
+use rustc_ast::{self as ast, AnonConst, Const, DelimArgs, Extern};
+use rustc_ast::{AttrArgs, AttrArgsEq, Expr, ExprKind, Mutability, StrLit};
 use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::PResult;
-use rustc_errors::{
-    struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, MultiSpan,
-};
+use rustc_errors::{Applicability, DiagnosticBuilder, FatalError, MultiSpan};
 use rustc_session::parse::ParseSess;
-use rustc_span::source_map::{Span, DUMMY_SP};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
+use rustc_span::{Span, DUMMY_SP};
+use std::ops::Range;
+use std::{mem, slice};
+use thin_vec::ThinVec;
 use tracing::debug;
 
-use std::ops::Range;
-use std::{cmp, mem, slice};
+use crate::errors::{
+    self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
+};
 
 bitflags::bitflags! {
+    #[derive(Clone, Copy)]
     struct Restrictions: u8 {
         const STMT_EXPR         = 1 << 0;
         const NO_STRUCT_LITERAL = 1 << 1;
         const CONST_EXPR        = 1 << 2;
+        const ALLOW_LET         = 1 << 3;
+        const IN_IF_GUARD       = 1 << 4;
+        const IS_PAT            = 1 << 5;
     }
 }
 
@@ -75,6 +82,7 @@ pub enum ForceCollect {
 pub enum TrailingToken {
     None,
     Semi,
+    Gt,
     /// If the trailing token is a comma, then capture it
     /// Otherwise, ignore the trailing token
     MaybeComma,
@@ -85,7 +93,7 @@ pub enum TrailingToken {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = &$p.token.kind {
-            if let token::$constructor(x) = &**nt {
+            if let token::$constructor(x) = &nt.0 {
                 let $x = x.clone();
                 $p.bump();
                 return Ok($e);
@@ -99,63 +107,79 @@ macro_rules! maybe_whole {
 macro_rules! maybe_recover_from_interpolated_ty_qpath {
     ($self: expr, $allow_qpath_recovery: expr) => {
         if $allow_qpath_recovery
-                    && $self.look_ahead(1, |t| t == &token::ModSep)
-                    && let token::Interpolated(nt) = &$self.token.kind
-                    && let token::NtTy(ty) = &**nt
-                {
-                    let ty = ty.clone();
-                    $self.bump();
-                    return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
-                }
+            && $self.may_recover()
+            && $self.look_ahead(1, |t| t == &token::ModSep)
+            && let token::Interpolated(nt) = &$self.token.kind
+            && let token::NtTy(ty) = &nt.0
+        {
+            let ty = ty.clone();
+            $self.bump();
+            return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
+        }
     };
 }
 
+#[derive(Clone, Copy)]
+pub enum Recovery {
+    Allowed,
+    Forbidden,
+}
+
 #[derive(Clone)]
 pub struct Parser<'a> {
     pub sess: &'a ParseSess,
     /// The current token.
     pub token: Token,
-    /// The spacing for the current token
+    /// The spacing for the current token.
     pub token_spacing: Spacing,
     /// The previous token.
     pub prev_token: Token,
     pub capture_cfg: bool,
     restrictions: Restrictions,
     expected_tokens: Vec<TokenType>,
-    // Important: This must only be advanced from `bump` to ensure that
-    // `token_cursor.num_next_calls` is updated properly.
     token_cursor: TokenCursor,
-    desugar_doc_comments: bool,
+    // The number of calls to `bump`, i.e. the position in the token stream.
+    num_bump_calls: usize,
+    // During parsing we may sometimes need to 'unglue' a glued token into two
+    // component tokens (e.g. '>>' into '>' and '>'), so the parser can consume
+    // them one at a time. This process bypasses the normal capturing mechanism
+    // (e.g. `num_bump_calls` will not be incremented), since the 'unglued'
+    // tokens do not exist in the original `TokenStream`.
+    //
+    // If we end up consuming both unglued tokens, this is not an issue. We'll
+    // end up capturing the single 'glued' token.
+    //
+    // However, sometimes we may want to capture just the first 'unglued'
+    // token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
+    // requires us to unglue the trailing `>>` token. The `break_last_token`
+    // field is used to track this token. It gets appended to the captured
+    // stream when we evaluate a `LazyAttrTokenStream`.
+    break_last_token: bool,
     /// This field is used to keep track of how many left angle brackets we have seen. This is
     /// required in order to detect extra leading left angle brackets (`<` characters) and error
     /// appropriately.
     ///
     /// See the comments in the `parse_path_segment` function for more details.
-    unmatched_angle_bracket_count: u32,
-    max_angle_bracket_count: u32,
-    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
-    /// it gets removed from here. Every entry left at the end gets emitted as an independent
-    /// error.
-    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
+    unmatched_angle_bracket_count: u16,
+    max_angle_bracket_count: u16,
+    angle_bracket_nesting: u16,
+
     last_unexpected_token_span: Option<Span>,
-    /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
-    /// looked like it could have been a mistyped path or literal `Option:Some(42)`).
-    pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
     /// If present, this `Parser` is not parsing Rust code but rather a macro call.
     subparser_name: Option<&'static str>,
     capture_state: CaptureState,
     /// This allows us to recover when the user forget to add braces around
     /// multiple statements in the closure body.
     pub current_closure: Option<ClosureSpans>,
-    /// Used to track where `let`s are allowed. For example, `if true && let 1 = 1` is valid
-    /// but `[1, 2, 3][let _ = ()]` is not.
-    let_expr_allowed: bool,
+    /// Whether the parser is allowed to do recovery.
+    /// This is disabled when parsing macro arguments, see #103534
+    pub recovery: Recovery,
 }
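
As a concrete companion to the `break_last_token` comment above, ordinary user code that produces a glued `>>`: the lexer emits a single `>>` for the two closing angle brackets of `Option<Vec<u8>>`, and the parser splits it into two `>` tokens while finishing the nested generic arguments.

fn main() {
    // The `>>` closing `Option<Vec<u8>>` arrives as one token and is "unglued".
    let bytes: Option<Vec<u8>> = Some(vec![1, 2, 3]);
    assert_eq!(bytes.map(|v| v.len()), Some(3));
}
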
 
-// This type is used a lot, e.g. it's cloned when matching many declarative macro rules. Make sure
+// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(Parser<'_>, 336);
+rustc_data_structures::static_assert_size!(Parser<'_>, 264);
 
 /// Stores span information about a closure.
 #[derive(Clone)]
@@ -173,7 +197,7 @@ pub struct ClosureSpans {
 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`
 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
 /// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
-/// on an `AttrAnnotatedTokenStream`
+/// on an `AttrTokenStream`.
 ///
 /// 2. When we parse an inner attribute while collecting tokens. We
 /// remove inner attributes from the token stream entirely, and
@@ -186,7 +210,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
 
 /// Controls how we capture tokens. Capturing can be expensive,
 /// so we try to avoid performing capturing in cases where
-/// we will never need an `AttrAnnotatedTokenStream`
+/// we will never need an `AttrTokenStream`.
 #[derive(Copy, Clone)]
 pub enum Capturing {
     /// We aren't performing any capturing - this is the default mode.
@@ -202,151 +226,73 @@ struct CaptureState {
     inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
 }
 
-impl<'a> Drop for Parser<'a> {
-    fn drop(&mut self) {
-        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
-    }
-}
-
+/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
+/// use this type to emit them as a linear sequence. But a linear sequence is
+/// what the parser expects, for the most part.
 #[derive(Clone)]
 struct TokenCursor {
-    // The current (innermost) frame. `frame` and `stack` could be combined,
-    // but it's faster to have them separately to access `frame` directly
-    // rather than via something like `stack.last().unwrap()` or
-    // `stack[stack.len() - 1]`.
-    frame: TokenCursorFrame,
-    // Additional frames that enclose `frame`.
-    stack: Vec<TokenCursorFrame>,
-    desugar_doc_comments: bool,
-    // Counts the number of calls to `{,inlined_}next`.
-    num_next_calls: usize,
-    // During parsing, we may sometimes need to 'unglue' a
-    // glued token into two component tokens
-    // (e.g. '>>' into '>' and '>), so that the parser
-    // can consume them one at a time. This process
-    // bypasses the normal capturing mechanism
-    // (e.g. `num_next_calls` will not be incremented),
-    // since the 'unglued' tokens due not exist in
-    // the original `TokenStream`.
-    //
-    // If we end up consuming both unglued tokens,
-    // then this is not an issue - we'll end up
-    // capturing the single 'glued' token.
-    //
-    // However, in certain circumstances, we may
-    // want to capture just the first 'unglued' token.
-    // For example, capturing the `Vec<u8>`
-    // in `Option<Vec<u8>>` requires us to unglue
-    // the trailing `>>` token. The `break_last_token`
-    // field is used to track this token - it gets
-    // appended to the captured stream when
-    // we evaluate a `LazyTokenStream`
-    break_last_token: bool,
-}
-
-#[derive(Clone)]
-struct TokenCursorFrame {
-    delim_sp: Option<(Delimiter, DelimSpan)>,
-    tree_cursor: tokenstream::Cursor,
-}
-
-impl TokenCursorFrame {
-    fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
-        TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
-    }
+    // Cursor for the current (innermost) token stream. The delimiters for this
+    // token stream are found in `self.stack.last()`; when that is `None` then
+    // we are in the outermost token stream which never has delimiters.
+    tree_cursor: TokenTreeCursor,
+
+    // Token streams surrounding the current one. The delimiters for stack[n]'s
+    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
+    // because it's the outermost token stream which never has delimiters.
+    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
 }
 
 impl TokenCursor {
-    fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
-        self.inlined_next(desugar_doc_comments)
+    fn next(&mut self) -> (Token, Spacing) {
+        self.inlined_next()
     }
 
     /// This always-inlined version should only be used on hot code paths.
     #[inline(always)]
-    fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
+    fn inlined_next(&mut self) -> (Token, Spacing) {
         loop {
-            // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
-            // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
-            // removed.
-            if let Some((tree, spacing)) = self.frame.tree_cursor.next_with_spacing_ref() {
+            // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
+            // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
+            // below can be removed.
+            if let Some(tree) = self.tree_cursor.next_ref() {
                 match tree {
-                    &TokenTree::Token(ref token) => match (desugar_doc_comments, token) {
-                        (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
-                            return self.desugar(attr_style, data, span);
-                        }
-                        _ => return (token.clone(), *spacing),
-                    },
-                    &TokenTree::Delimited(sp, delim, ref tts) => {
-                        // Set `open_delim` to true here because we deal with it immediately.
-                        let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
-                        self.stack.push(mem::replace(&mut self.frame, frame));
+                    &TokenTree::Token(ref token, spacing) => {
+                        debug_assert!(!matches!(
+                            token.kind,
+                            token::OpenDelim(_) | token::CloseDelim(_)
+                        ));
+                        return (token.clone(), spacing);
+                    }
+                    &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
+                        let trees = tts.clone().into_trees();
+                        self.stack.push((
+                            mem::replace(&mut self.tree_cursor, trees),
+                            sp,
+                            spacing,
+                            delim,
+                        ));
                         if delim != Delimiter::Invisible {
-                            return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
+                            return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                         }
-                        // No open delimeter to return; continue on to the next iteration.
+                        // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some(frame) = self.stack.pop() {
-                if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
-                    self.frame = frame;
-                    return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
+            } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
+                // We have exhausted this token stream. Move back to its parent token stream.
+                self.tree_cursor = tree_cursor;
+                if delim != Delimiter::Invisible {
+                    return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
-                self.frame = frame;
                 // No close delimiter to return; continue on to the next iteration.
             } else {
+                // We have exhausted the outermost token stream. The use of
+                // `Spacing::Alone` is arbitrary and immaterial, because the
+                // `Eof` token's spacing is never used.
                 return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
             }
         }
     }
-
-    fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token, Spacing) {
-        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-        // required to wrap the text.
-        let mut num_of_hashes = 0;
-        let mut count = 0;
-        for ch in data.as_str().chars() {
-            count = match ch {
-                '"' => 1,
-                '#' if count > 0 => count + 1,
-                _ => 0,
-            };
-            num_of_hashes = cmp::max(num_of_hashes, count);
-        }
-
-        let delim_span = DelimSpan::from_single(span);
-        let body = TokenTree::Delimited(
-            delim_span,
-            Delimiter::Bracket,
-            [
-                TokenTree::token(token::Ident(sym::doc, false), span),
-                TokenTree::token(token::Eq, span),
-                TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), span),
-            ]
-            .iter()
-            .cloned()
-            .collect::<TokenStream>(),
-        );
-
-        self.stack.push(mem::replace(
-            &mut self.frame,
-            TokenCursorFrame::new(
-                None,
-                if attr_style == AttrStyle::Inner {
-                    [TokenTree::token(token::Pound, span), TokenTree::token(token::Not, span), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
-                } else {
-                    [TokenTree::token(token::Pound, span), body]
-                        .iter()
-                        .cloned()
-                        .collect::<TokenStream>()
-                },
-            ),
-        ));
-
-        self.next(/* desugar_doc_comments */ false)
-    }
 }
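
A minimal, self-contained sketch of the traversal idea behind the new `TokenCursor`: a cursor over the current (innermost) stream plus a stack of parent cursors, flattening a tree of tokens into a linear sequence. The `Tree` and `Cursor` names are invented for this sketch, and unlike the real cursor it does not emit open/close delimiter tokens.

enum Tree {
    Leaf(char),
    Group(Vec<Tree>), // roughly analogous to a delimited `TokenTree`
}

struct Cursor {
    // Cursor for the current (innermost) stream.
    trees: std::vec::IntoIter<Tree>,
    // Streams surrounding the current one, like `TokenCursor::stack`.
    stack: Vec<std::vec::IntoIter<Tree>>,
}

impl Iterator for Cursor {
    type Item = char;

    fn next(&mut self) -> Option<char> {
        loop {
            match self.trees.next() {
                Some(Tree::Leaf(c)) => return Some(c),
                Some(Tree::Group(inner)) => {
                    // Descend into the nested stream, remembering the parent.
                    let parent = std::mem::replace(&mut self.trees, inner.into_iter());
                    self.stack.push(parent);
                }
                // The current stream is exhausted: pop back to the parent,
                // or stop at the outermost stream.
                None => self.trees = self.stack.pop()?,
            }
        }
    }
}

fn main() {
    let stream = vec![
        Tree::Leaf('a'),
        Tree::Group(vec![Tree::Leaf('b'), Tree::Leaf('c')]),
        Tree::Leaf('d'),
    ];
    let cursor = Cursor { trees: stream.into_iter(), stack: Vec::new() };
    assert_eq!(cursor.collect::<String>(), "abcd");
}
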
 
 #[derive(Debug, Clone, PartialEq)]
@@ -363,9 +309,9 @@ enum TokenType {
 
 impl TokenType {
     fn to_string(&self) -> String {
-        match *self {
-            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
-            TokenType::Keyword(kw) => format!("`{}`", kw),
+        match self {
+            TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
+            TokenType::Keyword(kw) => format!("`{kw}`"),
             TokenType::Operator => "an operator".to_string(),
             TokenType::Lifetime => "lifetime".to_string(),
             TokenType::Ident => "identifier".to_string(),
@@ -376,9 +322,15 @@ impl TokenType {
     }
 }
 
+/// Used by [`Parser::expect_any_with_type`].
 #[derive(Copy, Clone, Debug)]
 enum TokenExpectType {
+    /// Unencountered tokens are inserted into [`Parser::expected_tokens`].
+    /// See [`Parser::check`].
     Expect,
+
+    /// Unencountered tokens are not inserted into [`Parser::expected_tokens`].
+    /// See [`Parser::check_noexpect`].
     NoExpect,
 }
 
@@ -405,29 +357,45 @@ pub enum FollowedByType {
     No,
 }
 
-fn token_descr_opt(token: &Token) -> Option<&'static str> {
-    Some(match token.kind {
-        _ if token.is_special_ident() => "reserved identifier",
-        _ if token.is_used_keyword() => "keyword",
-        _ if token.is_unused_keyword() => "reserved keyword",
-        token::DocComment(..) => "doc comment",
-        _ => return None,
-    })
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum TokenDescription {
+    ReservedIdentifier,
+    Keyword,
+    ReservedKeyword,
+    DocComment,
 }
 
-pub(super) fn token_descr(token: &Token) -> String {
-    let token_str = pprust::token_to_string(token);
-    match token_descr_opt(token) {
-        Some(prefix) => format!("{} `{}`", prefix, token_str),
-        _ => format!("`{}`", token_str),
+impl TokenDescription {
+    pub fn from_token(token: &Token) -> Option<Self> {
+        match token.kind {
+            _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
+            _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
+            _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
+            token::DocComment(..) => Some(TokenDescription::DocComment),
+            _ => None,
+        }
     }
 }
 
+pub(super) fn token_descr(token: &Token) -> String {
+    let name = pprust::token_to_string(token).to_string();
+
+    let kind = match (TokenDescription::from_token(token), &token.kind) {
+        (Some(TokenDescription::ReservedIdentifier), _) => Some("reserved identifier"),
+        (Some(TokenDescription::Keyword), _) => Some("keyword"),
+        (Some(TokenDescription::ReservedKeyword), _) => Some("reserved keyword"),
+        (Some(TokenDescription::DocComment), _) => Some("doc comment"),
+        (None, TokenKind::Interpolated(node)) => Some(node.0.descr()),
+        (None, _) => None,
+    };
+
+    if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
+}
+
 impl<'a> Parser<'a> {
     pub fn new(
         sess: &'a ParseSess,
-        tokens: TokenStream,
-        desugar_doc_comments: bool,
+        stream: TokenStream,
         subparser_name: Option<&'static str>,
     ) -> Self {
         let mut parser = Parser {
@@ -438,19 +406,13 @@ impl<'a> Parser<'a> {
             capture_cfg: false,
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
-            token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(None, tokens),
-                stack: Vec::new(),
-                num_next_calls: 0,
-                desugar_doc_comments,
-                break_last_token: false,
-            },
-            desugar_doc_comments,
+            token_cursor: TokenCursor { tree_cursor: stream.into_trees(), stack: Vec::new() },
+            num_bump_calls: 0,
+            break_last_token: false,
             unmatched_angle_bracket_count: 0,
             max_angle_bracket_count: 0,
-            unclosed_delims: Vec::new(),
+            angle_bracket_nesting: 0,
             last_unexpected_token_span: None,
-            last_type_ascription: None,
             subparser_name,
             capture_state: CaptureState {
                 capturing: Capturing::No,
@@ -458,7 +420,7 @@ impl<'a> Parser<'a> {
                 inner_attr_ranges: Default::default(),
             },
             current_closure: None,
-            let_expr_allowed: false,
+            recovery: Recovery::Allowed,
         };
 
         // Make parser point to the first token.
@@ -467,6 +429,22 @@ impl<'a> Parser<'a> {
         parser
     }
 
+    pub fn recovery(mut self, recovery: Recovery) -> Self {
+        self.recovery = recovery;
+        self
+    }
+
+    /// Whether the parser is allowed to recover from broken code.
+    ///
+    /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
+    /// is not allowed. All recovery done by the parser must be gated behind this check.
+    ///
+    /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
+    /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
+    fn may_recover(&self) -> bool {
+        matches!(self.recovery, Recovery::Allowed)
+    }
+
     pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
         match self.expect_one_of(&[], &[]) {
             Err(e) => Err(e),
@@ -490,9 +468,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect next token to be edible or inedible token.  If edible,
+    /// Expect next token to be edible or inedible token. If edible,
     /// then consume it; if inedible, then return without consuming
-    /// anything.  Signal a fatal error if next token is unexpected.
+    /// anything. Signal a fatal error if next token is unexpected.
     pub fn expect_one_of(
         &mut self,
         edible: &[TokenKind],
@@ -504,7 +482,9 @@ impl<'a> Parser<'a> {
         } else if inedible.contains(&self.token.kind) {
             // leave it in the input
             Ok(false)
-        } else if self.last_unexpected_token_span == Some(self.token.span) {
+        } else if self.token.kind != token::Eof
+            && self.last_unexpected_token_span == Some(self.token.span)
+        {
             FatalError.raise();
         } else {
             self.expected_one_of_not_found(edible, inedible)
@@ -516,19 +496,11 @@ impl<'a> Parser<'a> {
         self.parse_ident_common(true)
     }
 
-    fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> {
-        self.token.ident().ok_or_else(|| match self.prev_token.kind {
-            TokenKind::DocComment(..) => {
-                self.span_err(self.prev_token.span, Error::UselessDocComment)
-            }
-            _ => self.expected_ident_found(),
-        })
-    }
-
     fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
-        let (ident, is_raw) = self.ident_or_err()?;
+        let (ident, is_raw) = self.ident_or_err(recover)?;
+
         if !is_raw && ident.is_reserved() {
-            let mut err = self.expected_ident_found();
+            let err = self.expected_ident_found_err();
             if recover {
                 err.emit();
             } else {
@@ -539,6 +511,13 @@ impl<'a> Parser<'a> {
         Ok(ident)
     }
 
+    fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
+        match self.token.ident() {
+            Some(ident) => Ok(ident),
+            None => self.expected_ident_found(recover),
+        }
+    }
+
     /// Checks if the next token is `tok`, and returns `true` if so.
     ///
     /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
@@ -583,6 +562,21 @@ impl<'a> Parser<'a> {
         self.token.is_keyword(kw)
     }
 
+    fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
+        if self.check_keyword(kw) {
+            return true;
+        }
+
+        if case == Case::Insensitive
+            && let Some((ident, /* is_raw */ false)) = self.token.ident()
+            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
+        {
+            true
+        } else {
+            false
+        }
+    }
+
     /// If the next token is the given keyword, eats it and returns `true`.
     /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
     // Public for rustfmt usage.
@@ -595,6 +589,26 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Eats a keyword, optionally ignoring the case.
+    /// If the case differs (and is ignored) an error is issued.
+    /// This is useful for recovery.
+    fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
+        if self.eat_keyword(kw) {
+            return true;
+        }
+
+        if case == Case::Insensitive
+            && let Some((ident, /* is_raw */ false)) = self.token.ident()
+            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
+        {
+            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
+            self.bump();
+            return true;
+        }
+
+        false
+    }
+
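A minimal standalone sketch of the recovery idea behind `check_keyword_case` and `eat_keyword_case`, using plain strings instead of the compiler's `Symbol` interner and a printed message instead of a structured diagnostic:

#[derive(Clone, Copy, PartialEq)]
enum Case {
    Sensitive,
    Insensitive,
}

/// Returns true if `ident` matches the keyword `kw`, optionally ignoring case.
/// When the match is only case-insensitive, a diagnostic is reported but the
/// keyword is still accepted, so parsing can continue (recovery).
fn eat_keyword_case(ident: &str, kw: &str, case: Case) -> bool {
    if ident == kw {
        return true;
    }
    if case == Case::Insensitive && ident.to_lowercase() == kw.to_lowercase() {
        eprintln!("error: keyword `{kw}` is written in the wrong case: found `{ident}`");
        return true; // recover: treat it as the keyword anyway
    }
    false
}

fn main() {
    assert!(eat_keyword_case("unsafe", "unsafe", Case::Sensitive));
    assert!(!eat_keyword_case("Unsafe", "unsafe", Case::Sensitive));
    assert!(eat_keyword_case("Unsafe", "unsafe", Case::Insensitive)); // diagnostic, then recovery
}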
     fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
         if self.token.is_keyword(kw) {
             self.bump();
@@ -641,10 +655,21 @@ impl<'a> Parser<'a> {
         self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
     }
 
+    fn check_const_closure(&self) -> bool {
+        self.is_keyword_ahead(0, &[kw::Const])
+            && self.look_ahead(1, |t| match &t.kind {
+                // async closures do not work with const closures, so we do not parse that here.
+                token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
+                    true
+                }
+                _ => false,
+            })
+    }
+
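The check above is essentially one token of lookahead after `const`. A simplified standalone classifier with the same shape of decision, over toy string tokens rather than the compiler's token types (the real check also handles interpolated blocks and feature gating):

#[derive(Debug, PartialEq)]
enum ConstForm {
    Block,     // `const { ... }`
    Closure,   // `const || ...`, `const move || ...`
    Qualifier, // `const fn ...`, `const NAME: T = ...`, etc.
}

/// Classifies what a leading `const` keyword introduces, based on the token
/// that follows it.
fn classify_const(next: &str) -> ConstForm {
    match next {
        "{" => ConstForm::Block,
        "|" | "||" | "move" | "static" => ConstForm::Closure,
        _ => ConstForm::Qualifier,
    }
}

fn main() {
    assert_eq!(classify_const("{"), ConstForm::Block);
    assert_eq!(classify_const("||"), ConstForm::Closure);
    assert_eq!(classify_const("fn"), ConstForm::Qualifier);
}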
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
-            && self.look_ahead(dist + 1, |t| match t.kind {
-                token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
+            && self.look_ahead(dist + 1, |t| match &t.kind {
+                token::Interpolated(nt) => matches!(&nt.0, token::NtBlock(..)),
                 token::OpenDelim(Delimiter::Brace) => true,
                 _ => false,
             })
@@ -678,9 +703,9 @@ impl<'a> Parser<'a> {
                 // If we consume any additional tokens, then this token
                 // is not needed (we'll capture the entire 'glued' token),
                 // and `bump` will set this field to `None`
-                self.token_cursor.break_last_token = true;
-                // Use the spacing of the glued token as the spacing
-                // of the unglued second token.
+                self.break_last_token = true;
+                // Use the spacing of the glued token as the spacing of the
+                // unglued second token.
                 self.bump_with((Token::new(second, second_span), self.token_spacing));
                 true
             }
@@ -741,39 +766,44 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Checks if the next token is contained within `kets`, and returns `true` if so.
     fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
         kets.iter().any(|k| match expect {
             TokenExpectType::Expect => self.check(k),
-            TokenExpectType::NoExpect => self.token == **k,
+            TokenExpectType::NoExpect => self.check_noexpect(k),
         })
     }
 
+    /// Parses a sequence until one of the specified delimiters is reached. The function
+    /// `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
     fn parse_seq_to_before_tokens<T>(
         &mut self,
         kets: &[&TokenKind],
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
         let mut first = true;
         let mut recovered = false;
         let mut trailing = false;
-        let mut v = vec![];
-        let unclosed_delims = !self.unclosed_delims.is_empty();
+        let mut v = ThinVec::new();
 
         while !self.expect_any_with_type(kets, expect) {
             if let token::CloseDelim(..) | token::Eof = self.token.kind {
                 break;
             }
-            if let Some(ref t) = sep.sep {
+            if let Some(t) = &sep.sep {
                 if first {
+                    // no separator for the first element
                     first = false;
                 } else {
+                    // check for separator
                     match self.expect(t) {
-                        Ok(false) => {
+                        Ok(false) /* not recovered */ => {
                             self.current_closure.take();
                         }
-                        Ok(true) => {
+                        Ok(true) /* recovered */ => {
                             self.current_closure.take();
                             recovered = true;
                             break;
@@ -800,8 +830,8 @@ impl<'a> Parser<'a> {
 
                                 _ => {
                                     // Attempt to keep parsing if it was a similar separator.
-                                    if let Some(ref tokens) = t.similar_tokens() {
-                                        if tokens.contains(&self.token.kind) && !unclosed_delims {
+                                    if let Some(tokens) = t.similar_tokens() {
+                                        if tokens.contains(&self.token.kind) {
                                             self.bump();
                                         }
                                     }
@@ -813,14 +843,14 @@ impl<'a> Parser<'a> {
                             // https://github.com/rust-lang/rust/issues/72373
                             if self.prev_token.is_ident() && self.token.kind == token::DotDot {
                                 let msg = format!(
-                                    "if you meant to bind the contents of \
-                                    the rest of the array pattern into `{}`, use `@`",
+                                    "if you meant to bind the contents of the rest of the array \
+                                     pattern into `{}`, use `@`",
                                     pprust::token_to_string(&self.prev_token)
                                 );
                                 expect_err
-                                    .span_suggestion_verbose(
+                                    .with_span_suggestion_verbose(
                                         self.prev_token.span.shrink_to_hi().until(self.token.span),
-                                        &msg,
+                                        msg,
                                         " @ ",
                                         Applicability::MaybeIncorrect,
                                     )
@@ -834,9 +864,9 @@ impl<'a> Parser<'a> {
                                     // Parsed successfully, therefore most probably the code only
                                     // misses a separator.
                                     expect_err
-                                        .span_suggestion_short(
+                                        .with_span_suggestion_short(
                                             sp,
-                                            &format!("missing `{}`", token_str),
+                                            format!("missing `{token_str}`"),
                                             token_str,
                                             Applicability::MaybeIncorrect,
                                         )
@@ -848,10 +878,21 @@ impl<'a> Parser<'a> {
                                 Err(e) => {
                                     // Parsing failed, therefore it must be something more serious
                                     // than just a missing separator.
-                                    expect_err.emit();
-
+                                    for xx in &e.children {
+                                        // propagate the help message from sub error 'e' to main error 'expect_err'
+                                        expect_err.children.push(xx.clone());
+                                    }
                                     e.cancel();
-                                    break;
+                                    if self.token == token::Colon {
+                                        // we will try to recover in `maybe_recover_struct_lit_bad_delims`
+                                        return Err(expect_err);
+                                    } else if let [token::CloseDelim(Delimiter::Parenthesis)] = kets
+                                    {
+                                        return Err(expect_err);
+                                    } else {
+                                        expect_err.emit();
+                                        break;
+                                    }
                                 }
                             }
                         }
@@ -873,17 +914,20 @@ impl<'a> Parser<'a> {
     fn recover_missing_braces_around_closure_body(
         &mut self,
         closure_spans: ClosureSpans,
-        mut expect_err: DiagnosticBuilder<'_, ErrorGuaranteed>,
+        mut expect_err: DiagnosticBuilder<'_>,
     ) -> PResult<'a, ()> {
         let initial_semicolon = self.token.span;
 
         while self.eat(&TokenKind::Semi) {
-            let _ = self.parse_stmt(ForceCollect::Yes)?;
+            let _ =
+                self.parse_stmt_without_recovery(false, ForceCollect::Yes).unwrap_or_else(|e| {
+                    e.cancel();
+                    None
+                });
         }
 
-        expect_err.set_primary_message(
-            "closure bodies that contain statements must be surrounded by braces",
-        );
+        expect_err
+            .primary_message("closure bodies that contain statements must be surrounded by braces");
 
         let preceding_pipe_span = closure_spans.closing_pipe;
         let following_token_span = self.token.span;
@@ -907,7 +951,7 @@ impl<'a> Parser<'a> {
         );
         expect_err.span_note(second_note, "the closure body may be incorrectly delimited");
 
-        expect_err.set_span(vec![preceding_pipe_span, following_token_span]);
+        expect_err.span(vec![preceding_pipe_span, following_token_span]);
 
         let opening_suggestion_str = " {".to_string();
         let closing_suggestion_str = "}".to_string();
@@ -926,7 +970,7 @@ impl<'a> Parser<'a> {
         Ok(())
     }
 
-    /// Parses a sequence, not including the closing delimiter. The function
+    /// Parses a sequence, not including the delimiters. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     fn parse_seq_to_before_end<T>(
@@ -934,11 +978,11 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
     }
 
-    /// Parses a sequence, including the closing delimiter. The function
+    /// Parses a sequence, including only the closing delimiter. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     fn parse_seq_to_end<T>(
@@ -946,7 +990,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
         if !recovered {
             self.eat(ket);
@@ -954,7 +998,7 @@ impl<'a> Parser<'a> {
         Ok((val, trailing))
     }
 
-    /// Parses a sequence, including the closing delimiter. The function
+    /// Parses a sequence, including both delimiters. The function
     /// `f` must consume tokens until reaching the next separator or
     /// closing bracket.
     fn parse_unspanned_seq<T>(
@@ -963,16 +1007,19 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
         self.expect(bra)?;
         self.parse_seq_to_end(ket, sep, f)
     }
 
+    /// Parses a comma-separated sequence, including both delimiters.
+    /// The function `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
     fn parse_delim_comma_seq<T>(
         &mut self,
         delim: Delimiter,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
         self.parse_unspanned_seq(
             &token::OpenDelim(delim),
             &token::CloseDelim(delim),
@@ -981,10 +1028,13 @@ impl<'a> Parser<'a> {
         )
     }
 
+    /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
+    /// The function `f` must consume tokens until reaching the next separator or
+    /// closing bracket.
     fn parse_paren_comma_seq<T>(
         &mut self,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
         self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
     }
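A rough standalone analogue of this family of `parse_seq_*` helpers (not the compiler's implementation): walk a flat list of string tokens, collect comma-separated elements up to a closing parenthesis, and report whether a trailing separator was present.

/// Parses `elem (, elem)* ,? )`, returning the elements, whether a trailing
/// comma was seen, and how many tokens were consumed (including the `)`).
fn parse_comma_seq_to_close(tokens: &[&str]) -> Result<(Vec<String>, bool, usize), String> {
    let mut elems = Vec::new();
    let mut trailing = false;
    let mut i = 0;
    loop {
        match tokens.get(i) {
            Some(&")") => return Ok((elems, trailing, i + 1)), // consume the closing delimiter
            Some(&",") => return Err(format!("unexpected `,` at position {i}")),
            Some(&tok) => {
                elems.push(tok.to_string());
                i += 1;
                // After an element we expect either a separator or the closing delimiter.
                match tokens.get(i) {
                    Some(&",") => {
                        i += 1;
                        trailing = matches!(tokens.get(i), Some(&")"));
                    }
                    Some(&")") => {}
                    other => return Err(format!("expected `,` or `)`, found {other:?}")),
                }
            }
            None => return Err("unclosed sequence: expected `)`".to_string()),
        }
    }
}

fn main() {
    let (elems, trailing, used) = parse_comma_seq_to_close(&["a", ",", "b", ",", ")"]).unwrap();
    assert_eq!(elems, ["a", "b"]);
    assert!(trailing);
    assert_eq!(used, 5);
}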
 
@@ -1008,12 +1058,12 @@ impl<'a> Parser<'a> {
     pub fn bump(&mut self) {
         // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
         // than `.0`/`.1` access.
-        let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments);
-        self.token_cursor.num_next_calls += 1;
+        let mut next = self.token_cursor.inlined_next();
+        self.num_bump_calls += 1;
         // We've retrieved a token from the underlying
         // cursor, so we no longer need to worry about
         // an unglued token. See `break_and_eat` for more details
-        self.token_cursor.break_last_token = false;
+        self.break_last_token = false;
         if next.0.span.is_dummy() {
             // Tweak the location for better diagnostics, but keep syntactic context intact.
             let fallback_span = self.token.span;
@@ -1027,36 +1077,53 @@ impl<'a> Parser<'a> {
     }
 
     /// Look-ahead `dist` tokens of `self.token` and get access to that token there.
-    /// When `dist == 0` then the current token is looked at.
+    /// When `dist == 0`, the current token is looked at. `Eof` will be
+    /// returned if the look-ahead is any distance past the end of the tokens.
     pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
         if dist == 0 {
             return looker(&self.token);
         }
 
-        let frame = &self.token_cursor.frame;
-        if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
+        if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
+            && delim != Delimiter::Invisible
+        {
+            // We are not in the outermost token stream, and the token stream
+            // we are in has non-skipped delimiters. Look for skipped
+            // delimiters in the lookahead range.
+            let tree_cursor = &self.token_cursor.tree_cursor;
             let all_normal = (0..dist).all(|i| {
-                let token = frame.tree_cursor.look_ahead(i);
-                !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
+                let token = tree_cursor.look_ahead(i);
+                !matches!(token, Some(TokenTree::Delimited(.., Delimiter::Invisible, _)))
             });
             if all_normal {
-                return match frame.tree_cursor.look_ahead(dist - 1) {
-                    Some(tree) => match tree {
-                        TokenTree::Token(token) => looker(token),
-                        TokenTree::Delimited(dspan, delim, _) => {
-                            looker(&Token::new(token::OpenDelim(*delim), dspan.open))
+                // There were no skipped delimiters. Do lookahead by plain indexing.
+                return match tree_cursor.look_ahead(dist - 1) {
+                    Some(tree) => {
+                        // Indexing stayed within the current token stream.
+                        match tree {
+                            TokenTree::Token(token, _) => looker(token),
+                            TokenTree::Delimited(dspan, _, delim, _) => {
+                                looker(&Token::new(token::OpenDelim(*delim), dspan.open))
+                            }
                         }
-                    },
-                    None => looker(&Token::new(token::CloseDelim(delim), span.close)),
+                    }
+                    None => {
+                        // Indexing went past the end of the current token
+                        // stream. Use the close delimiter, no matter how far
+                        // ahead `dist` went.
+                        looker(&Token::new(token::CloseDelim(delim), span.close))
+                    }
                 };
             }
         }
 
+        // We are in a more complex case. Just clone the token cursor and use
+        // `next`, skipping delimiters as necessary. Slow but simple.
         let mut cursor = self.token_cursor.clone();
         let mut i = 0;
         let mut token = Token::dummy();
         while i < dist {
-            token = cursor.next(/* desugar_doc_comments */ false).0;
+            token = cursor.next().0;
             if matches!(
                 token.kind,
                 token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
@@ -1065,27 +1132,52 @@ impl<'a> Parser<'a> {
             }
             i += 1;
         }
-        return looker(&token);
+        looker(&token)
     }
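The fast-path/slow-path structure of `look_ahead` can be mimicked with a toy cursor. This sketch uses plain string tokens rather than the real `TokenCursor`; the point is that lookahead never consumes input, plain indexing is used when possible, and the fallback clones the cursor and steps it.

#[derive(Clone)]
struct Cursor<'a> {
    tokens: &'a [&'a str],
    pos: usize,
}

impl<'a> Cursor<'a> {
    /// Advances past the current token and returns it; past the end it keeps
    /// returning "<eof>".
    fn next(&mut self) -> &'a str {
        let tok = self.tokens.get(self.pos).copied().unwrap_or("<eof>");
        self.pos += 1;
        tok
    }

    /// Looks `dist` tokens ahead without consuming anything. `dist == 0` is
    /// the current token; any distance past the end yields "<eof>".
    fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&str) -> R) -> R {
        // Fast path: plain indexing into the underlying tokens.
        if let Some(&tok) = self.tokens.get(self.pos + dist) {
            return looker(tok);
        }
        // Slow path: clone the cursor and step it, mirroring the structure of
        // the parser's fallback when simple indexing does not apply.
        let mut cursor = self.clone();
        let mut tok = "<eof>";
        for _ in 0..=dist {
            tok = cursor.next();
        }
        looker(tok)
    }
}

fn main() {
    let mut c = Cursor { tokens: &["fn", "main", "(", ")"], pos: 0 };
    assert_eq!(c.look_ahead(1, |t| t.to_string()), "main");
    assert_eq!(c.next(), "fn"); // lookahead did not consume anything
    assert_eq!(c.look_ahead(10, |t| t.to_string()), "<eof>");
}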
 
     /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
-    fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
+    pub(crate) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
         self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
     }
 
     /// Parses asyncness: `async` or nothing.
-    fn parse_asyncness(&mut self) -> Async {
-        if self.eat_keyword(kw::Async) {
-            let span = self.prev_token.uninterpolated_span();
-            Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
+    fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
+        let span = self.token.uninterpolated_span();
+        if self.eat_keyword_case(kw::Async, case) {
+            // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
+            // error if edition <= 2024, like we do with async and edition <= 2018?
+            if self.token.uninterpolated_span().at_least_rust_2024()
+                && self.eat_keyword_case(kw::Gen, case)
+            {
+                let gen_span = self.prev_token.uninterpolated_span();
+                Some(CoroutineKind::AsyncGen {
+                    span: span.to(gen_span),
+                    closure_id: DUMMY_NODE_ID,
+                    return_impl_trait_id: DUMMY_NODE_ID,
+                })
+            } else {
+                Some(CoroutineKind::Async {
+                    span,
+                    closure_id: DUMMY_NODE_ID,
+                    return_impl_trait_id: DUMMY_NODE_ID,
+                })
+            }
+        } else if self.token.uninterpolated_span().at_least_rust_2024()
+            && self.eat_keyword_case(kw::Gen, case)
+        {
+            Some(CoroutineKind::Gen {
+                span,
+                closure_id: DUMMY_NODE_ID,
+                return_impl_trait_id: DUMMY_NODE_ID,
+            })
         } else {
-            Async::No
+            None
         }
     }
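A simplified standalone classifier for the `async` / `gen` / `async gen` decision made above; it works on plain keyword strings and omits the edition gating and node-ID bookkeeping.

#[derive(Debug, PartialEq)]
enum CoroutineKind {
    Async,
    Gen,
    AsyncGen,
}

/// Classifies the leading coroutine keywords of a header, returning the kind
/// (if any) and how many keywords were consumed.
fn parse_coroutine_kind(tokens: &[&str]) -> (Option<CoroutineKind>, usize) {
    match tokens {
        ["async", "gen", ..] => (Some(CoroutineKind::AsyncGen), 2),
        ["async", ..] => (Some(CoroutineKind::Async), 1),
        ["gen", ..] => (Some(CoroutineKind::Gen), 1),
        _ => (None, 0),
    }
}

fn main() {
    assert_eq!(parse_coroutine_kind(&["async", "gen", "fn"]).0, Some(CoroutineKind::AsyncGen));
    assert_eq!(parse_coroutine_kind(&["async", "fn"]).0, Some(CoroutineKind::Async));
    assert_eq!(parse_coroutine_kind(&["fn"]).0, None);
}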
 
     /// Parses unsafety: `unsafe` or nothing.
-    fn parse_unsafety(&mut self) -> Unsafe {
-        if self.eat_keyword(kw::Unsafe) {
+    fn parse_unsafety(&mut self, case: Case) -> Unsafe {
+        if self.eat_keyword_case(kw::Unsafe, case) {
             Unsafe::Yes(self.prev_token.uninterpolated_span())
         } else {
             Unsafe::No
@@ -1093,10 +1185,25 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses constness: `const` or nothing.
-    fn parse_constness(&mut self) -> Const {
-        // Avoid const blocks to be parsed as const items
-        if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
-            && self.eat_keyword(kw::Const)
+    fn parse_constness(&mut self, case: Case) -> Const {
+        self.parse_constness_(case, false)
+    }
+
+    /// Parses constness for closures (case-sensitive, feature-gated).
+    fn parse_closure_constness(&mut self) -> Const {
+        let constness = self.parse_constness_(Case::Sensitive, true);
+        if let Const::Yes(span) = constness {
+            self.sess.gated_spans.gate(sym::const_closures, span);
+        }
+        constness
+    }
+
+    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
+        // Prevent const blocks and const closures from being parsed as const items
+        if (self.check_const_closure() == is_closure)
+            && !self
+                .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
+            && self.eat_keyword_case(kw::Const, case)
         {
             Const::Yes(self.prev_token.uninterpolated_span())
         } else {
@@ -1115,10 +1222,10 @@ impl<'a> Parser<'a> {
         let (attrs, blk) = self.parse_inner_attrs_and_block()?;
         let anon_const = AnonConst {
             id: DUMMY_NODE_ID,
-            value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()),
+            value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
         };
         let blk_span = anon_const.value.span;
-        Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::from(attrs)))
+        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
     }
 
     /// Parses mutability (`mut` or nothing).
@@ -1140,7 +1247,9 @@ impl<'a> Parser<'a> {
     fn parse_field_name(&mut self) -> PResult<'a, Ident> {
         if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
         {
-            self.expect_no_suffix(self.token.span, "a tuple index", suffix);
+            if let Some(suffix) = suffix {
+                self.expect_no_tuple_index_suffix(self.token.span, suffix);
+            }
             self.bump();
             Ok(Ident::new(symbol, self.prev_token.span))
         } else {
@@ -1148,39 +1257,34 @@ impl<'a> Parser<'a> {
         }
     }
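For reference, the tuple-index syntax this guards: tuple indices are bare integer literals, and a literal suffix on one is rejected, which is what the suffix check above reports. Plain Rust, no parser internals involved:

fn main() {
    let pair = (1u8, "two");

    // Tuple indices are bare integer literals with no suffix:
    let first = pair.0;
    let second = pair.1;

    // Writing `pair.0u8` instead would be rejected by the parser with a
    // "suffixes on a tuple index are invalid"-style error, which is the case
    // the suffix check above reports.

    assert_eq!(first, 1);
    assert_eq!(second, "two");
}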
 
-    fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
-        self.parse_mac_args_common(true).map(P)
+    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
+        if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
     }
 
-    fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
-        self.parse_mac_args_common(false)
+    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
+        Ok(if let Some(args) = self.parse_delim_args_inner() {
+            AttrArgs::Delimited(args)
+        } else {
+            if self.eat(&token::Eq) {
+                let eq_span = self.prev_token.span;
+                AttrArgs::Eq(eq_span, AttrArgsEq::Ast(self.parse_expr_force_collect()?))
+            } else {
+                AttrArgs::Empty
+            }
+        })
     }
 
-    fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
-        Ok(
-            if self.check(&token::OpenDelim(Delimiter::Parenthesis))
-                || self.check(&token::OpenDelim(Delimiter::Bracket))
-                || self.check(&token::OpenDelim(Delimiter::Brace))
-            {
-                match self.parse_token_tree() {
-                    TokenTree::Delimited(dspan, delim, tokens) =>
-                    // We've confirmed above that there is a delimiter so unwrapping is OK.
-                    {
-                        MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
-                    }
-                    _ => unreachable!(),
-                }
-            } else if !delimited_only {
-                if self.eat(&token::Eq) {
-                    let eq_span = self.prev_token.span;
-                    MacArgs::Eq(eq_span, MacArgsEq::Ast(self.parse_expr_force_collect()?))
-                } else {
-                    MacArgs::Empty
-                }
-            } else {
-                return self.unexpected();
-            },
-        )
+    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
+        let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
+            || self.check(&token::OpenDelim(Delimiter::Bracket))
+            || self.check(&token::OpenDelim(Delimiter::Brace));
+
+        delimited.then(|| {
+            let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
+                unreachable!()
+            };
+            DelimArgs { dspan, delim, tokens }
+        })
     }
 
     fn parse_or_use_outer_attributes(
@@ -1195,13 +1299,12 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a single token tree from the input.
-    pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
+    pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
-                // Grab the tokens from this frame.
-                let frame = &self.token_cursor.frame;
-                let stream = frame.tree_cursor.stream.clone();
-                let (delim, span) = frame.delim_sp.unwrap();
+                // Grab the tokens within the delimiters.
+                let stream = self.token_cursor.tree_cursor.stream.clone();
+                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1221,31 +1324,23 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, delim, stream)
+                TokenTree::Delimited(span, spacing, delim, stream)
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
+                let prev_spacing = self.token_spacing;
                 self.bump();
-                TokenTree::Token(self.prev_token.clone())
+                TokenTree::Token(self.prev_token.clone(), prev_spacing)
             }
         }
     }
 
-    /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
-    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
-        let mut tts = Vec::new();
-        while self.token != token::Eof {
-            tts.push(self.parse_token_tree());
-        }
-        Ok(tts)
-    }
-
     pub fn parse_tokens(&mut self) -> TokenStream {
         let mut result = Vec::new();
         loop {
             match self.token.kind {
                 token::Eof | token::CloseDelim(..) => break,
-                _ => result.push(self.parse_token_tree().into()),
+                _ => result.push(self.parse_token_tree()),
             }
         }
         TokenStream::new(result)
@@ -1294,7 +1389,11 @@ impl<'a> Parser<'a> {
                 self.bump(); // `in`
                 let path = self.parse_path(PathStyle::Mod)?; // `path`
                 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
-                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
+                let vis = VisibilityKind::Restricted {
+                    path: P(path),
+                    id: ast::DUMMY_NODE_ID,
+                    shorthand: false,
+                };
                 return Ok(Visibility {
                     span: lo.to(self.prev_token.span),
                     kind: vis,
@@ -1307,7 +1406,11 @@ impl<'a> Parser<'a> {
                 self.bump(); // `(`
                 let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
                 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
-                let vis = VisibilityKind::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
+                let vis = VisibilityKind::Restricted {
+                    path: P(path),
+                    id: ast::DUMMY_NODE_ID,
+                    shorthand: true,
+                };
                 return Ok(Visibility {
                     span: lo.to(self.prev_token.span),
                     kind: vis,
@@ -1330,30 +1433,16 @@ impl<'a> Parser<'a> {
         let path = self.parse_path(PathStyle::Mod)?;
         self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
 
-        let msg = "incorrect visibility restriction";
-        let suggestion = r##"some possible visibility restrictions are:
-`pub(crate)`: visible only on the current crate
-`pub(super)`: visible only in the current module's parent
-`pub(in path::to::module)`: visible only on the specified path"##;
-
         let path_str = pprust::path_to_string(&path);
-
-        struct_span_err!(self.sess.span_diagnostic, path.span, E0704, "{}", msg)
-            .help(suggestion)
-            .span_suggestion(
-                path.span,
-                &format!("make this visible only to module `{}` with `in`", path_str),
-                format!("in {}", path_str),
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.dcx()
+            .emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
 
         Ok(())
     }
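For reference, the restricted-visibility forms handled above, written as ordinary Rust; the `shorthand` flag separates the `pub(crate)` / `pub(super)` / `pub(self)` spellings from the explicit `pub(in path)` form:

mod outer {
    pub mod inner {
        pub(crate) fn visible_in_crate() {}           // shorthand form: `crate`
        pub(super) fn visible_in_parent() {}          // shorthand form: `super`
        pub(in crate::outer) fn visible_in_outer() {} // explicit `in <path>` form
        pub(self) fn module_private() {}              // shorthand form: `self`

        pub fn call_private() {
            module_private();
        }
    }

    pub fn call() {
        inner::visible_in_crate();
        inner::visible_in_parent();
        inner::visible_in_outer();
        inner::call_private();
    }
}

fn main() {
    outer::call();
    outer::inner::visible_in_crate();
}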
 
     /// Parses `extern string_literal?`.
-    fn parse_extern(&mut self) -> Extern {
-        if self.eat_keyword(kw::Extern) {
+    fn parse_extern(&mut self, case: Case) -> Extern {
+        if self.eat_keyword_case(kw::Extern, case) {
             let mut extern_span = self.prev_token.span;
             let abi = self.parse_abi();
             if let Some(abi) = abi {
@@ -1370,16 +1459,9 @@ impl<'a> Parser<'a> {
         match self.parse_str_lit() {
             Ok(str_lit) => Some(str_lit),
             Err(Some(lit)) => match lit.kind {
-                ast::LitKind::Err(_) => None,
+                ast::LitKind::Err => None,
                 _ => {
-                    self.struct_span_err(lit.span, "non-string ABI literal")
-                        .span_suggestion(
-                            lit.span,
-                            "specify the ABI with a string literal",
-                            "\"C\"",
-                            Applicability::MaybeIncorrect,
-                        )
-                        .emit();
+                    self.dcx().emit_err(NonStringAbiLiteral { span: lit.span });
                     None
                 }
             },
@@ -1411,52 +1493,38 @@ impl<'a> Parser<'a> {
     pub fn clear_expected_tokens(&mut self) {
         self.expected_tokens.clear();
     }
+
+    pub fn approx_token_stream_pos(&self) -> usize {
+        self.num_bump_calls
+    }
 }
 
 pub(crate) fn make_unclosed_delims_error(
-    unmatched: UnmatchedBrace,
+    unmatched: UnmatchedDelim,
     sess: &ParseSess,
-) -> Option<DiagnosticBuilder<'_, ErrorGuaranteed>> {
+) -> Option<DiagnosticBuilder<'_>> {
     // `None` here means an `Eof` was found. We already emit those errors elsewhere; we add them to
-    // `unmatched_braces` only for error recovery in the `Parser`.
+    // `unmatched_delims` only for error recovery in the `Parser`.
     let found_delim = unmatched.found_delim?;
-    let span: MultiSpan = if let Some(sp) = unmatched.unclosed_span {
-        vec![unmatched.found_span, sp].into()
-    } else {
-        unmatched.found_span.into()
-    };
-    let mut err = sess.span_diagnostic.struct_span_err(
-        span,
-        &format!(
-            "mismatched closing delimiter: `{}`",
-            pprust::token_kind_to_string(&token::CloseDelim(found_delim)),
-        ),
-    );
-    err.span_label(unmatched.found_span, "mismatched closing delimiter");
-    if let Some(sp) = unmatched.candidate_span {
-        err.span_label(sp, "closing delimiter possibly meant for this");
-    }
+    let mut spans = vec![unmatched.found_span];
     if let Some(sp) = unmatched.unclosed_span {
-        err.span_label(sp, "unclosed delimiter");
-    }
+        spans.push(sp);
+    };
+    let err = sess.dcx.create_err(MismatchedClosingDelimiter {
+        spans,
+        delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
+        unmatched: unmatched.found_span,
+        opening_candidate: unmatched.candidate_span,
+        unclosed: unmatched.unclosed_span,
+    });
     Some(err)
 }
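A standalone sketch of the bookkeeping behind this kind of diagnostic: a stack of open delimiters, with the error carrying both the unexpected closer and the delimiter it clashes with (toy types and characters, not the compiler's `UnmatchedDelim`):

#[derive(Debug, PartialEq)]
enum DelimError {
    /// The closer does not match the most recently opened delimiter.
    Mismatched { found: char, opened: char, opened_at: usize },
    /// A closer appeared with nothing open.
    UnexpectedClose { found: char, at: usize },
    /// Input ended while a delimiter was still open.
    Unclosed { opened: char, opened_at: usize },
}

fn matching_open(close: char) -> char {
    match close {
        ')' => '(',
        ']' => '[',
        _ => '{',
    }
}

fn check_delims(src: &str) -> Result<(), DelimError> {
    let mut stack: Vec<(char, usize)> = Vec::new();
    for (i, c) in src.char_indices() {
        match c {
            '(' | '[' | '{' => stack.push((c, i)),
            ')' | ']' | '}' => match stack.pop() {
                Some((open, _)) if open == matching_open(c) => {}
                Some((open, at)) => {
                    return Err(DelimError::Mismatched { found: c, opened: open, opened_at: at })
                }
                None => return Err(DelimError::UnexpectedClose { found: c, at: i }),
            },
            _ => {}
        }
    }
    match stack.pop() {
        Some((open, at)) => Err(DelimError::Unclosed { opened: open, opened_at: at }),
        None => Ok(()),
    }
}

fn main() {
    assert_eq!(check_delims("{ (1 + 2) }"), Ok(()));
    assert!(matches!(check_delims("{ (1 + 2] }"), Err(DelimError::Mismatched { .. })));
    assert!(matches!(check_delims("fn main() {"), Err(DelimError::Unclosed { .. })));
}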
 
-pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
-    *sess.reached_eof.borrow_mut() |=
-        unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
-    for unmatched in unclosed_delims.drain(..) {
-        if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
-            e.emit();
-        }
-    }
-}
-
-/// A helper struct used when building an `AttrAnnotatedTokenStream` from
-/// a `LazyTokenStream`. Both delimiter and non-delimited tokens
+/// A helper struct used when building an `AttrTokenStream` from
+/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
-/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested
-/// `AttrAnnotatedTokenTree::Delimited` tokens
+/// is then 'parsed' to build up an `AttrTokenStream` with nested
+/// `AttrTokenTree::Delimited` tokens.
 #[derive(Debug, Clone)]
 pub enum FlatToken {
     /// A token - this holds both delimiter (e.g. '{' and '}')
@@ -1464,17 +1532,17 @@ pub enum FlatToken {
     Token(Token),
     /// Holds the `AttributesData` for an AST node. The
     /// `AttributesData` is inserted directly into the
-    /// constructed `AttrAnnotatedTokenStream` as
-    /// an `AttrAnnotatedTokenTree::Attributes`
+    /// constructed `AttrTokenStream` as
+    /// an `AttrTokenTree::Attributes`.
     AttrTarget(AttributesData),
     /// A special 'empty' token that is ignored during the conversion
-    /// to an `AttrAnnotatedTokenStream`. This is used to simplify the
+    /// to an `AttrTokenStream`. This is used to simplify the
     /// handling of replace ranges.
     Empty,
 }
 
 #[derive(Debug)]
-pub enum NtOrTt {
+pub enum ParseNtResult {
     Nt(Nonterminal),
     Tt(TokenTree),
 }
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index e215b6872bf..071d6b72f3b 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -1,12 +1,13 @@
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
+use rustc_ast::token::{self, Delimiter, Nonterminal::*, NonterminalKind, Token};
 use rustc_ast::HasTokens;
 use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
 use rustc_span::symbol::{kw, Ident};
 
+use crate::errors::UnexpectedNonterminal;
 use crate::parser::pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
-use crate::parser::{FollowedByType, ForceCollect, NtOrTt, Parser, PathStyle};
+use crate::parser::{FollowedByType, ForceCollect, ParseNtResult, Parser, PathStyle};
 
 impl<'a> Parser<'a> {
     /// Checks whether a non-terminal may begin with a particular token.
@@ -18,11 +19,20 @@ impl<'a> Parser<'a> {
     pub fn nonterminal_may_begin_with(kind: NonterminalKind, token: &Token) -> bool {
         /// Checks whether the non-terminal may contain a single (non-keyword) identifier.
         fn may_be_ident(nt: &token::Nonterminal) -> bool {
-            match *nt {
-                token::NtItem(_) | token::NtBlock(_) | token::NtVis(_) | token::NtLifetime(_) => {
-                    false
-                }
-                _ => true,
+            match nt {
+                NtStmt(_)
+                | NtPat(_)
+                | NtExpr(_)
+                | NtTy(_)
+                | NtIdent(..)
+                | NtLiteral(_) // `true`, `false`
+                | NtMeta(_)
+                | NtPath(_) => true,
+
+                NtItem(_)
+                | NtBlock(_)
+                | NtVis(_)
+                | NtLifetime(_) => false,
             }
         }
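A simplified standalone analogue of this classification, with a toy enum standing in for `Nonterminal`: expression-like fragments may consist of a lone identifier, while items, blocks, visibilities, and lifetimes cannot.

/// Toy stand-ins for the kinds of captured macro fragments; the variants exist
/// only to be classified below.
#[allow(dead_code)]
enum Fragment {
    Stmt,
    Pat,
    Expr,
    Ty,
    Ident,
    Literal,
    Meta,
    Path,
    Item,
    Block,
    Vis,
    Lifetime,
}

/// Could a fragment of this kind be (or begin with) a plain identifier?
fn may_be_ident(frag: Fragment) -> bool {
    use Fragment::*;
    match frag {
        Stmt | Pat | Expr | Ty | Ident | Literal | Meta | Path => true,
        Item | Block | Vis | Lifetime => false,
    }
}

fn main() {
    assert!(may_be_ident(Fragment::Expr));
    assert!(!may_be_ident(Fragment::Lifetime));
}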
 
@@ -39,55 +49,47 @@ impl<'a> Parser<'a> {
             NonterminalKind::Literal => token.can_begin_literal_maybe_minus(),
             NonterminalKind::Vis => match token.kind {
                 // The follow-set of :vis + "priv" keyword + interpolated
-                token::Comma | token::Ident(..) | token::Interpolated(..) => true,
+                token::Comma | token::Ident(..) | token::Interpolated(_) => true,
                 _ => token.can_begin_type(),
             },
-            NonterminalKind::Block => match token.kind {
+            NonterminalKind::Block => match &token.kind {
                 token::OpenDelim(Delimiter::Brace) => true,
-                token::Interpolated(ref nt) => !matches!(
-                    **nt,
-                    token::NtItem(_)
-                        | token::NtPat(_)
-                        | token::NtTy(_)
-                        | token::NtIdent(..)
-                        | token::NtMeta(_)
-                        | token::NtPath(_)
-                        | token::NtVis(_)
-                ),
+                token::Interpolated(nt) => match &nt.0 {
+                    NtBlock(_) | NtLifetime(_) | NtStmt(_) | NtExpr(_) | NtLiteral(_) => true,
+                    NtItem(_) | NtPat(_) | NtTy(_) | NtIdent(..) | NtMeta(_) | NtPath(_)
+                    | NtVis(_) => false,
+                },
                 _ => false,
             },
-            NonterminalKind::Path | NonterminalKind::Meta => match token.kind {
+            NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::ModSep | token::Ident(..) => true,
-                token::Interpolated(ref nt) => match **nt {
-                    token::NtPath(_) | token::NtMeta(_) => true,
-                    _ => may_be_ident(&nt),
-                },
+                token::Interpolated(nt) => may_be_ident(&nt.0),
                 _ => false,
             },
-            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
-                match token.kind {
-                token::Ident(..) |                  // box, ref, mut, and other identifiers (can stricten)
-                token::OpenDelim(Delimiter::Parenthesis) |    // tuple pattern
-                token::OpenDelim(Delimiter::Bracket) |  // slice pattern
-                token::BinOp(token::And) |          // reference
-                token::BinOp(token::Minus) |        // negative literal
-                token::AndAnd |                     // double reference
-                token::Literal(..) |                // literal
-                token::DotDot |                     // range pattern (future compat)
-                token::DotDotDot |                  // range pattern (future compat)
-                token::ModSep |                     // path
-                token::Lt |                         // path (UFCS constant)
-                token::BinOp(token::Shl) => true,   // path (double UFCS)
+            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
+                match &token.kind {
+                token::Ident(..) |                          // box, ref, mut, and other identifiers (can stricten)
+                token::OpenDelim(Delimiter::Parenthesis) |  // tuple pattern
+                token::OpenDelim(Delimiter::Bracket) |      // slice pattern
+                token::BinOp(token::And) |                  // reference
+                token::BinOp(token::Minus) |                // negative literal
+                token::AndAnd |                             // double reference
+                token::Literal(_) |                        // literal
+                token::DotDot |                             // range pattern (future compat)
+                token::DotDotDot |                          // range pattern (future compat)
+                token::ModSep |                             // path
+                token::Lt |                                 // path (UFCS constant)
+                token::BinOp(token::Shl) => true,           // path (double UFCS)
                 // leading vert `|` or-pattern
-                token::BinOp(token::Or) =>  matches!(kind, NonterminalKind::PatWithOr {..}),
-                token::Interpolated(ref nt) => may_be_ident(nt),
+                token::BinOp(token::Or) => matches!(kind, NonterminalKind::PatWithOr),
+                token::Interpolated(nt) => may_be_ident(&nt.0),
                 _ => false,
             }
             }
-            NonterminalKind::Lifetime => match token.kind {
+            NonterminalKind::Lifetime => match &token.kind {
                 token::Lifetime(_) => true,
-                token::Interpolated(ref nt) => {
-                    matches!(**nt, token::NtLifetime(_))
+                token::Interpolated(nt) => {
+                    matches!(&nt.0, NtLifetime(_))
                 }
                 _ => false,
             },
@@ -100,37 +102,39 @@ impl<'a> Parser<'a> {
     /// Parse a non-terminal (e.g. MBE `:pat` or `:ident`). Inlined because there is only one call
     /// site.
     #[inline]
-    pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, NtOrTt> {
-        // Any `Nonterminal` which stores its tokens (currently `NtItem` and `NtExpr`)
-        // needs to have them force-captured here.
+    pub fn parse_nonterminal(&mut self, kind: NonterminalKind) -> PResult<'a, ParseNtResult> {
         // A `macro_rules!` invocation may pass a captured item/expr to a proc-macro,
         // which requires having captured tokens available. Since we cannot determine
         // in advance whether or not a proc-macro will be (transitively) invoked,
         // we always capture tokens for any `Nonterminal` which needs them.
         let mut nt = match kind {
             // Note that TT is treated differently to all the others.
-            NonterminalKind::TT => return Ok(NtOrTt::Tt(self.parse_token_tree())),
+            NonterminalKind::TT => return Ok(ParseNtResult::Tt(self.parse_token_tree())),
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
-                Some(item) => token::NtItem(item),
+                Some(item) => NtItem(item),
                 None => {
-                    return Err(self.struct_span_err(self.token.span, "expected an item keyword"));
+                    return Err(self
+                        .dcx()
+                        .create_err(UnexpectedNonterminal::Item(self.token.span)));
                 }
             },
             NonterminalKind::Block => {
                 // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
                 // the ':block' matcher does not support them
-                token::NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
+                NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
             }
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
-                Some(s) => token::NtStmt(P(s)),
+                Some(s) => NtStmt(P(s)),
                 None => {
-                    return Err(self.struct_span_err(self.token.span, "expected a statement"));
+                    return Err(self
+                        .dcx()
+                        .create_err(UnexpectedNonterminal::Statement(self.token.span)));
                 }
             },
-            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
-                token::NtPat(self.collect_tokens_no_attrs(|this| match kind {
-                    NonterminalKind::PatParam { .. } => this.parse_pat_no_top_alt(None),
-                    NonterminalKind::PatWithOr { .. } => this.parse_pat_allow_top_alt(
+            NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr => {
+                NtPat(self.collect_tokens_no_attrs(|this| match kind {
+                    NonterminalKind::PatParam { .. } => this.parse_pat_no_top_alt(None, None),
+                    NonterminalKind::PatWithOr => this.parse_pat_allow_top_alt(
                         None,
                         RecoverComma::No,
                         RecoverColon::No,
@@ -140,44 +144,43 @@ impl<'a> Parser<'a> {
                 })?)
             }
 
-            NonterminalKind::Expr => token::NtExpr(self.parse_expr_force_collect()?),
+            NonterminalKind::Expr => NtExpr(self.parse_expr_force_collect()?),
             NonterminalKind::Literal => {
                 // The `:literal` matcher does not support attributes
-                token::NtLiteral(
-                    self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
-                )
+                NtLiteral(self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?)
             }
 
-            NonterminalKind::Ty => token::NtTy(
-                self.collect_tokens_no_attrs(|this| this.parse_no_question_mark_recover())?,
-            ),
+            NonterminalKind::Ty => {
+                NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty_no_question_mark_recover())?)
+            }
 
             // this could be handled like a token, since it is one
-            NonterminalKind::Ident
-                if let Some((ident, is_raw)) = get_macro_ident(&self.token) =>
-            {
+            NonterminalKind::Ident if let Some((ident, is_raw)) = get_macro_ident(&self.token) => {
                 self.bump();
-                token::NtIdent(ident, is_raw)
+                NtIdent(ident, is_raw)
             }
             NonterminalKind::Ident => {
-                let token_str = pprust::token_to_string(&self.token);
-                let msg = &format!("expected ident, found {}", &token_str);
-                return Err(self.struct_span_err(self.token.span, msg));
+                return Err(self.dcx().create_err(UnexpectedNonterminal::Ident {
+                    span: self.token.span,
+                    token: self.token.clone(),
+                }));
+            }
+            NonterminalKind::Path => {
+                NtPath(P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?))
+            }
+            NonterminalKind::Meta => NtMeta(P(self.parse_attr_item(true)?)),
+            NonterminalKind::Vis => {
+                NtVis(P(self
+                    .collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?))
             }
-            NonterminalKind::Path => token::NtPath(
-                P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?),
-            ),
-            NonterminalKind::Meta => token::NtMeta(P(self.parse_attr_item(true)?)),
-            NonterminalKind::Vis => token::NtVis(
-                P(self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?),
-            ),
             NonterminalKind::Lifetime => {
                 if self.check_lifetime() {
-                    token::NtLifetime(self.expect_lifetime().ident)
+                    NtLifetime(self.expect_lifetime().ident)
                 } else {
-                    let token_str = pprust::token_to_string(&self.token);
-                    let msg = &format!("expected a lifetime, found `{}`", &token_str);
-                    return Err(self.struct_span_err(self.token.span, msg));
+                    return Err(self.dcx().create_err(UnexpectedNonterminal::Lifetime {
+                        span: self.token.span,
+                        token: self.token.clone(),
+                    }));
                 }
             }
         };
@@ -187,12 +190,12 @@ impl<'a> Parser<'a> {
             panic!(
                 "Missing tokens for nt {:?} at {:?}: {:?}",
                 nt,
-                nt.span(),
+                nt.use_span(),
                 pprust::nonterminal_to_string(&nt)
             );
         }
 
-        Ok(NtOrTt::Nt(nt))
+        Ok(ParseNtResult::Nt(nt))
     }
 }
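The non-terminal kinds parsed in this file are what `macro_rules!` fragment specifiers expand to. A small example from the macro author's side (ordinary stable Rust, independent of the parser internals):

// Each matcher below corresponds to a `NonterminalKind` parsed above:
// `:ident`, `:expr`, `:ty`, `:pat`, and so on.
macro_rules! make_getter {
    ($name:ident, $value:expr, $ty:ty) => {
        fn $name() -> $ty {
            $value
        }
    };
}

macro_rules! is_small {
    ($p:pat, $e:expr) => {
        matches!($e, $p)
    };
}

make_getter!(answer, 42, i32);

fn main() {
    assert_eq!(answer(), 42);
    assert!(is_small!(0..=9, answer() / 10));
}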
 
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index ba77a395840..12260ec95a5 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1,21 +1,52 @@
-use super::{ForceCollect, Parser, PathStyle, TrailingToken};
+use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken};
+use crate::errors::{
+    self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
+    DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
+    ExpectedCommaAfterPatternField, GenericArgsInPatRequireTurbofishSyntax,
+    InclusiveRangeExtraEquals, InclusiveRangeMatchArrow, InclusiveRangeNoEnd, InvalidMutInPattern,
+    PatternOnWrongSideOfAt, RefMutOrderIncorrect, RemoveLet, RepeatedMutInPattern,
+    SwitchRefBoxOrder, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg,
+    TrailingVertNotAllowed, UnexpectedExpressionInPattern, UnexpectedLifetimeInPattern,
+    UnexpectedParenInRangePat, UnexpectedParenInRangePatSugg,
+    UnexpectedVertVertBeforeFunctionParam, UnexpectedVertVertInPattern,
+};
+use crate::parser::expr::could_be_unclosed_char_literal;
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter};
+use rustc_ast::token::{self, BinOpToken, Delimiter, Token};
 use rustc_ast::{
-    self as ast, AttrVec, Attribute, BindingMode, Expr, ExprKind, MacCall, Mutability, Pat,
-    PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
+    self as ast, AttrVec, BindingAnnotation, ByRef, Expr, ExprKind, MacCall, Mutability, Pat,
+    PatField, PatFieldsRest, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
 };
 use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
-use rustc_span::source_map::{respan, Span, Spanned};
+use rustc_errors::{Applicability, DiagnosticBuilder, PResult};
+use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::source_map::{respan, Spanned};
 use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{ErrorGuaranteed, Span};
+use thin_vec::{thin_vec, ThinVec};
 
-pub(super) type Expected = Option<&'static str>;
+#[derive(PartialEq, Copy, Clone)]
+pub enum Expected {
+    ParameterName,
+    ArgumentName,
+    Identifier,
+    BindingPattern,
+}
 
-/// `Expected` for function and lambda parameter patterns.
-pub(super) const PARAM_EXPECTED: Expected = Some("parameter name");
+impl Expected {
+    // FIXME(#100717): migrate users of this to proper localization
+    fn to_string_or_fallback(expected: Option<Expected>) -> &'static str {
+        match expected {
+            Some(Expected::ParameterName) => "parameter name",
+            Some(Expected::ArgumentName) => "argument name",
+            Some(Expected::Identifier) => "identifier",
+            Some(Expected::BindingPattern) => "binding pattern",
+            None => "pattern",
+        }
+    }
+}
 
 const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here";
 
@@ -52,14 +83,25 @@ enum EatOrResult {
     None,
 }
 
+/// The syntax location of a given pattern. Used for diagnostics.
+#[derive(Clone, Copy)]
+pub enum PatternLocation {
+    LetBinding,
+    FunctionParameter,
+}
+
 impl<'a> Parser<'a> {
     /// Parses a pattern.
     ///
     /// Corresponds to `pat<no_top_alt>` in RFC 2535 and does not admit or-patterns
     /// at the top level. Used when parsing the parameters of lambda expressions,
     /// functions, function pointers, and `pat` macro fragments.
-    pub fn parse_pat_no_top_alt(&mut self, expected: Expected) -> PResult<'a, P<Pat>> {
-        self.parse_pat_with_range_pat(true, expected)
+    pub fn parse_pat_no_top_alt(
+        &mut self,
+        expected: Option<Expected>,
+        syntax_loc: Option<PatternLocation>,
+    ) -> PResult<'a, P<Pat>> {
+        self.parse_pat_with_range_pat(true, expected, syntax_loc)
     }
 
     /// Parses a pattern.
@@ -72,22 +114,23 @@ impl<'a> Parser<'a> {
     /// simplify the grammar somewhat.
     pub fn parse_pat_allow_top_alt(
         &mut self,
-        expected: Expected,
+        expected: Option<Expected>,
         rc: RecoverComma,
         ra: RecoverColon,
         rt: CommaRecoveryMode,
     ) -> PResult<'a, P<Pat>> {
-        self.parse_pat_allow_top_alt_inner(expected, rc, ra, rt).map(|(pat, _)| pat)
+        self.parse_pat_allow_top_alt_inner(expected, rc, ra, rt, None).map(|(pat, _)| pat)
     }
 
     /// Returns the pattern and a bool indicating whether we recovered from a trailing vert (true =
     /// recovered).
     fn parse_pat_allow_top_alt_inner(
         &mut self,
-        expected: Expected,
+        expected: Option<Expected>,
         rc: RecoverComma,
         ra: RecoverColon,
         rt: CommaRecoveryMode,
+        syntax_loc: Option<PatternLocation>,
     ) -> PResult<'a, (P<Pat>, bool)> {
         // Keep track of whether we recovered from a trailing vert so that we can avoid duplicated
         // suggestions (which bothers rustfix).
@@ -100,8 +143,20 @@ impl<'a> Parser<'a> {
         };
 
         // Parse the first pattern (`p_0`).
-        let mut first_pat = self.parse_pat_no_top_alt(expected)?;
-        if rc == RecoverComma::Yes {
+        let mut first_pat = match self.parse_pat_no_top_alt(expected, syntax_loc) {
+            Ok(pat) => pat,
+            Err(err)
+                if self.token.is_reserved_ident()
+                    && !self.token.is_keyword(kw::In)
+                    && !self.token.is_keyword(kw::If) =>
+            {
+                err.emit();
+                self.bump();
+                self.mk_pat(self.token.span, PatKind::Wild)
+            }
+            Err(err) => return Err(err),
+        };
+        if rc == RecoverComma::Yes && !first_pat.could_be_never_pattern() {
             self.maybe_recover_unexpected_comma(first_pat.span, rt)?;
         }
 
@@ -121,7 +176,7 @@ impl<'a> Parser<'a> {
                 // If there was a leading vert, treat this as an or-pattern. This improves
                 // diagnostics.
                 let span = leading_vert_span.to(self.prev_token.span);
-                return Ok((self.mk_pat(span, PatKind::Or(vec![first_pat])), trailing_vert));
+                return Ok((self.mk_pat(span, PatKind::Or(thin_vec![first_pat])), trailing_vert));
             }
 
             return Ok((first_pat, trailing_vert));
@@ -129,7 +184,7 @@ impl<'a> Parser<'a> {
 
         // Parse the patterns `p_1 | ... | p_n` where `n > 0`.
         let lo = leading_vert_span.unwrap_or(first_pat.span);
-        let mut pats = vec![first_pat];
+        let mut pats = thin_vec![first_pat];
         loop {
             match self.eat_or_separator(Some(lo)) {
                 EatOrResult::AteOr => {}
@@ -139,11 +194,11 @@ impl<'a> Parser<'a> {
                     break;
                 }
             }
-            let pat = self.parse_pat_no_top_alt(expected).map_err(|mut err| {
+            let pat = self.parse_pat_no_top_alt(expected, syntax_loc).map_err(|mut err| {
                 err.span_label(lo, WHILE_PARSING_OR_MSG);
                 err
             })?;
-            if rc == RecoverComma::Yes {
+            if rc == RecoverComma::Yes && !pat.could_be_never_pattern() {
                 self.maybe_recover_unexpected_comma(pat.span, rt)?;
             }
             pats.push(pat);
@@ -163,9 +218,9 @@ impl<'a> Parser<'a> {
     /// otherwise).
     pub(super) fn parse_pat_before_ty(
         &mut self,
-        expected: Expected,
+        expected: Option<Expected>,
         rc: RecoverComma,
-        syntax_loc: &str,
+        syntax_loc: PatternLocation,
     ) -> PResult<'a, (P<Pat>, bool)> {
         // We use `parse_pat_allow_top_alt` regardless of whether we actually want top-level
         // or-patterns so that we can detect when a user tries to use it. This allows us to print a
@@ -175,31 +230,31 @@ impl<'a> Parser<'a> {
             rc,
             RecoverColon::No,
             CommaRecoveryMode::LikelyTuple,
+            Some(syntax_loc),
         )?;
         let colon = self.eat(&token::Colon);
 
         if let PatKind::Or(pats) = &pat.kind {
-            let msg = format!("top-level or-patterns are not allowed in {}", syntax_loc);
-            let (help, fix) = if pats.len() == 1 {
-                // If all we have is a leading vert, then print a special message. This is the case
-                // if `parse_pat_allow_top_alt` returns an or-pattern with one variant.
-                let msg = "remove the `|`";
-                let fix = pprust::pat_to_string(&pat);
-                (msg, fix)
+            let span = pat.span;
+            let pat = pprust::pat_to_string(&pat);
+            let sub = if pats.len() == 1 {
+                Some(TopLevelOrPatternNotAllowedSugg::RemoveLeadingVert { span, pat })
             } else {
-                let msg = "wrap the pattern in parentheses";
-                let fix = format!("({})", pprust::pat_to_string(&pat));
-                (msg, fix)
+                Some(TopLevelOrPatternNotAllowedSugg::WrapInParens { span, pat })
             };
 
+            let err = self.dcx().create_err(match syntax_loc {
+                PatternLocation::LetBinding => {
+                    TopLevelOrPatternNotAllowed::LetBinding { span, sub }
+                }
+                PatternLocation::FunctionParameter => {
+                    TopLevelOrPatternNotAllowed::FunctionParameter { span, sub }
+                }
+            });
             if trailing_vert {
-                // We already emitted an error and suggestion to remove the trailing vert. Don't
-                // emit again.
-                self.sess.span_diagnostic.delay_span_bug(pat.span, &msg);
+                err.delay_as_bug();
             } else {
-                self.struct_span_err(pat.span, &msg)
-                    .span_suggestion(pat.span, help, fix, Applicability::MachineApplicable)
-                    .emit();
+                err.emit();
             }
         }
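Editor's illustration of the let-binding and function-parameter cases these restructured diagnostics cover; the snippets are intentionally rejected, and the help texts are carried over from the removed string literals:

    let A | B = make();   // error: top-level or-patterns are not allowed here
                          // help: wrap the pattern in parentheses: `(A | B)`
    let | C = make();     // only a leading `|`: the help is to remove the `|` instead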
 
@@ -216,15 +271,15 @@ impl<'a> Parser<'a> {
         // a leading `||` probably doesn't indicate an or-pattern attempt, so we handle that
         // separately.
         if let token::OrOr = self.token.kind {
-            let span = self.token.span;
-            let mut err = self.struct_span_err(span, "unexpected `||` before function parameter");
-            err.span_suggestion(span, "remove the `||`", "", Applicability::MachineApplicable);
-            err.note("alternatives in or-patterns are separated with `|`, not `||`");
-            err.emit();
+            self.dcx().emit_err(UnexpectedVertVertBeforeFunctionParam { span: self.token.span });
             self.bump();
         }
 
-        self.parse_pat_before_ty(PARAM_EXPECTED, RecoverComma::No, "function parameters")
+        self.parse_pat_before_ty(
+            Some(Expected::ParameterName),
+            RecoverComma::No,
+            PatternLocation::FunctionParameter,
+        )
     }
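Editor's sketch of the input `UnexpectedVertVertBeforeFunctionParam` recovers from (deliberately ill-formed; the note text comes from the removed builder above):

    fn f(|| x: u32) {}   // error: unexpected `||` before function parameter
                         // note: alternatives in or-patterns are separated with `|`, not `||`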
 
     /// Eat the or-pattern `|` separator.
@@ -234,7 +289,7 @@ impl<'a> Parser<'a> {
             EatOrResult::TrailingVert
         } else if matches!(self.token.kind, token::OrOr) {
             // Found `||`; Recover and pretend we parsed `|`.
-            self.ban_unexpected_or_or(lo);
+            self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
             self.bump();
             EatOrResult::AteOr
         } else if self.eat(&token::BinOp(token::Or)) {
@@ -268,7 +323,13 @@ impl<'a> Parser<'a> {
         });
         match (is_end_ahead, &self.token.kind) {
             (true, token::BinOp(token::Or) | token::OrOr) => {
-                self.ban_illegal_vert(lo, "trailing", "not allowed in an or-pattern");
+                // A `|` or possibly `||` token shouldn't be here. Ban it.
+                self.dcx().emit_err(TrailingVertNotAllowed {
+                    span: self.token.span,
+                    start: lo,
+                    token: self.token.clone(),
+                    note_double_vert: matches!(self.token.kind, token::OrOr).then_some(()),
+                });
                 self.bump();
                 true
             }
@@ -276,38 +337,93 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// We have parsed `||` instead of `|`. Error and suggest `|` instead.
-    fn ban_unexpected_or_or(&mut self, lo: Option<Span>) {
-        let mut err = self.struct_span_err(self.token.span, "unexpected token `||` in pattern");
-        err.span_suggestion(
-            self.token.span,
-            "use a single `|` to separate multiple alternative patterns",
-            "|",
-            Applicability::MachineApplicable,
-        );
-        if let Some(lo) = lo {
-            err.span_label(lo, WHILE_PARSING_OR_MSG);
+    /// Ensures that the last parsed pattern (or pattern range bound) is not followed by a method call or an operator.
+    ///
+    /// `is_end_bound` indicates whether the last parsed thing was the end bound of a range pattern (see [`parse_pat_range_end`](Self::parse_pat_range_end))
+    /// in order to say "expected a pattern range bound" instead of "expected a pattern";
+    /// ```text
+    /// 0..=1 + 2
+    ///     ^^^^^
+    /// ```
+    /// Only the end bound is spanned, and this function has no idea whether there was a `..=` before `pat_span`, hence the parameter.
+    #[must_use = "the pattern must be discarded as `PatKind::Err` if this function returns Some"]
+    fn maybe_recover_trailing_expr(
+        &mut self,
+        pat_span: Span,
+        is_end_bound: bool,
+    ) -> Option<ErrorGuaranteed> {
+        if self.prev_token.is_keyword(kw::Underscore) || !self.may_recover() {
+            // Don't recover anything after an `_` or if recovery is disabled.
+            return None;
         }
-        err.emit();
-    }
 
-    /// A `|` or possibly `||` token shouldn't be here. Ban it.
-    fn ban_illegal_vert(&mut self, lo: Option<Span>, pos: &str, ctx: &str) {
-        let span = self.token.span;
-        let mut err = self.struct_span_err(span, &format!("a {} `|` is {}", pos, ctx));
-        err.span_suggestion(
-            span,
-            &format!("remove the `{}`", pprust::token_to_string(&self.token)),
-            "",
-            Applicability::MachineApplicable,
-        );
-        if let Some(lo) = lo {
-            err.span_label(lo, WHILE_PARSING_OR_MSG);
+        // Check for `.hello()`, but allow `.Hello()` to be recovered as `, Hello()` in `parse_seq_to_before_tokens()`.
+        let has_trailing_method = self.check_noexpect(&token::Dot)
+            && self.look_ahead(1, |tok| {
+                tok.ident()
+                    .and_then(|(ident, _)| ident.name.as_str().chars().next())
+                    .is_some_and(char::is_lowercase)
+            })
+            && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Parenthesis));
+
+        // Check for operators.
+        // `|` is excluded as it is used in pattern alternatives and lambdas,
+        // `?` is included for error propagation,
+        // `[` is included for indexing operations,
+        // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`)
+        let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or)
+            || self.token.kind == token::Question
+            || (self.token.kind == token::OpenDelim(Delimiter::Bracket)
+                && self.look_ahead(1, |tok| tok.kind != token::CloseDelim(Delimiter::Bracket)));
+
+        if !has_trailing_method && !has_trailing_operator {
+            // Nothing to recover here.
+            return None;
         }
-        if let token::OrOr = self.token.kind {
-            err.note("alternatives in or-patterns are separated with `|`, not `||`");
+
+        // Let's try to parse an expression to emit a better diagnostic.
+        let mut snapshot = self.create_snapshot_for_diagnostic();
+        snapshot.restrictions.insert(Restrictions::IS_PAT);
+
+        // Parse `?`, `.f`, `(arg0, arg1, ...)` or `[expr]` until they've all been eaten.
+        if let Ok(expr) = snapshot
+            .parse_expr_dot_or_call_with(
+                self.mk_expr_err(pat_span), // equivalent to transforming the parsed pattern into an `Expr`
+                pat_span,
+                AttrVec::new(),
+            )
+            .map_err(|err| err.cancel())
+        {
+            let non_assoc_span = expr.span;
+
+            // Parse an associative expression such as `+ expr`, `% expr`, ...
+            // Assignments, ranges, and `|` are disabled by [`Restrictions::IS_PAT`].
+            if let Ok(expr) =
+                snapshot.parse_expr_assoc_with(0, expr.into()).map_err(|err| err.cancel())
+            {
+                // We got a valid expression.
+                self.restore_snapshot(snapshot);
+                self.restrictions.remove(Restrictions::IS_PAT);
+
+                let is_bound = is_end_bound
+                    // is_start_bound: either `..` or `)..`
+                    || self.token.is_range_separator()
+                    || self.token.kind == token::CloseDelim(Delimiter::Parenthesis)
+                        && self.look_ahead(1, Token::is_range_separator);
+
+                // Check that `parse_expr_assoc_with` didn't eat a rhs.
+                let is_method_call = has_trailing_method && non_assoc_span == expr.span;
+
+                return Some(self.dcx().emit_err(UnexpectedExpressionInPattern {
+                    span: expr.span,
+                    is_bound,
+                    is_method_call,
+                }));
+            }
         }
-        err.emit();
+
+        // We got a trailing method/operator, but we couldn't parse an expression.
+        None
     }
 
     /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
@@ -315,12 +431,19 @@ impl<'a> Parser<'a> {
     fn parse_pat_with_range_pat(
         &mut self,
         allow_range_pat: bool,
-        expected: Expected,
+        expected: Option<Expected>,
+        syntax_loc: Option<PatternLocation>,
     ) -> PResult<'a, P<Pat>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
         maybe_whole!(self, NtPat, |x| x);
 
-        let lo = self.token.span;
+        let mut lo = self.token.span;
+
+        if self.token.is_keyword(kw::Let) && self.look_ahead(1, |tok| tok.can_begin_pattern()) {
+            self.bump();
+            self.dcx().emit_err(RemoveLet { span: lo });
+            lo = self.token.span;
+        }
 
         let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
             self.parse_pat_deref(expected)?
@@ -345,15 +468,25 @@ impl<'a> Parser<'a> {
             self.recover_dotdotdot_rest_pat(lo)
         } else if let Some(form) = self.parse_range_end() {
             self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
+        } else if self.eat(&token::Not) {
+            // Parse `!`
+            self.sess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
+            PatKind::Never
         } else if self.eat_keyword(kw::Underscore) {
-            // Parse _
+            // Parse `_`
             PatKind::Wild
         } else if self.eat_keyword(kw::Mut) {
-            self.parse_pat_ident_mut()?
+            self.parse_pat_ident_mut(syntax_loc)?
         } else if self.eat_keyword(kw::Ref) {
+            if self.check_keyword(kw::Box) {
+                // Suggest `box ref`.
+                let span = self.prev_token.span.to(self.token.span);
+                self.bump();
+                self.dcx().emit_err(SwitchRefBoxOrder { span });
+            }
             // Parse ref ident @ pat / ref mut ident @ pat
             let mutbl = self.parse_mutability();
-            self.parse_pat_ident(BindingMode::ByRef(mutbl))?
+            self.parse_pat_ident(BindingAnnotation(ByRef::Yes, mutbl), syntax_loc)?
         } else if self.eat_keyword(kw::Box) {
             self.parse_pat_box()?
         } else if self.check_inline_const(0) {
@@ -365,42 +498,83 @@ impl<'a> Parser<'a> {
             } else {
                 PatKind::Lit(const_expr)
             }
-        } else if self.can_be_ident_pat() {
+        // Don't eagerly error on semantically invalid tokens when matching
+        // declarative macros, as the input to those doesn't have to be
+        // semantically valid. For attribute/derive proc macros this is not the
+        // case, so doing the recovery for them is fine.
+        } else if self.can_be_ident_pat()
+            || (self.is_lit_bad_ident().is_some() && self.may_recover())
+        {
             // Parse `ident @ pat`
             // This can give false positives and parse nullary enums,
             // they are dealt with later in resolve.
-            self.parse_pat_ident(BindingMode::ByValue(Mutability::Not))?
+            self.parse_pat_ident(BindingAnnotation::NONE, syntax_loc)?
         } else if self.is_start_of_pat_with_path() {
             // Parse pattern starting with a path
             let (qself, path) = if self.eat_lt() {
                 // Parse a qualified path
-                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+                let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
                 (Some(qself), path)
             } else {
                 // Parse an unqualified path
-                (None, self.parse_path(PathStyle::Expr)?)
+                (None, self.parse_path(PathStyle::Pat)?)
             };
             let span = lo.to(self.prev_token.span);
 
             if qself.is_none() && self.check(&token::Not) {
                 self.parse_pat_mac_invoc(path)?
             } else if let Some(form) = self.parse_range_end() {
-                let begin = self.mk_expr(span, ExprKind::Path(qself, path), AttrVec::new());
+                let begin = self.mk_expr(span, ExprKind::Path(qself, path));
                 self.parse_pat_range_begin_with(begin, form)?
             } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
                 self.parse_pat_struct(qself, path)?
             } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
                 self.parse_pat_tuple_struct(qself, path)?
             } else {
-                PatKind::Path(qself, path)
+                match self.maybe_recover_trailing_expr(span, false) {
+                    Some(guar) => PatKind::Err(guar),
+                    None => PatKind::Path(qself, path),
+                }
             }
+        } else if let token::Lifetime(lt) = self.token.kind
+            // In pattern position, we're totally fine with using "next token isn't colon"
+            // as a heuristic. We could probably just always try to recover if it's a lifetime,
+            // because we never have `'a: label {}` in a pattern position anyways, but it does
+            // keep us from suggesting something like `let 'a: Ty = ..` => `let 'a': Ty = ..`
+            && could_be_unclosed_char_literal(Ident::with_dummy_span(lt))
+            && !self.look_ahead(1, |token| matches!(token.kind, token::Colon))
+        {
+            // Recover a `'a` as a `'a'` literal
+            let lt = self.expect_lifetime();
+            let (lit, _) =
+                self.recover_unclosed_char(lt.ident, Parser::mk_token_lit_char, |self_| {
+                    let expected = Expected::to_string_or_fallback(expected);
+                    let msg = format!(
+                        "expected {}, found {}",
+                        expected,
+                        super::token_descr(&self_.token)
+                    );
+
+                    self_
+                        .dcx()
+                        .struct_span_err(self_.token.span, msg)
+                        .with_span_label(self_.token.span, format!("expected {expected}"))
+                });
+            PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
         } else {
             // Try to parse everything else as literal with optional minus
             match self.parse_literal_maybe_minus() {
-                Ok(begin) => match self.parse_range_end() {
-                    Some(form) => self.parse_pat_range_begin_with(begin, form)?,
-                    None => PatKind::Lit(begin),
-                },
+                Ok(begin) => {
+                    let begin = match self.maybe_recover_trailing_expr(begin.span, false) {
+                        Some(_) => self.mk_expr_err(begin.span),
+                        None => begin,
+                    };
+
+                    match self.parse_range_end() {
+                        Some(form) => self.parse_pat_range_begin_with(begin, form)?,
+                        None => PatKind::Lit(begin),
+                    }
+                }
                 Err(err) => return self.fatal_unexpected_non_pat(err, expected),
             }
         };
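Editor's note on the new `PatKind::Never` arm: a rough sketch of the surface syntax it starts to accept behind the `never_patterns` feature gate. Whether an arm with a never pattern may omit its body is decided later in the compiler and is assumed here, not shown by this patch:

    #![feature(never_patterns)]

    enum Void {}

    fn no_error(v: Result<u32, Void>) -> u32 {
        match v {
            Ok(n) => n,
            Err(!),   // `!` parses as a never pattern: this arm is statically unreachable
        }
    }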
@@ -423,22 +597,14 @@ impl<'a> Parser<'a> {
         self.bump(); // `...`
 
         // The user probably mistook `...` for a rest pattern `..`.
-        self.struct_span_err(lo, "unexpected `...`")
-            .span_label(lo, "not a valid pattern")
-            .span_suggestion_short(
-                lo,
-                "for a rest pattern, use `..` instead of `...`",
-                "..",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.dcx().emit_err(DotDotDotRestPattern { span: lo });
         PatKind::Rest
     }
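Editor's illustration of the `...`-as-rest-pattern recovery, now emitted as `DotDotDotRestPattern`; the suggestion text matches the removed builder:

    let [first, ...] = [1, 2, 3];   // error: unexpected `...`
                                    // help: for a rest pattern, use `..` instead of `...`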
 
     /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`.
     ///
     /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs`
-    /// should already have been parsed by now  at this point,
+    /// should already have been parsed by now;
     /// if the next token is `@` then we can try to parse the more general form.
     ///
     /// Consult `parse_pat_ident` for the `binding` grammar.
@@ -455,43 +621,33 @@ impl<'a> Parser<'a> {
 
         // At this point we attempt to parse `@ $pat_rhs` and emit an error.
         self.bump(); // `@`
-        let mut rhs = self.parse_pat_no_top_alt(None)?;
-        let sp = lhs.span.to(rhs.span);
+        let mut rhs = self.parse_pat_no_top_alt(None, None)?;
+        let whole_span = lhs.span.to(rhs.span);
 
-        if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind {
+        if let PatKind::Ident(_, _, sub @ None) = &mut rhs.kind {
             // The user inverted the order, so help them fix that.
-            let mut applicability = Applicability::MachineApplicable;
-            // FIXME(bindings_after_at): Remove this code when stabilizing the feature.
-            lhs.walk(&mut |p| match p.kind {
-                // `check_match` is unhappy if the subpattern has a binding anywhere.
-                PatKind::Ident(..) => {
-                    applicability = Applicability::MaybeIncorrect;
-                    false // Short-circuit.
-                }
-                _ => true,
-            });
-
             let lhs_span = lhs.span;
             // Move the LHS into the RHS as a subpattern.
             // The RHS is now the full pattern.
             *sub = Some(lhs);
 
-            self.struct_span_err(sp, "pattern on wrong side of `@`")
-                .span_label(lhs_span, "pattern on the left, should be on the right")
-                .span_label(rhs.span, "binding on the right, should be on the left")
-                .span_suggestion(sp, "switch the order", pprust::pat_to_string(&rhs), applicability)
-                .emit();
+            self.dcx().emit_err(PatternOnWrongSideOfAt {
+                whole_span,
+                whole_pat: pprust::pat_to_string(&rhs),
+                pattern: lhs_span,
+                binding: rhs.span,
+            });
         } else {
             // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
             rhs.kind = PatKind::Wild;
-            self.struct_span_err(sp, "left-hand side of `@` must be a binding")
-                .span_label(lhs.span, "interpreted as a pattern, not a binding")
-                .span_label(rhs.span, "also a pattern")
-                .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`")
-                .emit();
+            self.dcx().emit_err(ExpectedBindingLeftOfAt {
+                whole_span,
+                lhs: lhs.span,
+                rhs: rhs.span,
+            });
         }
 
-        rhs.span = sp;
+        rhs.span = whole_span;
         Ok(rhs)
     }
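Editor's illustration of the two `@` errors converted to struct diagnostics above; the second input mirrors the `A(x) @ B(y)` case named in the comment:

    match get() {
        Some(0) @ x => {}   // error: pattern on wrong side of `@`; suggested order: `x @ Some(0)`
        A(x) @ B(y) => {}   // error: left-hand side of `@` must be a binding
        _ => {}
    }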
 
@@ -506,39 +662,29 @@ impl<'a> Parser<'a> {
             _ => return,
         }
 
-        self.struct_span_err(pat.span, "the range pattern here has ambiguous interpretation")
-            .span_suggestion(
-                pat.span,
-                "add parentheses to clarify the precedence",
-                format!("({})", pprust::pat_to_string(&pat)),
-                // "ambiguous interpretation" implies that we have to be guessing
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+        self.dcx()
+            .emit_err(AmbiguousRangePattern { span: pat.span, pat: pprust::pat_to_string(pat) });
     }
 
     /// Parse `&pat` / `&mut pat`.
-    fn parse_pat_deref(&mut self, expected: Expected) -> PResult<'a, PatKind> {
+    fn parse_pat_deref(&mut self, expected: Option<Expected>) -> PResult<'a, PatKind> {
         self.expect_and()?;
-        self.recover_lifetime_in_deref_pat();
-        let mutbl = self.parse_mutability();
-        let subpat = self.parse_pat_with_range_pat(false, expected)?;
-        Ok(PatKind::Ref(subpat, mutbl))
-    }
-
-    fn recover_lifetime_in_deref_pat(&mut self) {
         if let token::Lifetime(name) = self.token.kind {
             self.bump(); // `'a`
 
-            let span = self.prev_token.span;
-            self.struct_span_err(span, &format!("unexpected lifetime `{}` in pattern", name))
-                .span_suggestion(span, "remove the lifetime", "", Applicability::MachineApplicable)
-                .emit();
+            self.dcx()
+                .emit_err(UnexpectedLifetimeInPattern { span: self.prev_token.span, symbol: name });
         }
+
+        let mutbl = self.parse_mutability();
+        let subpat = self.parse_pat_with_range_pat(false, expected, None)?;
+        Ok(PatKind::Ref(subpat, mutbl))
     }
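Editor's sketch for the lifetime recovery now inlined into `parse_pat_deref` and for the neighbouring ambiguous-range check; both inputs are rejected on purpose, and the help texts follow the removed and current messages:

    match r {
        &'a x => {}    // error: unexpected lifetime `'a` in pattern; help: remove the lifetime
        &0..=9 => {}   // error: the range pattern here has ambiguous interpretation
                       // help: add parentheses to clarify the precedence: `&(0..=9)`
        _ => {}
    }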
 
     /// Parse a tuple or parenthesis pattern.
     fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> {
+        let open_paren = self.token.span;
+
         let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| {
             p.parse_pat_allow_top_alt(
                 None,
@@ -551,34 +697,73 @@ impl<'a> Parser<'a> {
         // Here, `(pat,)` is a tuple pattern.
         // For backward compatibility, `(..)` is a tuple pattern as well.
         Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) {
-            PatKind::Paren(fields.into_iter().next().unwrap())
+            let pat = fields.into_iter().next().unwrap();
+            let close_paren = self.prev_token.span;
+
+            match &pat.kind {
+                // recover ranges with parentheses around the `(start)..`
+                PatKind::Lit(begin)
+                    if self.may_recover()
+                        && let Some(form) = self.parse_range_end() =>
+                {
+                    self.dcx().emit_err(UnexpectedParenInRangePat {
+                        span: vec![open_paren, close_paren],
+                        sugg: UnexpectedParenInRangePatSugg {
+                            start_span: open_paren,
+                            end_span: close_paren,
+                        },
+                    });
+
+                    self.parse_pat_range_begin_with(begin.clone(), form)?
+                }
+                // recover ranges with parentheses around the `(start)..`
+                PatKind::Err(_)
+                    if self.may_recover()
+                        && let Some(form) = self.parse_range_end() =>
+                {
+                    self.dcx().emit_err(UnexpectedParenInRangePat {
+                        span: vec![open_paren, close_paren],
+                        sugg: UnexpectedParenInRangePatSugg {
+                            start_span: open_paren,
+                            end_span: close_paren,
+                        },
+                    });
+
+                    self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)?
+                }
+
+                // (pat) with optional parentheses
+                _ => PatKind::Paren(pat),
+            }
         } else {
             PatKind::Tuple(fields)
         })
     }
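Editor's sketch of the new parenthesized-range-start recovery (`UnexpectedParenInRangePat`); the exact rendered wording is an assumption, but the shape of the input is what the match arms above look for:

    match n {
        (0)..=9 => {}   // error: unexpected parentheses around the range start; suggested fix: `0..=9`
        _ => {}
    }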
 
     /// Parse a mutable binding with the `mut` token already eaten.
-    fn parse_pat_ident_mut(&mut self) -> PResult<'a, PatKind> {
+    fn parse_pat_ident_mut(&mut self, syntax_loc: Option<PatternLocation>) -> PResult<'a, PatKind> {
         let mut_span = self.prev_token.span;
 
         if self.eat_keyword(kw::Ref) {
-            return self.recover_mut_ref_ident(mut_span);
+            self.dcx().emit_err(RefMutOrderIncorrect { span: mut_span.to(self.prev_token.span) });
+            return self.parse_pat_ident(BindingAnnotation::REF_MUT, syntax_loc);
         }
 
         self.recover_additional_muts();
 
         // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
-        if let token::Interpolated(ref nt) = self.token.kind {
-            if let token::NtPat(_) = **nt {
-                self.expected_ident_found().emit();
+        if let token::Interpolated(nt) = &self.token.kind {
+            if let token::NtPat(..) = &nt.0 {
+                self.expected_ident_found_err().emit();
             }
         }
 
         // Parse the pattern we hope to be an identifier.
-        let mut pat = self.parse_pat_no_top_alt(Some("identifier"))?;
+        let mut pat = self.parse_pat_no_top_alt(Some(Expected::Identifier), None)?;
 
         // If we don't have `mut $ident (@ pat)?`, error.
-        if let PatKind::Ident(BindingMode::ByValue(m @ Mutability::Not), ..) = &mut pat.kind {
+        if let PatKind::Ident(BindingAnnotation(ByRef::No, m @ Mutability::Not), ..) = &mut pat.kind
+        {
             // Don't recurse into the subpattern.
             // `mut` on the outer binding doesn't affect the inner bindings.
             *m = Mutability::Mut;
@@ -591,29 +776,14 @@ impl<'a> Parser<'a> {
         Ok(pat.into_inner().kind)
     }
 
-    /// Recover on `mut ref? ident @ pat` and suggest
-    /// that the order of `mut` and `ref` is incorrect.
-    fn recover_mut_ref_ident(&mut self, lo: Span) -> PResult<'a, PatKind> {
-        let mutref_span = lo.to(self.prev_token.span);
-        self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
-            .span_suggestion(
-                mutref_span,
-                "try switching the order",
-                "ref mut",
-                Applicability::MachineApplicable,
-            )
-            .emit();
-
-        self.parse_pat_ident(BindingMode::ByRef(Mutability::Mut))
-    }
-
     /// Turn all by-value immutable bindings in a pattern into mutable bindings.
     /// Returns `true` if any change was made.
     fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
         struct AddMut(bool);
         impl MutVisitor for AddMut {
             fn visit_pat(&mut self, pat: &mut P<Pat>) {
-                if let PatKind::Ident(BindingMode::ByValue(m @ Mutability::Not), ..) = &mut pat.kind
+                if let PatKind::Ident(BindingAnnotation(ByRef::No, m @ Mutability::Not), ..) =
+                    &mut pat.kind
                 {
                     self.0 = true;
                     *m = Mutability::Mut;
@@ -629,17 +799,14 @@ impl<'a> Parser<'a> {
 
     /// Error on `mut $pat` where `$pat` is not an ident.
     fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
-        let span = lo.to(pat.span);
-        let fix = pprust::pat_to_string(&pat);
-        let (problem, suggestion) = if changed_any_binding {
-            ("`mut` must be attached to each individual binding", "add `mut` to each binding")
+        self.dcx().emit_err(if changed_any_binding {
+            InvalidMutInPattern::NestedIdent {
+                span: lo.to(pat.span),
+                pat: pprust::pat_to_string(pat),
+            }
         } else {
-            ("`mut` must be followed by a named binding", "remove the `mut` prefix")
-        };
-        self.struct_span_err(span, problem)
-            .span_suggestion(span, suggestion, fix, Applicability::MachineApplicable)
-            .note("`mut` may be followed by `variable` and `variable @ pattern`")
-            .emit();
+            InvalidMutInPattern::NonIdent { span: lo.until(pat.span) }
+        });
     }
 
     /// Eat any extraneous `mut`s and error + recover if we ate any.
@@ -650,41 +817,33 @@ impl<'a> Parser<'a> {
             return;
         }
 
-        let span = lo.to(self.prev_token.span);
-        self.struct_span_err(span, "`mut` on a binding may not be repeated")
-            .span_suggestion(
-                span,
-                "remove the additional `mut`s",
-                "",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.dcx().emit_err(RepeatedMutInPattern { span: lo.to(self.prev_token.span) });
     }
 
     /// Parse macro invocation
     fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
         self.bump();
-        let args = self.parse_mac_args()?;
-        let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription };
+        let args = self.parse_delim_args()?;
+        let mac = P(MacCall { path, args });
         Ok(PatKind::MacCall(mac))
     }
 
     fn fatal_unexpected_non_pat(
         &mut self,
-        err: DiagnosticBuilder<'a, ErrorGuaranteed>,
-        expected: Expected,
+        err: DiagnosticBuilder<'a>,
+        expected: Option<Expected>,
     ) -> PResult<'a, P<Pat>> {
         err.cancel();
 
-        let expected = expected.unwrap_or("pattern");
+        let expected = Expected::to_string_or_fallback(expected);
         let msg = format!("expected {}, found {}", expected, super::token_descr(&self.token));
 
-        let mut err = self.struct_span_err(self.token.span, &msg);
-        err.span_label(self.token.span, format!("expected {}", expected));
+        let mut err = self.dcx().struct_span_err(self.token.span, msg);
+        err.span_label(self.token.span, format!("expected {expected}"));
 
         let sp = self.sess.source_map().start_point(self.token.span);
         if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
-            self.sess.expr_parentheses_needed(&mut err, *sp);
+            err.subdiagnostic(ExprParenthesesNeeded::surrounding(*sp));
         }
 
         Err(err)
@@ -718,66 +877,53 @@ impl<'a> Parser<'a> {
             // Parsing e.g. `X..`.
             if let RangeEnd::Included(_) = re.node {
                 // FIXME(Centril): Consider semantic errors instead in `ast_validation`.
-                self.inclusive_range_with_incorrect_end(re.span);
+                self.inclusive_range_with_incorrect_end();
             }
             None
         };
         Ok(PatKind::Range(Some(begin), end, re))
     }
 
-    pub(super) fn inclusive_range_with_incorrect_end(&mut self, span: Span) {
+    pub(super) fn inclusive_range_with_incorrect_end(&mut self) {
         let tok = &self.token;
-
+        let span = self.prev_token.span;
         // If the user typed "..==" instead of "..=", we want to give them
         // a specific error message telling them to use "..=".
+        // If they typed "..=>", suggest they use ".. =>".
         // Otherwise, we assume that they meant to type a half open exclusive
         // range and give them an error telling them to do that instead.
-        if matches!(tok.kind, token::Eq) && tok.span.lo() == span.hi() {
-            let span_with_eq = span.to(tok.span);
+        let no_space = tok.span.lo() == span.hi();
+        match tok.kind {
+            token::Eq if no_space => {
+                let span_with_eq = span.to(tok.span);
 
-            // Ensure the user doesn't receive unhelpful unexpected token errors
-            self.bump();
-            if self.is_pat_range_end_start(0) {
-                let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
-            }
+                // Ensure the user doesn't receive unhelpful unexpected token errors
+                self.bump();
+                if self.is_pat_range_end_start(0) {
+                    let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
+                }
 
-            self.error_inclusive_range_with_extra_equals(span_with_eq);
-        } else {
-            self.error_inclusive_range_with_no_end(span);
+                self.dcx().emit_err(InclusiveRangeExtraEquals { span: span_with_eq });
+            }
+            token::Gt if no_space => {
+                let after_pat = span.with_hi(span.hi() - rustc_span::BytePos(1)).shrink_to_hi();
+                self.dcx().emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat });
+            }
+            _ => {
+                self.dcx().emit_err(InclusiveRangeNoEnd { span });
+            }
         }
     }
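Editor's illustration of the three cases the rewritten `inclusive_range_with_incorrect_end` distinguishes (all intentionally invalid; wording paraphrased from the removed builders and the new comment):

    match n {
        0..==9 => {}   // `..==`: unexpected `=` after inclusive range, suggest `..=`
        0..=> {}       // `..=` glued to the arrow: suggest writing `0.. =>`
        0..= => {}     // otherwise: E0586, inclusive range with no end
    }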
 
-    fn error_inclusive_range_with_extra_equals(&self, span: Span) {
-        self.struct_span_err(span, "unexpected `=` after inclusive range")
-            .span_suggestion_short(span, "use `..=` instead", "..=", Applicability::MaybeIncorrect)
-            .note("inclusive ranges end with a single equals sign (`..=`)")
-            .emit();
-    }
-
-    fn error_inclusive_range_with_no_end(&self, span: Span) {
-        struct_span_err!(self.sess.span_diagnostic, span, E0586, "inclusive range with no end")
-            .span_suggestion_short(span, "use `..` instead", "..", Applicability::MachineApplicable)
-            .note("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)")
-            .emit();
-    }
-
     /// Parse a range-to pattern, `..X` or `..=X` where `X` remains to be parsed.
     ///
     /// The form `...X` is prohibited to reduce confusion with the potential
     /// expression syntax `...expr` for splatting in expressions.
     fn parse_pat_range_to(&mut self, mut re: Spanned<RangeEnd>) -> PResult<'a, PatKind> {
         let end = self.parse_pat_range_end()?;
-        self.sess.gated_spans.gate(sym::half_open_range_patterns, re.span.to(self.prev_token.span));
-        if let RangeEnd::Included(ref mut syn @ RangeSyntax::DotDotDot) = &mut re.node {
+        if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
             *syn = RangeSyntax::DotDotEq;
-            self.struct_span_err(re.span, "range-to patterns with `...` are not allowed")
-                .span_suggestion_short(
-                    re.span,
-                    "use `..=` instead",
-                    "..=",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            self.dcx().emit_err(DotDotDotRangeToPatternNotAllowed { span: re.span });
         }
         Ok(PatKind::Range(None, Some(end), re))
     }
@@ -790,27 +936,58 @@ impl<'a> Parser<'a> {
                 || t.kind == token::Dot // e.g. `.5` for recovery;
                 || t.can_begin_literal_maybe_minus() // e.g. `42`.
                 || t.is_whole_expr()
+                || t.is_lifetime() // recover `'a` instead of `'a'`
+                || (self.may_recover() // recover leading `(`
+                    && t.kind == token::OpenDelim(Delimiter::Parenthesis)
+                    && self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis))
+                    && self.is_pat_range_end_start(dist + 1))
             })
     }
 
+    /// Parse a range pattern end bound
     fn parse_pat_range_end(&mut self) -> PResult<'a, P<Expr>> {
-        if self.check_inline_const(0) {
+        // recover leading `(`
+        let open_paren = (self.may_recover()
+            && self.eat_noexpect(&token::OpenDelim(Delimiter::Parenthesis)))
+        .then_some(self.prev_token.span);
+
+        let bound = if self.check_inline_const(0) {
             self.parse_const_block(self.token.span, true)
         } else if self.check_path() {
             let lo = self.token.span;
             let (qself, path) = if self.eat_lt() {
                 // Parse a qualified path
-                let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
+                let (qself, path) = self.parse_qpath(PathStyle::Pat)?;
                 (Some(qself), path)
             } else {
                 // Parse an unqualified path
-                (None, self.parse_path(PathStyle::Expr)?)
+                (None, self.parse_path(PathStyle::Pat)?)
             };
             let hi = self.prev_token.span;
-            Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path), AttrVec::new()))
+            Ok(self.mk_expr(lo.to(hi), ExprKind::Path(qself, path)))
         } else {
             self.parse_literal_maybe_minus()
+        }?;
+
+        let recovered = self.maybe_recover_trailing_expr(bound.span, true);
+
+        // recover trailing `)`
+        if let Some(open_paren) = open_paren {
+            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+
+            self.dcx().emit_err(UnexpectedParenInRangePat {
+                span: vec![open_paren, self.prev_token.span],
+                sugg: UnexpectedParenInRangePatSugg {
+                    start_span: open_paren,
+                    end_span: self.prev_token.span,
+                },
+            });
         }
+
+        Ok(match recovered {
+            Some(_) => self.mk_expr_err(bound.span),
+            None => bound,
+        })
     }
 
     /// Is this the start of a pattern beginning with a path?
@@ -838,10 +1015,26 @@ impl<'a> Parser<'a> {
     /// Parses `ident` or `ident @ pat`.
     /// Used by the copy foo and ref foo patterns to give a good
     /// error message when parsing mistakes like `ref foo(a, b)`.
-    fn parse_pat_ident(&mut self, binding_mode: BindingMode) -> PResult<'a, PatKind> {
-        let ident = self.parse_ident()?;
+    fn parse_pat_ident(
+        &mut self,
+        binding_annotation: BindingAnnotation,
+        syntax_loc: Option<PatternLocation>,
+    ) -> PResult<'a, PatKind> {
+        let ident = self.parse_ident_common(false)?;
+
+        if self.may_recover()
+            && !matches!(syntax_loc, Some(PatternLocation::FunctionParameter))
+            && self.check_noexpect(&token::Lt)
+            && self.look_ahead(1, |t| t.can_begin_type())
+        {
+            return Err(self.dcx().create_err(GenericArgsInPatRequireTurbofishSyntax {
+                span: self.token.span,
+                suggest_turbofish: self.token.span.shrink_to_lo(),
+            }));
+        }
+
         let sub = if self.eat(&token::At) {
-            Some(self.parse_pat_no_top_alt(Some("binding pattern"))?)
+            Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
         } else {
             None
         };
@@ -853,14 +1046,25 @@ impl<'a> Parser<'a> {
         // will direct us over to `parse_enum_variant()`.
         if self.token == token::OpenDelim(Delimiter::Parenthesis) {
             return Err(self
-                .struct_span_err(self.prev_token.span, "expected identifier, found enum pattern"));
+                .dcx()
+                .create_err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }));
         }
 
-        Ok(PatKind::Ident(binding_mode, ident, sub))
+        // Check for method calls after the `ident`,
+        // but not `ident @ subpat` as `subpat` was already checked and `ident` continues with `@`.
+
+        let pat = if sub.is_none()
+            && let Some(guar) = self.maybe_recover_trailing_expr(ident.span, false)
+        {
+            PatKind::Err(guar)
+        } else {
+            PatKind::Ident(binding_annotation, ident, sub)
+        };
+        Ok(pat)
     }
 
     /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
-    fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+    fn parse_pat_struct(&mut self, qself: Option<P<QSelf>>, path: Path) -> PResult<'a, PatKind> {
         if qself.is_some() {
             // Feature gate the use of qualified paths in patterns
             self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
@@ -870,14 +1074,19 @@ impl<'a> Parser<'a> {
             e.span_label(path.span, "while parsing the fields for this pattern");
             e.emit();
             self.recover_stmt();
-            (vec![], true)
+            // When recovering, pretend we had `Foo { .. }`, to avoid cascading errors.
+            (ThinVec::new(), PatFieldsRest::Rest)
         });
         self.bump();
         Ok(PatKind::Struct(qself, path, fields, etc))
     }
 
     /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
-    fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+    fn parse_pat_tuple_struct(
+        &mut self,
+        qself: Option<P<QSelf>>,
+        path: Path,
+    ) -> PResult<'a, PatKind> {
         let (fields, _) = self.parse_paren_comma_seq(|p| {
             p.parse_pat_allow_top_alt(
                 None,
@@ -915,52 +1124,44 @@ impl<'a> Parser<'a> {
         let box_span = self.prev_token.span;
 
         if self.isnt_pattern_start() {
-            self.struct_span_err(
-                self.token.span,
-                format!("expected pattern, found {}", super::token_descr(&self.token)),
-            )
-            .span_note(box_span, "`box` is a reserved keyword")
-            .span_suggestion_verbose(
-                box_span.shrink_to_lo(),
-                "escape `box` to use it as an identifier",
-                "r#",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+            let descr = super::token_descr(&self.token);
+            self.dcx().emit_err(errors::BoxNotPat {
+                span: self.token.span,
+                kw: box_span,
+                lo: box_span.shrink_to_lo(),
+                descr,
+            });
 
             // We cannot use `parse_pat_ident()` since it will complain `box`
             // is not an identifier.
             let sub = if self.eat(&token::At) {
-                Some(self.parse_pat_no_top_alt(Some("binding pattern"))?)
+                Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
             } else {
                 None
             };
 
-            Ok(PatKind::Ident(
-                BindingMode::ByValue(Mutability::Not),
-                Ident::new(kw::Box, box_span),
-                sub,
-            ))
+            Ok(PatKind::Ident(BindingAnnotation::NONE, Ident::new(kw::Box, box_span), sub))
         } else {
-            let pat = self.parse_pat_with_range_pat(false, None)?;
+            let pat = self.parse_pat_with_range_pat(false, None, None)?;
             self.sess.gated_spans.gate(sym::box_patterns, box_span.to(self.prev_token.span));
             Ok(PatKind::Box(pat))
         }
     }
 
     /// Parses the fields of a struct-like pattern.
-    fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<PatField>, bool)> {
-        let mut fields = Vec::new();
-        let mut etc = false;
+    fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, PatFieldsRest)> {
+        let mut fields = ThinVec::new();
+        let mut etc = PatFieldsRest::None;
         let mut ate_comma = true;
-        let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
-        let mut etc_span = None;
+        let mut delayed_err: Option<DiagnosticBuilder<'a>> = None;
+        let mut first_etc_and_maybe_comma_span = None;
+        let mut last_non_comma_dotdot_span = None;
 
         while self.token != token::CloseDelim(Delimiter::Brace) {
             let attrs = match self.parse_outer_attributes() {
                 Ok(attrs) => attrs,
                 Err(err) => {
-                    if let Some(mut delayed) = delayed_err {
+                    if let Some(delayed) = delayed_err {
                         delayed.emit();
                     }
                     return Err(err);
@@ -970,28 +1171,48 @@ impl<'a> Parser<'a> {
 
             // check that a comma comes after every field
             if !ate_comma {
-                let err = self.struct_span_err(self.token.span, "expected `,`");
-                if let Some(mut delayed) = delayed_err {
+                let mut err =
+                    self.dcx().create_err(ExpectedCommaAfterPatternField { span: self.token.span });
+                if let Some(delayed) = delayed_err {
                     delayed.emit();
                 }
+                self.recover_misplaced_pattern_modifiers(&fields, &mut err);
                 return Err(err);
             }
             ate_comma = false;
 
-            if self.check(&token::DotDot) || self.token == token::DotDotDot {
-                etc = true;
+            if self.check(&token::DotDot)
+                || self.check_noexpect(&token::DotDotDot)
+                || self.check_keyword(kw::Underscore)
+            {
+                etc = PatFieldsRest::Rest;
                 let mut etc_sp = self.token.span;
+                if first_etc_and_maybe_comma_span.is_none() {
+                    if let Some(comma_tok) = self
+                        .look_ahead(1, |t| if *t == token::Comma { Some(t.clone()) } else { None })
+                    {
+                        let nw_span = self
+                            .sess
+                            .source_map()
+                            .span_extend_to_line(comma_tok.span)
+                            .trim_start(comma_tok.span.shrink_to_lo())
+                            .map(|s| self.sess.source_map().span_until_non_whitespace(s));
+                        first_etc_and_maybe_comma_span = nw_span.map(|s| etc_sp.to(s));
+                    } else {
+                        first_etc_and_maybe_comma_span =
+                            Some(self.sess.source_map().span_until_non_whitespace(etc_sp));
+                    }
+                }
 
-                self.recover_one_fewer_dotdot();
-                self.bump(); // `..` || `...`
+                self.recover_bad_dot_dot();
+                self.bump(); // `..` || `...` || `_`
 
                 if self.token == token::CloseDelim(Delimiter::Brace) {
-                    etc_span = Some(etc_sp);
                     break;
                 }
                 let token_str = super::token_descr(&self.token);
-                let msg = &format!("expected `}}`, found {}", token_str);
-                let mut err = self.struct_span_err(self.token.span, msg);
+                let msg = format!("expected `}}`, found {token_str}");
+                let mut err = self.dcx().struct_span_err(self.token.span, msg);
 
                 err.span_label(self.token.span, "expected `}`");
                 let mut comma_sp = None;
@@ -1008,7 +1229,6 @@ impl<'a> Parser<'a> {
                     ate_comma = true;
                 }
 
-                etc_span = Some(etc_sp.until(self.token.span));
                 if self.token == token::CloseDelim(Delimiter::Brace) {
                     // If the struct looks otherwise well formed, recover and continue.
                     if let Some(sp) = comma_sp {
@@ -1026,14 +1246,14 @@ impl<'a> Parser<'a> {
                     // This way we avoid "pattern missing fields" errors afterwards.
                     // We delay this error until the end in order to have a span for a
                     // suggested fix.
-                    if let Some(mut delayed_err) = delayed_err {
+                    if let Some(delayed_err) = delayed_err {
                         delayed_err.emit();
                         return Err(err);
                     } else {
                         delayed_err = Some(err);
                     }
                 } else {
-                    if let Some(mut err) = delayed_err {
+                    if let Some(err) = delayed_err {
                         err.emit();
                     }
                     return Err(err);
@@ -1045,13 +1265,16 @@ impl<'a> Parser<'a> {
                     let field = match this.parse_pat_field(lo, attrs) {
                         Ok(field) => Ok(field),
                         Err(err) => {
-                            if let Some(mut delayed_err) = delayed_err.take() {
+                            if let Some(delayed_err) = delayed_err.take() {
                                 delayed_err.emit();
                             }
                             return Err(err);
                         }
                     }?;
                     ate_comma = this.eat(&token::Comma);
+
+                    last_non_comma_dotdot_span = Some(this.prev_token.span);
+
                     // We just ate a comma, so there's no need to use
                     // `TrailingToken::Comma`
                     Ok((field, TrailingToken::None))
@@ -1061,39 +1284,79 @@ impl<'a> Parser<'a> {
         }
 
         if let Some(mut err) = delayed_err {
-            if let Some(etc_span) = etc_span {
-                err.multipart_suggestion(
-                    "move the `..` to the end of the field list",
-                    vec![
-                        (etc_span, String::new()),
-                        (self.token.span, format!("{}.. }}", if ate_comma { "" } else { ", " })),
-                    ],
-                    Applicability::MachineApplicable,
-                );
+            if let Some(first_etc_span) = first_etc_and_maybe_comma_span {
+                if self.prev_token == token::DotDot {
+                    // We have `.., x, ..`.
+                    err.multipart_suggestion(
+                        "remove the starting `..`",
+                        vec![(first_etc_span, String::new())],
+                        Applicability::MachineApplicable,
+                    );
+                } else {
+                    if let Some(last_non_comma_dotdot_span) = last_non_comma_dotdot_span {
+                        // We have `.., x`.
+                        err.multipart_suggestion(
+                            "move the `..` to the end of the field list",
+                            vec![
+                                (first_etc_span, String::new()),
+                                (
+                                    self.token.span.to(last_non_comma_dotdot_span.shrink_to_hi()),
+                                    format!("{} .. }}", if ate_comma { "" } else { "," }),
+                                ),
+                            ],
+                            Applicability::MachineApplicable,
+                        );
+                    }
+                }
             }
             err.emit();
         }
         Ok((fields, etc))
     }
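Editor's sketch of the struct-pattern inputs the reworked `..` bookkeeping distinguishes; the suggestions match the two `multipart_suggestion` calls above:

    let Point { .., x } = p;       // `.., x`: move the `..` to the end of the field list
    let Point { .., x, .. } = p;   // `.., x, ..`: remove the starting `..`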
 
-    /// Recover on `...` as if it were `..` to avoid further errors.
+    /// If the user writes `S { ref field: name }` instead of `S { field: ref name }`, we suggest
+    /// the correct code.
+    fn recover_misplaced_pattern_modifiers(
+        &self,
+        fields: &ThinVec<PatField>,
+        err: &mut DiagnosticBuilder<'a>,
+    ) {
+        if let Some(last) = fields.iter().last()
+            && last.is_shorthand
+            && let PatKind::Ident(binding, ident, None) = last.pat.kind
+            && binding != BindingAnnotation::NONE
+            && self.token == token::Colon
+            // We found `ref mut? ident:`, try to parse a `name,` or `name }`.
+            && let Some(name_span) = self.look_ahead(1, |t| t.is_ident().then(|| t.span))
+            && self.look_ahead(2, |t| {
+                t == &token::Comma || t == &token::CloseDelim(Delimiter::Brace)
+            })
+        {
+            let span = last.pat.span.with_hi(ident.span.lo());
+            // We have `S { ref field: name }` instead of `S { field: ref name }`
+            err.multipart_suggestion(
+                "the pattern modifiers belong after the `:`",
+                vec![
+                    (span, String::new()),
+                    (name_span.shrink_to_lo(), binding.prefix_str().to_string()),
+                ],
+                Applicability::MachineApplicable,
+            );
+        }
+    }
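Editor's illustration of the case `recover_misplaced_pattern_modifiers` handles, as its doc comment describes:

    let S { ref field: name } = s;   // error: expected `,`
                                     // help: the pattern modifiers belong after the `:`,
                                     //       i.e. `S { field: ref name }`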
+
+    /// Recover on `...` or `_` as if it were `..` to avoid further errors.
     /// See issue #46718.
-    fn recover_one_fewer_dotdot(&self) {
-        if self.token != token::DotDotDot {
+    fn recover_bad_dot_dot(&self) {
+        if self.token == token::DotDot {
             return;
         }
 
-        self.struct_span_err(self.token.span, "expected field pattern, found `...`")
-            .span_suggestion(
-                self.token.span,
-                "to omit remaining fields, use one fewer `.`",
-                "..",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        let token_str = pprust::token_to_string(&self.token);
+        self.dcx().emit_err(DotDotDotForRemainingFields { span: self.token.span, token_str });
     }
 
-    fn parse_pat_field(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, PatField> {
+    fn parse_pat_field(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, PatField> {
         // Check if a colon exists one ahead. This means we're parsing a fieldname.
         let hi;
         let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
@@ -1117,14 +1380,12 @@ impl<'a> Parser<'a> {
             let fieldname = self.parse_field_name()?;
             hi = self.prev_token.span;
 
-            let bind_type = match (is_ref, is_mut) {
-                (true, true) => BindingMode::ByRef(Mutability::Mut),
-                (true, false) => BindingMode::ByRef(Mutability::Not),
-                (false, true) => BindingMode::ByValue(Mutability::Mut),
-                (false, false) => BindingMode::ByValue(Mutability::Not),
+            let mutability = match is_mut {
+                false => Mutability::Not,
+                true => Mutability::Mut,
             };
-
-            let fieldpat = self.mk_pat_ident(boxed_span.to(hi), bind_type, fieldname);
+            let ann = BindingAnnotation(ByRef::from(is_ref), mutability);
+            let fieldpat = self.mk_pat_ident(boxed_span.to(hi), ann, fieldname);
             let subpat =
                 if is_box { self.mk_pat(lo.to(hi), PatKind::Box(fieldpat)) } else { fieldpat };
             (subpat, fieldname, true)
@@ -1134,15 +1395,15 @@ impl<'a> Parser<'a> {
             ident: fieldname,
             pat: subpat,
             is_shorthand,
-            attrs: attrs.into(),
+            attrs,
             id: ast::DUMMY_NODE_ID,
             span: lo.to(hi),
             is_placeholder: false,
         })
     }
 
-    pub(super) fn mk_pat_ident(&self, span: Span, bm: BindingMode, ident: Ident) -> P<Pat> {
-        self.mk_pat(span, PatKind::Ident(bm, ident, None))
+    pub(super) fn mk_pat_ident(&self, span: Span, ann: BindingAnnotation, ident: Ident) -> P<Pat> {
+        self.mk_pat(span, PatKind::Ident(ann, ident, None))
     }
 
     pub(super) fn mk_pat(&self, span: Span, kind: PatKind) -> P<Pat> {
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index 5cf1758c31f..e7cad74b4dd 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -1,6 +1,7 @@
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, Restrictions, TokenType};
-use crate::maybe_whole;
+use crate::errors::PathSingleColon;
+use crate::{errors, maybe_whole};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::{
@@ -8,11 +9,11 @@ use rustc_ast::{
     AssocConstraintKind, BlockCheckMode, GenericArg, GenericArgs, Generics, ParenthesizedArgs,
     Path, PathSegment, QSelf,
 };
-use rustc_errors::{pluralize, Applicability, PResult};
-use rustc_span::source_map::{BytePos, Span};
+use rustc_errors::{Applicability, PResult};
 use rustc_span::symbol::{kw, sym, Ident};
-
+use rustc_span::{BytePos, Span};
 use std::mem;
+use thin_vec::ThinVec;
 use tracing::debug;
 
 /// Specifies how to parse a path.
@@ -24,7 +25,19 @@ pub enum PathStyle {
     /// In all such contexts the non-path interpretation is preferred by default for practical
     /// reasons, but the path interpretation can be forced by the disambiguator `::`, e.g.
     /// `x<y>` - comparisons, `x::<y>` - unambiguously a path.
+    ///
+    /// Also, a path may never be followed by a `:`. This means that we can eagerly recover if
+    /// we encounter it.
     Expr,
+    /// The same as `Expr`, but may be followed by a `:`.
+    /// For example, this code:
+    /// ```rust
+    /// struct S;
+    ///
+    /// let S: S;
+    /// //  ^ Followed by a `:`
+    /// ```
+    Pat,
     /// In other contexts, notably in types, no ambiguity exists and paths can be written
     /// without the disambiguator, e.g., `x<y>` - unambiguously a path.
     /// Paths with disambiguators are still accepted, `x::<Y>` - unambiguously a path too.
@@ -38,6 +51,12 @@ pub enum PathStyle {
     Mod,
 }
 
+impl PathStyle {
+    fn has_generic_ambiguity(&self) -> bool {
+        matches!(self, Self::Expr | Self::Pat)
+    }
+}
+
 impl<'a> Parser<'a> {
     /// Parses a qualified path.
     /// Assumes that the leading `<` has been parsed already.
@@ -49,7 +68,7 @@ impl<'a> Parser<'a> {
     /// `<T as U>::a`
     /// `<T as U>::F::a<S>` (without disambiguator)
     /// `<T as U>::F::a::<S>` (with disambiguator)
-    pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> {
+    pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (P<QSelf>, Path)> {
         let lo = self.prev_token.span;
         let ty = self.parse_ty()?;
 
@@ -64,7 +83,7 @@ impl<'a> Parser<'a> {
             path_span = path_lo.to(self.prev_token.span);
         } else {
             path_span = self.token.span.to(self.token.span);
-            path = ast::Path { segments: Vec::new(), span: path_span, tokens: None };
+            path = ast::Path { segments: ThinVec::new(), span: path_span, tokens: None };
         }
 
         // See doc comment for `unmatched_angle_bracket_count`.
@@ -78,7 +97,7 @@ impl<'a> Parser<'a> {
             self.expect(&token::ModSep)?;
         }
 
-        let qself = QSelf { ty, path_span, position: path.segments.len() };
+        let qself = P(QSelf { ty, path_span, position: path.segments.len() });
         self.parse_path_segments(&mut path.segments, style, None)?;
 
         Ok((
@@ -104,12 +123,12 @@ impl<'a> Parser<'a> {
 
         self.bump(); // colon
 
-        self.diagnostic()
+        self.dcx()
             .struct_span_err(
                 self.prev_token.span,
                 "found single colon before projection in qualified path",
             )
-            .span_suggestion(
+            .with_span_suggestion(
                 self.prev_token.span,
                 "use double colon",
                 "::",
@@ -150,16 +169,13 @@ impl<'a> Parser<'a> {
             //
             if style == PathStyle::Mod && path.segments.iter().any(|segment| segment.args.is_some())
             {
-                parser
-                    .struct_span_err(
-                        path.segments
-                            .iter()
-                            .filter_map(|segment| segment.args.as_ref())
-                            .map(|arg| arg.span())
-                            .collect::<Vec<_>>(),
-                        "unexpected generic arguments in path",
-                    )
-                    .emit();
+                let span = path
+                    .segments
+                    .iter()
+                    .filter_map(|segment| segment.args.as_ref())
+                    .map(|arg| arg.span())
+                    .collect::<Vec<_>>();
+                parser.dcx().emit_err(errors::GenericsInPath { span });
             }
         };
 
@@ -169,7 +185,7 @@ impl<'a> Parser<'a> {
         });
 
         if let token::Interpolated(nt) = &self.token.kind {
-            if let token::NtTy(ty) = &**nt {
+            if let token::NtTy(ty) = &nt.0 {
                 if let ast::TyKind::Path(None, path) = &ty.kind {
                     let path = path.clone();
                     self.bump();
@@ -180,25 +196,24 @@ impl<'a> Parser<'a> {
         }
 
         let lo = self.token.span;
-        let mut segments = Vec::new();
+        let mut segments = ThinVec::new();
         let mod_sep_ctxt = self.token.span.ctxt();
         if self.eat(&token::ModSep) {
             segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
         }
         self.parse_path_segments(&mut segments, style, ty_generics)?;
-
         Ok(Path { segments, span: lo.to(self.prev_token.span), tokens: None })
     }
 
     pub(super) fn parse_path_segments(
         &mut self,
-        segments: &mut Vec<PathSegment>,
+        segments: &mut ThinVec<PathSegment>,
         style: PathStyle,
         ty_generics: Option<&Generics>,
     ) -> PResult<'a, ()> {
         loop {
             let segment = self.parse_path_segment(style, ty_generics)?;
-            if style == PathStyle::Expr {
+            if style.has_generic_ambiguity() {
                 // In order to check for trailing angle brackets, we must have finished
                 // recursing (`parse_path_segment` can indirectly call this function),
                 // that is, the next token must be the highlighted part of the below example:
@@ -220,6 +235,29 @@ impl<'a> Parser<'a> {
             segments.push(segment);
 
             if self.is_import_coupler() || !self.eat(&token::ModSep) {
+                if style == PathStyle::Expr
+                    && self.may_recover()
+                    && self.token == token::Colon
+                    && self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
+                {
+                    // Emit a special error message for `a::b:c` to help users;
+                    // otherwise, `a: c` might have been meant to introduce a new binding
+                    if self.token.span.lo() == self.prev_token.span.hi()
+                        && self.look_ahead(1, |token| self.token.span.hi() == token.span.lo())
+                    {
+                        self.bump(); // bump past the colon
+                        self.dcx().emit_err(PathSingleColon {
+                            span: self.prev_token.span,
+                            type_ascription: self
+                                .sess
+                                .unstable_features
+                                .is_nightly_build()
+                                .then_some(()),
+                        });
+                    }
+                    continue;
+                }
+
                 return Ok(());
             }
         }
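The single-colon recovery added above only fires when the `:` is written flush against both neighbours, as in `a::b:c`; otherwise `ident: ty` could be a legitimate binding. A rough standalone sketch of that adjacency test, using plain byte offsets in place of rustc's `Span` (all names here are illustrative):

```rust
/// A toy span: half-open byte range [lo, hi) within the source text.
#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32 }

/// Mirrors the shape of the check above: treat `:` as a typo for `::` only when
/// the previous token, the colon, and the following identifier are all glued
/// together with no whitespace in between.
fn colon_looks_like_path_sep(prev: Span, colon: Span, next: Span) -> bool {
    prev.hi == colon.lo && colon.hi == next.lo
}

fn main() {
    // `a::b:c` -- everything adjacent, so suggest `::`.
    let (b, colon, c) = (Span { lo: 3, hi: 4 }, Span { lo: 4, hi: 5 }, Span { lo: 5, hi: 6 });
    assert!(colon_looks_like_path_sep(b, colon, c));

    // `a::b : c` -- spaces around the colon, likely a binding or type position.
    let (b, colon, c) = (Span { lo: 3, hi: 4 }, Span { lo: 5, hi: 6 }, Span { lo: 7, hi: 8 });
    assert!(!colon_looks_like_path_sep(b, colon, c));
}
```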
@@ -273,12 +311,27 @@ impl<'a> Parser<'a> {
                         ty_generics,
                     )?;
                     self.expect_gt().map_err(|mut err| {
+                        // Try to recover a `:` into a `::`
+                        if self.token == token::Colon
+                            && self.look_ahead(1, |token| {
+                                token.is_ident() && !token.is_reserved_ident()
+                            })
+                        {
+                            err.cancel();
+                            err = self.dcx().create_err(PathSingleColon {
+                                span: self.token.span,
+                                type_ascription: self
+                                    .sess
+                                    .unstable_features
+                                    .is_nightly_build()
+                                    .then_some(()),
+                            });
+                        }
                         // Attempt to find places where a missing `>` might belong.
-                        if let Some(arg) = args
+                        else if let Some(arg) = args
                             .iter()
                             .rev()
-                            .skip_while(|arg| matches!(arg, AngleBracketedArg::Constraint(_)))
-                            .next()
+                            .find(|arg| !matches!(arg, AngleBracketedArg::Constraint(_)))
                         {
                             err.span_suggestion_verbose(
                                 arg.span().shrink_to_hi(),
@@ -291,6 +344,32 @@ impl<'a> Parser<'a> {
                     })?;
                     let span = lo.to(self.prev_token.span);
                     AngleBracketedArgs { args, span }.into()
+                } else if self.may_recover()
+                    && self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+                    // FIXME(return_type_notation): Could also recover `...` here.
+                    && self.look_ahead(1, |tok| tok.kind == token::DotDot)
+                {
+                    self.bump();
+                    self.dcx()
+                        .emit_err(errors::BadReturnTypeNotationDotDot { span: self.token.span });
+                    self.bump();
+                    self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+                    let span = lo.to(self.prev_token.span);
+
+                    if self.eat_noexpect(&token::RArrow) {
+                        let lo = self.prev_token.span;
+                        let ty = self.parse_ty()?;
+                        self.dcx()
+                            .emit_err(errors::BadReturnTypeNotationOutput { span: lo.to(ty.span) });
+                    }
+
+                    ParenthesizedArgs {
+                        span,
+                        inputs: ThinVec::new(),
+                        inputs_span: span,
+                        output: ast::FnRetTy::Default(self.prev_token.span.shrink_to_hi()),
+                    }
+                    .into()
                 } else {
                     // `(T, U) -> R`
                     let (inputs, _) = self.parse_paren_comma_seq(|p| p.parse_ty())?;
@@ -301,7 +380,7 @@ impl<'a> Parser<'a> {
                     ParenthesizedArgs { span, inputs, inputs_span, output }.into()
                 };
 
-                PathSegment { ident, args, id: ast::DUMMY_NODE_ID }
+                PathSegment { ident, args: Some(args), id: ast::DUMMY_NODE_ID }
             } else {
                 // Generic arguments are not found.
                 PathSegment::from_ident(ident)
@@ -333,7 +412,7 @@ impl<'a> Parser<'a> {
         style: PathStyle,
         lo: Span,
         ty_generics: Option<&Generics>,
-    ) -> PResult<'a, Vec<AngleBracketedArg>> {
+    ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
         // We need to detect whether there are extra leading left angle brackets and produce an
         // appropriate error and suggestion. This cannot be implemented by looking ahead at
         // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
@@ -405,12 +484,26 @@ impl<'a> Parser<'a> {
 
         let is_first_invocation = style == PathStyle::Expr;
         // Take a snapshot before attempting to parse - we can restore this later.
-        let snapshot = if is_first_invocation { Some(self.clone()) } else { None };
+        let snapshot = is_first_invocation.then(|| self.clone());
 
+        self.angle_bracket_nesting += 1;
         debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
         match self.parse_angle_args(ty_generics) {
-            Ok(args) => Ok(args),
+            Ok(args) => {
+                self.angle_bracket_nesting -= 1;
+                Ok(args)
+            }
+            Err(e) if self.angle_bracket_nesting > 10 => {
+                self.angle_bracket_nesting -= 1;
+                // When encountering severely malformed code where there are several levels of
+                // nested unclosed angle args (`f::<f::<f::<f::<...`), we avoid severe O(n^2)
+                // behavior by bailing out earlier (#117080).
+                e.emit();
+                rustc_errors::FatalError.raise();
+            }
             Err(e) if is_first_invocation && self.unmatched_angle_bracket_count > 0 => {
+                self.angle_bracket_nesting -= 1;
+
                 // Swap `self` with our backup of the parser state before attempting to parse
                 // generic arguments.
                 let snapshot = mem::replace(self, snapshot.unwrap());
@@ -440,31 +533,21 @@ impl<'a> Parser<'a> {
                     // Make a span over ${unmatched angle bracket count} characters.
                     // This is safe because `all_angle_brackets` ensures that there are only `<`s,
                     // i.e. no multibyte characters, in this range.
-                    let span =
-                        lo.with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count));
-                    self.struct_span_err(
-                        span,
-                        &format!(
-                            "unmatched angle bracket{}",
-                            pluralize!(snapshot.unmatched_angle_bracket_count)
-                        ),
-                    )
-                    .span_suggestion(
+                    let span = lo
+                        .with_hi(lo.lo() + BytePos(snapshot.unmatched_angle_bracket_count.into()));
+                    self.dcx().emit_err(errors::UnmatchedAngle {
                         span,
-                        &format!(
-                            "remove extra angle bracket{}",
-                            pluralize!(snapshot.unmatched_angle_bracket_count)
-                        ),
-                        "",
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                        plural: snapshot.unmatched_angle_bracket_count > 1,
+                    });
 
                     // Try again without unmatched angle bracket characters.
                     self.parse_angle_args(ty_generics)
                 }
             }
-            Err(e) => Err(e),
+            Err(e) => {
+                self.angle_bracket_nesting -= 1;
+                Err(e)
+            }
         }
     }
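The new `angle_bracket_nesting` counter bounds how often the snapshot-and-retry recovery may re-enter itself before giving up. A simplified sketch of the same guard pattern, with a plain counter, the same threshold of 10, and made-up result types:

```rust
// Illustrative only: a recursive parse step that tracks its own nesting depth
// and aborts once recovery has re-entered itself too many times.
struct Recovery { nesting: u32 }

#[derive(Debug)]
enum Outcome { Parsed, Recovered, Aborted }

impl Recovery {
    fn parse_angle_args(&mut self, remaining: u32) -> Outcome {
        self.nesting += 1;
        let result = if remaining == 0 {
            Outcome::Parsed
        } else if self.nesting > 10 {
            // Severely malformed input (`f::<f::<f::<...`): stop here instead of
            // retrying at every level, which is what causes the blow-up.
            Outcome::Aborted
        } else {
            // Pretend each level fails and recovers by trying one level deeper.
            match self.parse_angle_args(remaining - 1) {
                Outcome::Aborted => Outcome::Aborted,
                _ => Outcome::Recovered,
            }
        };
        self.nesting -= 1;
        result
    }
}

fn main() {
    assert!(matches!(Recovery { nesting: 0 }.parse_angle_args(3), Outcome::Recovered));
    assert!(matches!(Recovery { nesting: 0 }.parse_angle_args(50), Outcome::Aborted));
}
```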
 
@@ -473,8 +556,8 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_angle_args(
         &mut self,
         ty_generics: Option<&Generics>,
-    ) -> PResult<'a, Vec<AngleBracketedArg>> {
-        let mut args = Vec::new();
+    ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
+        let mut args = ThinVec::new();
         while let Some(arg) = self.parse_angle_arg(ty_generics)? {
             args.push(arg);
             if !self.eat(&token::Comma) {
@@ -527,11 +610,11 @@ impl<'a> Parser<'a> {
                         Ok(ident_gen_args) => ident_gen_args,
                         Err(()) => return Ok(Some(AngleBracketedArg::Arg(arg))),
                     };
-                    if binder.is_some() {
+                    if binder {
                         // FIXME(compiler-errors): this could be improved by suggesting lifting
                         // this up to the trait, at least before this becomes real syntax.
                         // e.g. `Trait<for<'a> Assoc = Ty>` -> `for<'a> Trait<Assoc = Ty>`
-                        return Err(self.struct_span_err(
+                        return Err(self.dcx().struct_span_err(
                             arg_span,
                             "`for<...>` is not allowed on associated type bounds",
                         ));
@@ -539,7 +622,7 @@ impl<'a> Parser<'a> {
                     let kind = if self.eat(&token::Colon) {
                         // Parse associated type constraint bound.
 
-                        let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?;
+                        let bounds = self.parse_generic_bounds()?;
                         AssocConstraintKind::Bound { bounds }
                     } else if self.eat(&token::Eq) {
                         self.parse_assoc_equality_term(ident, self.prev_token.span)?
@@ -548,10 +631,16 @@ impl<'a> Parser<'a> {
                     };
 
                     let span = lo.to(self.prev_token.span);
-
                     // Gate associated type bounds, e.g., `Iterator<Item: Ord>`.
                     if let AssocConstraintKind::Bound { .. } = kind {
-                        self.sess.gated_spans.gate(sym::associated_type_bounds, span);
+                        if let Some(ast::GenericArgs::Parenthesized(args)) = &gen_args
+                            && args.inputs.is_empty()
+                            && matches!(args.output, ast::FnRetTy::Default(..))
+                        {
+                            self.sess.gated_spans.gate(sym::return_type_notation, span);
+                        } else {
+                            self.sess.gated_spans.gate(sym::associated_type_bounds, span);
+                        }
                     }
                     let constraint =
                         AssocConstraint { id: ast::DUMMY_NODE_ID, ident, gen_args, kind, span };
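The gating change above sends `Trait::method(..): Bound`-style constraints (empty parenthesized args, default output) to the `return_type_notation` gate and every other `Ident: Bound` constraint to `associated_type_bounds`. A small standalone sketch of that decision, with hypothetical stand-ins for the AST types:

```rust
// Hypothetical stand-ins for the relevant bits of rustc_ast; only the shape of
// the decision below is meant to match the hunk above.
#[allow(dead_code)]
enum FnRetTy { Default, Ty(&'static str) }
struct ParenthesizedArgs { inputs: Vec<&'static str>, output: FnRetTy }
#[allow(dead_code)]
enum GenericArgs { AngleBracketed, Parenthesized(ParenthesizedArgs) }

fn feature_gate_for_bound(gen_args: Option<&GenericArgs>) -> &'static str {
    // `Trait::method(..): Bound` parses as parenthesized args with no inputs and a
    // default return type, so it is gated as return type notation; everything else
    // stays under associated type bounds.
    match gen_args {
        Some(GenericArgs::Parenthesized(args))
            if args.inputs.is_empty() && matches!(args.output, FnRetTy::Default) =>
        {
            "return_type_notation"
        }
        _ => "associated_type_bounds",
    }
}

fn main() {
    let rtn = GenericArgs::Parenthesized(ParenthesizedArgs {
        inputs: vec![],
        output: FnRetTy::Default,
    });
    assert_eq!(feature_gate_for_bound(Some(&rtn)), "return_type_notation");
    assert_eq!(feature_gate_for_bound(Some(&GenericArgs::AngleBracketed)), "associated_type_bounds");
}
```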
@@ -589,16 +678,14 @@ impl<'a> Parser<'a> {
                 c.into()
             }
             Some(GenericArg::Lifetime(lt)) => {
-                self.struct_span_err(span, "associated lifetimes are not supported")
-                    .span_label(lt.ident.span, "the lifetime is given here")
-                    .help("if you meant to specify a trait object, write `dyn Trait + 'lifetime`")
-                    .emit();
+                self.dcx().emit_err(errors::AssocLifetime { span, lifetime: lt.ident.span });
                 self.mk_ty(span, ast::TyKind::Err).into()
             }
             None => {
                 let after_eq = eq.shrink_to_hi();
                 let before_next = self.token.span.shrink_to_lo();
                 let mut err = self
+                    .dcx()
                     .struct_span_err(after_eq.to(before_next), "missing type to the right of `=`");
                 if matches!(self.token.kind, token::Comma | token::Gt) {
                     err.span_suggestion(
@@ -609,14 +696,14 @@ impl<'a> Parser<'a> {
                     );
                     err.span_suggestion(
                         eq.to(before_next),
-                        &format!("remove the `=` if `{}` is a type", ident),
+                        format!("remove the `=` if `{ident}` is a type"),
                         "",
                         Applicability::MaybeIncorrect,
                     )
                 } else {
                     err.span_label(
                         self.token.span,
-                        &format!("expected type, found {}", super::token_descr(&self.token)),
+                        format!("expected type, found {}", super::token_descr(&self.token)),
                     )
                 };
                 return Err(err);
@@ -632,7 +719,9 @@ impl<'a> Parser<'a> {
     /// - A single-segment path.
     pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
         match &expr.kind {
-            ast::ExprKind::Block(_, _) | ast::ExprKind::Lit(_) => true,
+            ast::ExprKind::Block(_, _)
+            | ast::ExprKind::Lit(_)
+            | ast::ExprKind::IncludedBytes(..) => true,
             ast::ExprKind::Unary(ast::UnOp::Neg, expr) => {
                 matches!(expr.kind, ast::ExprKind::Lit(_))
             }
@@ -652,12 +741,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
         // Parse const argument.
         let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
-            self.parse_block_expr(
-                None,
-                self.token.span,
-                BlockCheckMode::Default,
-                ast::AttrVec::new(),
-            )?
+            self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?
         } else {
             self.handle_unambiguous_unbraced_const_arg()?
         };
@@ -679,22 +763,44 @@ impl<'a> Parser<'a> {
             GenericArg::Const(self.parse_const_arg()?)
         } else if self.check_type() {
             // Parse type argument.
-            let is_const_fn =
-                self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis));
-            let mut snapshot = self.create_snapshot_for_diagnostic();
+
+            // Proactively create a parser snapshot enabling us to rewind and try to reparse the
+            // input as a const expression in case we fail to parse a type. If we successfully
+            // do so, we will report an error that it needs to be wrapped in braces.
+            let mut snapshot = None;
+            if self.may_recover() && self.token.can_begin_expr() {
+                snapshot = Some(self.create_snapshot_for_diagnostic());
+            }
+
             match self.parse_ty() {
-                Ok(ty) => GenericArg::Type(ty),
+                Ok(ty) => {
+                    // Since the type parser recovers from some malformed slice and array types and
+                    // successfully returns a type, we need to look for `TyKind::Err`s in the
+                    // type to determine if error recovery has occurred and if the input is not a
+                    // syntactically valid type after all.
+                    if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind
+                        && let ast::TyKind::Err = inner_ty.kind
+                        && let Some(snapshot) = snapshot
+                        && let Some(expr) =
+                            self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+                    {
+                        return Ok(Some(
+                            self.dummy_const_arg_needs_braces(
+                                self.dcx()
+                                    .struct_span_err(expr.span, "invalid const generic expression"),
+                                expr.span,
+                            ),
+                        ));
+                    }
+
+                    GenericArg::Type(ty)
+                }
                 Err(err) => {
-                    if is_const_fn {
-                        match (*snapshot).parse_expr_res(Restrictions::CONST_EXPR, None) {
-                            Ok(expr) => {
-                                self.restore_snapshot(snapshot);
-                                return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
-                            }
-                            Err(err) => {
-                                err.cancel();
-                            }
-                        }
+                    if let Some(snapshot) = snapshot
+                        && let Some(expr) =
+                            self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+                    {
+                        return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
                     }
                     // Try to recover from possible `const` arg without braces.
                     return self.recover_const_arg(start, err).map(Some);
@@ -709,7 +815,7 @@ impl<'a> Parser<'a> {
             match self.parse_expr_res(Restrictions::CONST_EXPR, None) {
                 Ok(expr) => {
                     return Ok(Some(self.dummy_const_arg_needs_braces(
-                        self.struct_span_err(expr.span, "invalid const generic expression"),
+                        self.dcx().struct_span_err(expr.span, "invalid const generic expression"),
                         expr.span,
                     )));
                 }
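The recovery paths above deal with const generic arguments written without braces. As a reference point (not part of the patch), a tiny stable-Rust example of the surface syntax involved: a bare literal or path is accepted as-is, while a more complex expression must be braced, which is the form these diagnostics point users toward:

```rust
fn take<const N: usize>() -> usize {
    N
}

fn main() {
    // A bare literal is fine as a const argument...
    assert_eq!(take::<2>(), 2);
    // ...but an expression such as `1 + 1` must be braced; `take::<1 + 1>()` is
    // the kind of input the parser recovers from with a "needs braces" error.
    assert_eq!(take::<{ 1 + 1 }>(), 2);
}
```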
@@ -725,28 +831,24 @@ impl<'a> Parser<'a> {
 
     /// Given an arg inside of generics, we try to destructure it as if it were the LHS in
     /// `LHS = ...`, i.e. an associated type binding.
-    /// This returns (optionally, if they are present) any `for<'a, 'b>` binder args, the
+    /// This returns a bool indicating if there are any `for<'a, 'b>` binder args, the
     /// identifier, and any GAT arguments.
     fn get_ident_from_generic_arg(
         &self,
         gen_arg: &GenericArg,
-    ) -> Result<(Option<Vec<ast::GenericParam>>, Ident, Option<GenericArgs>), ()> {
+    ) -> Result<(bool, Ident, Option<GenericArgs>), ()> {
         if let GenericArg::Type(ty) = gen_arg {
             if let ast::TyKind::Path(qself, path) = &ty.kind
                 && qself.is_none()
                 && let [seg] = path.segments.as_slice()
             {
-                return Ok((None, seg.ident, seg.args.as_deref().cloned()));
+                return Ok((false, seg.ident, seg.args.as_deref().cloned()));
             } else if let ast::TyKind::TraitObject(bounds, ast::TraitObjectSyntax::None) = &ty.kind
-                && let [ast::GenericBound::Trait(trait_ref, ast::TraitBoundModifier::None)] =
+                && let [ast::GenericBound::Trait(trait_ref, ast::TraitBoundModifiers::NONE)] =
                     bounds.as_slice()
                 && let [seg] = trait_ref.trait_ref.path.segments.as_slice()
             {
-                return Ok((
-                    Some(trait_ref.bound_generic_params.clone()),
-                    seg.ident,
-                    seg.args.as_deref().cloned(),
-                ));
+                return Ok((true, seg.ident, seg.args.as_deref().cloned()));
             }
         }
         Err(())
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 51bd9d2d386..1bae5b32240 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -1,42 +1,48 @@
-use super::attr::DEFAULT_INNER_ATTR_FORBIDDEN;
-use super::diagnostics::{AttemptLocalParseRecovery, Error};
+use super::attr::InnerAttrForbiddenReason;
+use super::diagnostics::AttemptLocalParseRecovery;
 use super::expr::LhsExpr;
-use super::pat::RecoverComma;
+use super::pat::{PatternLocation, RecoverComma};
 use super::path::PathStyle;
 use super::TrailingToken;
 use super::{
     AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
 };
+use crate::errors;
 use crate::maybe_whole;
 
+use crate::errors::MalformedLoopLabel;
+use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
 use rustc_ast::util::classify;
-use rustc_ast::{AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
+use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
 use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
 use rustc_ast::{StmtKind, DUMMY_NODE_ID};
-use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
-use rustc_span::source_map::{BytePos, Span};
-use rustc_span::symbol::{kw, sym};
+use rustc_errors::{Applicability, DiagnosticBuilder, PResult};
+use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{BytePos, Span};
 
+use std::borrow::Cow;
 use std::mem;
+use thin_vec::{thin_vec, ThinVec};
 
 impl<'a> Parser<'a> {
     /// Parses a statement. This stops just before trailing semicolons on everything but items.
     /// e.g., a `StmtKind::Semi` parses to a `StmtKind::Expr`, leaving the trailing `;` unconsumed.
     // Public for rustfmt usage.
     pub fn parse_stmt(&mut self, force_collect: ForceCollect) -> PResult<'a, Option<Stmt>> {
-        Ok(self.parse_stmt_without_recovery(false, force_collect).unwrap_or_else(|mut e| {
+        Ok(self.parse_stmt_without_recovery(false, force_collect).unwrap_or_else(|e| {
             e.emit();
             self.recover_stmt_(SemiColonMode::Break, BlockMode::Ignore);
             None
         }))
     }
 
-    /// If `force_capture` is true, forces collection of tokens regardless of whether
+    /// If `force_collect` is [`ForceCollect::Yes`], forces collection of tokens regardless of whether
     /// or not we have attributes
-    pub(crate) fn parse_stmt_without_recovery(
+    // Public for `cfg_eval` macro expansion.
+    pub fn parse_stmt_without_recovery(
         &mut self,
         capture_semi: bool,
         force_collect: ForceCollect,
@@ -46,7 +52,9 @@ impl<'a> Parser<'a> {
 
         // Don't use `maybe_whole` so that we have precise control
         // over when we bump the parser
-        if let token::Interpolated(nt) = &self.token.kind && let token::NtStmt(stmt) = &**nt {
+        if let token::Interpolated(nt) = &self.token.kind
+            && let token::NtStmt(stmt) = &nt.0
+        {
             let mut stmt = stmt.clone();
             self.bump();
             stmt.visit_attrs(|stmt_attrs| {
@@ -55,28 +63,58 @@ impl<'a> Parser<'a> {
             return Ok(Some(stmt.into_inner()));
         }
 
+        if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
+            self.bump();
+            let mut_let_span = lo.to(self.token.span);
+            self.dcx().emit_err(errors::InvalidVariableDeclaration {
+                span: mut_let_span,
+                sub: errors::InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
+            });
+        }
+
         Ok(Some(if self.token.is_keyword(kw::Let) {
             self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
-        } else if self.is_kw_followed_by_ident(kw::Mut) {
-            self.recover_stmt_local(lo, attrs, "missing keyword", "let mut")?
-        } else if self.is_kw_followed_by_ident(kw::Auto) {
+        } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::MissingLet,
+            )?
+        } else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
             self.bump(); // `auto`
-            let msg = "write `let` instead of `auto` to introduce a new variable";
-            self.recover_stmt_local(lo, attrs, msg, "let")?
-        } else if self.is_kw_followed_by_ident(sym::var) {
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::UseLetNotAuto,
+            )?
+        } else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
             self.bump(); // `var`
-            let msg = "write `let` instead of `var` to introduce a new variable";
-            self.recover_stmt_local(lo, attrs, msg, "let")?
-        } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::UseLetNotVar,
+            )?
+        } else if self.check_path()
+            && !self.token.is_qpath_start()
+            && !self.is_path_start_item()
+            && !self.is_builtin()
+        {
             // We have avoided contextual keywords like `union`, items with `crate` visibility,
             // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
             // that starts like a path (1 token), but is in fact not a path.
             // Also, we avoid stealing syntax from `parse_item_`.
-            if force_collect == ForceCollect::Yes {
-                self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))
-            } else {
-                self.parse_stmt_path_start(lo, attrs)
-            }?
+            match force_collect {
+                ForceCollect::Yes => {
+                    self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))?
+                }
+                ForceCollect::No => match self.parse_stmt_path_start(lo, attrs) {
+                    Ok(stmt) => stmt,
+                    Err(mut err) => {
+                        self.suggest_add_missing_let_for_stmt(&mut err);
+                        return Err(err);
+                    }
+                },
+            }
         } else if let Some(item) = self.parse_item_common(
             attrs.clone(),
             false,
@@ -88,30 +126,25 @@ impl<'a> Parser<'a> {
             self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
         } else if self.eat(&token::Semi) {
             // Do not attempt to parse an expression if we're done here.
-            self.error_outer_attrs(&attrs.take_for_recovery());
+            self.error_outer_attrs(attrs);
             self.mk_stmt(lo, StmtKind::Empty)
         } else if self.token != token::CloseDelim(Delimiter::Brace) {
             // Remainder are line-expr stmts.
-            let e = if force_collect == ForceCollect::Yes {
-                self.collect_tokens_no_attrs(|this| {
+            let e = match force_collect {
+                ForceCollect::Yes => self.collect_tokens_no_attrs(|this| {
                     this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
-                })
-            } else {
-                self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
-            }?;
+                })?,
+                ForceCollect::No => self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))?,
+            };
             if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
                 let bl = self.parse_block()?;
                 // Destructuring assignment ... else.
                 // This is not allowed, but point it out in a nice way.
-                let mut err = self.struct_span_err(
-                    e.span.to(bl.span),
-                    "<assignment> ... else { ... } is not allowed",
-                );
-                err.emit();
+                self.dcx().emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
             }
             self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
         } else {
-            self.error_outer_attrs(&attrs.take_for_recovery());
+            self.error_outer_attrs(attrs);
             return Ok(None);
         }))
     }
@@ -121,7 +154,7 @@ impl<'a> Parser<'a> {
             let path = this.parse_path(PathStyle::Expr)?;
 
             if this.eat(&token::Not) {
-                let stmt_mac = this.parse_stmt_mac(lo, attrs.into(), path)?;
+                let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
                 if this.token == token::Semi {
                     return Ok((stmt_mac, TrailingToken::Semi));
                 } else {
@@ -130,14 +163,14 @@ impl<'a> Parser<'a> {
             }
 
             let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
-                this.parse_struct_expr(None, path, AttrVec::new(), true)?
+                this.parse_expr_struct(None, path, true)?
             } else {
                 let hi = this.prev_token.span;
-                this.mk_expr(lo.to(hi), ExprKind::Path(None, path), AttrVec::new())
+                this.mk_expr(lo.to(hi), ExprKind::Path(None, path))
             };
 
             let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
-                this.parse_dot_or_call_expr_with(expr, lo, attrs)
+                this.parse_expr_dot_or_call_with(expr, lo, attrs)
             })?;
             // `DUMMY_SP` will get overwritten later in this function
             Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), TrailingToken::None))
@@ -147,7 +180,10 @@ impl<'a> Parser<'a> {
             // Perform this outside of the `collect_tokens_trailing_token` closure,
             // since our outer attributes do not apply to this part of the expression
             let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
-                this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
+                this.parse_expr_assoc_with(
+                    0,
+                    LhsExpr::AlreadyParsed { expr, starts_statement: true },
+                )
             })?;
             Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
         } else {
@@ -158,17 +194,15 @@ impl<'a> Parser<'a> {
     /// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
     /// At this point, the `!` token after the path has already been eaten.
     fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
-        let args = self.parse_mac_args()?;
-        let delim = args.delim();
+        let args = self.parse_delim_args()?;
         let hi = self.prev_token.span;
 
-        let style = match delim {
-            Some(Delimiter::Brace) => MacStmtStyle::Braces,
-            Some(_) => MacStmtStyle::NoBraces,
-            None => unreachable!(),
+        let style = match args.delim {
+            Delimiter::Brace => MacStmtStyle::Braces,
+            _ => MacStmtStyle::NoBraces,
         };
 
-        let mac = MacCall { path, args, prior_type_ascription: self.last_type_ascription };
+        let mac = P(MacCall { path, args });
 
         let kind = if (style == MacStmtStyle::Braces
             && self.token != token::Dot
@@ -179,10 +213,13 @@ impl<'a> Parser<'a> {
             StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None }))
         } else {
             // Since none of the above applied, this is an expression statement macro.
-            let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
+            let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
             let e = self.maybe_recover_from_bad_qpath(e)?;
-            let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
-            let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
+            let e = self.parse_expr_dot_or_call_with(e, lo, attrs)?;
+            let e = self.parse_expr_assoc_with(
+                0,
+                LhsExpr::AlreadyParsed { expr: e, starts_statement: false },
+            )?;
             StmtKind::Expr(e)
         };
         Ok(self.mk_stmt(lo.to(hi), kind))
@@ -190,27 +227,39 @@ impl<'a> Parser<'a> {
 
     /// Error on outer attributes in this context.
     /// Also error if the previous token was a doc comment.
-    fn error_outer_attrs(&self, attrs: &[Attribute]) {
-        if let [.., last] = attrs {
+    fn error_outer_attrs(&self, attrs: AttrWrapper) {
+        if !attrs.is_empty()
+            && let attrs = attrs.take_for_recovery(self.sess)
+            && let attrs @ [.., last] = &*attrs
+        {
             if last.is_doc_comment() {
-                self.span_err(last.span, Error::UselessDocComment).emit();
+                self.dcx().emit_err(errors::DocCommentDoesNotDocumentAnything {
+                    span: last.span,
+                    missing_comma: None,
+                });
             } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
-                self.struct_span_err(last.span, "expected statement after outer attribute").emit();
+                self.dcx().emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
             }
         }
     }
 
-    fn recover_stmt_local(
+    fn recover_stmt_local_after_let(
         &mut self,
         lo: Span,
         attrs: AttrWrapper,
-        msg: &str,
-        sugg: &str,
+        subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
     ) -> PResult<'a, Stmt> {
-        let stmt = self.recover_local_after_let(lo, attrs)?;
-        self.struct_span_err(lo, "invalid variable declaration")
-            .span_suggestion(lo, msg, sugg, Applicability::MachineApplicable)
-            .emit();
+        let stmt =
+            self.collect_tokens_trailing_token(attrs, ForceCollect::Yes, |this, attrs| {
+                let local = this.parse_local(attrs)?;
+                // FIXME - maybe capture semicolon in recovery?
+                Ok((
+                    this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
+                    TrailingToken::None,
+                ))
+            })?;
+        self.dcx()
+            .emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
         Ok(stmt)
     }
 
@@ -223,7 +272,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, Stmt> {
         self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
             this.expect_keyword(kw::Let)?;
-            let local = this.parse_local(attrs.into())?;
+            let local = this.parse_local(attrs)?;
             let trailing = if capture_semi && this.token.kind == token::Semi {
                 TrailingToken::Semi
             } else {
@@ -233,21 +282,17 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
-        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
-            let local = this.parse_local(attrs.into())?;
-            // FIXME - maybe capture semicolon in recovery?
-            Ok((
-                this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
-                TrailingToken::None,
-            ))
-        })
-    }
-
     /// Parses a local variable declaration.
     fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
         let lo = self.prev_token.span;
-        let (pat, colon) = self.parse_pat_before_ty(None, RecoverComma::Yes, "`let` bindings")?;
+
+        if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
+            self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
+            self.bump();
+        }
+
+        let (pat, colon) =
+            self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
 
         let (err, ty) = if colon {
             // Save the state of the parser before parsing type normally, in case there is a `:`
@@ -258,7 +303,7 @@ impl<'a> Parser<'a> {
                 Ok(ty) => (None, Some(ty)),
                 Err(mut err) => {
                     if let Ok(snip) = self.span_to_snippet(pat.span) {
-                        err.span_label(pat.span, format!("while parsing the type for `{}`", snip));
+                        err.span_label(pat.span, format!("while parsing the type for `{snip}`"));
                     }
                     // we use noexpect here because we don't actually expect Eq to be here
                     // but we are still checking for it in order to be able to handle it if
@@ -323,7 +368,7 @@ impl<'a> Parser<'a> {
                         // `let...else if`. Emit the same error that `parse_block()` would,
                         // but explicitly point out that this pattern is not allowed.
                         let msg = "conditional `else if` is not supported for `let...else`";
-                        return Err(self.error_block_no_opening_brace_msg(msg));
+                        return Err(self.error_block_no_opening_brace_msg(Cow::from(msg)));
                     }
                     let els = self.parse_block()?;
                     self.check_let_else_init_bool_expr(&init);
@@ -340,45 +385,35 @@ impl<'a> Parser<'a> {
 
     fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
         if let ast::ExprKind::Binary(op, ..) = init.kind {
-            if op.node.lazy() {
-                let suggs = vec![
-                    (init.span.shrink_to_lo(), "(".to_string()),
-                    (init.span.shrink_to_hi(), ")".to_string()),
-                ];
-                self.struct_span_err(
-                    init.span,
-                    &format!(
-                        "a `{}` expression cannot be directly assigned in `let...else`",
-                        op.node.to_string()
-                    ),
-                )
-                .multipart_suggestion(
-                    "wrap the expression in parentheses",
-                    suggs,
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            if op.node.is_lazy() {
+                self.dcx().emit_err(errors::InvalidExpressionInLetElse {
+                    span: init.span,
+                    operator: op.node.as_str(),
+                    sugg: errors::WrapInParentheses::Expression {
+                        left: init.span.shrink_to_lo(),
+                        right: init.span.shrink_to_hi(),
+                    },
+                });
             }
         }
     }
 
     fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
         if let Some(trailing) = classify::expr_trailing_brace(init) {
-            let err_span = trailing.span.with_lo(trailing.span.hi() - BytePos(1));
-            let suggs = vec![
-                (trailing.span.shrink_to_lo(), "(".to_string()),
-                (trailing.span.shrink_to_hi(), ")".to_string()),
-            ];
-            self.struct_span_err(
-                err_span,
-                "right curly brace `}` before `else` in a `let...else` statement not allowed",
-            )
-            .multipart_suggestion(
-                "try wrapping the expression in parentheses",
-                suggs,
-                Applicability::MachineApplicable,
-            )
-            .emit();
+            let sugg = match &trailing.kind {
+                ExprKind::MacCall(mac) => errors::WrapInParentheses::MacroArgs {
+                    left: mac.args.dspan.open,
+                    right: mac.args.dspan.close,
+                },
+                _ => errors::WrapInParentheses::Expression {
+                    left: trailing.span.shrink_to_lo(),
+                    right: trailing.span.shrink_to_hi(),
+                },
+            };
+            self.dcx().emit_err(errors::InvalidCurlyInLetElse {
+                span: trailing.span.with_lo(trailing.span.hi() - BytePos(1)),
+                sugg,
+            });
         }
     }
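The `let...else` check above rejects a `}` directly before `else` and suggests a parenthesized form so the initializer no longer ends in a brace. A standalone example of an accepted shape, with a made-up `returns_opt!` macro standing in for any braced invocation:

```rust
// Hypothetical macro whose braced invocation would otherwise end in `}` right
// before `else`, which the parser rejects in `let...else`.
macro_rules! returns_opt {
    {} => { Some(1) };
}

fn main() {
    // `let Some(x) = returns_opt! {} else { return };` is rejected because of the
    // trailing `}`; parenthesizing the initializer is one accepted fix.
    let Some(x) = (returns_opt! {}) else { return };
    assert_eq!(x, 1);
}
```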
 
@@ -387,18 +422,8 @@ impl<'a> Parser<'a> {
         let eq_consumed = match self.token.kind {
             token::BinOpEq(..) => {
                 // Recover `let x <op>= 1` as `let x = 1`
-                self.struct_span_err(
-                    self.token.span,
-                    "can't reassign to an uninitialized variable",
-                )
-                .span_suggestion_short(
-                    self.token.span,
-                    "initialize the variable",
-                    "=",
-                    Applicability::MaybeIncorrect,
-                )
-                .help("if you meant to overwrite, remove the `let` binding")
-                .emit();
+                self.dcx()
+                    .emit_err(errors::CompoundAssignmentExpressionInLet { span: self.token.span });
                 self.bump();
                 true
             }
@@ -412,17 +437,22 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_block(&mut self) -> PResult<'a, P<Block>> {
         let (attrs, block) = self.parse_inner_attrs_and_block()?;
         if let [.., last] = &*attrs {
-            self.error_on_forbidden_inner_attr(last.span, DEFAULT_INNER_ATTR_FORBIDDEN);
+            self.error_on_forbidden_inner_attr(
+                last.span,
+                super::attr::InnerAttrPolicy::Forbidden(Some(
+                    InnerAttrForbiddenReason::InCodeBlock,
+                )),
+            );
         }
         Ok(block)
     }
 
     fn error_block_no_opening_brace_msg(
         &mut self,
-        msg: &str,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+        msg: Cow<'static, str>,
+    ) -> DiagnosticBuilder<'a> {
         let sp = self.token.span;
-        let mut e = self.struct_span_err(sp, msg);
+        let mut e = self.dcx().struct_span_err(sp, msg);
         let do_not_suggest_help = self.token.is_keyword(kw::In) || self.token == token::Colon;
 
         // Check to see if the user has written something like
@@ -482,15 +512,13 @@ impl<'a> Parser<'a> {
 
     fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
         let tok = super::token_descr(&self.token);
-        let msg = format!("expected `{{`, found {}", tok);
-        Err(self.error_block_no_opening_brace_msg(&msg))
+        let msg = format!("expected `{{`, found {tok}");
+        Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
     }
 
     /// Parses a block. Inner attributes are allowed.
-    pub(super) fn parse_inner_attrs_and_block(
-        &mut self,
-    ) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
-        self.parse_block_common(self.token.span, BlockCheckMode::Default)
+    pub(super) fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (AttrVec, P<Block>)> {
+        self.parse_block_common(self.token.span, BlockCheckMode::Default, true)
     }
 
     /// Parses a block. Inner attributes are allowed.
@@ -498,16 +526,23 @@ impl<'a> Parser<'a> {
         &mut self,
         lo: Span,
         blk_mode: BlockCheckMode,
-    ) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
-        maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
+        can_be_struct_literal: bool,
+    ) -> PResult<'a, (AttrVec, P<Block>)> {
+        maybe_whole!(self, NtBlock, |x| (AttrVec::new(), x));
 
+        let maybe_ident = self.prev_token.clone();
         self.maybe_recover_unexpected_block_label();
         if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
             return self.error_block_no_opening_brace();
         }
 
         let attrs = self.parse_inner_attributes()?;
-        let tail = match self.maybe_suggest_struct_literal(lo, blk_mode) {
+        let tail = match self.maybe_suggest_struct_literal(
+            lo,
+            blk_mode,
+            maybe_ident,
+            can_be_struct_literal,
+        ) {
             Some(tail) => tail?,
             None => self.parse_block_tail(lo, blk_mode, AttemptLocalParseRecovery::Yes)?,
         };
@@ -522,14 +557,58 @@ impl<'a> Parser<'a> {
         s: BlockCheckMode,
         recover: AttemptLocalParseRecovery,
     ) -> PResult<'a, P<Block>> {
-        let mut stmts = vec![];
+        let mut stmts = ThinVec::new();
+        let mut snapshot = None;
         while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
             if self.token == token::Eof {
                 break;
             }
+            if self.is_diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+                // Account for `<<<<<<<` diff markers. We can't proactively error here because
+                // that can be a valid path start, so we snapshot and reparse only if we've
+                // encountered another parse error.
+                snapshot = Some(self.create_snapshot_for_diagnostic());
+            }
             let stmt = match self.parse_full_stmt(recover) {
                 Err(mut err) if recover.yes() => {
-                    self.maybe_annotate_with_ascription(&mut err, false);
+                    if let Some(ref mut snapshot) = snapshot {
+                        snapshot.recover_diff_marker();
+                    }
+                    if self.token == token::Colon {
+                        // If the tokens before and after the current one are both
+                        // integer literals (e.g. `1:42`), the user likely meant a range
+                        // expression (as in Python), so suggest that.
+                        if self.prev_token.is_integer_lit()
+                            && self.may_recover()
+                            && self.look_ahead(1, |token| token.is_integer_lit())
+                        {
+                            // FIXME(hkmatsumoto): Might be better to trigger
+                            // this only when parsing an index expression.
+                            err.span_suggestion_verbose(
+                                self.token.span,
+                                "you might have meant a range expression",
+                                "..",
+                                Applicability::MaybeIncorrect,
+                            );
+                        } else {
+                            // If the next token immediately follows the colon, it's
+                            // likely a path, so suggest a path separator (`::`).
+                            self.bump();
+                            if self.token.span.lo() == self.prev_token.span.hi() {
+                                err.span_suggestion_verbose(
+                                    self.prev_token.span,
+                                    "maybe write a path separator here",
+                                    "::",
+                                    Applicability::MaybeIncorrect,
+                                );
+                            }
+                            if self.sess.unstable_features.is_nightly_build() {
+                                // FIXME(Nilstrieb): Remove this again after a few months.
+                                err.note("type ascription syntax has been removed, see issue #101728 <https://github.com/rust-lang/rust/issues/101728>");
+                            }
+                        }
+                    }
+
                     err.emit();
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     Some(self.mk_stmt_err(self.token.span))
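The block-tail recovery above chooses between two suggestions when a statement fails to parse at a stray `:`: a `..` range when both neighbours are integer literals (`1:42`), or a `::` path separator when the colon is glued to the following token. A rough sketch of that choice over plain token data (local hypothetical types, not the compiler's):

```rust
#[derive(Clone, Copy, PartialEq)]
enum TokenKind { IntLit, Ident, Colon, Other }

#[derive(Clone, Copy)]
struct Tok { kind: TokenKind, lo: u32, hi: u32 }

/// Mirrors the shape of the recovery above: given the tokens around a stray
/// colon, decide which suggestion (if any) to attach to the parse error.
fn colon_suggestion(prev: Tok, colon: Tok, next: Tok) -> Option<&'static str> {
    if colon.kind != TokenKind::Colon {
        return None;
    }
    if prev.kind == TokenKind::IntLit && next.kind == TokenKind::IntLit {
        // `1:42` -- probably a Python-style slice, suggest a range.
        Some("you might have meant a range expression: `..`")
    } else if colon.hi == next.lo {
        // `seg:next` with no space -- probably a missing path separator.
        Some("maybe write a path separator here: `::`")
    } else {
        None
    }
}

fn main() {
    let int = |lo, hi| Tok { kind: TokenKind::IntLit, lo, hi };
    let colon = |lo| Tok { kind: TokenKind::Colon, lo, hi: lo + 1 };
    let ident = |lo, hi| Tok { kind: TokenKind::Ident, lo, hi };

    assert!(colon_suggestion(int(0, 1), colon(1), int(2, 4)).unwrap().contains(".."));
    assert!(colon_suggestion(ident(0, 3), colon(3), ident(4, 7)).unwrap().contains("::"));
    assert!(colon_suggestion(ident(0, 3), colon(4), ident(6, 9)).is_none());
}
```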
@@ -560,48 +639,114 @@ impl<'a> Parser<'a> {
         };
 
         let mut eat_semi = true;
-        match stmt.kind {
+        let mut add_semi_to_stmt = false;
+
+        match &mut stmt.kind {
             // Expression without semicolon.
-            StmtKind::Expr(ref mut expr)
+            StmtKind::Expr(expr)
+                if classify::expr_requires_semi_to_be_stmt(expr)
+                    && !expr.attrs.is_empty()
+                    && ![token::Eof, token::Semi, token::CloseDelim(Delimiter::Brace)]
+                        .contains(&self.token.kind) =>
+            {
+                // The user has written `#[attr] expr` which is unsupported. (#106020)
+                self.attr_on_non_tail_expr(&expr);
+                // We already emitted an error, so don't emit another type error
+                let sp = expr.span.to(self.prev_token.span);
+                *expr = self.mk_expr_err(sp);
+            }
+
+            // Expression without semicolon.
+            StmtKind::Expr(expr)
                 if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) =>
             {
                 // Just check for errors and recover; do not eat semicolon yet.
-                if let Err(mut e) =
-                    self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)])
-                {
-                    if let TokenKind::DocComment(..) = self.token.kind {
-                        if let Ok(snippet) = self.span_to_snippet(self.token.span) {
-                            let sp = self.token.span;
-                            let marker = &snippet[..3];
-                            let (comment_marker, doc_comment_marker) = marker.split_at(2);
-
-                            e.span_suggestion(
-                                sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
-                                &format!(
-                                    "add a space before `{}` to use a regular comment",
-                                    doc_comment_marker,
-                                ),
-                                format!("{} {}", comment_marker, doc_comment_marker),
-                                Applicability::MaybeIncorrect,
-                            );
+                // `expect_one_of` returns PResult<'a, bool /* recovered */>
+
+                let expect_result =
+                    self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
+
+                let replace_with_err = 'break_recover: {
+                    match expect_result {
+                        // Recover from parser, skip type error to avoid extra errors.
+                        Ok(true) => true,
+                        Err(e) => {
+                            if self.recover_colon_as_semi() {
+                                // recover_colon_as_semi has already emitted a nicer error.
+                                e.delay_as_bug();
+                                add_semi_to_stmt = true;
+                                eat_semi = false;
+
+                                break 'break_recover false;
+                            }
+
+                            match &expr.kind {
+                                ExprKind::Path(None, ast::Path { segments, .. })
+                                    if segments.len() == 1 =>
+                                {
+                                    if self.token == token::Colon
+                                        && self.look_ahead(1, |token| {
+                                            token.is_whole_block()
+                                                || matches!(
+                                                    token.kind,
+                                                    token::Ident(
+                                                        kw::For | kw::Loop | kw::While,
+                                                        false
+                                                    ) | token::OpenDelim(Delimiter::Brace)
+                                                )
+                                        })
+                                    {
+                                        let snapshot = self.create_snapshot_for_diagnostic();
+                                        let label = Label {
+                                            ident: Ident::from_str_and_span(
+                                                &format!("'{}", segments[0].ident),
+                                                segments[0].ident.span,
+                                            ),
+                                        };
+                                        match self.parse_expr_labeled(label, false) {
+                                            Ok(labeled_expr) => {
+                                                e.delay_as_bug();
+                                                self.dcx().emit_err(MalformedLoopLabel {
+                                                    span: label.ident.span,
+                                                    correct_label: label.ident,
+                                                });
+                                                *expr = labeled_expr;
+                                                break 'break_recover false;
+                                            }
+                                            Err(err) => {
+                                                err.cancel();
+                                                self.restore_snapshot(snapshot);
+                                            }
+                                        }
+                                    }
+                                }
+                                _ => {}
+                            }
+
+                            if let Err(e) =
+                                self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
+                            {
+                                if recover.no() {
+                                    return Err(e);
+                                }
+                                e.emit();
+                                self.recover_stmt();
+                            }
+
+                            true
                         }
+                        Ok(false) => false,
                     }
-                    if let Err(mut e) =
-                        self.check_mistyped_turbofish_with_multiple_type_params(e, expr)
-                    {
-                        if recover.no() {
-                            return Err(e);
-                        }
-                        e.emit();
-                        self.recover_stmt();
-                    }
-                    // Don't complain about type errors in body tail after parse error (#57383).
+                };
+
+                if replace_with_err {
+                    // We already emitted an error, so don't emit another type error
                     let sp = expr.span.to(self.prev_token.span);
                     *expr = self.mk_expr_err(sp);
                 }
             }
             StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
-            StmtKind::Local(ref mut local) if let Err(e) = self.expect_semi() => {
+            StmtKind::Local(local) if let Err(e) = self.expect_semi() => {
                 // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
                 match &mut local.kind {
                     LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
@@ -613,17 +758,25 @@ impl<'a> Parser<'a> {
                 }
                 eat_semi = false;
             }
-            StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => eat_semi = false,
+            StmtKind::Empty | StmtKind::Item(_) | StmtKind::Local(_) | StmtKind::Semi(_) => {
+                eat_semi = false
+            }
         }
 
-        if eat_semi && self.eat(&token::Semi) {
+        if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
             stmt = stmt.add_trailing_semicolon();
         }
+
         stmt.span = stmt.span.to(self.prev_token.span);
         Ok(Some(stmt))
     }
 
-    pub(super) fn mk_block(&self, stmts: Vec<Stmt>, rules: BlockCheckMode, span: Span) -> P<Block> {
+    pub(super) fn mk_block(
+        &self,
+        stmts: ThinVec<Stmt>,
+        rules: BlockCheckMode,
+        span: Span,
+    ) -> P<Block> {
         P(Block {
             stmts,
             id: DUMMY_NODE_ID,
@@ -643,6 +796,6 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn mk_block_err(&self, span: Span) -> P<Block> {
-        self.mk_block(vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
+        self.mk_block(thin_vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
     }
 }
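
Note on the labeled-statement recovery in the stmt.rs hunk above: when a statement begins with a bare single-segment path followed by `:` and a `for`, `loop`, `while`, or block, the parser now re-parses it as a labeled expression and emits `MalformedLoopLabel`, on the assumption that the leading `'` of the label was forgotten. A minimal, compilable sketch of the spelling the suggestion points to (illustrative only, not part of this patch):

    fn main() {
        // Writing `outer: loop { ... }` (without the `'`) hits the recovery
        // path above and gets `'outer` suggested as the label.
        'outer: loop {
            break 'outer;
        }
    }
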
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 31b40a83e60..157fb9e505a 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,36 +1,25 @@
 use super::{Parser, PathStyle, TokenType};
 
+use crate::errors::{
+    self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
+    FnPointerCannotBeAsync, FnPointerCannotBeConst, FnPtrWithGenerics, FnPtrWithGenericsSugg,
+    HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
+    NestedCVariadicType, ReturnTypesUseThinArrow,
+};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::util::case::Case;
 use rustc_ast::{
-    self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
-    MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
+    self as ast, BareFnTy, BoundAsyncness, BoundConstness, BoundPolarity, FnRetTy, GenericBound,
+    GenericBounds, GenericParam, Generics, Lifetime, MacCall, MutTy, Mutability, PolyTraitRef,
+    TraitBoundModifiers, TraitObjectSyntax, Ty, TyKind, DUMMY_NODE_ID,
 };
-use rustc_errors::{pluralize, struct_span_err, Applicability, PResult};
-use rustc_span::source_map::Span;
-use rustc_span::symbol::{kw, sym};
-
-/// Any `?` or `~const` modifiers that appear at the start of a bound.
-struct BoundModifiers {
-    /// `?Trait`.
-    maybe: Option<Span>,
-
-    /// `~const Trait`.
-    maybe_const: Option<Span>,
-}
-
-impl BoundModifiers {
-    fn to_trait_bound_modifier(&self) -> TraitBoundModifier {
-        match (self.maybe, self.maybe_const) {
-            (None, None) => TraitBoundModifier::None,
-            (Some(_), None) => TraitBoundModifier::Maybe,
-            (None, Some(_)) => TraitBoundModifier::MaybeConst,
-            (Some(_), Some(_)) => TraitBoundModifier::MaybeConstMaybe,
-        }
-    }
-}
+use rustc_errors::{Applicability, PResult};
+use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::{Span, Symbol};
+use thin_vec::{thin_vec, ThinVec};
 
 #[derive(Copy, Clone, PartialEq)]
 pub(super) enum AllowPlus {
@@ -96,6 +85,18 @@ fn can_continue_type_after_non_fn_ident(t: &Token) -> bool {
     t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl)
 }
 
+fn can_begin_dyn_bound_in_edition_2015(t: &Token) -> bool {
+    // `Not`, `Tilde` & `Const` are deliberately not part of this list to
+    // contain the number of potential regressions esp. in MBE code.
+    // `Const` would regress `rfc-2632-const-trait-impl/mbe-dyn-const-2015.rs`.
+    // `Not` would regress `dyn!(...)` macro calls in Rust 2015.
+    t.is_path_start()
+        || t.is_lifetime()
+        || t == &TokenKind::Question
+        || t.is_keyword(kw::For)
+        || t == &TokenKind::OpenDelim(Delimiter::Parenthesis)
+}
+
 impl<'a> Parser<'a> {
     /// Parses a type.
     pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
@@ -123,6 +124,17 @@ impl<'a> Parser<'a> {
         )
     }
 
+    /// Parse a type suitable for a field definition.
+    /// The difference from `parse_ty` is that this version
+    /// allows anonymous structs and unions.
+    pub fn parse_ty_for_field_def(&mut self) -> PResult<'a, P<Ty>> {
+        if self.can_begin_anon_struct_or_union() {
+            self.parse_anon_struct_or_union()
+        } else {
+            self.parse_ty()
+        }
+    }
+
     /// Parse a type suitable for a function or function pointer parameter.
     /// The difference from `parse_ty` is that this version allows `...`
     /// (`CVarArgs`) at the top level of the type.
@@ -167,7 +179,7 @@ impl<'a> Parser<'a> {
         )
     }
 
-    pub(super) fn parse_no_question_mark_recover(&mut self) -> PResult<'a, P<Ty>> {
+    pub(super) fn parse_ty_no_question_mark_recover(&mut self) -> PResult<'a, P<Ty>> {
         self.parse_ty_common(
             AllowPlus::Yes,
             AllowCVariadic::No,
@@ -212,14 +224,7 @@ impl<'a> Parser<'a> {
             // Don't `eat` to prevent `=>` from being added as an expected token which isn't
             // actually expected and could only confuse users
             self.bump();
-            self.struct_span_err(self.prev_token.span, "return types are denoted using `->`")
-                .span_suggestion_short(
-                    self.prev_token.span,
-                    "use `->` instead",
-                    "->",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            self.dcx().emit_err(ReturnTypesUseThinArrow { span: self.prev_token.span });
             let ty = self.parse_ty_common(
                 allow_plus,
                 AllowCVariadic::No,
@@ -230,7 +235,7 @@ impl<'a> Parser<'a> {
             )?;
             FnRetTy::Ty(ty)
         } else {
-            FnRetTy::Default(self.token.span.shrink_to_lo())
+            FnRetTy::Default(self.prev_token.span.shrink_to_hi())
         })
     }
 
@@ -267,20 +272,61 @@ impl<'a> Parser<'a> {
         } else if self.eat_keyword(kw::Underscore) {
             // A type to be inferred `_`
             TyKind::Infer
-        } else if self.check_fn_front_matter(false) {
+        } else if self.check_fn_front_matter(false, Case::Sensitive) {
             // Function pointer type
-            self.parse_ty_bare_fn(lo, Vec::new(), recover_return_sign)?
+            self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
         } else if self.check_keyword(kw::For) {
+            let for_span = self.token.span;
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
-            if self.check_fn_front_matter(false) {
-                self.parse_ty_bare_fn(lo, lifetime_defs, recover_return_sign)?
+            if self.check_fn_front_matter(false, Case::Sensitive) {
+                self.parse_ty_bare_fn(
+                    lo,
+                    lifetime_defs,
+                    Some(self.prev_token.span.shrink_to_lo()),
+                    recover_return_sign,
+                )?
             } else {
-                let path = self.parse_path(PathStyle::Type)?;
-                let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
-                self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
+                // Try to recover `for<'a> dyn Trait` or `for<'a> impl Trait`.
+                if self.may_recover()
+                    && (self.eat_keyword_noexpect(kw::Impl) || self.eat_keyword_noexpect(kw::Dyn))
+                {
+                    let kw = self.prev_token.ident().unwrap().0;
+                    let removal_span = kw.span.with_hi(self.token.span.lo());
+                    let path = self.parse_path(PathStyle::Type)?;
+                    let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
+                    let kind =
+                        self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?;
+                    let err = self.dcx().create_err(errors::TransposeDynOrImpl {
+                        span: kw.span,
+                        kw: kw.name.as_str(),
+                        sugg: errors::TransposeDynOrImplSugg {
+                            removal_span,
+                            insertion_span: for_span.shrink_to_lo(),
+                            kw: kw.name.as_str(),
+                        },
+                    });
+
+                    // Take the parsed bare trait object and turn it either
+                    // into a `dyn` object or an `impl Trait`.
+                    let kind = match (kind, kw.name) {
+                        (TyKind::TraitObject(bounds, _), kw::Dyn) => {
+                            TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn)
+                        }
+                        (TyKind::TraitObject(bounds, _), kw::Impl) => {
+                            TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
+                        }
+                        _ => return Err(err),
+                    };
+                    err.emit();
+                    kind
+                } else {
+                    let path = self.parse_path(PathStyle::Type)?;
+                    let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
+                    self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
+                }
             }
         } else if self.eat_keyword(kw::Impl) {
             self.parse_impl_ty(&mut impl_dyn_multi)?
@@ -295,19 +341,19 @@ impl<'a> Parser<'a> {
         } else if self.can_begin_bound() {
             self.parse_bare_trait_object(lo, allow_plus)?
         } else if self.eat(&token::DotDotDot) {
-            if allow_c_variadic == AllowCVariadic::Yes {
-                TyKind::CVarArgs
-            } else {
-                // FIXME(Centril): Should we just allow `...` syntactically
-                // anywhere in a type and use semantic restrictions instead?
-                self.error_illegal_c_varadic_ty(lo);
-                TyKind::Err
+            match allow_c_variadic {
+                AllowCVariadic::Yes => TyKind::CVarArgs,
+                AllowCVariadic::No => {
+                    // FIXME(Centril): Should we just allow `...` syntactically
+                    // anywhere in a type and use semantic restrictions instead?
+                    self.dcx().emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
+                    TyKind::Err
+                }
             }
         } else {
             let msg = format!("expected type, found {}", super::token_descr(&self.token));
-            let mut err = self.struct_span_err(self.token.span, &msg);
+            let mut err = self.dcx().struct_span_err(self.token.span, msg);
             err.span_label(self.token.span, "expected type");
-            self.maybe_annotate_with_ascription(&mut err, true);
             return Err(err);
         };
 
@@ -315,10 +361,9 @@ impl<'a> Parser<'a> {
         let mut ty = self.mk_ty(span, kind);
 
         // Try to recover from use of `+` with incorrect priority.
-        if matches!(allow_plus, AllowPlus::Yes) {
-            self.maybe_recover_from_bad_type_plus(&ty)?;
-        } else {
-            self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty);
+        match allow_plus {
+            AllowPlus::Yes => self.maybe_recover_from_bad_type_plus(&ty)?,
+            AllowPlus::No => self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty),
         }
         if let RecoverQuestionMark::Yes = recover_question_mark {
             ty = self.maybe_recover_from_question_mark(ty);
@@ -326,6 +371,37 @@ impl<'a> Parser<'a> {
         if allow_qpath_recovery { self.maybe_recover_from_bad_qpath(ty) } else { Ok(ty) }
     }
 
+    /// Parse an anonymous struct or union (only for field definitions):
+    /// ```ignore (feature-not-ready)
+    /// #[repr(C)]
+    /// struct Foo {
+    ///     _: struct { // anonymous struct
+    ///         x: u32,
+    ///         y: f64,
+    ///     }
+    ///     _: union { // anonymous union
+    ///         z: u32,
+    ///         w: f64,
+    ///     }
+    /// }
+    /// ```
+    fn parse_anon_struct_or_union(&mut self) -> PResult<'a, P<Ty>> {
+        assert!(self.token.is_keyword(kw::Union) || self.token.is_keyword(kw::Struct));
+        let is_union = self.token.is_keyword(kw::Union);
+
+        let lo = self.token.span;
+        self.bump();
+
+        let (fields, _recovered) =
+            self.parse_record_struct_body(if is_union { "union" } else { "struct" }, lo, false)?;
+        let span = lo.to(self.prev_token.span);
+        self.sess.gated_spans.gate(sym::unnamed_fields, span);
+        let id = ast::DUMMY_NODE_ID;
+        let kind =
+            if is_union { TyKind::AnonUnion(id, fields) } else { TyKind::AnonStruct(id, fields) };
+        Ok(self.mk_ty(span, kind))
+    }
+
     /// Parses either:
     /// - `(TYPE)`, a parenthesized type.
     /// - `(TYPE,)`, a tuple with a single field of type TYPE.
@@ -343,7 +419,7 @@ impl<'a> Parser<'a> {
             match ty.kind {
                 // `(TY_BOUND_NOPAREN) + BOUND + ...`.
                 TyKind::Path(None, path) if maybe_bounds => {
-                    self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
+                    self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
                 }
                 TyKind::TraitObject(bounds, TraitObjectSyntax::None)
                     if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
@@ -360,23 +436,22 @@ impl<'a> Parser<'a> {
 
     fn parse_bare_trait_object(&mut self, lo: Span, allow_plus: AllowPlus) -> PResult<'a, TyKind> {
         let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
-        let bounds = self.parse_generic_bounds_common(allow_plus, None)?;
+        let bounds = self.parse_generic_bounds_common(allow_plus)?;
         if lt_no_plus {
-            self.struct_span_err(lo, "lifetime in trait object type must be followed by `+`")
-                .emit();
+            self.dcx().emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
         }
         Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
     }
 
     fn parse_remaining_bounds_path(
         &mut self,
-        generic_params: Vec<GenericParam>,
+        generic_params: ThinVec<GenericParam>,
         path: ast::Path,
         lo: Span,
         parse_plus: bool,
     ) -> PResult<'a, TyKind> {
         let poly_trait_ref = PolyTraitRef::new(generic_params, path, lo.to(self.prev_token.span));
-        let bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifier::None)];
+        let bounds = vec![GenericBound::Trait(poly_trait_ref, TraitBoundModifiers::NONE)];
         self.parse_remaining_bounds(bounds, parse_plus)
     }
 
@@ -388,7 +463,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, TyKind> {
         if plus {
             self.eat_plus(); // `+`, or `+=` gets split and `+` is discarded
-            bounds.append(&mut self.parse_generic_bounds(Some(self.prev_token.span))?);
+            bounds.append(&mut self.parse_generic_bounds()?);
         }
         Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
     }
@@ -397,11 +472,10 @@ impl<'a> Parser<'a> {
     fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
         let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
             let span = self.prev_token.span;
-            let msg = "expected mut or const in raw pointer type";
-            self.struct_span_err(span, msg)
-                .span_label(span, msg)
-                .help("use `*mut T` or `*const T` as appropriate")
-                .emit();
+            self.dcx().emit_err(ExpectedMutOrConstInRawPointerType {
+                span,
+                after_asterisk: span.shrink_to_hi(),
+            });
             Mutability::Not
         });
         let ty = self.parse_ty_no_plus()?;
@@ -413,7 +487,7 @@ impl<'a> Parser<'a> {
     fn parse_array_or_slice_ty(&mut self) -> PResult<'a, TyKind> {
         let elt_ty = match self.parse_ty() {
             Ok(ty) => ty,
-            Err(mut err)
+            Err(err)
                 if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket))
                     | self.look_ahead(1, |t| t.kind == token::Semi) =>
             {
@@ -426,7 +500,7 @@ impl<'a> Parser<'a> {
         };
 
         let ty = if self.eat(&token::Semi) {
-            let mut length = self.parse_anon_const_expr()?;
+            let mut length = self.parse_expr_anon_const()?;
             if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
                 // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
                 self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
@@ -443,8 +517,7 @@ impl<'a> Parser<'a> {
 
     fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
         let and_span = self.prev_token.span;
-        let mut opt_lifetime =
-            if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
+        let mut opt_lifetime = self.check_lifetime().then(|| self.expect_lifetime());
         let mut mutbl = self.parse_mutability();
         if self.token.is_lifetime() && mutbl == Mutability::Mut && opt_lifetime.is_none() {
             // A lifetime is invalid here: it would be part of a bare trait bound, which requires
@@ -456,16 +529,13 @@ impl<'a> Parser<'a> {
                 let lifetime_span = self.token.span;
                 let span = and_span.to(lifetime_span);
 
-                let mut err = self.struct_span_err(span, "lifetime must precede `mut`");
-                if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
-                    err.span_suggestion(
-                        span,
-                        "place the lifetime before `mut`",
-                        format!("&{} mut", lifetime_src),
-                        Applicability::MaybeIncorrect,
-                    );
-                }
-                err.emit();
+                let (suggest_lifetime, snippet) =
+                    if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
+                        (Some(span), lifetime_src)
+                    } else {
+                        (None, String::new())
+                    };
+                self.dcx().emit_err(LifetimeAfterMut { span, suggest_lifetime, snippet });
 
                 opt_lifetime = Some(self.expect_lifetime());
             }
@@ -475,14 +545,7 @@ impl<'a> Parser<'a> {
         {
             // We have `&dyn mut ...`, which is invalid and should be `&mut dyn ...`.
             let span = and_span.to(self.look_ahead(1, |t| t.span));
-            let mut err = self.struct_span_err(span, "`mut` must precede `dyn`");
-            err.span_suggestion(
-                span,
-                "place `mut` before `dyn`",
-                "&mut dyn",
-                Applicability::MachineApplicable,
-            );
-            err.emit();
+            self.dcx().emit_err(DynAfterMut { span });
 
             // Recovery
             mutbl = Mutability::Mut;
@@ -491,14 +554,14 @@ impl<'a> Parser<'a> {
             self.bump_with((dyn_tok, dyn_tok_sp));
         }
         let ty = self.parse_ty_no_plus()?;
-        Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }))
+        Ok(TyKind::Ref(opt_lifetime, MutTy { ty, mutbl }))
     }
 
     // Parses the `typeof(EXPR)`.
     // To avoid ambiguity, the type is surrounded by parentheses.
     fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
         self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
-        let expr = self.parse_anon_const_expr()?;
+        let expr = self.parse_expr_anon_const()?;
         self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
         Ok(TyKind::Typeof(expr))
     }
@@ -515,7 +578,8 @@ impl<'a> Parser<'a> {
     fn parse_ty_bare_fn(
         &mut self,
         lo: Span,
-        params: Vec<GenericParam>,
+        mut params: ThinVec<GenericParam>,
+        param_insertion_point: Option<Span>,
         recover_return_sign: RecoverReturnSign,
     ) -> PResult<'a, TyKind> {
         let inherited_vis = rustc_ast::Visibility {
@@ -524,40 +588,86 @@ impl<'a> Parser<'a> {
             tokens: None,
         };
         let span_start = self.token.span;
-        let ast::FnHeader { ext, unsafety, constness, asyncness } =
-            self.parse_fn_front_matter(&inherited_vis)?;
+        let ast::FnHeader { ext, unsafety, constness, coroutine_kind } =
+            self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
+        if self.may_recover() && self.token.kind == TokenKind::Lt {
+            self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
+        }
         let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
         let whole_span = lo.to(self.prev_token.span);
         if let ast::Const::Yes(span) = constness {
             // If we ever start to allow `const fn()`, then update
             // feature gating for `#![feature(const_extern_fn)]` to
             // cover it.
-            self.error_fn_ptr_bad_qualifier(whole_span, span, "const");
+            self.dcx().emit_err(FnPointerCannotBeConst { span: whole_span, qualifier: span });
         }
-        if let ast::Async::Yes { span, .. } = asyncness {
-            self.error_fn_ptr_bad_qualifier(whole_span, span, "async");
+        if let Some(ast::CoroutineKind::Async { span, .. }) = coroutine_kind {
+            self.dcx().emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
         }
+        // FIXME(gen_blocks): emit a similar error for `gen fn()`
         let decl_span = span_start.to(self.token.span);
         Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl, decl_span })))
     }
 
-    /// Emit an error for the given bad function pointer qualifier.
-    fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
-        self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
-            .span_label(qual_span, format!("`{}` because of this", qual))
-            .span_suggestion_short(
-                qual_span,
-                &format!("remove the `{}` qualifier", qual),
-                "",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+    /// Recover from function pointer types with a generic parameter list (e.g. `fn<'a>(&'a str)`).
+    fn recover_fn_ptr_with_generics(
+        &mut self,
+        lo: Span,
+        params: &mut ThinVec<GenericParam>,
+        param_insertion_point: Option<Span>,
+    ) -> PResult<'a, ()> {
+        let generics = self.parse_generics()?;
+        let arity = generics.params.len();
+
+        let mut lifetimes: ThinVec<_> = generics
+            .params
+            .into_iter()
+            .filter(|param| matches!(param.kind, ast::GenericParamKind::Lifetime))
+            .collect();
+
+        let sugg = if !lifetimes.is_empty() {
+            let snippet =
+                lifetimes.iter().map(|param| param.ident.as_str()).intersperse(", ").collect();
+
+            let (left, snippet) = if let Some(span) = param_insertion_point {
+                (span, if params.is_empty() { snippet } else { format!(", {snippet}") })
+            } else {
+                (lo.shrink_to_lo(), format!("for<{snippet}> "))
+            };
+
+            Some(FnPtrWithGenericsSugg {
+                left,
+                snippet,
+                right: generics.span,
+                arity,
+                for_param_list_exists: param_insertion_point.is_some(),
+            })
+        } else {
+            None
+        };
+
+        self.dcx().emit_err(FnPtrWithGenerics { span: generics.span, sugg });
+        params.append(&mut lifetimes);
+        Ok(())
     }
 
     /// Parses an `impl B0 + ... + Bn` type.
     fn parse_impl_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
         // Always parse bounds greedily for better error recovery.
-        let bounds = self.parse_generic_bounds(None)?;
+        if self.token.is_lifetime() {
+            self.look_ahead(1, |t| {
+                if let token::Ident(sym, _) = t.kind {
+                    // parse pattern with "'a Sized" we're supposed to give suggestion like
+                    // "'a + Sized"
+                    self.dcx().emit_err(errors::MissingPlusBounds {
+                        span: self.token.span,
+                        hi: self.token.span.shrink_to_hi(),
+                        sym,
+                    });
+                }
+            })
+        }
+        let bounds = self.parse_generic_bounds()?;
         *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
         Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
     }
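
The lifetime lookahead added to `parse_impl_ty` above targets `impl` types where the `+` between a leading lifetime and the following bound was dropped (e.g. `impl 'a Sized`), emitting `MissingPlusBounds` with a suggestion to insert the `+`. A minimal, stable sketch of the accepted shape (the function name is illustrative, not part of this patch):

    // The bound list after `impl` joins a lifetime and a trait with `+`.
    fn make_static_sized() -> impl 'static + Sized {
        42_u32
    }

    fn main() {
        let _value = make_static_sized();
    }
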
@@ -565,9 +675,11 @@ impl<'a> Parser<'a> {
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
         self.check_keyword(kw::Dyn)
-            && (!self.token.uninterpolated_span().rust_2015()
+            && (self.token.uninterpolated_span().at_least_rust_2018()
                 || self.look_ahead(1, |t| {
-                    t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t)
+                    (can_begin_dyn_bound_in_edition_2015(t)
+                        || t.kind == TokenKind::BinOp(token::Star))
+                        && !can_continue_type_after_non_fn_ident(t)
                 }))
     }
 
@@ -575,11 +687,21 @@ impl<'a> Parser<'a> {
     ///
     /// Note that this does *not* parse bare trait objects.
     fn parse_dyn_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
+        let lo = self.token.span;
         self.bump(); // `dyn`
+
+        // parse dyn* types
+        let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
+            self.sess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
+            TraitObjectSyntax::DynStar
+        } else {
+            TraitObjectSyntax::Dyn
+        };
+
         // Always parse bounds greedily for better error recovery.
-        let bounds = self.parse_generic_bounds(None)?;
+        let bounds = self.parse_generic_bounds()?;
         *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
-        Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::Dyn))
+        Ok(TyKind::TraitObject(bounds, syntax))
     }
 
     /// Parses a type starting with a path.
@@ -598,141 +720,86 @@ impl<'a> Parser<'a> {
         let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
         if self.eat(&token::Not) {
             // Macro invocation in type position
-            Ok(TyKind::MacCall(MacCall {
-                path,
-                args: self.parse_mac_args()?,
-                prior_type_ascription: self.last_type_ascription,
-            }))
+            Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {
             // `Trait1 + Trait2 + 'a`
-            self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
+            self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
         } else {
             // Just a type path.
             Ok(TyKind::Path(None, path))
         }
     }
 
-    fn error_illegal_c_varadic_ty(&self, lo: Span) {
-        struct_span_err!(
-            self.sess.span_diagnostic,
-            lo.to(self.prev_token.span),
-            E0743,
-            "C-variadic type `...` may not be nested inside another type",
-        )
-        .emit();
-    }
-
-    pub(super) fn parse_generic_bounds(
-        &mut self,
-        colon_span: Option<Span>,
-    ) -> PResult<'a, GenericBounds> {
-        self.parse_generic_bounds_common(AllowPlus::Yes, colon_span)
+    pub(super) fn parse_generic_bounds(&mut self) -> PResult<'a, GenericBounds> {
+        self.parse_generic_bounds_common(AllowPlus::Yes)
     }
 
     /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
     ///
     /// See `parse_generic_bound` for the `BOUND` grammar.
-    fn parse_generic_bounds_common(
-        &mut self,
-        allow_plus: AllowPlus,
-        colon_span: Option<Span>,
-    ) -> PResult<'a, GenericBounds> {
+    fn parse_generic_bounds_common(&mut self, allow_plus: AllowPlus) -> PResult<'a, GenericBounds> {
         let mut bounds = Vec::new();
-        let mut negative_bounds = Vec::new();
 
-        while self.can_begin_bound() || self.token.is_keyword(kw::Dyn) {
+        // In addition to looping while we find generic bounds, we continue even if
+        // we find a keyword. This is necessary for error recovery on, for example,
+        // `impl fn()`. The only keyword that can go after generic bounds is `where`,
+        // so stop if we see it.
+        // We also continue if we find types (not traits), again for error recovery.
+        while self.can_begin_bound()
+            || (self.may_recover()
+                && (self.token.can_begin_type()
+                    || (self.token.is_reserved_ident() && !self.token.is_keyword(kw::Where))))
+        {
             if self.token.is_keyword(kw::Dyn) {
                 // Account for `&dyn Trait + dyn Other`.
-                self.struct_span_err(self.token.span, "invalid `dyn` keyword")
-                    .help("`dyn` is only needed at the start of a trait `+`-separated list")
-                    .span_suggestion(
-                        self.token.span,
-                        "remove this keyword",
-                        "",
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                self.dcx().emit_err(InvalidDynKeyword { span: self.token.span });
                 self.bump();
             }
-            match self.parse_generic_bound()? {
-                Ok(bound) => bounds.push(bound),
-                Err(neg_sp) => negative_bounds.push(neg_sp),
-            }
+            bounds.push(self.parse_generic_bound()?);
             if allow_plus == AllowPlus::No || !self.eat_plus() {
                 break;
             }
         }
 
-        if !negative_bounds.is_empty() {
-            self.error_negative_bounds(colon_span, &bounds, negative_bounds);
-        }
-
         Ok(bounds)
     }
 
+    pub(super) fn can_begin_anon_struct_or_union(&mut self) -> bool {
+        (self.token.is_keyword(kw::Struct) || self.token.is_keyword(kw::Union))
+            && self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Brace))
+    }
+
     /// Can the current token begin a bound?
     fn can_begin_bound(&mut self) -> bool {
-        // This needs to be synchronized with `TokenKind::can_begin_bound`.
         self.check_path()
-        || self.check_lifetime()
-        || self.check(&token::Not) // Used for error reporting only.
-        || self.check(&token::Question)
-        || self.check(&token::Tilde)
-        || self.check_keyword(kw::For)
-        || self.check(&token::OpenDelim(Delimiter::Parenthesis))
-    }
-
-    fn error_negative_bounds(
-        &self,
-        colon_span: Option<Span>,
-        bounds: &[GenericBound],
-        negative_bounds: Vec<Span>,
-    ) {
-        let negative_bounds_len = negative_bounds.len();
-        let last_span = *negative_bounds.last().expect("no negative bounds, but still error?");
-        let mut err = self.struct_span_err(negative_bounds, "negative bounds are not supported");
-        err.span_label(last_span, "negative bounds are not supported");
-        if let Some(bound_list) = colon_span {
-            let bound_list = bound_list.to(self.prev_token.span);
-            let mut new_bound_list = String::new();
-            if !bounds.is_empty() {
-                let mut snippets = bounds.iter().map(|bound| self.span_to_snippet(bound.span()));
-                while let Some(Ok(snippet)) = snippets.next() {
-                    new_bound_list.push_str(" + ");
-                    new_bound_list.push_str(&snippet);
-                }
-                new_bound_list = new_bound_list.replacen(" +", ":", 1);
-            }
-            err.tool_only_span_suggestion(
-                bound_list,
-                &format!("remove the bound{}", pluralize!(negative_bounds_len)),
-                new_bound_list,
-                Applicability::MachineApplicable,
-            );
-        }
-        err.emit();
+            || self.check_lifetime()
+            || self.check(&token::Not)
+            || self.check(&token::Question)
+            || self.check(&token::Tilde)
+            || self.check_keyword(kw::Const)
+            || self.check_keyword(kw::For)
+            || self.check(&token::OpenDelim(Delimiter::Parenthesis))
     }
 
     /// Parses a bound according to the grammar:
     /// ```ebnf
     /// BOUND = TY_BOUND | LT_BOUND
     /// ```
-    fn parse_generic_bound(&mut self) -> PResult<'a, Result<GenericBound, Span>> {
-        let anchor_lo = self.prev_token.span;
+    fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
         let lo = self.token.span;
+        let leading_token = self.prev_token.clone();
         let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
         let inner_lo = self.token.span;
-        let is_negative = self.eat(&token::Not);
 
-        let modifiers = self.parse_ty_bound_modifiers()?;
+        let modifiers = self.parse_trait_bound_modifiers()?;
         let bound = if self.token.is_lifetime() {
             self.error_lt_bound_with_modifiers(modifiers);
             self.parse_generic_lt_bound(lo, inner_lo, has_parens)?
         } else {
-            self.parse_generic_ty_bound(lo, has_parens, modifiers)?
+            self.parse_generic_ty_bound(lo, has_parens, modifiers, &leading_token)?
         };
 
-        Ok(if is_negative { Err(anchor_lo.to(self.prev_token.span)) } else { Ok(bound) })
+        Ok(bound)
     }
 
     /// Parses a lifetime ("outlives") bound, e.g. `'a`, according to:
@@ -755,18 +822,25 @@ impl<'a> Parser<'a> {
     }
 
     /// Emits an error if any trait bound modifiers were present.
-    fn error_lt_bound_with_modifiers(&self, modifiers: BoundModifiers) {
-        if let Some(span) = modifiers.maybe_const {
-            self.struct_span_err(
-                span,
-                "`~const` may only modify trait bounds, not lifetime bounds",
-            )
-            .emit();
+    fn error_lt_bound_with_modifiers(&self, modifiers: TraitBoundModifiers) {
+        match modifiers.constness {
+            BoundConstness::Never => {}
+            BoundConstness::Always(span) | BoundConstness::Maybe(span) => {
+                self.dcx().emit_err(errors::ModifierLifetime {
+                    span,
+                    modifier: modifiers.constness.as_str(),
+                });
+            }
         }
 
-        if let Some(span) = modifiers.maybe {
-            self.struct_span_err(span, "`?` may only modify trait bounds, not lifetime bounds")
-                .emit();
+        match modifiers.polarity {
+            BoundPolarity::Positive => {}
+            BoundPolarity::Negative(span) | BoundPolarity::Maybe(span) => {
+                self.dcx().emit_err(errors::ModifierLifetime {
+                    span,
+                    modifier: modifiers.polarity.as_str(),
+                });
+            }
         }
     }
 
@@ -774,19 +848,14 @@ impl<'a> Parser<'a> {
     fn recover_paren_lifetime(&mut self, lo: Span, inner_lo: Span) -> PResult<'a, ()> {
         let inner_span = inner_lo.to(self.prev_token.span);
         self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
-        let mut err = self.struct_span_err(
-            lo.to(self.prev_token.span),
-            "parenthesized lifetime bounds are not supported",
-        );
-        if let Ok(snippet) = self.span_to_snippet(inner_span) {
-            err.span_suggestion_short(
-                lo.to(self.prev_token.span),
-                "remove the parentheses",
-                snippet,
-                Applicability::MachineApplicable,
-            );
-        }
-        err.emit();
+        let span = lo.to(self.prev_token.span);
+        let (sugg, snippet) = if let Ok(snippet) = self.span_to_snippet(inner_span) {
+            (Some(span), snippet)
+        } else {
+            (None, String::new())
+        };
+
+        self.dcx().emit_err(errors::ParenthesizedLifetime { span, sugg, snippet });
         Ok(())
     }
 
@@ -795,28 +864,56 @@ impl<'a> Parser<'a> {
     /// If no modifiers are present, this does not consume any tokens.
     ///
     /// ```ebnf
-    /// TY_BOUND_MODIFIERS = ["~const"] ["?"]
+    /// TRAIT_BOUND_MODIFIERS = [["~"] "const"] ["?" | "!"]
     /// ```
-    fn parse_ty_bound_modifiers(&mut self) -> PResult<'a, BoundModifiers> {
-        let maybe_const = if self.eat(&token::Tilde) {
+    fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
+        let constness = if self.eat(&token::Tilde) {
             let tilde = self.prev_token.span;
             self.expect_keyword(kw::Const)?;
             let span = tilde.to(self.prev_token.span);
             self.sess.gated_spans.gate(sym::const_trait_impl, span);
-            Some(span)
+            BoundConstness::Maybe(span)
+        } else if self.eat_keyword(kw::Const) {
+            self.sess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
+            BoundConstness::Always(self.prev_token.span)
         } else {
-            None
+            BoundConstness::Never
         };
 
-        let maybe = if self.eat(&token::Question) { Some(self.prev_token.span) } else { None };
+        let asyncness = if self.token.span.at_least_rust_2018() && self.eat_keyword(kw::Async) {
+            self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span);
+            BoundAsyncness::Async(self.prev_token.span)
+        } else if self.may_recover()
+            && self.token.span.is_rust_2015()
+            && self.is_kw_followed_by_ident(kw::Async)
+        {
+            self.bump(); // eat `async`
+            self.dcx().emit_err(errors::AsyncBoundModifierIn2015 {
+                span: self.prev_token.span,
+                help: HelpUseLatestEdition::new(),
+            });
+            self.sess.gated_spans.gate(sym::async_closure, self.prev_token.span);
+            BoundAsyncness::Async(self.prev_token.span)
+        } else {
+            BoundAsyncness::Normal
+        };
 
-        Ok(BoundModifiers { maybe, maybe_const })
+        let polarity = if self.eat(&token::Question) {
+            BoundPolarity::Maybe(self.prev_token.span)
+        } else if self.eat(&token::Not) {
+            self.sess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
+            BoundPolarity::Negative(self.prev_token.span)
+        } else {
+            BoundPolarity::Positive
+        };
+
+        Ok(TraitBoundModifiers { constness, asyncness, polarity })
     }
 
     /// Parses a type bound according to:
     /// ```ebnf
     /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
-    /// TY_BOUND_NOPAREN = [TY_BOUND_MODIFIERS] [for<LT_PARAM_DEFS>] SIMPLE_PATH
+    /// TY_BOUND_NOPAREN = [TRAIT_BOUND_MODIFIERS] [for<LT_PARAM_DEFS>] SIMPLE_PATH
     /// ```
     ///
     /// For example, this grammar accepts `~const ?for<'a: 'b> m::Trait<'a>`.
@@ -824,40 +921,126 @@ impl<'a> Parser<'a> {
         &mut self,
         lo: Span,
         has_parens: bool,
-        modifiers: BoundModifiers,
+        modifiers: TraitBoundModifiers,
+        leading_token: &Token,
     ) -> PResult<'a, GenericBound> {
-        let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
-        let path = self.parse_path(PathStyle::Type)?;
+        let mut lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+        let mut path = if self.token.is_keyword(kw::Fn)
+            && self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
+            && let Some(path) = self.recover_path_from_fn()
+        {
+            path
+        } else if !self.token.is_path_start() && self.token.can_begin_type() {
+            let ty = self.parse_ty_no_plus()?;
+            // Instead of finding a path (a trait), we found a type.
+            let mut err = self.dcx().struct_span_err(ty.span, "expected a trait, found type");
+
+            // If we can recover, try to extract a path from the type. Note
+            // that we do not use the try operator when parsing the type because
+            // if it fails then we get a parser error which we don't want (we're trying
+            // to recover from errors, not make more).
+            let path = if self.may_recover() {
+                let (span, message, sugg, path, applicability) = match &ty.kind {
+                    TyKind::Ptr(..) | TyKind::Ref(..)
+                        if let TyKind::Path(_, path) = &ty.peel_refs().kind =>
+                    {
+                        (
+                            ty.span.until(path.span),
+                            "consider removing the indirection",
+                            "",
+                            path,
+                            Applicability::MaybeIncorrect,
+                        )
+                    }
+                    TyKind::ImplTrait(_, bounds)
+                        if let [GenericBound::Trait(tr, ..), ..] = bounds.as_slice() =>
+                    {
+                        (
+                            ty.span.until(tr.span),
+                            "use the trait bounds directly",
+                            "",
+                            &tr.trait_ref.path,
+                            Applicability::MachineApplicable,
+                        )
+                    }
+                    _ => return Err(err),
+                };
+
+                err.span_suggestion_verbose(span, message, sugg, applicability);
+
+                path.clone()
+            } else {
+                return Err(err);
+            };
+
+            err.emit();
+
+            path
+        } else {
+            self.parse_path(PathStyle::Type)?
+        };
+
+        if self.may_recover() && self.token == TokenKind::OpenDelim(Delimiter::Parenthesis) {
+            self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?;
+        }
+
         if has_parens {
-            if self.token.is_like_plus() {
-                // Someone has written something like `&dyn (Trait + Other)`. The correct code
-                // would be `&(dyn Trait + Other)`, but we don't have access to the appropriate
-                // span to suggest that. When written as `&dyn Trait + Other`, an appropriate
-                // suggestion is given.
+            // Someone has written something like `&dyn (Trait + Other)`. The correct code
+            // would be `&(dyn Trait + Other)`
+            if self.token.is_like_plus() && leading_token.is_keyword(kw::Dyn) {
                 let bounds = vec![];
                 self.parse_remaining_bounds(bounds, true)?;
                 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
-                let sp = vec![lo, self.prev_token.span];
-                let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect();
-                self.struct_span_err(sp, "incorrect braces around trait bounds")
-                    .multipart_suggestion(
-                        "remove the parentheses",
-                        sugg,
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                self.dcx().emit_err(errors::IncorrectParensTraitBounds {
+                    span: vec![lo, self.prev_token.span],
+                    sugg: errors::IncorrectParensTraitBoundsSugg {
+                        wrong_span: leading_token.span.shrink_to_hi().to(lo),
+                        new_span: leading_token.span.shrink_to_lo(),
+                    },
+                });
             } else {
                 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
             }
         }
 
-        let modifier = modifiers.to_trait_bound_modifier();
         let poly_trait = PolyTraitRef::new(lifetime_defs, path, lo.to(self.prev_token.span));
-        Ok(GenericBound::Trait(poly_trait, modifier))
+        Ok(GenericBound::Trait(poly_trait, modifiers))
+    }
+
+    // recovers a `Fn(..)` parenthesized-style path from `fn(..)`
+    fn recover_path_from_fn(&mut self) -> Option<ast::Path> {
+        let fn_token_span = self.token.span;
+        self.bump();
+        let args_lo = self.token.span;
+        let snapshot = self.create_snapshot_for_diagnostic();
+        match self.parse_fn_decl(|_| false, AllowPlus::No, RecoverReturnSign::OnlyFatArrow) {
+            Ok(decl) => {
+                self.dcx().emit_err(ExpectedFnPathFoundFnKeyword { fn_token_span });
+                Some(ast::Path {
+                    span: fn_token_span.to(self.prev_token.span),
+                    segments: thin_vec![ast::PathSegment {
+                        ident: Ident::new(Symbol::intern("Fn"), fn_token_span),
+                        id: DUMMY_NODE_ID,
+                        args: Some(P(ast::GenericArgs::Parenthesized(ast::ParenthesizedArgs {
+                            span: args_lo.to(self.prev_token.span),
+                            inputs: decl.inputs.iter().map(|a| a.ty.clone()).collect(),
+                            inputs_span: args_lo.until(decl.output.span()),
+                            output: decl.output.clone(),
+                        }))),
+                    }],
+                    tokens: None,
+                })
+            }
+            Err(diag) => {
+                diag.cancel();
+                self.restore_snapshot(snapshot);
+                None
+            }
+        }
     }
 
     /// Optionally parses `for<$generic_params>`.
-    pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
+    pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, ThinVec<GenericParam>> {
         if self.eat_keyword(kw::For) {
             self.expect_lt()?;
             let params = self.parse_generic_params()?;
@@ -866,8 +1049,98 @@ impl<'a> Parser<'a> {
             // parameters, and the lifetime parameters must not have bounds.
             Ok(params)
         } else {
-            Ok(Vec::new())
+            Ok(ThinVec::new())
+        }
+    }
+
+    /// Recover from `Fn`-family traits (Fn, FnMut, FnOnce) with lifetime arguments
+    /// (e.g. `FnOnce<'a>(&'a str) -> bool`). Everything up to the generic
+    /// arguments has already been eaten.
+    fn recover_fn_trait_with_lifetime_params(
+        &mut self,
+        fn_path: &mut ast::Path,
+        lifetime_defs: &mut ThinVec<GenericParam>,
+    ) -> PResult<'a, ()> {
+        let fn_path_segment = fn_path.segments.last_mut().unwrap();
+        let generic_args = if let Some(p_args) = &fn_path_segment.args {
+            p_args.clone().into_inner()
+        } else {
+            // Normally we wouldn't get here, because the caller should already have
+            // parsed the generic arguments (otherwise this function couldn't be reached).
+            return Ok(());
+        };
+        let lifetimes =
+            if let ast::GenericArgs::AngleBracketed(ast::AngleBracketedArgs { span: _, args }) =
+                &generic_args
+            {
+                args.into_iter()
+                    .filter_map(|arg| {
+                        if let ast::AngleBracketedArg::Arg(generic_arg) = arg
+                            && let ast::GenericArg::Lifetime(lifetime) = generic_arg
+                        {
+                            Some(lifetime)
+                        } else {
+                            None
+                        }
+                    })
+                    .collect()
+            } else {
+                Vec::new()
+            };
+        // Only try to recover if the trait has lifetime params.
+        if lifetimes.is_empty() {
+            return Ok(());
+        }
+
+        // Parse `(T, U) -> R`.
+        let inputs_lo = self.token.span;
+        let inputs: ThinVec<_> =
+            self.parse_fn_params(|_| false)?.into_iter().map(|input| input.ty).collect();
+        let inputs_span = inputs_lo.to(self.prev_token.span);
+        let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?;
+        let args = ast::ParenthesizedArgs {
+            span: fn_path_segment.span().to(self.prev_token.span),
+            inputs,
+            inputs_span,
+            output,
         }
+        .into();
+        *fn_path_segment = ast::PathSegment {
+            ident: fn_path_segment.ident,
+            args: Some(args),
+            id: ast::DUMMY_NODE_ID,
+        };
+
+        // Convert parsed `<'a>` in `Fn<'a>` into `for<'a>`.
+        let mut generic_params = lifetimes
+            .iter()
+            .map(|lt| GenericParam {
+                id: lt.id,
+                ident: lt.ident,
+                attrs: ast::AttrVec::new(),
+                bounds: Vec::new(),
+                is_placeholder: false,
+                kind: ast::GenericParamKind::Lifetime,
+                colon_span: None,
+            })
+            .collect::<ThinVec<GenericParam>>();
+        lifetime_defs.append(&mut generic_params);
+
+        let generic_args_span = generic_args.span();
+        let snippet = format!(
+            "for<{}> ",
+            lifetimes.iter().map(|lt| lt.ident.as_str()).intersperse(", ").collect::<String>(),
+        );
+        let before_fn_path = fn_path.span.shrink_to_lo();
+        self.dcx()
+            .struct_span_err(generic_args_span, "`Fn` traits cannot take lifetime parameters")
+            .with_multipart_suggestion(
+                "consider using a higher-ranked trait bound instead",
+                vec![(generic_args_span, "".to_owned()), (before_fn_path, snippet)],
+                Applicability::MaybeIncorrect,
+            )
+            .emit();
+        Ok(())
     }
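
The recovery above reports that `Fn` traits cannot take lifetime parameters and suggests rewriting `Fn<'a>(&'a str) -> bool`-style bounds as a higher-ranked trait bound. A minimal sketch of the suggested `for<'a>` spelling (function names are illustrative, not part of this patch):

    // Lifetime parameters on the `Fn` traits are expressed with a
    // higher-ranked trait bound rather than `Fn<'a>(...)`.
    fn check_both(check: impl for<'a> Fn(&'a str) -> bool) -> bool {
        check("alpha") && check("beta")
    }

    fn main() {
        println!("{}", check_both(|s| !s.is_empty()));
    }
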
 
     pub(super) fn check_lifetime(&mut self) -> bool {
@@ -881,7 +1154,7 @@ impl<'a> Parser<'a> {
             self.bump();
             Lifetime { ident, id: ast::DUMMY_NODE_ID }
         } else {
-            self.span_bug(self.token.span, "not a lifetime")
+            self.dcx().span_bug(self.token.span, "not a lifetime")
         }
     }
 
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 47477898b24..2fafbd6d97b 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -1,17 +1,19 @@
 //! Meta-syntax validation logic of attributes for post-expansion.
 
-use crate::parse_in;
+use crate::{errors, parse_in};
 
+use rustc_ast::token::Delimiter;
 use rustc_ast::tokenstream::DelimSpan;
-use rustc_ast::{self as ast, Attribute, MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind};
-use rustc_ast_pretty::pprust;
+use rustc_ast::MetaItemKind;
+use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MetaItem};
 use rustc_errors::{Applicability, FatalError, PResult};
 use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
+use rustc_session::errors::report_lit_error;
 use rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT;
 use rustc_session::parse::ParseSess;
-use rustc_span::{sym, Symbol};
+use rustc_span::{sym, Span, Symbol};
 
-pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
+pub fn check_attr(sess: &ParseSess, attr: &Attribute) {
     if attr.is_doc_comment() {
         return;
     }
@@ -24,10 +26,10 @@ pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
         Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => {
             check_builtin_attribute(sess, attr, *name, *template)
         }
-        _ if let MacArgs::Eq(..) = attr.get_normal_item().args => {
+        _ if let AttrArgs::Eq(..) = attr.get_normal_item().args => {
             // All key-value attributes are restricted to meta-item syntax.
             parse_meta(sess, attr)
-                .map_err(|mut err| {
+                .map_err(|err| {
                     err.emit();
                 })
                 .ok();
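
As the hunk above notes, key-value attributes are restricted to meta-item syntax, and (as the following hunk shows) the value must be an unsuffixed literal rather than an arbitrary expression. A minimal sketch of an accepted key-value attribute (the attribute choice is illustrative only):

    // Meta-item form: a path followed by `=` and an unsuffixed literal.
    #[allow(dead_code)]
    #[deprecated = "use `renamed` instead"]
    fn original() {}

    fn renamed() {}

    fn main() {
        renamed();
    }
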
@@ -42,58 +44,81 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
         span: attr.span,
         path: item.path.clone(),
         kind: match &item.args {
-            MacArgs::Empty => MetaItemKind::Word,
-            MacArgs::Delimited(dspan, delim, t) => {
-                check_meta_bad_delim(sess, *dspan, *delim, "wrong meta list delimiters");
-                let nmis = parse_in(sess, t.clone(), "meta list", |p| p.parse_meta_seq_top())?;
+            AttrArgs::Empty => MetaItemKind::Word,
+            AttrArgs::Delimited(DelimArgs { dspan, delim, tokens }) => {
+                check_meta_bad_delim(sess, *dspan, *delim);
+                let nmis = parse_in(sess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
                 MetaItemKind::List(nmis)
             }
-            MacArgs::Eq(_, MacArgsEq::Ast(expr)) => {
-                if let ast::ExprKind::Lit(lit) = &expr.kind {
-                    if !lit.kind.is_unsuffixed() {
-                        let mut err = sess.span_diagnostic.struct_span_err(
-                            lit.span,
-                            "suffixed literals are not allowed in attributes",
-                        );
-                        err.help(
-                            "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
-                            use an unsuffixed version (`1`, `1.0`, etc.)",
-                        );
-                        return Err(err);
-                    } else {
-                        MetaItemKind::NameValue(lit.clone())
-                    }
+            AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
+                if let ast::ExprKind::Lit(token_lit) = expr.kind {
+                    let res = ast::MetaItemLit::from_token_lit(token_lit, expr.span);
+                    let res = match res {
+                        Ok(lit) => {
+                            if token_lit.suffix.is_some() {
+                                let mut err = sess.dcx.struct_span_err(
+                                    expr.span,
+                                    "suffixed literals are not allowed in attributes",
+                                );
+                                err.help(
+                                    "instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), \
+                                    use an unsuffixed version (`1`, `1.0`, etc.)",
+                                );
+                                return Err(err);
+                            } else {
+                                MetaItemKind::NameValue(lit)
+                            }
+                        }
+                        Err(err) => {
+                            report_lit_error(sess, err, token_lit, expr.span);
+                            let lit = ast::MetaItemLit {
+                                symbol: token_lit.symbol,
+                                suffix: token_lit.suffix,
+                                kind: ast::LitKind::Err,
+                                span: expr.span,
+                            };
+                            MetaItemKind::NameValue(lit)
+                        }
+                    };
+                    res
                 } else {
-                    // The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
-                    // happen with e.g. `#[foo = include_str!("non-existent-file.rs")]`; in that
-                    // case we delay the error because an earlier error will have already been
-                    // reported.
-                    let msg = format!("unexpected expression: `{}`", pprust::expr_to_string(expr));
-                    let mut err = sess.span_diagnostic.struct_span_err(expr.span, msg);
+                    // Example cases:
+                    // - `#[foo = 1+1]`: results in `ast::ExprKind::BinOp`.
+                    // - `#[foo = include_str!("nonexistent-file.rs")]`:
+                    //   results in `ast::ExprKind::Err`. In that case we delay
+                    //   the error because an earlier error will have already
+                    //   been reported.
+                    let msg = format!("attribute value must be a literal");
+                    let mut err = sess.dcx.struct_span_err(expr.span, msg);
                     if let ast::ExprKind::Err = expr.kind {
                         err.downgrade_to_delayed_bug();
                     }
                     return Err(err);
                 }
             }
-            MacArgs::Eq(_, MacArgsEq::Hir(lit)) => MetaItemKind::NameValue(lit.clone()),
+            AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => MetaItemKind::NameValue(lit.clone()),
         },
     })
 }
 
-pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: MacDelimiter, msg: &str) {
-    if let ast::MacDelimiter::Parenthesis = delim {
+pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimiter) {
+    if let Delimiter::Parenthesis = delim {
         return;
     }
+    sess.dcx.emit_err(errors::MetaBadDelim {
+        span: span.entire(),
+        sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
+    });
+}
 
-    sess.span_diagnostic
-        .struct_span_err(span.entire(), msg)
-        .multipart_suggestion(
-            "the delimiters should be `(` and `)`",
-            vec![(span.open, "(".to_string()), (span.close, ")".to_string())],
-            Applicability::MachineApplicable,
-        )
-        .emit();
+pub fn check_cfg_attr_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimiter) {
+    if let Delimiter::Parenthesis = delim {
+        return;
+    }
+    sess.dcx.emit_err(errors::CfgAttrBadDelim {
+        span: span.entire(),
+        sugg: errors::MetaBadDelimSugg { open: span.open, close: span.close },
+    });
 }
 
 /// Checks that the given meta-item is compatible with this `AttributeTemplate`.
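
Taken together, the rewritten `parse_meta` and the two delimiter checks accept or reject attribute arguments roughly as sketched below (based on the match arms above; the quoted messages are the ones constructed in this file, the rest is paraphrased):

    #[foo]             // AttrArgs::Empty                -> MetaItemKind::Word
    #[foo(bar, baz)]   // AttrArgs::Delimited with `()`  -> MetaItemKind::List
    #[foo{bar}]        // other delimiters               -> errors::MetaBadDelim, suggesting
                       //                                   `(` and `)` instead
    #[foo = "bar"]     // AttrArgs::Eq with a literal    -> MetaItemKind::NameValue
    #[foo = 1u8]       // suffixed literal               -> "suffixed literals are not
                       //                                   allowed in attributes"
    #[foo = 1 + 1]     // non-literal expression         -> "attribute value must be a literal"

Note that a token literal that fails `MetaItemLit::from_token_lit` is reported through `report_lit_error` and then replaced by a `LitKind::Err` placeholder, so later passes still see a `NameValue` meta item rather than losing the attribute entirely.
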
@@ -112,25 +137,34 @@ pub fn check_builtin_attribute(
     name: Symbol,
     template: AttributeTemplate,
 ) {
+    match parse_meta(sess, attr) {
+        Ok(meta) => check_builtin_meta_item(sess, &meta, attr.style, name, template),
+        Err(err) => {
+            err.emit();
+        }
+    }
+}
+
+pub fn check_builtin_meta_item(
+    sess: &ParseSess,
+    meta: &MetaItem,
+    style: ast::AttrStyle,
+    name: Symbol,
+    template: AttributeTemplate,
+) {
     // Some special attributes like `cfg` must be checked
     // before the generic check, so we skip them here.
     let should_skip = |name| name == sym::cfg;
 
-    match parse_meta(sess, attr) {
-        Ok(meta) => {
-            if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
-                emit_malformed_attribute(sess, attr, name, template);
-            }
-        }
-        Err(mut err) => {
-            err.emit();
-        }
+    if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
+        emit_malformed_attribute(sess, style, meta.span, name, template);
     }
 }
 
 fn emit_malformed_attribute(
     sess: &ParseSess,
-    attr: &Attribute,
+    style: ast::AttrStyle,
+    span: Span,
     name: Symbol,
     template: AttributeTemplate,
 ) {
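
Splitting the old `check_builtin_attribute` into a thin wrapper over `parse_meta` plus the new `check_builtin_meta_item` lets a caller that already holds a `MetaItem` run the template check without re-parsing the attribute. A hypothetical call using the signature introduced above; `meta`, `name`, and `template` are assumed to be in scope and are not part of this diff:

    // Only the template-compatibility check, with no re-parse of the attribute:
    check_builtin_meta_item(sess, &meta, ast::AttrStyle::Outer, name, template);
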
@@ -140,15 +174,15 @@ fn emit_malformed_attribute(
         matches!(name, sym::doc | sym::ignore | sym::inline | sym::link | sym::test | sym::bench)
     };
 
-    let error_msg = format!("malformed `{}` attribute input", name);
+    let error_msg = format!("malformed `{name}` attribute input");
     let mut msg = "attribute must be of the form ".to_owned();
     let mut suggestions = vec![];
     let mut first = true;
-    let inner = if attr.style == ast::AttrStyle::Inner { "!" } else { "" };
+    let inner = if style == ast::AttrStyle::Inner { "!" } else { "" };
     if template.word {
         first = false;
-        let code = format!("#{}[{}]", inner, name);
-        msg.push_str(&format!("`{}`", &code));
+        let code = format!("#{inner}[{name}]");
+        msg.push_str(&format!("`{code}`"));
         suggestions.push(code);
     }
     if let Some(descr) = template.list {
@@ -156,31 +190,32 @@ fn emit_malformed_attribute(
             msg.push_str(" or ");
         }
         first = false;
-        let code = format!("#{}[{}({})]", inner, name, descr);
-        msg.push_str(&format!("`{}`", &code));
+        let code = format!("#{inner}[{name}({descr})]");
+        msg.push_str(&format!("`{code}`"));
         suggestions.push(code);
     }
     if let Some(descr) = template.name_value_str {
         if !first {
             msg.push_str(" or ");
         }
-        let code = format!("#{}[{} = \"{}\"]", inner, name, descr);
-        msg.push_str(&format!("`{}`", &code));
+        let code = format!("#{inner}[{name} = \"{descr}\"]");
+        msg.push_str(&format!("`{code}`"));
         suggestions.push(code);
     }
+    suggestions.sort();
     if should_warn(name) {
-        sess.buffer_lint(&ILL_FORMED_ATTRIBUTE_INPUT, attr.span, ast::CRATE_NODE_ID, &msg);
+        sess.buffer_lint(ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, msg);
     } else {
-        sess.span_diagnostic
-            .struct_span_err(attr.span, &error_msg)
-            .span_suggestions(
-                attr.span,
+        sess.dcx
+            .struct_span_err(span, error_msg)
+            .with_span_suggestions(
+                span,
                 if suggestions.len() == 1 {
                     "must be of the form"
                 } else {
                     "the following are the possible correct uses"
                 },
-                suggestions.into_iter(),
+                suggestions,
                 Applicability::HasPlaceholders,
             )
             .emit();
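
For reference, the message assembled here is driven entirely by the three `AttributeTemplate` fields consulted above (`word`, `list`, `name_value_str`). A sketch of the output for a word-or-list attribute; the concrete `descr` string is whatever `rustc_feature` registers for the attribute and is shown only to illustrate the shape:

    // template ~ AttributeTemplate { word: true, list: Some("always|never"), name_value_str: None, .. }
    // name     ~ sym::inline  (listed in `should_warn`, so the message is buffered as the
    //            ILL_FORMED_ATTRIBUTE_INPUT lint rather than emitted as a hard error)
    //
    // msg: attribute must be of the form `#[inline]` or `#[inline(always|never)]`
    // suggestions after `sort()`: ["#[inline(always|never)]", "#[inline]"]
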
@@ -193,7 +228,7 @@ pub fn emit_fatal_malformed_builtin_attribute(
     name: Symbol,
 ) -> ! {
     let template = BUILTIN_ATTRIBUTE_MAP.get(&name).expect("builtin attr defined").template;
-    emit_malformed_attribute(sess, attr, name, template);
+    emit_malformed_attribute(sess, attr.style, attr.span, name, template);
     // This is fatal, otherwise it will likely cause a cascade of other errors
     // (and an error here is expected to be very rare).
     FatalError.raise()