path: root/compiler/rustc_parse/src
Diffstat (limited to 'compiler/rustc_parse/src')
-rw-r--r--  compiler/rustc_parse/src/errors.rs                           1540
-rw-r--r--  compiler/rustc_parse/src/lexer/diagnostics.rs                 119
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                         248
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs                  150
-rw-r--r--  compiler/rustc_parse/src/lexer/unescape_error_reporting.rs    248
-rw-r--r--  compiler/rustc_parse/src/lexer/unicode_chars.rs               698
-rw-r--r--  compiler/rustc_parse/src/lib.rs                                46
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs                        62
-rw-r--r--  compiler/rustc_parse/src/parser/attr_wrapper.rs                21
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs                529
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs                      1063
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs                   169
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs                       608
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs                        289
-rw-r--r--  compiler/rustc_parse/src/parser/nonterminal.rs                 38
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs                        428
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs                        78
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs                       185
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs                         425
-rw-r--r--  compiler/rustc_parse/src/validate_attr.rs                      68
20 files changed, 4269 insertions(+), 2743 deletions(-)
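
The diff below mechanically renames the Fluent diagnostic slugs of this crate from the old `parser_` prefix to `parse_` (including fully qualifying sub-slugs such as `parse_suggestion`), switches the `fluent` import to the crate-local `fluent_generated` re-export, and adds several new diagnostic structs. For orientation, a minimal sketch of the derive pattern every struct in errors.rs follows is shown here; the struct, slug, Fluent wording, and emit call are illustrative assumptions, not lines taken from this diff:

    // Hypothetical entry in the crate's Fluent resource (wording illustrative):
    //     parse_example_unexpected_plus = unexpected `+` here
    //         .suggestion = remove the `+`

    use rustc_macros::Diagnostic;
    use rustc_span::Span;

    // Hypothetical diagnostic following the same pattern as the structs below:
    // the #[diag] slug names the Fluent message, #[primary_span] marks the span
    // the error points at, and #[suggestion] attaches a machine-applicable fix.
    #[derive(Diagnostic)]
    #[diag(parse_example_unexpected_plus)]
    pub(crate) struct ExampleUnexpectedPlus {
        #[primary_span]
        #[suggestion(code = "", applicability = "machine-applicable")]
        pub span: Span,
    }

    // At a use site the parser would emit it roughly like this
    // (assuming the usual ParseSess::emit_err entry point):
    //     self.sess.emit_err(ExampleUnexpectedPlus { span });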
diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs
index d59982f7063..63e5bc50513 100644
--- a/compiler/rustc_parse/src/errors.rs
+++ b/compiler/rustc_parse/src/errors.rs
@@ -1,15 +1,19 @@
+use std::borrow::Cow;
+
 use rustc_ast::token::Token;
-use rustc_ast::Path;
-use rustc_errors::{fluent, AddToDiagnostic, Applicability, EmissionGuarantee, IntoDiagnostic};
+use rustc_ast::{Path, Visibility};
+use rustc_errors::{AddToDiagnostic, Applicability, EmissionGuarantee, IntoDiagnostic};
 use rustc_macros::{Diagnostic, Subdiagnostic};
 use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_span::edition::{Edition, LATEST_STABLE_EDITION};
 use rustc_span::symbol::Ident;
 use rustc_span::{Span, Symbol};
 
+use crate::fluent_generated as fluent;
 use crate::parser::TokenDescription;
 
 #[derive(Diagnostic)]
-#[diag(parser_maybe_report_ambiguous_plus)]
+#[diag(parse_maybe_report_ambiguous_plus)]
 pub(crate) struct AmbiguousPlus {
     pub sum_ty: String,
     #[primary_span]
@@ -18,7 +22,7 @@ pub(crate) struct AmbiguousPlus {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_maybe_recover_from_bad_type_plus, code = "E0178")]
+#[diag(parse_maybe_recover_from_bad_type_plus, code = "E0178")]
 pub(crate) struct BadTypePlus {
     pub ty: String,
     #[primary_span]
@@ -30,7 +34,7 @@ pub(crate) struct BadTypePlus {
 #[derive(Subdiagnostic)]
 pub(crate) enum BadTypePlusSub {
     #[suggestion(
-        parser_add_paren,
+        parse_add_paren,
         code = "{sum_with_parens}",
         applicability = "machine-applicable"
     )]
@@ -39,12 +43,12 @@ pub(crate) enum BadTypePlusSub {
         #[primary_span]
         span: Span,
     },
-    #[label(parser_forgot_paren)]
+    #[label(parse_forgot_paren)]
     ForgotParen {
         #[primary_span]
         span: Span,
     },
-    #[label(parser_expect_path)]
+    #[label(parse_expect_path)]
     ExpectPath {
         #[primary_span]
         span: Span,
@@ -52,7 +56,7 @@ pub(crate) enum BadTypePlusSub {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_maybe_recover_from_bad_qpath_stage_2)]
+#[diag(parse_maybe_recover_from_bad_qpath_stage_2)]
 pub(crate) struct BadQPathStage2 {
     #[primary_span]
     #[suggestion(code = "", applicability = "maybe-incorrect")]
@@ -61,7 +65,7 @@ pub(crate) struct BadQPathStage2 {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_incorrect_semicolon)]
+#[diag(parse_incorrect_semicolon)]
 pub(crate) struct IncorrectSemicolon<'a> {
     #[primary_span]
     #[suggestion(style = "short", code = "", applicability = "machine-applicable")]
@@ -72,26 +76,26 @@ pub(crate) struct IncorrectSemicolon<'a> {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_incorrect_use_of_await)]
+#[diag(parse_incorrect_use_of_await)]
 pub(crate) struct IncorrectUseOfAwait {
     #[primary_span]
-    #[suggestion(parentheses_suggestion, code = "", applicability = "machine-applicable")]
+    #[suggestion(parse_parentheses_suggestion, code = "", applicability = "machine-applicable")]
     pub span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_incorrect_use_of_await)]
+#[diag(parse_incorrect_use_of_await)]
 pub(crate) struct IncorrectAwait {
     #[primary_span]
     pub span: Span,
-    #[suggestion(postfix_suggestion, code = "{expr}.await{question_mark}")]
+    #[suggestion(parse_postfix_suggestion, code = "{expr}.await{question_mark}")]
     pub sugg_span: (Span, Applicability),
     pub expr: String,
     pub question_mark: &'static str,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_in_in_typo)]
+#[diag(parse_in_in_typo)]
 pub(crate) struct InInTypo {
     #[primary_span]
     pub span: Span,
@@ -100,7 +104,7 @@ pub(crate) struct InInTypo {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_variable_declaration)]
+#[diag(parse_invalid_variable_declaration)]
 pub(crate) struct InvalidVariableDeclaration {
     #[primary_span]
     pub span: Span,
@@ -110,22 +114,22 @@ pub(crate) struct InvalidVariableDeclaration {
 
 #[derive(Subdiagnostic)]
 pub(crate) enum InvalidVariableDeclarationSub {
-    #[suggestion(parser_switch_mut_let_order, applicability = "maybe-incorrect", code = "let mut")]
+    #[suggestion(parse_switch_mut_let_order, applicability = "maybe-incorrect", code = "let mut")]
     SwitchMutLetOrder(#[primary_span] Span),
     #[suggestion(
-        parser_missing_let_before_mut,
+        parse_missing_let_before_mut,
         applicability = "machine-applicable",
         code = "let mut"
     )]
     MissingLet(#[primary_span] Span),
-    #[suggestion(parser_use_let_not_auto, applicability = "machine-applicable", code = "let")]
+    #[suggestion(parse_use_let_not_auto, applicability = "machine-applicable", code = "let")]
     UseLetNotAuto(#[primary_span] Span),
-    #[suggestion(parser_use_let_not_var, applicability = "machine-applicable", code = "let")]
+    #[suggestion(parse_use_let_not_var, applicability = "machine-applicable", code = "let")]
     UseLetNotVar(#[primary_span] Span),
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_comparison_operator)]
+#[diag(parse_invalid_comparison_operator)]
 pub(crate) struct InvalidComparisonOperator {
     #[primary_span]
     pub span: Span,
@@ -137,7 +141,7 @@ pub(crate) struct InvalidComparisonOperator {
 #[derive(Subdiagnostic)]
 pub(crate) enum InvalidComparisonOperatorSub {
     #[suggestion(
-        use_instead,
+        parse_use_instead,
         style = "short",
         applicability = "machine-applicable",
         code = "{correct}"
@@ -148,12 +152,12 @@ pub(crate) enum InvalidComparisonOperatorSub {
         invalid: String,
         correct: String,
     },
-    #[label(spaceship_operator_invalid)]
+    #[label(parse_spaceship_operator_invalid)]
     Spaceship(#[primary_span] Span),
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_logical_operator)]
+#[diag(parse_invalid_logical_operator)]
 #[note]
 pub(crate) struct InvalidLogicalOperator {
     #[primary_span]
@@ -166,14 +170,14 @@ pub(crate) struct InvalidLogicalOperator {
 #[derive(Subdiagnostic)]
 pub(crate) enum InvalidLogicalOperatorSub {
     #[suggestion(
-        use_amp_amp_for_conjunction,
+        parse_use_amp_amp_for_conjunction,
         style = "short",
         applicability = "machine-applicable",
         code = "&&"
     )]
     Conjunction(#[primary_span] Span),
     #[suggestion(
-        use_pipe_pipe_for_disjunction,
+        parse_use_pipe_pipe_for_disjunction,
         style = "short",
         applicability = "machine-applicable",
         code = "||"
@@ -182,7 +186,7 @@ pub(crate) enum InvalidLogicalOperatorSub {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_tilde_is_not_unary_operator)]
+#[diag(parse_tilde_is_not_unary_operator)]
 pub(crate) struct TildeAsUnaryOperator(
     #[primary_span]
     #[suggestion(style = "short", applicability = "machine-applicable", code = "!")]
@@ -190,7 +194,7 @@ pub(crate) struct TildeAsUnaryOperator(
 );
 
 #[derive(Diagnostic)]
-#[diag(parser_unexpected_token_after_not)]
+#[diag(parse_unexpected_token_after_not)]
 pub(crate) struct NotAsNegationOperator {
     #[primary_span]
     pub negated: Span,
@@ -202,7 +206,7 @@ pub(crate) struct NotAsNegationOperator {
 #[derive(Subdiagnostic)]
 pub enum NotAsNegationOperatorSub {
     #[suggestion(
-        parser_unexpected_token_after_not_default,
+        parse_unexpected_token_after_not_default,
         style = "short",
         applicability = "machine-applicable",
         code = "!"
@@ -210,7 +214,7 @@ pub enum NotAsNegationOperatorSub {
     SuggestNotDefault(#[primary_span] Span),
 
     #[suggestion(
-        parser_unexpected_token_after_not_bitwise,
+        parse_unexpected_token_after_not_bitwise,
         style = "short",
         applicability = "machine-applicable",
         code = "!"
@@ -218,7 +222,7 @@ pub enum NotAsNegationOperatorSub {
     SuggestNotBitwise(#[primary_span] Span),
 
     #[suggestion(
-        parser_unexpected_token_after_not_logical,
+        parse_unexpected_token_after_not_logical,
         style = "short",
         applicability = "machine-applicable",
         code = "!"
@@ -227,7 +231,7 @@ pub enum NotAsNegationOperatorSub {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_malformed_loop_label)]
+#[diag(parse_malformed_loop_label)]
 pub(crate) struct MalformedLoopLabel {
     #[primary_span]
     #[suggestion(applicability = "machine-applicable", code = "{correct_label}")]
@@ -236,7 +240,7 @@ pub(crate) struct MalformedLoopLabel {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_lifetime_in_borrow_expression)]
+#[diag(parse_lifetime_in_borrow_expression)]
 pub(crate) struct LifetimeInBorrowExpression {
     #[primary_span]
     pub span: Span,
@@ -246,27 +250,27 @@ pub(crate) struct LifetimeInBorrowExpression {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_field_expression_with_generic)]
+#[diag(parse_field_expression_with_generic)]
 pub(crate) struct FieldExpressionWithGeneric(#[primary_span] pub Span);
 
 #[derive(Diagnostic)]
-#[diag(parser_macro_invocation_with_qualified_path)]
+#[diag(parse_macro_invocation_with_qualified_path)]
 pub(crate) struct MacroInvocationWithQualifiedPath(#[primary_span] pub Span);
 
 #[derive(Diagnostic)]
-#[diag(parser_unexpected_token_after_label)]
+#[diag(parse_unexpected_token_after_label)]
 pub(crate) struct UnexpectedTokenAfterLabel {
     #[primary_span]
-    #[label(parser_unexpected_token_after_label)]
+    #[label(parse_unexpected_token_after_label)]
     pub span: Span,
-    #[suggestion(suggestion_remove_label, style = "verbose", code = "")]
+    #[suggestion(parse_suggestion_remove_label, style = "verbose", code = "")]
     pub remove_label: Option<Span>,
     #[subdiagnostic]
     pub enclose_in_block: Option<UnexpectedTokenAfterLabelSugg>,
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion_enclose_in_block, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion_enclose_in_block, applicability = "machine-applicable")]
 pub(crate) struct UnexpectedTokenAfterLabelSugg {
     #[suggestion_part(code = "{{ ")]
     pub left: Span,
@@ -275,7 +279,7 @@ pub(crate) struct UnexpectedTokenAfterLabelSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_require_colon_after_labeled_expression)]
+#[diag(parse_require_colon_after_labeled_expression)]
 #[note]
 pub(crate) struct RequireColonAfterLabeledExpression {
     #[primary_span]
@@ -287,7 +291,7 @@ pub(crate) struct RequireColonAfterLabeledExpression {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_do_catch_syntax_removed)]
+#[diag(parse_do_catch_syntax_removed)]
 #[note]
 pub(crate) struct DoCatchSyntaxRemoved {
     #[primary_span]
@@ -296,7 +300,7 @@ pub(crate) struct DoCatchSyntaxRemoved {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_float_literal_requires_integer_part)]
+#[diag(parse_float_literal_requires_integer_part)]
 pub(crate) struct FloatLiteralRequiresIntegerPart {
     #[primary_span]
     #[suggestion(applicability = "machine-applicable", code = "{correct}")]
@@ -305,62 +309,7 @@ pub(crate) struct FloatLiteralRequiresIntegerPart {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_int_literal_width)]
-#[help]
-pub(crate) struct InvalidIntLiteralWidth {
-    #[primary_span]
-    pub span: Span,
-    pub width: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_num_literal_base_prefix)]
-#[note]
-pub(crate) struct InvalidNumLiteralBasePrefix {
-    #[primary_span]
-    #[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
-    pub span: Span,
-    pub fixed: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_num_literal_suffix)]
-#[help]
-pub(crate) struct InvalidNumLiteralSuffix {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    pub suffix: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_float_literal_width)]
-#[help]
-pub(crate) struct InvalidFloatLiteralWidth {
-    #[primary_span]
-    pub span: Span,
-    pub width: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_float_literal_suffix)]
-#[help]
-pub(crate) struct InvalidFloatLiteralSuffix {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    pub suffix: String,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_int_literal_too_large)]
-pub(crate) struct IntLiteralTooLarge {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_missing_semicolon_before_array)]
+#[diag(parse_missing_semicolon_before_array)]
 pub(crate) struct MissingSemicolonBeforeArray {
     #[primary_span]
     pub open_delim: Span,
@@ -369,7 +318,7 @@ pub(crate) struct MissingSemicolonBeforeArray {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_expect_dotdot_not_dotdotdot)]
+#[diag(parse_expect_dotdot_not_dotdotdot)]
 pub(crate) struct MissingDotDot {
     #[primary_span]
     pub token_span: Span,
@@ -378,7 +327,7 @@ pub(crate) struct MissingDotDot {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_block_macro_segment)]
+#[diag(parse_invalid_block_macro_segment)]
 pub(crate) struct InvalidBlockMacroSegment {
     #[primary_span]
     pub span: Span,
@@ -387,41 +336,50 @@ pub(crate) struct InvalidBlockMacroSegment {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_if_expression_missing_then_block)]
+#[diag(parse_if_expression_missing_then_block)]
 pub(crate) struct IfExpressionMissingThenBlock {
     #[primary_span]
     pub if_span: Span,
     #[subdiagnostic]
-    pub sub: IfExpressionMissingThenBlockSub,
+    pub missing_then_block_sub: IfExpressionMissingThenBlockSub,
+    #[subdiagnostic]
+    pub let_else_sub: Option<IfExpressionLetSomeSub>,
 }
 
 #[derive(Subdiagnostic)]
 pub(crate) enum IfExpressionMissingThenBlockSub {
-    #[help(condition_possibly_unfinished)]
+    #[help(parse_condition_possibly_unfinished)]
     UnfinishedCondition(#[primary_span] Span),
-    #[help(add_then_block)]
+    #[help(parse_add_then_block)]
     AddThenBlock(#[primary_span] Span),
 }
 
+#[derive(Subdiagnostic)]
+#[suggestion(parse_extra_if_in_let_else, applicability = "maybe-incorrect", code = "")]
+pub(crate) struct IfExpressionLetSomeSub {
+    #[primary_span]
+    pub if_span: Span,
+}
+
 #[derive(Diagnostic)]
-#[diag(parser_if_expression_missing_condition)]
+#[diag(parse_if_expression_missing_condition)]
 pub(crate) struct IfExpressionMissingCondition {
     #[primary_span]
-    #[label(condition_label)]
+    #[label(parse_condition_label)]
     pub if_span: Span,
-    #[label(block_label)]
+    #[label(parse_block_label)]
     pub block_span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_expected_expression_found_let)]
+#[diag(parse_expected_expression_found_let)]
 pub(crate) struct ExpectedExpressionFoundLet {
     #[primary_span]
     pub span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_expect_eq_instead_of_eqeq)]
+#[diag(parse_expect_eq_instead_of_eqeq)]
 pub(crate) struct ExpectedEqForLetExpr {
     #[primary_span]
     pub span: Span,
@@ -430,7 +388,7 @@ pub(crate) struct ExpectedEqForLetExpr {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_expected_else_block)]
+#[diag(parse_expected_else_block)]
 pub(crate) struct ExpectedElseBlock {
     #[primary_span]
     pub first_tok_span: Span,
@@ -442,15 +400,15 @@ pub(crate) struct ExpectedElseBlock {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_outer_attribute_not_allowed_on_if_else)]
+#[diag(parse_outer_attribute_not_allowed_on_if_else)]
 pub(crate) struct OuterAttributeNotAllowedOnIfElse {
     #[primary_span]
     pub last: Span,
 
-    #[label(branch_label)]
+    #[label(parse_branch_label)]
     pub branch_span: Span,
 
-    #[label(ctx_label)]
+    #[label(parse_ctx_label)]
     pub ctx_span: Span,
     pub ctx: String,
 
@@ -459,7 +417,7 @@ pub(crate) struct OuterAttributeNotAllowedOnIfElse {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_missing_in_in_for_loop)]
+#[diag(parse_missing_in_in_for_loop)]
 pub(crate) struct MissingInInForLoop {
     #[primary_span]
     pub span: Span,
@@ -470,14 +428,42 @@ pub(crate) struct MissingInInForLoop {
 #[derive(Subdiagnostic)]
 pub(crate) enum MissingInInForLoopSub {
     // Has been misleading, at least in the past (closed Issue #48492), thus maybe-incorrect
-    #[suggestion(use_in_not_of, style = "short", applicability = "maybe-incorrect", code = "in")]
+    #[suggestion(
+        parse_use_in_not_of,
+        style = "short",
+        applicability = "maybe-incorrect",
+        code = "in"
+    )]
     InNotOf(#[primary_span] Span),
-    #[suggestion(add_in, style = "short", applicability = "maybe-incorrect", code = " in ")]
+    #[suggestion(parse_add_in, style = "short", applicability = "maybe-incorrect", code = " in ")]
     AddIn(#[primary_span] Span),
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_missing_comma_after_match_arm)]
+#[diag(parse_missing_expression_in_for_loop)]
+pub(crate) struct MissingExpressionInForLoop {
+    #[primary_span]
+    #[suggestion(
+        code = "/* expression */ ",
+        applicability = "has-placeholders",
+        style = "verbose"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_loop_else)]
+#[note]
+pub(crate) struct LoopElseNotSupported {
+    #[primary_span]
+    pub span: Span,
+    pub loop_kind: &'static str,
+    #[label(parse_loop_keyword)]
+    pub loop_kw: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_comma_after_match_arm)]
 pub(crate) struct MissingCommaAfterMatchArm {
     #[primary_span]
     #[suggestion(applicability = "machine-applicable", code = ",")]
@@ -485,7 +471,7 @@ pub(crate) struct MissingCommaAfterMatchArm {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_catch_after_try)]
+#[diag(parse_catch_after_try)]
 #[help]
 pub(crate) struct CatchAfterTry {
     #[primary_span]
@@ -493,7 +479,7 @@ pub(crate) struct CatchAfterTry {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_comma_after_base_struct)]
+#[diag(parse_comma_after_base_struct)]
 #[note]
 pub(crate) struct CommaAfterBaseStruct {
     #[primary_span]
@@ -503,7 +489,7 @@ pub(crate) struct CommaAfterBaseStruct {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_eq_field_init)]
+#[diag(parse_eq_field_init)]
 pub(crate) struct EqFieldInit {
     #[primary_span]
     pub span: Span,
@@ -512,16 +498,16 @@ pub(crate) struct EqFieldInit {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_dotdotdot)]
+#[diag(parse_dotdotdot)]
 pub(crate) struct DotDotDot {
     #[primary_span]
-    #[suggestion(suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
-    #[suggestion(suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
+    #[suggestion(parse_suggest_exclusive_range, applicability = "maybe-incorrect", code = "..")]
+    #[suggestion(parse_suggest_inclusive_range, applicability = "maybe-incorrect", code = "..=")]
     pub span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_left_arrow_operator)]
+#[diag(parse_left_arrow_operator)]
 pub(crate) struct LeftArrowOperator {
     #[primary_span]
     #[suggestion(applicability = "maybe-incorrect", code = "< -")]
@@ -529,7 +515,7 @@ pub(crate) struct LeftArrowOperator {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_remove_let)]
+#[diag(parse_remove_let)]
 pub(crate) struct RemoveLet {
     #[primary_span]
     #[suggestion(applicability = "machine-applicable", code = "")]
@@ -537,7 +523,7 @@ pub(crate) struct RemoveLet {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_use_eq_instead)]
+#[diag(parse_use_eq_instead)]
 pub(crate) struct UseEqInstead {
     #[primary_span]
     #[suggestion(style = "short", applicability = "machine-applicable", code = "=")]
@@ -545,7 +531,7 @@ pub(crate) struct UseEqInstead {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_use_empty_block_not_semi)]
+#[diag(parse_use_empty_block_not_semi)]
 pub(crate) struct UseEmptyBlockNotSemi {
     #[primary_span]
     #[suggestion(style = "hidden", applicability = "machine-applicable", code = "{{}}")]
@@ -553,33 +539,33 @@ pub(crate) struct UseEmptyBlockNotSemi {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_comparison_interpreted_as_generic)]
+#[diag(parse_comparison_interpreted_as_generic)]
 pub(crate) struct ComparisonInterpretedAsGeneric {
     #[primary_span]
-    #[label(label_comparison)]
+    #[label(parse_label_comparison)]
     pub comparison: Span,
     pub r#type: Path,
-    #[label(label_args)]
+    #[label(parse_label_args)]
     pub args: Span,
     #[subdiagnostic]
     pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_shift_interpreted_as_generic)]
+#[diag(parse_shift_interpreted_as_generic)]
 pub(crate) struct ShiftInterpretedAsGeneric {
     #[primary_span]
-    #[label(label_comparison)]
+    #[label(parse_label_comparison)]
     pub shift: Span,
     pub r#type: Path,
-    #[label(label_args)]
+    #[label(parse_label_args)]
     pub args: Span,
     #[subdiagnostic]
     pub suggestion: ComparisonOrShiftInterpretedAsGenericSugg,
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
 pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
     #[suggestion_part(code = "(")]
     pub left: Span,
@@ -588,7 +574,7 @@ pub(crate) struct ComparisonOrShiftInterpretedAsGenericSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_found_expr_would_be_stmt)]
+#[diag(parse_found_expr_would_be_stmt)]
 pub(crate) struct FoundExprWouldBeStmt {
     #[primary_span]
     #[label]
@@ -599,13 +585,13 @@ pub(crate) struct FoundExprWouldBeStmt {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_leading_plus_not_supported)]
+#[diag(parse_leading_plus_not_supported)]
 pub(crate) struct LeadingPlusNotSupported {
     #[primary_span]
     #[label]
     pub span: Span,
     #[suggestion(
-        suggestion_remove_plus,
+        parse_suggestion_remove_plus,
         style = "verbose",
         code = "",
         applicability = "machine-applicable"
@@ -616,7 +602,7 @@ pub(crate) struct LeadingPlusNotSupported {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_parentheses_with_struct_fields)]
+#[diag(parse_parentheses_with_struct_fields)]
 pub(crate) struct ParenthesesWithStructFields {
     #[primary_span]
     pub span: Span,
@@ -628,7 +614,7 @@ pub(crate) struct ParenthesesWithStructFields {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion_braces_for_struct, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion_braces_for_struct, applicability = "maybe-incorrect")]
 pub(crate) struct BracesForStructLiteral {
     #[suggestion_part(code = " {{ ")]
     pub first: Span,
@@ -637,14 +623,14 @@ pub(crate) struct BracesForStructLiteral {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion_no_fields_for_fn, applicability = "maybe-incorrect")]
 pub(crate) struct NoFieldsForFnCall {
     #[suggestion_part(code = "")]
     pub fields: Vec<Span>,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_labeled_loop_in_break)]
+#[diag(parse_labeled_loop_in_break)]
 pub(crate) struct LabeledLoopInBreak {
     #[primary_span]
     pub span: Span,
@@ -654,7 +640,7 @@ pub(crate) struct LabeledLoopInBreak {
 
 #[derive(Subdiagnostic)]
 #[multipart_suggestion(
-    parser_sugg_wrap_expression_in_parentheses,
+    parse_sugg_wrap_expression_in_parentheses,
     applicability = "machine-applicable"
 )]
 pub(crate) struct WrapExpressionInParentheses {
@@ -665,7 +651,7 @@ pub(crate) struct WrapExpressionInParentheses {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_array_brackets_instead_of_braces)]
+#[diag(parse_array_brackets_instead_of_braces)]
 pub(crate) struct ArrayBracketsInsteadOfSpaces {
     #[primary_span]
     pub span: Span,
@@ -674,7 +660,7 @@ pub(crate) struct ArrayBracketsInsteadOfSpaces {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "maybe-incorrect")]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
 pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
     #[suggestion_part(code = "[")]
     pub left: Span,
@@ -683,21 +669,60 @@ pub(crate) struct ArrayBracketsInsteadOfSpacesSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_match_arm_body_without_braces)]
+#[diag(parse_match_arm_body_without_braces)]
 pub(crate) struct MatchArmBodyWithoutBraces {
     #[primary_span]
-    #[label(label_statements)]
+    #[label(parse_label_statements)]
     pub statements: Span,
-    #[label(label_arrow)]
+    #[label(parse_label_arrow)]
     pub arrow: Span,
     pub num_statements: usize,
     #[subdiagnostic]
     pub sub: MatchArmBodyWithoutBracesSugg,
 }
 
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_extra_equals)]
+#[note]
+pub(crate) struct InclusiveRangeExtraEquals {
+    #[primary_span]
+    #[suggestion(
+        parse_suggestion_remove_eq,
+        style = "short",
+        code = "..=",
+        applicability = "maybe-incorrect"
+    )]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_match_arrow)]
+pub(crate) struct InclusiveRangeMatchArrow {
+    #[primary_span]
+    pub arrow: Span,
+    #[label]
+    pub span: Span,
+    #[suggestion(style = "verbose", code = " ", applicability = "machine-applicable")]
+    pub after_pat: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_inclusive_range_no_end, code = "E0586")]
+#[note]
+pub(crate) struct InclusiveRangeNoEnd {
+    #[primary_span]
+    #[suggestion(
+        parse_suggestion_open_range,
+        code = "..",
+        applicability = "machine-applicable",
+        style = "short"
+    )]
+    pub span: Span,
+}
+
 #[derive(Subdiagnostic)]
 pub(crate) enum MatchArmBodyWithoutBracesSugg {
-    #[multipart_suggestion(suggestion_add_braces, applicability = "machine-applicable")]
+    #[multipart_suggestion(parse_suggestion_add_braces, applicability = "machine-applicable")]
     AddBraces {
         #[suggestion_part(code = "{{ ")]
         left: Span,
@@ -705,7 +730,7 @@ pub(crate) enum MatchArmBodyWithoutBracesSugg {
         right: Span,
     },
     #[suggestion(
-        suggestion_use_comma_not_semicolon,
+        parse_suggestion_use_comma_not_semicolon,
         code = ",",
         applicability = "machine-applicable"
     )]
@@ -716,7 +741,7 @@ pub(crate) enum MatchArmBodyWithoutBracesSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_struct_literal_not_allowed_here)]
+#[diag(parse_struct_literal_not_allowed_here)]
 pub(crate) struct StructLiteralNotAllowedHere {
     #[primary_span]
     pub span: Span,
@@ -725,7 +750,7 @@ pub(crate) struct StructLiteralNotAllowedHere {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
 pub(crate) struct StructLiteralNotAllowedHereSugg {
     #[suggestion_part(code = "(")]
     pub left: Span,
@@ -734,62 +759,27 @@ pub(crate) struct StructLiteralNotAllowedHereSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_interpolated_expression)]
+#[diag(parse_invalid_interpolated_expression)]
 pub(crate) struct InvalidInterpolatedExpression {
     #[primary_span]
     pub span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_hexadecimal_float_literal_not_supported)]
-pub(crate) struct HexadecimalFloatLiteralNotSupported {
-    #[primary_span]
-    #[label(parser_not_supported)]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_octal_float_literal_not_supported)]
-pub(crate) struct OctalFloatLiteralNotSupported {
-    #[primary_span]
-    #[label(parser_not_supported)]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_binary_float_literal_not_supported)]
-pub(crate) struct BinaryFloatLiteralNotSupported {
-    #[primary_span]
-    #[label(parser_not_supported)]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_literal_suffix)]
-pub(crate) struct InvalidLiteralSuffix {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    // FIXME(#100717)
-    pub kind: String,
-    pub suffix: Symbol,
-}
-
-#[derive(Diagnostic)]
-#[diag(parser_invalid_literal_suffix_on_tuple_index)]
+#[diag(parse_invalid_literal_suffix_on_tuple_index)]
 pub(crate) struct InvalidLiteralSuffixOnTupleIndex {
     #[primary_span]
     #[label]
     pub span: Span,
     pub suffix: Symbol,
-    #[help(tuple_exception_line_1)]
-    #[help(tuple_exception_line_2)]
-    #[help(tuple_exception_line_3)]
+    #[help(parse_tuple_exception_line_1)]
+    #[help(parse_tuple_exception_line_2)]
+    #[help(parse_tuple_exception_line_3)]
     pub exception: Option<()>,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_non_string_abi_literal)]
+#[diag(parse_non_string_abi_literal)]
 pub(crate) struct NonStringAbiLiteral {
     #[primary_span]
     #[suggestion(code = "\"C\"", applicability = "maybe-incorrect")]
@@ -797,21 +787,21 @@ pub(crate) struct NonStringAbiLiteral {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_mismatched_closing_delimiter)]
+#[diag(parse_mismatched_closing_delimiter)]
 pub(crate) struct MismatchedClosingDelimiter {
     #[primary_span]
     pub spans: Vec<Span>,
     pub delimiter: String,
-    #[label(label_unmatched)]
+    #[label(parse_label_unmatched)]
     pub unmatched: Span,
-    #[label(label_opening_candidate)]
+    #[label(parse_label_opening_candidate)]
     pub opening_candidate: Option<Span>,
-    #[label(label_unclosed)]
+    #[label(parse_label_unclosed)]
     pub unclosed: Option<Span>,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_incorrect_visibility_restriction, code = "E0704")]
+#[diag(parse_incorrect_visibility_restriction, code = "E0704")]
 #[help]
 pub(crate) struct IncorrectVisibilityRestriction {
     #[primary_span]
@@ -821,21 +811,21 @@ pub(crate) struct IncorrectVisibilityRestriction {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_assignment_else_not_allowed)]
+#[diag(parse_assignment_else_not_allowed)]
 pub(crate) struct AssignmentElseNotAllowed {
     #[primary_span]
     pub span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_expected_statement_after_outer_attr)]
+#[diag(parse_expected_statement_after_outer_attr)]
 pub(crate) struct ExpectedStatementAfterOuterAttr {
     #[primary_span]
     pub span: Span,
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_doc_comment_does_not_document_anything, code = "E0585")]
+#[diag(parse_doc_comment_does_not_document_anything, code = "E0585")]
 #[help]
 pub(crate) struct DocCommentDoesNotDocumentAnything {
     #[primary_span]
@@ -845,7 +835,7 @@ pub(crate) struct DocCommentDoesNotDocumentAnything {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_const_let_mutually_exclusive)]
+#[diag(parse_const_let_mutually_exclusive)]
 pub(crate) struct ConstLetMutuallyExclusive {
     #[primary_span]
     #[suggestion(code = "const", applicability = "maybe-incorrect")]
@@ -853,7 +843,7 @@ pub(crate) struct ConstLetMutuallyExclusive {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_expression_in_let_else)]
+#[diag(parse_invalid_expression_in_let_else)]
 pub(crate) struct InvalidExpressionInLetElse {
     #[primary_span]
     pub span: Span,
@@ -863,7 +853,7 @@ pub(crate) struct InvalidExpressionInLetElse {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_curly_in_let_else)]
+#[diag(parse_invalid_curly_in_let_else)]
 pub(crate) struct InvalidCurlyInLetElse {
     #[primary_span]
     pub span: Span,
@@ -872,7 +862,7 @@ pub(crate) struct InvalidCurlyInLetElse {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_compound_assignment_expression_in_let)]
+#[diag(parse_compound_assignment_expression_in_let)]
 #[help]
 pub(crate) struct CompoundAssignmentExpressionInLet {
     #[primary_span]
@@ -881,7 +871,7 @@ pub(crate) struct CompoundAssignmentExpressionInLet {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_suffixed_literal_in_attribute)]
+#[diag(parse_suffixed_literal_in_attribute)]
 #[help]
 pub(crate) struct SuffixedLiteralInAttribute {
     #[primary_span]
@@ -889,7 +879,7 @@ pub(crate) struct SuffixedLiteralInAttribute {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_invalid_meta_item)]
+#[diag(parse_invalid_meta_item)]
 pub(crate) struct InvalidMetaItem {
     #[primary_span]
     pub span: Span,
@@ -898,7 +888,7 @@ pub(crate) struct InvalidMetaItem {
 
 #[derive(Subdiagnostic)]
 #[suggestion(
-    parser_sugg_escape_to_use_as_identifier,
+    parse_sugg_escape_to_use_as_identifier,
     style = "verbose",
     applicability = "maybe-incorrect",
     code = "r#"
@@ -910,7 +900,7 @@ pub(crate) struct SuggEscapeToUseAsIdentifier {
 }
 
 #[derive(Subdiagnostic)]
-#[suggestion(parser_sugg_remove_comma, applicability = "machine-applicable", code = "")]
+#[suggestion(parse_sugg_remove_comma, applicability = "machine-applicable", code = "")]
 pub(crate) struct SuggRemoveComma {
     #[primary_span]
     pub span: Span,
@@ -918,15 +908,15 @@ pub(crate) struct SuggRemoveComma {
 
 #[derive(Subdiagnostic)]
 pub(crate) enum ExpectedIdentifierFound {
-    #[label(parser_expected_identifier_found_reserved_identifier)]
+    #[label(parse_expected_identifier_found_reserved_identifier)]
     ReservedIdentifier(#[primary_span] Span),
-    #[label(parser_expected_identifier_found_keyword)]
+    #[label(parse_expected_identifier_found_keyword)]
     Keyword(#[primary_span] Span),
-    #[label(parser_expected_identifier_found_reserved_keyword)]
+    #[label(parse_expected_identifier_found_reserved_keyword)]
     ReservedKeyword(#[primary_span] Span),
-    #[label(parser_expected_identifier_found_doc_comment)]
+    #[label(parse_expected_identifier_found_doc_comment)]
     DocComment(#[primary_span] Span),
-    #[label(parser_expected_identifier)]
+    #[label(parse_expected_identifier)]
     Other(#[primary_span] Span),
 }
 
@@ -949,6 +939,7 @@ pub(crate) struct ExpectedIdentifier {
     pub token: Token,
     pub suggest_raw: Option<SuggEscapeToUseAsIdentifier>,
     pub suggest_remove_comma: Option<SuggRemoveComma>,
+    pub help_cannot_start_number: Option<HelpIdentifierStartsWithNumber>,
 }
 
 impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
@@ -957,20 +948,20 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
         self,
         handler: &'a rustc_errors::Handler,
     ) -> rustc_errors::DiagnosticBuilder<'a, G> {
-        let token_descr = super::parser::TokenDescription::from_token(&self.token);
+        let token_descr = TokenDescription::from_token(&self.token);
 
         let mut diag = handler.struct_diagnostic(match token_descr {
             Some(TokenDescription::ReservedIdentifier) => {
-                fluent::parser_expected_identifier_found_reserved_identifier_str
+                fluent::parse_expected_identifier_found_reserved_identifier_str
             }
-            Some(TokenDescription::Keyword) => fluent::parser_expected_identifier_found_keyword_str,
+            Some(TokenDescription::Keyword) => fluent::parse_expected_identifier_found_keyword_str,
             Some(TokenDescription::ReservedKeyword) => {
-                fluent::parser_expected_identifier_found_reserved_keyword_str
+                fluent::parse_expected_identifier_found_reserved_keyword_str
             }
             Some(TokenDescription::DocComment) => {
-                fluent::parser_expected_identifier_found_doc_comment_str
+                fluent::parse_expected_identifier_found_doc_comment_str
             }
-            None => fluent::parser_expected_identifier_found_str,
+            None => fluent::parse_expected_identifier_found_str,
         });
         diag.set_span(self.span);
         diag.set_arg("token", self.token);
@@ -985,10 +976,18 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedIdentifier {
             sugg.add_to_diagnostic(&mut diag);
         }
 
+        if let Some(help) = self.help_cannot_start_number {
+            help.add_to_diagnostic(&mut diag);
+        }
+
         diag
     }
 }
 
+#[derive(Subdiagnostic)]
+#[help(parse_invalid_identifier_with_leading_number)]
+pub(crate) struct HelpIdentifierStartsWithNumber;
+
 pub(crate) struct ExpectedSemi {
     pub span: Span,
     pub token: Token,
@@ -1003,26 +1002,24 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
         self,
         handler: &'a rustc_errors::Handler,
     ) -> rustc_errors::DiagnosticBuilder<'a, G> {
-        let token_descr = super::parser::TokenDescription::from_token(&self.token);
+        let token_descr = TokenDescription::from_token(&self.token);
 
         let mut diag = handler.struct_diagnostic(match token_descr {
             Some(TokenDescription::ReservedIdentifier) => {
-                fluent::parser_expected_semi_found_reserved_identifier_str
+                fluent::parse_expected_semi_found_reserved_identifier_str
             }
-            Some(TokenDescription::Keyword) => fluent::parser_expected_semi_found_keyword_str,
+            Some(TokenDescription::Keyword) => fluent::parse_expected_semi_found_keyword_str,
             Some(TokenDescription::ReservedKeyword) => {
-                fluent::parser_expected_semi_found_reserved_keyword_str
-            }
-            Some(TokenDescription::DocComment) => {
-                fluent::parser_expected_semi_found_doc_comment_str
+                fluent::parse_expected_semi_found_reserved_keyword_str
             }
-            None => fluent::parser_expected_semi_found_str,
+            Some(TokenDescription::DocComment) => fluent::parse_expected_semi_found_doc_comment_str,
+            None => fluent::parse_expected_semi_found_str,
         });
         diag.set_span(self.span);
         diag.set_arg("token", self.token);
 
         if let Some(unexpected_token_label) = self.unexpected_token_label {
-            diag.span_label(unexpected_token_label, fluent::parser_label_unexpected_token);
+            diag.span_label(unexpected_token_label, fluent::parse_label_unexpected_token);
         }
 
         self.sugg.add_to_diagnostic(&mut diag);
@@ -1033,14 +1030,10 @@ impl<'a, G: EmissionGuarantee> IntoDiagnostic<'a, G> for ExpectedSemi {
 
 #[derive(Subdiagnostic)]
 pub(crate) enum ExpectedSemiSugg {
-    #[suggestion(
-        parser_sugg_change_this_to_semi,
-        code = ";",
-        applicability = "machine-applicable"
-    )]
+    #[suggestion(parse_sugg_change_this_to_semi, code = ";", applicability = "machine-applicable")]
     ChangeToSemi(#[primary_span] Span),
     #[suggestion(
-        parser_sugg_add_semi,
+        parse_sugg_add_semi,
         style = "short",
         code = ";",
         applicability = "machine-applicable"
@@ -1049,7 +1042,7 @@ pub(crate) enum ExpectedSemiSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_struct_literal_body_without_path)]
+#[diag(parse_struct_literal_body_without_path)]
 pub(crate) struct StructLiteralBodyWithoutPath {
     #[primary_span]
     pub span: Span,
@@ -1058,7 +1051,7 @@ pub(crate) struct StructLiteralBodyWithoutPath {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "has-placeholders")]
+#[multipart_suggestion(parse_suggestion, applicability = "has-placeholders")]
 pub(crate) struct StructLiteralBodyWithoutPathSugg {
     #[suggestion_part(code = "{{ SomeStruct ")]
     pub before: Span,
@@ -1067,7 +1060,25 @@ pub(crate) struct StructLiteralBodyWithoutPathSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_unmatched_angle_brackets)]
+#[diag(parse_struct_literal_needing_parens)]
+pub(crate) struct StructLiteralNeedingParens {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: StructLiteralNeedingParensSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct StructLiteralNeedingParensSugg {
+    #[suggestion_part(code = "(")]
+    pub before: Span,
+    #[suggestion_part(code = ")")]
+    pub after: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unmatched_angle_brackets)]
 pub(crate) struct UnmatchedAngleBrackets {
     #[primary_span]
     #[suggestion(code = "", applicability = "machine-applicable")]
@@ -1076,7 +1087,7 @@ pub(crate) struct UnmatchedAngleBrackets {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_generic_parameters_without_angle_brackets)]
+#[diag(parse_generic_parameters_without_angle_brackets)]
 pub(crate) struct GenericParamsWithoutAngleBrackets {
     #[primary_span]
     pub span: Span,
@@ -1085,7 +1096,7 @@ pub(crate) struct GenericParamsWithoutAngleBrackets {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
 pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
     #[suggestion_part(code = "<")]
     pub left: Span,
@@ -1094,19 +1105,19 @@ pub(crate) struct GenericParamsWithoutAngleBracketsSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_comparison_operators_cannot_be_chained)]
+#[diag(parse_comparison_operators_cannot_be_chained)]
 pub(crate) struct ComparisonOperatorsCannotBeChained {
     #[primary_span]
     pub span: Vec<Span>,
     #[suggestion(
-        parser_sugg_turbofish_syntax,
+        parse_sugg_turbofish_syntax,
         style = "verbose",
         code = "::",
         applicability = "maybe-incorrect"
     )]
     pub suggest_turbofish: Option<Span>,
-    #[help(parser_sugg_turbofish_syntax)]
-    #[help(sugg_parentheses_for_function_args)]
+    #[help(parse_sugg_turbofish_syntax)]
+    #[help(parse_sugg_parentheses_for_function_args)]
     pub help_turbofish: Option<()>,
     #[subdiagnostic]
     pub chaining_sugg: Option<ComparisonOperatorsCannotBeChainedSugg>,
@@ -1115,7 +1126,7 @@ pub(crate) struct ComparisonOperatorsCannotBeChained {
 #[derive(Subdiagnostic)]
 pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
     #[suggestion(
-        sugg_split_comparison,
+        parse_sugg_split_comparison,
         style = "verbose",
         code = " && {middle_term}",
         applicability = "maybe-incorrect"
@@ -1125,7 +1136,7 @@ pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
         span: Span,
         middle_term: String,
     },
-    #[multipart_suggestion(sugg_parenthesize, applicability = "maybe-incorrect")]
+    #[multipart_suggestion(parse_sugg_parenthesize, applicability = "maybe-incorrect")]
     Parenthesize {
         #[suggestion_part(code = "(")]
         left: Span,
@@ -1135,7 +1146,7 @@ pub(crate) enum ComparisonOperatorsCannotBeChainedSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_question_mark_in_type)]
+#[diag(parse_question_mark_in_type)]
 pub(crate) struct QuestionMarkInType {
     #[primary_span]
     #[label]
@@ -1145,7 +1156,7 @@ pub(crate) struct QuestionMarkInType {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
 pub(crate) struct QuestionMarkInTypeSugg {
     #[suggestion_part(code = "Option<")]
     pub left: Span,
@@ -1154,7 +1165,7 @@ pub(crate) struct QuestionMarkInTypeSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_unexpected_parentheses_in_for_head)]
+#[diag(parse_unexpected_parentheses_in_for_head)]
 pub(crate) struct ParenthesesInForHead {
     #[primary_span]
     pub span: Vec<Span>,
@@ -1163,7 +1174,7 @@ pub(crate) struct ParenthesesInForHead {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
 pub(crate) struct ParenthesesInForHeadSugg {
     #[suggestion_part(code = "{left_snippet}")]
     pub left: Span,
@@ -1174,7 +1185,7 @@ pub(crate) struct ParenthesesInForHeadSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_doc_comment_on_param_type)]
+#[diag(parse_doc_comment_on_param_type)]
 pub(crate) struct DocCommentOnParamType {
     #[primary_span]
     #[label]
@@ -1182,7 +1193,7 @@ pub(crate) struct DocCommentOnParamType {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_attribute_on_param_type)]
+#[diag(parse_attribute_on_param_type)]
 pub(crate) struct AttributeOnParamType {
     #[primary_span]
     #[label]
@@ -1190,7 +1201,7 @@ pub(crate) struct AttributeOnParamType {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_pattern_method_param_without_body, code = "E0642")]
+#[diag(parse_pattern_method_param_without_body, code = "E0642")]
 pub(crate) struct PatternMethodParamWithoutBody {
     #[primary_span]
     #[suggestion(code = "_", applicability = "machine-applicable")]
@@ -1198,7 +1209,7 @@ pub(crate) struct PatternMethodParamWithoutBody {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_self_param_not_first)]
+#[diag(parse_self_param_not_first)]
 pub(crate) struct SelfParamNotFirst {
     #[primary_span]
     #[label]
@@ -1206,7 +1217,7 @@ pub(crate) struct SelfParamNotFirst {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_const_generic_without_braces)]
+#[diag(parse_const_generic_without_braces)]
 pub(crate) struct ConstGenericWithoutBraces {
     #[primary_span]
     pub span: Span,
@@ -1215,7 +1226,7 @@ pub(crate) struct ConstGenericWithoutBraces {
 }
 
 #[derive(Subdiagnostic)]
-#[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
 pub(crate) struct ConstGenericWithoutBracesSugg {
     #[suggestion_part(code = "{{ ")]
     pub left: Span,
@@ -1224,7 +1235,7 @@ pub(crate) struct ConstGenericWithoutBracesSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_unexpected_const_param_declaration)]
+#[diag(parse_unexpected_const_param_declaration)]
 pub(crate) struct UnexpectedConstParamDeclaration {
     #[primary_span]
     #[label]
@@ -1235,7 +1246,7 @@ pub(crate) struct UnexpectedConstParamDeclaration {
 
 #[derive(Subdiagnostic)]
 pub(crate) enum UnexpectedConstParamDeclarationSugg {
-    #[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+    #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
     AddParam {
         #[suggestion_part(code = "<{snippet}>")]
         impl_generics: Span,
@@ -1244,7 +1255,7 @@ pub(crate) enum UnexpectedConstParamDeclarationSugg {
         snippet: String,
         ident: String,
     },
-    #[multipart_suggestion(suggestion, applicability = "machine-applicable")]
+    #[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
     AppendParam {
         #[suggestion_part(code = ", {snippet}")]
         impl_generics_end: Span,
@@ -1256,7 +1267,7 @@ pub(crate) enum UnexpectedConstParamDeclarationSugg {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_unexpected_const_in_generic_param)]
+#[diag(parse_unexpected_const_in_generic_param)]
 pub(crate) struct UnexpectedConstInGenericParam {
     #[primary_span]
     pub span: Span,
@@ -1265,7 +1276,7 @@ pub(crate) struct UnexpectedConstInGenericParam {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_async_move_order_incorrect)]
+#[diag(parse_async_move_order_incorrect)]
 pub(crate) struct AsyncMoveOrderIncorrect {
     #[primary_span]
     #[suggestion(style = "verbose", code = "async move", applicability = "maybe-incorrect")]
@@ -1273,10 +1284,1019 @@ pub(crate) struct AsyncMoveOrderIncorrect {
 }
 
 #[derive(Diagnostic)]
-#[diag(parser_double_colon_in_bound)]
+#[diag(parse_double_colon_in_bound)]
 pub(crate) struct DoubleColonInBound {
     #[primary_span]
     pub span: Span,
     #[suggestion(code = ": ", applicability = "machine-applicable")]
     pub between: Span,
 }
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_ptr_with_generics)]
+pub(crate) struct FnPtrWithGenerics {
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: Option<FnPtrWithGenericsSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "maybe-incorrect")]
+pub(crate) struct FnPtrWithGenericsSugg {
+    #[suggestion_part(code = "{snippet}")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+    pub arity: usize,
+    pub for_param_list_exists: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_if_with_if)]
+pub(crate) struct UnexpectedIfWithIf(
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = " ", style = "verbose")]
+    pub Span,
+);
+
+#[derive(Diagnostic)]
+#[diag(parse_maybe_fn_typo_with_impl)]
+pub(crate) struct FnTypoWithImpl {
+    #[primary_span]
+    #[suggestion(applicability = "maybe-incorrect", code = "impl", style = "verbose")]
+    pub fn_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_fn_path_found_fn_keyword)]
+pub(crate) struct ExpectedFnPathFoundFnKeyword {
+    #[primary_span]
+    #[suggestion(applicability = "machine-applicable", code = "Fn", style = "verbose")]
+    pub fn_token_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_where_clause_before_tuple_struct_body)]
+pub(crate) struct WhereClauseBeforeTupleStructBody {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[label(parse_name_label)]
+    pub name: Span,
+    #[label(parse_body_label)]
+    pub body: Span,
+    #[subdiagnostic]
+    pub sugg: Option<WhereClauseBeforeTupleStructBodySugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct WhereClauseBeforeTupleStructBodySugg {
+    #[suggestion_part(code = "{snippet}")]
+    pub left: Span,
+    pub snippet: String,
+    #[suggestion_part(code = "")]
+    pub right: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_async_fn_in_2015, code = "E0670")]
+pub(crate) struct AsyncFnIn2015 {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub help: HelpUseLatestEdition,
+}
+
+#[derive(Subdiagnostic)]
+#[label(parse_async_block_in_2015)]
+pub(crate) struct AsyncBlockIn2015 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_self_argument_pointer)]
+pub(crate) struct SelfArgumentPointer {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_token_after_dot)]
+pub struct UnexpectedTokenAfterDot<'a> {
+    #[primary_span]
+    pub span: Span,
+    pub actual: Cow<'a, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_visibility_not_followed_by_item)]
+#[help]
+pub(crate) struct VisibilityNotFollowedByItem {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub vis: Visibility,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_default_not_followed_by_item)]
+#[note]
+pub(crate) struct DefaultNotFollowedByItem {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum MissingKeywordForItemDefinition {
+    #[diag(parse_missing_struct_for_struct_definition)]
+    Struct {
+        #[primary_span]
+        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " struct ")]
+        span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_for_function_definition)]
+    Function {
+        #[primary_span]
+        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
+        span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_missing_fn_for_method_definition)]
+    Method {
+        #[primary_span]
+        #[suggestion(style = "short", applicability = "maybe-incorrect", code = " fn ")]
+        span: Span,
+        ident: Ident,
+    },
+    #[diag(parse_ambiguous_missing_keyword_for_item_definition)]
+    Ambiguous {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        subdiag: Option<AmbiguousMissingKwForItemSub>,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum AmbiguousMissingKwForItemSub {
+    #[suggestion(parse_suggestion, applicability = "maybe-incorrect", code = "{snippet}!")]
+    SuggestMacro {
+        #[primary_span]
+        span: Span,
+        snippet: String,
+    },
+    #[help(parse_help)]
+    HelpMacro,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_trait_in_trait_impl)]
+pub(crate) struct MissingTraitInTraitImpl {
+    #[primary_span]
+    #[suggestion(parse_suggestion_add_trait, code = " Trait ", applicability = "has-placeholders")]
+    pub span: Span,
+    #[suggestion(parse_suggestion_remove_for, code = "", applicability = "maybe-incorrect")]
+    pub for_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_for_in_trait_impl)]
+pub(crate) struct MissingForInTraitImpl {
+    #[primary_span]
+    #[suggestion(style = "short", code = " for ", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_trait_in_trait_impl_found_type)]
+pub(crate) struct ExpectedTraitInTraitImplFoundType {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_bounds_not_allowed_on_trait_aliases)]
+pub(crate) struct BoundsNotAllowedOnTraitAliases {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_auto)]
+pub(crate) struct TraitAliasCannotBeAuto {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_auto)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trait_alias_cannot_be_unsafe)]
+pub(crate) struct TraitAliasCannotBeUnsafe {
+    #[primary_span]
+    #[label(parse_trait_alias_cannot_be_unsafe)]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_associated_static_item_not_allowed)]
+pub(crate) struct AssociatedStaticItemNotAllowed {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_crate_name_with_dashes)]
+pub(crate) struct ExternCrateNameWithDashes {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sugg: ExternCrateNameWithDashesSugg,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
+pub(crate) struct ExternCrateNameWithDashesSugg {
+    #[suggestion_part(code = "_")]
+    pub dashes: Vec<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_extern_item_cannot_be_const)]
+#[note]
+pub(crate) struct ExternItemCannotBeConst {
+    #[primary_span]
+    pub ident_span: Span,
+    #[suggestion(code = "static ", applicability = "machine-applicable")]
+    pub const_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_const_global_cannot_be_mutable)]
+pub(crate) struct ConstGlobalCannotBeMutable {
+    #[primary_span]
+    #[label]
+    pub ident_span: Span,
+    #[suggestion(code = "static", applicability = "maybe-incorrect")]
+    pub const_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_missing_const_type)]
+pub(crate) struct MissingConstType {
+    #[primary_span]
+    #[suggestion(code = "{colon} <type>", applicability = "has-placeholders")]
+    pub span: Span,
+
+    pub kind: &'static str,
+    pub colon: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_struct_mutually_exclusive)]
+pub(crate) struct EnumStructMutuallyExclusive {
+    #[primary_span]
+    #[suggestion(code = "enum", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedTokenAfterStructName {
+    #[diag(parse_unexpected_token_after_struct_name_found_reserved_identifier)]
+    ReservedIdentifier {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_keyword)]
+    Keyword {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_reserved_keyword)]
+    ReservedKeyword {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_doc_comment)]
+    DocComment {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_unexpected_token_after_struct_name_found_other)]
+    Other {
+        #[primary_span]
+        #[label(parse_unexpected_token_after_struct_name)]
+        span: Span,
+        token: Token,
+    },
+}
+
+impl UnexpectedTokenAfterStructName {
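+    /// Picks the diagnostic variant that best describes the unexpected token:
+    /// a reserved identifier, keyword, reserved keyword, doc comment, or other token.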
+    pub fn new(span: Span, token: Token) -> Self {
+        match TokenDescription::from_token(&token) {
+            Some(TokenDescription::ReservedIdentifier) => Self::ReservedIdentifier { span, token },
+            Some(TokenDescription::Keyword) => Self::Keyword { span, token },
+            Some(TokenDescription::ReservedKeyword) => Self::ReservedKeyword { span, token },
+            Some(TokenDescription::DocComment) => Self::DocComment { span, token },
+            None => Self::Other { span, token },
+        }
+    }
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_self_in_generic_parameters)]
+#[note]
+pub(crate) struct UnexpectedSelfInGenericParameters {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_default_value_for_lifetime_in_generic_parameters)]
+pub(crate) struct UnexpectedDefaultValueForLifetimeInGenericParameters {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_multiple_where_clauses)]
+pub(crate) struct MultipleWhereClauses {
+    #[primary_span]
+    pub span: Span,
+    #[label]
+    pub previous: Span,
+    #[suggestion(style = "verbose", code = ",", applicability = "maybe-incorrect")]
+    pub between: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum UnexpectedNonterminal {
+    #[diag(parse_nonterminal_expected_item_keyword)]
+    Item(#[primary_span] Span),
+    #[diag(parse_nonterminal_expected_statement)]
+    Statement(#[primary_span] Span),
+    #[diag(parse_nonterminal_expected_ident)]
+    Ident {
+        #[primary_span]
+        span: Span,
+        token: Token,
+    },
+    #[diag(parse_nonterminal_expected_lifetime)]
+    Lifetime {
+        #[primary_span]
+        span: Span,
+        token: Token,
+    },
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowed {
+    #[diag(parse_or_pattern_not_allowed_in_let_binding)]
+    LetBinding {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        sub: Option<TopLevelOrPatternNotAllowedSugg>,
+    },
+    #[diag(parse_or_pattern_not_allowed_in_fn_parameters)]
+    FunctionParameter {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        sub: Option<TopLevelOrPatternNotAllowedSugg>,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cannot_be_raw_ident)]
+pub struct CannotBeRawIdent {
+    #[primary_span]
+    pub span: Span,
+    pub ident: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_cr_doc_comment)]
+pub struct CrDocComment {
+    #[primary_span]
+    pub span: Span,
+    pub block: bool,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_no_digits_literal, code = "E0768")]
+pub struct NoDigitsLiteral {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_digit_literal)]
+pub struct InvalidDigitLiteral {
+    #[primary_span]
+    pub span: Span,
+    pub base: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_empty_exponent_float)]
+pub struct EmptyExponentFloat {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_float_literal_unsupported_base)]
+pub struct FloatLiteralUnsupportedBase {
+    #[primary_span]
+    pub span: Span,
+    pub base: &'static str,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_prefix)]
+#[note]
+pub struct UnknownPrefix<'a> {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+    pub prefix: &'a str,
+    #[subdiagnostic]
+    pub sugg: Option<UnknownPrefixSugg>,
+}
+
+#[derive(Subdiagnostic)]
+pub enum UnknownPrefixSugg {
+    #[suggestion(
+        parse_suggestion_br,
+        code = "br",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    UseBr(#[primary_span] Span),
+    #[suggestion(
+        parse_suggestion_whitespace,
+        code = " ",
+        applicability = "maybe-incorrect",
+        style = "verbose"
+    )]
+    Whitespace(#[primary_span] Span),
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_too_many_hashes)]
+pub struct TooManyHashes {
+    #[primary_span]
+    pub span: Span,
+    pub num: u32,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unknown_start_of_token)]
+pub struct UnknownTokenStart {
+    #[primary_span]
+    pub span: Span,
+    pub escaped: String,
+    #[subdiagnostic]
+    pub sugg: Option<TokenSubstitution>,
+    #[subdiagnostic]
+    pub null: Option<UnknownTokenNull>,
+    #[subdiagnostic]
+    pub repeat: Option<UnknownTokenRepeat>,
+}
+
+#[derive(Subdiagnostic)]
+pub enum TokenSubstitution {
+    #[suggestion(parse_sugg_quotes, code = "{suggestion}", applicability = "maybe-incorrect")]
+    DirectedQuotes {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+        ascii_str: &'static str,
+        ascii_name: &'static str,
+    },
+    #[suggestion(parse_sugg_other, code = "{suggestion}", applicability = "maybe-incorrect")]
+    Other {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+        ch: String,
+        u_name: &'static str,
+        ascii_str: &'static str,
+        ascii_name: &'static str,
+    },
+}
+
+#[derive(Subdiagnostic)]
+#[note(parse_note_repeats)]
+pub struct UnknownTokenRepeat {
+    pub repeats: usize,
+}
+
+#[derive(Subdiagnostic)]
+#[help(parse_help_null)]
+pub struct UnknownTokenNull;
+
+#[derive(Diagnostic)]
+pub enum UnescapeError {
+    #[diag(parse_invalid_unicode_escape)]
+    #[help]
+    InvalidUnicodeEscape {
+        #[primary_span]
+        #[label]
+        span: Span,
+        surrogate: bool,
+    },
+    #[diag(parse_escape_only_char)]
+    EscapeOnlyChar {
+        #[primary_span]
+        span: Span,
+        #[suggestion(parse_escape, applicability = "machine-applicable", code = "{escaped_sugg}")]
+        char_span: Span,
+        escaped_sugg: String,
+        escaped_msg: String,
+        byte: bool,
+    },
+    #[diag(parse_bare_cr)]
+    BareCr {
+        #[primary_span]
+        #[suggestion(parse_escape, applicability = "machine-applicable", code = "\\r")]
+        span: Span,
+        double_quotes: bool,
+    },
+    #[diag(parse_bare_cr_in_raw_string)]
+    BareCrRawString(#[primary_span] Span),
+    #[diag(parse_too_short_hex_escape)]
+    TooShortHexEscape(#[primary_span] Span),
+    #[diag(parse_invalid_char_in_escape)]
+    InvalidCharInEscape {
+        #[primary_span]
+        #[label]
+        span: Span,
+        is_hex: bool,
+        ch: String,
+    },
+    #[diag(parse_out_of_range_hex_escape)]
+    OutOfRangeHexEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_leading_underscore_unicode_escape)]
+    LeadingUnderscoreUnicodeEscape {
+        #[primary_span]
+        #[label(parse_leading_underscore_unicode_escape_label)]
+        span: Span,
+        ch: String,
+    },
+    #[diag(parse_overlong_unicode_escape)]
+    OverlongUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_unclosed_unicode_escape)]
+    UnclosedUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+        #[suggestion(
+            parse_terminate,
+            code = "}}",
+            applicability = "maybe-incorrect",
+            style = "verbose"
+        )]
+        Span,
+    ),
+    #[diag(parse_no_brace_unicode_escape)]
+    NoBraceInUnicodeEscape {
+        #[primary_span]
+        span: Span,
+        #[label]
+        label: Option<Span>,
+        #[subdiagnostic]
+        sub: NoBraceUnicodeSub,
+    },
+    #[diag(parse_unicode_escape_in_byte)]
+    #[help]
+    UnicodeEscapeInByte(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_empty_unicode_escape)]
+    EmptyUnicodeEscape(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_zero_chars)]
+    ZeroChars(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_lone_slash)]
+    LoneSlash(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_unskipped_whitespace)]
+    UnskippedWhitespace {
+        #[primary_span]
+        span: Span,
+        #[label]
+        char_span: Span,
+        ch: String,
+    },
+    #[diag(parse_multiple_skipped_lines)]
+    MultipleSkippedLinesWarning(
+        #[primary_span]
+        #[label]
+        Span,
+    ),
+    #[diag(parse_more_than_one_char)]
+    MoreThanOneChar {
+        #[primary_span]
+        span: Span,
+        #[subdiagnostic]
+        note: Option<MoreThanOneCharNote>,
+        #[subdiagnostic]
+        suggestion: MoreThanOneCharSugg,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub enum MoreThanOneCharSugg {
+    #[suggestion(
+        parse_consider_normalized,
+        code = "{normalized}",
+        applicability = "machine-applicable"
+    )]
+    NormalizedForm {
+        #[primary_span]
+        span: Span,
+        ch: String,
+        normalized: String,
+    },
+    #[suggestion(parse_remove_non, code = "{ch}", applicability = "maybe-incorrect")]
+    RemoveNonPrinting {
+        #[primary_span]
+        span: Span,
+        ch: String,
+    },
+    #[suggestion(parse_use_double_quotes, code = "{sugg}", applicability = "machine-applicable")]
+    Quotes {
+        #[primary_span]
+        span: Span,
+        is_byte: bool,
+        sugg: String,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub enum MoreThanOneCharNote {
+    #[note(parse_followed_by)]
+    AllCombining {
+        #[primary_span]
+        span: Span,
+        chr: String,
+        len: usize,
+        escaped_marks: String,
+    },
+    #[note(parse_non_printing)]
+    NonPrinting {
+        #[primary_span]
+        span: Span,
+        escaped: String,
+    },
+}
+
+#[derive(Subdiagnostic)]
+pub enum NoBraceUnicodeSub {
+    #[suggestion(parse_use_braces, code = "{suggestion}", applicability = "maybe-incorrect")]
+    Suggestion {
+        #[primary_span]
+        span: Span,
+        suggestion: String,
+    },
+    #[help(parse_format_of_unicode)]
+    Help,
+}
+
+#[derive(Subdiagnostic)]
+pub(crate) enum TopLevelOrPatternNotAllowedSugg {
+    #[suggestion(
+        parse_sugg_remove_leading_vert_in_pattern,
+        code = "{pat}",
+        applicability = "machine-applicable"
+    )]
+    RemoveLeadingVert {
+        #[primary_span]
+        span: Span,
+        pat: String,
+    },
+    #[suggestion(
+        parse_sugg_wrap_pattern_in_parens,
+        code = "({pat})",
+        applicability = "machine-applicable"
+    )]
+    WrapInParens {
+        #[primary_span]
+        span: Span,
+        pat: String,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_before_function_parameter)]
+#[note(parse_note_pattern_alternatives_use_single_vert)]
+pub(crate) struct UnexpectedVertVertBeforeFunctionParam {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_vert_vert_in_pattern)]
+pub(crate) struct UnexpectedVertVertInPattern {
+    #[primary_span]
+    #[suggestion(code = "|", applicability = "machine-applicable")]
+    pub span: Span,
+    #[label(parse_label_while_parsing_or_pattern_here)]
+    pub start: Option<Span>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_trailing_vert_not_allowed)]
+pub(crate) struct TrailingVertNotAllowed {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    #[label(parse_label_while_parsing_or_pattern_here)]
+    pub start: Option<Span>,
+    pub token: Token,
+    #[note(parse_note_pattern_alternatives_use_single_vert)]
+    pub note_double_vert: Option<()>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dotdotdot_rest_pattern)]
+pub(crate) struct DotDotDotRestPattern {
+    #[primary_span]
+    #[suggestion(style = "short", code = "..", applicability = "machine-applicable")]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_pattern_on_wrong_side_of_at)]
+pub(crate) struct PatternOnWrongSideOfAt {
+    #[primary_span]
+    #[suggestion(code = "{whole_pat}", applicability = "machine-applicable")]
+    pub whole_span: Span,
+    pub whole_pat: String,
+    #[label(parse_label_pattern)]
+    pub pattern: Span,
+    #[label(parse_label_binding)]
+    pub binding: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_binding_left_of_at)]
+#[note]
+pub(crate) struct ExpectedBindingLeftOfAt {
+    #[primary_span]
+    pub whole_span: Span,
+    #[label(parse_label_lhs)]
+    pub lhs: Span,
+    #[label(parse_label_rhs)]
+    pub rhs: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ambiguous_range_pattern)]
+pub(crate) struct AmbiguousRangePattern {
+    #[primary_span]
+    #[suggestion(code = "({pat})", applicability = "maybe-incorrect")]
+    pub span: Span,
+    pub pat: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_unexpected_lifetime_in_pattern)]
+pub(crate) struct UnexpectedLifetimeInPattern {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+    pub symbol: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_ref_mut_order_incorrect)]
+pub(crate) struct RefMutOrderIncorrect {
+    #[primary_span]
+    #[suggestion(code = "ref mut", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+pub(crate) enum InvalidMutInPattern {
+    #[diag(parse_mut_on_nested_ident_pattern)]
+    #[note(parse_note_mut_pattern_usage)]
+    NestedIdent {
+        #[primary_span]
+        #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+        span: Span,
+        pat: String,
+    },
+    #[diag(parse_mut_on_non_ident_pattern)]
+    #[note(parse_note_mut_pattern_usage)]
+    NonIdent {
+        #[primary_span]
+        #[suggestion(code = "{pat}", applicability = "machine-applicable")]
+        span: Span,
+        pat: String,
+    },
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_repeated_mut_in_pattern)]
+pub(crate) struct RepeatedMutInPattern {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_range_to_pattern_not_allowed)]
+pub(crate) struct DotDotDotRangeToPatternNotAllowed {
+    #[primary_span]
+    #[suggestion(style = "short", code = "..=", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_enum_pattern_instead_of_identifier)]
+pub(crate) struct EnumPatternInsteadOfIdentifier {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dot_dot_dot_for_remaining_fields)]
+pub(crate) struct DotDotDotForRemainingFields {
+    #[primary_span]
+    #[suggestion(code = "..", style = "verbose", applicability = "machine-applicable")]
+    pub span: Span,
+    pub token_str: Cow<'static, str>,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_comma_after_pattern_field)]
+pub(crate) struct ExpectedCommaAfterPatternField {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_return_types_use_thin_arrow)]
+pub(crate) struct ReturnTypesUseThinArrow {
+    #[primary_span]
+    #[suggestion(style = "short", code = "->", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_need_plus_after_trait_object_lifetime)]
+pub(crate) struct NeedPlusAfterTraitObjectLifetime {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_expected_mut_or_const_in_raw_pointer_type)]
+pub(crate) struct ExpectedMutOrConstInRawPointerType {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code("mut ", "const "), applicability = "has-placeholders")]
+    pub after_asterisk: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_lifetime_after_mut)]
+pub(crate) struct LifetimeAfterMut {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "&{snippet} mut", applicability = "maybe-incorrect")]
+    pub suggest_lifetime: Option<Span>,
+    pub snippet: String,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_dyn_after_mut)]
+pub(crate) struct DynAfterMut {
+    #[primary_span]
+    #[suggestion(code = "&mut dyn", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_const)]
+pub(crate) struct FnPointerCannotBeConst {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    #[label]
+    pub qualifier: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_fn_pointer_cannot_be_async)]
+pub(crate) struct FnPointerCannotBeAsync {
+    #[primary_span]
+    pub span: Span,
+    #[suggestion(code = "", applicability = "maybe-incorrect")]
+    #[label]
+    pub qualifier: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_nested_c_variadic_type, code = "E0743")]
+pub(crate) struct NestedCVariadicType {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_invalid_dyn_keyword)]
+#[help]
+pub(crate) struct InvalidDynKeyword {
+    #[primary_span]
+    #[suggestion(code = "", applicability = "machine-applicable")]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(parse_negative_bounds_not_supported)]
+pub(crate) struct NegativeBoundsNotSupported {
+    #[primary_span]
+    pub negative_bounds: Vec<Span>,
+    #[label]
+    pub last_span: Span,
+    #[subdiagnostic]
+    pub sub: Option<NegativeBoundsNotSupportedSugg>,
+}
+
+#[derive(Subdiagnostic)]
+#[suggestion(
+    parse_suggestion,
+    style = "tool-only",
+    code = "{fixed}",
+    applicability = "machine-applicable"
+)]
+pub(crate) struct NegativeBoundsNotSupportedSugg {
+    #[primary_span]
+    pub bound_list: Span,
+    pub num_bounds: usize,
+    pub fixed: String,
+}
+
+#[derive(Subdiagnostic)]
+pub enum HelpUseLatestEdition {
+    #[help(parse_help_set_edition_cargo)]
+    #[note(parse_note_edition_guide)]
+    Cargo { edition: Edition },
+    #[help(parse_help_set_edition_standalone)]
+    #[note(parse_note_edition_guide)]
+    Standalone { edition: Edition },
+}
+
+impl HelpUseLatestEdition {
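+    /// Chooses the Cargo-specific or standalone edition help, depending on whether
+    /// the compiler appears to have been invoked by Cargo (detected via the `CARGO`
+    /// environment variable).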
+    pub fn new() -> Self {
+        let edition = LATEST_STABLE_EDITION;
+        if std::env::var_os("CARGO").is_some() {
+            Self::Cargo { edition }
+        } else {
+            Self::Standalone { edition }
+        }
+    }
+}
diff --git a/compiler/rustc_parse/src/lexer/diagnostics.rs b/compiler/rustc_parse/src/lexer/diagnostics.rs
new file mode 100644
index 00000000000..c4b9fdc81c5
--- /dev/null
+++ b/compiler/rustc_parse/src/lexer/diagnostics.rs
@@ -0,0 +1,119 @@
+use super::UnmatchedDelim;
+use rustc_ast::token::Delimiter;
+use rustc_errors::Diagnostic;
+use rustc_span::source_map::SourceMap;
+use rustc_span::Span;
+
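+/// Bookkeeping collected while lexing token trees, used to produce better
+/// diagnostics for mismatched or unclosed delimiters.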
+#[derive(Default)]
+pub struct TokenTreeDiagInfo {
+    /// Stack of open delimiters and their spans. Used for error messages.
+    pub open_braces: Vec<(Delimiter, Span)>,
+    pub unmatched_delims: Vec<UnmatchedDelim>,
+
+    /// Used only for error recovery when arriving at EOF with mismatched braces.
+    pub last_unclosed_found_span: Option<Span>,
+
+    /// Collect empty block spans that might have been auto-inserted by editors.
+    pub empty_block_spans: Vec<Span>,
+
+    /// Collect the spans of brace pairs (open, close). Used only for detecting
+    /// whether a block is empty, i.e. consists of nothing but the braces.
+    pub matching_block_spans: Vec<(Span, Span)>,
+}
+
+pub fn same_identation_level(sm: &SourceMap, open_sp: Span, close_sp: Span) -> bool {
+    match (sm.span_to_margin(open_sp), sm.span_to_margin(close_sp)) {
+        (Some(open_padding), Some(close_padding)) => open_padding == close_padding,
+        _ => false,
+    }
+}
+
+// When we get a `)` or `]` for an open `{`, we should emit a help message here;
+// it's friendlier than reporting an `unmatched` error in a later phase.
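+// Returns `true` if at least one "missing open delimiter" label was added to `err`.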
+pub fn report_missing_open_delim(
+    err: &mut Diagnostic,
+    unmatched_delims: &[UnmatchedDelim],
+) -> bool {
+    let mut reported_missing_open = false;
+    for unmatch_brace in unmatched_delims.iter() {
+        if let Some(delim) = unmatch_brace.found_delim
+            && matches!(delim, Delimiter::Parenthesis | Delimiter::Bracket)
+        {
+            let missed_open = match delim {
+                Delimiter::Parenthesis => "(",
+                Delimiter::Bracket => "[",
+                _ => unreachable!(),
+            };
+            err.span_label(
+                unmatch_brace.found_span.shrink_to_lo(),
+                format!("missing open `{}` for this delimiter", missed_open),
+            );
+            reported_missing_open = true;
+        }
+    }
+    reported_missing_open
+}
+
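+/// Annotates `err` with the brace pair that most likely caused the mismatch: the
+/// innermost matched block whose open and close delimiters sit at different
+/// indentation levels, falling back to the last properly closed block.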
+pub fn report_suspicious_mismatch_block(
+    err: &mut Diagnostic,
+    diag_info: &TokenTreeDiagInfo,
+    sm: &SourceMap,
+    delim: Delimiter,
+) {
+    if report_missing_open_delim(err, &diag_info.unmatched_delims) {
+        return;
+    }
+
+    let mut matched_spans: Vec<(Span, bool)> = diag_info
+        .matching_block_spans
+        .iter()
+        .map(|&(open, close)| (open.with_hi(close.lo()), same_identation_level(sm, open, close)))
+        .collect();
+
+    // sort by `lo`, so that the enclosing (larger) block spans come first
+    matched_spans.sort_by_key(|(span, _)| span.lo());
+
+    // An outer block whose indentation lines up masks the mismatched blocks nested inside it.
+    // This is O(N^2), but we are on the error-reporting path, so it is fine.
+    for i in 0..matched_spans.len() {
+        let (block_span, same_ident) = matched_spans[i];
+        if same_ident {
+            for j in i + 1..matched_spans.len() {
+                let (inner_block, inner_same_ident) = matched_spans[j];
+                if block_span.contains(inner_block) && !inner_same_ident {
+                    matched_spans[j] = (inner_block, true);
+                }
+            }
+        }
+    }
+
+    // Find the innermost span candidate for the final report
+    let candidate_span =
+        matched_spans.into_iter().rev().find(|&(_, same_ident)| !same_ident).map(|(span, _)| span);
+
+    if let Some(block_span) = candidate_span {
+        err.span_label(block_span.shrink_to_lo(), "this delimiter might not be properly closed...");
+        err.span_label(
+            block_span.shrink_to_hi(),
+            "...as it matches this but it has different indentation",
+        );
+
+        // If there is an empty block in the mismatched span, note it
+        if delim == Delimiter::Brace {
+            for span in diag_info.empty_block_spans.iter() {
+                if block_span.contains(*span) {
+                    err.span_label(*span, "block is empty, you might have not meant to close it");
+                    break;
+                }
+            }
+        }
+    } else {
+        // If there is no suspicious span, pointing at the last properly closed block may help.
+        if let Some(parent) = diag_info.matching_block_spans.last()
+            && diag_info.open_braces.last().is_none()
+            && diag_info.empty_block_spans.iter().all(|&sp| sp != parent.0.to(parent.1))
+        {
+            err.span_label(parent.0, "this opening brace...");
+            err.span_label(parent.1, "...matches this closing brace");
+        }
+    }
+}
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 645262bd2f1..9dbddee5a56 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,11 +1,11 @@
+use crate::errors;
 use crate::lexer::unicode_chars::UNICODE_ARRAY;
+use crate::make_unclosed_delims_error;
 use rustc_ast::ast::{self, AttrStyle};
 use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::util::unicode::contains_text_flow_control_chars;
-use rustc_errors::{
-    error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult, StashKey,
-};
+use rustc_errors::{error_code, Applicability, Diagnostic, DiagnosticBuilder, StashKey};
 use rustc_lexer::unescape::{self, Mode};
 use rustc_lexer::Cursor;
 use rustc_lexer::{Base, DocStyle, RawStrError};
@@ -17,6 +17,7 @@ use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{sym, Symbol};
 use rustc_span::{edition::Edition, BytePos, Pos, Span};
 
+mod diagnostics;
 mod tokentrees;
 mod unescape_error_reporting;
 mod unicode_chars;
@@ -31,7 +32,7 @@ use unescape_error_reporting::{emit_unescape_error, escaped_char};
 rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);
 
 #[derive(Clone, Debug)]
-pub struct UnmatchedBrace {
+pub struct UnmatchedDelim {
     pub expected_delim: Delimiter,
     pub found_delim: Option<Delimiter>,
     pub found_span: Span,
@@ -44,7 +45,7 @@ pub(crate) fn parse_token_trees<'a>(
     mut src: &'a str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
-) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+) -> Result<TokenStream, Vec<Diagnostic>> {
     // Skip `#!`, if present.
     if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
         src = &src[shebang_len..];
@@ -52,9 +53,38 @@ pub(crate) fn parse_token_trees<'a>(
     }
 
     let cursor = Cursor::new(src);
-    let string_reader =
-        StringReader { sess, start_pos, pos: start_pos, src, cursor, override_span };
-    tokentrees::TokenTreesReader::parse_all_token_trees(string_reader)
+    let string_reader = StringReader {
+        sess,
+        start_pos,
+        pos: start_pos,
+        src,
+        cursor,
+        override_span,
+        nbsp_is_whitespace: false,
+    };
+    let (token_trees, unmatched_delims) =
+        tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
+    match token_trees {
+        Ok(stream) if unmatched_delims.is_empty() => Ok(stream),
+        _ => {
+            // Return an error if there are unmatched or unclosed delimiters.
+            // We emit the delimiter mismatch errors first, then the unclosed delimiter error,
+            // because a delimiter mismatch is more likely to be the root cause.
+
+            let mut buffer = Vec::with_capacity(1);
+            // Not using `emit_unclosed_delims` so that the errors can be buffered
+            // (via `buffer`) instead of being emitted immediately.
+            for unmatched in unmatched_delims {
+                if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
+                    err.buffer(&mut buffer);
+                }
+            }
+            if let Err(err) = token_trees {
+                // Add the unclosed delimiter error
+                err.buffer(&mut buffer);
+            }
+            Err(buffer)
+        }
+    }
 }
 
 struct StringReader<'a> {
@@ -68,6 +98,10 @@ struct StringReader<'a> {
     /// Cursor for getting lexer tokens.
     cursor: Cursor<'a>,
     override_span: Option<Span>,
+    /// When a "unknown start of token: \u{a0}" has already been emitted earlier
+    /// in this file, it's safe to treat further occurrences of the non-breaking
+    /// space character as whitespace.
+    nbsp_is_whitespace: bool,
 }
 
 impl<'a> StringReader<'a> {
@@ -79,7 +113,7 @@ impl<'a> StringReader<'a> {
     /// preceded by whitespace.
     fn next_token(&mut self) -> (Token, bool) {
         let mut preceded_by_whitespace = false;
-
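+        // How many further copies of an invalid character should be skipped, so that a
+        // run of identical unknown characters produces a single diagnostic.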
+        let mut swallow_next_invalid = 0;
         // Skip trivial (whitespace & comments) tokens
         loop {
             let token = self.cursor.advance_token();
@@ -139,7 +173,7 @@ impl<'a> StringReader<'a> {
                     let span = self.mk_sp(start, self.pos);
                     self.sess.symbol_gallery.insert(sym, span);
                     if !sym.can_be_raw() {
-                        self.err_span(span, &format!("`{}` cannot be a raw identifier", sym));
+                        self.sess.emit_err(errors::CannotBeRawIdent { span, ident: sym });
                     }
                     self.sess.raw_identifier_spans.borrow_mut().push(span);
                     token::Ident(sym, true)
@@ -189,16 +223,21 @@ impl<'a> StringReader<'a> {
                     };
                     token::Literal(token::Lit { kind, symbol, suffix })
                 }
-                rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+                rustc_lexer::TokenKind::Lifetime { starts_with_number, contains_emoji } => {
                     // Include the leading `'` in the real identifier, for macro
                     // expansion purposes. See #12512 for the gory details of why
                     // this is necessary.
                     let lifetime_name = self.str_from(start);
                     if starts_with_number {
                         let span = self.mk_sp(start, self.pos);
-                        let mut diag = self.sess.struct_err("lifetimes cannot start with a number");
+                        let mut diag = self.sess.struct_err("lifetimes or labels cannot start with a number");
                         diag.set_span(span);
                         diag.stash(span, StashKey::LifetimeIsChar);
+                    } else if contains_emoji {
+                        let span = self.mk_sp(start, self.pos);
+                        let mut diag = self.sess.struct_err("lifetimes or labels cannot contain emojis");
+                        diag.set_span(span);
+                        diag.stash(span, StashKey::LifetimeContainsEmoji);
                     }
                     let ident = Symbol::intern(lifetime_name);
                     token::Lifetime(ident)
@@ -232,20 +271,42 @@ impl<'a> StringReader<'a> {
                 rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),
 
                 rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
-                    let c = self.str_from(start).chars().next().unwrap();
-                    let mut err =
-                        self.struct_err_span_char(start, self.pos, "unknown start of token", c);
+                    // Don't emit diagnostics for sequences of the same invalid token
+                    if swallow_next_invalid > 0 {
+                        swallow_next_invalid -= 1;
+                        continue;
+                    }
+                    let mut it = self.str_from_to_end(start).chars();
+                    let c = it.next().unwrap();
+                    if c == '\u{00a0}' {
+                        // If an error has already been reported on non-breaking
+                        // space characters earlier in the file, treat all
+                        // subsequent occurrences as whitespace.
+                        if self.nbsp_is_whitespace {
+                            preceded_by_whitespace = true;
+                            continue;
+                        }
+                        self.nbsp_is_whitespace = true;
+                    }
+                    let repeats = it.take_while(|c1| *c1 == c).count();
                     // FIXME: the lexer could be used to turn the ASCII version of unicode
                     // homoglyphs, instead of keeping a table in `check_for_substitution`into the
                     // token. Ideally, this should be inside `rustc_lexer`. However, we should
                     // first remove compound tokens like `<<` from `rustc_lexer`, and then add
                     // fancier error recovery to it, as there will be less overall work to do this
                     // way.
-                    let token = unicode_chars::check_for_substitution(self, start, c, &mut err);
-                    if c == '\x00' {
-                        err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used");
-                    }
-                    err.emit();
+                    let (token, sugg) =
+                        unicode_chars::check_for_substitution(self, start, c, repeats + 1);
+                    self.sess.emit_err(errors::UnknownTokenStart {
+                        span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
+                        escaped: escaped_char(c),
+                        sugg,
+                        null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
+                        repeat: if repeats > 0 {
+                            swallow_next_invalid = repeats;
+                            Some(errors::UnknownTokenRepeat { repeats })
+                        } else {
+                            None
+                        },
+                    });
+
                     if let Some(token) = token {
                         token
                     } else {
@@ -260,26 +321,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    /// Report a fatal lexical error with a given span.
-    fn fatal_span(&self, sp: Span, m: &str) -> ! {
-        self.sess.span_diagnostic.span_fatal(sp, m)
-    }
-
-    /// Report a lexical error with a given span.
-    fn err_span(&self, sp: Span, m: &str) {
-        self.sess.span_diagnostic.struct_span_err(sp, m).emit();
-    }
-
-    /// Report a fatal error spanning [`from_pos`, `to_pos`).
-    fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! {
-        self.fatal_span(self.mk_sp(from_pos, to_pos), m)
-    }
-
-    /// Report a lexical error spanning [`from_pos`, `to_pos`).
-    fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
-        self.err_span(self.mk_sp(from_pos, to_pos), m)
-    }
-
     fn struct_fatal_span_char(
         &self,
         from_pos: BytePos,
@@ -292,18 +333,6 @@ impl<'a> StringReader<'a> {
             .struct_span_fatal(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c)))
     }
 
-    fn struct_err_span_char(
-        &self,
-        from_pos: BytePos,
-        to_pos: BytePos,
-        m: &str,
-        c: char,
-    ) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
-        self.sess
-            .span_diagnostic
-            .struct_span_err(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c)))
-    }
-
     /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly
     /// complain about it.
     fn lint_unicode_text_flow(&self, start: BytePos) {
@@ -331,14 +360,12 @@ impl<'a> StringReader<'a> {
     ) -> TokenKind {
         if content.contains('\r') {
             for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
-                self.err_span_(
+                let span = self.mk_sp(
                     content_start + BytePos(idx as u32),
                     content_start + BytePos(idx as u32 + 1),
-                    match comment_kind {
-                        CommentKind::Line => "bare CR not allowed in doc-comment",
-                        CommentKind::Block => "bare CR not allowed in block doc-comment",
-                    },
                 );
+                let block = matches!(comment_kind, CommentKind::Block);
+                self.sess.emit_err(errors::CrDocComment { span, block });
             }
         }
 
@@ -417,26 +444,20 @@ impl<'a> StringReader<'a> {
             }
             rustc_lexer::LiteralKind::Int { base, empty_int } => {
                 if empty_int {
-                    self.sess
-                        .span_diagnostic
-                        .struct_span_err_with_code(
-                            self.mk_sp(start, end),
-                            "no valid digits found for number",
-                            error_code!(E0768),
-                        )
-                        .emit();
+                    let span = self.mk_sp(start, end);
+                    self.sess.emit_err(errors::NoDigitsLiteral { span });
                     (token::Integer, sym::integer(0))
                 } else {
                     if matches!(base, Base::Binary | Base::Octal) {
                         let base = base as u32;
                         let s = self.str_from_to(start + BytePos(2), end);
                         for (idx, c) in s.char_indices() {
+                            let span = self.mk_sp(
+                                start + BytePos::from_usize(2 + idx),
+                                start + BytePos::from_usize(2 + idx + c.len_utf8()),
+                            );
                             if c != '_' && c.to_digit(base).is_none() {
-                                self.err_span_(
-                                    start + BytePos::from_usize(2 + idx),
-                                    start + BytePos::from_usize(2 + idx + c.len_utf8()),
-                                    &format!("invalid digit for a base {} literal", base),
-                                );
+                                self.sess.emit_err(errors::InvalidDigitLiteral { span, base });
                             }
                         }
                     }
@@ -445,19 +466,18 @@ impl<'a> StringReader<'a> {
             }
             rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
                 if empty_exponent {
-                    self.err_span_(start, self.pos, "expected at least one digit in exponent");
+                    let span = self.mk_sp(start, self.pos);
+                    self.sess.emit_err(errors::EmptyExponentFloat { span });
                 }
-                match base {
-                    Base::Hexadecimal => {
-                        self.err_span_(start, end, "hexadecimal float literal is not supported")
-                    }
-                    Base::Octal => {
-                        self.err_span_(start, end, "octal float literal is not supported")
-                    }
-                    Base::Binary => {
-                        self.err_span_(start, end, "binary float literal is not supported")
-                    }
-                    _ => {}
+                let base = match base {
+                    Base::Hexadecimal => Some("hexadecimal"),
+                    Base::Octal => Some("octal"),
+                    Base::Binary => Some("binary"),
+                    _ => None,
+                };
+                if let Some(base) = base {
+                    let span = self.mk_sp(start, end);
+                    self.sess.emit_err(errors::FloatLiteralUnsupportedBase { span, base });
                 }
                 (token::Float, self.symbol_from_to(start, end))
             }
@@ -471,7 +491,7 @@ impl<'a> StringReader<'a> {
 
     /// Slice of the source text from `start` up to but excluding `self.pos`,
     /// meaning the slice does not include the character `self.ch`.
-    fn str_from(&self, start: BytePos) -> &str {
+    fn str_from(&self, start: BytePos) -> &'a str {
         self.str_from_to(start, self.pos)
     }
 
@@ -482,10 +502,15 @@ impl<'a> StringReader<'a> {
     }
 
     /// Slice of the source text spanning from `start` up to but excluding `end`.
-    fn str_from_to(&self, start: BytePos, end: BytePos) -> &str {
+    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'a str {
         &self.src[self.src_index(start)..self.src_index(end)]
     }
 
+    /// Slice of the source text spanning from `start` until the end of the source.
+    fn str_from_to_end(&self, start: BytePos) -> &'a str {
+        &self.src[self.src_index(start)..]
+    }
+
     fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! {
         match rustc_lexer::validate_raw_str(self.str_from(start), prefix_len) {
             Err(RawStrError::InvalidStarter { bad_char }) => {
@@ -602,54 +627,34 @@ impl<'a> StringReader<'a> {
     // identifier tokens.
     fn report_unknown_prefix(&self, start: BytePos) {
         let prefix_span = self.mk_sp(start, self.pos);
-        let prefix_str = self.str_from_to(start, self.pos);
-        let msg = format!("prefix `{}` is unknown", prefix_str);
+        let prefix = self.str_from_to(start, self.pos);
 
         let expn_data = prefix_span.ctxt().outer_expn_data();
 
         if expn_data.edition >= Edition::Edition2021 {
             // In Rust 2021, this is a hard error.
-            let mut err = self.sess.span_diagnostic.struct_span_err(prefix_span, &msg);
-            err.span_label(prefix_span, "unknown prefix");
-            if prefix_str == "rb" {
-                err.span_suggestion_verbose(
-                    prefix_span,
-                    "use `br` for a raw byte string",
-                    "br",
-                    Applicability::MaybeIncorrect,
-                );
+            let sugg = if prefix == "rb" {
+                Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
             } else if expn_data.is_root() {
-                err.span_suggestion_verbose(
-                    prefix_span.shrink_to_hi(),
-                    "consider inserting whitespace here",
-                    " ",
-                    Applicability::MaybeIncorrect,
-                );
-            }
-            err.note("prefixed identifiers and literals are reserved since Rust 2021");
-            err.emit();
+                Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
+            } else {
+                None
+            };
+            self.sess.emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
         } else {
             // Before Rust 2021, only emit a lint for migration.
             self.sess.buffer_lint_with_diagnostic(
                 &RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                 prefix_span,
                 ast::CRATE_NODE_ID,
-                &msg,
+                &format!("prefix `{prefix}` is unknown"),
                 BuiltinLintDiagnostics::ReservedPrefix(prefix_span),
             );
         }
     }
 
-    fn report_too_many_hashes(&self, start: BytePos, found: u32) -> ! {
-        self.fatal_span_(
-            start,
-            self.pos,
-            &format!(
-                "too many `#` symbols: raw strings may be delimited \
-                by up to 255 `#` symbols, but found {}",
-                found
-            ),
-        )
+    fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
+        self.sess.emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
     }
 
     fn cook_quoted(
@@ -661,6 +666,7 @@ impl<'a> StringReader<'a> {
         prefix_len: u32,
         postfix_len: u32,
     ) -> (token::LitKind, Symbol) {
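+        // Whether unescaping reported a fatal error; if so, the literal is cooked as
+        // `token::Err` below instead of its normal kind.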
+        let mut has_fatal_err = false;
         let content_start = start + BytePos(prefix_len);
         let content_end = end - BytePos(postfix_len);
         let lit_content = self.str_from_to(content_start, content_end);
@@ -672,6 +678,9 @@ impl<'a> StringReader<'a> {
                 let lo = content_start + BytePos(start);
                 let hi = lo + BytePos(end - start);
                 let span = self.mk_sp(lo, hi);
+                if err.is_fatal() {
+                    has_fatal_err = true;
+                }
                 emit_unescape_error(
                     &self.sess.span_diagnostic,
                     lit_content,
@@ -683,7 +692,14 @@ impl<'a> StringReader<'a> {
                 );
             }
         });
-        (kind, Symbol::intern(lit_content))
+
+        // We normally exclude the quotes for the symbol, but for errors we
+        // include it because it results in clearer error messages.
+        if !has_fatal_err {
+            (kind, Symbol::intern(lit_content))
+        } else {
+            (token::Err, self.symbol_from_to(start, end))
+        }
     }
 }
 
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index b2701817d48..36fd1e37d65 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -1,47 +1,31 @@
-use super::{StringReader, UnmatchedBrace};
+use super::diagnostics::report_suspicious_mismatch_block;
+use super::diagnostics::same_identation_level;
+use super::diagnostics::TokenTreeDiagInfo;
+use super::{StringReader, UnmatchedDelim};
 use rustc_ast::token::{self, Delimiter, Token};
 use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::token_to_string;
-use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{PErr, PResult};
-use rustc_span::Span;
 
 pub(super) struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
     /// The "next" token, which has been obtained from the `StringReader` but
     /// not yet handled by the `TokenTreesReader`.
     token: Token,
-    /// Stack of open delimiters and their spans. Used for error message.
-    open_braces: Vec<(Delimiter, Span)>,
-    unmatched_braces: Vec<UnmatchedBrace>,
-    /// The type and spans for all braces
-    ///
-    /// Used only for error recovery when arriving to EOF with mismatched braces.
-    matching_delim_spans: Vec<(Delimiter, Span, Span)>,
-    last_unclosed_found_span: Option<Span>,
-    /// Collect empty block spans that might have been auto-inserted by editors.
-    last_delim_empty_block_spans: FxHashMap<Delimiter, Span>,
-    /// Collect the spans of braces (Open, Close). Used only
-    /// for detecting if blocks are empty and only braces.
-    matching_block_spans: Vec<(Span, Span)>,
+    diag_info: TokenTreeDiagInfo,
 }
 
 impl<'a> TokenTreesReader<'a> {
     pub(super) fn parse_all_token_trees(
         string_reader: StringReader<'a>,
-    ) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+    ) -> (PResult<'a, TokenStream>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
             string_reader,
             token: Token::dummy(),
-            open_braces: Vec::new(),
-            unmatched_braces: Vec::new(),
-            matching_delim_spans: Vec::new(),
-            last_unclosed_found_span: None,
-            last_delim_empty_block_spans: FxHashMap::default(),
-            matching_block_spans: Vec::new(),
+            diag_info: TokenTreeDiagInfo::default(),
         };
         let res = tt_reader.parse_token_trees(/* is_delimited */ false);
-        (res, tt_reader.unmatched_braces)
+        (res, tt_reader.diag_info.unmatched_delims)
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s.
@@ -50,7 +34,7 @@ impl<'a> TokenTreesReader<'a> {
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
-                token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)),
+                token::OpenDelim(delim) => buf.push(self.parse_token_tree_open_delim(delim)?),
                 token::CloseDelim(delim) => {
                     return if is_delimited {
                         Ok(TokenStream::new(buf))
@@ -59,10 +43,11 @@ impl<'a> TokenTreesReader<'a> {
                     };
                 }
                 token::Eof => {
-                    if is_delimited {
-                        self.eof_err().emit();
-                    }
-                    return Ok(TokenStream::new(buf));
+                    return if is_delimited {
+                        Err(self.eof_err())
+                    } else {
+                        Ok(TokenStream::new(buf))
+                    };
                 }
                 _ => {
                     // Get the next normal token. This might require getting multiple adjacent
@@ -92,9 +77,9 @@ impl<'a> TokenTreesReader<'a> {
     fn eof_err(&mut self) -> PErr<'a> {
         let msg = "this file contains an unclosed delimiter";
         let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
-        for &(_, sp) in &self.open_braces {
+        for &(_, sp) in &self.diag_info.open_braces {
             err.span_label(sp, "unclosed delimiter");
-            self.unmatched_braces.push(UnmatchedBrace {
+            self.diag_info.unmatched_delims.push(UnmatchedDelim {
                 expected_delim: Delimiter::Brace,
                 found_delim: None,
                 found_span: self.token.span,
@@ -103,69 +88,53 @@ impl<'a> TokenTreesReader<'a> {
             });
         }
 
-        if let Some((delim, _)) = self.open_braces.last() {
-            if let Some((_, open_sp, close_sp)) =
-                self.matching_delim_spans.iter().find(|(d, open_sp, close_sp)| {
-                    let sm = self.string_reader.sess.source_map();
-                    if let Some(close_padding) = sm.span_to_margin(*close_sp) {
-                        if let Some(open_padding) = sm.span_to_margin(*open_sp) {
-                            return delim == d && close_padding != open_padding;
-                        }
-                    }
-                    false
-                })
-            // these are in reverse order as they get inserted on close, but
-            {
-                // we want the last open/first close
-                err.span_label(*open_sp, "this delimiter might not be properly closed...");
-                err.span_label(*close_sp, "...as it matches this but it has different indentation");
-            }
+        if let Some((delim, _)) = self.diag_info.open_braces.last() {
+            report_suspicious_mismatch_block(
+                &mut err,
+                &self.diag_info,
+                &self.string_reader.sess.source_map(),
+                *delim,
+            )
         }
         err
     }
 
-    fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> TokenTree {
+    fn parse_token_tree_open_delim(&mut self, open_delim: Delimiter) -> PResult<'a, TokenTree> {
         // The span for beginning of the delimited section
         let pre_span = self.token.span;
 
-        self.open_braces.push((open_delim, self.token.span));
+        self.diag_info.open_braces.push((open_delim, self.token.span));
 
         // Parse the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let tts = self.parse_token_trees(/* is_delimited */ true).unwrap();
+        let tts = self.parse_token_trees(/* is_delimited */ true)?;
 
         // Expand to cover the entire delimited token tree
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
+        let sm = self.string_reader.sess.source_map();
 
         match self.token.kind {
             // Correct delimiter.
             token::CloseDelim(close_delim) if close_delim == open_delim => {
-                let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
+                let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
                 let close_brace_span = self.token.span;
 
-                if tts.is_empty() {
+                if tts.is_empty() && close_delim == Delimiter::Brace {
                     let empty_block_span = open_brace_span.to(close_brace_span);
-                    let sm = self.string_reader.sess.source_map();
                     if !sm.is_multiline(empty_block_span) {
                         // Only track if the block is in the form of `{}`, otherwise it is
                         // likely that it was written on purpose.
-                        self.last_delim_empty_block_spans.insert(open_delim, empty_block_span);
+                        self.diag_info.empty_block_spans.push(empty_block_span);
                     }
                 }
 
-                //only add braces
+                // only add braces
                 if let (Delimiter::Brace, Delimiter::Brace) = (open_brace, open_delim) {
-                    self.matching_block_spans.push((open_brace_span, close_brace_span));
+                    // Add all the matching spans; we will sort them by span later
+                    self.diag_info.matching_block_spans.push((open_brace_span, close_brace_span));
                 }
 
-                if self.open_braces.is_empty() {
-                    // Clear up these spans to avoid suggesting them as we've found
-                    // properly matched delimiters so far for an entire block.
-                    self.matching_delim_spans.clear();
-                } else {
-                    self.matching_delim_spans.push((open_brace, open_brace_span, close_brace_span));
-                }
                 // Move past the closing delimiter.
                 self.token = self.string_reader.next_token().0;
             }
@@ -174,28 +143,25 @@ impl<'a> TokenTreesReader<'a> {
                 let mut unclosed_delimiter = None;
                 let mut candidate = None;
 
-                if self.last_unclosed_found_span != Some(self.token.span) {
+                if self.diag_info.last_unclosed_found_span != Some(self.token.span) {
                     // do not complain about the same unclosed delimiter multiple times
-                    self.last_unclosed_found_span = Some(self.token.span);
+                    self.diag_info.last_unclosed_found_span = Some(self.token.span);
                     // This is a conservative error: only report the last unclosed
                     // delimiter. The previous unclosed delimiters could actually be
                     // closed! The parser just hasn't gotten to them yet.
-                    if let Some(&(_, sp)) = self.open_braces.last() {
+                    if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
                         unclosed_delimiter = Some(sp);
                     };
-                    let sm = self.string_reader.sess.source_map();
-                    if let Some(current_padding) = sm.span_to_margin(self.token.span) {
-                        for (brace, brace_span) in &self.open_braces {
-                            if let Some(padding) = sm.span_to_margin(*brace_span) {
-                                // high likelihood of these two corresponding
-                                if current_padding == padding && brace == &close_delim {
-                                    candidate = Some(*brace_span);
-                                }
-                            }
+                    for (brace, brace_span) in &self.diag_info.open_braces {
+                        if same_identation_level(&sm, self.token.span, *brace_span)
+                            && brace == &close_delim
+                        {
+                            // high likelihood of these two corresponding
+                            candidate = Some(*brace_span);
                         }
                     }
-                    let (tok, _) = self.open_braces.pop().unwrap();
-                    self.unmatched_braces.push(UnmatchedBrace {
+                    let (tok, _) = self.diag_info.open_braces.pop().unwrap();
+                    self.diag_info.unmatched_delims.push(UnmatchedDelim {
                         expected_delim: tok,
                         found_delim: Some(close_delim),
                         found_span: self.token.span,
@@ -203,7 +169,7 @@ impl<'a> TokenTreesReader<'a> {
                         candidate_span: candidate,
                     });
                 } else {
-                    self.open_braces.pop();
+                    self.diag_info.open_braces.pop();
                 }
 
                 // If the incorrect delimiter matches an earlier opening
@@ -213,7 +179,7 @@ impl<'a> TokenTreesReader<'a> {
                 // fn foo() {
                 //     bar(baz(
                 // }  // Incorrect delimiter but matches the earlier `{`
-                if !self.open_braces.iter().any(|&(b, _)| b == close_delim) {
+                if !self.diag_info.open_braces.iter().any(|&(b, _)| b == close_delim) {
                     self.token = self.string_reader.next_token().0;
                 }
             }
@@ -225,7 +191,7 @@ impl<'a> TokenTreesReader<'a> {
             _ => unreachable!(),
         }
 
-        TokenTree::Delimited(delim_span, open_delim, tts)
+        Ok(TokenTree::Delimited(delim_span, open_delim, tts))
     }
 
     fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'a> {
@@ -236,22 +202,12 @@ impl<'a> TokenTreesReader<'a> {
         let mut err =
             self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, &msg);
 
-        // Braces are added at the end, so the last element is the biggest block
-        if let Some(parent) = self.matching_block_spans.last() {
-            if let Some(span) = self.last_delim_empty_block_spans.remove(&delim) {
-                // Check if the (empty block) is in the last properly closed block
-                if (parent.0.to(parent.1)).contains(span) {
-                    err.span_label(span, "block is empty, you might have not meant to close it");
-                } else {
-                    err.span_label(parent.0, "this opening brace...");
-                    err.span_label(parent.1, "...matches this closing brace");
-                }
-            } else {
-                err.span_label(parent.0, "this opening brace...");
-                err.span_label(parent.1, "...matches this closing brace");
-            }
-        }
-
+        report_suspicious_mismatch_block(
+            &mut err,
+            &self.diag_info,
+            &self.string_reader.sess.source_map(),
+            delim,
+        );
         err.span_label(self.token.span, "unexpected closing delimiter");
         err
     }
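
The hunks above replace the reader's separate `open_braces`/`unmatched_braces` fields and the inline empty-block heuristics with a single `diag_info` value that is handed to a shared `report_suspicious_mismatch_block` helper. The standalone sketch below uses made-up stand-in types (not the real `TokenTreeDiagInfo` or `UnmatchedDelim` definitions) to illustrate the shape of that refactor: all delimiter bookkeeping lives in one struct, and every "unexpected closing delimiter" path funnels through one reporting function.

    // Standalone sketch with hypothetical types; it mirrors the structure of
    // the refactor (one bookkeeping struct, one shared reporting helper), not
    // the actual rustc implementation.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Delim { Paren, Brace, Bracket }

    #[derive(Debug)]
    struct UnmatchedDelim {
        expected_delim: Delim,
        found_delim: Option<Delim>,
        found_offset: usize,
        candidate_offset: Option<usize>,
    }

    #[derive(Default, Debug)]
    struct DiagInfo {
        open_delims: Vec<(Delim, usize)>,   // openers still waiting for a close
        unmatched_delims: Vec<UnmatchedDelim>,
    }

    // Shared helper: each error site passes its bookkeeping here instead of
    // duplicating the span-labelling logic inline.
    fn report_suspicious_mismatch_block(out: &mut Vec<String>, info: &DiagInfo, found: Delim) {
        if let Some(&(expected, offset)) = info.open_delims.last() {
            if expected != found {
                out.push(format!(
                    "expected `{expected:?}` to close the delimiter opened at offset {offset}, found `{found:?}`"
                ));
            }
        }
    }

    fn main() {
        let mut info = DiagInfo::default();
        info.open_delims.push((Delim::Brace, 10));
        info.unmatched_delims.push(UnmatchedDelim {
            expected_delim: Delim::Brace,
            found_delim: Some(Delim::Paren),
            found_offset: 42,
            candidate_offset: Some(10),
        });
        let mut out = Vec::new();
        report_suspicious_mismatch_block(&mut out, &info, Delim::Paren);
        println!("{info:?}");
        println!("{out:?}");
    }
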
diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
index 6373f5b4fd6..0d12ec6081d 100644
--- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
+++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs
@@ -3,10 +3,12 @@
 use std::iter::once;
 use std::ops::Range;
 
-use rustc_errors::{pluralize, Applicability, Handler};
+use rustc_errors::{Applicability, Handler};
 use rustc_lexer::unescape::{EscapeError, Mode};
 use rustc_span::{BytePos, Span};
 
+use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError};
+
 pub(crate) fn emit_unescape_error(
     handler: &Handler,
     // interior part of the literal, without quotes
@@ -31,53 +33,32 @@ pub(crate) fn emit_unescape_error(
     };
     match error {
         EscapeError::LoneSurrogateUnicodeEscape => {
-            handler
-                .struct_span_err(span, "invalid unicode character escape")
-                .span_label(span, "invalid escape")
-                .help("unicode escape must not be a surrogate")
-                .emit();
+            handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: true });
         }
         EscapeError::OutOfRangeUnicodeEscape => {
-            handler
-                .struct_span_err(span, "invalid unicode character escape")
-                .span_label(span, "invalid escape")
-                .help("unicode escape must be at most 10FFFF")
-                .emit();
+            handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: false });
         }
         EscapeError::MoreThanOneChar => {
             use unicode_normalization::{char::is_combining_mark, UnicodeNormalization};
+            let mut sugg = None;
+            let mut note = None;
 
-            let mut has_help = false;
-            let mut handler = handler.struct_span_err(
-                span_with_quotes,
-                "character literal may only contain one codepoint",
-            );
-
-            if lit.chars().skip(1).all(|c| is_combining_mark(c)) {
-                let escaped_marks =
-                    lit.chars().skip(1).map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
-                handler.span_note(
-                    span,
-                    &format!(
-                        "this `{}` is followed by the combining mark{} `{}`",
-                        lit.chars().next().unwrap(),
-                        pluralize!(escaped_marks.len()),
-                        escaped_marks.join(""),
-                    ),
-                );
+            let lit_chars = lit.chars().collect::<Vec<_>>();
+            let (first, rest) = lit_chars.split_first().unwrap();
+            if rest.iter().copied().all(is_combining_mark) {
                 let normalized = lit.nfc().to_string();
                 if normalized.chars().count() == 1 {
-                    has_help = true;
-                    handler.span_suggestion(
-                        span,
-                        &format!(
-                            "consider using the normalized form `{}` of this character",
-                            normalized.chars().next().unwrap().escape_default()
-                        ),
-                        normalized,
-                        Applicability::MachineApplicable,
-                    );
+                    let ch = normalized.chars().next().unwrap().escape_default().to_string();
+                    sugg = Some(MoreThanOneCharSugg::NormalizedForm { span, ch, normalized });
                 }
+                let escaped_marks =
+                    rest.iter().map(|c| c.escape_default().to_string()).collect::<Vec<_>>();
+                note = Some(MoreThanOneCharNote::AllCombining {
+                    span,
+                    chr: format!("{first}"),
+                    len: escaped_marks.len(),
+                    escaped_marks: escaped_marks.join(""),
+                });
             } else {
                 let printable: Vec<char> = lit
                     .chars()
@@ -87,32 +68,18 @@ pub(crate) fn emit_unescape_error(
                     })
                     .collect();
 
-                if let [ch] = printable.as_slice() {
-                    has_help = true;
-
-                    handler.span_note(
+                if let &[ch] = printable.as_slice() {
+                    sugg =
+                        Some(MoreThanOneCharSugg::RemoveNonPrinting { span, ch: ch.to_string() });
+                    note = Some(MoreThanOneCharNote::NonPrinting {
                         span,
-                        &format!(
-                            "there are non-printing characters, the full sequence is `{}`",
-                            lit.escape_default(),
-                        ),
-                    );
-
-                    handler.span_suggestion(
-                        span,
-                        "consider removing the non-printing characters",
-                        ch,
-                        Applicability::MaybeIncorrect,
-                    );
+                        escaped: lit.escape_default().to_string(),
+                    });
                 }
-            }
-
-            if !has_help {
-                let (prefix, msg) = if mode.is_byte() {
-                    ("b", "if you meant to write a byte string literal, use double quotes")
-                } else {
-                    ("", "if you meant to write a `str` literal, use double quotes")
-                };
+            };
+            let sugg = sugg.unwrap_or_else(|| {
+                let is_byte = mode.is_byte();
+                let prefix = if is_byte { "b" } else { "" };
                 let mut escaped = String::with_capacity(lit.len());
                 let mut chrs = lit.chars().peekable();
                 while let Some(first) = chrs.next() {
@@ -129,54 +96,32 @@ pub(crate) fn emit_unescape_error(
                         (c, _) => escaped.push(c),
                     };
                 }
-                handler.span_suggestion(
-                    span_with_quotes,
-                    msg,
-                    format!("{prefix}\"{escaped}\""),
-                    Applicability::MachineApplicable,
-                );
-            }
-
-            handler.emit();
+                let sugg = format!("{prefix}\"{escaped}\"");
+                MoreThanOneCharSugg::Quotes { span: span_with_quotes, is_byte, sugg }
+            });
+            handler.emit_err(UnescapeError::MoreThanOneChar {
+                span: span_with_quotes,
+                note,
+                suggestion: sugg,
+            });
         }
         EscapeError::EscapeOnlyChar => {
             let (c, char_span) = last_char();
-
-            let msg = if mode.is_byte() {
-                "byte constant must be escaped"
-            } else {
-                "character constant must be escaped"
-            };
-            handler
-                .struct_span_err(span, &format!("{}: `{}`", msg, escaped_char(c)))
-                .span_suggestion(
-                    char_span,
-                    "escape the character",
-                    c.escape_default(),
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            handler.emit_err(UnescapeError::EscapeOnlyChar {
+                span,
+                char_span,
+                escaped_sugg: c.escape_default().to_string(),
+                escaped_msg: escaped_char(c),
+                byte: mode.is_byte(),
+            });
         }
         EscapeError::BareCarriageReturn => {
-            let msg = if mode.in_double_quotes() {
-                "bare CR not allowed in string, use `\\r` instead"
-            } else {
-                "character constant must be escaped: `\\r`"
-            };
-            handler
-                .struct_span_err(span, msg)
-                .span_suggestion(
-                    span,
-                    "escape the character",
-                    "\\r",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            let double_quotes = mode.in_double_quotes();
+            handler.emit_err(UnescapeError::BareCr { span, double_quotes });
         }
         EscapeError::BareCarriageReturnInRawString => {
             assert!(mode.in_double_quotes());
-            let msg = "bare CR not allowed in raw string";
-            handler.span_err(span, msg);
+            handler.emit_err(UnescapeError::BareCrRawString(span));
         }
         EscapeError::InvalidEscape => {
             let (c, span) = last_char();
@@ -213,22 +158,13 @@ pub(crate) fn emit_unescape_error(
             diag.emit();
         }
         EscapeError::TooShortHexEscape => {
-            handler.span_err(span, "numeric character escape is too short");
+            handler.emit_err(UnescapeError::TooShortHexEscape(span));
         }
         EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => {
             let (c, span) = last_char();
-
-            let msg = if error == EscapeError::InvalidCharInHexEscape {
-                "invalid character in numeric character escape"
-            } else {
-                "invalid character in unicode escape"
-            };
-            let c = escaped_char(c);
-
-            handler
-                .struct_span_err(span, &format!("{}: `{}`", msg, c))
-                .span_label(span, msg)
-                .emit();
+            let is_hex = error == EscapeError::InvalidCharInHexEscape;
+            let ch = escaped_char(c);
+            handler.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch });
         }
         EscapeError::NonAsciiCharInByte => {
             let (c, span) = last_char();
@@ -278,41 +214,22 @@ pub(crate) fn emit_unescape_error(
             err.emit();
         }
         EscapeError::OutOfRangeHexEscape => {
-            handler
-                .struct_span_err(span, "out of range hex escape")
-                .span_label(span, "must be a character in the range [\\x00-\\x7f]")
-                .emit();
+            handler.emit_err(UnescapeError::OutOfRangeHexEscape(span));
         }
         EscapeError::LeadingUnderscoreUnicodeEscape => {
             let (c, span) = last_char();
-            let msg = "invalid start of unicode escape";
-            handler
-                .struct_span_err(span, &format!("{}: `{}`", msg, c))
-                .span_label(span, msg)
-                .emit();
+            handler.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape {
+                span,
+                ch: escaped_char(c),
+            });
         }
         EscapeError::OverlongUnicodeEscape => {
-            handler
-                .struct_span_err(span, "overlong unicode escape")
-                .span_label(span, "must have at most 6 hex digits")
-                .emit();
+            handler.emit_err(UnescapeError::OverlongUnicodeEscape(span));
         }
         EscapeError::UnclosedUnicodeEscape => {
-            handler
-                .struct_span_err(span, "unterminated unicode escape")
-                .span_label(span, "missing a closing `}`")
-                .span_suggestion_verbose(
-                    span.shrink_to_hi(),
-                    "terminate the unicode escape",
-                    "}",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            handler.emit_err(UnescapeError::UnclosedUnicodeEscape(span, span.shrink_to_hi()));
         }
         EscapeError::NoBraceInUnicodeEscape => {
-            let msg = "incorrect unicode escape sequence";
-            let mut diag = handler.struct_span_err(span, msg);
-
             let mut suggestion = "\\u{".to_owned();
             let mut suggestion_len = 0;
             let (c, char_span) = last_char();
@@ -322,54 +239,37 @@ pub(crate) fn emit_unescape_error(
                 suggestion_len += c.len_utf8();
             }
 
-            if suggestion_len > 0 {
+            let (label, sub) = if suggestion_len > 0 {
                 suggestion.push('}');
                 let hi = char_span.lo() + BytePos(suggestion_len as u32);
-                diag.span_suggestion(
-                    span.with_hi(hi),
-                    "format of unicode escape sequences uses braces",
-                    suggestion,
-                    Applicability::MaybeIncorrect,
-                );
+                (None, NoBraceUnicodeSub::Suggestion { span: span.with_hi(hi), suggestion })
             } else {
-                diag.span_label(span, msg);
-                diag.help("format of unicode escape sequences is `\\u{...}`");
-            }
-
-            diag.emit();
+                (Some(span), NoBraceUnicodeSub::Help)
+            };
+            handler.emit_err(UnescapeError::NoBraceInUnicodeEscape { span, label, sub });
         }
         EscapeError::UnicodeEscapeInByte => {
-            let msg = "unicode escape in byte string";
-            handler
-                .struct_span_err(span, msg)
-                .span_label(span, msg)
-                .help("unicode escape sequences cannot be used as a byte or in a byte string")
-                .emit();
+            handler.emit_err(UnescapeError::UnicodeEscapeInByte(span));
         }
         EscapeError::EmptyUnicodeEscape => {
-            handler
-                .struct_span_err(span, "empty unicode escape")
-                .span_label(span, "this escape must have at least 1 hex digit")
-                .emit();
+            handler.emit_err(UnescapeError::EmptyUnicodeEscape(span));
         }
         EscapeError::ZeroChars => {
-            let msg = "empty character literal";
-            handler.struct_span_err(span, msg).span_label(span, msg).emit();
+            handler.emit_err(UnescapeError::ZeroChars(span));
         }
         EscapeError::LoneSlash => {
-            let msg = "invalid trailing slash in literal";
-            handler.struct_span_err(span, msg).span_label(span, msg).emit();
+            handler.emit_err(UnescapeError::LoneSlash(span));
         }
         EscapeError::UnskippedWhitespaceWarning => {
             let (c, char_span) = last_char();
-            let msg =
-                format!("non-ASCII whitespace symbol '{}' is not skipped", c.escape_unicode());
-            handler.struct_span_warn(span, &msg).span_label(char_span, &msg).emit();
+            handler.emit_warning(UnescapeError::UnskippedWhitespace {
+                span,
+                ch: escaped_char(c),
+                char_span,
+            });
         }
         EscapeError::MultipleSkippedLinesWarning => {
-            let msg = "multiple lines skipped by escaped newline";
-            let bottom_msg = "skipping everything up to and including this point";
-            handler.struct_span_warn(span, msg).span_label(span, bottom_msg).emit();
+            handler.emit_warning(UnescapeError::MultipleSkippedLinesWarning(span));
         }
     }
 }
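
Every arm of `emit_unescape_error` now hands the handler a pre-built `UnescapeError` value via `emit_err`/`emit_warning` instead of assembling a diagnostic inline, so the message text moves out of this file and into the Fluent catalogue. Below is a rough, hedged sketch of what two of those enum variants plausibly look like in `errors.rs`; the slug names and attribute placement are assumptions inferred from the constructors visible in the hunks above, and the snippet only compiles inside the rustc workspace.

    // Hedged sketch of compiler-internal code; slugs such as
    // `parse_invalid_unicode_escape` are assumed, not copied from errors.rs.
    use rustc_macros::Diagnostic;
    use rustc_span::Span;

    #[derive(Diagnostic)]
    pub(crate) enum UnescapeError {
        // Matches the `InvalidUnicodeEscape { span, surrogate }` constructor above.
        #[diag(parse_invalid_unicode_escape)]
        InvalidUnicodeEscape {
            #[primary_span]
            span: Span,
            surrogate: bool,
        },
        // Matches the `TooShortHexEscape(span)` constructor above.
        #[diag(parse_too_short_hex_escape)]
        TooShortHexEscape(#[primary_span] Span),
    }

With the variants defined once, each match arm in this file shrinks to a single `handler.emit_err(...)` or `handler.emit_warning(...)` call, as the hunks show.
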
diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs
index 2c68cc5895c..d4f971d5bc8 100644
--- a/compiler/rustc_parse/src/lexer/unicode_chars.rs
+++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs
@@ -1,377 +1,383 @@
-// Characters and their corresponding confusables were collected from
-// https://www.unicode.org/Public/security/10.0.0/confusables.txt
+//! Characters and their corresponding confusables were collected from
+//! <https://www.unicode.org/Public/security/10.0.0/confusables.txt>
 
 use super::StringReader;
-use crate::token::{self, Delimiter};
-use rustc_errors::{Applicability, Diagnostic};
+use crate::{
+    errors::TokenSubstitution,
+    token::{self, Delimiter},
+};
 use rustc_span::{symbol::kw, BytePos, Pos, Span};
 
 #[rustfmt::skip] // for line breaks
-pub(crate) const UNICODE_ARRAY: &[(char, &str, char)] = &[
-    ('
', "Line Separator", ' '),
-    ('
', "Paragraph Separator", ' '),
-    (' ', "Ogham Space mark", ' '),
-    (' ', "En Quad", ' '),
-    (' ', "Em Quad", ' '),
-    (' ', "En Space", ' '),
-    (' ', "Em Space", ' '),
-    (' ', "Three-Per-Em Space", ' '),
-    (' ', "Four-Per-Em Space", ' '),
-    (' ', "Six-Per-Em Space", ' '),
-    (' ', "Punctuation Space", ' '),
-    (' ', "Thin Space", ' '),
-    (' ', "Hair Space", ' '),
-    (' ', "Medium Mathematical Space", ' '),
-    (' ', "No-Break Space", ' '),
-    (' ', "Figure Space", ' '),
-    (' ', "Narrow No-Break Space", ' '),
-    (' ', "Ideographic Space", ' '),
-
-    ('ߺ', "Nko Lajanyalan", '_'),
-    ('﹍', "Dashed Low Line", '_'),
-    ('﹎', "Centreline Low Line", '_'),
-    ('﹏', "Wavy Low Line", '_'),
-    ('_', "Fullwidth Low Line", '_'),
-
-    ('‐', "Hyphen", '-'),
-    ('‑', "Non-Breaking Hyphen", '-'),
-    ('‒', "Figure Dash", '-'),
-    ('–', "En Dash", '-'),
-    ('—', "Em Dash", '-'),
-    ('﹘', "Small Em Dash", '-'),
-    ('۔', "Arabic Full Stop", '-'),
-    ('⁃', "Hyphen Bullet", '-'),
-    ('˗', "Modifier Letter Minus Sign", '-'),
-    ('−', "Minus Sign", '-'),
-    ('➖', "Heavy Minus Sign", '-'),
-    ('Ⲻ', "Coptic Letter Dialect-P Ni", '-'),
-    ('ー', "Katakana-Hiragana Prolonged Sound Mark", '-'),
-    ('-', "Fullwidth Hyphen-Minus", '-'),
-    ('―', "Horizontal Bar", '-'),
-    ('─', "Box Drawings Light Horizontal", '-'),
-    ('━', "Box Drawings Heavy Horizontal", '-'),
-    ('㇐', "CJK Stroke H", '-'),
-    ('ꟷ', "Latin Epigraphic Letter Sideways I", '-'),
-    ('ᅳ', "Hangul Jungseong Eu", '-'),
-    ('ㅡ', "Hangul Letter Eu", '-'),
-    ('一', "CJK Unified Ideograph-4E00", '-'),
-    ('⼀', "Kangxi Radical One", '-'),
-
-    ('؍', "Arabic Date Separator", ','),
-    ('٫', "Arabic Decimal Separator", ','),
-    ('‚', "Single Low-9 Quotation Mark", ','),
-    ('¸', "Cedilla", ','),
-    ('ꓹ', "Lisu Letter Tone Na Po", ','),
-    (',', "Fullwidth Comma", ','),
-
-    (';', "Greek Question Mark", ';'),
-    (';', "Fullwidth Semicolon", ';'),
-    ('︔', "Presentation Form For Vertical Semicolon", ';'),
-
-    ('ः', "Devanagari Sign Visarga", ':'),
-    ('ઃ', "Gujarati Sign Visarga", ':'),
-    (':', "Fullwidth Colon", ':'),
-    ('։', "Armenian Full Stop", ':'),
-    ('܃', "Syriac Supralinear Colon", ':'),
-    ('܄', "Syriac Sublinear Colon", ':'),
-    ('᛬', "Runic Multiple Punctuation", ':'),
-    ('︰', "Presentation Form For Vertical Two Dot Leader", ':'),
-    ('᠃', "Mongolian Full Stop", ':'),
-    ('᠉', "Mongolian Manchu Full Stop", ':'),
-    ('⁚', "Two Dot Punctuation", ':'),
-    ('׃', "Hebrew Punctuation Sof Pasuq", ':'),
-    ('˸', "Modifier Letter Raised Colon", ':'),
-    ('꞉', "Modifier Letter Colon", ':'),
-    ('∶', "Ratio", ':'),
-    ('ː', "Modifier Letter Triangular Colon", ':'),
-    ('ꓽ', "Lisu Letter Tone Mya Jeu", ':'),
-    ('︓', "Presentation Form For Vertical Colon", ':'),
-
-    ('!', "Fullwidth Exclamation Mark", '!'),
-    ('ǃ', "Latin Letter Retroflex Click", '!'),
-    ('ⵑ', "Tifinagh Letter Tuareg Yang", '!'),
-    ('︕', "Presentation Form For Vertical Exclamation Mark", '!'),
-
-    ('ʔ', "Latin Letter Glottal Stop", '?'),
-    ('Ɂ', "Latin Capital Letter Glottal Stop", '?'),
-    ('ॽ', "Devanagari Letter Glottal Stop", '?'),
-    ('Ꭾ', "Cherokee Letter He", '?'),
-    ('ꛫ', "Bamum Letter Ntuu", '?'),
-    ('?', "Fullwidth Question Mark", '?'),
-    ('︖', "Presentation Form For Vertical Question Mark", '?'),
-
-    ('𝅭', "Musical Symbol Combining Augmentation Dot", '.'),
-    ('․', "One Dot Leader", '.'),
-    ('܁', "Syriac Supralinear Full Stop", '.'),
-    ('܂', "Syriac Sublinear Full Stop", '.'),
-    ('꘎', "Vai Full Stop", '.'),
-    ('𐩐', "Kharoshthi Punctuation Dot", '.'),
-    ('٠', "Arabic-Indic Digit Zero", '.'),
-    ('۰', "Extended Arabic-Indic Digit Zero", '.'),
-    ('ꓸ', "Lisu Letter Tone Mya Ti", '.'),
-    ('·', "Middle Dot", '.'),
-    ('・', "Katakana Middle Dot", '.'),
-    ('・', "Halfwidth Katakana Middle Dot", '.'),
-    ('᛫', "Runic Single Punctuation", '.'),
-    ('·', "Greek Ano Teleia", '.'),
-    ('⸱', "Word Separator Middle Dot", '.'),
-    ('𐄁', "Aegean Word Separator Dot", '.'),
-    ('•', "Bullet", '.'),
-    ('‧', "Hyphenation Point", '.'),
-    ('∙', "Bullet Operator", '.'),
-    ('⋅', "Dot Operator", '.'),
-    ('ꞏ', "Latin Letter Sinological Dot", '.'),
-    ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'),
-    ('ᐧ', "Canadian Syllabics Final Middle Dot", '.'),
-    ('.', "Fullwidth Full Stop", '.'),
-    ('。', "Ideographic Full Stop", '.'),
-    ('︒', "Presentation Form For Vertical Ideographic Full Stop", '.'),
-
-    ('՝', "Armenian Comma", '\''),
-    (''', "Fullwidth Apostrophe", '\''),
-    ('‘', "Left Single Quotation Mark", '\''),
-    ('’', "Right Single Quotation Mark", '\''),
-    ('‛', "Single High-Reversed-9 Quotation Mark", '\''),
-    ('′', "Prime", '\''),
-    ('‵', "Reversed Prime", '\''),
-    ('՚', "Armenian Apostrophe", '\''),
-    ('׳', "Hebrew Punctuation Geresh", '\''),
-    ('`', "Grave Accent", '\''),
-    ('`', "Greek Varia", '\''),
-    ('`', "Fullwidth Grave Accent", '\''),
-    ('´', "Acute Accent", '\''),
-    ('΄', "Greek Tonos", '\''),
-    ('´', "Greek Oxia", '\''),
-    ('᾽', "Greek Koronis", '\''),
-    ('᾿', "Greek Psili", '\''),
-    ('῾', "Greek Dasia", '\''),
-    ('ʹ', "Modifier Letter Prime", '\''),
-    ('ʹ', "Greek Numeral Sign", '\''),
-    ('ˈ', "Modifier Letter Vertical Line", '\''),
-    ('ˊ', "Modifier Letter Acute Accent", '\''),
-    ('ˋ', "Modifier Letter Grave Accent", '\''),
-    ('˴', "Modifier Letter Middle Grave Accent", '\''),
-    ('ʻ', "Modifier Letter Turned Comma", '\''),
-    ('ʽ', "Modifier Letter Reversed Comma", '\''),
-    ('ʼ', "Modifier Letter Apostrophe", '\''),
-    ('ʾ', "Modifier Letter Right Half Ring", '\''),
-    ('ꞌ', "Latin Small Letter Saltillo", '\''),
-    ('י', "Hebrew Letter Yod", '\''),
-    ('ߴ', "Nko High Tone Apostrophe", '\''),
-    ('ߵ', "Nko Low Tone Apostrophe", '\''),
-    ('ᑊ', "Canadian Syllabics West-Cree P", '\''),
-    ('ᛌ', "Runic Letter Short-Twig-Sol S", '\''),
-    ('𖽑', "Miao Sign Aspiration", '\''),
-    ('𖽒', "Miao Sign Reformed Voicing", '\''),
-
-    ('᳓', "Vedic Sign Nihshvasa", '"'),
-    ('"', "Fullwidth Quotation Mark", '"'),
-    ('“', "Left Double Quotation Mark", '"'),
-    ('”', "Right Double Quotation Mark", '"'),
-    ('‟', "Double High-Reversed-9 Quotation Mark", '"'),
-    ('″', "Double Prime", '"'),
-    ('‶', "Reversed Double Prime", '"'),
-    ('〃', "Ditto Mark", '"'),
-    ('״', "Hebrew Punctuation Gershayim", '"'),
-    ('˝', "Double Acute Accent", '"'),
-    ('ʺ', "Modifier Letter Double Prime", '"'),
-    ('˶', "Modifier Letter Middle Double Acute Accent", '"'),
-    ('˵', "Modifier Letter Middle Double Grave Accent", '"'),
-    ('ˮ', "Modifier Letter Double Apostrophe", '"'),
-    ('ײ', "Hebrew Ligature Yiddish Double Yod", '"'),
-    ('❞', "Heavy Double Comma Quotation Mark Ornament", '"'),
-    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", '"'),
-
-    ('(', "Fullwidth Left Parenthesis", '('),
-    ('❨', "Medium Left Parenthesis Ornament", '('),
-    ('﴾', "Ornate Left Parenthesis", '('),
-
-    (')', "Fullwidth Right Parenthesis", ')'),
-    ('❩', "Medium Right Parenthesis Ornament", ')'),
-    ('﴿', "Ornate Right Parenthesis", ')'),
-
-    ('[', "Fullwidth Left Square Bracket", '['),
-    ('❲', "Light Left Tortoise Shell Bracket Ornament", '['),
-    ('「', "Left Corner Bracket", '['),
-    ('『', "Left White Corner Bracket", '['),
-    ('【', "Left Black Lenticular Bracket", '['),
-    ('〔', "Left Tortoise Shell Bracket", '['),
-    ('〖', "Left White Lenticular Bracket", '['),
-    ('〘', "Left White Tortoise Shell Bracket", '['),
-    ('〚', "Left White Square Bracket", '['),
-
-    (']', "Fullwidth Right Square Bracket", ']'),
-    ('❳', "Light Right Tortoise Shell Bracket Ornament", ']'),
-    ('」', "Right Corner Bracket", ']'),
-    ('』', "Right White Corner Bracket", ']'),
-    ('】', "Right Black Lenticular Bracket", ']'),
-    ('〕', "Right Tortoise Shell Bracket", ']'),
-    ('〗', "Right White Lenticular Bracket", ']'),
-    ('〙', "Right White Tortoise Shell Bracket", ']'),
-    ('〛', "Right White Square Bracket", ']'),
-
-    ('❴', "Medium Left Curly Bracket Ornament", '{'),
-    ('𝄔', "Musical Symbol Brace", '{'),
-    ('{', "Fullwidth Left Curly Bracket", '{'),
-
-    ('❵', "Medium Right Curly Bracket Ornament", '}'),
-    ('}', "Fullwidth Right Curly Bracket", '}'),
-
-    ('⁎', "Low Asterisk", '*'),
-    ('٭', "Arabic Five Pointed Star", '*'),
-    ('∗', "Asterisk Operator", '*'),
-    ('𐌟', "Old Italic Letter Ess", '*'),
-    ('*', "Fullwidth Asterisk", '*'),
-
-    ('᜵', "Philippine Single Punctuation", '/'),
-    ('⁁', "Caret Insertion Point", '/'),
-    ('∕', "Division Slash", '/'),
-    ('⁄', "Fraction Slash", '/'),
-    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", '/'),
-    ('⟋', "Mathematical Rising Diagonal", '/'),
-    ('⧸', "Big Solidus", '/'),
-    ('𝈺', "Greek Instrumental Notation Symbol-47", '/'),
-    ('㇓', "CJK Stroke Sp", '/'),
-    ('〳', "Vertical Kana Repeat Mark Upper Half", '/'),
-    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", '/'),
-    ('ノ', "Katakana Letter No", '/'),
-    ('丿', "CJK Unified Ideograph-4E3F", '/'),
-    ('⼃', "Kangxi Radical Slash", '/'),
-    ('/', "Fullwidth Solidus", '/'),
-
-    ('\', "Fullwidth Reverse Solidus", '\\'),
-    ('﹨', "Small Reverse Solidus", '\\'),
-    ('∖', "Set Minus", '\\'),
-    ('⟍', "Mathematical Falling Diagonal", '\\'),
-    ('⧵', "Reverse Solidus Operator", '\\'),
-    ('⧹', "Big Reverse Solidus", '\\'),
-    ('⧹', "Greek Vocal Notation Symbol-16", '\\'),
-    ('⧹', "Greek Instrumental Symbol-48", '\\'),
-    ('㇔', "CJK Stroke D", '\\'),
-    ('丶', "CJK Unified Ideograph-4E36", '\\'),
-    ('⼂', "Kangxi Radical Dot", '\\'),
-    ('、', "Ideographic Comma", '\\'),
-    ('ヽ', "Katakana Iteration Mark", '\\'),
-
-    ('ꝸ', "Latin Small Letter Um", '&'),
-    ('&', "Fullwidth Ampersand", '&'),
-
-    ('᛭', "Runic Cross Punctuation", '+'),
-    ('➕', "Heavy Plus Sign", '+'),
-    ('𐊛', "Lycian Letter H", '+'),
-    ('﬩', "Hebrew Letter Alternative Plus Sign", '+'),
-    ('+', "Fullwidth Plus Sign", '+'),
-
-    ('‹', "Single Left-Pointing Angle Quotation Mark", '<'),
-    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", '<'),
-    ('˂', "Modifier Letter Left Arrowhead", '<'),
-    ('𝈶', "Greek Instrumental Symbol-40", '<'),
-    ('ᐸ', "Canadian Syllabics Pa", '<'),
-    ('ᚲ', "Runic Letter Kauna", '<'),
-    ('❬', "Medium Left-Pointing Angle Bracket Ornament", '<'),
-    ('⟨', "Mathematical Left Angle Bracket", '<'),
-    ('〈', "Left-Pointing Angle Bracket", '<'),
-    ('〈', "Left Angle Bracket", '<'),
-    ('㇛', "CJK Stroke Pd", '<'),
-    ('く', "Hiragana Letter Ku", '<'),
-    ('𡿨', "CJK Unified Ideograph-21FE8", '<'),
-    ('《', "Left Double Angle Bracket", '<'),
-    ('<', "Fullwidth Less-Than Sign", '<'),
-
-    ('᐀', "Canadian Syllabics Hyphen", '='),
-    ('⹀', "Double Hyphen", '='),
-    ('゠', "Katakana-Hiragana Double Hyphen", '='),
-    ('꓿', "Lisu Punctuation Full Stop", '='),
-    ('=', "Fullwidth Equals Sign", '='),
-
-    ('›', "Single Right-Pointing Angle Quotation Mark", '>'),
-    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", '>'),
-    ('˃', "Modifier Letter Right Arrowhead", '>'),
-    ('𝈷', "Greek Instrumental Symbol-42", '>'),
-    ('ᐳ', "Canadian Syllabics Po", '>'),
-    ('𖼿', "Miao Letter Archaic Zza", '>'),
-    ('❭', "Medium Right-Pointing Angle Bracket Ornament", '>'),
-    ('⟩', "Mathematical Right Angle Bracket", '>'),
-    ('〉', "Right-Pointing Angle Bracket", '>'),
-    ('〉', "Right Angle Bracket", '>'),
-    ('》', "Right Double Angle Bracket", '>'),
-    ('>', "Fullwidth Greater-Than Sign", '>'),
+pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
+    ('
', "Line Separator", " "),
+    ('
', "Paragraph Separator", " "),
+    (' ', "Ogham Space mark", " "),
+    (' ', "En Quad", " "),
+    (' ', "Em Quad", " "),
+    (' ', "En Space", " "),
+    (' ', "Em Space", " "),
+    (' ', "Three-Per-Em Space", " "),
+    (' ', "Four-Per-Em Space", " "),
+    (' ', "Six-Per-Em Space", " "),
+    (' ', "Punctuation Space", " "),
+    (' ', "Thin Space", " "),
+    (' ', "Hair Space", " "),
+    (' ', "Medium Mathematical Space", " "),
+    (' ', "No-Break Space", " "),
+    (' ', "Figure Space", " "),
+    (' ', "Narrow No-Break Space", " "),
+    (' ', "Ideographic Space", " "),
+
+    ('ߺ', "Nko Lajanyalan", "_"),
+    ('﹍', "Dashed Low Line", "_"),
+    ('﹎', "Centreline Low Line", "_"),
+    ('﹏', "Wavy Low Line", "_"),
+    ('_', "Fullwidth Low Line", "_"),
+
+    ('‐', "Hyphen", "-"),
+    ('‑', "Non-Breaking Hyphen", "-"),
+    ('‒', "Figure Dash", "-"),
+    ('–', "En Dash", "-"),
+    ('—', "Em Dash", "-"),
+    ('﹘', "Small Em Dash", "-"),
+    ('۔', "Arabic Full Stop", "-"),
+    ('⁃', "Hyphen Bullet", "-"),
+    ('˗', "Modifier Letter Minus Sign", "-"),
+    ('−', "Minus Sign", "-"),
+    ('➖', "Heavy Minus Sign", "-"),
+    ('Ⲻ', "Coptic Letter Dialect-P Ni", "-"),
+    ('ー', "Katakana-Hiragana Prolonged Sound Mark", "-"),
+    ('-', "Fullwidth Hyphen-Minus", "-"),
+    ('―', "Horizontal Bar", "-"),
+    ('─', "Box Drawings Light Horizontal", "-"),
+    ('━', "Box Drawings Heavy Horizontal", "-"),
+    ('㇐', "CJK Stroke H", "-"),
+    ('ꟷ', "Latin Epigraphic Letter Sideways I", "-"),
+    ('ᅳ', "Hangul Jungseong Eu", "-"),
+    ('ㅡ', "Hangul Letter Eu", "-"),
+    ('一', "CJK Unified Ideograph-4E00", "-"),
+    ('⼀', "Kangxi Radical One", "-"),
+
+    ('؍', "Arabic Date Separator", ","),
+    ('٫', "Arabic Decimal Separator", ","),
+    ('‚', "Single Low-9 Quotation Mark", ","),
+    ('¸', "Cedilla", ","),
+    ('ꓹ', "Lisu Letter Tone Na Po", ","),
+    (',', "Fullwidth Comma", ","),
+
+    (';', "Greek Question Mark", ";"),
+    (';', "Fullwidth Semicolon", ";"),
+    ('︔', "Presentation Form For Vertical Semicolon", ";"),
+
+    ('ः', "Devanagari Sign Visarga", ":"),
+    ('ઃ', "Gujarati Sign Visarga", ":"),
+    (':', "Fullwidth Colon", ":"),
+    ('։', "Armenian Full Stop", ":"),
+    ('܃', "Syriac Supralinear Colon", ":"),
+    ('܄', "Syriac Sublinear Colon", ":"),
+    ('᛬', "Runic Multiple Punctuation", ":"),
+    ('︰', "Presentation Form For Vertical Two Dot Leader", ":"),
+    ('᠃', "Mongolian Full Stop", ":"),
+    ('᠉', "Mongolian Manchu Full Stop", ":"),
+    ('⁚', "Two Dot Punctuation", ":"),
+    ('׃', "Hebrew Punctuation Sof Pasuq", ":"),
+    ('˸', "Modifier Letter Raised Colon", ":"),
+    ('꞉', "Modifier Letter Colon", ":"),
+    ('∶', "Ratio", ":"),
+    ('ː', "Modifier Letter Triangular Colon", ":"),
+    ('ꓽ', "Lisu Letter Tone Mya Jeu", ":"),
+    ('︓', "Presentation Form For Vertical Colon", ":"),
+
+    ('!', "Fullwidth Exclamation Mark", "!"),
+    ('ǃ', "Latin Letter Retroflex Click", "!"),
+    ('ⵑ', "Tifinagh Letter Tuareg Yang", "!"),
+    ('︕', "Presentation Form For Vertical Exclamation Mark", "!"),
+
+    ('ʔ', "Latin Letter Glottal Stop", "?"),
+    ('Ɂ', "Latin Capital Letter Glottal Stop", "?"),
+    ('ॽ', "Devanagari Letter Glottal Stop", "?"),
+    ('Ꭾ', "Cherokee Letter He", "?"),
+    ('ꛫ', "Bamum Letter Ntuu", "?"),
+    ('?', "Fullwidth Question Mark", "?"),
+    ('︖', "Presentation Form For Vertical Question Mark", "?"),
+
+    ('𝅭', "Musical Symbol Combining Augmentation Dot", "."),
+    ('․', "One Dot Leader", "."),
+    ('܁', "Syriac Supralinear Full Stop", "."),
+    ('܂', "Syriac Sublinear Full Stop", "."),
+    ('꘎', "Vai Full Stop", "."),
+    ('𐩐', "Kharoshthi Punctuation Dot", "."),
+    ('٠', "Arabic-Indic Digit Zero", "."),
+    ('۰', "Extended Arabic-Indic Digit Zero", "."),
+    ('ꓸ', "Lisu Letter Tone Mya Ti", "."),
+    ('·', "Middle Dot", "."),
+    ('・', "Katakana Middle Dot", "."),
+    ('・', "Halfwidth Katakana Middle Dot", "."),
+    ('᛫', "Runic Single Punctuation", "."),
+    ('·', "Greek Ano Teleia", "."),
+    ('⸱', "Word Separator Middle Dot", "."),
+    ('𐄁', "Aegean Word Separator Dot", "."),
+    ('•', "Bullet", "."),
+    ('‧', "Hyphenation Point", "."),
+    ('∙', "Bullet Operator", "."),
+    ('⋅', "Dot Operator", "."),
+    ('ꞏ', "Latin Letter Sinological Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('ᐧ', "Canadian Syllabics Final Middle Dot", "."),
+    ('.', "Fullwidth Full Stop", "."),
+    ('。', "Ideographic Full Stop", "."),
+    ('︒', "Presentation Form For Vertical Ideographic Full Stop", "."),
+
+    ('՝', "Armenian Comma", "\'"),
+    (''', "Fullwidth Apostrophe", "\'"),
+    ('‘', "Left Single Quotation Mark", "\'"),
+    ('’', "Right Single Quotation Mark", "\'"),
+    ('‛', "Single High-Reversed-9 Quotation Mark", "\'"),
+    ('′', "Prime", "\'"),
+    ('‵', "Reversed Prime", "\'"),
+    ('՚', "Armenian Apostrophe", "\'"),
+    ('׳', "Hebrew Punctuation Geresh", "\'"),
+    ('`', "Grave Accent", "\'"),
+    ('`', "Greek Varia", "\'"),
+    ('`', "Fullwidth Grave Accent", "\'"),
+    ('´', "Acute Accent", "\'"),
+    ('΄', "Greek Tonos", "\'"),
+    ('´', "Greek Oxia", "\'"),
+    ('᾽', "Greek Koronis", "\'"),
+    ('᾿', "Greek Psili", "\'"),
+    ('῾', "Greek Dasia", "\'"),
+    ('ʹ', "Modifier Letter Prime", "\'"),
+    ('ʹ', "Greek Numeral Sign", "\'"),
+    ('ˈ', "Modifier Letter Vertical Line", "\'"),
+    ('ˊ', "Modifier Letter Acute Accent", "\'"),
+    ('ˋ', "Modifier Letter Grave Accent", "\'"),
+    ('˴', "Modifier Letter Middle Grave Accent", "\'"),
+    ('ʻ', "Modifier Letter Turned Comma", "\'"),
+    ('ʽ', "Modifier Letter Reversed Comma", "\'"),
+    ('ʼ', "Modifier Letter Apostrophe", "\'"),
+    ('ʾ', "Modifier Letter Right Half Ring", "\'"),
+    ('ꞌ', "Latin Small Letter Saltillo", "\'"),
+    ('י', "Hebrew Letter Yod", "\'"),
+    ('ߴ', "Nko High Tone Apostrophe", "\'"),
+    ('ߵ', "Nko Low Tone Apostrophe", "\'"),
+    ('ᑊ', "Canadian Syllabics West-Cree P", "\'"),
+    ('ᛌ', "Runic Letter Short-Twig-Sol S", "\'"),
+    ('𖽑', "Miao Sign Aspiration", "\'"),
+    ('𖽒', "Miao Sign Reformed Voicing", "\'"),
+
+    ('᳓', "Vedic Sign Nihshvasa", "\""),
+    ('"', "Fullwidth Quotation Mark", "\""),
+    ('“', "Left Double Quotation Mark", "\""),
+    ('”', "Right Double Quotation Mark", "\""),
+    ('‟', "Double High-Reversed-9 Quotation Mark", "\""),
+    ('″', "Double Prime", "\""),
+    ('‶', "Reversed Double Prime", "\""),
+    ('〃', "Ditto Mark", "\""),
+    ('״', "Hebrew Punctuation Gershayim", "\""),
+    ('˝', "Double Acute Accent", "\""),
+    ('ʺ', "Modifier Letter Double Prime", "\""),
+    ('˶', "Modifier Letter Middle Double Acute Accent", "\""),
+    ('˵', "Modifier Letter Middle Double Grave Accent", "\""),
+    ('ˮ', "Modifier Letter Double Apostrophe", "\""),
+    ('ײ', "Hebrew Ligature Yiddish Double Yod", "\""),
+    ('❞', "Heavy Double Comma Quotation Mark Ornament", "\""),
+    ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", "\""),
+
+    ('(', "Fullwidth Left Parenthesis", "("),
+    ('❨', "Medium Left Parenthesis Ornament", "("),
+    ('﴾', "Ornate Left Parenthesis", "("),
+
+    (')', "Fullwidth Right Parenthesis", ")"),
+    ('❩', "Medium Right Parenthesis Ornament", ")"),
+    ('﴿', "Ornate Right Parenthesis", ")"),
+
+    ('[', "Fullwidth Left Square Bracket", "["),
+    ('❲', "Light Left Tortoise Shell Bracket Ornament", "["),
+    ('「', "Left Corner Bracket", "["),
+    ('『', "Left White Corner Bracket", "["),
+    ('【', "Left Black Lenticular Bracket", "["),
+    ('〔', "Left Tortoise Shell Bracket", "["),
+    ('〖', "Left White Lenticular Bracket", "["),
+    ('〘', "Left White Tortoise Shell Bracket", "["),
+    ('〚', "Left White Square Bracket", "["),
+
+    (']', "Fullwidth Right Square Bracket", "]"),
+    ('❳', "Light Right Tortoise Shell Bracket Ornament", "]"),
+    ('」', "Right Corner Bracket", "]"),
+    ('』', "Right White Corner Bracket", "]"),
+    ('】', "Right Black Lenticular Bracket", "]"),
+    ('〕', "Right Tortoise Shell Bracket", "]"),
+    ('〗', "Right White Lenticular Bracket", "]"),
+    ('〙', "Right White Tortoise Shell Bracket", "]"),
+    ('〛', "Right White Square Bracket", "]"),
+
+    ('❴', "Medium Left Curly Bracket Ornament", "{"),
+    ('𝄔', "Musical Symbol Brace", "{"),
+    ('{', "Fullwidth Left Curly Bracket", "{"),
+
+    ('❵', "Medium Right Curly Bracket Ornament", "}"),
+    ('}', "Fullwidth Right Curly Bracket", "}"),
+
+    ('⁎', "Low Asterisk", "*"),
+    ('٭', "Arabic Five Pointed Star", "*"),
+    ('∗', "Asterisk Operator", "*"),
+    ('𐌟', "Old Italic Letter Ess", "*"),
+    ('*', "Fullwidth Asterisk", "*"),
+
+    ('᜵', "Philippine Single Punctuation", "/"),
+    ('⁁', "Caret Insertion Point", "/"),
+    ('∕', "Division Slash", "/"),
+    ('⁄', "Fraction Slash", "/"),
+    ('╱', "Box Drawings Light Diagonal Upper Right To Lower Left", "/"),
+    ('⟋', "Mathematical Rising Diagonal", "/"),
+    ('⧸', "Big Solidus", "/"),
+    ('𝈺', "Greek Instrumental Notation Symbol-47", "/"),
+    ('㇓', "CJK Stroke Sp", "/"),
+    ('〳', "Vertical Kana Repeat Mark Upper Half", "/"),
+    ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", "/"),
+    ('ノ', "Katakana Letter No", "/"),
+    ('丿', "CJK Unified Ideograph-4E3F", "/"),
+    ('⼃', "Kangxi Radical Slash", "/"),
+    ('/', "Fullwidth Solidus", "/"),
+
+    ('\', "Fullwidth Reverse Solidus", "\\"),
+    ('﹨', "Small Reverse Solidus", "\\"),
+    ('∖', "Set Minus", "\\"),
+    ('⟍', "Mathematical Falling Diagonal", "\\"),
+    ('⧵', "Reverse Solidus Operator", "\\"),
+    ('⧹', "Big Reverse Solidus", "\\"),
+    ('⧹', "Greek Vocal Notation Symbol-16", "\\"),
+    ('⧹', "Greek Instrumental Symbol-48", "\\"),
+    ('㇔', "CJK Stroke D", "\\"),
+    ('丶', "CJK Unified Ideograph-4E36", "\\"),
+    ('⼂', "Kangxi Radical Dot", "\\"),
+    ('、', "Ideographic Comma", "\\"),
+    ('ヽ', "Katakana Iteration Mark", "\\"),
+
+    ('ꝸ', "Latin Small Letter Um", "&"),
+    ('&', "Fullwidth Ampersand", "&"),
+
+    ('᛭', "Runic Cross Punctuation", "+"),
+    ('➕', "Heavy Plus Sign", "+"),
+    ('𐊛', "Lycian Letter H", "+"),
+    ('﬩', "Hebrew Letter Alternative Plus Sign", "+"),
+    ('+', "Fullwidth Plus Sign", "+"),
+
+    ('‹', "Single Left-Pointing Angle Quotation Mark", "<"),
+    ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", "<"),
+    ('˂', "Modifier Letter Left Arrowhead", "<"),
+    ('𝈶', "Greek Instrumental Symbol-40", "<"),
+    ('ᐸ', "Canadian Syllabics Pa", "<"),
+    ('ᚲ', "Runic Letter Kauna", "<"),
+    ('❬', "Medium Left-Pointing Angle Bracket Ornament", "<"),
+    ('⟨', "Mathematical Left Angle Bracket", "<"),
+    ('〈', "Left-Pointing Angle Bracket", "<"),
+    ('〈', "Left Angle Bracket", "<"),
+    ('㇛', "CJK Stroke Pd", "<"),
+    ('く', "Hiragana Letter Ku", "<"),
+    ('𡿨', "CJK Unified Ideograph-21FE8", "<"),
+    ('《', "Left Double Angle Bracket", "<"),
+    ('<', "Fullwidth Less-Than Sign", "<"),
+
+    ('᐀', "Canadian Syllabics Hyphen", "="),
+    ('⹀', "Double Hyphen", "="),
+    ('゠', "Katakana-Hiragana Double Hyphen", "="),
+    ('꓿', "Lisu Punctuation Full Stop", "="),
+    ('=', "Fullwidth Equals Sign", "="),
+
+    ('›', "Single Right-Pointing Angle Quotation Mark", ">"),
+    ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", ">"),
+    ('˃', "Modifier Letter Right Arrowhead", ">"),
+    ('𝈷', "Greek Instrumental Symbol-42", ">"),
+    ('ᐳ', "Canadian Syllabics Po", ">"),
+    ('𖼿', "Miao Letter Archaic Zza", ">"),
+    ('❭', "Medium Right-Pointing Angle Bracket Ornament", ">"),
+    ('⟩', "Mathematical Right Angle Bracket", ">"),
+    ('〉', "Right-Pointing Angle Bracket", ">"),
+    ('〉', "Right Angle Bracket", ">"),
+    ('》', "Right Double Angle Bracket", ">"),
+    ('>', "Fullwidth Greater-Than Sign", ">"),
+    ('⩵', "Two Consecutive Equals Signs", "==")
 ];
 
 // FIXME: the lexer could be used to return the ASCII version of unicode homoglyphs, instead of
 // keeping the substitution token in this table. Ideally, this should be inside `rustc_lexer`.
 // However, we should first remove compound tokens like `<<` from `rustc_lexer`, and then add
 // fancier error recovery to it, as there will be less overall work to do this way.
-const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[
-    (' ', "Space", None),
-    ('_', "Underscore", Some(token::Ident(kw::Underscore, false))),
-    ('-', "Minus/Hyphen", Some(token::BinOp(token::Minus))),
-    (',', "Comma", Some(token::Comma)),
-    (';', "Semicolon", Some(token::Semi)),
-    (':', "Colon", Some(token::Colon)),
-    ('!', "Exclamation Mark", Some(token::Not)),
-    ('?', "Question Mark", Some(token::Question)),
-    ('.', "Period", Some(token::Dot)),
-    ('(', "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
-    (')', "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
-    ('[', "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
-    (']', "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
-    ('{', "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
-    ('}', "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
-    ('*', "Asterisk", Some(token::BinOp(token::Star))),
-    ('/', "Slash", Some(token::BinOp(token::Slash))),
-    ('\\', "Backslash", None),
-    ('&', "Ampersand", Some(token::BinOp(token::And))),
-    ('+', "Plus Sign", Some(token::BinOp(token::Plus))),
-    ('<', "Less-Than Sign", Some(token::Lt)),
-    ('=', "Equals Sign", Some(token::Eq)),
-    ('>', "Greater-Than Sign", Some(token::Gt)),
+const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
+    (" ", "Space", None),
+    ("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
+    ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
+    (",", "Comma", Some(token::Comma)),
+    (";", "Semicolon", Some(token::Semi)),
+    (":", "Colon", Some(token::Colon)),
+    ("!", "Exclamation Mark", Some(token::Not)),
+    ("?", "Question Mark", Some(token::Question)),
+    (".", "Period", Some(token::Dot)),
+    ("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
+    (")", "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
+    ("[", "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
+    ("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
+    ("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
+    ("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
+    ("*", "Asterisk", Some(token::BinOp(token::Star))),
+    ("/", "Slash", Some(token::BinOp(token::Slash))),
+    ("\\", "Backslash", None),
+    ("&", "Ampersand", Some(token::BinOp(token::And))),
+    ("+", "Plus Sign", Some(token::BinOp(token::Plus))),
+    ("<", "Less-Than Sign", Some(token::Lt)),
+    ("=", "Equals Sign", Some(token::Eq)),
+    ("==", "Double Equals Sign", Some(token::EqEq)),
+    (">", "Greater-Than Sign", Some(token::Gt)),
     // FIXME: Literals are already lexed by this point, so we can't recover gracefully just by
     // spitting the correct token out.
-    ('\'', "Single Quote", None),
-    ('"', "Quotation Mark", None),
+    ("\'", "Single Quote", None),
+    ("\"", "Quotation Mark", None),
 ];
 
 pub(super) fn check_for_substitution<'a>(
     reader: &StringReader<'a>,
     pos: BytePos,
     ch: char,
-    err: &mut Diagnostic,
-) -> Option<token::TokenKind> {
-    let &(_u_char, u_name, ascii_char) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?;
+    count: usize,
+) -> (Option<token::TokenKind>, Option<TokenSubstitution>) {
+    let Some(&(_, u_name, ascii_str)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else {
+        return (None, None);
+    };
 
-    let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));
+    let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
 
-    let Some((_ascii_char, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) else {
+    let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
         let msg = format!("substitution character not found for '{}'", ch);
         reader.sess.span_diagnostic.span_bug_no_panic(span, &msg);
-        return None;
+        return (None, None);
     };
 
     // special help suggestion for "directed" double quotes
-    if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') {
-        let msg = format!(
-            "Unicode characters '“' (Left Double Quotation Mark) and \
-             '”' (Right Double Quotation Mark) look like '{}' ({}), but are not",
-            ascii_char, ascii_name
-        );
-        err.span_suggestion(
-            Span::with_root_ctxt(
-                pos,
-                pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
-            ),
-            &msg,
-            format!("\"{}\"", s),
-            Applicability::MaybeIncorrect,
+    let sugg = if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') {
+        let span = Span::with_root_ctxt(
+            pos,
+            pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
         );
+        Some(TokenSubstitution::DirectedQuotes {
+            span,
+            suggestion: format!("\"{s}\""),
+            ascii_str,
+            ascii_name,
+        })
     } else {
-        let msg = format!(
-            "Unicode character '{}' ({}) looks like '{}' ({}), but it is not",
-            ch, u_name, ascii_char, ascii_name
-        );
-        err.span_suggestion(span, &msg, ascii_char, Applicability::MaybeIncorrect);
-    }
-    token.clone()
+        let suggestion = ascii_str.to_string().repeat(count);
+        Some(TokenSubstitution::Other {
+            span,
+            suggestion,
+            ch: ch.to_string(),
+            u_name,
+            ascii_str,
+            ascii_name,
+        })
+    };
+    (token.clone(), sugg)
 }
 
 /// Extract string if found at current position with given delimiters
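
With substitutions stored as `&str` rather than `char`, a confusable can now map to a multi-character ASCII sequence ('⩵' becomes "=="), and a run of `count` identical confusables becomes one repeated suggestion, per the `ascii_str.to_string().repeat(count)` line above. The standalone toy below (a tiny hypothetical subset of the table, not the real lexer code) shows that lookup-and-repeat step in isolation.

    // Toy illustration only: a two-entry stand-in for UNICODE_ARRAY and the
    // lookup-and-repeat logic performed by the new check_for_substitution.
    const CONFUSABLES: &[(char, &str, &str)] = &[
        ('−', "Minus Sign", "-"),
        ('⩵', "Two Consecutive Equals Signs", "=="),
    ];

    fn ascii_suggestion(ch: char, count: usize) -> Option<String> {
        let &(_, _, ascii_str) = CONFUSABLES.iter().find(|&&(c, _, _)| c == ch)?;
        // A run of `count` copies of the confusable maps to `count` copies of
        // its ASCII replacement.
        Some(ascii_str.repeat(count))
    }

    fn main() {
        assert_eq!(ascii_suggestion('−', 3).as_deref(), Some("---"));
        assert_eq!(ascii_suggestion('⩵', 1).as_deref(), Some("=="));
        assert_eq!(ascii_suggestion('a', 1), None);
        println!("all good");
    }
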
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 3dcadb4c911..d1c3fd0cd0f 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -3,6 +3,7 @@
 #![feature(array_windows)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
+#![feature(iter_intersperse)]
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(rustc_attrs)]
@@ -14,11 +15,12 @@ extern crate tracing;
 use rustc_ast as ast;
 use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
-use rustc_ast::Attribute;
-use rustc_ast::{AttrItem, MetaItem};
+use rustc_ast::{AttrItem, Attribute, MetaItem};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
+use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage};
+use rustc_macros::fluent_messages;
 use rustc_session::parse::ParseSess;
 use rustc_span::{FileName, SourceFile, Span};
 
@@ -28,12 +30,14 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
-use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
+use parser::{make_unclosed_delims_error, Parser};
 pub mod lexer;
 pub mod validate_attr;
 
 mod errors;
 
+fluent_messages! { "../locales/en-US.ftl" }
+
 // A bunch of utility functions of the form `parse_<thing>_from_<source>`
 // where <thing> includes crate, expr, item, stmt, tts, and one that
 // uses a HOF to parse anything, and <source> includes file and
@@ -92,10 +96,7 @@ pub fn parse_stream_from_source_str(
     sess: &ParseSess,
     override_span: Option<Span>,
 ) -> TokenStream {
-    let (stream, mut errors) =
-        source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
-    emit_unclosed_delims(&mut errors, &sess);
-    stream
+    source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
 /// Creates a new parser from a source string.
@@ -131,9 +132,8 @@ fn maybe_source_file_to_parser(
     source_file: Lrc<SourceFile>,
 ) -> Result<Parser<'_>, Vec<Diagnostic>> {
     let end_pos = source_file.end_pos;
-    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
+    let stream = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);
-    parser.unclosed_delims = unclosed_delims;
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
     }
@@ -178,7 +178,7 @@ pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
+) -> TokenStream {
     panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
@@ -188,7 +188,7 @@ pub fn maybe_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
-) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
+) -> Result<TokenStream, Vec<Diagnostic>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         sess.span_diagnostic.bug(&format!(
             "cannot lex `source_file` without source: {}",
@@ -196,23 +196,7 @@ pub fn maybe_file_to_stream(
         ));
     });
 
-    let (token_trees, unmatched_braces) =
-        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);
-
-    match token_trees {
-        Ok(stream) => Ok((stream, unmatched_braces)),
-        Err(err) => {
-            let mut buffer = Vec::with_capacity(1);
-            err.buffer(&mut buffer);
-            // Not using `emit_unclosed_delims` to use `db.buffer`
-            for unmatched in unmatched_braces {
-                if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
-                    err.buffer(&mut buffer);
-                }
-            }
-            Err(buffer)
-        }
-    }
+    lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span)
 }
 
 /// Given a stream and the `ParseSess`, produces a parser.
@@ -256,10 +240,12 @@ pub fn parse_cfg_attr(
     parse_sess: &ParseSess,
 ) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
     match attr.get_normal_item().args {
-        ast::MacArgs::Delimited(dspan, delim, ref tts) if !tts.is_empty() => {
+        ast::AttrArgs::Delimited(ast::DelimArgs { dspan, delim, ref tokens })
+            if !tokens.is_empty() =>
+        {
             let msg = "wrong `cfg_attr` delimiters";
             crate::validate_attr::check_meta_bad_delim(parse_sess, dspan, delim, msg);
-            match parse_in(parse_sess, tts.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
+            match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
                 Ok(r) => return Some(r),
                 Err(mut e) => {
                     e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 9e45656946b..e3e7c63e344 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,12 +1,15 @@
 use crate::errors::{InvalidMetaItem, SuffixedLiteralInAttribute};
+use crate::fluent_generated as fluent;
 
 use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle};
 use rustc_ast as ast;
 use rustc_ast::attr;
 use rustc_ast::token::{self, Delimiter, Nonterminal};
-use rustc_errors::{error_code, fluent, Diagnostic, IntoDiagnostic, PResult};
+use rustc_errors::{error_code, Diagnostic, IntoDiagnostic, PResult};
 use rustc_span::{sym, BytePos, Span};
 use std::convert::TryInto;
+use thin_vec::ThinVec;
+use tracing::debug;
 
 // Public for rustfmt usage
 #[derive(Debug)]
@@ -55,7 +58,7 @@ impl<'a> Parser<'a> {
                     let span = self.token.span;
                     let mut err = self.sess.span_diagnostic.struct_span_err_with_code(
                         span,
-                        fluent::parser_inner_doc_comment_not_permitted,
+                        fluent::parse_inner_doc_comment_not_permitted,
                         error_code!(E0753),
                     );
                     if let Some(replacement_span) = self.annotate_following_item_if_applicable(
@@ -66,10 +69,10 @@ impl<'a> Parser<'a> {
                             token::CommentKind::Block => OuterAttributeType::DocBlockComment,
                         },
                     ) {
-                        err.note(fluent::note);
+                        err.note(fluent::parse_note);
                         err.span_suggestion_verbose(
                             replacement_span,
-                            fluent::suggestion,
+                            fluent::parse_suggestion,
                             "",
                             rustc_errors::Applicability::MachineApplicable,
                         );
@@ -173,10 +176,10 @@ impl<'a> Parser<'a> {
             Ok(Some(item)) => {
                 // FIXME(#100717)
                 err.set_arg("item", item.kind.descr());
-                err.span_label(item.span, fluent::label_does_not_annotate_this);
+                err.span_label(item.span, fluent::parse_label_does_not_annotate_this);
                 err.span_suggestion_verbose(
                     replacement_span,
-                    fluent::sugg_change_inner_to_outer,
+                    fluent::parse_sugg_change_inner_to_outer,
                     match attr_type {
                         OuterAttributeType::Attribute => "",
                         OuterAttributeType::DocBlockComment => "*",
@@ -200,27 +203,27 @@ impl<'a> Parser<'a> {
                 Some(InnerAttrForbiddenReason::AfterOuterDocComment { prev_doc_comment_span }) => {
                     let mut diag = self.struct_span_err(
                         attr_sp,
-                        fluent::parser_inner_attr_not_permitted_after_outer_doc_comment,
+                        fluent::parse_inner_attr_not_permitted_after_outer_doc_comment,
                     );
-                    diag.span_label(attr_sp, fluent::label_attr)
-                        .span_label(prev_doc_comment_span, fluent::label_prev_doc_comment);
+                    diag.span_label(attr_sp, fluent::parse_label_attr)
+                        .span_label(prev_doc_comment_span, fluent::parse_label_prev_doc_comment);
                     diag
                 }
                 Some(InnerAttrForbiddenReason::AfterOuterAttribute { prev_outer_attr_sp }) => {
                     let mut diag = self.struct_span_err(
                         attr_sp,
-                        fluent::parser_inner_attr_not_permitted_after_outer_attr,
+                        fluent::parse_inner_attr_not_permitted_after_outer_attr,
                     );
-                    diag.span_label(attr_sp, fluent::label_attr)
-                        .span_label(prev_outer_attr_sp, fluent::label_prev_attr);
+                    diag.span_label(attr_sp, fluent::parse_label_attr)
+                        .span_label(prev_outer_attr_sp, fluent::parse_label_prev_attr);
                     diag
                 }
                 Some(InnerAttrForbiddenReason::InCodeBlock) | None => {
-                    self.struct_span_err(attr_sp, fluent::parser_inner_attr_not_permitted)
+                    self.struct_span_err(attr_sp, fluent::parse_inner_attr_not_permitted)
                 }
             };
 
-            diag.note(fluent::parser_inner_attr_explanation);
+            diag.note(fluent::parse_inner_attr_explanation);
             if self
                 .annotate_following_item_if_applicable(
                     &mut diag,
@@ -229,7 +232,7 @@ impl<'a> Parser<'a> {
                 )
                 .is_some()
             {
-                diag.note(fluent::parser_outer_attr_explanation);
+                diag.note(fluent::parse_outer_attr_explanation);
             };
             diag.emit();
         }
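
The three `parse_inner_attr_not_permitted*` messages above correspond to an inner attribute written after an outer doc comment, after an outer attribute, or inside a code block. A sketch of inputs that reach the first two paths (not part of the commit; diagnostic wording paraphrased from the fluent slugs):

    /// An outer doc comment.
    #![allow(dead_code)] // inner attribute not permitted after an outer doc comment
    fn after_doc_comment() {}

    #[inline]
    #![allow(unused)]    // inner attribute not permitted after an outer attribute
    fn after_outer_attr() {}
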
@@ -245,9 +248,9 @@ impl<'a> Parser<'a> {
     ///     PATH `=` UNSUFFIXED_LIT
     /// The delimiters or `=` are still put into the resulting token stream.
     pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
-        let item = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
+        let item = match &self.token.kind {
+            token::Interpolated(nt) => match &**nt {
+                Nonterminal::NtMeta(item) => Some(item.clone().into_inner()),
                 _ => None,
             },
             _ => None,
@@ -315,9 +318,10 @@ impl<'a> Parser<'a> {
         Ok(attrs)
     }
 
-    pub(crate) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
-        let lit = self.parse_lit()?;
-        debug!("checking if {:?} is unusuffixed", lit);
+    // Note: must be unsuffixed.
+    pub(crate) fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'a, ast::MetaItemLit> {
+        let lit = self.parse_meta_item_lit()?;
+        debug!("checking if {:?} is unsuffixed", lit);
 
         if !lit.kind.is_unsuffixed() {
             self.sess.emit_err(SuffixedLiteralInAttribute { span: lit.span });
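
`parse_unsuffixed_meta_item_lit` rejects suffixed literals inside attribute values via `SuffixedLiteralInAttribute`. A rough illustration of the kind of meta item it refuses (the attribute name `my_attr` is hypothetical and the error wording is paraphrased):

    #[my_attr(limit = 10usize)] // suffixed literal rejected when parsed as a meta item
    struct Config;

    #[my_attr(limit = 10)]      // OK: unsuffixed literal
    struct Config2;
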
@@ -346,9 +350,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Matches `COMMASEP(meta_item_inner)`.
-    pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, Vec<ast::NestedMetaItem>> {
+    pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec<ast::NestedMetaItem>> {
         // Presumably, the majority of the time there will only be one attr.
-        let mut nmis = Vec::with_capacity(1);
+        let mut nmis = ThinVec::with_capacity(1);
         while self.token.kind != token::Eof {
             nmis.push(self.parse_meta_item_inner()?);
             if !self.eat(&token::Comma) {
@@ -364,9 +368,9 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     /// ```
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
-        let nt_meta = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
-                token::NtMeta(ref e) => Some(e.clone()),
+        let nt_meta = match &self.token.kind {
+            token::Interpolated(nt) => match &**nt {
+                token::NtMeta(e) => Some(e.clone()),
                 _ => None,
             },
             _ => None,
@@ -391,7 +395,7 @@ impl<'a> Parser<'a> {
 
     pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
         Ok(if self.eat(&token::Eq) {
-            ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
+            ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             // Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
             let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
@@ -403,8 +407,8 @@ impl<'a> Parser<'a> {
 
     /// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
     fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
-        match self.parse_unsuffixed_lit() {
-            Ok(lit) => return Ok(ast::NestedMetaItem::Literal(lit)),
+        match self.parse_unsuffixed_meta_item_lit() {
+            Ok(lit) => return Ok(ast::NestedMetaItem::Lit(lit)),
             Err(err) => err.cancel(),
         }
 
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs
index 1b16ecb5ec2..b0ab0f10624 100644
--- a/compiler/rustc_parse/src/parser/attr_wrapper.rs
+++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs
@@ -5,9 +5,9 @@ use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spac
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;
-use rustc_span::{sym, Span};
+use rustc_session::parse::ParseSess;
+use rustc_span::{sym, Span, DUMMY_SP};
 
-use std::convert::TryInto;
 use std::ops::Range;
 
 /// A wrapper type to ensure that the parser handles outer attributes correctly.
@@ -39,12 +39,17 @@ impl AttrWrapper {
     pub fn empty() -> AttrWrapper {
         AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
     }
-    // FIXME: Delay span bug here?
-    pub(crate) fn take_for_recovery(self) -> AttrVec {
+
+    pub(crate) fn take_for_recovery(self, sess: &ParseSess) -> AttrVec {
+        sess.span_diagnostic.delay_span_bug(
+            self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP),
+            "AttrVec is taken for recovery but no error is produced",
+        );
+
         self.attrs
     }
 
-    // Prepend `self.attrs` to `attrs`.
+    /// Prepend `self.attrs` to `attrs`.
     // FIXME: require passing an NT to prevent misuse of this method
     pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
         let mut self_attrs = self.attrs;
@@ -129,11 +134,11 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
             // Process the replace ranges, starting from the highest start
             // position and working our way back. If have tokens like:
             //
-            // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
             //
             // Then we will generate replace ranges for both
             // the `#[cfg(FALSE)] field: bool` and the entire
-            // `#[cfg(FALSE)]` struct Foo { #[cfg(FALSE)] field: bool }`
+            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
             //
             // By starting processing from the replace range with the greatest
             // start position, we ensure that any replace range which encloses
@@ -464,6 +469,6 @@ mod size_asserts {
     use rustc_data_structures::static_assert_size;
     // tidy-alphabetical-start
     static_assert_size!(AttrWrapper, 16);
-    static_assert_size!(LazyAttrTokenStreamImpl, 144);
+    static_assert_size!(LazyAttrTokenStreamImpl, 120);
     // tidy-alphabetical-end
 }
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 0bbe073fe2a..5b12bcc1822 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -8,16 +8,18 @@ use crate::errors::{
     ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg,
     ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentOnParamType,
     DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
-    GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg, InInTypo,
-    IncorrectAwait, IncorrectSemicolon, IncorrectUseOfAwait, ParenthesesInForHead,
-    ParenthesesInForHeadSugg, PatternMethodParamWithoutBody, QuestionMarkInType,
-    QuestionMarkInTypeSugg, SelfParamNotFirst, StructLiteralBodyWithoutPath,
-    StructLiteralBodyWithoutPathSugg, SuggEscapeToUseAsIdentifier, SuggRemoveComma,
+    GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
+    HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon,
+    IncorrectUseOfAwait, ParenthesesInForHead, ParenthesesInForHeadSugg,
+    PatternMethodParamWithoutBody, QuestionMarkInType, QuestionMarkInTypeSugg, SelfParamNotFirst,
+    StructLiteralBodyWithoutPath, StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens,
+    StructLiteralNeedingParensSugg, SuggEscapeToUseAsIdentifier, SuggRemoveComma,
     UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
     UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead,
 };
 
-use crate::lexer::UnmatchedBrace;
+use crate::fluent_generated as fluent;
+use crate::parser;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Lit, LitKind, TokenKind};
@@ -30,18 +32,16 @@ use rustc_ast::{
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::{
-    fluent, Applicability, DiagnosticBuilder, DiagnosticMessage, Handler, MultiSpan, PResult,
+    pluralize, Applicability, Diagnostic, DiagnosticBuilder, DiagnosticMessage, ErrorGuaranteed,
+    FatalError, Handler, IntoDiagnostic, MultiSpan, PResult,
 };
-use rustc_errors::{pluralize, Diagnostic, ErrorGuaranteed, IntoDiagnostic};
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::Spanned;
 use rustc_span::symbol::{kw, sym, Ident};
 use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
-use std::ops::{Deref, DerefMut};
-
 use std::mem::take;
-
-use crate::parser;
+use std::ops::{Deref, DerefMut};
+use thin_vec::{thin_vec, ThinVec};
 
 /// Creates a placeholder argument.
 pub(super) fn dummy_arg(ident: Ident) -> Param {
@@ -65,7 +65,7 @@ pub(super) fn dummy_arg(ident: Ident) -> Param {
 pub(super) trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;
     fn to_ty(&self) -> Option<P<Ty>>;
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self;
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self;
 }
 
 impl RecoverQPath for Ty {
@@ -73,7 +73,7 @@ impl RecoverQPath for Ty {
     fn to_ty(&self) -> Option<P<Ty>> {
         Some(P(self.clone()))
     }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
         Self {
             span: path.span,
             kind: TyKind::Path(qself, path),
@@ -87,7 +87,7 @@ impl RecoverQPath for Pat {
     fn to_ty(&self) -> Option<P<Ty>> {
         self.to_ty()
     }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
         Self {
             span: path.span,
             kind: PatKind::Path(qself, path),
@@ -101,7 +101,7 @@ impl RecoverQPath for Expr {
     fn to_ty(&self) -> Option<P<Ty>> {
         self.to_ty()
     }
-    fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self {
+    fn recovered(qself: Option<P<QSelf>>, path: ast::Path) -> Self {
         Self {
             span: path.span,
             kind: ExprKind::Path(qself, path),
@@ -159,15 +159,11 @@ enum IsStandalone {
     Standalone,
     /// It's a subexpression, i.e., *not* standalone.
     Subexpr,
-    /// It's maybe standalone; we're not sure.
-    Maybe,
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 enum IncOrDec {
     Inc,
-    // FIXME: `i--` recovery isn't implemented yet
-    #[allow(dead_code)]
     Dec,
 }
 
@@ -213,23 +209,16 @@ impl MultiSugg {
         err.multipart_suggestion(&self.msg, self.patches, self.applicability);
     }
 
-    /// Overrides individual messages and applicabilities.
-    fn emit_many(
-        err: &mut Diagnostic,
-        msg: &str,
-        applicability: Applicability,
-        suggestions: impl Iterator<Item = Self>,
-    ) {
-        err.multipart_suggestions(msg, suggestions.map(|s| s.patches), applicability);
+    fn emit_verbose(self, err: &mut Diagnostic) {
+        err.multipart_suggestion_verbose(&self.msg, self.patches, self.applicability);
     }
 }
 
-// SnapshotParser is used to create a snapshot of the parser
-// without causing duplicate errors being emitted when the `Parser`
-// is dropped.
+/// SnapshotParser is used to create a snapshot of the parser
+/// without causing duplicate errors being emitted when the `Parser`
+/// is dropped.
 pub struct SnapshotParser<'a> {
     parser: Parser<'a>,
-    unclosed_delims: Vec<UnmatchedBrace>,
 }
 
 impl<'a> Deref for SnapshotParser<'a> {
@@ -264,34 +253,22 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }
 
-    /// Replace `self` with `snapshot.parser` and extend `unclosed_delims` with `snapshot.unclosed_delims`.
-    /// This is to avoid losing unclosed delims errors `create_snapshot_for_diagnostic` clears.
+    /// Replace `self` with `snapshot.parser`.
     pub(super) fn restore_snapshot(&mut self, snapshot: SnapshotParser<'a>) {
         *self = snapshot.parser;
-        self.unclosed_delims.extend(snapshot.unclosed_delims);
-    }
-
-    pub fn unclosed_delims(&self) -> &[UnmatchedBrace] {
-        &self.unclosed_delims
     }
 
     /// Create a snapshot of the `Parser`.
     pub fn create_snapshot_for_diagnostic(&self) -> SnapshotParser<'a> {
-        let mut snapshot = self.clone();
-        let unclosed_delims = self.unclosed_delims.clone();
-        // Clear `unclosed_delims` in snapshot to avoid
-        // duplicate errors being emitted when the `Parser`
-        // is dropped (which may or may not happen, depending
-        // if the parsing the snapshot is created for is successful)
-        snapshot.unclosed_delims.clear();
-        SnapshotParser { parser: snapshot, unclosed_delims }
+        let snapshot = self.clone();
+        SnapshotParser { parser: snapshot }
     }
 
     pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
         self.sess.source_map().span_to_snippet(span)
     }
 
-    pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+    pub(super) fn expected_ident_found(&mut self) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
         let valid_follow = &[
             TokenKind::Eq,
             TokenKind::Colon,
@@ -303,6 +280,7 @@ impl<'a> Parser<'a> {
             TokenKind::CloseDelim(Delimiter::Brace),
             TokenKind::CloseDelim(Delimiter::Parenthesis),
         ];
+
         let suggest_raw = match self.token.ident() {
             Some((ident, false))
                 if ident.is_raw_guess()
@@ -318,20 +296,86 @@ impl<'a> Parser<'a> {
             _ => None,
         };
 
-        let suggest_remove_comma =
-            if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) {
-                Some(SuggRemoveComma { span: self.token.span })
-            } else {
-                None
-            };
+        let suggest_remove_comma = (self.token == token::Comma
+            && self.look_ahead(1, |t| t.is_ident()))
+        .then_some(SuggRemoveComma { span: self.token.span });
+
+        let help_cannot_start_number =
+            self.is_lit_bad_ident().then_some(HelpIdentifierStartsWithNumber);
 
         let err = ExpectedIdentifier {
             span: self.token.span,
             token: self.token.clone(),
             suggest_raw,
             suggest_remove_comma,
+            help_cannot_start_number,
         };
-        err.into_diagnostic(&self.sess.span_diagnostic)
+        let mut err = err.into_diagnostic(&self.sess.span_diagnostic);
+
+        // If the token we have is a `<`,
+        // it *might* be a misplaced generic.
+        if self.token == token::Lt {
+            // all keywords that could have generic applied
+            let valid_prev_keywords =
+                [kw::Fn, kw::Type, kw::Struct, kw::Enum, kw::Union, kw::Trait];
+
+            // If we expected an identifier,
+            // the current token is a `<`,
+            // and the previous token is a keyword
+            // that can take generics, then suggest the correct
+            // generic placement (later on).
+            let maybe_keyword = self.prev_token.clone();
+            if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) {
+                // if we have a valid keyword, attempt to parse generics
+                // also obtain the keywords symbol
+                match self.parse_generics() {
+                    Ok(generic) => {
+                        if let TokenKind::Ident(symbol, _) = maybe_keyword.kind {
+                            let ident_name = symbol;
+                            // At this point, we've found something like
+                            // `fn <T>id`, and the current token should be an
+                            // `Ident` holding the item name (i.e. the function name).
+                            // If there is a `<` after the fn name, don't show a suggestion; show help instead.
+
+                            if !self.look_ahead(1, |t| *t == token::Lt) &&
+                                let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) {
+                                    err.multipart_suggestion_verbose(
+                                        format!("place the generic parameter name after the {ident_name} name"),
+                                        vec![
+                                            (self.token.span.shrink_to_hi(), snippet),
+                                            (generic.span, String::new())
+                                        ],
+                                        Applicability::MaybeIncorrect,
+                                    );
+                                } else {
+                                    err.help(format!(
+                                        "place the generic parameter name after the {ident_name} name"
+                                    ));
+                                }
+                        }
+                    }
+                    Err(err) => {
+                        // if there's an error parsing the generics,
+                        // then don't do a misplaced generics suggestion
+                        // and emit the expected ident error instead;
+                        err.cancel();
+                    }
+                }
+            }
+        }
+
+        err
+    }
+
+    /// Checks if the current token is an integer or float literal and looks like
+    /// it could be an invalid identifier with digits at the start.
+    pub(super) fn is_lit_bad_ident(&mut self) -> bool {
+        matches!(self.token.uninterpolate().kind, token::Literal(Lit { kind: token::LitKind::Integer | token::LitKind::Float, .. })
+            // ensure that the integer literal is followed by an *invalid*
+            // suffix: this is how we know that it is an identifier with an
+            // invalid beginning.
+            if rustc_ast::MetaItemLit::from_token(&self.token).is_none()
+        )
     }
 
     pub(super) fn expected_one_of_not_found(
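
Two recoveries added above can be pictured with a short, deliberately invalid sketch (not part of the commit): a generic parameter list written before the item name triggers the new "place the generic parameter name after the fn name" suggestion, and `is_lit_bad_ident` feeds the `help_cannot_start_number` note for identifiers that begin with a digit (help text paraphrased):

    fn <T> id(value: T) -> T { value } // suggestion: place the generic parameter name after the fn name

    fn digits() {
        let 1x = 5; // expected identifier; help: identifiers cannot start with a number
    }
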
@@ -534,21 +578,6 @@ impl<'a> Parser<'a> {
         } else {
             label_sp
         };
-        match self.recover_closing_delimiter(
-            &expected
-                .iter()
-                .filter_map(|tt| match tt {
-                    TokenType::Token(t) => Some(t.clone()),
-                    _ => None,
-                })
-                .collect::<Vec<_>>(),
-            err,
-        ) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
 
         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
@@ -631,16 +660,22 @@ impl<'a> Parser<'a> {
         &mut self,
         lo: Span,
         s: BlockCheckMode,
+        maybe_struct_name: token::Token,
+        can_be_struct_literal: bool,
     ) -> Option<PResult<'a, P<Block>>> {
         if self.token.is_ident() && self.look_ahead(1, |t| t == &token::Colon) {
             // We might be having a struct literal where people forgot to include the path:
             // fn foo() -> Foo {
             //     field: value,
             // }
+            info!(?maybe_struct_name, ?self.token);
             let mut snapshot = self.create_snapshot_for_diagnostic();
-            let path =
-                Path { segments: vec![], span: self.prev_token.span.shrink_to_lo(), tokens: None };
-            let struct_expr = snapshot.parse_struct_expr(None, path, false);
+            let path = Path {
+                segments: ThinVec::new(),
+                span: self.prev_token.span.shrink_to_lo(),
+                tokens: None,
+            };
+            let struct_expr = snapshot.parse_expr_struct(None, path, false);
             let block_tail = self.parse_block_tail(lo, s, AttemptLocalParseRecovery::No);
             return Some(match (struct_expr, block_tail) {
                 (Ok(expr), Err(mut err)) => {
@@ -653,21 +688,32 @@ impl<'a> Parser<'a> {
                     //     field: value,
                     // } }
                     err.delay_as_bug();
-                    self.sess.emit_err(StructLiteralBodyWithoutPath {
-                        span: expr.span,
-                        sugg: StructLiteralBodyWithoutPathSugg {
-                            before: expr.span.shrink_to_lo(),
-                            after: expr.span.shrink_to_hi(),
-                        },
-                    });
                     self.restore_snapshot(snapshot);
                     let mut tail = self.mk_block(
-                        vec![self.mk_stmt_err(expr.span)],
+                        thin_vec![self.mk_stmt_err(expr.span)],
                         s,
                         lo.to(self.prev_token.span),
                     );
                     tail.could_be_bare_literal = true;
-                    Ok(tail)
+                    if maybe_struct_name.is_ident() && can_be_struct_literal {
+                        // Account for `if Example { a: one(), }.is_pos() {}`.
+                        Err(self.sess.create_err(StructLiteralNeedingParens {
+                            span: maybe_struct_name.span.to(expr.span),
+                            sugg: StructLiteralNeedingParensSugg {
+                                before: maybe_struct_name.span.shrink_to_lo(),
+                                after: expr.span.shrink_to_hi(),
+                            },
+                        }))
+                    } else {
+                        self.sess.emit_err(StructLiteralBodyWithoutPath {
+                            span: expr.span,
+                            sugg: StructLiteralBodyWithoutPathSugg {
+                                before: expr.span.shrink_to_lo(),
+                                after: expr.span.shrink_to_hi(),
+                            },
+                        });
+                        Ok(tail)
+                    }
                 }
                 (Err(err), Ok(tail)) => {
                     // We have a block tail that contains a somehow valid type ascription expr.
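
The `can_be_struct_literal` branch distinguishes `if Example { a: one(), }.is_pos() {}` (the case named in the comment above, now reported via `StructLiteralNeedingParens` with a parenthesizing suggestion) from a bare struct-literal body that is simply missing its path. An illustrative reproducer, not part of the commit (error wording paraphrased):

    struct Example { a: i32 }
    impl Example { fn is_pos(&self) -> bool { self.a > 0 } }
    fn one() -> i32 { 1 }

    fn demo() {
        // Reported as a struct literal needing parentheses, i.e.
        // `if (Example { a: one() }).is_pos() { }`.
        if Example { a: one() }.is_pos() { }
    }
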
@@ -934,7 +980,7 @@ impl<'a> Parser<'a> {
                     if self.eat(&token::Gt) {
                         e.span_suggestion_verbose(
                             binop.span.shrink_to_lo(),
-                            fluent::parser_sugg_turbofish_syntax,
+                            fluent::parse_sugg_turbofish_syntax,
                             "::",
                             Applicability::MaybeIncorrect,
                         )
@@ -970,7 +1016,7 @@ impl<'a> Parser<'a> {
         inner_op: &Expr,
         outer_op: &Spanned<AssocOp>,
     ) -> bool /* advanced the cursor */ {
-        if let ExprKind::Binary(op, ref l1, ref r1) = inner_op.kind {
+        if let ExprKind::Binary(op, l1, r1) = &inner_op.kind {
             if let ExprKind::Field(_, ident) = l1.kind
                 && ident.as_str().parse::<i32>().is_err()
                 && !matches!(r1.kind, ExprKind::Lit(_))
@@ -1076,8 +1122,8 @@ impl<'a> Parser<'a> {
 
         let mk_err_expr = |this: &Self, span| Ok(Some(this.mk_expr(span, ExprKind::Err)));
 
-        match inner_op.kind {
-            ExprKind::Binary(op, ref l1, ref r1) if op.node.is_comparison() => {
+        match &inner_op.kind {
+            ExprKind::Binary(op, l1, r1) if op.node.is_comparison() => {
                 let mut err = ComparisonOperatorsCannotBeChained {
                     span: vec![op.span, self.prev_token.span],
                     suggest_turbofish: None,
@@ -1109,7 +1155,11 @@ impl<'a> Parser<'a> {
                     return if token::ModSep == self.token.kind {
                         // We have some certainty that this was a bad turbofish at this point.
                         // `foo< bar >::`
-                        err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        if let ExprKind::Binary(o, ..) = inner_op.kind && o.node == BinOpKind::Lt {
+                            err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        } else {
+                            err.help_turbofish = Some(());
+                        }
 
                         let snapshot = self.create_snapshot_for_diagnostic();
                         self.bump(); // `::`
@@ -1135,7 +1185,11 @@ impl<'a> Parser<'a> {
                     } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind {
                         // We have high certainty that this was a bad turbofish at this point.
                         // `foo< bar >(`
-                        err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        if let ExprKind::Binary(o, ..) = inner_op.kind && o.node == BinOpKind::Lt {
+                            err.suggest_turbofish = Some(op.span.shrink_to_lo());
+                        } else {
+                            err.help_turbofish = Some(());
+                        }
                         // Consume the fn call arguments.
                         match self.consume_fn_args() {
                             Err(()) => Err(err.into_diagnostic(&self.sess.span_diagnostic)),
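
With this change the machine-applicable `::` insertion (`suggest_turbofish`) is only offered when the inner comparison really is a single `<`; otherwise only the general turbofish help is attached. The classic shape that reaches the `(` arm above looks like this sketch (not part of the commit; wording paraphrased):

    fn main() {
        let _ = std::cmp::min::<i32>(1, 2); // correct turbofish
        // `comparison operators cannot be chained`, with a suggestion to
        // write `::<i32>` for the generic argument instead.
        let _ = std::cmp::min<i32>(1, 2);
    }
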
@@ -1234,8 +1288,8 @@ impl<'a> Parser<'a> {
         let bounds = self.parse_generic_bounds(None)?;
         let sum_span = ty.span.to(self.prev_token.span);
 
-        let sub = match ty.kind {
-            TyKind::Rptr(ref lifetime, ref mut_ty) => {
+        let sub = match &ty.kind {
+            TyKind::Ref(lifetime, mut_ty) => {
                 let sum_with_parens = pprust::to_string(|s| {
                     s.s.word("&");
                     s.print_opt_lifetime(lifetime);
@@ -1264,12 +1318,10 @@ impl<'a> Parser<'a> {
         &mut self,
         operand_expr: P<Expr>,
         op_span: Span,
-        prev_is_semi: bool,
+        start_stmt: bool,
     ) -> PResult<'a, P<Expr>> {
-        let standalone =
-            if prev_is_semi { IsStandalone::Standalone } else { IsStandalone::Subexpr };
+        let standalone = if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr };
         let kind = IncDecRecovery { standalone, op: IncOrDec::Inc, fixity: UnaryFixity::Pre };
-
         self.recover_from_inc_dec(operand_expr, kind, op_span)
     }
 
@@ -1277,13 +1329,27 @@ impl<'a> Parser<'a> {
         &mut self,
         operand_expr: P<Expr>,
         op_span: Span,
+        start_stmt: bool,
     ) -> PResult<'a, P<Expr>> {
         let kind = IncDecRecovery {
-            standalone: IsStandalone::Maybe,
+            standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
             op: IncOrDec::Inc,
             fixity: UnaryFixity::Post,
         };
+        self.recover_from_inc_dec(operand_expr, kind, op_span)
+    }
 
+    pub(super) fn recover_from_postfix_decrement(
+        &mut self,
+        operand_expr: P<Expr>,
+        op_span: Span,
+        start_stmt: bool,
+    ) -> PResult<'a, P<Expr>> {
+        let kind = IncDecRecovery {
+            standalone: if start_stmt { IsStandalone::Standalone } else { IsStandalone::Subexpr },
+            op: IncOrDec::Dec,
+            fixity: UnaryFixity::Post,
+        };
         self.recover_from_inc_dec(operand_expr, kind, op_span)
     }
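
`recover_from_postfix_decrement` extends the existing `++` recovery to C-style `x--`, which Rust does not have; the `start_stmt` flag picks between the standalone and subexpression suggestions. A sketch of the inputs involved (not part of the commit; the `-= 1`/`+= 1` suggestion text comes from the code below):

    fn main() {
        let mut x = 5;
        x--; // recovered with the suggestion `x -= 1`
        x++; // recovered with the suggestion `x += 1`
    }
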
 
@@ -1312,34 +1378,25 @@ impl<'a> Parser<'a> {
         };
 
         match kind.standalone {
-            IsStandalone::Standalone => self.inc_dec_standalone_suggest(kind, spans).emit(&mut err),
+            IsStandalone::Standalone => {
+                self.inc_dec_standalone_suggest(kind, spans).emit_verbose(&mut err)
+            }
             IsStandalone::Subexpr => {
                 let Ok(base_src) = self.span_to_snippet(base.span)
-                    else { return help_base_case(err, base) };
+                else { return help_base_case(err, base) };
                 match kind.fixity {
                     UnaryFixity::Pre => {
                         self.prefix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
                     }
                     UnaryFixity::Post => {
-                        self.postfix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
+                        // Won't suggest, since we cannot handle the precedence here:
+                        // for example, `a + b++` has already been parsed as `(a + b)++`, so we cannot suggest a fix.
+                        if !matches!(base.kind, ExprKind::Binary(_, _, _)) {
+                            self.postfix_inc_dec_suggest(base_src, kind, spans).emit(&mut err)
+                        }
                     }
                 }
             }
-            IsStandalone::Maybe => {
-                let Ok(base_src) = self.span_to_snippet(base.span)
-                    else { return help_base_case(err, base) };
-                let sugg1 = match kind.fixity {
-                    UnaryFixity::Pre => self.prefix_inc_dec_suggest(base_src, kind, spans),
-                    UnaryFixity::Post => self.postfix_inc_dec_suggest(base_src, kind, spans),
-                };
-                let sugg2 = self.inc_dec_standalone_suggest(kind, spans);
-                MultiSugg::emit_many(
-                    &mut err,
-                    "use `+= 1` instead",
-                    Applicability::Unspecified,
-                    [sugg1, sugg2].into_iter(),
-                )
-            }
         }
         Err(err)
     }
@@ -1389,7 +1446,6 @@ impl<'a> Parser<'a> {
         }
 
         patches.push((post_span, format!(" {}= 1", kind.op.chr())));
-
         MultiSugg {
             msg: format!("use `{}= 1` instead", kind.op.chr()),
             patches,
@@ -1426,7 +1482,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<T>> {
         self.expect(&token::ModSep)?;
 
-        let mut path = ast::Path { segments: Vec::new(), span: DUMMY_SP, tokens: None };
+        let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
         path.span = ty_span.to(self.prev_token.span);
 
@@ -1437,7 +1493,7 @@ impl<'a> Parser<'a> {
         });
 
         let path_span = ty_span.shrink_to_hi(); // Use an empty path since `position == 0`.
-        Ok(P(T::recovered(Some(QSelf { ty, path_span, position: 0 }), path)))
+        Ok(P(T::recovered(Some(P(QSelf { ty, path_span, position: 0 })), path)))
     }
 
     pub fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool {
@@ -1501,12 +1557,6 @@ impl<'a> Parser<'a> {
         );
         let mut err = self.struct_span_err(sp, &msg);
         let label_exp = format!("expected `{token_str}`");
-        match self.recover_closing_delimiter(&[t.clone()], err) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
         let sm = self.sess.source_map();
         if !sm.is_multiline(prev_sp.until(sp)) {
             // When the spans are in the same line, it means that the only content
@@ -1564,7 +1614,7 @@ impl<'a> Parser<'a> {
             // Handle `await { <expr> }`.
             // This needs to be handled separately from the next arm to avoid
             // interpreting `await { <expr> }?` as `<expr>?.await`.
-            self.parse_block_expr(None, self.token.span, BlockCheckMode::Default)
+            self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)
         } else {
             self.parse_expr()
         }
@@ -1657,14 +1707,14 @@ impl<'a> Parser<'a> {
                 let left = begin_par_sp;
                 let right = self.prev_token.span;
                 let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left) &&
-                        !snip.ends_with(" ") {
+                        !snip.ends_with(' ') {
                                 " ".to_string()
                             } else {
                                 "".to_string()
                             };
 
                 let right_snippet = if let Ok(snip) = sm.span_to_next_source(right) &&
-                        !snip.starts_with(" ") {
+                        !snip.starts_with(' ') {
                                 " ".to_string()
                             } else {
                                 "".to_string()
@@ -1723,81 +1773,6 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn recover_closing_delimiter(
-        &mut self,
-        tokens: &[TokenKind],
-        mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
-    ) -> PResult<'a, bool> {
-        let mut pos = None;
-        // We want to use the last closing delim that would apply.
-        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
-            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
-                && Some(self.token.span) > unmatched.unclosed_span
-            {
-                pos = Some(i);
-            }
-        }
-        match pos {
-            Some(pos) => {
-                // Recover and assume that the detected unclosed delimiter was meant for
-                // this location. Emit the diagnostic and act as if the delimiter was
-                // present for the parser's sake.
-
-                // Don't attempt to recover from this unclosed delimiter more than once.
-                let unmatched = self.unclosed_delims.remove(pos);
-                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
-                if unmatched.found_delim.is_none() {
-                    // We encountered `Eof`, set this fact here to avoid complaining about missing
-                    // `fn main()` when we found place to suggest the closing brace.
-                    *self.sess.reached_eof.borrow_mut() = true;
-                }
-
-                // We want to suggest the inclusion of the closing delimiter where it makes
-                // the most sense, which is immediately after the last token:
-                //
-                //  {foo(bar {}}
-                //      ^      ^
-                //      |      |
-                //      |      help: `)` may belong here
-                //      |
-                //      unclosed delimiter
-                if let Some(sp) = unmatched.unclosed_span {
-                    let mut primary_span: Vec<Span> =
-                        err.span.primary_spans().iter().cloned().collect();
-                    primary_span.push(sp);
-                    let mut primary_span: MultiSpan = primary_span.into();
-                    for span_label in err.span.span_labels() {
-                        if let Some(label) = span_label.label {
-                            primary_span.push_span_label(span_label.span, label);
-                        }
-                    }
-                    err.set_span(primary_span);
-                    err.span_label(sp, "unclosed delimiter");
-                }
-                // Backticks should be removed to apply suggestions.
-                let mut delim = delim.to_string();
-                delim.retain(|c| c != '`');
-                err.span_suggestion_short(
-                    self.prev_token.span.shrink_to_hi(),
-                    &format!("`{delim}` may belong here"),
-                    delim,
-                    Applicability::MaybeIncorrect,
-                );
-                if unmatched.found_delim.is_none() {
-                    // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
-                    // errors which would be emitted elsewhere in the parser and let other error
-                    // recovery consume the rest of the file.
-                    Err(err)
-                } else {
-                    err.emit();
-                    self.expected_tokens.clear(); // Reduce the number of errors.
-                    Ok(true)
-                }
-            }
-            _ => Err(err),
-        }
-    }
-
     /// Eats tokens until we can be relatively sure we reached the end of the
     /// statement. This is something of a best-effort heuristic.
     ///
@@ -2024,7 +1999,7 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
-        let pat = self.parse_pat_no_top_alt(Some("argument name"))?;
+        let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName))?;
         self.expect(&token::Colon)?;
         let ty = self.parse_ty()?;
 
@@ -2115,7 +2090,7 @@ impl<'a> Parser<'a> {
     /// the parameters are *names* (so we don't emit errors about not being able to find `b` in
     /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`,
     /// we deduplicate them to not complain about duplicated parameter names.
-    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) {
+    pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut ThinVec<Param>) {
         let mut seen_inputs = FxHashSet::default();
         for input in fn_inputs.iter_mut() {
             let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) =
@@ -2139,7 +2114,7 @@ impl<'a> Parser<'a> {
     /// like the user has forgotten them.
     pub fn handle_ambiguous_unbraced_const_arg(
         &mut self,
-        args: &mut Vec<AngleBracketedArg>,
+        args: &mut ThinVec<AngleBracketedArg>,
     ) -> PResult<'a, bool> {
         // If we haven't encountered a closing `>`, then the argument is malformed.
         // It's likely that the user has written a const expression without enclosing it
@@ -2347,6 +2322,28 @@ impl<'a> Parser<'a> {
         Err(err)
     }
 
+    /// Try to recover from an unbraced const argument whose first token [could begin a type][ty].
+    ///
+    /// [ty]: token::Token::can_begin_type
+    pub(crate) fn recover_unbraced_const_arg_that_can_begin_ty(
+        &mut self,
+        mut snapshot: SnapshotParser<'a>,
+    ) -> Option<P<ast::Expr>> {
+        match snapshot.parse_expr_res(Restrictions::CONST_EXPR, None) {
+            // Since we don't know the exact reason why we failed to parse the type or the
+            // expression, employ a simple heuristic to weed out some pathological cases.
+            Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => {
+                self.restore_snapshot(snapshot);
+                Some(expr)
+            }
+            Ok(_) => None,
+            Err(err) => {
+                err.cancel();
+                None
+            }
+        }
+    }
+
     /// Creates a dummy const argument, and reports that the expression must be enclosed in braces
     pub fn dummy_const_arg_needs_braces(
         &self,
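
`recover_unbraced_const_arg_that_can_begin_ty` retries, as an expression, a const generic argument whose first token could also start a type, and only keeps the result when the following token is `,` or `>`. A sketch of the situation it targets (names invented; the braces suggestion wording is paraphrased):

    fn takes<const N: usize>() {}

    fn caller<const M: usize>() {
        takes::<M + 1>();     // `M` could begin a type, so the argument is re-parsed as an
                              // expression and reported as needing braces
        takes::<{ M + 1 }>(); // OK
    }
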
@@ -2369,7 +2366,7 @@ impl<'a> Parser<'a> {
     pub(crate) fn maybe_recover_colon_colon_in_pat_typo(
         &mut self,
         mut first_pat: P<Pat>,
-        expected: Expected,
+        expected: Option<Expected>,
     ) -> P<Pat> {
         if token::Colon != self.token.kind {
             return first_pat;
@@ -2377,26 +2374,42 @@ impl<'a> Parser<'a> {
         if !matches!(first_pat.kind, PatKind::Ident(_, _, None) | PatKind::Path(..))
             || !self.look_ahead(1, |token| token.is_ident() && !token.is_reserved_ident())
         {
+            let mut snapshot_type = self.create_snapshot_for_diagnostic();
+            snapshot_type.bump(); // `:`
+            match snapshot_type.parse_ty() {
+                Err(inner_err) => {
+                    inner_err.cancel();
+                }
+                Ok(ty) => {
+                    let Err(mut err) = self.expected_one_of_not_found(&[], &[]) else {
+                        return first_pat;
+                    };
+                    err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+                    self.restore_snapshot(snapshot_type);
+                    let span = first_pat.span.to(ty.span);
+                    first_pat = self.mk_pat(span, PatKind::Wild);
+                    err.emit();
+                }
+            }
             return first_pat;
         }
         // The pattern looks like it might be a path with a `::` -> `:` typo:
         // `match foo { bar:baz => {} }`
-        let span = self.token.span;
+        let colon_span = self.token.span;
         // We only emit "unexpected `:`" error here if we can successfully parse the
         // whole pattern correctly in that case.
-        let snapshot = self.create_snapshot_for_diagnostic();
+        let mut snapshot_pat = self.create_snapshot_for_diagnostic();
+        let mut snapshot_type = self.create_snapshot_for_diagnostic();
 
         // Create error for "unexpected `:`".
         match self.expected_one_of_not_found(&[], &[]) {
             Err(mut err) => {
-                self.bump(); // Skip the `:`.
-                match self.parse_pat_no_top_alt(expected) {
+                // Skip the `:`.
+                snapshot_pat.bump();
+                snapshot_type.bump();
+                match snapshot_pat.parse_pat_no_top_alt(expected) {
                     Err(inner_err) => {
-                        // Carry on as if we had not done anything, callers will emit a
-                        // reasonable error.
                         inner_err.cancel();
-                        err.cancel();
-                        self.restore_snapshot(snapshot);
                     }
                     Ok(mut pat) => {
                         // We've parsed the rest of the pattern.
@@ -2434,7 +2447,7 @@ impl<'a> Parser<'a> {
                                             None,
                                             Path {
                                                 span: new_span,
-                                                segments: vec![
+                                                segments: thin_vec![
                                                     PathSegment::from_ident(*old_ident),
                                                     PathSegment::from_ident(*ident),
                                                 ],
@@ -2460,8 +2473,8 @@ impl<'a> Parser<'a> {
                             _ => {}
                         }
                         if show_sugg {
-                            err.span_suggestion(
-                                span,
+                            err.span_suggestion_verbose(
+                                colon_span.until(self.look_ahead(1, |t| t.span)),
                                 "maybe write a path separator here",
                                 "::",
                                 Applicability::MaybeIncorrect,
@@ -2469,13 +2482,24 @@ impl<'a> Parser<'a> {
                         } else {
                             first_pat = self.mk_pat(new_span, PatKind::Wild);
                         }
-                        err.emit();
+                        self.restore_snapshot(snapshot_pat);
                     }
                 }
+                match snapshot_type.parse_ty() {
+                    Err(inner_err) => {
+                        inner_err.cancel();
+                    }
+                    Ok(ty) => {
+                        err.span_label(ty.span, "specifying the type of a pattern isn't supported");
+                        self.restore_snapshot(snapshot_type);
+                        let new_span = first_pat.span.to(ty.span);
+                        first_pat = self.mk_pat(new_span, PatKind::Wild);
+                    }
+                }
+                err.emit();
             }
             _ => {
                 // Carry on as if we had not done anything. This should be unreachable.
-                self.restore_snapshot(snapshot);
             }
         };
         first_pat
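
The reworked recovery takes two snapshots: one re-parses the rest as a pattern (the `::` written as `:` typo, suggested with "maybe write a path separator here"), the other re-parses it as a type and labels it with "specifying the type of a pattern isn't supported". The `match foo { bar:baz => {} }` shape from the comment above, fleshed out for illustration (the enum is invented):

    enum Foo { Bar, Baz }

    fn main() {
        match Foo::Bar {
            Foo:Bar => {} // suggestion: maybe write a path separator here (`::`)
            _ => {}
        }
    }
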
@@ -2562,7 +2586,7 @@ impl<'a> Parser<'a> {
             if let [a, b] = segments {
                 let (a_span, b_span) = (a.span(), b.span());
                 let between_span = a_span.shrink_to_hi().to(b_span.shrink_to_lo());
-                if self.span_to_snippet(between_span).as_ref().map(|a| &a[..]) == Ok(":: ") {
+                if self.span_to_snippet(between_span).as_deref() == Ok(":: ") {
                     return Err(DoubleColonInBound {
                         span: path.span.shrink_to_hi(),
                         between: between_span,
@@ -2574,6 +2598,75 @@ impl<'a> Parser<'a> {
         Ok(())
     }
 
+    pub fn is_diff_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> bool {
+        (0..3).all(|i| self.look_ahead(i, |tok| tok == long_kind))
+            && self.look_ahead(3, |tok| tok == short_kind)
+    }
+
+    fn diff_marker(&mut self, long_kind: &TokenKind, short_kind: &TokenKind) -> Option<Span> {
+        if self.is_diff_marker(long_kind, short_kind) {
+            let lo = self.token.span;
+            for _ in 0..4 {
+                self.bump();
+            }
+            return Some(lo.to(self.prev_token.span));
+        }
+        None
+    }
+
+    pub fn recover_diff_marker(&mut self) {
+        let Some(start) = self.diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) else {
+            return;
+        };
+        let mut spans = Vec::with_capacity(3);
+        spans.push(start);
+        let mut middlediff3 = None;
+        let mut middle = None;
+        let mut end = None;
+        loop {
+            if self.token.kind == TokenKind::Eof {
+                break;
+            }
+            if let Some(span) = self.diff_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or)) {
+                middlediff3 = Some(span);
+            }
+            if let Some(span) = self.diff_marker(&TokenKind::EqEq, &TokenKind::Eq) {
+                middle = Some(span);
+            }
+            if let Some(span) = self.diff_marker(&TokenKind::BinOp(token::Shr), &TokenKind::Gt) {
+                spans.push(span);
+                end = Some(span);
+                break;
+            }
+            self.bump();
+        }
+        let mut err = self.struct_span_err(spans, "encountered diff marker");
+        err.span_label(start, "after this is the code before the merge");
+        if let Some(middle) = middlediff3 {
+            err.span_label(middle, "");
+        }
+        if let Some(middle) = middle {
+            err.span_label(middle, "");
+        }
+        if let Some(end) = end {
+            err.span_label(end, "above this are the incoming code changes");
+        }
+        err.help(
+            "if you're having merge conflicts after pulling new code, the top section is the code \
+             you already had and the bottom section is the remote code",
+        );
+        err.help(
+            "if you're in the middle of a rebase, the top section is the code being rebased onto \
+             and the bottom section is the code coming from the current commit being rebased",
+        );
+        err.note(
+            "for an explanation on these markers from the `git` documentation, visit \
+             <https://git-scm.com/book/en/v2/Git-Tools-Advanced-Merging#_checking_out_conflicts>",
+        );
+        err.emit();
+        FatalError.raise()
+    }
+
     /// Parse and throw away a parenthesized comma separated
     /// sequence of patterns until `)` is reached.
     fn skip_pat_list(&mut self) -> PResult<'a, ()> {
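
`recover_diff_marker` aborts with a single "encountered diff marker" error, labelling the first marker "after this is the code before the merge" and the last "above this are the incoming code changes", instead of emitting a cascade of parse errors. A source file that would trigger it looks roughly like this (indented here for display):

    <<<<<<< HEAD
    fn local_version() {}
    ||||||| merged common ancestors
    fn original_version() {}
    =======
    fn incoming_version() {}
    >>>>>>> feature-branch
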
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 7355730c9eb..e00eda47c66 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1,58 +1,37 @@
 use super::diagnostics::SnapshotParser;
-use super::pat::{CommaRecoveryMode, RecoverColon, RecoverComma, PARAM_EXPECTED};
+use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
     AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
     SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken,
 };
-use crate::errors::{
-    ArrayBracketsInsteadOfSpaces, ArrayBracketsInsteadOfSpacesSugg, AsyncMoveOrderIncorrect,
-    BinaryFloatLiteralNotSupported, BracesForStructLiteral, CatchAfterTry, CommaAfterBaseStruct,
-    ComparisonInterpretedAsGeneric, ComparisonOrShiftInterpretedAsGenericSugg,
-    DoCatchSyntaxRemoved, DotDotDot, EqFieldInit, ExpectedElseBlock, ExpectedEqForLetExpr,
-    ExpectedExpressionFoundLet, FieldExpressionWithGeneric, FloatLiteralRequiresIntegerPart,
-    FoundExprWouldBeStmt, HexadecimalFloatLiteralNotSupported, IfExpressionMissingCondition,
-    IfExpressionMissingThenBlock, IfExpressionMissingThenBlockSub, IntLiteralTooLarge,
-    InvalidBlockMacroSegment, InvalidComparisonOperator, InvalidComparisonOperatorSub,
-    InvalidFloatLiteralSuffix, InvalidFloatLiteralWidth, InvalidIntLiteralWidth,
-    InvalidInterpolatedExpression, InvalidLiteralSuffix, InvalidLiteralSuffixOnTupleIndex,
-    InvalidLogicalOperator, InvalidLogicalOperatorSub, InvalidNumLiteralBasePrefix,
-    InvalidNumLiteralSuffix, LabeledLoopInBreak, LeadingPlusNotSupported, LeftArrowOperator,
-    LifetimeInBorrowExpression, MacroInvocationWithQualifiedPath, MalformedLoopLabel,
-    MatchArmBodyWithoutBraces, MatchArmBodyWithoutBracesSugg, MissingCommaAfterMatchArm,
-    MissingDotDot, MissingInInForLoop, MissingInInForLoopSub, MissingSemicolonBeforeArray,
-    NoFieldsForFnCall, NotAsNegationOperator, NotAsNegationOperatorSub,
-    OctalFloatLiteralNotSupported, OuterAttributeNotAllowedOnIfElse, ParenthesesWithStructFields,
-    RequireColonAfterLabeledExpression, ShiftInterpretedAsGeneric, StructLiteralNotAllowedHere,
-    StructLiteralNotAllowedHereSugg, TildeAsUnaryOperator, UnexpectedTokenAfterLabel,
-    UnexpectedTokenAfterLabelSugg, WrapExpressionInParentheses,
-};
-use crate::maybe_recover_from_interpolated_ty_qpath;
 
+use crate::errors;
+use crate::maybe_recover_from_interpolated_ty_qpath;
 use core::mem;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::Spacing;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
-use rustc_ast::util::literal::LitError;
 use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
 use rustc_ast::visit::Visitor;
-use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
+use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
 use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
 use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
-use rustc_ast::{ClosureBinder, StmtKind};
+use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{
-    Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
-    StashKey,
+    AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic,
+    PResult, StashKey,
 };
-use rustc_session::errors::ExprParenthesesNeeded;
+use rustc_session::errors::{report_lit_error, ExprParenthesesNeeded};
 use rustc_session::lint::builtin::BREAK_WITH_LABEL_AND_LOOP;
 use rustc_session::lint::BuiltinLintDiagnostics;
 use rustc_span::source_map::{self, Span, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::{BytePos, Pos};
+use thin_vec::{thin_vec, ThinVec};
 
 /// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression
 /// dropped into the token stream, which happens while parsing the result of
@@ -88,7 +67,7 @@ macro_rules! maybe_whole_expr {
 pub(super) enum LhsExpr {
     NotYetParsed,
     AttributesParsed(AttrWrapper),
-    AlreadyParsed(P<Expr>),
+    AlreadyParsed { expr: P<Expr>, starts_statement: bool },
 }
 
 impl From<Option<AttrWrapper>> for LhsExpr {
@@ -102,11 +81,11 @@ impl From<Option<AttrWrapper>> for LhsExpr {
 }
 
 impl From<P<Expr>> for LhsExpr {
-    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed(expr)`.
+    /// Converts the `expr: P<Expr>` into `LhsExpr::AlreadyParsed { expr, starts_statement: false }`.
     ///
     /// This conversion does not allocate.
     fn from(expr: P<Expr>) -> Self {
-        LhsExpr::AlreadyParsed(expr)
+        LhsExpr::AlreadyParsed { expr, starts_statement: false }
     }
 }
 
@@ -124,7 +103,7 @@ impl<'a> Parser<'a> {
         self.collect_tokens_no_attrs(|this| this.parse_expr())
     }
 
-    pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
+    pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> {
         self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value })
     }
 
@@ -146,7 +125,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a sequence of expressions delimited by parentheses.
-    fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec<P<Expr>>> {
+    fn parse_expr_paren_seq(&mut self) -> PResult<'a, ThinVec<P<Expr>>> {
         self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r)
     }
 
@@ -157,7 +136,7 @@ impl<'a> Parser<'a> {
         r: Restrictions,
         already_parsed_attrs: Option<AttrWrapper>,
     ) -> PResult<'a, P<Expr>> {
-        self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
+        self.with_res(r, |this| this.parse_expr_assoc(already_parsed_attrs))
     }
 
     /// Parses an associative expression.
@@ -165,30 +144,32 @@ impl<'a> Parser<'a> {
     /// This parses an expression accounting for associativity and precedence of the operators in
     /// the expression.
     #[inline]
-    fn parse_assoc_expr(
+    fn parse_expr_assoc(
         &mut self,
         already_parsed_attrs: Option<AttrWrapper>,
     ) -> PResult<'a, P<Expr>> {
-        self.parse_assoc_expr_with(0, already_parsed_attrs.into())
+        self.parse_expr_assoc_with(0, already_parsed_attrs.into())
     }
 
     /// Parses an associative expression with operators of at least `min_prec` precedence.
-    pub(super) fn parse_assoc_expr_with(
+    pub(super) fn parse_expr_assoc_with(
         &mut self,
         min_prec: usize,
         lhs: LhsExpr,
     ) -> PResult<'a, P<Expr>> {
-        let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs {
+        let mut starts_stmt = false;
+        let mut lhs = if let LhsExpr::AlreadyParsed { expr, starts_statement } = lhs {
+            starts_stmt = starts_statement;
             expr
         } else {
             let attrs = match lhs {
                 LhsExpr::AttributesParsed(attrs) => Some(attrs),
                 _ => None,
             };
-            if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) {
-                return self.parse_prefix_range_expr(attrs);
+            if self.token.is_range_separator() {
+                return self.parse_expr_prefix_range(attrs);
             } else {
-                self.parse_prefix_expr(attrs)?
+                self.parse_expr_prefix(attrs)?
             }
         };
         let last_type_ascription_set = self.last_type_ascription.is_some();
@@ -246,10 +227,10 @@ impl<'a> Parser<'a> {
                 }
                 .into();
                 let invalid = format!("{}=", &sugg);
-                self.sess.emit_err(InvalidComparisonOperator {
+                self.sess.emit_err(errors::InvalidComparisonOperator {
                     span: sp,
                     invalid: invalid.clone(),
-                    sub: InvalidComparisonOperatorSub::Correctable {
+                    sub: errors::InvalidComparisonOperatorSub::Correctable {
                         span: sp,
                         invalid,
                         correct: sugg,
@@ -264,10 +245,10 @@ impl<'a> Parser<'a> {
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
                 let sp = op.span.to(self.token.span);
-                self.sess.emit_err(InvalidComparisonOperator {
+                self.sess.emit_err(errors::InvalidComparisonOperator {
                     span: sp,
                     invalid: "<>".into(),
-                    sub: InvalidComparisonOperatorSub::Correctable {
+                    sub: errors::InvalidComparisonOperatorSub::Correctable {
                         span: sp,
                         invalid: "<>".into(),
                         correct: "!=".into(),
@@ -282,10 +263,10 @@ impl<'a> Parser<'a> {
                 && self.prev_token.span.hi() == self.token.span.lo()
             {
                 let sp = op.span.to(self.token.span);
-                self.sess.emit_err(InvalidComparisonOperator {
+                self.sess.emit_err(errors::InvalidComparisonOperator {
                     span: sp,
                     invalid: "<=>".into(),
-                    sub: InvalidComparisonOperatorSub::Spaceship(sp),
+                    sub: errors::InvalidComparisonOperatorSub::Spaceship(sp),
                 });
                 self.bump();
             }
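Context for the two hunks above: they only reroute the existing recoveries for the non-Rust comparison spellings `<>` and `<=>` through the `errors::InvalidComparisonOperator` path; the recovery behaviour itself is unchanged. A minimal sketch of the inputs involved and the spelling the `Correctable` suggestion points at (values illustrative, diagnostic wording lives in the error structs and is not reproduced here):

    fn main() {
        // `1 <> 2` is recovered with a suggestion to write `!=`;
        // `1 <=> 2` (a C++/Ruby-style spaceship) has no Rust operator and is
        // reported via the `Spaceship` subdiagnostic instead.
        let ne = 1 != 2;
        println!("{ne}");
    }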
@@ -297,7 +278,19 @@ impl<'a> Parser<'a> {
                 let op_span = self.prev_token.span.to(self.token.span);
                 // Eat the second `+`
                 self.bump();
-                lhs = self.recover_from_postfix_increment(lhs, op_span)?;
+                lhs = self.recover_from_postfix_increment(lhs, op_span, starts_stmt)?;
+                continue;
+            }
+
+            if self.prev_token == token::BinOp(token::Minus)
+                && self.token == token::BinOp(token::Minus)
+                && self.prev_token.span.between(self.token.span).is_empty()
+                && !self.look_ahead(1, |tok| tok.can_begin_expr())
+            {
+                let op_span = self.prev_token.span.to(self.token.span);
+                // Eat the second `-`
+                self.bump();
+                lhs = self.recover_from_postfix_decrement(lhs, op_span, starts_stmt)?;
                 continue;
             }
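The new block above mirrors the existing postfix `++` recovery for `--`. Note the guards: the two `-` tokens must be adjacent and must not be followed by something that can begin an expression, so ordinary code such as `a - -b` still parses as subtraction of a negation. The `starts_stmt` flag threaded in from `LhsExpr::AlreadyParsed { starts_statement, .. }` records whether the operand opened a statement, which the recovery can use when picking its suggestion. A sketch of the input form and the compiling rewrite it steers toward (names illustrative; the exact suggestion text comes from `recover_from_postfix_decrement`, which is not shown in this hunk):

    fn main() {
        let mut x = 3;
        // `x--;` is not a Rust operator and previously produced a cascade of
        // follow-on errors; it is now eaten and diagnosed in one step.
        x -= 1; // the compiling equivalent
        println!("{x}");
    }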
 
@@ -312,7 +305,7 @@ impl<'a> Parser<'a> {
             } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq {
                 // If we didn't have to handle `x..`/`x..=`, it would be pretty easy to
                 // generalise it to the Fixity::None code.
-                lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?;
+                lhs = self.parse_expr_range(prec, lhs, op, cur_op_span)?;
                 break;
             }
 
@@ -325,7 +318,7 @@ impl<'a> Parser<'a> {
                 Fixity::None => 1,
             };
             let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
-                this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
+                this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::NotYetParsed)
             })?;
 
             let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
@@ -395,20 +388,11 @@ impl<'a> Parser<'a> {
             // want to keep their span info to improve diagnostics in these cases in a later stage.
             (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
             (true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
-            (true, Some(AssocOp::Add)) // `{ 42 } + 42
-            // If the next token is a keyword, then the tokens above *are* unambiguously incorrect:
-            // `if x { a } else { b } && if y { c } else { d }`
-            if !self.look_ahead(1, |t| t.is_used_keyword()) => {
-                // These cases are ambiguous and can't be identified in the parser alone.
-                let sp = self.sess.source_map().start_point(self.token.span);
-                self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
-                false
-            }
-            (true, Some(AssocOp::LAnd)) |
-            (true, Some(AssocOp::LOr)) |
-            (true, Some(AssocOp::BitOr)) => {
-                // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`. Separated from the
-                // above due to #74233.
+            (true, Some(AssocOp::Add)) | // `{ 42 } + 42` (unary plus)
+            (true, Some(AssocOp::LAnd)) | // `{ 42 } &&x` (#61475) or `{ 42 } && if x { 1 } else { 0 }`
+            (true, Some(AssocOp::LOr)) | // `{ 42 } || 42` ("logical or" or closure)
+            (true, Some(AssocOp::BitOr)) // `{ 42 } | 42` or `{ 42 } |x| 42`
+            => {
                 // These cases are ambiguous and can't be identified in the parser alone.
                 //
                 // Bitwise AND is left out because guessing intent is hard. We can make
@@ -419,7 +403,7 @@ impl<'a> Parser<'a> {
                 self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
                 false
             }
-            (true, Some(ref op)) if !op.can_continue_expr_unambiguously() => false,
+            (true, Some(op)) if !op.can_continue_expr_unambiguously() => false,
             (true, Some(_)) => {
                 self.error_found_expr_would_be_stmt(lhs);
                 true
@@ -431,7 +415,7 @@ impl<'a> Parser<'a> {
     /// but the next token implies this should be parsed as an expression.
     /// For example: `if let Some(x) = x { x } else { 0 } / 2`.
     fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
-        self.sess.emit_err(FoundExprWouldBeStmt {
+        self.sess.emit_err(errors::FoundExprWouldBeStmt {
             span: self.token.span,
             token: self.token.clone(),
             suggestion: ExprParenthesesNeeded::surrounding(lhs.span),
@@ -458,18 +442,18 @@ impl<'a> Parser<'a> {
             }
             (Some(op), _) => (op, self.token.span),
             (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
-                self.sess.emit_err(InvalidLogicalOperator {
+                self.sess.emit_err(errors::InvalidLogicalOperator {
                     span: self.token.span,
                     incorrect: "and".into(),
-                    sub: InvalidLogicalOperatorSub::Conjunction(self.token.span),
+                    sub: errors::InvalidLogicalOperatorSub::Conjunction(self.token.span),
                 });
                 (AssocOp::LAnd, span)
             }
             (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
-                self.sess.emit_err(InvalidLogicalOperator {
+                self.sess.emit_err(errors::InvalidLogicalOperator {
                     span: self.token.span,
                     incorrect: "or".into(),
-                    sub: InvalidLogicalOperatorSub::Disjunction(self.token.span),
+                    sub: errors::InvalidLogicalOperatorSub::Disjunction(self.token.span),
                 });
                 (AssocOp::LOr, span)
             }
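These arms, now routed through `errors::InvalidLogicalOperator`, recover the word operators `and` and `or` (and only when recovery is allowed, per `may_recover`). A compiling sketch of what the `Conjunction`/`Disjunction` suggestions point at (names illustrative):

    fn main() {
        let (a, b) = (true, false);
        // `a and b` / `a or b` parse with an error and continue as if written:
        let all = a && b;
        let any = a || b;
        println!("{all} {any}");
    }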
@@ -486,7 +470,7 @@ impl<'a> Parser<'a> {
 
     /// Parses `x..y`, `x..=y`, and `x..`/`x..=`.
     /// The other two variants are handled in `parse_prefix_range_expr` below.
-    fn parse_range_expr(
+    fn parse_expr_range(
         &mut self,
         prec: usize,
         lhs: P<Expr>,
@@ -494,7 +478,7 @@ impl<'a> Parser<'a> {
         cur_op_span: Span,
     ) -> PResult<'a, P<Expr>> {
         let rhs = if self.is_at_start_of_range_notation_rhs() {
-            Some(self.parse_assoc_expr_with(prec + 1, LhsExpr::NotYetParsed)?)
+            Some(self.parse_expr_assoc_with(prec + 1, LhsExpr::NotYetParsed)?)
         } else {
             None
         };
@@ -519,14 +503,14 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses prefix-forms of range notation: `..expr`, `..`, `..=expr`.
-    fn parse_prefix_range_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_prefix_range(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         // Check for deprecated `...` syntax.
         if self.token == token::DotDotDot {
             self.err_dotdotdot_syntax(self.token.span);
         }
 
         debug_assert!(
-            [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind),
+            self.token.is_range_separator(),
             "parse_prefix_range_expr: token {:?} is not DotDot/DotDotEq",
             self.token
         );
@@ -546,7 +530,7 @@ impl<'a> Parser<'a> {
             this.bump();
             let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
                 // RHS must be parsed with more associativity than the dots.
-                this.parse_assoc_expr_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
+                this.parse_expr_assoc_with(op.unwrap().precedence() + 1, LhsExpr::NotYetParsed)
                     .map(|x| (lo.to(x.span), Some(x)))?
             } else {
                 (lo, None)
@@ -557,7 +541,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a prefix-unary-operator expr.
-    fn parse_prefix_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_prefix(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         let lo = self.token.span;
 
@@ -574,20 +558,29 @@ impl<'a> Parser<'a> {
 
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
         match this.token.uninterpolate().kind {
-            token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr`
-            token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr`
+            // `!expr`
+            token::Not => make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Not)),
+            // `~expr`
+            token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)),
+            // `-expr`
             token::BinOp(token::Minus) => {
-                make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg))
-            } // `-expr`
+                make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Neg))
+            }
+            // `*expr`
             token::BinOp(token::Star) => {
-                make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref))
-            } // `*expr`
+                make_it!(this, attrs, |this, _| this.parse_expr_unary(lo, UnOp::Deref))
+            }
+            // `&expr` and `&&expr`
             token::BinOp(token::And) | token::AndAnd => {
-                make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo))
+                make_it!(this, attrs, |this, _| this.parse_expr_borrow(lo))
             }
+            // `+lit`
             token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => {
-                let mut err =
-                    LeadingPlusNotSupported { span: lo, remove_plus: None, add_parentheses: None };
+                let mut err = errors::LeadingPlusNotSupported {
+                    span: lo,
+                    remove_plus: None,
+                    add_parentheses: None,
+                };
 
                 // a block on the LHS might have been intended to be an expression instead
                 if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
@@ -598,53 +591,54 @@ impl<'a> Parser<'a> {
                 this.sess.emit_err(err);
 
                 this.bump();
-                this.parse_prefix_expr(None)
-            } // `+expr`
+                this.parse_expr_prefix(None)
+            }
             // Recover from `++x`:
             token::BinOp(token::Plus)
                 if this.look_ahead(1, |t| *t == token::BinOp(token::Plus)) =>
             {
-                let prev_is_semi = this.prev_token == token::Semi;
+                let starts_stmt = this.prev_token == token::Semi
+                    || this.prev_token == token::CloseDelim(Delimiter::Brace);
                 let pre_span = this.token.span.to(this.look_ahead(1, |t| t.span));
                 // Eat both `+`s.
                 this.bump();
                 this.bump();
 
-                let operand_expr = this.parse_dot_or_call_expr(Default::default())?;
-                this.recover_from_prefix_increment(operand_expr, pre_span, prev_is_semi)
+                let operand_expr = this.parse_expr_dot_or_call(Default::default())?;
+                this.recover_from_prefix_increment(operand_expr, pre_span, starts_stmt)
             }
             token::Ident(..) if this.token.is_keyword(kw::Box) => {
-                make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
+                make_it!(this, attrs, |this, _| this.parse_expr_box(lo))
             }
             token::Ident(..) if this.may_recover() && this.is_mistaken_not_ident_negation() => {
                 make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
             }
-            _ => return this.parse_dot_or_call_expr(Some(attrs)),
+            _ => return this.parse_expr_dot_or_call(Some(attrs)),
         }
     }
 
-    fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
+    fn parse_expr_prefix_common(&mut self, lo: Span) -> PResult<'a, (Span, P<Expr>)> {
         self.bump();
-        let expr = self.parse_prefix_expr(None);
+        let expr = self.parse_expr_prefix(None);
         let (span, expr) = self.interpolated_or_expr_span(expr)?;
         Ok((lo.to(span), expr))
     }
 
-    fn parse_unary_expr(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
-        let (span, expr) = self.parse_prefix_expr_common(lo)?;
+    fn parse_expr_unary(&mut self, lo: Span, op: UnOp) -> PResult<'a, (Span, ExprKind)> {
+        let (span, expr) = self.parse_expr_prefix_common(lo)?;
         Ok((span, self.mk_unary(op, expr)))
     }
 
-    // Recover on `!` suggesting for bitwise negation instead.
+    /// Recover on `~expr` in favor of `!expr`.
     fn recover_tilde_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        self.sess.emit_err(TildeAsUnaryOperator(lo));
+        self.sess.emit_err(errors::TildeAsUnaryOperator(lo));
 
-        self.parse_unary_expr(lo, UnOp::Not)
+        self.parse_expr_unary(lo, UnOp::Not)
     }
 
     /// Parse `box expr`.
-    fn parse_box_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        let (span, expr) = self.parse_prefix_expr_common(lo)?;
+    fn parse_expr_box(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+        let (span, expr) = self.parse_expr_prefix_common(lo)?;
         self.sess.gated_spans.gate(sym::box_syntax, span);
         Ok((span, ExprKind::Box(expr)))
     }
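The prefix-operator match reshuffled above also hosts two older recoveries that this diff only touches cosmetically: `~expr` is reported via `TildeAsUnaryOperator` and then parsed as `!expr`, and a leading `+` on a numeric literal is reported via `LeadingPlusNotSupported` (with a remove-the-plus suggestion, or parentheses when the left-hand side was an ambiguous block). A small sketch of the accepted spellings (values illustrative):

    fn main() {
        let a = !0u8; // `~0u8` is recovered to this; `!` is Rust's bitwise/logical NOT
        let b = 1;    // `+1` is recovered by suggesting the `+` be removed
        println!("{a} {b}");
    }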
@@ -662,18 +656,17 @@ impl<'a> Parser<'a> {
 
     /// Recover on `not expr` in favor of `!expr`.
     fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
-        // Emit the error...
         let negated_token = self.look_ahead(1, |t| t.clone());
 
         let sub_diag = if negated_token.is_numeric_lit() {
-            NotAsNegationOperatorSub::SuggestNotBitwise
+            errors::NotAsNegationOperatorSub::SuggestNotBitwise
         } else if negated_token.is_bool_lit() {
-            NotAsNegationOperatorSub::SuggestNotLogical
+            errors::NotAsNegationOperatorSub::SuggestNotLogical
         } else {
-            NotAsNegationOperatorSub::SuggestNotDefault
+            errors::NotAsNegationOperatorSub::SuggestNotDefault
         };
 
-        self.sess.emit_err(NotAsNegationOperator {
+        self.sess.emit_err(errors::NotAsNegationOperator {
             negated: negated_token.span,
             negated_desc: super::token_descr(&negated_token),
             // Span the `not` plus trailing whitespace to avoid
@@ -683,8 +676,7 @@ impl<'a> Parser<'a> {
             ),
         });
 
-        // ...and recover!
-        self.parse_unary_expr(lo, UnOp::Not)
+        self.parse_expr_unary(lo, UnOp::Not)
     }
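`recover_not_expr`, renamed and rerouted above, handles the word operator `not`, choosing between the bitwise, logical, and default `NotAsNegationOperatorSub` suggestions based on whether the operand is a numeric or bool literal. Sketch (names illustrative):

    fn main() {
        let flag = false;
        // `not flag` is recovered; for a bool operand the suggestion is `!`:
        let inverted = !flag;
        println!("{inverted}");
    }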
 
     /// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
@@ -742,10 +734,10 @@ impl<'a> Parser<'a> {
                                 segments[0].ident.span,
                             ),
                         };
-                        match self.parse_labeled_expr(label, false) {
+                        match self.parse_expr_labeled(label, false) {
                             Ok(expr) => {
                                 type_err.cancel();
-                                self.sess.emit_err(MalformedLoopLabel {
+                                self.sess.emit_err(errors::MalformedLoopLabel {
                                     span: label.ident.span,
                                     correct_label: label.ident,
                                 });
@@ -770,20 +762,22 @@ impl<'a> Parser<'a> {
                         );
 
                         let args_span = self.look_ahead(1, |t| t.span).to(span_after_type);
-                        let suggestion = ComparisonOrShiftInterpretedAsGenericSugg {
+                        let suggestion = errors::ComparisonOrShiftInterpretedAsGenericSugg {
                             left: expr.span.shrink_to_lo(),
                             right: expr.span.shrink_to_hi(),
                         };
 
                         match self.token.kind {
-                            token::Lt => self.sess.emit_err(ComparisonInterpretedAsGeneric {
-                                comparison: self.token.span,
-                                r#type: path,
-                                args: args_span,
-                                suggestion,
-                            }),
+                            token::Lt => {
+                                self.sess.emit_err(errors::ComparisonInterpretedAsGeneric {
+                                    comparison: self.token.span,
+                                    r#type: path,
+                                    args: args_span,
+                                    suggestion,
+                                })
+                            }
                             token::BinOp(token::Shl) => {
-                                self.sess.emit_err(ShiftInterpretedAsGeneric {
+                                self.sess.emit_err(errors::ShiftInterpretedAsGeneric {
                                     shift: self.token.span,
                                     r#type: path,
                                     args: args_span,
@@ -834,7 +828,7 @@ impl<'a> Parser<'a> {
                 ("cast", None)
             };
 
-        let with_postfix = self.parse_dot_or_call_expr_with_(cast_expr, span)?;
+        let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?;
 
         // Check if an illegal postfix operator has been added after the cast.
         // If the resulting expression is not a cast, it is an illegal postfix operator.
@@ -845,7 +839,7 @@ impl<'a> Parser<'a> {
                     ExprKind::Index(_, _) => "indexing",
                     ExprKind::Try(_) => "`?`",
                     ExprKind::Field(_, _) => "a field access",
-                    ExprKind::MethodCall(_, _, _, _) => "a method call",
+                    ExprKind::MethodCall(_) => "a method call",
                     ExprKind::Call(_, _) => "a function call",
                     ExprKind::Await(_) => "`.await`",
                     ExprKind::Err => return Ok(with_postfix),
@@ -905,12 +899,16 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `& mut? <expr>` or `& raw [ const | mut ] <expr>`.
-    fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
+    fn parse_expr_borrow(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> {
         self.expect_and()?;
         let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon);
         let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below.
         let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo);
-        let expr = self.parse_prefix_expr(None);
+        let expr = if self.token.is_range_separator() {
+            self.parse_expr_prefix_range(None)
+        } else {
+            self.parse_expr_prefix(None)
+        };
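The new branch lets a borrow be followed directly by a range separator, sending `& ..`-style input to `parse_expr_prefix_range` instead of `parse_expr_prefix`, which does not handle a bare range start. The parenthesized forms below certainly compile today; with this change the unparenthesized spellings should parse the same way (types and names illustrative):

    fn main() {
        // Borrow of a prefix range; previously only the parenthesized form parsed.
        let full: &std::ops::RangeFull = &(..);
        let upto: &std::ops::RangeTo<i32> = &(..10);
        println!("{:?} {:?}", full, upto.end);
    }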
         let (hi, expr) = self.interpolated_or_expr_span(expr)?;
         let span = lo.to(hi);
         if let Some(lt) = lifetime {
@@ -920,7 +918,7 @@ impl<'a> Parser<'a> {
     }
 
     fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) {
-        self.sess.emit_err(LifetimeInBorrowExpression { span, lifetime_span: lt_span });
+        self.sess.emit_err(errors::LifetimeInBorrowExpression { span, lifetime_span: lt_span });
     }
 
     /// Parse `mut?` or `raw [ const | mut ]`.
@@ -939,16 +937,16 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
-    fn parse_dot_or_call_expr(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_dot_or_call(&mut self, attrs: Option<AttrWrapper>) -> PResult<'a, P<Expr>> {
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         self.collect_tokens_for_expr(attrs, |this, attrs| {
-            let base = this.parse_bottom_expr();
+            let base = this.parse_expr_bottom();
             let (span, base) = this.interpolated_or_expr_span(base)?;
-            this.parse_dot_or_call_expr_with(base, span, attrs)
+            this.parse_expr_dot_or_call_with(base, span, attrs)
         })
     }
 
-    pub(super) fn parse_dot_or_call_expr_with(
+    pub(super) fn parse_expr_dot_or_call_with(
         &mut self,
         e0: P<Expr>,
         lo: Span,
@@ -957,7 +955,7 @@ impl<'a> Parser<'a> {
         // Stitch the list of outer attributes onto the return value.
         // A little bit ugly, but the best way given the current code
         // structure
-        let res = self.parse_dot_or_call_expr_with_(e0, lo);
+        let res = self.parse_expr_dot_or_call_with_(e0, lo);
         if attrs.is_empty() {
             res
         } else {
@@ -971,7 +969,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
+    fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
         loop {
             let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
                 // we are using noexpect here because we don't expect a `?` directly after a `return`
@@ -994,15 +992,15 @@ impl<'a> Parser<'a> {
             };
             if has_dot {
                 // expr.f
-                e = self.parse_dot_suffix_expr(lo, e)?;
+                e = self.parse_expr_dot_suffix(lo, e)?;
                 continue;
             }
             if self.expr_is_complete(&e) {
                 return Ok(e);
             }
             e = match self.token.kind {
-                token::OpenDelim(Delimiter::Parenthesis) => self.parse_fn_call_expr(lo, e),
-                token::OpenDelim(Delimiter::Bracket) => self.parse_index_expr(lo, e)?,
+                token::OpenDelim(Delimiter::Parenthesis) => self.parse_expr_fn_call(lo, e),
+                token::OpenDelim(Delimiter::Bracket) => self.parse_expr_index(lo, e)?,
                 _ => return Ok(e),
             }
         }
@@ -1014,14 +1012,14 @@ impl<'a> Parser<'a> {
             && self.look_ahead(3, |t| t.can_begin_expr())
     }
 
-    fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_dot_suffix(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
         match self.token.uninterpolate().kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
-                Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None))
+                Ok(self.parse_expr_tuple_field_access(lo, base, symbol, suffix, None))
             }
             token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
-                Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix))
+                Ok(self.parse_expr_tuple_field_access_float(lo, base, symbol, suffix))
             }
             _ => {
                 self.error_unexpected_after_dot();
@@ -1033,7 +1031,7 @@ impl<'a> Parser<'a> {
     fn error_unexpected_after_dot(&self) {
         // FIXME Could factor this out into non_fatal_unexpected or something.
         let actual = pprust::token_to_string(&self.token);
-        self.struct_span_err(self.token.span, &format!("unexpected token: `{actual}`")).emit();
+        self.sess.emit_err(errors::UnexpectedTokenAfterDot { span: self.token.span, actual });
     }
 
     // We need an identifier or integer, but the next token is a float.
@@ -1043,7 +1041,7 @@ impl<'a> Parser<'a> {
     // support pushing "future tokens" (would be also helpful to `break_and_eat`), or
     // we should break everything including floats into more basic proc-macro style
     // tokens in the lexer (probably preferable).
-    fn parse_tuple_field_access_expr_float(
+    fn parse_expr_tuple_field_access_float(
         &mut self,
         lo: Span,
         base: P<Expr>,
@@ -1086,7 +1084,7 @@ impl<'a> Parser<'a> {
         match &*components {
             // 1e2
             [IdentLike(i)] => {
-                self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None)
+                self.parse_expr_tuple_field_access(lo, base, Symbol::intern(&i), suffix, None)
             }
             // 1.
             [IdentLike(i), Punct('.')] => {
@@ -1102,7 +1100,7 @@ impl<'a> Parser<'a> {
                 let symbol = Symbol::intern(&i);
                 self.token = Token::new(token::Ident(symbol, false), ident_span);
                 let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
-                self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token))
+                self.parse_expr_tuple_field_access(lo, base, symbol, None, Some(next_token))
             }
             // 1.2 | 1.2e3
             [IdentLike(i1), Punct('.'), IdentLike(i2)] => {
@@ -1123,11 +1121,11 @@ impl<'a> Parser<'a> {
                 // See issue #76399 and PR #76285 for more details
                 let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
                 let base1 =
-                    self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1));
+                    self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
                 let symbol2 = Symbol::intern(&i2);
                 let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
                 self.bump_with((next_token2, self.token_spacing)); // `.`
-                self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None)
+                self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
             }
             // 1e+ | 1e- (recovered)
             [IdentLike(_), Punct('+' | '-')] |
@@ -1145,7 +1143,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_tuple_field_access_expr(
+    fn parse_expr_tuple_field_access(
         &mut self,
         lo: Span,
         base: P<Expr>,
@@ -1166,7 +1164,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a function call expression, `expr(...)`.
-    fn parse_fn_call_expr(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
+    fn parse_expr_fn_call(&mut self, lo: Span, fun: P<Expr>) -> P<Expr> {
         let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
             && self.look_ahead_type_ascription_as_field()
         {
@@ -1177,7 +1175,7 @@ impl<'a> Parser<'a> {
         let open_paren = self.token.span;
 
         let mut seq = self
-            .parse_paren_expr_seq()
+            .parse_expr_paren_seq()
             .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
         if let Some(expr) =
             self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot)
@@ -1212,16 +1210,21 @@ impl<'a> Parser<'a> {
                         // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
                         self.restore_snapshot(snapshot);
                         let close_paren = self.prev_token.span;
-                        let span = lo.to(self.prev_token.span);
-                        if !fields.is_empty() {
-                            let mut replacement_err = ParenthesesWithStructFields {
+                        let span = lo.to(close_paren);
+                        if !fields.is_empty() &&
+                            // `token.kind` should not be compared here.
+                            // This is because the `snapshot.token.kind` is treated as the same as
+                            // that of the open delim in `TokenTreesReader::parse_token_tree`, even if they are different.
+                            self.span_to_snippet(close_paren).map_or(false, |snippet| snippet == ")")
+                        {
+                            let mut replacement_err = errors::ParenthesesWithStructFields {
                                 span,
                                 r#type: path,
-                                braces_for_struct: BracesForStructLiteral {
+                                braces_for_struct: errors::BracesForStructLiteral {
                                     first: open_paren,
                                     second: close_paren,
                                 },
-                                no_fields_for_fn: NoFieldsForFnCall {
+                                no_fields_for_fn: errors::NoFieldsForFnCall {
                                     fields: fields
                                         .into_iter()
                                         .map(|field| field.span.until(field.expr.span))
@@ -1250,7 +1253,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse an indexing expression `expr[...]`.
-    fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
+    fn parse_expr_index(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
         let prev_span = self.prev_token.span;
         let open_delim_span = self.token.span;
         self.bump(); // `[`
@@ -1267,24 +1270,32 @@ impl<'a> Parser<'a> {
         }
 
         let fn_span_lo = self.token.span;
-        let mut segment = self.parse_path_segment(PathStyle::Expr, None)?;
-        self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(Delimiter::Parenthesis)]);
-        self.check_turbofish_missing_angle_brackets(&mut segment);
+        let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
+        self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]);
+        self.check_turbofish_missing_angle_brackets(&mut seg);
 
         if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             // Method call `expr.f()`
-            let args = self.parse_paren_expr_seq()?;
+            let args = self.parse_expr_paren_seq()?;
             let fn_span = fn_span_lo.to(self.prev_token.span);
             let span = lo.to(self.prev_token.span);
-            Ok(self.mk_expr(span, ExprKind::MethodCall(segment, self_arg, args, fn_span)))
+            Ok(self.mk_expr(
+                span,
+                ExprKind::MethodCall(Box::new(ast::MethodCall {
+                    seg,
+                    receiver: self_arg,
+                    args,
+                    span: fn_span,
+                })),
+            ))
         } else {
             // Field access `expr.f`
-            if let Some(args) = segment.args {
-                self.sess.emit_err(FieldExpressionWithGeneric(args.span()));
+            if let Some(args) = seg.args {
+                self.sess.emit_err(errors::FieldExpressionWithGeneric(args.span()));
             }
 
             let span = lo.to(self.prev_token.span);
-            Ok(self.mk_expr(span, ExprKind::Field(self_arg, segment.ident)))
+            Ok(self.mk_expr(span, ExprKind::Field(self_arg, seg.ident)))
         }
     }
 
@@ -1293,7 +1304,7 @@ impl<'a> Parser<'a> {
     ///
     /// N.B., this does not parse outer attributes, and is private because it only works
     /// correctly if called from `parse_dot_or_call_expr()`.
-    fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
         maybe_whole_expr!(self);
 
@@ -1306,13 +1317,13 @@ impl<'a> Parser<'a> {
             // This match arm is a special-case of the `_` match arm below and
             // could be removed without changing functionality, but it's faster
             // to have it here, especially for programs with large constants.
-            self.parse_lit_expr()
+            self.parse_expr_lit()
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
-            self.parse_tuple_parens_expr()
+            self.parse_expr_tuple_parens()
         } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-            self.parse_block_expr(None, lo, BlockCheckMode::Default)
+            self.parse_expr_block(None, lo, BlockCheckMode::Default)
         } else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) {
-            self.parse_closure_expr().map_err(|mut err| {
+            self.parse_expr_closure().map_err(|mut err| {
                 // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
                 // then suggest parens around the lhs.
                 if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&lo) {
@@ -1321,42 +1332,42 @@ impl<'a> Parser<'a> {
                 err
             })
         } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
-            self.parse_array_or_repeat_expr(Delimiter::Bracket)
+            self.parse_expr_array_or_repeat(Delimiter::Bracket)
         } else if self.check_path() {
-            self.parse_path_start_expr()
-        } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
-            self.parse_closure_expr()
+            self.parse_expr_path_start()
+        } else if self.check_keyword(kw::Move)
+            || self.check_keyword(kw::Static)
+            || self.check_const_closure()
+        {
+            self.parse_expr_closure()
         } else if self.eat_keyword(kw::If) {
-            self.parse_if_expr()
+            self.parse_expr_if()
         } else if self.check_keyword(kw::For) {
             if self.choose_generics_over_qpath(1) {
-                self.parse_closure_expr()
+                self.parse_expr_closure()
             } else {
                 assert!(self.eat_keyword(kw::For));
-                self.parse_for_expr(None, self.prev_token.span)
+                self.parse_expr_for(None, self.prev_token.span)
             }
         } else if self.eat_keyword(kw::While) {
-            self.parse_while_expr(None, self.prev_token.span)
+            self.parse_expr_while(None, self.prev_token.span)
         } else if let Some(label) = self.eat_label() {
-            self.parse_labeled_expr(label, true)
+            self.parse_expr_labeled(label, true)
         } else if self.eat_keyword(kw::Loop) {
             let sp = self.prev_token.span;
-            self.parse_loop_expr(None, self.prev_token.span).map_err(|mut err| {
+            self.parse_expr_loop(None, self.prev_token.span).map_err(|mut err| {
                 err.span_label(sp, "while parsing this `loop` expression");
                 err
             })
-        } else if self.eat_keyword(kw::Continue) {
-            let kind = ExprKind::Continue(self.eat_label());
-            Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
         } else if self.eat_keyword(kw::Match) {
             let match_sp = self.prev_token.span;
-            self.parse_match_expr().map_err(|mut err| {
+            self.parse_expr_match().map_err(|mut err| {
                 err.span_label(match_sp, "while parsing this `match` expression");
                 err
             })
         } else if self.eat_keyword(kw::Unsafe) {
             let sp = self.prev_token.span;
-            self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
+            self.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
                 |mut err| {
                     err.span_label(sp, "while parsing this `unsafe` expression");
                     err
@@ -1370,30 +1381,19 @@ impl<'a> Parser<'a> {
             self.expect_keyword(kw::Try)?;
             self.parse_try_block(lo)
         } else if self.eat_keyword(kw::Return) {
-            self.parse_return_expr()
+            self.parse_expr_return()
+        } else if self.eat_keyword(kw::Continue) {
+            self.parse_expr_continue(lo)
         } else if self.eat_keyword(kw::Break) {
-            self.parse_break_expr()
+            self.parse_expr_break()
         } else if self.eat_keyword(kw::Yield) {
-            self.parse_yield_expr()
+            self.parse_expr_yield()
         } else if self.is_do_yeet() {
-            self.parse_yeet_expr()
+            self.parse_expr_yeet()
         } else if self.check_keyword(kw::Let) {
-            self.parse_let_expr()
+            self.parse_expr_let()
         } else if self.eat_keyword(kw::Underscore) {
             Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore))
-        } else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
-            // Don't complain about bare semicolons after unclosed braces
-            // recovery in order to keep the error count down. Fixing the
-            // delimiters will possibly also fix the bare semicolon found in
-            // expression context. For example, silence the following error:
-            //
-            //     error: expected expression, found `;`
-            //      --> file.rs:2:13
-            //       |
-            //     2 |     foo(bar(;
-            //       |             ^ expected expression
-            self.bump();
-            Ok(self.mk_expr_err(self.token.span))
         } else if self.token.uninterpolated_span().rust_2018() {
             // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
             if self.check_keyword(kw::Async) {
@@ -1401,30 +1401,30 @@ impl<'a> Parser<'a> {
                     // Check for `async {` and `async move {`.
                     self.parse_async_block()
                 } else {
-                    self.parse_closure_expr()
+                    self.parse_expr_closure()
                 }
             } else if self.eat_keyword(kw::Await) {
                 self.recover_incorrect_await_syntax(lo, self.prev_token.span)
             } else {
-                self.parse_lit_expr()
+                self.parse_expr_lit()
             }
         } else {
-            self.parse_lit_expr()
+            self.parse_expr_lit()
         }
     }
 
-    fn parse_lit_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_lit(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        match self.parse_opt_lit() {
-            Some(literal) => {
-                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal));
+        match self.parse_opt_token_lit() {
+            Some((token_lit, _)) => {
+                let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(token_lit));
                 self.maybe_recover_from_bad_qpath(expr)
             }
             None => self.try_macro_suggestion(),
         }
     }
 
-    fn parse_tuple_parens_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_tuple_parens(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
         let (es, trailing_comma) = match self.parse_seq_to_end(
@@ -1448,40 +1448,39 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr)
     }
 
-    fn parse_array_or_repeat_expr(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
+    fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         self.bump(); // `[` or other open delim
 
         let close = &token::CloseDelim(close_delim);
         let kind = if self.eat(close) {
             // Empty vector
-            ExprKind::Array(Vec::new())
+            ExprKind::Array(ThinVec::new())
         } else {
             // Non-empty vector
             let first_expr = self.parse_expr()?;
             if self.eat(&token::Semi) {
                 // Repeating array syntax: `[ 0; 512 ]`
-                let count = self.parse_anon_const_expr()?;
+                let count = self.parse_expr_anon_const()?;
                 self.expect(close)?;
                 ExprKind::Repeat(first_expr, count)
             } else if self.eat(&token::Comma) {
                 // Vector with two or more elements.
                 let sep = SeqSep::trailing_allowed(token::Comma);
-                let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
-                let mut exprs = vec![first_expr];
-                exprs.extend(remaining_exprs);
+                let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
+                exprs.insert(0, first_expr);
                 ExprKind::Array(exprs)
             } else {
                 // Vector with one element
                 self.expect(close)?;
-                ExprKind::Array(vec![first_expr])
+                ExprKind::Array(thin_vec![first_expr])
             }
         };
         let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
         self.maybe_recover_from_bad_qpath(expr)
     }
 
-    fn parse_path_start_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_path_start(&mut self) -> PResult<'a, P<Expr>> {
         let (qself, path) = if self.eat_lt() {
             let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
             (Some(qself), path)
@@ -1493,21 +1492,22 @@ impl<'a> Parser<'a> {
         let (span, kind) = if self.eat(&token::Not) {
             // MACRO INVOCATION expression
             if qself.is_some() {
-                self.sess.emit_err(MacroInvocationWithQualifiedPath(path.span));
+                self.sess.emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
             }
             let lo = path.span;
             let mac = P(MacCall {
                 path,
-                args: self.parse_mac_args()?,
+                args: self.parse_delim_args()?,
                 prior_type_ascription: self.last_type_ascription,
             });
             (lo.to(self.prev_token.span), ExprKind::MacCall(mac))
-        } else if self.check(&token::OpenDelim(Delimiter::Brace)) &&
-            let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path) {
-                if qself.is_some() {
-                    self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
-                }
-                return expr;
+        } else if self.check(&token::OpenDelim(Delimiter::Brace))
+            && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
+        {
+            if qself.is_some() {
+                self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
+            }
+            return expr;
         } else {
             (path.span, ExprKind::Path(qself, path))
         };
@@ -1517,7 +1517,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `'label: $expr`. The label is already parsed.
-    fn parse_labeled_expr(
+    fn parse_expr_labeled(
         &mut self,
         label_: Label,
         mut consume_colon: bool,
@@ -1526,34 +1526,35 @@ impl<'a> Parser<'a> {
         let label = Some(label_);
         let ate_colon = self.eat(&token::Colon);
         let expr = if self.eat_keyword(kw::While) {
-            self.parse_while_expr(label, lo)
+            self.parse_expr_while(label, lo)
         } else if self.eat_keyword(kw::For) {
-            self.parse_for_expr(label, lo)
+            self.parse_expr_for(label, lo)
         } else if self.eat_keyword(kw::Loop) {
-            self.parse_loop_expr(label, lo)
+            self.parse_expr_loop(label, lo)
         } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
             || self.token.is_whole_block()
         {
-            self.parse_block_expr(label, lo, BlockCheckMode::Default)
+            self.parse_expr_block(label, lo, BlockCheckMode::Default)
         } else if !ate_colon
             && self.may_recover()
             && (matches!(self.token.kind, token::CloseDelim(_) | token::Comma)
                 || self.token.is_op())
         {
-            let lit = self.recover_unclosed_char(label_.ident, |self_| {
-                self_.sess.create_err(UnexpectedTokenAfterLabel {
-                    span: self_.token.span,
-                    remove_label: None,
-                    enclose_in_block: None,
-                })
-            });
+            let (lit, _) =
+                self.recover_unclosed_char(label_.ident, Parser::mk_token_lit_char, |self_| {
+                    self_.sess.create_err(errors::UnexpectedTokenAfterLabel {
+                        span: self_.token.span,
+                        remove_label: None,
+                        enclose_in_block: None,
+                    })
+                });
             consume_colon = false;
             Ok(self.mk_expr(lo, ExprKind::Lit(lit)))
         } else if !ate_colon
             && (self.check_noexpect(&TokenKind::Comma) || self.check_noexpect(&TokenKind::Gt))
         {
             // We're probably inside of a `Path<'a>` that needs a turbofish
-            self.sess.emit_err(UnexpectedTokenAfterLabel {
+            self.sess.emit_err(errors::UnexpectedTokenAfterLabel {
                 span: self.token.span,
                 remove_label: None,
                 enclose_in_block: None,
@@ -1561,7 +1562,7 @@ impl<'a> Parser<'a> {
             consume_colon = false;
             Ok(self.mk_expr_err(lo))
         } else {
-            let mut err = UnexpectedTokenAfterLabel {
+            let mut err = errors::UnexpectedTokenAfterLabel {
                 span: self.token.span,
                 remove_label: None,
                 enclose_in_block: None,
@@ -1587,7 +1588,7 @@ impl<'a> Parser<'a> {
                     vis.0
                 };
 
-                // Suggestion involves adding a (as of time of writing this, unstable) labeled block.
+                // Suggestion involves adding a labeled block.
                 //
                 // If there are no breaks that may use this label, suggest removing the label and
                 // recover to the unmodified expression.
@@ -1597,14 +1598,14 @@ impl<'a> Parser<'a> {
                     return expr;
                 }
 
-                err.enclose_in_block = Some(UnexpectedTokenAfterLabelSugg {
+                err.enclose_in_block = Some(errors::UnexpectedTokenAfterLabelSugg {
                     left: span.shrink_to_lo(),
                     right: span.shrink_to_hi(),
                 });
 
                 // Replace `'label: non_block_expr` with `'label: {non_block_expr}` in order to suppress future errors about `break 'label`.
                 let stmt = self.mk_stmt(span, StmtKind::Expr(expr));
-                let blk = self.mk_block(vec![stmt], BlockCheckMode::Default, span);
+                let blk = self.mk_block(thin_vec![stmt], BlockCheckMode::Default, span);
                 self.mk_expr(span, ExprKind::Block(blk, label))
             });
 
@@ -1613,7 +1614,7 @@ impl<'a> Parser<'a> {
         }?;
 
         if !ate_colon && consume_colon {
-            self.sess.emit_err(RequireColonAfterLabeledExpression {
+            self.sess.emit_err(errors::RequireColonAfterLabeledExpression {
                 span: expr.span,
                 label: lo,
                 label_end: lo.shrink_to_hi(),
@@ -1623,12 +1624,13 @@ impl<'a> Parser<'a> {
         Ok(expr)
     }
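`parse_expr_labeled`, renamed above, handles everything that may follow `'label:`. When the label is not followed by a loop or block expression, the recovery either suggests dropping the label (if nothing `break`s to it) or wraps the expression in a labeled block so later `break 'label` uses still resolve, and `RequireColonAfterLabeledExpression` insists on the missing `:`. A compiling example of the block form the wrap-in-block suggestion produces (label name illustrative):

    fn main() {
        // `'done: value` would be recovered to a labeled block like this one:
        let v = 'done: {
            if true {
                break 'done 1;
            }
            0
        };
        println!("{v}");
    }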
 
-    /// Emit an error when a char is parsed as a lifetime because of a missing quote
-    pub(super) fn recover_unclosed_char(
-        &mut self,
+    /// Emit an error when a char is parsed as a lifetime because of a missing quote.
+    pub(super) fn recover_unclosed_char<L>(
+        &self,
         lifetime: Ident,
-        err: impl FnOnce(&mut Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
-    ) -> ast::Lit {
+        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
+        err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
+    ) -> L {
         if let Some(mut diag) =
             self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
         {
@@ -1649,11 +1651,8 @@ impl<'a> Parser<'a> {
                 )
                 .emit();
         }
-        ast::Lit {
-            token_lit: token::Lit::new(token::LitKind::Char, lifetime.name, None),
-            kind: ast::LitKind::Char(lifetime.name.as_str().chars().next().unwrap_or('_')),
-            span: lifetime.span,
-        }
+        let name = lifetime.without_first_quote().name;
+        mk_lit_char(name, lifetime.span)
     }
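`recover_unclosed_char` is now generic over how the recovered literal is built (`mk_lit_char`), so both the token-literal and `MetaItemLit` callers introduced later in this diff can share it; it also strips the leading quote via `without_first_quote` and reuses the diagnostic stashed under `StashKey::LifetimeIsChar`. The input form it targets, shown with the closing quote restored (char value illustrative):

    fn main() {
        // `let c = 'x;` lexes `'x` as a lifetime; the parser recovers it as the
        // char literal below instead of reporting an unrelated lifetime error.
        let c = 'x';
        println!("{c}");
    }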
 
     /// Recover on the syntax `do catch { ... }` suggesting `try { ... }` instead.
@@ -1664,7 +1663,7 @@ impl<'a> Parser<'a> {
         self.bump(); // `catch`
 
         let span = lo.to(self.prev_token.span);
-        self.sess.emit_err(DoCatchSyntaxRemoved { span });
+        self.sess.emit_err(errors::DoCatchSyntaxRemoved { span });
 
         self.parse_try_block(lo)
     }
@@ -1675,7 +1674,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `"return" expr?`.
-    fn parse_return_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_return(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Ret(self.parse_expr_opt()?);
         let expr = self.mk_expr(lo.to(self.prev_token.span), kind);
@@ -1683,7 +1682,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse `"do" "yeet" expr?`.
-    fn parse_yeet_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_yeet(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
         self.bump(); // `do`
@@ -1705,16 +1704,16 @@ impl<'a> Parser<'a> {
     /// `break 'lbl: loop {}`); a labeled break with an unlabeled loop as its value
     /// expression only gets a warning for compatibility reasons; and a labeled break
     /// with a labeled loop does not even get a warning because there is no ambiguity.
-    fn parse_break_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_break(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let mut label = self.eat_label();
-        let kind = if label.is_some() && self.token == token::Colon {
+        let kind = if self.token == token::Colon && let Some(label) = label.take() {
             // The value expression can be a labeled loop, see issue #86948, e.g.:
             // `loop { break 'label: loop { break 'label 42; }; }`
-            let lexpr = self.parse_labeled_expr(label.take().unwrap(), true)?;
-            self.sess.emit_err(LabeledLoopInBreak {
+            let lexpr = self.parse_expr_labeled(label, true)?;
+            self.sess.emit_err(errors::LabeledLoopInBreak {
                 span: lexpr.span,
-                sub: WrapExpressionInParentheses {
+                sub: errors::WrapExpressionInParentheses {
                     left: lexpr.span.shrink_to_lo(),
                     right: lexpr.span.shrink_to_hi(),
                 },
@@ -1723,14 +1722,14 @@ impl<'a> Parser<'a> {
         } else if self.token != token::OpenDelim(Delimiter::Brace)
             || !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
         {
-            let expr = self.parse_expr_opt()?;
-            if let Some(ref expr) = expr {
+            let mut expr = self.parse_expr_opt()?;
+            if let Some(expr) = &mut expr {
                 if label.is_some()
                     && matches!(
                         expr.kind,
                         ExprKind::While(_, _, None)
                             | ExprKind::ForLoop(_, _, _, None)
-                            | ExprKind::Loop(_, None)
+                            | ExprKind::Loop(_, None, _)
                             | ExprKind::Block(_, None)
                     )
                 {
@@ -1742,7 +1741,19 @@ impl<'a> Parser<'a> {
                         BuiltinLintDiagnostics::BreakWithLabelAndLoop(expr.span),
                     );
                 }
+
+                // Recover `break label aaaaa`
+                if self.may_recover()
+                    && let ExprKind::Path(None, p) = &expr.kind
+                    && let [segment] = &*p.segments
+                    && let &ast::PathSegment { ident, args: None, .. } = segment
+                    && let Some(next) = self.parse_expr_opt()?
+                {
+                    label = Some(self.recover_ident_into_label(ident));
+                    *expr = next;
+                }
             }
+
             expr
         } else {
             None
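Two recoveries now live in `parse_expr_break`: the pre-existing one that asks for parentheses when the break value is itself a labeled loop (`LabeledLoopInBreak` with `WrapExpressionInParentheses`, earlier in this hunk), and the new one just above that re-reads `break label value` as a label missing its leading quote via `recover_ident_into_label`. A compiling sketch of what both inputs are steered toward (labels and values illustrative):

    fn main() {
        // `break outer 7;` (no quote) is recovered to the labeled form:
        let v = 'outer: loop {
            break 'outer 7;
        };
        // A labeled loop used directly as a break value gets a suggestion to
        // parenthesize it, as in:
        let w = loop {
            break ('inner: loop { break 'inner v + 1 });
        };
        println!("{v} {w}");
    }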
@@ -1751,8 +1762,25 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr)
     }
 
+    /// Parse `"continue" label?`.
+    fn parse_expr_continue(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
+        let mut label = self.eat_label();
+
+        // Recover `continue label` -> `continue 'label`
+        if self.may_recover()
+            && label.is_none()
+            && let Some((ident, _)) = self.token.ident()
+        {
+            self.bump();
+            label = Some(self.recover_ident_into_label(ident));
+        }
+
+        let kind = ExprKind::Continue(label);
+        Ok(self.mk_expr(lo.to(self.prev_token.span), kind))
+    }
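`continue` handling moves out of `parse_expr_bottom` (see the removed arm in an earlier hunk) into this dedicated method so it can share the quote-restoring recovery with `break`: a bare identifier after `continue` is re-read as a label through `recover_ident_into_label`. A compiling form of the recovered syntax (label name illustrative):

    fn main() {
        'outer: for i in 0..2 {
            for j in 0..2 {
                if j == 1 {
                    // `continue outer;` is recovered to the quoted label:
                    continue 'outer;
                }
                println!("{i} {j}");
            }
        }
    }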
+
     /// Parse `"yield" expr?`.
-    fn parse_yield_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_yield(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
         let kind = ExprKind::Yield(self.parse_expr_opt()?);
         let span = lo.to(self.prev_token.span);
@@ -1764,13 +1792,13 @@ impl<'a> Parser<'a> {
     /// Returns a string literal if the next token is a string literal.
     /// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
     /// and returns `None` if the next token is not literal at all.
-    pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
-        match self.parse_opt_lit() {
+    pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
+        match self.parse_opt_meta_item_lit() {
             Some(lit) => match lit.kind {
                 ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
                     style,
-                    symbol: lit.token_lit.symbol,
-                    suffix: lit.token_lit.suffix,
+                    symbol: lit.symbol,
+                    suffix: lit.suffix,
                     span: lit.span,
                     symbol_unescaped,
                 }),
@@ -1780,41 +1808,65 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> {
-        self.parse_opt_lit().ok_or(()).or_else(|()| {
-            if let token::Interpolated(inner) = &self.token.kind {
-                let expr = match inner.as_ref() {
-                    token::NtExpr(expr) => Some(expr),
-                    token::NtLiteral(expr) => Some(expr),
-                    _ => None,
-                };
-                if let Some(expr) = expr {
-                    if matches!(expr.kind, ExprKind::Err) {
-                        let mut err = InvalidInterpolatedExpression { span: self.token.span }
-                            .into_diagnostic(&self.sess.span_diagnostic);
-                        err.downgrade_to_delayed_bug();
-                        return Err(err);
-                    }
-                }
-            }
-            let token = self.token.clone();
-            let err = |self_: &mut Self| {
-                let msg = format!("unexpected token: {}", super::token_descr(&token));
-                self_.struct_span_err(token.span, &msg)
+    pub(crate) fn mk_token_lit_char(name: Symbol, span: Span) -> (token::Lit, Span) {
+        (token::Lit { symbol: name, suffix: None, kind: token::Char }, span)
+    }
+
+    fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit {
+        ast::MetaItemLit {
+            symbol: name,
+            suffix: None,
+            kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
+            span,
+        }
+    }
+
+    fn handle_missing_lit<L>(
+        &mut self,
+        mk_lit_char: impl FnOnce(Symbol, Span) -> L,
+    ) -> PResult<'a, L> {
+        if let token::Interpolated(inner) = &self.token.kind {
+            let expr = match inner.as_ref() {
+                token::NtExpr(expr) => Some(expr),
+                token::NtLiteral(expr) => Some(expr),
+                _ => None,
             };
-            // On an error path, eagerly consider a lifetime to be an unclosed character lit
-            if self.token.is_lifetime() {
-                let lt = self.expect_lifetime();
-                Ok(self.recover_unclosed_char(lt.ident, err))
-            } else {
-                Err(err(self))
+            if let Some(expr) = expr {
+                if matches!(expr.kind, ExprKind::Err) {
+                    let mut err = errors::InvalidInterpolatedExpression { span: self.token.span }
+                        .into_diagnostic(&self.sess.span_diagnostic);
+                    err.downgrade_to_delayed_bug();
+                    return Err(err);
+                }
             }
-        })
+        }
+        let token = self.token.clone();
+        let err = |self_: &Self| {
+            let msg = format!("unexpected token: {}", super::token_descr(&token));
+            self_.struct_span_err(token.span, &msg)
+        };
+        // On an error path, eagerly consider a lifetime to be an unclosed character lit
+        if self.token.is_lifetime() {
+            let lt = self.expect_lifetime();
+            Ok(self.recover_unclosed_char(lt.ident, mk_lit_char, err))
+        } else {
+            Err(err(self))
+        }
     }
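
For context, a rough example of the lifetime-as-unclosed-char-literal case handled on this error path; the exact message wording is approximate:

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
fn is_a(c: char) -> bool {
    match c {
        'a => true, // missing closing quote; recovered as the char literal `'a'`
        _ => false,
    }
}
```
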
 
-    /// Matches `lit = true | false | token_lit`.
-    /// Returns `None` if the next token is not a literal.
-    pub(super) fn parse_opt_lit(&mut self) -> Option<Lit> {
+    pub(super) fn parse_token_lit(&mut self) -> PResult<'a, (token::Lit, Span)> {
+        self.parse_opt_token_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit(Parser::mk_token_lit_char))
+    }
+
+    pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
+        self.parse_opt_meta_item_lit()
+            .ok_or(())
+            .or_else(|()| self.handle_missing_lit(Parser::mk_meta_item_lit_char))
+    }
+
+    fn recover_after_dot(&mut self) -> Option<Token> {
         let mut recovered = None;
         if self.token == token::Dot {
             // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where
@@ -1823,7 +1875,16 @@ impl<'a> Parser<'a> {
                 if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) =
                     next_token.kind
                 {
-                    if self.token.span.hi() == next_token.span.lo() {
+                    // If this integer looks like a float, then recover as such.
+                    //
+                    // We will never encounter the exponent part of a floating
+                    // point literal here, since there's no use of the exponent
+                    // syntax that also constitutes a valid integer, so we need
+                    // not check for that.
+                    if suffix.map_or(true, |s| s == sym::f32 || s == sym::f64)
+                        && symbol.as_str().chars().all(|c| c.is_numeric() || c == '_')
+                        && self.token.span.hi() == next_token.span.lo()
+                    {
                         let s = String::from("0.") + symbol.as_str();
                         let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
                         return Some(Token::new(kind, self.token.span.to(next_token.span)));
@@ -1833,107 +1894,60 @@ impl<'a> Parser<'a> {
             });
             if let Some(token) = &recovered {
                 self.bump();
-                self.sess.emit_err(FloatLiteralRequiresIntegerPart {
+                self.sess.emit_err(errors::FloatLiteralRequiresIntegerPart {
                     span: token.span,
                     correct: pprust::token_to_string(token).into_owned(),
                 });
             }
         }
 
-        let token = recovered.as_ref().unwrap_or(&self.token);
-        match Lit::from_token(token) {
-            Ok(lit) => {
-                self.bump();
-                Some(lit)
-            }
-            Err(LitError::NotLiteral) => None,
-            Err(err) => {
-                let span = token.span;
-                let token::Literal(lit) = token.kind else {
-                    unreachable!();
-                };
-                self.bump();
-                self.report_lit_error(err, lit, span);
-                // Pack possible quotes and prefixes from the original literal into
-                // the error literal's symbol so they can be pretty-printed faithfully.
-                let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
-                let symbol = Symbol::intern(&suffixless_lit.to_string());
-                let lit = token::Lit::new(token::Err, symbol, lit.suffix);
-                Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
-            }
-        }
+        recovered
     }
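
`recover_after_dot` rewrites a float literal written without its integer part, while the added suffix and digit checks leave non-float forms such as `.0u32` alone. A rough example (wording approximate):

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
let half = .5; // error: float literals must have an integer part
               // suggested fix: `0.5`
```
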
 
-    fn report_lit_error(&self, err: LitError, lit: token::Lit, span: Span) {
-        // Checks if `s` looks like i32 or u1234 etc.
-        fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
-            s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
-        }
-
-        // Try to lowercase the prefix if it's a valid base prefix.
-        fn fix_base_capitalisation(s: &str) -> Option<String> {
-            if let Some(stripped) = s.strip_prefix('B') {
-                Some(format!("0b{stripped}"))
-            } else if let Some(stripped) = s.strip_prefix('O') {
-                Some(format!("0o{stripped}"))
-            } else if let Some(stripped) = s.strip_prefix('X') {
-                Some(format!("0x{stripped}"))
-            } else {
-                None
-            }
-        }
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    pub(super) fn parse_opt_token_lit(&mut self) -> Option<(token::Lit, Span)> {
+        let recovered = self.recover_after_dot();
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        let span = token.span;
+        token::Lit::from_token(token).map(|token_lit| {
+            self.bump();
+            (token_lit, span)
+        })
+    }
 
-        let token::Lit { kind, suffix, .. } = lit;
-        match err {
-            // `NotLiteral` is not an error by itself, so we don't report
-            // it and give the parser opportunity to try something else.
-            LitError::NotLiteral => {}
-            // `LexerError` *is* an error, but it was already reported
-            // by lexer, so here we don't report it the second time.
-            LitError::LexerError => {}
-            LitError::InvalidSuffix => {
-                if let Some(suffix) = suffix {
-                    self.sess.emit_err(InvalidLiteralSuffix {
-                        span,
-                        kind: format!("{}", kind.descr()),
-                        suffix,
-                    });
-                }
-            }
-            LitError::InvalidIntSuffix => {
-                let suf = suffix.expect("suffix error with no suffix");
-                let suf = suf.as_str();
-                if looks_like_width_suffix(&['i', 'u'], &suf) {
-                    // If it looks like a width, try to be helpful.
-                    self.sess.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() });
-                } else if let Some(fixed) = fix_base_capitalisation(suf) {
-                    self.sess.emit_err(InvalidNumLiteralBasePrefix { span, fixed });
-                } else {
-                    self.sess.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() });
-                }
-            }
-            LitError::InvalidFloatSuffix => {
-                let suf = suffix.expect("suffix error with no suffix");
-                let suf = suf.as_str();
-                if looks_like_width_suffix(&['f'], suf) {
-                    // If it looks like a width, try to be helpful.
-                    self.sess
-                        .emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() });
-                } else {
-                    self.sess.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() });
+    /// Matches `lit = true | false | token_lit`.
+    /// Returns `None` if the next token is not a literal.
+    pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
+        let recovered = self.recover_after_dot();
+        let token = recovered.as_ref().unwrap_or(&self.token);
+        match token::Lit::from_token(token) {
+            Some(token_lit) => {
+                match MetaItemLit::from_token_lit(token_lit, token.span) {
+                    Ok(lit) => {
+                        self.bump();
+                        Some(lit)
+                    }
+                    Err(err) => {
+                        let span = token.span;
+                        let token::Literal(lit) = token.kind else {
+                            unreachable!();
+                        };
+                        self.bump();
+                        report_lit_error(&self.sess, err, lit, span);
+                        // Pack possible quotes and prefixes from the original literal into
+                        // the error literal's symbol so they can be pretty-printed faithfully.
+                        let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
+                        let symbol = Symbol::intern(&suffixless_lit.to_string());
+                        let lit = token::Lit::new(token::Err, symbol, lit.suffix);
+                        Some(
+                            MetaItemLit::from_token_lit(lit, span)
+                                .unwrap_or_else(|_| unreachable!()),
+                        )
+                    }
                 }
             }
-            LitError::NonDecimalFloat(base) => {
-                match base {
-                    16 => self.sess.emit_err(HexadecimalFloatLiteralNotSupported { span }),
-                    8 => self.sess.emit_err(OctalFloatLiteralNotSupported { span }),
-                    2 => self.sess.emit_err(BinaryFloatLiteralNotSupported { span }),
-                    _ => unreachable!(),
-                };
-            }
-            LitError::IntTooLarge => {
-                self.sess.emit_err(IntLiteralTooLarge { span });
-            }
+            None => None,
         }
     }
 
@@ -1941,13 +1955,17 @@ impl<'a> Parser<'a> {
         if [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suffix) {
             // #59553: warn instead of reject out of hand to allow the fix to percolate
             // through the ecosystem when people fix their macros
-            self.sess.emit_warning(InvalidLiteralSuffixOnTupleIndex {
+            self.sess.emit_warning(errors::InvalidLiteralSuffixOnTupleIndex {
                 span,
                 suffix,
                 exception: Some(()),
             });
         } else {
-            self.sess.emit_err(InvalidLiteralSuffixOnTupleIndex { span, suffix, exception: None });
+            self.sess.emit_err(errors::InvalidLiteralSuffixOnTupleIndex {
+                span,
+                suffix,
+                exception: None,
+            });
         }
     }
 
@@ -1958,8 +1976,8 @@ impl<'a> Parser<'a> {
 
         let lo = self.token.span;
         let minus_present = self.eat(&token::BinOp(token::Minus));
-        let lit = self.parse_lit()?;
-        let expr = self.mk_expr(lit.span, ExprKind::Lit(lit));
+        let (token_lit, span) = self.parse_token_lit()?;
+        let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
 
         if minus_present {
             Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_unary(UnOp::Neg, expr)))
@@ -1979,11 +1997,11 @@ impl<'a> Parser<'a> {
     /// expression.
     fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
         let mut snapshot = self.create_snapshot_for_diagnostic();
-        match snapshot.parse_array_or_repeat_expr(Delimiter::Brace) {
+        match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
             Ok(arr) => {
-                self.sess.emit_err(ArrayBracketsInsteadOfSpaces {
+                self.sess.emit_err(errors::ArrayBracketsInsteadOfSpaces {
                     span: arr.span,
-                    sub: ArrayBracketsInsteadOfSpacesSugg {
+                    sub: errors::ArrayBracketsInsteadOfSpacesSugg {
                         left: lo,
                         right: snapshot.prev_token.span,
                     },
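
For reference, roughly the input that `maybe_suggest_brackets_instead_of_braces` is aimed at; the wording is approximate:

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
let nums = {1, 2, 3}; // error: this is a block expression, not an array
                      // suggested fix: `[1, 2, 3]`
```
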
@@ -2029,7 +2047,7 @@ impl<'a> Parser<'a> {
                         .span_to_snippet(snapshot.token.span)
                         .map_or(false, |snippet| snippet == "]") =>
                 {
-                    return Err(MissingSemicolonBeforeArray {
+                    return Err(errors::MissingSemicolonBeforeArray {
                         open_delim: open_delim_span,
                         semicolon: prev_span.shrink_to_hi(),
                     }.into_diagnostic(&self.sess.span_diagnostic));
@@ -2042,7 +2060,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a block or unsafe block.
-    pub(super) fn parse_block_expr(
+    pub(super) fn parse_expr_block(
         &mut self,
         opt_label: Option<Label>,
         lo: Span,
@@ -2055,13 +2073,13 @@ impl<'a> Parser<'a> {
         }
 
         if self.token.is_whole_block() {
-            self.sess.emit_err(InvalidBlockMacroSegment {
+            self.sess.emit_err(errors::InvalidBlockMacroSegment {
                 span: self.token.span,
                 context: lo.to(self.token.span),
             });
         }
 
-        let (attrs, blk) = self.parse_block_common(lo, blk_mode)?;
+        let (attrs, blk) = self.parse_block_common(lo, blk_mode, true)?;
         Ok(self.mk_expr_with_attrs(blk.span, ExprKind::Block(blk, opt_label), attrs))
     }
 
@@ -2072,7 +2090,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a closure expression (e.g., `move |args| expr`).
-    fn parse_closure_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_closure(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
 
         let binder = if self.check_keyword(kw::For) {
@@ -2082,11 +2100,13 @@ impl<'a> Parser<'a> {
 
             self.sess.gated_spans.gate(sym::closure_lifetime_binder, span);
 
-            ClosureBinder::For { span, generic_params: P::from_vec(lifetime_defs) }
+            ClosureBinder::For { span, generic_params: lifetime_defs }
         } else {
             ClosureBinder::NotPresent
         };
 
+        let constness = self.parse_closure_constness(Case::Sensitive);
+
         let movability =
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
 
@@ -2097,9 +2117,9 @@ impl<'a> Parser<'a> {
         };
 
         let capture_clause = self.parse_capture_clause()?;
-        let decl = self.parse_fn_block_decl()?;
+        let (fn_decl, fn_arg_span) = self.parse_fn_block_decl()?;
         let decl_hi = self.prev_token.span;
-        let mut body = match decl.output {
+        let mut body = match fn_decl.output {
             FnRetTy::Default(_) => {
                 let restrictions = self.restrictions - Restrictions::STMT_EXPR;
                 self.parse_expr_res(restrictions, None)?
@@ -2107,7 +2127,7 @@ impl<'a> Parser<'a> {
             _ => {
                 // If an explicit return type is given, require a block to appear (RFC 968).
                 let body_lo = self.token.span;
-                self.parse_block_expr(None, body_lo, BlockCheckMode::Default)?
+                self.parse_expr_block(None, body_lo, BlockCheckMode::Default)?
             }
         };
 
@@ -2117,13 +2137,8 @@ impl<'a> Parser<'a> {
         }
 
         if self.token.kind == TokenKind::Semi
-            && matches!(self.token_cursor.frame.delim_sp, Some((Delimiter::Parenthesis, _)))
-            // HACK: This is needed so we can detect whether we're inside a macro,
-            // where regular assumptions about what tokens can follow other tokens
-            // don't necessarily apply.
+            && matches!(self.token_cursor.stack.last(), Some((_, Delimiter::Parenthesis, _)))
             && self.may_recover()
-            // FIXME(Nilstrieb): Remove this check once `may_recover` actually stops recovery
-            && self.subparser_name.is_none()
         {
             // It is likely that the closure body is a block but where the
             // braces have been removed. We will recover and eat the next
@@ -2135,15 +2150,17 @@ impl<'a> Parser<'a> {
 
         let closure = self.mk_expr(
             lo.to(body.span),
-            ExprKind::Closure(
+            ExprKind::Closure(Box::new(ast::Closure {
                 binder,
                 capture_clause,
+                constness,
                 asyncness,
                 movability,
-                decl,
+                fn_decl,
                 body,
-                lo.to(decl_hi),
-            ),
+                fn_decl_span: lo.to(decl_hi),
+                fn_arg_span,
+            })),
         );
 
         // Disable recovery for closure body
@@ -2160,7 +2177,7 @@ impl<'a> Parser<'a> {
             // Check for `move async` and recover
             if self.check_keyword(kw::Async) {
                 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
-                Err(AsyncMoveOrderIncorrect { span: move_async_span }
+                Err(errors::AsyncMoveOrderIncorrect { span: move_async_span }
                     .into_diagnostic(&self.sess.span_diagnostic))
             } else {
                 Ok(CaptureBy::Value)
@@ -2171,9 +2188,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the `|arg, arg|` header of a closure.
-    fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
+    fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
+        let arg_start = self.token.span.lo();
+
         let inputs = if self.eat(&token::OrOr) {
-            Vec::new()
+            ThinVec::new()
         } else {
             self.expect(&token::BinOp(token::Or))?;
             let args = self
@@ -2187,10 +2206,11 @@ impl<'a> Parser<'a> {
             self.expect_or()?;
             args
         };
+        let arg_span = self.prev_token.span.with_lo(arg_start);
         let output =
             self.parse_ret_ty(AllowPlus::Yes, RecoverQPath::Yes, RecoverReturnSign::Yes)?;
 
-        Ok(P(FnDecl { inputs, output }))
+        Ok((P(FnDecl { inputs, output }), arg_span))
     }
 
     /// Parses a parameter in a closure header (e.g., `|arg, arg|`).
@@ -2198,7 +2218,7 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         let attrs = self.parse_outer_attributes()?;
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
-            let pat = this.parse_pat_no_top_alt(PARAM_EXPECTED)?;
+            let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName))?;
             let ty = if this.eat(&token::Colon) {
                 this.parse_ty()?
             } else {
@@ -2220,9 +2240,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an `if` expression (`if` token already eaten).
-    fn parse_if_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_if(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.prev_token.span;
-        let cond = self.parse_cond_expr()?;
+        let cond = self.parse_expr_cond()?;
         self.parse_if_after_cond(lo, cond)
     }
 
@@ -2235,16 +2255,17 @@ impl<'a> Parser<'a> {
             let block = match &mut cond.kind {
                 ExprKind::Binary(Spanned { span: binop_span, .. }, _, right)
                     if let ExprKind::Block(_, None) = right.kind => {
-                        self.sess.emit_err(IfExpressionMissingThenBlock {
+                        self.sess.emit_err(errors::IfExpressionMissingThenBlock {
                             if_span: lo,
-                            sub: IfExpressionMissingThenBlockSub::UnfinishedCondition(
-                                cond_span.shrink_to_lo().to(*binop_span)
-                            ),
+                            missing_then_block_sub:
+                                errors::IfExpressionMissingThenBlockSub::UnfinishedCondition(
+                                    cond_span.shrink_to_lo().to(*binop_span),
+                                ),
+                            let_else_sub: None,
                         });
                         std::mem::replace(right, this.mk_expr_err(binop_span.shrink_to_hi()))
                     },
                 ExprKind::Block(_, None) => {
-                    self.sess.emit_err(IfExpressionMissingCondition {
+                    self.sess.emit_err(errors::IfExpressionMissingCondition {
                         if_span: lo.shrink_to_hi(),
                         block_span: self.sess.source_map().start_point(cond_span),
                     });
@@ -2265,20 +2286,27 @@ impl<'a> Parser<'a> {
             if let Some(block) = recover_block_from_condition(self) {
                 block
             } else {
-                self.sess.emit_err(IfExpressionMissingThenBlock {
+                let let_else_sub = matches!(cond.kind, ExprKind::Let(..))
+                    .then(|| errors::IfExpressionLetSomeSub { if_span: lo.until(cond_span) });
+
+                self.sess.emit_err(errors::IfExpressionMissingThenBlock {
                     if_span: lo,
-                    sub: IfExpressionMissingThenBlockSub::AddThenBlock(cond_span.shrink_to_hi()),
+                    missing_then_block_sub: errors::IfExpressionMissingThenBlockSub::AddThenBlock(
+                        cond_span.shrink_to_hi(),
+                    ),
+                    let_else_sub,
                 });
                 self.mk_block_err(cond_span.shrink_to_hi())
             }
         } else {
-            let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+            let attrs = self.parse_outer_attributes()?; // For recovery.
             let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
                 self.parse_block()?
             } else {
                 if let Some(block) = recover_block_from_condition(self) {
                     block
                 } else {
+                    self.error_on_extra_if(&cond)?;
                     // Parse block, which will always fail, but we can add a nice note to the error
                     self.parse_block().map_err(|mut err| {
                         err.span_note(
@@ -2289,15 +2317,15 @@ impl<'a> Parser<'a> {
                     })?
                 }
             };
-            self.error_on_if_block_attrs(lo, false, block.span, &attrs);
+            self.error_on_if_block_attrs(lo, false, block.span, attrs);
             block
         };
-        let els = if self.eat_keyword(kw::Else) { Some(self.parse_else_expr()?) } else { None };
+        let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None };
         Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
     }
 
     /// Parses the condition of an `if` or `while` expression.
-    fn parse_cond_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_cond(&mut self) -> PResult<'a, P<Expr>> {
         let cond =
             self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;
 
@@ -2310,7 +2338,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a `let $pat = $expr` pseudo-expression.
-    fn parse_let_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_let(&mut self) -> PResult<'a, P<Expr>> {
         // This is an *approximate* heuristic that detects if `let` chains are
         // being parsed in the right position. It's approximate because it
         // doesn't deny all invalid `let` expressions, just completely wrong usages.
@@ -2319,7 +2347,7 @@ impl<'a> Parser<'a> {
             TokenKind::AndAnd | TokenKind::Ident(kw::If, _) | TokenKind::Ident(kw::While, _)
         );
         if !self.restrictions.contains(Restrictions::ALLOW_LET) || not_in_chain {
-            self.sess.emit_err(ExpectedExpressionFoundLet { span: self.token.span });
+            self.sess.emit_err(errors::ExpectedExpressionFoundLet { span: self.token.span });
         }
 
         self.bump(); // Eat `let` token
@@ -2331,7 +2359,7 @@ impl<'a> Parser<'a> {
             CommaRecoveryMode::LikelyTuple,
         )?;
         if self.token == token::EqEq {
-            self.sess.emit_err(ExpectedEqForLetExpr {
+            self.sess.emit_err(errors::ExpectedEqForLetExpr {
                 span: self.token.span,
                 sugg_span: self.token.span,
             });
@@ -2340,7 +2368,7 @@ impl<'a> Parser<'a> {
             self.expect(&token::Eq)?;
         }
         let expr = self.with_res(self.restrictions | Restrictions::NO_STRUCT_LITERAL, |this| {
-            this.parse_assoc_expr_with(1 + prec_let_scrutinee_needs_par(), None.into())
+            this.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), None.into())
         })?;
         let span = lo.to(expr.span);
         self.sess.gated_spans.gate(sym::let_chains, span);
@@ -2348,11 +2376,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses an `else { ... }` expression (`else` token already eaten).
-    fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> {
         let else_span = self.prev_token.span; // `else`
-        let attrs = self.parse_outer_attributes()?.take_for_recovery(); // For recovery.
+        let attrs = self.parse_outer_attributes()?; // For recovery.
         let expr = if self.eat_keyword(kw::If) {
-            self.parse_if_expr()?
+            self.parse_expr_if()?
         } else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
             self.parse_simple_block()?
         } else {
@@ -2366,7 +2394,7 @@ impl<'a> Parser<'a> {
                     if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
                         && classify::expr_requires_semi_to_be_stmt(&cond) =>
                 {
-                    self.sess.emit_err(ExpectedElseBlock {
+                    self.sess.emit_err(errors::ExpectedElseBlock {
                         first_tok_span,
                         first_tok,
                         else_span,
@@ -2385,7 +2413,7 @@ impl<'a> Parser<'a> {
                 },
             }
         };
-        self.error_on_if_block_attrs(else_span, true, expr.span, &attrs);
+        self.error_on_if_block_attrs(else_span, true, expr.span, attrs);
         Ok(expr)
     }
 
@@ -2394,14 +2422,19 @@ impl<'a> Parser<'a> {
         ctx_span: Span,
         is_ctx_else: bool,
         branch_span: Span,
-        attrs: &[ast::Attribute],
+        attrs: AttrWrapper,
     ) {
+        if attrs.is_empty() {
+            return;
+        }
+
+        let attrs: &[ast::Attribute] = &attrs.take_for_recovery(self.sess);
         let (attributes, last) = match attrs {
             [] => return,
             [x0 @ xn] | [x0, .., xn] => (x0.span.to(xn.span), xn.span),
         };
         let ctx = if is_ctx_else { "else" } else { "if" };
-        self.sess.emit_err(OuterAttributeNotAllowedOnIfElse {
+        self.sess.emit_err(errors::OuterAttributeNotAllowedOnIfElse {
             last,
             branch_span,
             ctx_span,
@@ -2410,8 +2443,18 @@ impl<'a> Parser<'a> {
         });
     }
 
+    fn error_on_extra_if(&mut self, cond: &P<Expr>) -> PResult<'a, ()> {
+        if let ExprKind::Binary(Spanned { span: binop_span, node: binop}, _, right) = &cond.kind &&
+            let BinOpKind::And = binop &&
+            let ExprKind::If(cond, ..) = &right.kind {
+                    Err(self.sess.create_err(errors::UnexpectedIfWithIf(binop_span.shrink_to_hi().to(cond.span.shrink_to_lo()))))
+            } else {
+                Ok(())
+            }
+    }
+
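
A sketch of the doubled-`if` condition that `error_on_extra_if` reports; identifiers are hypothetical and the wording is approximate:

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
fn f(a: bool, b: bool) {
    if a && if b { return; }
    // error: unexpected `if` in the condition expression
    // help: remove the `if`
}
```
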
     /// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
-    fn parse_for_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+    fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
         // Record whether we are about to parse `for (`.
         // This is used below for recovery in case of `for ( $stuff ) $block`
         // in which case we will suggest `for $stuff $block`.
@@ -2434,28 +2477,61 @@ impl<'a> Parser<'a> {
 
         let pat = self.recover_parens_around_for_head(pat, begin_paren);
 
+        // Recover from missing expression in `for` loop
+        if matches!(expr.kind, ExprKind::Block(..))
+            && !matches!(self.token.kind, token::OpenDelim(token::Delimiter::Brace))
+            && self.may_recover()
+        {
+            self.sess
+                .emit_err(errors::MissingExpressionInForLoop { span: expr.span.shrink_to_lo() });
+            let err_expr = self.mk_expr(expr.span, ExprKind::Err);
+            let block = self.mk_block(thin_vec![], BlockCheckMode::Default, self.prev_token.span);
+            return Ok(self.mk_expr(
+                lo.to(self.prev_token.span),
+                ExprKind::ForLoop(pat, err_expr, block, opt_label),
+            ));
+        }
+
         let (attrs, loop_block) = self.parse_inner_attrs_and_block()?;
 
         let kind = ExprKind::ForLoop(pat, expr, loop_block, opt_label);
+
+        self.recover_loop_else("for", lo)?;
+
         Ok(self.mk_expr_with_attrs(lo.to(self.prev_token.span), kind, attrs))
     }
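
The `for`-loop recovery above targets a loop body written where the iterated expression should be, roughly as follows (wording approximate):

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
fn main() {
    for _ in {}
    // error: missing expression to iterate on in `for` loop
}
```
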
 
+    /// Recovers from an `else` clause after a loop (`for...else`, `while...else`)
+    fn recover_loop_else(&mut self, loop_kind: &'static str, loop_kw: Span) -> PResult<'a, ()> {
+        if self.token.is_keyword(kw::Else) && self.may_recover() {
+            let else_span = self.token.span;
+            self.bump();
+            let else_clause = self.parse_expr_else()?;
+            self.sess.emit_err(errors::LoopElseNotSupported {
+                span: else_span.to(else_clause.span),
+                loop_kind,
+                loop_kw,
+            });
+        }
+        Ok(())
+    }
+
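
`recover_loop_else` lets the parser consume and then reject Python-style `else` blocks on loops, e.g. (wording approximate):

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
fn drain(mut it: std::vec::IntoIter<i32>) {
    while let Some(x) = it.next() {
        let _ = x;
    } else {
        // error: `while...else` loops are not supported
    }
}
```
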
     fn error_missing_in_for_loop(&mut self) {
         let (span, sub): (_, fn(_) -> _) = if self.token.is_ident_named(sym::of) {
             // Possibly using JS syntax (#75311).
             let span = self.token.span;
             self.bump();
-            (span, MissingInInForLoopSub::InNotOf)
+            (span, errors::MissingInInForLoopSub::InNotOf)
         } else {
-            (self.prev_token.span.between(self.token.span), MissingInInForLoopSub::AddIn)
+            (self.prev_token.span.between(self.token.span), errors::MissingInInForLoopSub::AddIn)
         };
 
-        self.sess.emit_err(MissingInInForLoop { span, sub: sub(span) });
+        self.sess.emit_err(errors::MissingInInForLoop { span, sub: sub(span) });
     }
 
     /// Parses a `while` or `while let` expression (`while` token already eaten).
-    fn parse_while_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
-        let cond = self.parse_cond_expr().map_err(|mut err| {
+    fn parse_expr_while(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let cond = self.parse_expr_cond().map_err(|mut err| {
             err.span_label(lo, "while parsing the condition of this `while` expression");
             err
         })?;
@@ -2464,6 +2540,9 @@ impl<'a> Parser<'a> {
             err.span_label(cond.span, "this `while` condition successfully parsed");
             err
         })?;
+
+        self.recover_loop_else("while", lo)?;
+
         Ok(self.mk_expr_with_attrs(
             lo.to(self.prev_token.span),
             ExprKind::While(cond, body, opt_label),
@@ -2472,11 +2551,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses `loop { ... }` (`loop` token already eaten).
-    fn parse_loop_expr(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+    fn parse_expr_loop(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
+        let loop_span = self.prev_token.span;
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
+        self.recover_loop_else("loop", lo)?;
         Ok(self.mk_expr_with_attrs(
             lo.to(self.prev_token.span),
-            ExprKind::Loop(body, opt_label),
+            ExprKind::Loop(body, opt_label, loop_span),
             attrs,
         ))
     }
@@ -2489,7 +2570,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses a `match ... { ... }` expression (`match` token already eaten).
-    fn parse_match_expr(&mut self) -> PResult<'a, P<Expr>> {
+    fn parse_expr_match(&mut self) -> PResult<'a, P<Expr>> {
         let match_span = self.prev_token.span;
         let lo = self.prev_token.span;
         let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
@@ -2511,7 +2592,7 @@ impl<'a> Parser<'a> {
         }
         let attrs = self.parse_inner_attributes()?;
 
-        let mut arms: Vec<Arm> = Vec::new();
+        let mut arms = ThinVec::new();
         while self.token != token::CloseDelim(Delimiter::Brace) {
             match self.parse_arm() {
                 Ok(arm) => arms.push(arm),
@@ -2553,17 +2634,17 @@ impl<'a> Parser<'a> {
         let err = |this: &Parser<'_>, stmts: Vec<ast::Stmt>| {
             let span = stmts[0].span.to(stmts[stmts.len() - 1].span);
 
-            this.sess.emit_err(MatchArmBodyWithoutBraces {
+            this.sess.emit_err(errors::MatchArmBodyWithoutBraces {
                 statements: span,
                 arrow: arrow_span,
                 num_statements: stmts.len(),
                 sub: if stmts.len() > 1 {
-                    MatchArmBodyWithoutBracesSugg::AddBraces {
+                    errors::MatchArmBodyWithoutBracesSugg::AddBraces {
                         left: span.shrink_to_lo(),
                         right: span.shrink_to_hi(),
                     }
                 } else {
-                    MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
+                    errors::MatchArmBodyWithoutBracesSugg::UseComma { semicolon: semi_sp }
                 },
             });
             this.mk_expr_err(span)
@@ -2619,8 +2700,8 @@ impl<'a> Parser<'a> {
         // Used to check the `let_chains` and `if_let_guard` features mostly by scanning
         // `&&` tokens.
         fn check_let_expr(expr: &Expr) -> (bool, bool) {
-            match expr.kind {
-                ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, ref lhs, ref rhs) => {
+            match &expr.kind {
+                ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
                     let lhs_rslt = check_let_expr(lhs);
                     let rhs_rslt = check_let_expr(rhs);
                     (lhs_rslt.0 || rhs_rslt.0, false)
@@ -2669,6 +2750,14 @@ impl<'a> Parser<'a> {
                     );
                     err.emit();
                     this.bump();
+                } else if matches!(
+                    (&this.prev_token.kind, &this.token.kind),
+                    (token::DotDotEq, token::Gt)
+                ) {
+                    // `error_inclusive_range_match_arrow` handles cases like `0..=> {}`,
+                    // so we suppress the error here
+                    err.delay_as_bug();
+                    this.bump();
                 } else {
                     return Err(err);
                 }
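
The delayed bug above covers arms where `error_inclusive_range_match_arrow` has already reported the underlying problem, e.g. (wording approximate):

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
match 5u32 {
    0..=> {} // `..=` is lexed as one token, so this is first reported as an
             // inclusive range with no end; the follow-up `=>` error is delayed
    _ => {}
}
```
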
@@ -2746,7 +2835,7 @@ impl<'a> Parser<'a> {
                                 .is_ok();
                             if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
                                 err.cancel();
-                                this.sess.emit_err(MissingCommaAfterMatchArm {
+                                this.sess.emit_err(errors::MissingCommaAfterMatchArm {
                                     span: hi.shrink_to_hi(),
                                 });
                                 return Ok(true);
@@ -2778,7 +2867,7 @@ impl<'a> Parser<'a> {
     fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
         if self.eat_keyword(kw::Catch) {
-            Err(CatchAfterTry { span: self.prev_token.span }
+            Err(errors::CatchAfterTry { span: self.prev_token.span }
                 .into_diagnostic(&self.sess.span_diagnostic))
         } else {
             let span = span_lo.to(body.span);
@@ -2843,7 +2932,7 @@ impl<'a> Parser<'a> {
 
     fn maybe_parse_struct_expr(
         &mut self,
-        qself: Option<&ast::QSelf>,
+        qself: &Option<P<ast::QSelf>>,
         path: &ast::Path,
     ) -> Option<PResult<'a, P<Expr>>> {
         let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
@@ -2851,12 +2940,12 @@ impl<'a> Parser<'a> {
             if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
                 return Some(Err(err));
             }
-            let expr = self.parse_struct_expr(qself.cloned(), path.clone(), true);
+            let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
             if let (Ok(expr), false) = (&expr, struct_allowed) {
                 // This is a struct literal, but we can't accept them here.
-                self.sess.emit_err(StructLiteralNotAllowedHere {
+                self.sess.emit_err(errors::StructLiteralNotAllowedHere {
                     span: expr.span,
-                    sub: StructLiteralNotAllowedHereSugg {
+                    sub: errors::StructLiteralNotAllowedHereSugg {
                         left: path.span.shrink_to_lo(),
                         right: expr.span.shrink_to_hi(),
                     },
@@ -2872,15 +2961,15 @@ impl<'a> Parser<'a> {
         pth: ast::Path,
         recover: bool,
         close_delim: Delimiter,
-    ) -> PResult<'a, (Vec<ExprField>, ast::StructRest, bool)> {
-        let mut fields = Vec::new();
+    ) -> PResult<'a, (ThinVec<ExprField>, ast::StructRest, bool)> {
+        let mut fields = ThinVec::new();
         let mut base = ast::StructRest::None;
         let mut recover_async = false;
 
         let mut async_block_err = |e: &mut Diagnostic, span: Span| {
             recover_async = true;
-            e.span_label(span, "`async` blocks are only allowed in Rust 2018 or later");
-            e.help_use_latest_edition();
+            errors::AsyncBlockIn2015 { span }.add_to_diagnostic(e);
+            errors::HelpUseLatestEdition::new().add_to_diagnostic(e);
         };
 
         while self.token != token::CloseDelim(close_delim) {
@@ -2980,9 +3069,9 @@ impl<'a> Parser<'a> {
     }
 
     /// Precondition: already parsed the '{'.
-    pub(super) fn parse_struct_expr(
+    pub(super) fn parse_expr_struct(
         &mut self,
-        qself: Option<ast::QSelf>,
+        qself: Option<P<ast::QSelf>>,
         pth: ast::Path,
         recover: bool,
     ) -> PResult<'a, P<Expr>> {
@@ -3024,7 +3113,7 @@ impl<'a> Parser<'a> {
         if self.token != token::Comma {
             return;
         }
-        self.sess.emit_err(CommaAfterBaseStruct {
+        self.sess.emit_err(errors::CommaAfterBaseStruct {
             span: span.to(self.prev_token.span),
             comma: self.token.span,
         });
@@ -3037,15 +3126,35 @@ impl<'a> Parser<'a> {
         {
             // recover from typo of `...`, suggest `..`
             let span = self.prev_token.span;
-            self.sess.emit_err(MissingDotDot { token_span: span, sugg_span: span });
+            self.sess.emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
             return true;
         }
         false
     }
 
+    /// Converts an ident into 'label and emits an "expected a label, found an identifier" error.
+    fn recover_ident_into_label(&mut self, ident: Ident) -> Label {
+        // Convert `label` -> `'label`,
+        // so that nameres doesn't complain about non-existing label
+        let label = format!("'{}", ident.name);
+        let ident = Ident { name: Symbol::intern(&label), span: ident.span };
+
+        self.struct_span_err(ident.span, "expected a label, found an identifier")
+            .span_suggestion(
+                ident.span,
+                "labels start with a tick",
+                label,
+                Applicability::MachineApplicable,
+            )
+            .emit();
+
+        Label { ident }
+    }
+
     /// Parses `ident (COLON expr)?`.
     fn parse_expr_field(&mut self) -> PResult<'a, ExprField> {
         let attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
 
@@ -3085,18 +3194,18 @@ impl<'a> Parser<'a> {
             return;
         }
 
-        self.sess.emit_err(EqFieldInit {
+        self.sess.emit_err(errors::EqFieldInit {
             span: self.token.span,
             eq: field_name.span.shrink_to_hi().to(self.token.span),
         });
     }
 
     fn err_dotdotdot_syntax(&self, span: Span) {
-        self.sess.emit_err(DotDotDot { span });
+        self.sess.emit_err(errors::DotDotDot { span });
     }
 
     fn err_larrow_operator(&self, span: Span) {
-        self.sess.emit_err(LeftArrowOperator { span });
+        self.sess.emit_err(errors::LeftArrowOperator { span });
     }
 
     fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind {
@@ -3110,7 +3219,7 @@ impl<'a> Parser<'a> {
         limits: RangeLimits,
     ) -> ExprKind {
         if end.is_none() && limits == RangeLimits::Closed {
-            self.inclusive_range_with_incorrect_end(self.prev_token.span);
+            self.inclusive_range_with_incorrect_end();
             ExprKind::Err
         } else {
             ExprKind::Range(start, end, limits)
@@ -3129,7 +3238,7 @@ impl<'a> Parser<'a> {
         ExprKind::Index(expr, idx)
     }
 
-    fn mk_call(&self, f: P<Expr>, args: Vec<P<Expr>>) -> ExprKind {
+    fn mk_call(&self, f: P<Expr>, args: ThinVec<P<Expr>>) -> ExprKind {
         ExprKind::Call(f, args)
     }
 
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index fa75670b2ed..8d0f168e09d 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -1,11 +1,25 @@
+use crate::errors::{
+    MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters,
+    UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
+    WhereClauseBeforeTupleStructBodySugg,
+};
+
 use super::{ForceCollect, Parser, TrailingToken};
 
+use ast::token::Delimiter;
 use rustc_ast::token;
 use rustc_ast::{
     self as ast, AttrVec, GenericBounds, GenericParam, GenericParamKind, TyKind, WhereClause,
 };
 use rustc_errors::{Applicability, PResult};
-use rustc_span::symbol::kw;
+use rustc_span::symbol::{kw, Ident};
+use rustc_span::Span;
+use thin_vec::ThinVec;
+
+enum PredicateOrStructBody {
+    Predicate(ast::WherePredicate),
+    StructBody(ThinVec<ast::FieldDef>),
+}
 
 impl<'a> Parser<'a> {
     /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
@@ -108,8 +122,8 @@ impl<'a> Parser<'a> {
 
     /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
     /// a trailing comma and erroneous trailing attributes.
-    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
-        let mut params = Vec::new();
+    pub(super) fn parse_generic_params(&mut self) -> PResult<'a, ThinVec<ast::GenericParam>> {
+        let mut params = ThinVec::new();
         let mut done = false;
         while !done {
             let attrs = self.parse_outer_attributes()?;
@@ -118,12 +132,9 @@ impl<'a> Parser<'a> {
                     if this.eat_keyword_noexpect(kw::SelfUpper) {
                         // `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing
                         // as if `Self` never existed.
-                        this.struct_span_err(
-                            this.prev_token.span,
-                            "unexpected keyword `Self` in generic parameters",
-                        )
-                        .note("you cannot use `Self` as a generic parameter because it is reserved for associated items")
-                        .emit();
+                        this.sess.emit_err(UnexpectedSelfInGenericParameters {
+                            span: this.prev_token.span,
+                        });
 
                         this.eat(&token::Comma);
                     }
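
The message moved into `UnexpectedSelfInGenericParameters` fires on inputs such as:

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the diagnostic.
fn foo<Self>() {}
// error: unexpected keyword `Self` in generic parameters
// note: you cannot use `Self` as a generic parameter because it is reserved for associated items
```
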
@@ -136,6 +147,20 @@ impl<'a> Parser<'a> {
                         } else {
                             (None, Vec::new())
                         };
+
+                        if this.check_noexpect(&token::Eq)
+                            && this.look_ahead(1, |t| t.is_lifetime())
+                        {
+                            let lo = this.token.span;
+                            // Parse `= 'lifetime`.
+                            this.bump(); // `=`
+                            this.bump(); // `'lifetime`
+                            let span = lo.to(this.prev_token.span);
+                            this.sess.emit_err(
+                                UnexpectedDefaultValueForLifetimeInGenericParameters { span },
+                            );
+                        }
+
                         Some(ast::GenericParam {
                             ident: lifetime.ident,
                             id: lifetime.id,
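
The new `= 'lifetime` check rejects attempts to give a lifetime parameter a default value, roughly (wording approximate):

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the diagnostic.
struct Ref<'a = 'static>(&'a u8);
// error: default values for lifetime parameters are not allowed (wording approximate)
```
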
@@ -227,36 +252,52 @@ impl<'a> Parser<'a> {
             self.expect_gt()?;
             (params, span_lo.to(self.prev_token.span))
         } else {
-            (vec![], self.prev_token.span.shrink_to_hi())
+            (ThinVec::new(), self.prev_token.span.shrink_to_hi())
         };
         Ok(ast::Generics {
             params,
             where_clause: WhereClause {
                 has_where_token: false,
-                predicates: Vec::new(),
+                predicates: ThinVec::new(),
                 span: self.prev_token.span.shrink_to_hi(),
             },
             span,
         })
     }
 
-    /// Parses an optional where-clause and places it in `generics`.
+    /// Parses an optional where-clause.
     ///
     /// ```ignore (only-for-syntax-highlight)
     /// where T : Trait<U, V> + 'b, 'a : 'b
     /// ```
     pub(super) fn parse_where_clause(&mut self) -> PResult<'a, WhereClause> {
+        self.parse_where_clause_common(None).map(|(clause, _)| clause)
+    }
+
+    pub(super) fn parse_struct_where_clause(
+        &mut self,
+        struct_name: Ident,
+        body_insertion_point: Span,
+    ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
+        self.parse_where_clause_common(Some((struct_name, body_insertion_point)))
+    }
+
+    fn parse_where_clause_common(
+        &mut self,
+        struct_: Option<(Ident, Span)>,
+    ) -> PResult<'a, (WhereClause, Option<ThinVec<ast::FieldDef>>)> {
         let mut where_clause = WhereClause {
             has_where_token: false,
-            predicates: Vec::new(),
+            predicates: ThinVec::new(),
             span: self.prev_token.span.shrink_to_hi(),
         };
+        let mut tuple_struct_body = None;
 
         if !self.eat_keyword(kw::Where) {
-            return Ok(where_clause);
+            return Ok((where_clause, None));
         }
         where_clause.has_where_token = true;
-        let lo = self.prev_token.span;
+        let where_lo = self.prev_token.span;
 
         // We are considering adding generics to the `where` keyword as an alternative higher-rank
         // parameter syntax (as in `where<'a>` or `where<T>`). To avoid that being a breaking
@@ -272,7 +313,8 @@ impl<'a> Parser<'a> {
         }
 
         loop {
-            let lo = self.token.span;
+            let where_sp = where_lo.to(self.prev_token.span);
+            let pred_lo = self.token.span;
             if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                 let lifetime = self.expect_lifetime();
                 // Bounds starting with a colon are mandatory, but possibly empty.
@@ -280,13 +322,21 @@ impl<'a> Parser<'a> {
                 let bounds = self.parse_lt_param_bounds();
                 where_clause.predicates.push(ast::WherePredicate::RegionPredicate(
                     ast::WhereRegionPredicate {
-                        span: lo.to(self.prev_token.span),
+                        span: pred_lo.to(self.prev_token.span),
                         lifetime,
                         bounds,
                     },
                 ));
             } else if self.check_type() {
-                where_clause.predicates.push(self.parse_ty_where_predicate()?);
+                match self.parse_ty_where_predicate_or_recover_tuple_struct_body(
+                    struct_, pred_lo, where_sp,
+                )? {
+                    PredicateOrStructBody::Predicate(pred) => where_clause.predicates.push(pred),
+                    PredicateOrStructBody::StructBody(body) => {
+                        tuple_struct_body = Some(body);
+                        break;
+                    }
+                }
             } else {
                 break;
             }
@@ -295,23 +345,82 @@ impl<'a> Parser<'a> {
             let ate_comma = self.eat(&token::Comma);
 
             if self.eat_keyword_noexpect(kw::Where) {
-                let msg = "cannot define duplicate `where` clauses on an item";
-                let mut err = self.struct_span_err(self.token.span, msg);
-                err.span_label(lo, "previous `where` clause starts here");
-                err.span_suggestion_verbose(
-                    prev_token.shrink_to_hi().to(self.prev_token.span),
-                    "consider joining the two `where` clauses into one",
-                    ",",
-                    Applicability::MaybeIncorrect,
-                );
-                err.emit();
+                self.sess.emit_err(MultipleWhereClauses {
+                    span: self.token.span,
+                    previous: pred_lo,
+                    between: prev_token.shrink_to_hi().to(self.prev_token.span),
+                });
             } else if !ate_comma {
                 break;
             }
         }
 
-        where_clause.span = lo.to(self.prev_token.span);
-        Ok(where_clause)
+        where_clause.span = where_lo.to(self.prev_token.span);
+        Ok((where_clause, tuple_struct_body))
+    }
+
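
`MultipleWhereClauses` replaces the hand-rolled diagnostic removed above; it triggers on items such as:

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the diagnostic.
fn f<T>() where T: Clone where T: Copy {}
// error: cannot define duplicate `where` clauses on an item
// help: consider joining the two `where` clauses into one
```
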
+    fn parse_ty_where_predicate_or_recover_tuple_struct_body(
+        &mut self,
+        struct_: Option<(Ident, Span)>,
+        pred_lo: Span,
+        where_sp: Span,
+    ) -> PResult<'a, PredicateOrStructBody> {
+        let mut snapshot = None;
+
+        if let Some(struct_) = struct_
+            && self.may_recover()
+            && self.token.kind == token::OpenDelim(Delimiter::Parenthesis)
+        {
+            snapshot = Some((struct_, self.create_snapshot_for_diagnostic()));
+        };
+
+        match self.parse_ty_where_predicate() {
+            Ok(pred) => Ok(PredicateOrStructBody::Predicate(pred)),
+            Err(type_err) => {
+                let Some(((struct_name, body_insertion_point), mut snapshot)) = snapshot else {
+                    return Err(type_err);
+                };
+
+                // Check if we might have encountered an out of place tuple struct body.
+                match snapshot.parse_tuple_struct_body() {
+                    // Since we don't know the exact reason why we failed to parse the
+                    // predicate (we might have stumbled upon something bogus like `(T): ?`),
+                    // employ a simple heuristic to weed out some pathological cases:
+                    // Look for a semicolon (strong indicator) or anything that might mark
+                    // the end of the item (weak indicator) following the body.
+                    Ok(body)
+                        if matches!(snapshot.token.kind, token::Semi | token::Eof)
+                            || snapshot.token.can_begin_item() =>
+                    {
+                        type_err.cancel();
+
+                        let body_sp = pred_lo.to(snapshot.prev_token.span);
+                        let map = self.sess.source_map();
+
+                        self.sess.emit_err(WhereClauseBeforeTupleStructBody {
+                            span: where_sp,
+                            name: struct_name.span,
+                            body: body_sp,
+                            sugg: map.span_to_snippet(body_sp).ok().map(|body| {
+                                WhereClauseBeforeTupleStructBodySugg {
+                                    left: body_insertion_point.shrink_to_hi(),
+                                    snippet: body,
+                                    right: map.end_point(where_sp).to(body_sp),
+                                }
+                            }),
+                        });
+
+                        self.restore_snapshot(snapshot);
+                        Ok(PredicateOrStructBody::StructBody(body))
+                    }
+                    Ok(_) => Err(type_err),
+                    Err(body_err) => {
+                        body_err.cancel();
+                        Err(type_err)
+                    }
+                }
+            }
+        }
     }
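
A sketch of the misplaced tuple-struct body this recovery reorders; the suggestion wording is approximate:

```rust
// Hypothetical input; it does not compile and is shown only to illustrate the recovery.
struct Pair<T> where T: Copy (T, T);
// error: where clauses are not allowed before tuple struct bodies
// suggested rewrite: `struct Pair<T>(T, T) where T: Copy;`
```
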
 
     fn parse_ty_where_predicate(&mut self) -> PResult<'a, ast::WherePredicate> {
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index 494f0cf56a8..85cc8ca02a9 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -1,9 +1,8 @@
-use crate::errors::{DocCommentDoesNotDocumentAnything, UseEmptyBlockNotSemi};
+use crate::errors;
 
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
-
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
@@ -14,17 +13,20 @@ use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree,
 use rustc_ast::{BindingAnnotation, Block, FnDecl, FnSig, Param, SelfKind};
 use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData};
 use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
-use rustc_ast::{MacArgs, MacCall, MacDelimiter};
+use rustc_ast::{MacCall, MacDelimiter};
 use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, IntoDiagnostic, PResult, StashKey};
+use rustc_errors::{
+    struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
+    StashKey,
+};
+use rustc_span::edit_distance::edit_distance;
 use rustc_span::edition::Edition;
-use rustc_span::lev_distance::lev_distance;
 use rustc_span::source_map::{self, Span};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
 use rustc_span::DUMMY_SP;
-
-use std::convert::TryFrom;
+use std::fmt::Write;
 use std::mem;
+use thin_vec::{thin_vec, ThinVec};
 
 impl<'a> Parser<'a> {
     /// Parses a source module as a crate. This is the main entry point for the parser.
@@ -54,12 +56,12 @@ impl<'a> Parser<'a> {
     pub fn parse_mod(
         &mut self,
         term: &TokenKind,
-    ) -> PResult<'a, (AttrVec, Vec<P<Item>>, ModSpans)> {
+    ) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
         let lo = self.token.span;
         let attrs = self.parse_inner_attributes()?;
 
         let post_attr_lo = self.token.span;
-        let mut items = vec![];
+        let mut items = ThinVec::new();
         while let Some(item) = self.parse_item(ForceCollect::No)? {
             items.push(item);
             self.maybe_consume_incorrect_semicolon(&items);
@@ -99,7 +101,9 @@ impl<'a> Parser<'a> {
         fn_parse_mode: FnParseMode,
         force_collect: ForceCollect,
     ) -> PResult<'a, Option<Item>> {
+        self.recover_diff_marker();
         let attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.parse_item_common(attrs, true, false, fn_parse_mode, force_collect)
     }
 
@@ -121,16 +125,13 @@ impl<'a> Parser<'a> {
             return Ok(Some(item.into_inner()));
         };
 
-        let mut unclosed_delims = vec![];
         let item =
             self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
                 let item =
                     this.parse_item_common_(attrs, mac_allowed, attrs_allowed, fn_parse_mode);
-                unclosed_delims.append(&mut this.unclosed_delims);
                 Ok((item?, TrailingToken::None))
             })?;
 
-        self.unclosed_delims.append(&mut unclosed_delims);
         Ok(item)
     }
 
@@ -162,35 +163,18 @@ impl<'a> Parser<'a> {
         }
 
         // At this point, we have failed to parse an item.
-        self.error_on_unmatched_vis(&vis);
-        self.error_on_unmatched_defaultness(def);
-        if !attrs_allowed {
-            self.recover_attrs_no_item(&attrs)?;
+        if !matches!(vis.kind, VisibilityKind::Inherited) {
+            self.sess.emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
         }
-        Ok(None)
-    }
 
-    /// Error in-case a non-inherited visibility was parsed but no item followed.
-    fn error_on_unmatched_vis(&self, vis: &Visibility) {
-        if let VisibilityKind::Inherited = vis.kind {
-            return;
+        if let Defaultness::Default(span) = def {
+            self.sess.emit_err(errors::DefaultNotFollowedByItem { span });
         }
-        let vs = pprust::vis_to_string(&vis);
-        let vs = vs.trim_end();
-        self.struct_span_err(vis.span, &format!("visibility `{vs}` is not followed by an item"))
-            .span_label(vis.span, "the visibility")
-            .help(&format!("you likely meant to define an item, e.g., `{vs} fn foo() {{}}`"))
-            .emit();
-    }
 
-    /// Error in-case a `default` was parsed but no item followed.
-    fn error_on_unmatched_defaultness(&self, def: Defaultness) {
-        if let Defaultness::Default(sp) = def {
-            self.struct_span_err(sp, "`default` is not followed by an item")
-                .span_label(sp, "the `default` qualifier")
-                .note("only `fn`, `const`, `type`, or `impl` items may be prefixed by `default`")
-                .emit();
+        if !attrs_allowed {
+            self.recover_attrs_no_item(&attrs)?;
         }
+        Ok(None)
     }
 
     /// Error in case `default` was parsed in an inappropriate context.
@@ -222,7 +206,8 @@ impl<'a> Parser<'a> {
             self.parse_use_item()?
         } else if self.check_fn_front_matter(def_final, case) {
             // FUNCTION ITEM
-            let (ident, sig, generics, body) = self.parse_fn(attrs, fn_parse_mode, lo, vis)?;
+            let (ident, sig, generics, body) =
+                self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
             (ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
         } else if self.eat_keyword(kw::Extern) {
             if self.eat_keyword(kw::Crate) {
@@ -382,86 +367,74 @@ impl<'a> Parser<'a> {
         let sp = self.prev_token.span.between(self.token.span);
         let full_sp = self.prev_token.span.to(self.token.span);
         let ident_sp = self.token.span;
-        if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) {
+
+        let ident = if self.look_ahead(1, |t| {
+            [
+                token::Lt,
+                token::OpenDelim(Delimiter::Brace),
+                token::OpenDelim(Delimiter::Parenthesis),
+            ]
+            .contains(&t.kind)
+        }) {
+            self.parse_ident().unwrap()
+        } else {
+            return Ok(());
+        };
+
+        let mut found_generics = false;
+        if self.check(&token::Lt) {
+            found_generics = true;
+            self.eat_to_tokens(&[&token::Gt]);
+            self.bump(); // `>`
+        }
+
+        let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
             // possible public struct definition where `struct` was forgotten
-            let ident = self.parse_ident().unwrap();
-            let msg = format!("add `struct` here to parse `{ident}` as a public struct");
-            let mut err = self.struct_span_err(sp, "missing `struct` for struct definition");
-            err.span_suggestion_short(
-                sp,
-                &msg,
-                " struct ",
-                Applicability::MaybeIncorrect, // speculative
-            );
-            Err(err)
-        } else if self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Parenthesis)) {
-            let ident = self.parse_ident().unwrap();
+            Some(errors::MissingKeywordForItemDefinition::Struct { span: sp, ident })
+        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+            // possible public function or tuple struct definition where `fn`/`struct` was
+            // forgotten
             self.bump(); // `(`
-            let kw_name = self.recover_first_param();
+            let is_method = self.recover_self_param();
+
             self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
-            let (kw, kw_name, ambiguous) = if self.check(&token::RArrow) {
-                self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
-                self.bump(); // `{`
-                ("fn", kw_name, false)
-            } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-                self.bump(); // `{`
-                ("fn", kw_name, false)
-            } else if self.check(&token::Colon) {
-                let kw = "struct";
-                (kw, kw, false)
-            } else {
-                ("fn` or `struct", "function or struct", true)
-            };
 
-            let msg = format!("missing `{kw}` for {kw_name} definition");
-            let mut err = self.struct_span_err(sp, &msg);
-            if !ambiguous {
-                self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
-                let suggestion =
-                    format!("add `{kw}` here to parse `{ident}` as a public {kw_name}");
-                err.span_suggestion_short(
-                    sp,
-                    &suggestion,
-                    format!(" {kw} "),
-                    Applicability::MachineApplicable,
-                );
-            } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
-                err.span_suggestion(
-                    full_sp,
-                    "if you meant to call a macro, try",
-                    format!("{}!", snippet),
-                    // this is the `ambiguous` conditional branch
-                    Applicability::MaybeIncorrect,
-                );
-            } else {
-                err.help(
-                    "if you meant to call a macro, remove the `pub` \
-                              and add a trailing `!` after the identifier",
-                );
-            }
-            Err(err)
-        } else if self.look_ahead(1, |t| *t == token::Lt) {
-            let ident = self.parse_ident().unwrap();
-            self.eat_to_tokens(&[&token::Gt]);
-            self.bump(); // `>`
-            let (kw, kw_name, ambiguous) = if self.eat(&token::OpenDelim(Delimiter::Parenthesis)) {
-                ("fn", self.recover_first_param(), false)
-            } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
-                ("struct", "struct", false)
-            } else {
-                ("fn` or `struct", "function or struct", true)
-            };
-            let msg = format!("missing `{kw}` for {kw_name} definition");
-            let mut err = self.struct_span_err(sp, &msg);
-            if !ambiguous {
-                err.span_suggestion_short(
-                    sp,
-                    &format!("add `{kw}` here to parse `{ident}` as a public {kw_name}"),
-                    format!(" {} ", kw),
-                    Applicability::MachineApplicable,
-                );
-            }
-            Err(err)
+            let err =
+                if self.check(&token::RArrow) || self.check(&token::OpenDelim(Delimiter::Brace)) {
+                    self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
+                    self.bump(); // `{`
+                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                    if is_method {
+                        errors::MissingKeywordForItemDefinition::Method { span: sp, ident }
+                    } else {
+                        errors::MissingKeywordForItemDefinition::Function { span: sp, ident }
+                    }
+                } else if self.check(&token::Semi) {
+                    errors::MissingKeywordForItemDefinition::Struct { span: sp, ident }
+                } else {
+                    errors::MissingKeywordForItemDefinition::Ambiguous {
+                        span: sp,
+                        subdiag: if found_generics {
+                            None
+                        } else if let Ok(snippet) = self.span_to_snippet(ident_sp) {
+                            Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro {
+                                span: full_sp,
+                                snippet,
+                            })
+                        } else {
+                            Some(errors::AmbiguousMissingKwForItemSub::HelpMacro)
+                        },
+                    }
+                };
+            Some(err)
+        } else if found_generics {
+            Some(errors::MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None })
+        } else {
+            None
+        };
+
+        if let Some(err) = err {
+            Err(err.into_diagnostic(&self.sess.span_diagnostic))
         } else {
             Ok(())
         }
@@ -471,7 +444,7 @@ impl<'a> Parser<'a> {
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
         self.expect(&token::Not)?; // `!`
-        match self.parse_mac_args() {
+        match self.parse_delim_args() {
             // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
             Ok(args) => {
                 self.eat_semi_for_macro_if_needed(&args);
@@ -483,7 +456,8 @@ impl<'a> Parser<'a> {
                 // Maybe the user misspelled `macro_rules` (issue #91227)
                 if self.token.is_ident()
                     && path.segments.len() == 1
-                    && lev_distance("macro_rules", &path.segments[0].ident.to_string(), 3).is_some()
+                    && edit_distance("macro_rules", &path.segments[0].ident.to_string(), 2)
+                        .is_some()
                 {
                     err.span_suggestion(
                         path.span,
@@ -510,16 +484,13 @@ impl<'a> Parser<'a> {
         let mut err = self.struct_span_err(end.span, msg);
         if end.is_doc_comment() {
             err.span_label(end.span, "this doc comment doesn't document anything");
-        }
-        if end.meta_kind().is_some() {
-            if self.token.kind == TokenKind::Semi {
-                err.span_suggestion_verbose(
-                    self.token.span,
-                    "consider removing this semicolon",
-                    "",
-                    Applicability::MaybeIncorrect,
-                );
-            }
+        } else if self.token.kind == TokenKind::Semi {
+            err.span_suggestion_verbose(
+                self.token.span,
+                "consider removing this semicolon",
+                "",
+                Applicability::MaybeIncorrect,
+            );
         }
         if let [.., penultimate, _] = attrs {
             err.span_label(start.span.to(penultimate.span), "other attributes here");
@@ -586,20 +557,11 @@ impl<'a> Parser<'a> {
         let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt)
         {
             let span = self.prev_token.span.between(self.token.span);
-            self.struct_span_err(span, "missing trait in a trait impl")
-                .span_suggestion(
-                    span,
-                    "add a trait here",
-                    " Trait ",
-                    Applicability::HasPlaceholders,
-                )
-                .span_suggestion(
-                    span.to(self.token.span),
-                    "for an inherent impl, drop this `for`",
-                    "",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            self.sess.emit_err(errors::MissingTraitInTraitImpl {
+                span,
+                for_span: span.to(self.token.span),
+            });
+
             P(Ty {
                 kind: TyKind::Path(None, err_path(span)),
                 span,
@@ -632,14 +594,7 @@ impl<'a> Parser<'a> {
             Some(ty_second) => {
                 // impl Trait for Type
                 if !has_for {
-                    self.struct_span_err(missing_for_span, "missing `for` in a trait impl")
-                        .span_suggestion_short(
-                            missing_for_span,
-                            "add `for` here",
-                            " for ",
-                            Applicability::MachineApplicable,
-                        )
-                        .emit();
+                    self.sess.emit_err(errors::MissingForInTraitImpl { span: missing_for_span });
                 }
 
                 let ty_first = ty_first.into_inner();
@@ -647,7 +602,9 @@ impl<'a> Parser<'a> {
                     // This notably includes paths passed through `ty` macro fragments (#46438).
                     TyKind::Path(None, path) => path,
                     _ => {
-                        self.struct_span_err(ty_first.span, "expected a trait, found type").emit();
+                        self.sess.emit_err(errors::ExpectedTraitInTraitImplFoundType {
+                            span: ty_first.span,
+                        });
                         err_path(ty_first.span)
                     }
                 };
@@ -686,29 +643,30 @@ impl<'a> Parser<'a> {
         &mut self,
         attrs: &mut AttrVec,
         mut parse_item: impl FnMut(&mut Parser<'a>) -> PResult<'a, Option<Option<T>>>,
-    ) -> PResult<'a, Vec<T>> {
+    ) -> PResult<'a, ThinVec<T>> {
         let open_brace_span = self.token.span;
 
         // Recover `impl Ty;` instead of `impl Ty {}`
         if self.token == TokenKind::Semi {
-            self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span });
+            self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
             self.bump();
-            return Ok(vec![]);
+            return Ok(ThinVec::new());
         }
 
         self.expect(&token::OpenDelim(Delimiter::Brace))?;
         attrs.extend(self.parse_inner_attributes()?);
 
-        let mut items = Vec::new();
+        let mut items = ThinVec::new();
         while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
             if self.recover_doc_comment_before_brace() {
                 continue;
             }
+            self.recover_diff_marker();
             match parse_item(self) {
                 Ok(None) => {
-                    let is_unnecessary_semicolon = !items.is_empty()
+                    let mut is_unnecessary_semicolon = !items.is_empty()
                         // When the close delim is `)` in a case like the following, `token.kind` is expected to be `token::CloseDelim(Delimiter::Parenthesis)`,
-                        // but the actual `token.kind` is `token::CloseDelim(Delimiter::Bracket)`.
+                        // but the actual `token.kind` is `token::CloseDelim(Delimiter::Brace)`.
                        // This is because the `token.kind` of the close delim is treated the same as
                        // that of the open delim in `TokenTreesReader::parse_token_tree`, even if their delimiters are different.
                         // Therefore, `token.kind` should not be compared here.
@@ -727,7 +685,13 @@ impl<'a> Parser<'a> {
                             .span_to_snippet(self.prev_token.span)
                             .map_or(false, |snippet| snippet == "}")
                         && self.token.kind == token::Semi;
-                    let semicolon_span = self.token.span;
+                    let mut semicolon_span = self.token.span;
+                    if !is_unnecessary_semicolon {
+                        // #105369: detect spurious `;` before assoc fn body
+                        is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace)
+                            && self.prev_token.kind == token::Semi;
+                        semicolon_span = self.prev_token.span;
+                    }
                     // We have to bail or we'll potentially never make progress.
                     let non_item_span = self.token.span;
                     let is_let = self.token.is_keyword(kw::Let);
@@ -774,6 +738,7 @@ impl<'a> Parser<'a> {
     fn recover_doc_comment_before_brace(&mut self) -> bool {
         if let token::DocComment(..) = self.token.kind {
             if self.look_ahead(1, |tok| tok == &token::CloseDelim(Delimiter::Brace)) {
+                // FIXME: merge with `DocCommentDoesNotDocumentAnything` (E0585)
                 struct_span_err!(
                     self.diagnostic(),
                     self.token.span,
@@ -840,7 +805,7 @@ impl<'a> Parser<'a> {
             // It's a trait alias.
             if had_colon {
                 let span = span_at_colon.to(span_before_eq);
-                self.struct_span_err(span, "bounds are not allowed on trait aliases").emit();
+                self.sess.emit_err(errors::BoundsNotAllowedOnTraitAliases { span });
             }
 
             let bounds = self.parse_generic_bounds(None)?;
@@ -849,12 +814,10 @@ impl<'a> Parser<'a> {
 
             let whole_span = lo.to(self.prev_token.span);
             if is_auto == IsAuto::Yes {
-                let msg = "trait aliases cannot be `auto`";
-                self.struct_span_err(whole_span, msg).span_label(whole_span, msg).emit();
+                self.sess.emit_err(errors::TraitAliasCannotBeAuto { span: whole_span });
             }
             if let Unsafe::Yes(_) = unsafety {
-                let msg = "trait aliases cannot be `unsafe`";
-                self.struct_span_err(whole_span, msg).span_label(whole_span, msg).emit();
+                self.sess.emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span });
             }
 
             self.sess.gated_spans.gate(sym::trait_alias, whole_span);
@@ -900,8 +863,7 @@ impl<'a> Parser<'a> {
                     Ok(kind) => kind,
                     Err(kind) => match kind {
                         ItemKind::Static(a, _, b) => {
-                            self.struct_span_err(span, "associated `static` items are not allowed")
-                                .emit();
+                            self.sess.emit_err(errors::AssociatedStaticItemNotAllowed { span });
                             AssocItemKind::Const(Defaultness::Final, a, b)
                         }
                         _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"),
@@ -972,7 +934,8 @@ impl<'a> Parser<'a> {
     fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
         let lo = self.token.span;
 
-        let mut prefix = ast::Path { segments: Vec::new(), span: lo.shrink_to_lo(), tokens: None };
+        let mut prefix =
+            ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
         let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
             || self.check(&token::BinOp(token::Star))
             || self.is_import_coupler()
@@ -1010,7 +973,7 @@ impl<'a> Parser<'a> {
                     prefix.span = lo.to(self.prev_token.span);
                 }
 
-                UseTreeKind::Simple(self.parse_rename()?, DUMMY_NODE_ID, DUMMY_NODE_ID)
+                UseTreeKind::Simple(self.parse_rename()?)
             }
         };
 
@@ -1031,9 +994,12 @@ impl<'a> Parser<'a> {
     /// ```text
     /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
     /// ```
-    fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
-        self.parse_delim_comma_seq(Delimiter::Brace, |p| Ok((p.parse_use_tree()?, DUMMY_NODE_ID)))
-            .map(|(r, _)| r)
+    fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
+        self.parse_delim_comma_seq(Delimiter::Brace, |p| {
+            p.recover_diff_marker();
+            Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
+        })
+        .map(|(r, _)| r)
     }
 
     fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
@@ -1071,41 +1037,37 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_crate_name_with_dashes(&mut self) -> PResult<'a, Ident> {
-        let error_msg = "crate name using dashes are not valid in `extern crate` statements";
-        let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
-                              in the code";
-        let mut ident = if self.token.is_keyword(kw::SelfLower) {
+        let ident = if self.token.is_keyword(kw::SelfLower) {
             self.parse_path_segment_ident()
         } else {
             self.parse_ident()
         }?;
-        let mut idents = vec![];
-        let mut replacement = vec![];
-        let mut fixed_crate_name = false;
-        // Accept `extern crate name-like-this` for better diagnostics.
+
         let dash = token::BinOp(token::BinOpToken::Minus);
-        if self.token == dash {
-            // Do not include `-` as part of the expected tokens list.
-            while self.eat(&dash) {
-                fixed_crate_name = true;
-                replacement.push((self.prev_token.span, "_".to_string()));
-                idents.push(self.parse_ident()?);
-            }
+        if self.token != dash {
+            return Ok(ident);
         }
-        if fixed_crate_name {
-            let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
-            let mut fixed_name = ident.name.to_string();
-            for part in idents {
-                fixed_name.push_str(&format!("_{}", part.name));
-            }
-            ident = Ident::from_str_and_span(&fixed_name, fixed_name_sp);
 
-            self.struct_span_err(fixed_name_sp, error_msg)
-                .span_label(fixed_name_sp, "dash-separated idents are not valid")
-                .multipart_suggestion(suggestion_msg, replacement, Applicability::MachineApplicable)
-                .emit();
+        // Accept `extern crate name-like-this` for better diagnostics.
+        let mut dashes = vec![];
+        let mut idents = vec![];
+        while self.eat(&dash) {
+            dashes.push(self.prev_token.span);
+            idents.push(self.parse_ident()?);
         }
-        Ok(ident)
+
+        let fixed_name_sp = ident.span.to(idents.last().unwrap().span);
+        let mut fixed_name = ident.name.to_string();
+        for part in idents {
+            write!(fixed_name, "_{}", part.name).unwrap();
+        }
+
+        self.sess.emit_err(errors::ExternCrateNameWithDashes {
+            span: fixed_name_sp,
+            sugg: errors::ExternCrateNameWithDashesSugg { dashes },
+        });
+
+        Ok(Ident::from_str_and_span(&fixed_name, fixed_name_sp))
     }
 
     /// Parses `extern` for foreign ABIs modules.
@@ -1153,7 +1115,10 @@ impl<'a> Parser<'a> {
                     Ok(kind) => kind,
                     Err(kind) => match kind {
                         ItemKind::Const(_, a, b) => {
-                            self.error_on_foreign_const(span, ident);
+                            self.sess.emit_err(errors::ExternItemCannotBeConst {
+                                ident_span: ident.span,
+                                const_span: span.with_hi(ident.span.lo()),
+                            });
                             ForeignItemKind::Static(a, Mutability::Not, b)
                         }
                         _ => return self.error_bad_item_kind(span, &kind, "`extern` blocks"),
@@ -1165,6 +1130,7 @@ impl<'a> Parser<'a> {
     }
 
     fn error_bad_item_kind<T>(&self, span: Span, kind: &ItemKind, ctx: &str) -> Option<T> {
+        // FIXME(#100717): needs variant for each `ItemKind` (instead of using `ItemKind::descr()`)
         let span = self.sess.source_map().guess_head_span(span);
         let descr = kind.descr();
         self.struct_span_err(span, &format!("{descr} is not supported in {ctx}"))
@@ -1173,18 +1139,6 @@ impl<'a> Parser<'a> {
         None
     }
 
-    fn error_on_foreign_const(&self, span: Span, ident: Ident) {
-        self.struct_span_err(ident.span, "extern items cannot be `const`")
-            .span_suggestion(
-                span.with_hi(ident.span.lo()),
-                "try using a static value",
-                "static ",
-                Applicability::MachineApplicable,
-            )
-            .note("for more information, visit https://doc.rust-lang.org/std/keyword.extern.html")
-            .emit();
-    }
-
     fn is_unsafe_foreign_mod(&self) -> bool {
         self.token.is_keyword(kw::Unsafe)
             && self.is_keyword_ahead(1, &[kw::Extern])
@@ -1212,25 +1166,10 @@ impl<'a> Parser<'a> {
     fn recover_const_mut(&mut self, const_span: Span) {
         if self.eat_keyword(kw::Mut) {
             let span = self.prev_token.span;
-            self.struct_span_err(span, "const globals cannot be mutable")
-                .span_label(span, "cannot be mutable")
-                .span_suggestion(
-                    const_span,
-                    "you might want to declare a static instead",
-                    "static",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            self.sess.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
         } else if self.eat_keyword(kw::Let) {
             let span = self.prev_token.span;
-            self.struct_span_err(const_span.to(span), "`const` and `let` are mutually exclusive")
-                .span_suggestion(
-                    const_span.to(span),
-                    "remove `let`",
-                    "const",
-                    Applicability::MaybeIncorrect,
-                )
-                .emit();
+            self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
         }
     }
 
@@ -1254,8 +1193,8 @@ impl<'a> Parser<'a> {
             }
         };
 
-        match impl_info.1 {
-            ItemKind::Impl(box Impl { of_trait: Some(ref trai), ref mut constness, .. }) => {
+        match &mut impl_info.1 {
+            ItemKind::Impl(box Impl { of_trait: Some(trai), constness, .. }) => {
                 *constness = Const::Yes(const_span);
 
                 let before_trait = trai.path.span.shrink_to_lo();
@@ -1315,13 +1254,9 @@ impl<'a> Parser<'a> {
         };
 
         let span = self.prev_token.span.shrink_to_hi();
-        let mut err = self.struct_span_err(span, &format!("missing type for `{kind}` item"));
-        err.span_suggestion(
-            span,
-            "provide a type for the item",
-            format!("{colon} <type>"),
-            Applicability::HasPlaceholders,
-        );
+        let err: DiagnosticBuilder<'_, ErrorGuaranteed> =
+            errors::MissingConstType { span, colon, kind }
+                .into_diagnostic(&self.sess.span_diagnostic);
         err.stash(span, StashKey::ItemNoType);
 
         // The user intended that the type be inferred,
@@ -1333,18 +1268,12 @@ impl<'a> Parser<'a> {
     fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
         if self.token.is_keyword(kw::Struct) {
             let span = self.prev_token.span.to(self.token.span);
-            let mut err = self.struct_span_err(span, "`enum` and `struct` are mutually exclusive");
-            err.span_suggestion(
-                span,
-                "replace `enum struct` with",
-                "enum",
-                Applicability::MachineApplicable,
-            );
+            let err = errors::EnumStructMutuallyExclusive { span };
             if self.look_ahead(1, |t| t.is_ident()) {
                 self.bump();
-                err.emit();
+                self.sess.emit_err(err);
             } else {
-                return Err(err);
+                return Err(err.into_diagnostic(&self.sess.span_diagnostic));
             }
         }
 
@@ -1354,9 +1283,9 @@ impl<'a> Parser<'a> {
 
         // Possibly recover `enum Foo;` instead of `enum Foo {}`
         let (variants, _) = if self.token == TokenKind::Semi {
-            self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span });
+            self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span });
             self.bump();
-            (vec![], false)
+            (thin_vec![], false)
         } else {
             self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant()).map_err(
                 |mut e| {
@@ -1372,7 +1301,9 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
+        self.recover_diff_marker();
         let variant_attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.collect_tokens_trailing_token(
             variant_attrs,
             ForceCollect::No,
@@ -1397,7 +1328,7 @@ impl<'a> Parser<'a> {
                 };
 
                 let disr_expr =
-                    if this.eat(&token::Eq) { Some(this.parse_anon_const_expr()?) } else { None };
+                    if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
 
                 let vr = ast::Variant {
                     ident,
@@ -1412,7 +1343,10 @@ impl<'a> Parser<'a> {
 
                 Ok((Some(vr), TrailingToken::MaybeComma))
             },
-        )
+        ).map_err(|mut err|{
+            err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`");
+            err
+        })
     }
 
     /// Parses `struct Foo { ... }`.
@@ -1436,8 +1370,16 @@ impl<'a> Parser<'a> {
         // struct.
 
         let vdata = if self.token.is_keyword(kw::Where) {
-            generics.where_clause = self.parse_where_clause()?;
-            if self.eat(&token::Semi) {
+            let tuple_struct_body;
+            (generics.where_clause, tuple_struct_body) =
+                self.parse_struct_where_clause(class_name, generics.span)?;
+
+            if let Some(body) = tuple_struct_body {
+                // If we see a misplaced tuple struct body: `struct Foo<T> where T: Copy, (T);`
+                let body = VariantData::Tuple(body, DUMMY_NODE_ID);
+                self.expect_semi()?;
+                body
+            } else if self.eat(&token::Semi) {
                 // If we see a: `struct Foo<T> where T: Copy;` style decl.
                 VariantData::Unit(DUMMY_NODE_ID)
             } else {
@@ -1467,13 +1409,9 @@ impl<'a> Parser<'a> {
             self.expect_semi()?;
             body
         } else {
-            let token_str = super::token_descr(&self.token);
-            let msg = &format!(
-                "expected `where`, `{{`, `(`, or `;` after struct name, found {token_str}"
-            );
-            let mut err = self.struct_span_err(self.token.span, msg);
-            err.span_label(self.token.span, "expected `where`, `{`, `(`, or `;` after struct name");
-            return Err(err);
+            let err =
+                errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone());
+            return Err(err.into_diagnostic(&self.sess.span_diagnostic));
         };
 
         Ok((class_name, ItemKind::Struct(vdata, generics)))
@@ -1516,8 +1454,8 @@ impl<'a> Parser<'a> {
         adt_ty: &str,
         ident_span: Span,
         parsed_where: bool,
-    ) -> PResult<'a, (Vec<FieldDef>, /* recovered */ bool)> {
-        let mut fields = Vec::new();
+    ) -> PResult<'a, (ThinVec<FieldDef>, /* recovered */ bool)> {
+        let mut fields = ThinVec::new();
         let mut recovered = false;
         if self.eat(&token::OpenDelim(Delimiter::Brace)) {
             while self.token != token::CloseDelim(Delimiter::Brace) {
@@ -1557,15 +1495,38 @@ impl<'a> Parser<'a> {
         Ok((fields, recovered))
     }
 
-    fn parse_tuple_struct_body(&mut self) -> PResult<'a, Vec<FieldDef>> {
+    pub(super) fn parse_tuple_struct_body(&mut self) -> PResult<'a, ThinVec<FieldDef>> {
         // This is the case where we find `struct Foo<T>(T) where T: Copy;`
         // Unit like structs are handled in parse_item_struct function
         self.parse_paren_comma_seq(|p| {
             let attrs = p.parse_outer_attributes()?;
             p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
+                let mut snapshot = None;
+                if p.is_diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+                    // Account for `<<<<<<<` diff markers. We can't proactively error here because
+                    // that can be a valid type start, so we snapshot and reparse only if we've
+                    // encountered another parse error.
+                    snapshot = Some(p.create_snapshot_for_diagnostic());
+                }
                 let lo = p.token.span;
-                let vis = p.parse_visibility(FollowedByType::Yes)?;
-                let ty = p.parse_ty()?;
+                let vis = match p.parse_visibility(FollowedByType::Yes) {
+                    Ok(vis) => vis,
+                    Err(err) => {
+                        if let Some(ref mut snapshot) = snapshot {
+                            snapshot.recover_diff_marker();
+                        }
+                        return Err(err);
+                    }
+                };
+                let ty = match p.parse_ty() {
+                    Ok(ty) => ty,
+                    Err(err) => {
+                        if let Some(ref mut snapshot) = snapshot {
+                            snapshot.recover_diff_marker();
+                        }
+                        return Err(err);
+                    }
+                };
 
                 Ok((
                     FieldDef {
@@ -1586,7 +1547,9 @@ impl<'a> Parser<'a> {
 
     /// Parses an element of a struct declaration.
     fn parse_field_def(&mut self, adt_ty: &str) -> PResult<'a, FieldDef> {
+        self.recover_diff_marker();
         let attrs = self.parse_outer_attributes()?;
+        self.recover_diff_marker();
         self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
             let lo = this.token.span;
             let vis = this.parse_visibility(FollowedByType::No)?;
@@ -1625,7 +1588,7 @@ impl<'a> Parser<'a> {
             token::CloseDelim(Delimiter::Brace) => {}
             token::DocComment(..) => {
                 let previous_span = self.prev_token.span;
-                let mut err = DocCommentDoesNotDocumentAnything {
+                let mut err = errors::DocCommentDoesNotDocumentAnything {
                     span: self.token.span,
                     missing_comma: None,
                 };
@@ -1756,7 +1719,7 @@ impl<'a> Parser<'a> {
         }
         if self.token.kind == token::Eq {
             self.bump();
-            let const_expr = self.parse_anon_const_expr()?;
+            let const_expr = self.parse_expr_anon_const()?;
             let sp = ty.span.shrink_to_hi().to(const_expr.value.span);
             self.struct_span_err(sp, "default values on `struct` fields aren't supported")
                 .span_suggestion(
@@ -1792,7 +1755,13 @@ impl<'a> Parser<'a> {
                 };
                 // We use `parse_fn` to get a span for the function
                 let fn_parse_mode = FnParseMode { req_name: |_| true, req_body: true };
-                match self.parse_fn(&mut AttrVec::new(), fn_parse_mode, lo, &inherited_vis) {
+                match self.parse_fn(
+                    &mut AttrVec::new(),
+                    fn_parse_mode,
+                    lo,
+                    &inherited_vis,
+                    Case::Insensitive,
+                ) {
                     Ok(_) => {
                         let mut err = self.struct_span_err(
                             lo.to(self.prev_token.span),
@@ -1866,7 +1835,7 @@ impl<'a> Parser<'a> {
     fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
         let ident = self.parse_ident()?;
         let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
-            self.parse_mac_args()? // `MacBody`
+            self.parse_delim_args()? // `MacBody`
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
             let params = self.parse_token_tree(); // `MacParams`
             let pspan = params.span();
@@ -1879,7 +1848,7 @@ impl<'a> Parser<'a> {
             let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
             let tokens = TokenStream::new(vec![params, arrow, body]);
             let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
-            P(MacArgs::Delimited(dspan, MacDelimiter::Brace, tokens))
+            P(DelimArgs { dspan, delim: MacDelimiter::Brace, tokens })
         } else {
             return self.unexpected();
         };
@@ -1934,7 +1903,7 @@ impl<'a> Parser<'a> {
                 .emit();
         }
 
-        let body = self.parse_mac_args()?;
+        let body = self.parse_delim_args()?;
         self.eat_semi_for_macro_if_needed(&body);
         self.complain_if_pub_macro(vis, true);
 
@@ -1973,14 +1942,14 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn eat_semi_for_macro_if_needed(&mut self, args: &MacArgs) {
+    fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
         if args.need_semicolon() && !self.eat(&token::Semi) {
             self.report_invalid_macro_expansion_item(args);
         }
     }
 
-    fn report_invalid_macro_expansion_item(&self, args: &MacArgs) {
-        let span = args.span().expect("undelimited macro call");
+    fn report_invalid_macro_expansion_item(&self, args: &DelimArgs) {
+        let span = args.dspan.entire();
         let mut err = self.struct_span_err(
             span,
             "macros that expand to items must be delimited with braces or followed by a semicolon",
@@ -1988,24 +1957,12 @@ impl<'a> Parser<'a> {
         // FIXME: This will make us not emit the help even for declarative
         // macros within the same crate (that we can fix), which is sad.
         if !span.from_expansion() {
-            if self.unclosed_delims.is_empty() {
-                let DelimSpan { open, close } = match args {
-                    MacArgs::Empty | MacArgs::Eq(..) => unreachable!(),
-                    MacArgs::Delimited(dspan, ..) => *dspan,
-                };
-                err.multipart_suggestion(
-                    "change the delimiters to curly braces",
-                    vec![(open, "{".to_string()), (close, '}'.to_string())],
-                    Applicability::MaybeIncorrect,
-                );
-            } else {
-                err.span_suggestion(
-                    span,
-                    "change the delimiters to curly braces",
-                    " { /* items */ }",
-                    Applicability::HasPlaceholders,
-                );
-            }
+            let DelimSpan { open, close } = args.dspan;
+            err.multipart_suggestion(
+                "change the delimiters to curly braces",
+                vec![(open, "{".to_string()), (close, '}'.to_string())],
+                Applicability::MaybeIncorrect,
+            );
             err.span_suggestion(
                 span.shrink_to_hi(),
                 "add a semicolon",
@@ -2116,12 +2073,28 @@ impl<'a> Parser<'a> {
         fn_parse_mode: FnParseMode,
         sig_lo: Span,
         vis: &Visibility,
+        case: Case,
     ) -> PResult<'a, (Ident, FnSig, Generics, Option<P<Block>>)> {
-        let header = self.parse_fn_front_matter(vis)?; // `const ... fn`
+        let fn_span = self.token.span;
+        let header = self.parse_fn_front_matter(vis, case)?; // `const ... fn`
         let ident = self.parse_ident()?; // `foo`
         let mut generics = self.parse_generics()?; // `<'a, T, ...>`
-        let decl =
-            self.parse_fn_decl(fn_parse_mode.req_name, AllowPlus::Yes, RecoverReturnSign::Yes)?; // `(p: u8, ...)`
+        let decl = match self.parse_fn_decl(
+            fn_parse_mode.req_name,
+            AllowPlus::Yes,
+            RecoverReturnSign::Yes,
+        ) {
+            Ok(decl) => decl,
+            Err(old_err) => {
+                // If we see `for Ty ...` then the user probably meant an `impl` item.
+                if self.token.is_keyword(kw::For) {
+                    old_err.cancel();
+                    return Err(self.sess.create_err(errors::FnTypoWithImpl { fn_span }));
+                } else {
+                    return Err(old_err);
+                }
+            }
+        };
         generics.where_clause = self.parse_where_clause()?; // `where T: Ord`
 
         let mut sig_hi = self.prev_token.span;
@@ -2152,7 +2125,8 @@ impl<'a> Parser<'a> {
             *sig_hi = self.prev_token.span;
             (AttrVec::new(), None)
         } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
-            self.parse_inner_attrs_and_block().map(|(attrs, body)| (attrs, Some(body)))?
+            self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
+                .map(|(attrs, body)| (attrs, Some(body)))?
         } else if self.token.kind == token::Eq {
             // Recover `fn foo() = $expr;`.
             self.bump(); // `=`
@@ -2241,24 +2215,33 @@ impl<'a> Parser<'a> {
     ///
     /// `vis` represents the visibility that was already parsed, if any. Use
     /// `Visibility::Inherited` when no visibility is known.
-    pub(super) fn parse_fn_front_matter(&mut self, orig_vis: &Visibility) -> PResult<'a, FnHeader> {
+    pub(super) fn parse_fn_front_matter(
+        &mut self,
+        orig_vis: &Visibility,
+        case: Case,
+    ) -> PResult<'a, FnHeader> {
         let sp_start = self.token.span;
-        let constness = self.parse_constness(Case::Insensitive);
+        let constness = self.parse_constness(case);
 
         let async_start_sp = self.token.span;
-        let asyncness = self.parse_asyncness(Case::Insensitive);
+        let asyncness = self.parse_asyncness(case);
 
         let unsafe_start_sp = self.token.span;
-        let unsafety = self.parse_unsafety(Case::Insensitive);
+        let unsafety = self.parse_unsafety(case);
 
         let ext_start_sp = self.token.span;
-        let ext = self.parse_extern(Case::Insensitive);
+        let ext = self.parse_extern(case);
 
         if let Async::Yes { span, .. } = asyncness {
-            self.ban_async_in_2015(span);
+            if span.is_rust_2015() {
+                self.sess.emit_err(errors::AsyncFnIn2015 {
+                    span,
+                    help: errors::HelpUseLatestEdition::new(),
+                });
+            }
         }
 
-        if !self.eat_keyword_case(kw::Fn, Case::Insensitive) {
+        if !self.eat_keyword_case(kw::Fn, case) {
             // It is possible for `expect_one_of` to recover given the contents of
             // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
             // account for this.
@@ -2365,17 +2348,6 @@ impl<'a> Parser<'a> {
         Ok(FnHeader { constness, unsafety, asyncness, ext })
     }
 
-    /// We are parsing `async fn`. If we are on Rust 2015, emit an error.
-    fn ban_async_in_2015(&self, span: Span) {
-        if span.rust_2015() {
-            let diag = self.diagnostic();
-            struct_span_err!(diag, span, E0670, "`async fn` is not permitted in Rust 2015")
-                .span_label(span, "to use `async fn`, switch to Rust 2018 or later")
-                .help_use_latest_edition()
-                .emit();
-        }
-    }
-
     /// Parses the parameter list and result type of a function declaration.
     pub(super) fn parse_fn_decl(
         &mut self,
@@ -2390,10 +2362,11 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the parameter list of a function, including the `(` and `)` delimiters.
-    fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, Vec<Param>> {
+    pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec<Param>> {
         let mut first_param = true;
         // Parse the arguments, starting out with `self` being allowed...
         let (mut params, _) = self.parse_paren_comma_seq(|p| {
+            p.recover_diff_marker();
             let param = p.parse_param_general(req_name, first_param).or_else(|mut e| {
                 e.emit();
                 let lo = p.prev_token.span;
@@ -2431,7 +2404,6 @@ impl<'a> Parser<'a> {
             };
             let (pat, ty) = if is_name_required || this.is_named_param() {
                 debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
-
                 let (pat, colon) = this.parse_fn_param_pat_colon()?;
                 if !colon {
                     let mut err = this.unexpected::<()>().unwrap_err();
@@ -2518,9 +2490,7 @@ impl<'a> Parser<'a> {
         };
         // Recover for the grammar `*self`, `*const self`, and `*mut self`.
         let recover_self_ptr = |this: &mut Self| {
-            let msg = "cannot pass `self` by raw pointer";
-            let span = this.token.span;
-            this.struct_span_err(span, msg).span_label(span, msg).emit();
+            self.sess.emit_err(errors::SelfArgumentPointer { span: this.token.span });
 
             Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span))
         };
@@ -2587,8 +2557,8 @@ impl<'a> Parser<'a> {
     }
 
     fn is_named_param(&self) -> bool {
-        let offset = match self.token.kind {
-            token::Interpolated(ref nt) => match **nt {
+        let offset = match &self.token.kind {
+            token::Interpolated(nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             },
@@ -2601,14 +2571,14 @@ impl<'a> Parser<'a> {
             && self.look_ahead(offset + 1, |t| t == &token::Colon)
     }
 
-    fn recover_first_param(&mut self) -> &'static str {
+    fn recover_self_param(&mut self) -> bool {
         match self
             .parse_outer_attributes()
             .and_then(|_| self.parse_self_param())
             .map_err(|e| e.cancel())
         {
-            Ok(Some(_)) => "method",
-            _ => "function",
+            Ok(Some(_)) => true,
+            _ => false,
         }
     }
 }
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 14dc490fb02..6e9b447fa61 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -10,7 +10,7 @@ mod path;
 mod stmt;
 mod ty;
 
-use crate::lexer::UnmatchedBrace;
+use crate::lexer::UnmatchedDelim;
 pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 pub(crate) use item::FnParseMode;
@@ -19,14 +19,13 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::AttributesData;
-use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
 use rustc_ast::util::case::Case;
 use rustc_ast::AttrId;
 use rustc_ast::DUMMY_NODE_ID;
-use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, Extern};
-use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacArgsEq, MacDelimiter, Mutability, StrLit};
+use rustc_ast::{self as ast, AnonConst, AttrStyle, Const, DelimArgs, Extern};
+use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
 use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
@@ -37,9 +36,10 @@ use rustc_errors::{
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::{Span, DUMMY_SP};
 use rustc_span::symbol::{kw, sym, Ident, Symbol};
-
 use std::ops::Range;
 use std::{cmp, mem, slice};
+use thin_vec::ThinVec;
+use tracing::debug;
 
 use crate::errors::{
     DocCommentDoesNotDocumentAnything, IncorrectVisibilityRestriction, MismatchedClosingDelimiter,
@@ -146,10 +146,7 @@ pub struct Parser<'a> {
     /// See the comments in the `parse_path_segment` function for more details.
     unmatched_angle_bracket_count: u32,
     max_angle_bracket_count: u32,
-    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
-    /// it gets removed from here. Every entry left at the end gets emitted as an independent
-    /// error.
-    pub(super) unclosed_delims: Vec<UnmatchedBrace>,
+
     last_unexpected_token_span: Option<Span>,
     /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
     /// looked like it could have been a mistyped path or literal `Option:Some(42)`).
@@ -168,7 +165,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(Parser<'_>, 336);
+rustc_data_structures::static_assert_size!(Parser<'_>, 288);
 
 /// Stores span information about a closure.
 #[derive(Clone)]
@@ -215,24 +212,27 @@ struct CaptureState {
     inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
 }
 
-impl<'a> Drop for Parser<'a> {
-    fn drop(&mut self) {
-        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
-    }
-}
-
+/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+/// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
+/// use this type to emit them as a linear sequence. But a linear sequence is
+/// what the parser expects, for the most part.
 #[derive(Clone)]
 struct TokenCursor {
-    // The current (innermost) frame. `frame` and `stack` could be combined,
-    // but it's faster to have them separately to access `frame` directly
-    // rather than via something like `stack.last().unwrap()` or
-    // `stack[stack.len() - 1]`.
-    frame: TokenCursorFrame,
-    // Additional frames that enclose `frame`.
-    stack: Vec<TokenCursorFrame>,
+    // Cursor for the current (innermost) token stream. The delimiters for this
+    // token stream are found in `self.stack.last()`; when that is `None` then
+    // we are in the outermost token stream which never has delimiters.
+    tree_cursor: TokenTreeCursor,
+
+    // Token streams surrounding the current one. The delimiters for stack[n]'s
+    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
+    // because it's the outermost token stream which never has delimiters.
+    stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
+
     desugar_doc_comments: bool,
+
     // Counts the number of calls to `{,inlined_}next`.
     num_next_calls: usize,
+
     // During parsing, we may sometimes need to 'unglue' a
     // glued token into two component tokens
     // (e.g. '>>' into '>' and '>'), so that the parser
@@ -257,18 +257,6 @@ struct TokenCursor {
     break_last_token: bool,
 }
 
-#[derive(Clone)]
-struct TokenCursorFrame {
-    delim_sp: Option<(Delimiter, DelimSpan)>,
-    tree_cursor: tokenstream::Cursor,
-}
-
-impl TokenCursorFrame {
-    fn new(delim_sp: Option<(Delimiter, DelimSpan)>, tts: TokenStream) -> Self {
-        TokenCursorFrame { delim_sp, tree_cursor: tts.into_trees() }
-    }
-}
-
 impl TokenCursor {
     fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
         self.inlined_next(desugar_doc_comments)
@@ -281,43 +269,52 @@ impl TokenCursor {
             // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
             // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
             // removed.
-            if let Some(tree) = self.frame.tree_cursor.next_ref() {
+            if let Some(tree) = self.tree_cursor.next_ref() {
                 match tree {
                     &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
                         (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
-                            return self.desugar(attr_style, data, span);
+                            let desugared = self.desugar(attr_style, data, span);
+                            self.tree_cursor.replace_prev_and_rewind(desugared);
+                            // Continue to get the first token of the desugared doc comment.
+                        }
+                        _ => {
+                            debug_assert!(!matches!(
+                                token.kind,
+                                token::OpenDelim(_) | token::CloseDelim(_)
+                            ));
+                            return (token.clone(), spacing);
                         }
-                        _ => return (token.clone(), spacing),
                     },
                     &TokenTree::Delimited(sp, delim, ref tts) => {
-                        // Set `open_delim` to true here because we deal with it immediately.
-                        let frame = TokenCursorFrame::new(Some((delim, sp)), tts.clone());
-                        self.stack.push(mem::replace(&mut self.frame, frame));
+                        let trees = tts.clone().into_trees();
+                        self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
                         if delim != Delimiter::Invisible {
                             return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some(frame) = self.stack.pop() {
-                if let Some((delim, span)) = self.frame.delim_sp && delim != Delimiter::Invisible {
-                    self.frame = frame;
+            } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
+                // We have exhausted this token stream. Move back to its parent token stream.
+                self.tree_cursor = tree_cursor;
+                if delim != Delimiter::Invisible {
                     return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
                 }
-                self.frame = frame;
                 // No close delimiter to return; continue on to the next iteration.
             } else {
+                // We have exhausted the outermost token stream.
                 return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
             }
         }
     }
 
-    fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> (Token, Spacing) {
+    // Desugar a doc comment into something like `#[doc = r"foo"]`.
+    fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
         // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
         // required to wrap the text. E.g.
         // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
         // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
-        // - `abc "##d##"` is wrapped as `r###"abc "d""###` (num_of_hashes = 3)
+        // - `abc "##d##"` is wrapped as `r###"abc ##"d"##"###` (num_of_hashes = 3)
         let mut num_of_hashes = 0;
         let mut count = 0;
         for ch in data.as_str().chars() {
@@ -329,7 +326,7 @@ impl TokenCursor {
             num_of_hashes = cmp::max(num_of_hashes, count);
         }
 
-        // `/// foo` becomes `doc = r"foo".
+        // `/// foo` becomes `doc = r"foo"`.
         let delim_span = DelimSpan::from_single(span);
         let body = TokenTree::Delimited(
             delim_span,
@@ -346,27 +343,15 @@ impl TokenCursor {
             .collect::<TokenStream>(),
         );
 
-        self.stack.push(mem::replace(
-            &mut self.frame,
-            TokenCursorFrame::new(
-                None,
-                if attr_style == AttrStyle::Inner {
-                    [
-                        TokenTree::token_alone(token::Pound, span),
-                        TokenTree::token_alone(token::Not, span),
-                        body,
-                    ]
-                    .into_iter()
-                    .collect::<TokenStream>()
-                } else {
-                    [TokenTree::token_alone(token::Pound, span), body]
-                        .into_iter()
-                        .collect::<TokenStream>()
-                },
-            ),
-        ));
-
-        self.next(/* desugar_doc_comments */ false)
+        if attr_style == AttrStyle::Inner {
+            vec![
+                TokenTree::token_alone(token::Pound, span),
+                TokenTree::token_alone(token::Not, span),
+                body,
+            ]
+        } else {
+            vec![TokenTree::token_alone(token::Pound, span), body]
+        }
     }
 }
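
For the hash-counting rule documented in `desugar` above, a free-standing sketch (a plain `&str` in place of the interned `Symbol`, and a top-level function instead of a method) shows why the examples in the comment come out to 0, 1 and 3 hashes. The resulting tokens then spell `#[doc = r"..."]` for outer doc comments and `#![doc = r"..."]` for inner ones, as the `AttrStyle` branch constructs.

    // Minimum number of `#`s needed so that `text` cannot terminate the raw
    // string literal it is wrapped in; mirrors the counting loop in `desugar`.
    fn num_of_hashes(text: &str) -> u32 {
        let mut max = 0;
        let mut count = 0;
        for ch in text.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            max = max.max(count);
        }
        max
    }

    fn main() {
        assert_eq!(num_of_hashes("abc d"), 0);         // r"abc d"
        assert_eq!(num_of_hashes("abc \"d\""), 1);     // r#"abc "d""#
        assert_eq!(num_of_hashes("abc \"##d##\""), 3); // r###"abc "##d##""###
    }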
 
@@ -384,8 +369,8 @@ enum TokenType {
 
 impl TokenType {
     fn to_string(&self) -> String {
-        match *self {
-            TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)),
+        match self {
+            TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
             TokenType::Keyword(kw) => format!("`{}`", kw),
             TokenType::Operator => "an operator".to_string(),
             TokenType::Lifetime => "lifetime".to_string(),
@@ -475,7 +460,7 @@ impl<'a> Parser<'a> {
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
             token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(None, tokens),
+                tree_cursor: tokens.into_trees(),
                 stack: Vec::new(),
                 num_next_calls: 0,
                 desugar_doc_comments,
@@ -484,7 +469,6 @@ impl<'a> Parser<'a> {
             desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
             max_angle_bracket_count: 0,
-            unclosed_delims: Vec::new(),
             last_unexpected_token_span: None,
             last_type_ascription: None,
             subparser_name,
@@ -503,8 +487,8 @@ impl<'a> Parser<'a> {
         parser
     }
 
-    pub fn forbid_recovery(mut self) -> Self {
-        self.recovery = Recovery::Forbidden;
+    pub fn recovery(mut self, recovery: Recovery) -> Self {
+        self.recovery = recovery;
         self
     }
 
@@ -542,9 +526,9 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Expect next token to be edible or inedible token.  If edible,
+    /// Expect next token to be edible or inedible token. If edible,
     /// then consume it; if inedible, then return without consuming
-    /// anything.  Signal a fatal error if next token is unexpected.
+    /// anything. Signal a fatal error if next token is unexpected.
     pub fn expect_one_of(
         &mut self,
         edible: &[TokenKind],
@@ -736,10 +720,21 @@ impl<'a> Parser<'a> {
         self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
     }
 
+    fn check_const_closure(&self) -> bool {
+        self.is_keyword_ahead(0, &[kw::Const])
+            && self.look_ahead(1, |t| match &t.kind {
+                // async closures do not work with const closures, so we do not parse that here.
+                token::Ident(kw::Move | kw::Static, _) | token::OrOr | token::BinOp(token::Or) => {
+                    true
+                }
+                _ => false,
+            })
+    }
+
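
`check_const_closure` only looks one token past `const`: anything that can start a closure (`move`, `static`, `|`, `||`) marks closure constness, while `{` belongs to an inline const block and everything else is left for const items. A hypothetical, much-simplified classifier along the same lines (token spellings as plain strings, names invented for the example):

    // Hypothetical classifier mirroring the lookahead: given the token after
    // `const`, decide whether a closure, an inline const block, or something
    // else (e.g. a `const` item) follows.
    #[derive(Debug, PartialEq)]
    enum AfterConst {
        Closure,     // `const ||`, `const |x|`, `const move ||`, `const static ||`
        InlineBlock, // `const { ... }`
        Other,       // `const NAME: T = ...;`, `const fn ...`, etc.
    }

    fn classify(next: &str) -> AfterConst {
        match next {
            "||" | "|" | "move" | "static" => AfterConst::Closure,
            "{" => AfterConst::InlineBlock,
            _ => AfterConst::Other,
        }
    }

    fn main() {
        assert_eq!(classify("||"), AfterConst::Closure);
        assert_eq!(classify("{"), AfterConst::InlineBlock);
        assert_eq!(classify("fn"), AfterConst::Other);
    }
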
     fn check_inline_const(&self, dist: usize) -> bool {
         self.is_keyword_ahead(dist, &[kw::Const])
-            && self.look_ahead(dist + 1, |t| match t.kind {
-                token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
+            && self.look_ahead(dist + 1, |t| match &t.kind {
+                token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
                 token::OpenDelim(Delimiter::Brace) => true,
                 _ => false,
             })
@@ -849,18 +844,17 @@ impl<'a> Parser<'a> {
         sep: SeqSep,
         expect: TokenExpectType,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */, bool /* recovered */)> {
         let mut first = true;
         let mut recovered = false;
         let mut trailing = false;
-        let mut v = vec![];
-        let unclosed_delims = !self.unclosed_delims.is_empty();
+        let mut v = ThinVec::new();
 
         while !self.expect_any_with_type(kets, expect) {
             if let token::CloseDelim(..) | token::Eof = self.token.kind {
                 break;
             }
-            if let Some(ref t) = sep.sep {
+            if let Some(t) = &sep.sep {
                 if first {
                     first = false;
                 } else {
@@ -895,8 +889,8 @@ impl<'a> Parser<'a> {
 
                                 _ => {
                                     // Attempt to keep parsing if it was a similar separator.
-                                    if let Some(ref tokens) = t.similar_tokens() {
-                                        if tokens.contains(&self.token.kind) && !unclosed_delims {
+                                    if let Some(tokens) = t.similar_tokens() {
+                                        if tokens.contains(&self.token.kind) {
                                             self.bump();
                                         }
                                     }
@@ -943,6 +937,10 @@ impl<'a> Parser<'a> {
                                 Err(e) => {
                                     // Parsing failed, therefore it must be something more serious
                                     // than just a missing separator.
+                                    for xx in &e.children {
+                                        // Propagate the help message from the sub-error `e` to the main error `expect_err`.
+                                        expect_err.children.push(xx.clone());
+                                    }
                                     expect_err.emit();
 
                                     e.cancel();
@@ -973,7 +971,11 @@ impl<'a> Parser<'a> {
         let initial_semicolon = self.token.span;
 
         while self.eat(&TokenKind::Semi) {
-            let _ = self.parse_stmt(ForceCollect::Yes)?;
+            let _ =
+                self.parse_stmt_without_recovery(false, ForceCollect::Yes).unwrap_or_else(|e| {
+                    e.cancel();
+                    None
+                });
         }
 
         expect_err.set_primary_message(
@@ -1029,7 +1031,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool, bool)> {
         self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
     }
 
@@ -1041,7 +1043,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
+    ) -> PResult<'a, (ThinVec<T>, bool /* trailing */)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
         if !recovered {
             self.eat(ket);
@@ -1058,7 +1060,7 @@ impl<'a> Parser<'a> {
         ket: &TokenKind,
         sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool)> {
         self.expect(bra)?;
         self.parse_seq_to_end(ket, sep, f)
     }
@@ -1067,7 +1069,7 @@ impl<'a> Parser<'a> {
         &mut self,
         delim: Delimiter,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool)> {
         self.parse_unspanned_seq(
             &token::OpenDelim(delim),
             &token::CloseDelim(delim),
@@ -1079,7 +1081,7 @@ impl<'a> Parser<'a> {
     fn parse_paren_comma_seq<T>(
         &mut self,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
-    ) -> PResult<'a, (Vec<T>, bool)> {
+    ) -> PResult<'a, (ThinVec<T>, bool)> {
         self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
     }
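
Several of these sequence-parsing helpers now return `ThinVec<T>` instead of `Vec<T>`. `ThinVec` comes from the external `thin-vec` crate; it stores length and capacity on the heap next to the elements, so the handle itself is a single pointer and the common empty case allocates nothing. A small usage sketch, assuming the `thin-vec` dependency:

    use thin_vec::{thin_vec, ThinVec};

    fn main() {
        // An empty ThinVec is one pointer wide and performs no allocation.
        let empty: ThinVec<u32> = ThinVec::new();
        assert!(empty.is_empty());
        assert_eq!(std::mem::size_of::<ThinVec<u32>>(), std::mem::size_of::<usize>());

        // Otherwise the API mirrors Vec, including a `thin_vec![]` macro.
        let mut v = thin_vec![1, 2, 3];
        v.push(4);
        assert_eq!(v.len(), 4);
    }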
 
@@ -1128,14 +1130,16 @@ impl<'a> Parser<'a> {
             return looker(&self.token);
         }
 
-        let frame = &self.token_cursor.frame;
-        if let Some((delim, span)) = frame.delim_sp && delim != Delimiter::Invisible {
+        let tree_cursor = &self.token_cursor.tree_cursor;
+        if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
+            && delim != Delimiter::Invisible
+        {
             let all_normal = (0..dist).all(|i| {
-                let token = frame.tree_cursor.look_ahead(i);
+                let token = tree_cursor.look_ahead(i);
                 !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
             });
             if all_normal {
-                return match frame.tree_cursor.look_ahead(dist - 1) {
+                return match tree_cursor.look_ahead(dist - 1) {
                     Some(tree) => match tree {
                         TokenTree::Token(token, _) => looker(token),
                         TokenTree::Delimited(dspan, delim, _) => {
@@ -1189,8 +1193,18 @@ impl<'a> Parser<'a> {
 
     /// Parses constness: `const` or nothing.
     fn parse_constness(&mut self, case: Case) -> Const {
-        // Avoid const blocks to be parsed as const items
-        if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
+        self.parse_constness_(case, false)
+    }
+
+    /// Parses constness for closures
+    fn parse_closure_constness(&mut self, case: Case) -> Const {
+        self.parse_constness_(case, true)
+    }
+
+    fn parse_constness_(&mut self, case: Case, is_closure: bool) -> Const {
+        // Avoid parsing const blocks and const closures as const items.
+        if (self.check_const_closure() == is_closure)
+            && self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
             && self.eat_keyword_case(kw::Const, case)
         {
             Const::Yes(self.prev_token.uninterpolated_span())
@@ -1213,11 +1227,7 @@ impl<'a> Parser<'a> {
             value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
         };
         let blk_span = anon_const.value.span;
-        Ok(self.mk_expr_with_attrs(
-            span.to(blk_span),
-            ExprKind::ConstBlock(anon_const),
-            AttrVec::from(attrs),
-        ))
+        Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
     }
 
     /// Parses mutability (`mut` or nothing).
@@ -1249,39 +1259,34 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_mac_args(&mut self) -> PResult<'a, P<MacArgs>> {
-        self.parse_mac_args_common(true).map(P)
+    fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
+        if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
     }
 
-    fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> {
-        self.parse_mac_args_common(false)
+    fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
+        Ok(if let Some(args) = self.parse_delim_args_inner() {
+            AttrArgs::Delimited(args)
+        } else {
+            if self.eat(&token::Eq) {
+                let eq_span = self.prev_token.span;
+                AttrArgs::Eq(eq_span, AttrArgsEq::Ast(self.parse_expr_force_collect()?))
+            } else {
+                AttrArgs::Empty
+            }
+        })
     }
 
-    fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> {
-        Ok(
-            if self.check(&token::OpenDelim(Delimiter::Parenthesis))
-                || self.check(&token::OpenDelim(Delimiter::Bracket))
-                || self.check(&token::OpenDelim(Delimiter::Brace))
-            {
-                match self.parse_token_tree() {
-                    TokenTree::Delimited(dspan, delim, tokens) =>
-                    // We've confirmed above that there is a delimiter so unwrapping is OK.
-                    {
-                        MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens)
-                    }
-                    _ => unreachable!(),
-                }
-            } else if !delimited_only {
-                if self.eat(&token::Eq) {
-                    let eq_span = self.prev_token.span;
-                    MacArgs::Eq(eq_span, MacArgsEq::Ast(self.parse_expr_force_collect()?))
-                } else {
-                    MacArgs::Empty
-                }
-            } else {
-                return self.unexpected();
-            },
-        )
+    fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
+        let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
+            || self.check(&token::OpenDelim(Delimiter::Bracket))
+            || self.check(&token::OpenDelim(Delimiter::Brace));
+
+        delimited.then(|| {
+            // We've confirmed above that there is a delimiter so unwrapping is OK.
+            let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else { unreachable!() };
+
+            DelimArgs { dspan, delim: MacDelimiter::from_token(delim).unwrap(), tokens }
+        })
     }
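
`MacArgs` has been split: macro calls keep delimited arguments (`DelimArgs`), while attributes get `AttrArgs`, whose three shapes are exactly what `parse_attr_args` distinguishes above. In surface syntax (the particular attributes chosen here are only illustrative):

    #[derive(Clone)]  // AttrArgs::Delimited: a `(...)`, `[...]` or `{...}` group after the path
    #[doc = "hello"]  // AttrArgs::Eq: `= <expr>` after the path
    #[non_exhaustive] // AttrArgs::Empty: nothing after the path
    pub struct Example;

    fn main() {
        let _copy = Example.clone();
    }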
 
     fn parse_or_use_outer_attributes(
@@ -1299,10 +1304,10 @@ impl<'a> Parser<'a> {
     pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
-                // Grab the tokens from this frame.
-                let frame = &self.token_cursor.frame;
-                let stream = frame.tree_cursor.stream.clone();
-                let (delim, span) = frame.delim_sp.unwrap();
+                // Grab the tokens within the delimiters.
+                let tree_cursor = &self.token_cursor.tree_cursor;
+                let stream = tree_cursor.stream.clone();
+                let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1498,14 +1503,18 @@ impl<'a> Parser<'a> {
     pub fn clear_expected_tokens(&mut self) {
         self.expected_tokens.clear();
     }
+
+    pub fn approx_token_stream_pos(&self) -> usize {
+        self.token_cursor.num_next_calls
+    }
 }
 
 pub(crate) fn make_unclosed_delims_error(
-    unmatched: UnmatchedBrace,
+    unmatched: UnmatchedDelim,
     sess: &ParseSess,
 ) -> Option<DiagnosticBuilder<'_, ErrorGuaranteed>> {
     // `None` here means an `Eof` was found. We already emit those errors elsewhere, we add them to
-    // `unmatched_braces` only for error recovery in the `Parser`.
+    // `unmatched_delims` only for error recovery in the `Parser`.
     let found_delim = unmatched.found_delim?;
     let mut spans = vec![unmatched.found_span];
     if let Some(sp) = unmatched.unclosed_span {
@@ -1522,7 +1531,7 @@ pub(crate) fn make_unclosed_delims_error(
     Some(err)
 }
 
-pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
+pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedDelim>, sess: &ParseSess) {
     *sess.reached_eof.borrow_mut() |=
         unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
     for unmatched in unclosed_delims.drain(..) {
diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs
index 103dd801257..7a4d53ed8bb 100644
--- a/compiler/rustc_parse/src/parser/nonterminal.rs
+++ b/compiler/rustc_parse/src/parser/nonterminal.rs
@@ -2,9 +2,11 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, NonterminalKind, Token};
 use rustc_ast::HasTokens;
 use rustc_ast_pretty::pprust;
+use rustc_errors::IntoDiagnostic;
 use rustc_errors::PResult;
 use rustc_span::symbol::{kw, Ident};
 
+use crate::errors::UnexpectedNonterminal;
 use crate::parser::pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 use crate::parser::{FollowedByType, ForceCollect, NtOrTt, Parser, PathStyle};
 
@@ -42,9 +44,9 @@ impl<'a> Parser<'a> {
                 token::Comma | token::Ident(..) | token::Interpolated(..) => true,
                 _ => token.can_begin_type(),
             },
-            NonterminalKind::Block => match token.kind {
+            NonterminalKind::Block => match &token.kind {
                 token::OpenDelim(Delimiter::Brace) => true,
-                token::Interpolated(ref nt) => !matches!(
+                token::Interpolated(nt) => !matches!(
                     **nt,
                     token::NtItem(_)
                         | token::NtPat(_)
@@ -56,16 +58,16 @@ impl<'a> Parser<'a> {
                 ),
                 _ => false,
             },
-            NonterminalKind::Path | NonterminalKind::Meta => match token.kind {
+            NonterminalKind::Path | NonterminalKind::Meta => match &token.kind {
                 token::ModSep | token::Ident(..) => true,
-                token::Interpolated(ref nt) => match **nt {
+                token::Interpolated(nt) => match **nt {
                     token::NtPath(_) | token::NtMeta(_) => true,
                     _ => may_be_ident(&nt),
                 },
                 _ => false,
             },
             NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
-                match token.kind {
+                match &token.kind {
                 token::Ident(..) |                          // box, ref, mut, and other identifiers (can stricten)
                 token::OpenDelim(Delimiter::Parenthesis) |  // tuple pattern
                 token::OpenDelim(Delimiter::Bracket) |      // slice pattern
@@ -80,13 +82,13 @@ impl<'a> Parser<'a> {
                 token::BinOp(token::Shl) => true,           // path (double UFCS)
                 // leading vert `|` or-pattern
                 token::BinOp(token::Or) =>  matches!(kind, NonterminalKind::PatWithOr {..}),
-                token::Interpolated(ref nt) => may_be_ident(nt),
+                token::Interpolated(nt) => may_be_ident(nt),
                 _ => false,
             }
             }
-            NonterminalKind::Lifetime => match token.kind {
+            NonterminalKind::Lifetime => match &token.kind {
                 token::Lifetime(_) => true,
-                token::Interpolated(ref nt) => {
+                token::Interpolated(nt) => {
                     matches!(**nt, token::NtLifetime(_))
                 }
                 _ => false,
@@ -113,7 +115,8 @@ impl<'a> Parser<'a> {
             NonterminalKind::Item => match self.parse_item(ForceCollect::Yes)? {
                 Some(item) => token::NtItem(item),
                 None => {
-                    return Err(self.struct_span_err(self.token.span, "expected an item keyword"));
+                    return Err(UnexpectedNonterminal::Item(self.token.span)
+                               .into_diagnostic(&self.sess.span_diagnostic));
                 }
             },
             NonterminalKind::Block => {
@@ -124,7 +127,8 @@ impl<'a> Parser<'a> {
             NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
                 Some(s) => token::NtStmt(P(s)),
                 None => {
-                    return Err(self.struct_span_err(self.token.span, "expected a statement"));
+                    return Err(UnexpectedNonterminal::Statement(self.token.span)
+                               .into_diagnostic(&self.sess.span_diagnostic));
                 }
             },
             NonterminalKind::PatParam { .. } | NonterminalKind::PatWithOr { .. } => {
@@ -160,9 +164,10 @@ impl<'a> Parser<'a> {
                 token::NtIdent(ident, is_raw)
             }
             NonterminalKind::Ident => {
-                let token_str = pprust::token_to_string(&self.token);
-                let msg = &format!("expected ident, found {}", &token_str);
-                return Err(self.struct_span_err(self.token.span, msg));
+                return Err(UnexpectedNonterminal::Ident {
+                    span: self.token.span,
+                    token: self.token.clone(),
+                }.into_diagnostic(&self.sess.span_diagnostic));
             }
             NonterminalKind::Path => token::NtPath(
                 P(self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?),
@@ -175,9 +180,10 @@ impl<'a> Parser<'a> {
                 if self.check_lifetime() {
                     token::NtLifetime(self.expect_lifetime().ident)
                 } else {
-                    let token_str = pprust::token_to_string(&self.token);
-                    let msg = &format!("expected a lifetime, found `{}`", &token_str);
-                    return Err(self.struct_span_err(self.token.span, msg));
+                    return Err(UnexpectedNonterminal::Lifetime {
+                        span: self.token.span,
+                        token: self.token.clone(),
+                    }.into_diagnostic(&self.sess.span_diagnostic));
                 }
             }
         };
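
The `NonterminalKind` logic above backs `macro_rules!` fragment specifiers: `may_be_ident` and the per-kind token checks decide whether matching a given fragment should even be attempted at the current token, and `parse_nonterminal` then produces the interpolated token. A small illustration of the corresponding surface syntax (the macro and all names are invented for the example):

    // Each fragment specifier maps to a NonterminalKind that the parser must be
    // able to start parsing from the next token.
    macro_rules! demo {
        (item $i:item) => { $i };
        (block $b:block) => { $b };
        (stmt $s:stmt) => { $s; };
        (pat $p:pat) => { matches!(3, $p) };
        (lifetime $lt:lifetime) => {};
    }

    demo!(item fn produced() -> u32 { 7 });

    fn main() {
        assert_eq!(produced(), 7);
        assert_eq!(demo!(block { 1 + 1 }), 2);
        assert!(demo!(pat 0..=5));
        demo!(stmt let _unused = ());
    }
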
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 52c11b4e35f..fc9f1d1330a 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1,5 +1,14 @@
 use super::{ForceCollect, Parser, PathStyle, TrailingToken};
-use crate::errors::RemoveLet;
+use crate::errors::{
+    AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed,
+    DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt,
+    ExpectedCommaAfterPatternField, InclusiveRangeExtraEquals, InclusiveRangeMatchArrow,
+    InclusiveRangeNoEnd, InvalidMutInPattern, PatternOnWrongSideOfAt, RefMutOrderIncorrect,
+    RemoveLet, RepeatedMutInPattern, TopLevelOrPatternNotAllowed, TopLevelOrPatternNotAllowedSugg,
+    TrailingVertNotAllowed, UnexpectedLifetimeInPattern, UnexpectedVertVertBeforeFunctionParam,
+    UnexpectedVertVertInPattern,
+};
+use crate::fluent_generated as fluent;
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
 use rustc_ast::ptr::P;
@@ -9,15 +18,32 @@ use rustc_ast::{
     PatField, PatKind, Path, QSelf, RangeEnd, RangeSyntax,
 };
 use rustc_ast_pretty::pprust;
-use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
+use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult};
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::source_map::{respan, Span, Spanned};
 use rustc_span::symbol::{kw, sym, Ident};
+use thin_vec::{thin_vec, ThinVec};
 
-pub(super) type Expected = Option<&'static str>;
+#[derive(PartialEq, Copy, Clone)]
+pub enum Expected {
+    ParameterName,
+    ArgumentName,
+    Identifier,
+    BindingPattern,
+}
 
-/// `Expected` for function and lambda parameter patterns.
-pub(super) const PARAM_EXPECTED: Expected = Some("parameter name");
+impl Expected {
+    // FIXME(#100717): migrate users of this to proper localization
+    fn to_string_or_fallback(expected: Option<Expected>) -> &'static str {
+        match expected {
+            Some(Expected::ParameterName) => "parameter name",
+            Some(Expected::ArgumentName) => "argument name",
+            Some(Expected::Identifier) => "identifier",
+            Some(Expected::BindingPattern) => "binding pattern",
+            None => "pattern",
+        }
+    }
+}
 
 const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here";
 
@@ -54,13 +80,19 @@ enum EatOrResult {
     None,
 }
 
+/// The syntax location of a given pattern. Used for diagnostics.
+pub(super) enum PatternLocation {
+    LetBinding,
+    FunctionParameter,
+}
+
 impl<'a> Parser<'a> {
     /// Parses a pattern.
     ///
     /// Corresponds to `pat<no_top_alt>` in RFC 2535 and does not admit or-patterns
     /// at the top level. Used when parsing the parameters of lambda expressions,
     /// functions, function pointers, and `pat` macro fragments.
-    pub fn parse_pat_no_top_alt(&mut self, expected: Expected) -> PResult<'a, P<Pat>> {
+    pub fn parse_pat_no_top_alt(&mut self, expected: Option<Expected>) -> PResult<'a, P<Pat>> {
         self.parse_pat_with_range_pat(true, expected)
     }
 
@@ -74,7 +106,7 @@ impl<'a> Parser<'a> {
     /// simplify the grammar somewhat.
     pub fn parse_pat_allow_top_alt(
         &mut self,
-        expected: Expected,
+        expected: Option<Expected>,
         rc: RecoverComma,
         ra: RecoverColon,
         rt: CommaRecoveryMode,
@@ -86,7 +118,7 @@ impl<'a> Parser<'a> {
     /// recovered).
     fn parse_pat_allow_top_alt_inner(
         &mut self,
-        expected: Expected,
+        expected: Option<Expected>,
         rc: RecoverComma,
         ra: RecoverColon,
         rt: CommaRecoveryMode,
@@ -123,7 +155,7 @@ impl<'a> Parser<'a> {
                 // If there was a leading vert, treat this as an or-pattern. This improves
                 // diagnostics.
                 let span = leading_vert_span.to(self.prev_token.span);
-                return Ok((self.mk_pat(span, PatKind::Or(vec![first_pat])), trailing_vert));
+                return Ok((self.mk_pat(span, PatKind::Or(thin_vec![first_pat])), trailing_vert));
             }
 
             return Ok((first_pat, trailing_vert));
@@ -131,7 +163,7 @@ impl<'a> Parser<'a> {
 
         // Parse the patterns `p_1 | ... | p_n` where `n > 0`.
         let lo = leading_vert_span.unwrap_or(first_pat.span);
-        let mut pats = vec![first_pat];
+        let mut pats = thin_vec![first_pat];
         loop {
             match self.eat_or_separator(Some(lo)) {
                 EatOrResult::AteOr => {}
@@ -165,9 +197,9 @@ impl<'a> Parser<'a> {
     /// otherwise).
     pub(super) fn parse_pat_before_ty(
         &mut self,
-        expected: Expected,
+        expected: Option<Expected>,
         rc: RecoverComma,
-        syntax_loc: &str,
+        syntax_loc: PatternLocation,
     ) -> PResult<'a, (P<Pat>, bool)> {
         // We use `parse_pat_allow_top_alt` regardless of whether we actually want top-level
         // or-patterns so that we can detect when a user tries to use it. This allows us to print a
@@ -181,27 +213,41 @@ impl<'a> Parser<'a> {
         let colon = self.eat(&token::Colon);
 
         if let PatKind::Or(pats) = &pat.kind {
-            let msg = format!("top-level or-patterns are not allowed in {}", syntax_loc);
-            let (help, fix) = if pats.len() == 1 {
-                // If all we have is a leading vert, then print a special message. This is the case
-                // if `parse_pat_allow_top_alt` returns an or-pattern with one variant.
-                let msg = "remove the `|`";
-                let fix = pprust::pat_to_string(&pat);
-                (msg, fix)
-            } else {
-                let msg = "wrap the pattern in parentheses";
-                let fix = format!("({})", pprust::pat_to_string(&pat));
-                (msg, fix)
-            };
+            let span = pat.span;
 
             if trailing_vert {
                 // We already emitted an error and suggestion to remove the trailing vert. Don't
                 // emit again.
-                self.sess.span_diagnostic.delay_span_bug(pat.span, &msg);
+
+                // FIXME(#100717): pass `TopLevelOrPatternNotAllowed::* { sub: None }` to
+                // `delay_span_bug()` instead of fluent message
+                self.sess.span_diagnostic.delay_span_bug(
+                    span,
+                    match syntax_loc {
+                        PatternLocation::LetBinding => {
+                            fluent::parse_or_pattern_not_allowed_in_let_binding
+                        }
+                        PatternLocation::FunctionParameter => {
+                            fluent::parse_or_pattern_not_allowed_in_fn_parameters
+                        }
+                    },
+                );
             } else {
-                self.struct_span_err(pat.span, &msg)
-                    .span_suggestion(pat.span, help, fix, Applicability::MachineApplicable)
-                    .emit();
+                let pat = pprust::pat_to_string(&pat);
+                let sub = if pats.len() == 1 {
+                    Some(TopLevelOrPatternNotAllowedSugg::RemoveLeadingVert { span, pat })
+                } else {
+                    Some(TopLevelOrPatternNotAllowedSugg::WrapInParens { span, pat })
+                };
+
+                self.sess.emit_err(match syntax_loc {
+                    PatternLocation::LetBinding => {
+                        TopLevelOrPatternNotAllowed::LetBinding { span, sub }
+                    }
+                    PatternLocation::FunctionParameter => {
+                        TopLevelOrPatternNotAllowed::FunctionParameter { span, sub }
+                    }
+                });
             }
         }
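
This is the diagnostic a user hits when writing a top-level or-pattern in a `let` binding or a function parameter; at the time of this change the accepted spelling was the parenthesized form (or, for a lone leading `|`, simply removing it). For instance:

    // Accepted: in a fn parameter the or-pattern must be parenthesized.
    fn unwrap_either((Ok(n) | Err(n)): Result<u8, u8>) -> u8 {
        n
    }

    fn main() {
        let value: Result<u8, u8> = Ok(3);
        // let Ok(n) | Err(n) = value;   // error: top-level or-patterns are not allowed
        let (Ok(n) | Err(n)) = value;    // accepted: wrapped in parentheses
        assert_eq!(n, 3);
        assert_eq!(unwrap_either(Err(7)), 7);
    }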
 
@@ -218,15 +264,15 @@ impl<'a> Parser<'a> {
         // a leading `||` probably doesn't indicate an or-pattern attempt, so we handle that
         // separately.
         if let token::OrOr = self.token.kind {
-            let span = self.token.span;
-            let mut err = self.struct_span_err(span, "unexpected `||` before function parameter");
-            err.span_suggestion(span, "remove the `||`", "", Applicability::MachineApplicable);
-            err.note("alternatives in or-patterns are separated with `|`, not `||`");
-            err.emit();
+            self.sess.emit_err(UnexpectedVertVertBeforeFunctionParam { span: self.token.span });
             self.bump();
         }
 
-        self.parse_pat_before_ty(PARAM_EXPECTED, RecoverComma::No, "function parameters")
+        self.parse_pat_before_ty(
+            Some(Expected::ParameterName),
+            RecoverComma::No,
+            PatternLocation::FunctionParameter,
+        )
     }
 
     /// Eat the or-pattern `|` separator.
@@ -236,7 +282,7 @@ impl<'a> Parser<'a> {
             EatOrResult::TrailingVert
         } else if matches!(self.token.kind, token::OrOr) {
             // Found `||`; Recover and pretend we parsed `|`.
-            self.ban_unexpected_or_or(lo);
+            self.sess.emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
             self.bump();
             EatOrResult::AteOr
         } else if self.eat(&token::BinOp(token::Or)) {
@@ -270,7 +316,13 @@ impl<'a> Parser<'a> {
         });
         match (is_end_ahead, &self.token.kind) {
             (true, token::BinOp(token::Or) | token::OrOr) => {
-                self.ban_illegal_vert(lo, "trailing", "not allowed in an or-pattern");
+                // A `|` or possibly `||` token shouldn't be here. Ban it.
+                self.sess.emit_err(TrailingVertNotAllowed {
+                    span: self.token.span,
+                    start: lo,
+                    token: self.token.clone(),
+                    note_double_vert: matches!(self.token.kind, token::OrOr).then_some(()),
+                });
                 self.bump();
                 true
             }
@@ -278,46 +330,12 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// We have parsed `||` instead of `|`. Error and suggest `|` instead.
-    fn ban_unexpected_or_or(&mut self, lo: Option<Span>) {
-        let mut err = self.struct_span_err(self.token.span, "unexpected token `||` in pattern");
-        err.span_suggestion(
-            self.token.span,
-            "use a single `|` to separate multiple alternative patterns",
-            "|",
-            Applicability::MachineApplicable,
-        );
-        if let Some(lo) = lo {
-            err.span_label(lo, WHILE_PARSING_OR_MSG);
-        }
-        err.emit();
-    }
-
-    /// A `|` or possibly `||` token shouldn't be here. Ban it.
-    fn ban_illegal_vert(&mut self, lo: Option<Span>, pos: &str, ctx: &str) {
-        let span = self.token.span;
-        let mut err = self.struct_span_err(span, &format!("a {} `|` is {}", pos, ctx));
-        err.span_suggestion(
-            span,
-            &format!("remove the `{}`", pprust::token_to_string(&self.token)),
-            "",
-            Applicability::MachineApplicable,
-        );
-        if let Some(lo) = lo {
-            err.span_label(lo, WHILE_PARSING_OR_MSG);
-        }
-        if let token::OrOr = self.token.kind {
-            err.note("alternatives in or-patterns are separated with `|`, not `||`");
-        }
-        err.emit();
-    }
-
     /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are
     /// allowed).
     fn parse_pat_with_range_pat(
         &mut self,
         allow_range_pat: bool,
-        expected: Expected,
+        expected: Option<Expected>,
     ) -> PResult<'a, P<Pat>> {
         maybe_recover_from_interpolated_ty_qpath!(self, true);
         maybe_whole!(self, NtPat, |x| x);
@@ -330,6 +348,10 @@ impl<'a> Parser<'a> {
             lo = self.token.span;
         }
 
+        if self.is_lit_bad_ident() {
+            return Err(self.expected_ident_found());
+        }
+
         let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd {
             self.parse_pat_deref(expected)?
         } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
@@ -411,15 +433,19 @@ impl<'a> Parser<'a> {
         {
             // Recover a `'a` as a `'a'` literal
             let lt = self.expect_lifetime();
-            let lit = self.recover_unclosed_char(lt.ident, |self_| {
-                let expected = expected.unwrap_or("pattern");
-                let msg =
-                    format!("expected {}, found {}", expected, super::token_descr(&self_.token));
+            let (lit, _) =
+                self.recover_unclosed_char(lt.ident, Parser::mk_token_lit_char, |self_| {
+                    let expected = Expected::to_string_or_fallback(expected);
+                    let msg = format!(
+                        "expected {}, found {}",
+                        expected,
+                        super::token_descr(&self_.token)
+                    );
 
-                let mut err = self_.struct_span_err(self_.token.span, &msg);
-                err.span_label(self_.token.span, format!("expected {}", expected));
-                err
-            });
+                    let mut err = self_.struct_span_err(self_.token.span, &msg);
+                    err.span_label(self_.token.span, format!("expected {}", expected));
+                    err
+                });
             PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
         } else {
             // Try to parse everything else as literal with optional minus
@@ -450,22 +476,14 @@ impl<'a> Parser<'a> {
         self.bump(); // `...`
 
         // The user probably mistook `...` for a rest pattern `..`.
-        self.struct_span_err(lo, "unexpected `...`")
-            .span_label(lo, "not a valid pattern")
-            .span_suggestion_short(
-                lo,
-                "for a rest pattern, use `..` instead of `...`",
-                "..",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.sess.emit_err(DotDotDotRestPattern { span: lo });
         PatKind::Rest
     }
 
     /// Try to recover the more general form `intersect ::= $pat_lhs @ $pat_rhs`.
     ///
     /// Allowed binding patterns generated by `binding ::= ref? mut? $ident @ $pat_rhs`
-    /// should already have been parsed by now  at this point,
+    /// should already have been parsed by now;
     /// if the next token is `@` then we can try to parse the more general form.
     ///
     /// Consult `parse_pat_ident` for the `binding` grammar.
@@ -483,42 +501,32 @@ impl<'a> Parser<'a> {
         // At this point we attempt to parse `@ $pat_rhs` and emit an error.
         self.bump(); // `@`
         let mut rhs = self.parse_pat_no_top_alt(None)?;
-        let sp = lhs.span.to(rhs.span);
+        let whole_span = lhs.span.to(rhs.span);
 
-        if let PatKind::Ident(_, _, ref mut sub @ None) = rhs.kind {
+        if let PatKind::Ident(_, _, sub @ None) = &mut rhs.kind {
             // The user inverted the order, so help them fix that.
-            let mut applicability = Applicability::MachineApplicable;
-            // FIXME(bindings_after_at): Remove this code when stabilizing the feature.
-            lhs.walk(&mut |p| match p.kind {
-                // `check_match` is unhappy if the subpattern has a binding anywhere.
-                PatKind::Ident(..) => {
-                    applicability = Applicability::MaybeIncorrect;
-                    false // Short-circuit.
-                }
-                _ => true,
-            });
-
             let lhs_span = lhs.span;
             // Move the LHS into the RHS as a subpattern.
             // The RHS is now the full pattern.
             *sub = Some(lhs);
 
-            self.struct_span_err(sp, "pattern on wrong side of `@`")
-                .span_label(lhs_span, "pattern on the left, should be on the right")
-                .span_label(rhs.span, "binding on the right, should be on the left")
-                .span_suggestion(sp, "switch the order", pprust::pat_to_string(&rhs), applicability)
-                .emit();
+            self.sess.emit_err(PatternOnWrongSideOfAt {
+                whole_span,
+                whole_pat: pprust::pat_to_string(&rhs),
+                pattern: lhs_span,
+                binding: rhs.span,
+            });
         } else {
             // The special case above doesn't apply so we may have e.g. `A(x) @ B(y)`.
             rhs.kind = PatKind::Wild;
-            self.struct_span_err(sp, "left-hand side of `@` must be a binding")
-                .span_label(lhs.span, "interpreted as a pattern, not a binding")
-                .span_label(rhs.span, "also a pattern")
-                .note("bindings are `x`, `mut x`, `ref x`, and `ref mut x`")
-                .emit();
+            self.sess.emit_err(ExpectedBindingLeftOfAt {
+                whole_span,
+                lhs: lhs.span,
+                rhs: rhs.span,
+            });
         }
 
-        rhs.span = sp;
+        rhs.span = whole_span;
         Ok(rhs)
     }
 
@@ -533,35 +541,23 @@ impl<'a> Parser<'a> {
             _ => return,
         }
 
-        self.struct_span_err(pat.span, "the range pattern here has ambiguous interpretation")
-            .span_suggestion(
-                pat.span,
-                "add parentheses to clarify the precedence",
-                format!("({})", pprust::pat_to_string(&pat)),
-                // "ambiguous interpretation" implies that we have to be guessing
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+        self.sess
+            .emit_err(AmbiguousRangePattern { span: pat.span, pat: pprust::pat_to_string(&pat) });
     }
 
     /// Parse `&pat` / `&mut pat`.
-    fn parse_pat_deref(&mut self, expected: Expected) -> PResult<'a, PatKind> {
+    fn parse_pat_deref(&mut self, expected: Option<Expected>) -> PResult<'a, PatKind> {
         self.expect_and()?;
-        self.recover_lifetime_in_deref_pat();
-        let mutbl = self.parse_mutability();
-        let subpat = self.parse_pat_with_range_pat(false, expected)?;
-        Ok(PatKind::Ref(subpat, mutbl))
-    }
-
-    fn recover_lifetime_in_deref_pat(&mut self) {
         if let token::Lifetime(name) = self.token.kind {
             self.bump(); // `'a`
 
-            let span = self.prev_token.span;
-            self.struct_span_err(span, &format!("unexpected lifetime `{}` in pattern", name))
-                .span_suggestion(span, "remove the lifetime", "", Applicability::MachineApplicable)
-                .emit();
+            self.sess
+                .emit_err(UnexpectedLifetimeInPattern { span: self.prev_token.span, symbol: name });
         }
+
+        let mutbl = self.parse_mutability();
+        let subpat = self.parse_pat_with_range_pat(false, expected)?;
+        Ok(PatKind::Ref(subpat, mutbl))
     }
 
     /// Parse a tuple or parenthesis pattern.
@@ -589,20 +585,21 @@ impl<'a> Parser<'a> {
         let mut_span = self.prev_token.span;
 
         if self.eat_keyword(kw::Ref) {
-            return self.recover_mut_ref_ident(mut_span);
+            self.sess.emit_err(RefMutOrderIncorrect { span: mut_span.to(self.prev_token.span) });
+            return self.parse_pat_ident(BindingAnnotation::REF_MUT);
         }
 
         self.recover_additional_muts();
 
         // Make sure we don't allow e.g. `let mut $p;` where `$p:pat`.
-        if let token::Interpolated(ref nt) = self.token.kind {
+        if let token::Interpolated(nt) = &self.token.kind {
             if let token::NtPat(_) = **nt {
                 self.expected_ident_found().emit();
             }
         }
 
         // Parse the pattern we hope to be an identifier.
-        let mut pat = self.parse_pat_no_top_alt(Some("identifier"))?;
+        let mut pat = self.parse_pat_no_top_alt(Some(Expected::Identifier))?;
 
         // If we don't have `mut $ident (@ pat)?`, error.
         if let PatKind::Ident(BindingAnnotation(ByRef::No, m @ Mutability::Not), ..) = &mut pat.kind
@@ -619,22 +616,6 @@ impl<'a> Parser<'a> {
         Ok(pat.into_inner().kind)
     }
 
-    /// Recover on `mut ref? ident @ pat` and suggest
-    /// that the order of `mut` and `ref` is incorrect.
-    fn recover_mut_ref_ident(&mut self, lo: Span) -> PResult<'a, PatKind> {
-        let mutref_span = lo.to(self.prev_token.span);
-        self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
-            .span_suggestion(
-                mutref_span,
-                "try switching the order",
-                "ref mut",
-                Applicability::MachineApplicable,
-            )
-            .emit();
-
-        self.parse_pat_ident(BindingAnnotation::REF_MUT)
-    }
-
     /// Turn all by-value immutable bindings in a pattern into mutable bindings.
     /// Returns `true` if any change was made.
     fn make_all_value_bindings_mutable(pat: &mut P<Pat>) -> bool {
@@ -659,16 +640,13 @@ impl<'a> Parser<'a> {
     /// Error on `mut $pat` where `$pat` is not an ident.
     fn ban_mut_general_pat(&self, lo: Span, pat: &Pat, changed_any_binding: bool) {
         let span = lo.to(pat.span);
-        let fix = pprust::pat_to_string(&pat);
-        let (problem, suggestion) = if changed_any_binding {
-            ("`mut` must be attached to each individual binding", "add `mut` to each binding")
+        let pat = pprust::pat_to_string(&pat);
+
+        self.sess.emit_err(if changed_any_binding {
+            InvalidMutInPattern::NestedIdent { span, pat }
         } else {
-            ("`mut` must be followed by a named binding", "remove the `mut` prefix")
-        };
-        self.struct_span_err(span, problem)
-            .span_suggestion(span, suggestion, fix, Applicability::MachineApplicable)
-            .note("`mut` may be followed by `variable` and `variable @ pattern`")
-            .emit();
+            InvalidMutInPattern::NonIdent { span, pat }
+        });
     }
 
     /// Eat any extraneous `mut`s and error + recover if we ate any.
@@ -679,21 +657,13 @@ impl<'a> Parser<'a> {
             return;
         }
 
-        let span = lo.to(self.prev_token.span);
-        self.struct_span_err(span, "`mut` on a binding may not be repeated")
-            .span_suggestion(
-                span,
-                "remove the additional `mut`s",
-                "",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        self.sess.emit_err(RepeatedMutInPattern { span: lo.to(self.prev_token.span) });
     }
 
     /// Parse macro invocation
     fn parse_pat_mac_invoc(&mut self, path: Path) -> PResult<'a, PatKind> {
         self.bump();
-        let args = self.parse_mac_args()?;
+        let args = self.parse_delim_args()?;
         let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
         Ok(PatKind::MacCall(mac))
     }
@@ -701,11 +671,11 @@ impl<'a> Parser<'a> {
     fn fatal_unexpected_non_pat(
         &mut self,
         err: DiagnosticBuilder<'a, ErrorGuaranteed>,
-        expected: Expected,
+        expected: Option<Expected>,
     ) -> PResult<'a, P<Pat>> {
         err.cancel();
 
-        let expected = expected.unwrap_or("pattern");
+        let expected = Expected::to_string_or_fallback(expected);
         let msg = format!("expected {}, found {}", expected, super::token_descr(&self.token));
 
         let mut err = self.struct_span_err(self.token.span, &msg);
@@ -747,65 +717,53 @@ impl<'a> Parser<'a> {
             // Parsing e.g. `X..`.
             if let RangeEnd::Included(_) = re.node {
                 // FIXME(Centril): Consider semantic errors instead in `ast_validation`.
-                self.inclusive_range_with_incorrect_end(re.span);
+                self.inclusive_range_with_incorrect_end();
             }
             None
         };
         Ok(PatKind::Range(Some(begin), end, re))
     }
 
-    pub(super) fn inclusive_range_with_incorrect_end(&mut self, span: Span) {
+    pub(super) fn inclusive_range_with_incorrect_end(&mut self) {
         let tok = &self.token;
-
+        let span = self.prev_token.span;
         // If the user typed "..==" instead of "..=", we want to give them
         // a specific error message telling them to use "..=".
+        // If they typed "..=>", suggest they use ".. =>".
         // Otherwise, we assume that they meant to type a half open exclusive
         // range and give them an error telling them to do that instead.
-        if matches!(tok.kind, token::Eq) && tok.span.lo() == span.hi() {
-            let span_with_eq = span.to(tok.span);
+        let no_space = tok.span.lo() == span.hi();
+        match tok.kind {
+            token::Eq if no_space => {
+                let span_with_eq = span.to(tok.span);
 
-            // Ensure the user doesn't receive unhelpful unexpected token errors
-            self.bump();
-            if self.is_pat_range_end_start(0) {
-                let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
-            }
+                // Ensure the user doesn't receive unhelpful unexpected token errors
+                self.bump();
+                if self.is_pat_range_end_start(0) {
+                    let _ = self.parse_pat_range_end().map_err(|e| e.cancel());
+                }
 
-            self.error_inclusive_range_with_extra_equals(span_with_eq);
-        } else {
-            self.error_inclusive_range_with_no_end(span);
+                self.sess.emit_err(InclusiveRangeExtraEquals { span: span_with_eq });
+            }
+            token::Gt if no_space => {
+                let after_pat = span.with_hi(span.hi() - rustc_span::BytePos(1)).shrink_to_hi();
+                self.sess.emit_err(InclusiveRangeMatchArrow { span, arrow: tok.span, after_pat });
+            }
+            _ => {
+                self.sess.emit_err(InclusiveRangeNoEnd { span });
+            }
         }
     }
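
The rewritten recovery distinguishes three ways an inclusive range pattern can go wrong after `..=`: an extra `=`, a match arrow glued onto `..=`, and a genuinely missing end bound (E0586). Shown as the source a user might write (the commented-out arms do not compile):

    fn main() {
        let n = 5u32;
        let desc = match n {
            // 0..==9 => "digit",   // error: unexpected `=` after inclusive range; use `..=`
            // 0..=> "digit",       // error: `..=` runs into `=>`; write `0.. =>` instead
            // 10..= => "big",      // error[E0586]: inclusive range with no end
            0..=9 => "digit",       // accepted inclusive range pattern
            _ => "larger",
        };
        assert_eq!(desc, "digit");
    }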
 
-    fn error_inclusive_range_with_extra_equals(&self, span: Span) {
-        self.struct_span_err(span, "unexpected `=` after inclusive range")
-            .span_suggestion_short(span, "use `..=` instead", "..=", Applicability::MaybeIncorrect)
-            .note("inclusive ranges end with a single equals sign (`..=`)")
-            .emit();
-    }
-
-    fn error_inclusive_range_with_no_end(&self, span: Span) {
-        struct_span_err!(self.sess.span_diagnostic, span, E0586, "inclusive range with no end")
-            .span_suggestion_short(span, "use `..` instead", "..", Applicability::MachineApplicable)
-            .note("inclusive ranges must be bounded at the end (`..=b` or `a..=b`)")
-            .emit();
-    }
-
     /// Parse a range-to pattern, `..X` or `..=X` where `X` remains to be parsed.
     ///
     /// The form `...X` is prohibited to reduce confusion with the potential
     /// expression syntax `...expr` for splatting in expressions.
     fn parse_pat_range_to(&mut self, mut re: Spanned<RangeEnd>) -> PResult<'a, PatKind> {
         let end = self.parse_pat_range_end()?;
-        if let RangeEnd::Included(ref mut syn @ RangeSyntax::DotDotDot) = &mut re.node {
+        if let RangeEnd::Included(syn @ RangeSyntax::DotDotDot) = &mut re.node {
             *syn = RangeSyntax::DotDotEq;
-            self.struct_span_err(re.span, "range-to patterns with `...` are not allowed")
-                .span_suggestion_short(
-                    re.span,
-                    "use `..=` instead",
-                    "..=",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            self.sess.emit_err(DotDotDotRangeToPatternNotAllowed { span: re.span });
         }
         Ok(PatKind::Range(None, Some(end), re))
     }
@@ -870,7 +828,7 @@ impl<'a> Parser<'a> {
     fn parse_pat_ident(&mut self, binding_annotation: BindingAnnotation) -> PResult<'a, PatKind> {
         let ident = self.parse_ident()?;
         let sub = if self.eat(&token::At) {
-            Some(self.parse_pat_no_top_alt(Some("binding pattern"))?)
+            Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern))?)
         } else {
             None
         };
@@ -881,15 +839,15 @@ impl<'a> Parser<'a> {
         // binding mode then we do not end up here, because the lookahead
         // will direct us over to `parse_enum_variant()`.
         if self.token == token::OpenDelim(Delimiter::Parenthesis) {
-            return Err(self
-                .struct_span_err(self.prev_token.span, "expected identifier, found enum pattern"));
+            return Err(EnumPatternInsteadOfIdentifier { span: self.prev_token.span }
+                .into_diagnostic(&self.sess.span_diagnostic));
         }
 
         Ok(PatKind::Ident(binding_annotation, ident, sub))
     }
 
     /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`).
-    fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+    fn parse_pat_struct(&mut self, qself: Option<P<QSelf>>, path: Path) -> PResult<'a, PatKind> {
         if qself.is_some() {
             // Feature gate the use of qualified paths in patterns
             self.sess.gated_spans.gate(sym::more_qualified_paths, path.span);
@@ -899,14 +857,18 @@ impl<'a> Parser<'a> {
             e.span_label(path.span, "while parsing the fields for this pattern");
             e.emit();
             self.recover_stmt();
-            (vec![], true)
+            (ThinVec::new(), true)
         });
         self.bump();
         Ok(PatKind::Struct(qself, path, fields, etc))
     }
 
     /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`).
-    fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> {
+    fn parse_pat_tuple_struct(
+        &mut self,
+        qself: Option<P<QSelf>>,
+        path: Path,
+    ) -> PResult<'a, PatKind> {
         let (fields, _) = self.parse_paren_comma_seq(|p| {
             p.parse_pat_allow_top_alt(
                 None,
@@ -960,7 +922,7 @@ impl<'a> Parser<'a> {
             // We cannot use `parse_pat_ident()` since it will complain `box`
             // is not an identifier.
             let sub = if self.eat(&token::At) {
-                Some(self.parse_pat_no_top_alt(Some("binding pattern"))?)
+                Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern))?)
             } else {
                 None
             };
@@ -974,8 +936,8 @@ impl<'a> Parser<'a> {
     }
 
     /// Parses the fields of a struct-like pattern.
-    fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<PatField>, bool)> {
-        let mut fields = Vec::new();
+    fn parse_pat_fields(&mut self) -> PResult<'a, (ThinVec<PatField>, bool)> {
+        let mut fields = ThinVec::new();
         let mut etc = false;
         let mut ate_comma = true;
         let mut delayed_err: Option<DiagnosticBuilder<'a, ErrorGuaranteed>> = None;
@@ -995,7 +957,8 @@ impl<'a> Parser<'a> {
 
             // check that a comma comes after every field
             if !ate_comma {
-                let err = self.struct_span_err(self.token.span, "expected `,`");
+                let err = ExpectedCommaAfterPatternField { span: self.token.span }
+                    .into_diagnostic(&self.sess.span_diagnostic);
                 if let Some(mut delayed) = delayed_err {
                     delayed.emit();
                 }
@@ -1003,12 +966,15 @@ impl<'a> Parser<'a> {
             }
             ate_comma = false;
 
-            if self.check(&token::DotDot) || self.token == token::DotDotDot {
+            if self.check(&token::DotDot)
+                || self.check_noexpect(&token::DotDotDot)
+                || self.check_keyword(kw::Underscore)
+            {
                 etc = true;
                 let mut etc_sp = self.token.span;
 
-                self.recover_one_fewer_dotdot();
-                self.bump(); // `..` || `...`
+                self.recover_bad_dot_dot();
+                self.bump(); // `..` || `...` || `_`
 
                 if self.token == token::CloseDelim(Delimiter::Brace) {
                     etc_span = Some(etc_sp);
@@ -1101,21 +1067,15 @@ impl<'a> Parser<'a> {
         Ok((fields, etc))
     }
 
-    /// Recover on `...` as if it were `..` to avoid further errors.
+    /// Recover on `...` or `_` as if it were `..` to avoid further errors.
     /// See issue #46718.
-    fn recover_one_fewer_dotdot(&self) {
-        if self.token != token::DotDotDot {
+    fn recover_bad_dot_dot(&self) {
+        if self.token == token::DotDot {
             return;
         }
 
-        self.struct_span_err(self.token.span, "expected field pattern, found `...`")
-            .span_suggestion(
-                self.token.span,
-                "to omit remaining fields, use one fewer `.`",
-                "..",
-                Applicability::MachineApplicable,
-            )
-            .emit();
+        let token_str = pprust::token_to_string(&self.token);
+        self.sess.emit_err(DotDotDotForRemainingFields { span: self.token.span, token_str });
     }
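
`recover_bad_dot_dot` now also accepts `_` (as well as `...`) where the rest pattern `..` was meant, emitting the error but continuing to parse. The accepted syntax is:

    #[allow(dead_code)] // `y` and `z` are only skipped by `..` below
    struct Point { x: i32, y: i32, z: i32 }

    fn main() {
        let p = Point { x: 1, y: 2, z: 3 };
        // let Point { x, ... } = p;  // error: recovered as `..`, with a suggestion
        // let Point { x, _ } = p;    // error: likewise recovered as `..`
        let Point { x, .. } = p;      // accepted: `..` ignores the remaining fields
        assert_eq!(x, 1);
    }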
 
     fn parse_pat_field(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, PatField> {
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index fdc1af27f82..b50d2984a4e 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -11,8 +11,9 @@ use rustc_ast::{
 use rustc_errors::{pluralize, Applicability, PResult};
 use rustc_span::source_map::{BytePos, Span};
 use rustc_span::symbol::{kw, sym, Ident};
-
 use std::mem;
+use thin_vec::ThinVec;
+use tracing::debug;
 
 /// Specifies how to parse a path.
 #[derive(Copy, Clone, PartialEq)]
@@ -48,7 +49,7 @@ impl<'a> Parser<'a> {
     /// `<T as U>::a`
     /// `<T as U>::F::a<S>` (without disambiguator)
     /// `<T as U>::F::a::<S>` (with disambiguator)
-    pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (QSelf, Path)> {
+    pub(super) fn parse_qpath(&mut self, style: PathStyle) -> PResult<'a, (P<QSelf>, Path)> {
         let lo = self.prev_token.span;
         let ty = self.parse_ty()?;
 
@@ -63,7 +64,7 @@ impl<'a> Parser<'a> {
             path_span = path_lo.to(self.prev_token.span);
         } else {
             path_span = self.token.span.to(self.token.span);
-            path = ast::Path { segments: Vec::new(), span: path_span, tokens: None };
+            path = ast::Path { segments: ThinVec::new(), span: path_span, tokens: None };
         }
 
         // See doc comment for `unmatched_angle_bracket_count`.
@@ -77,7 +78,7 @@ impl<'a> Parser<'a> {
             self.expect(&token::ModSep)?;
         }
 
-        let qself = QSelf { ty, path_span, position: path.segments.len() };
+        let qself = P(QSelf { ty, path_span, position: path.segments.len() });
         self.parse_path_segments(&mut path.segments, style, None)?;
 
         Ok((
@@ -179,7 +180,7 @@ impl<'a> Parser<'a> {
         }
 
         let lo = self.token.span;
-        let mut segments = Vec::new();
+        let mut segments = ThinVec::new();
         let mod_sep_ctxt = self.token.span.ctxt();
         if self.eat(&token::ModSep) {
             segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
@@ -191,7 +192,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn parse_path_segments(
         &mut self,
-        segments: &mut Vec<PathSegment>,
+        segments: &mut ThinVec<PathSegment>,
         style: PathStyle,
         ty_generics: Option<&Generics>,
     ) -> PResult<'a, ()> {
@@ -276,8 +277,7 @@ impl<'a> Parser<'a> {
                         if let Some(arg) = args
                             .iter()
                             .rev()
-                            .skip_while(|arg| matches!(arg, AngleBracketedArg::Constraint(_)))
-                            .next()
+                            .find(|arg| !matches!(arg, AngleBracketedArg::Constraint(_)))
                         {
                             err.span_suggestion_verbose(
                                 arg.span().shrink_to_hi(),
@@ -332,7 +332,7 @@ impl<'a> Parser<'a> {
         style: PathStyle,
         lo: Span,
         ty_generics: Option<&Generics>,
-    ) -> PResult<'a, Vec<AngleBracketedArg>> {
+    ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
         // We need to detect whether there are extra leading left angle brackets and produce an
         // appropriate error and suggestion. This cannot be implemented by looking ahead at
         // upcoming tokens for a matching `>` character - if there are unmatched `<` tokens
@@ -404,7 +404,7 @@ impl<'a> Parser<'a> {
 
         let is_first_invocation = style == PathStyle::Expr;
         // Take a snapshot before attempting to parse - we can restore this later.
-        let snapshot = if is_first_invocation { Some(self.clone()) } else { None };
+        let snapshot = is_first_invocation.then(|| self.clone());
 
         debug!("parse_generic_args_with_leading_angle_bracket_recovery: (snapshotting)");
         match self.parse_angle_args(ty_generics) {
@@ -472,8 +472,8 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_angle_args(
         &mut self,
         ty_generics: Option<&Generics>,
-    ) -> PResult<'a, Vec<AngleBracketedArg>> {
-        let mut args = Vec::new();
+    ) -> PResult<'a, ThinVec<AngleBracketedArg>> {
+        let mut args = ThinVec::new();
         while let Some(arg) = self.parse_angle_arg(ty_generics)? {
             args.push(arg);
             if !self.eat(&token::Comma) {
@@ -631,7 +631,9 @@ impl<'a> Parser<'a> {
     /// - A single-segment path.
     pub(super) fn expr_is_valid_const_arg(&self, expr: &P<rustc_ast::Expr>) -> bool {
         match &expr.kind {
-            ast::ExprKind::Block(_, _) | ast::ExprKind::Lit(_) => true,
+            ast::ExprKind::Block(_, _)
+            | ast::ExprKind::Lit(_)
+            | ast::ExprKind::IncludedBytes(..) => true,
             ast::ExprKind::Unary(ast::UnOp::Neg, expr) => {
                 matches!(expr.kind, ast::ExprKind::Lit(_))
             }
@@ -651,7 +653,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_const_arg(&mut self) -> PResult<'a, AnonConst> {
         // Parse const argument.
         let value = if let token::OpenDelim(Delimiter::Brace) = self.token.kind {
-            self.parse_block_expr(None, self.token.span, BlockCheckMode::Default)?
+            self.parse_expr_block(None, self.token.span, BlockCheckMode::Default)?
         } else {
             self.handle_unambiguous_unbraced_const_arg()?
         };
@@ -673,22 +675,42 @@ impl<'a> Parser<'a> {
             GenericArg::Const(self.parse_const_arg()?)
         } else if self.check_type() {
             // Parse type argument.
-            let is_const_fn =
-                self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Parenthesis));
-            let mut snapshot = self.create_snapshot_for_diagnostic();
+
+            // Proactively create a parser snapshot enabling us to rewind and try to reparse the
+            // input as a const expression in case we fail to parse a type. If we successfully
+            // do so, we will report an error that it needs to be wrapped in braces.
+            let mut snapshot = None;
+            if self.may_recover() && self.token.can_begin_expr() {
+                snapshot = Some(self.create_snapshot_for_diagnostic());
+            }
+
             match self.parse_ty() {
-                Ok(ty) => GenericArg::Type(ty),
+                Ok(ty) => {
+                    // Since the type parser recovers from some malformed slice and array types and
+                    // successfully returns a type, we need to look for `TyKind::Err`s in the
+                    // type to determine if error recovery has occurred and if the input is not a
+                    // syntactically valid type after all.
+                    if let ast::TyKind::Slice(inner_ty) | ast::TyKind::Array(inner_ty, _) = &ty.kind
+                        && let ast::TyKind::Err = inner_ty.kind
+                        && let Some(snapshot) = snapshot
+                        && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+                    {
+                        return Ok(Some(self.dummy_const_arg_needs_braces(
+                            self.struct_span_err(expr.span, "invalid const generic expression"),
+                            expr.span,
+                        )));
+                    }
+
+                    GenericArg::Type(ty)
+                }
                 Err(err) => {
-                    if is_const_fn {
-                        match (*snapshot).parse_expr_res(Restrictions::CONST_EXPR, None) {
-                            Ok(expr) => {
-                                self.restore_snapshot(snapshot);
-                                return Ok(Some(self.dummy_const_arg_needs_braces(err, expr.span)));
-                            }
-                            Err(err) => {
-                                err.cancel();
-                            }
-                        }
+                    if let Some(snapshot) = snapshot
+                        && let Some(expr) = self.recover_unbraced_const_arg_that_can_begin_ty(snapshot)
+                    {
+                        return Ok(Some(self.dummy_const_arg_needs_braces(
+                            err,
+                            expr.span,
+                        )));
                     }
                     // Try to recover from possible `const` arg without braces.
                     return self.recover_const_arg(start, err).map(Some);
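
A small illustration (not from the patch; `takes_n` is a made-up helper) of the unbraced const-argument case that the snapshot-and-reparse logic above reports; the accepted spelling wraps the expression in braces, as the diagnostic suggests:

    fn takes_n<const N: usize>() -> usize { N }

    fn main() {
        // `takes_n::<1 + 1>()` is reported as an "invalid const generic expression"
        // with a suggestion to add braces; the braced form is valid:
        let n = takes_n::<{ 1 + 1 }>();
        assert_eq!(n, 2);
    }
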
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 12753c6785c..fbe5b88c49e 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -1,32 +1,27 @@
 use super::attr::InnerAttrForbiddenReason;
 use super::diagnostics::AttemptLocalParseRecovery;
 use super::expr::LhsExpr;
-use super::pat::RecoverComma;
+use super::pat::{PatternLocation, RecoverComma};
 use super::path::PathStyle;
 use super::TrailingToken;
 use super::{
     AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode,
 };
-use crate::errors::{
-    AssignmentElseNotAllowed, CompoundAssignmentExpressionInLet, ConstLetMutuallyExclusive,
-    DocCommentDoesNotDocumentAnything, ExpectedStatementAfterOuterAttr, InvalidCurlyInLetElse,
-    InvalidExpressionInLetElse, InvalidVariableDeclaration, InvalidVariableDeclarationSub,
-    WrapExpressionInParentheses,
-};
+use crate::errors;
 use crate::maybe_whole;
 
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, TokenKind};
 use rustc_ast::util::classify;
-use rustc_ast::{AttrStyle, AttrVec, Attribute, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
+use rustc_ast::{AttrStyle, AttrVec, LocalKind, MacCall, MacCallStmt, MacStmtStyle};
 use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, HasAttrs, Local, Stmt};
 use rustc_ast::{StmtKind, DUMMY_NODE_ID};
 use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult};
 use rustc_span::source_map::{BytePos, Span};
 use rustc_span::symbol::{kw, sym};
-
 use std::mem;
+use thin_vec::{thin_vec, ThinVec};
 
 impl<'a> Parser<'a> {
     /// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -64,32 +59,45 @@ impl<'a> Parser<'a> {
         if self.token.is_keyword(kw::Mut) && self.is_keyword_ahead(1, &[kw::Let]) {
             self.bump();
             let mut_let_span = lo.to(self.token.span);
-            self.sess.emit_err(InvalidVariableDeclaration {
+            self.sess.emit_err(errors::InvalidVariableDeclaration {
                 span: mut_let_span,
-                sub: InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
+                sub: errors::InvalidVariableDeclarationSub::SwitchMutLetOrder(mut_let_span),
             });
         }
 
         Ok(Some(if self.token.is_keyword(kw::Let) {
             self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
-        } else if self.is_kw_followed_by_ident(kw::Mut) {
-            self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::MissingLet)?
-        } else if self.is_kw_followed_by_ident(kw::Auto) {
+        } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::MissingLet,
+            )?
+        } else if self.is_kw_followed_by_ident(kw::Auto) && self.may_recover() {
             self.bump(); // `auto`
-            self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::UseLetNotAuto)?
-        } else if self.is_kw_followed_by_ident(sym::var) {
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::UseLetNotAuto,
+            )?
+        } else if self.is_kw_followed_by_ident(sym::var) && self.may_recover() {
             self.bump(); // `var`
-            self.recover_stmt_local(lo, attrs, InvalidVariableDeclarationSub::UseLetNotVar)?
+            self.recover_stmt_local_after_let(
+                lo,
+                attrs,
+                errors::InvalidVariableDeclarationSub::UseLetNotVar,
+            )?
         } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
             // We have avoided contextual keywords like `union`, items with `crate` visibility,
             // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
            // that starts like a path (1 token) but is in fact not a path.
             // Also, we avoid stealing syntax from `parse_item_`.
-            if force_collect == ForceCollect::Yes {
-                self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))
-            } else {
-                self.parse_stmt_path_start(lo, attrs)
-            }?
+            match force_collect {
+                ForceCollect::Yes => {
+                    self.collect_tokens_no_attrs(|this| this.parse_stmt_path_start(lo, attrs))?
+                }
+                ForceCollect::No => self.parse_stmt_path_start(lo, attrs)?,
+            }
         } else if let Some(item) = self.parse_item_common(
             attrs.clone(),
             false,
@@ -101,26 +109,25 @@ impl<'a> Parser<'a> {
             self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
         } else if self.eat(&token::Semi) {
             // Do not attempt to parse an expression if we're done here.
-            self.error_outer_attrs(&attrs.take_for_recovery());
+            self.error_outer_attrs(attrs);
             self.mk_stmt(lo, StmtKind::Empty)
         } else if self.token != token::CloseDelim(Delimiter::Brace) {
             // Remainder are line-expr stmts.
-            let e = if force_collect == ForceCollect::Yes {
-                self.collect_tokens_no_attrs(|this| {
+            let e = match force_collect {
+                ForceCollect::Yes => self.collect_tokens_no_attrs(|this| {
                     this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
-                })
-            } else {
-                self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))
-            }?;
+                })?,
+                ForceCollect::No => self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs))?,
+            };
             if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
                 let bl = self.parse_block()?;
                 // Destructuring assignment ... else.
                 // This is not allowed, but point it out in a nice way.
-                self.sess.emit_err(AssignmentElseNotAllowed { span: e.span.to(bl.span) });
+                self.sess.emit_err(errors::AssignmentElseNotAllowed { span: e.span.to(bl.span) });
             }
             self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
         } else {
-            self.error_outer_attrs(&attrs.take_for_recovery());
+            self.error_outer_attrs(attrs);
             return Ok(None);
         }))
     }
@@ -139,14 +146,14 @@ impl<'a> Parser<'a> {
             }
 
             let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
-                this.parse_struct_expr(None, path, true)?
+                this.parse_expr_struct(None, path, true)?
             } else {
                 let hi = this.prev_token.span;
                 this.mk_expr(lo.to(hi), ExprKind::Path(None, path))
             };
 
             let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
-                this.parse_dot_or_call_expr_with(expr, lo, attrs)
+                this.parse_expr_dot_or_call_with(expr, lo, attrs)
             })?;
             // `DUMMY_SP` will get overwritten later in this function
             Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), TrailingToken::None))
@@ -156,7 +163,10 @@ impl<'a> Parser<'a> {
             // Perform this outside of the `collect_tokens_trailing_token` closure,
             // since our outer attributes do not apply to this part of the expression
             let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
-                this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
+                this.parse_expr_assoc_with(
+                    0,
+                    LhsExpr::AlreadyParsed { expr, starts_statement: true },
+                )
             })?;
             Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
         } else {
@@ -167,14 +177,13 @@ impl<'a> Parser<'a> {
     /// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
     /// At this point, the `!` token after the path has already been eaten.
     fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
-        let args = self.parse_mac_args()?;
-        let delim = args.delim();
+        let args = self.parse_delim_args()?;
+        let delim = args.delim.to_token();
         let hi = self.prev_token.span;
 
         let style = match delim {
-            Some(Delimiter::Brace) => MacStmtStyle::Braces,
-            Some(_) => MacStmtStyle::NoBraces,
-            None => unreachable!(),
+            Delimiter::Brace => MacStmtStyle::Braces,
+            _ => MacStmtStyle::NoBraces,
         };
 
         let mac = P(MacCall { path, args, prior_type_ascription: self.last_type_ascription });
@@ -190,8 +199,11 @@ impl<'a> Parser<'a> {
             // Since none of the above applied, this is an expression statement macro.
             let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
             let e = self.maybe_recover_from_bad_qpath(e)?;
-            let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
-            let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
+            let e = self.parse_expr_dot_or_call_with(e, lo, attrs)?;
+            let e = self.parse_expr_assoc_with(
+                0,
+                LhsExpr::AlreadyParsed { expr: e, starts_statement: false },
+            )?;
             StmtKind::Expr(e)
         };
         Ok(self.mk_stmt(lo.to(hi), kind))
@@ -199,27 +211,37 @@ impl<'a> Parser<'a> {
 
     /// Error on outer attributes in this context.
     /// Also error if the previous token was a doc comment.
-    fn error_outer_attrs(&self, attrs: &[Attribute]) {
-        if let [.., last] = attrs {
+    fn error_outer_attrs(&self, attrs: AttrWrapper) {
+        if !attrs.is_empty()
+        && let attrs = attrs.take_for_recovery(self.sess)
+        && let attrs @ [.., last] = &*attrs {
             if last.is_doc_comment() {
-                self.sess.emit_err(DocCommentDoesNotDocumentAnything {
+                self.sess.emit_err(errors::DocCommentDoesNotDocumentAnything {
                     span: last.span,
                     missing_comma: None,
                 });
             } else if attrs.iter().any(|a| a.style == AttrStyle::Outer) {
-                self.sess.emit_err(ExpectedStatementAfterOuterAttr { span: last.span });
+                self.sess.emit_err(errors::ExpectedStatementAfterOuterAttr { span: last.span });
             }
         }
     }
 
-    fn recover_stmt_local(
+    fn recover_stmt_local_after_let(
         &mut self,
         lo: Span,
         attrs: AttrWrapper,
-        subdiagnostic: fn(Span) -> InvalidVariableDeclarationSub,
+        subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub,
     ) -> PResult<'a, Stmt> {
-        let stmt = self.recover_local_after_let(lo, attrs)?;
-        self.sess.emit_err(InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
+        let stmt =
+            self.collect_tokens_trailing_token(attrs, ForceCollect::Yes, |this, attrs| {
+                let local = this.parse_local(attrs)?;
+                // FIXME - maybe capture semicolon in recovery?
+                Ok((
+                    this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
+                    TrailingToken::None,
+                ))
+            })?;
+        self.sess.emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) });
         Ok(stmt)
     }
 
@@ -242,27 +264,17 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
-        self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
-            let local = this.parse_local(attrs)?;
-            // FIXME - maybe capture semicolon in recovery?
-            Ok((
-                this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
-                TrailingToken::None,
-            ))
-        })
-    }
-
     /// Parses a local variable declaration.
     fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
         let lo = self.prev_token.span;
 
         if self.token.is_keyword(kw::Const) && self.look_ahead(1, |t| t.is_ident()) {
-            self.sess.emit_err(ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
+            self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: lo.to(self.token.span) });
             self.bump();
         }
 
-        let (pat, colon) = self.parse_pat_before_ty(None, RecoverComma::Yes, "`let` bindings")?;
+        let (pat, colon) =
+            self.parse_pat_before_ty(None, RecoverComma::Yes, PatternLocation::LetBinding)?;
 
         let (err, ty) = if colon {
             // Save the state of the parser before parsing type normally, in case there is a `:`
@@ -356,10 +368,10 @@ impl<'a> Parser<'a> {
     fn check_let_else_init_bool_expr(&self, init: &ast::Expr) {
         if let ast::ExprKind::Binary(op, ..) = init.kind {
             if op.node.lazy() {
-                self.sess.emit_err(InvalidExpressionInLetElse {
+                self.sess.emit_err(errors::InvalidExpressionInLetElse {
                     span: init.span,
                     operator: op.node.to_string(),
-                    sugg: WrapExpressionInParentheses {
+                    sugg: errors::WrapExpressionInParentheses {
                         left: init.span.shrink_to_lo(),
                         right: init.span.shrink_to_hi(),
                     },
@@ -370,9 +382,9 @@ impl<'a> Parser<'a> {
 
     fn check_let_else_init_trailing_brace(&self, init: &ast::Expr) {
         if let Some(trailing) = classify::expr_trailing_brace(init) {
-            self.sess.emit_err(InvalidCurlyInLetElse {
+            self.sess.emit_err(errors::InvalidCurlyInLetElse {
                 span: trailing.span.with_lo(trailing.span.hi() - BytePos(1)),
-                sugg: WrapExpressionInParentheses {
+                sugg: errors::WrapExpressionInParentheses {
                     left: trailing.span.shrink_to_lo(),
                     right: trailing.span.shrink_to_hi(),
                 },
@@ -385,7 +397,8 @@ impl<'a> Parser<'a> {
         let eq_consumed = match self.token.kind {
             token::BinOpEq(..) => {
                 // Recover `let x <op>= 1` as `let x = 1`
-                self.sess.emit_err(CompoundAssignmentExpressionInLet { span: self.token.span });
+                self.sess
+                    .emit_err(errors::CompoundAssignmentExpressionInLet { span: self.token.span });
                 self.bump();
                 true
             }
@@ -480,7 +493,7 @@ impl<'a> Parser<'a> {
 
     /// Parses a block. Inner attributes are allowed.
     pub(super) fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (AttrVec, P<Block>)> {
-        self.parse_block_common(self.token.span, BlockCheckMode::Default)
+        self.parse_block_common(self.token.span, BlockCheckMode::Default, true)
     }
 
     /// Parses a block. Inner attributes are allowed.
@@ -488,16 +501,23 @@ impl<'a> Parser<'a> {
         &mut self,
         lo: Span,
         blk_mode: BlockCheckMode,
+        can_be_struct_literal: bool,
     ) -> PResult<'a, (AttrVec, P<Block>)> {
         maybe_whole!(self, NtBlock, |x| (AttrVec::new(), x));
 
+        let maybe_ident = self.prev_token.clone();
         self.maybe_recover_unexpected_block_label();
         if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
             return self.error_block_no_opening_brace();
         }
 
         let attrs = self.parse_inner_attributes()?;
-        let tail = match self.maybe_suggest_struct_literal(lo, blk_mode) {
+        let tail = match self.maybe_suggest_struct_literal(
+            lo,
+            blk_mode,
+            maybe_ident,
+            can_be_struct_literal,
+        ) {
             Some(tail) => tail?,
             None => self.parse_block_tail(lo, blk_mode, AttemptLocalParseRecovery::Yes)?,
         };
@@ -512,14 +532,24 @@ impl<'a> Parser<'a> {
         s: BlockCheckMode,
         recover: AttemptLocalParseRecovery,
     ) -> PResult<'a, P<Block>> {
-        let mut stmts = vec![];
+        let mut stmts = ThinVec::new();
+        let mut snapshot = None;
         while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
             if self.token == token::Eof {
                 break;
             }
+            if self.is_diff_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) {
+                // Account for `<<<<<<<` diff markers. We can't proactively error here because
+                // that can be a valid path start, so we snapshot and reparse only if we've
+                // encountered another parse error.
+                snapshot = Some(self.create_snapshot_for_diagnostic());
+            }
             let stmt = match self.parse_full_stmt(recover) {
                 Err(mut err) if recover.yes() => {
                     self.maybe_annotate_with_ascription(&mut err, false);
+                    if let Some(ref mut snapshot) = snapshot {
+                        snapshot.recover_diff_marker();
+                    }
                     err.emit();
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     Some(self.mk_stmt_err(self.token.span))
@@ -550,9 +580,9 @@ impl<'a> Parser<'a> {
         };
 
         let mut eat_semi = true;
-        match stmt.kind {
+        match &mut stmt.kind {
             // Expression without semicolon.
-            StmtKind::Expr(ref mut expr)
+            StmtKind::Expr(expr)
                 if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => {
                 // Just check for errors and recover; do not eat semicolon yet.
                 // `expect_one_of` returns PResult<'a, bool /* recovered */>
@@ -598,7 +628,7 @@ impl<'a> Parser<'a> {
                 }
             }
             StmtKind::Expr(_) | StmtKind::MacCall(_) => {}
-            StmtKind::Local(ref mut local) if let Err(e) = self.expect_semi() => {
+            StmtKind::Local(local) if let Err(e) = self.expect_semi() => {
                 // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover.
                 match &mut local.kind {
                     LocalKind::Init(expr) | LocalKind::InitElse(expr, _) => {
@@ -620,7 +650,12 @@ impl<'a> Parser<'a> {
         Ok(Some(stmt))
     }
 
-    pub(super) fn mk_block(&self, stmts: Vec<Stmt>, rules: BlockCheckMode, span: Span) -> P<Block> {
+    pub(super) fn mk_block(
+        &self,
+        stmts: ThinVec<Stmt>,
+        rules: BlockCheckMode,
+        span: Span,
+    ) -> P<Block> {
         P(Block {
             stmts,
             id: DUMMY_NODE_ID,
@@ -640,6 +675,6 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn mk_block_err(&self, span: Span) -> P<Block> {
-        self.mk_block(vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
+        self.mk_block(thin_vec![self.mk_stmt_err(span)], BlockCheckMode::Default, span)
     }
 }
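
For context on the `let ... else` checks above, a sketch (not part of the diff; `either` is an illustrative function) of the form that compiles once a lazy boolean initializer is parenthesized, which is what the emitted suggestion asks for:

    fn either(a: bool, b: bool) -> bool {
        // An unparenthesized `a || b` initializer here triggers InvalidExpressionInLetElse
        // with a WrapExpressionInParentheses suggestion; the parenthesized form is accepted:
        let true = (a || b) else {
            return false;
        };
        true
    }

    fn main() {
        assert!(either(false, true));
    }
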
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 4d78c5bd0e2..6fe4da71f6b 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -1,7 +1,15 @@
 use super::{Parser, PathStyle, TokenType};
 
+use crate::errors::{
+    DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType,
+    FnPointerCannotBeAsync, FnPointerCannotBeConst, FnPtrWithGenerics, FnPtrWithGenericsSugg,
+    InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
+    NegativeBoundsNotSupported, NegativeBoundsNotSupportedSugg, NestedCVariadicType,
+    ReturnTypesUseThinArrow,
+};
 use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 
+use ast::DUMMY_NODE_ID;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::util::case::Case;
@@ -9,9 +17,11 @@ use rustc_ast::{
     self as ast, BareFnTy, FnRetTy, GenericBound, GenericBounds, GenericParam, Generics, Lifetime,
     MacCall, MutTy, Mutability, PolyTraitRef, TraitBoundModifier, TraitObjectSyntax, Ty, TyKind,
 };
-use rustc_errors::{pluralize, struct_span_err, Applicability, PResult};
+use rustc_errors::{Applicability, PResult};
 use rustc_span::source_map::Span;
-use rustc_span::symbol::{kw, sym};
+use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::Symbol;
+use thin_vec::{thin_vec, ThinVec};
 
 /// Any `?` or `~const` modifiers that appear at the start of a bound.
 struct BoundModifiers {
@@ -213,14 +223,7 @@ impl<'a> Parser<'a> {
             // Don't `eat` to prevent `=>` from being added as an expected token which isn't
             // actually expected and could only confuse users
             self.bump();
-            self.struct_span_err(self.prev_token.span, "return types are denoted using `->`")
-                .span_suggestion_short(
-                    self.prev_token.span,
-                    "use `->` instead",
-                    "->",
-                    Applicability::MachineApplicable,
-                )
-                .emit();
+            self.sess.emit_err(ReturnTypesUseThinArrow { span: self.prev_token.span });
             let ty = self.parse_ty_common(
                 allow_plus,
                 AllowCVariadic::No,
@@ -270,14 +273,19 @@ impl<'a> Parser<'a> {
             TyKind::Infer
         } else if self.check_fn_front_matter(false, Case::Sensitive) {
             // Function pointer type
-            self.parse_ty_bare_fn(lo, Vec::new(), recover_return_sign)?
+            self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
         } else if self.check_keyword(kw::For) {
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
             let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
             if self.check_fn_front_matter(false, Case::Sensitive) {
-                self.parse_ty_bare_fn(lo, lifetime_defs, recover_return_sign)?
+                self.parse_ty_bare_fn(
+                    lo,
+                    lifetime_defs,
+                    Some(self.prev_token.span.shrink_to_lo()),
+                    recover_return_sign,
+                )?
             } else {
                 let path = self.parse_path(PathStyle::Type)?;
                 let parse_plus = allow_plus == AllowPlus::Yes && self.check_plus();
@@ -296,13 +304,14 @@ impl<'a> Parser<'a> {
         } else if self.can_begin_bound() {
             self.parse_bare_trait_object(lo, allow_plus)?
         } else if self.eat(&token::DotDotDot) {
-            if allow_c_variadic == AllowCVariadic::Yes {
-                TyKind::CVarArgs
-            } else {
-                // FIXME(Centril): Should we just allow `...` syntactically
-                // anywhere in a type and use semantic restrictions instead?
-                self.error_illegal_c_varadic_ty(lo);
-                TyKind::Err
+            match allow_c_variadic {
+                AllowCVariadic::Yes => TyKind::CVarArgs,
+                AllowCVariadic::No => {
+                    // FIXME(Centril): Should we just allow `...` syntactically
+                    // anywhere in a type and use semantic restrictions instead?
+                    self.sess.emit_err(NestedCVariadicType { span: lo.to(self.prev_token.span) });
+                    TyKind::Err
+                }
             }
         } else {
             let msg = format!("expected type, found {}", super::token_descr(&self.token));
@@ -316,10 +325,9 @@ impl<'a> Parser<'a> {
         let mut ty = self.mk_ty(span, kind);
 
         // Try to recover from use of `+` with incorrect priority.
-        if matches!(allow_plus, AllowPlus::Yes) {
-            self.maybe_recover_from_bad_type_plus(&ty)?;
-        } else {
-            self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty);
+        match allow_plus {
+            AllowPlus::Yes => self.maybe_recover_from_bad_type_plus(&ty)?,
+            AllowPlus::No => self.maybe_report_ambiguous_plus(impl_dyn_multi, &ty),
         }
         if let RecoverQuestionMark::Yes = recover_question_mark {
             ty = self.maybe_recover_from_question_mark(ty);
@@ -344,7 +352,7 @@ impl<'a> Parser<'a> {
             match ty.kind {
                 // `(TY_BOUND_NOPAREN) + BOUND + ...`.
                 TyKind::Path(None, path) if maybe_bounds => {
-                    self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
+                    self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
                 }
                 TyKind::TraitObject(bounds, TraitObjectSyntax::None)
                     if maybe_bounds && bounds.len() == 1 && !trailing_plus =>
@@ -363,15 +371,14 @@ impl<'a> Parser<'a> {
         let lt_no_plus = self.check_lifetime() && !self.look_ahead(1, |t| t.is_like_plus());
         let bounds = self.parse_generic_bounds_common(allow_plus, None)?;
         if lt_no_plus {
-            self.struct_span_err(lo, "lifetime in trait object type must be followed by `+`")
-                .emit();
+            self.sess.emit_err(NeedPlusAfterTraitObjectLifetime { span: lo });
         }
         Ok(TyKind::TraitObject(bounds, TraitObjectSyntax::None))
     }
 
     fn parse_remaining_bounds_path(
         &mut self,
-        generic_params: Vec<GenericParam>,
+        generic_params: ThinVec<GenericParam>,
         path: ast::Path,
         lo: Span,
         parse_plus: bool,
@@ -398,14 +405,10 @@ impl<'a> Parser<'a> {
     fn parse_ty_ptr(&mut self) -> PResult<'a, TyKind> {
         let mutbl = self.parse_const_or_mut().unwrap_or_else(|| {
             let span = self.prev_token.span;
-            self.struct_span_err(span, "expected `mut` or `const` keyword in raw pointer type")
-                .span_suggestions(
-                    span.shrink_to_hi(),
-                    "add `mut` or `const` here",
-                    ["mut ".to_string(), "const ".to_string()],
-                    Applicability::HasPlaceholders,
-                )
-                .emit();
+            self.sess.emit_err(ExpectedMutOrConstInRawPointerType {
+                span,
+                after_asterisk: span.shrink_to_hi(),
+            });
             Mutability::Not
         });
         let ty = self.parse_ty_no_plus()?;
@@ -430,7 +433,7 @@ impl<'a> Parser<'a> {
         };
 
         let ty = if self.eat(&token::Semi) {
-            let mut length = self.parse_anon_const_expr()?;
+            let mut length = self.parse_expr_anon_const()?;
             if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
                 // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
                 self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
@@ -447,8 +450,7 @@ impl<'a> Parser<'a> {
 
     fn parse_borrowed_pointee(&mut self) -> PResult<'a, TyKind> {
         let and_span = self.prev_token.span;
-        let mut opt_lifetime =
-            if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
+        let mut opt_lifetime = self.check_lifetime().then(|| self.expect_lifetime());
         let mut mutbl = self.parse_mutability();
         if self.token.is_lifetime() && mutbl == Mutability::Mut && opt_lifetime.is_none() {
             // A lifetime is invalid here: it would be part of a bare trait bound, which requires
@@ -460,16 +462,13 @@ impl<'a> Parser<'a> {
                 let lifetime_span = self.token.span;
                 let span = and_span.to(lifetime_span);
 
-                let mut err = self.struct_span_err(span, "lifetime must precede `mut`");
-                if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
-                    err.span_suggestion(
-                        span,
-                        "place the lifetime before `mut`",
-                        format!("&{} mut", lifetime_src),
-                        Applicability::MaybeIncorrect,
-                    );
-                }
-                err.emit();
+                let (suggest_lifetime, snippet) =
+                    if let Ok(lifetime_src) = self.span_to_snippet(lifetime_span) {
+                        (Some(span), lifetime_src)
+                    } else {
+                        (None, String::new())
+                    };
+                self.sess.emit_err(LifetimeAfterMut { span, suggest_lifetime, snippet });
 
                 opt_lifetime = Some(self.expect_lifetime());
             }
@@ -479,14 +478,7 @@ impl<'a> Parser<'a> {
         {
             // We have `&dyn mut ...`, which is invalid and should be `&mut dyn ...`.
             let span = and_span.to(self.look_ahead(1, |t| t.span));
-            let mut err = self.struct_span_err(span, "`mut` must precede `dyn`");
-            err.span_suggestion(
-                span,
-                "place `mut` before `dyn`",
-                "&mut dyn",
-                Applicability::MachineApplicable,
-            );
-            err.emit();
+            self.sess.emit_err(DynAfterMut { span });
 
             // Recovery
             mutbl = Mutability::Mut;
@@ -495,14 +487,14 @@ impl<'a> Parser<'a> {
             self.bump_with((dyn_tok, dyn_tok_sp));
         }
         let ty = self.parse_ty_no_plus()?;
-        Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }))
+        Ok(TyKind::Ref(opt_lifetime, MutTy { ty, mutbl }))
     }
 
     // Parses the `typeof(EXPR)`.
     // To avoid ambiguity, the type is surrounded by parentheses.
     fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
         self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
-        let expr = self.parse_anon_const_expr()?;
+        let expr = self.parse_expr_anon_const()?;
         self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
         Ok(TyKind::Typeof(expr))
     }
@@ -519,7 +511,8 @@ impl<'a> Parser<'a> {
     fn parse_ty_bare_fn(
         &mut self,
         lo: Span,
-        params: Vec<GenericParam>,
+        mut params: ThinVec<GenericParam>,
+        param_insertion_point: Option<Span>,
         recover_return_sign: RecoverReturnSign,
     ) -> PResult<'a, TyKind> {
         let inherited_vis = rustc_ast::Visibility {
@@ -529,38 +522,89 @@ impl<'a> Parser<'a> {
         };
         let span_start = self.token.span;
         let ast::FnHeader { ext, unsafety, constness, asyncness } =
-            self.parse_fn_front_matter(&inherited_vis)?;
+            self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?;
+        if self.may_recover() && self.token.kind == TokenKind::Lt {
+            self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?;
+        }
         let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?;
         let whole_span = lo.to(self.prev_token.span);
         if let ast::Const::Yes(span) = constness {
             // If we ever start to allow `const fn()`, then update
             // feature gating for `#![feature(const_extern_fn)]` to
             // cover it.
-            self.error_fn_ptr_bad_qualifier(whole_span, span, "const");
+            self.sess.emit_err(FnPointerCannotBeConst { span: whole_span, qualifier: span });
         }
         if let ast::Async::Yes { span, .. } = asyncness {
-            self.error_fn_ptr_bad_qualifier(whole_span, span, "async");
+            self.sess.emit_err(FnPointerCannotBeAsync { span: whole_span, qualifier: span });
         }
         let decl_span = span_start.to(self.token.span);
         Ok(TyKind::BareFn(P(BareFnTy { ext, unsafety, generic_params: params, decl, decl_span })))
     }
 
-    /// Emit an error for the given bad function pointer qualifier.
-    fn error_fn_ptr_bad_qualifier(&self, span: Span, qual_span: Span, qual: &str) {
-        self.struct_span_err(span, &format!("an `fn` pointer type cannot be `{}`", qual))
-            .span_label(qual_span, format!("`{}` because of this", qual))
-            .span_suggestion_short(
-                qual_span,
-                &format!("remove the `{}` qualifier", qual),
-                "",
-                Applicability::MaybeIncorrect,
-            )
-            .emit();
+    /// Recover from function pointer types with a generic parameter list (e.g. `fn<'a>(&'a str)`).
+    fn recover_fn_ptr_with_generics(
+        &mut self,
+        lo: Span,
+        params: &mut ThinVec<GenericParam>,
+        param_insertion_point: Option<Span>,
+    ) -> PResult<'a, ()> {
+        let generics = self.parse_generics()?;
+        let arity = generics.params.len();
+
+        let mut lifetimes: ThinVec<_> = generics
+            .params
+            .into_iter()
+            .filter(|param| matches!(param.kind, ast::GenericParamKind::Lifetime))
+            .collect();
+
+        let sugg = if !lifetimes.is_empty() {
+            let snippet =
+                lifetimes.iter().map(|param| param.ident.as_str()).intersperse(", ").collect();
+
+            let (left, snippet) = if let Some(span) = param_insertion_point {
+                (span, if params.is_empty() { snippet } else { format!(", {snippet}") })
+            } else {
+                (lo.shrink_to_lo(), format!("for<{snippet}> "))
+            };
+
+            Some(FnPtrWithGenericsSugg {
+                left,
+                snippet,
+                right: generics.span,
+                arity,
+                for_param_list_exists: param_insertion_point.is_some(),
+            })
+        } else {
+            None
+        };
+
+        self.sess.emit_err(FnPtrWithGenerics { span: generics.span, sugg });
+        params.append(&mut lifetimes);
+        Ok(())
     }
 
     /// Parses an `impl B0 + ... + Bn` type.
     fn parse_impl_ty(&mut self, impl_dyn_multi: &mut bool) -> PResult<'a, TyKind> {
         // Always parse bounds greedily for better error recovery.
+        if self.token.is_lifetime() {
+            self.look_ahead(1, |t| {
+                if let token::Ident(symname, _) = t.kind {
+                    // If we parse a pattern like `'a Sized`, we should suggest writing
+                    // `'a + Sized` instead.
+                    self.struct_span_err(
+                        self.token.span,
+                        &format!("expected `+` between lifetime and {}", symname),
+                    )
+                    .span_suggestion_verbose(
+                        self.token.span.shrink_to_hi(),
+                        "add `+`",
+                        " +",
+                        Applicability::MaybeIncorrect,
+                    )
+                    .emit();
+                }
+            })
+        }
         let bounds = self.parse_generic_bounds(None)?;
         *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus);
         Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds))
@@ -569,7 +613,7 @@ impl<'a> Parser<'a> {
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
         self.check_keyword(kw::Dyn)
-            && (!self.token.uninterpolated_span().rust_2015()
+            && (self.token.uninterpolated_span().rust_2018()
                 || self.look_ahead(1, |t| {
                     (t.can_begin_bound() || t.kind == TokenKind::BinOp(token::Star))
                         && !can_continue_type_after_non_fn_ident(t)
@@ -613,28 +657,18 @@ impl<'a> Parser<'a> {
             // Macro invocation in type position
             Ok(TyKind::MacCall(P(MacCall {
                 path,
-                args: self.parse_mac_args()?,
+                args: self.parse_delim_args()?,
                 prior_type_ascription: self.last_type_ascription,
             })))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {
             // `Trait1 + Trait2 + 'a`
-            self.parse_remaining_bounds_path(Vec::new(), path, lo, true)
+            self.parse_remaining_bounds_path(ThinVec::new(), path, lo, true)
         } else {
             // Just a type path.
             Ok(TyKind::Path(None, path))
         }
     }
 
-    fn error_illegal_c_varadic_ty(&self, lo: Span) {
-        struct_span_err!(
-            self.sess.span_diagnostic,
-            lo.to(self.prev_token.span),
-            E0743,
-            "C-variadic type `...` may not be nested inside another type",
-        )
-        .emit();
-    }
-
     pub(super) fn parse_generic_bounds(
         &mut self,
         colon_span: Option<Span>,
@@ -653,24 +687,19 @@ impl<'a> Parser<'a> {
         let mut bounds = Vec::new();
         let mut negative_bounds = Vec::new();
 
+        // In addition to looping while we find generic bounds:
+        // We continue even if we find a keyword. This is necessary for error recovery on,
+        // for example, `impl fn()`. The only keyword that can go after generic bounds is
+        // `where`, so stop if we find it.
+        // We also continue if we find types (not traits), again for error recovery.
         while self.can_begin_bound()
-            // Continue even if we find a keyword.
-            // This is necessary for error recover on, for example, `impl fn()`.
-            //
-            // The only keyword that can go after generic bounds is `where`, so stop if it's it.
-            || (self.token.is_reserved_ident() && !self.token.is_keyword(kw::Where))
+            || (self.may_recover()
+                && (self.token.can_begin_type()
+                    || (self.token.is_reserved_ident() && !self.token.is_keyword(kw::Where))))
         {
             if self.token.is_keyword(kw::Dyn) {
                 // Account for `&dyn Trait + dyn Other`.
-                self.struct_span_err(self.token.span, "invalid `dyn` keyword")
-                    .help("`dyn` is only needed at the start of a trait `+`-separated list")
-                    .span_suggestion(
-                        self.token.span,
-                        "remove this keyword",
-                        "",
-                        Applicability::MachineApplicable,
-                    )
-                    .emit();
+                self.sess.emit_err(InvalidDynKeyword { span: self.token.span });
                 self.bump();
             }
             match self.parse_generic_bound()? {
@@ -707,11 +736,7 @@ impl<'a> Parser<'a> {
         bounds: &[GenericBound],
         negative_bounds: Vec<Span>,
     ) {
-        let negative_bounds_len = negative_bounds.len();
-        let last_span = *negative_bounds.last().expect("no negative bounds, but still error?");
-        let mut err = self.struct_span_err(negative_bounds, "negative bounds are not supported");
-        err.span_label(last_span, "negative bounds are not supported");
-        if let Some(bound_list) = colon_span {
+        let sub = if let Some(bound_list) = colon_span {
             let bound_list = bound_list.to(self.prev_token.span);
             let mut new_bound_list = String::new();
             if !bounds.is_empty() {
@@ -722,14 +747,18 @@ impl<'a> Parser<'a> {
                 }
                 new_bound_list = new_bound_list.replacen(" +", ":", 1);
             }
-            err.tool_only_span_suggestion(
+
+            Some(NegativeBoundsNotSupportedSugg {
                 bound_list,
-                &format!("remove the bound{}", pluralize!(negative_bounds_len)),
-                new_bound_list,
-                Applicability::MachineApplicable,
-            );
-        }
-        err.emit();
+                num_bounds: negative_bounds.len(),
+                fixed: new_bound_list,
+            })
+        } else {
+            None
+        };
+
+        let last_span = *negative_bounds.last().expect("no negative bounds, but still error?");
+        self.sess.emit_err(NegativeBoundsNotSupported { negative_bounds, last_span, sub });
     }
 
     /// Parses a bound according to the grammar:
@@ -841,7 +870,7 @@ impl<'a> Parser<'a> {
             None
         };
 
-        let maybe = if self.eat(&token::Question) { Some(self.prev_token.span) } else { None };
+        let maybe = self.eat(&token::Question).then_some(self.prev_token.span);
 
         Ok(BoundModifiers { maybe, maybe_const })
     }
@@ -859,8 +888,50 @@ impl<'a> Parser<'a> {
         has_parens: bool,
         modifiers: BoundModifiers,
     ) -> PResult<'a, GenericBound> {
-        let lifetime_defs = self.parse_late_bound_lifetime_defs()?;
-        let path = self.parse_path(PathStyle::Type)?;
+        let mut lifetime_defs = self.parse_late_bound_lifetime_defs()?;
+        let mut path = if self.token.is_keyword(kw::Fn)
+            && self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis))
+            && let Some(path) = self.recover_path_from_fn()
+        {
+            path
+        } else if !self.token.is_path_start() && self.token.can_begin_type() {
+            let ty = self.parse_ty_no_plus()?;
+            // Instead of finding a path (a trait), we found a type.
+            let mut err = self.struct_span_err(ty.span, "expected a trait, found type");
+
+            // If we can recover, try to extract a path from the type. Note
+            // that we do not use the try operator when parsing the type because
+            // if it fails then we get a parser error which we don't want (we're trying
+            // to recover from errors, not make more).
+            let path = if self.may_recover()
+                && matches!(ty.kind, TyKind::Ptr(..) | TyKind::Ref(..))
+                && let TyKind::Path(_, path) = &ty.peel_refs().kind {
+                // Just get the indirection part of the type.
+                let span = ty.span.until(path.span);
+
+                err.span_suggestion_verbose(
+                    span,
+                    "consider removing the indirection",
+                    "",
+                    Applicability::MaybeIncorrect,
+                );
+
+                path.clone()
+            } else {
+                return Err(err);
+            };
+
+            err.emit();
+
+            path
+        } else {
+            self.parse_path(PathStyle::Type)?
+        };
+
+        if self.may_recover() && self.token == TokenKind::OpenDelim(Delimiter::Parenthesis) {
+            self.recover_fn_trait_with_lifetime_params(&mut path, &mut lifetime_defs)?;
+        }
+
         if has_parens {
             if self.token.is_like_plus() {
                 // Someone has written something like `&dyn (Trait + Other)`. The correct code
@@ -871,7 +942,7 @@ impl<'a> Parser<'a> {
                 self.parse_remaining_bounds(bounds, true)?;
                 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
                 let sp = vec![lo, self.prev_token.span];
-                let sugg: Vec<_> = sp.iter().map(|sp| (*sp, String::new())).collect();
+                let sugg = vec![(lo, String::from(" ")), (self.prev_token.span, String::new())];
                 self.struct_span_err(sp, "incorrect braces around trait bounds")
                     .multipart_suggestion(
                         "remove the parentheses",
@@ -889,8 +960,40 @@ impl<'a> Parser<'a> {
         Ok(GenericBound::Trait(poly_trait, modifier))
     }
 
+    // recovers a `Fn(..)` parenthesized-style path from `fn(..)`
+    fn recover_path_from_fn(&mut self) -> Option<ast::Path> {
+        let fn_token_span = self.token.span;
+        self.bump();
+        let args_lo = self.token.span;
+        let snapshot = self.create_snapshot_for_diagnostic();
+        match self.parse_fn_decl(|_| false, AllowPlus::No, RecoverReturnSign::OnlyFatArrow) {
+            Ok(decl) => {
+                self.sess.emit_err(ExpectedFnPathFoundFnKeyword { fn_token_span });
+                Some(ast::Path {
+                    span: fn_token_span.to(self.prev_token.span),
+                    segments: thin_vec![ast::PathSegment {
+                        ident: Ident::new(Symbol::intern("Fn"), fn_token_span),
+                        id: DUMMY_NODE_ID,
+                        args: Some(P(ast::GenericArgs::Parenthesized(ast::ParenthesizedArgs {
+                            span: args_lo.to(self.prev_token.span),
+                            inputs: decl.inputs.iter().map(|a| a.ty.clone()).collect(),
+                            inputs_span: args_lo.until(decl.output.span()),
+                            output: decl.output.clone(),
+                        }))),
+                    }],
+                    tokens: None,
+                })
+            }
+            Err(diag) => {
+                diag.cancel();
+                self.restore_snapshot(snapshot);
+                None
+            }
+        }
+    }
+
     /// Optionally parses `for<$generic_params>`.
-    pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
+    pub(super) fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, ThinVec<GenericParam>> {
         if self.eat_keyword(kw::For) {
             self.expect_lt()?;
             let params = self.parse_generic_params()?;
@@ -899,10 +1002,96 @@ impl<'a> Parser<'a> {
             // parameters, and the lifetime parameters must not have bounds.
             Ok(params)
         } else {
-            Ok(Vec::new())
+            Ok(ThinVec::new())
         }
     }
 
+    /// Recover from `Fn`-family traits (Fn, FnMut, FnOnce) with lifetime arguments
+    /// (e.g. `FnOnce<'a>(&'a str) -> bool`). The path, up to and including its generic
+    /// arguments, has already been eaten.
+    fn recover_fn_trait_with_lifetime_params(
+        &mut self,
+        fn_path: &mut ast::Path,
+        lifetime_defs: &mut ThinVec<GenericParam>,
+    ) -> PResult<'a, ()> {
+        let fn_path_segment = fn_path.segments.last_mut().unwrap();
+        let generic_args = if let Some(p_args) = &fn_path_segment.args {
+            p_args.clone().into_inner()
+        } else {
+            // Normally we don't reach this branch, since the caller should already have
+            // parsed the generic arguments (otherwise it would be impossible to call this function).
+            return Ok(());
+        };
+        let lifetimes =
+            if let ast::GenericArgs::AngleBracketed(ast::AngleBracketedArgs { span: _, args }) =
+                &generic_args
+            {
+                args.into_iter()
+                    .filter_map(|arg| {
+                        if let ast::AngleBracketedArg::Arg(generic_arg) = arg
+                            && let ast::GenericArg::Lifetime(lifetime) = generic_arg {
+                            Some(lifetime)
+                        } else {
+                            None
+                        }
+                    })
+                    .collect()
+            } else {
+                Vec::new()
+            };
+        // Only try to recover if the trait has lifetime params.
+        if lifetimes.is_empty() {
+            return Ok(());
+        }
+
+        // Parse `(T, U) -> R`.
+        let inputs_lo = self.token.span;
+        let inputs: ThinVec<_> =
+            self.parse_fn_params(|_| false)?.into_iter().map(|input| input.ty).collect();
+        let inputs_span = inputs_lo.to(self.prev_token.span);
+        let output = self.parse_ret_ty(AllowPlus::No, RecoverQPath::No, RecoverReturnSign::No)?;
+        let args = ast::ParenthesizedArgs {
+            span: fn_path_segment.span().to(self.prev_token.span),
+            inputs,
+            inputs_span,
+            output,
+        }
+        .into();
+        *fn_path_segment =
+            ast::PathSegment { ident: fn_path_segment.ident, args, id: ast::DUMMY_NODE_ID };
+
+        // Convert parsed `<'a>` in `Fn<'a>` into `for<'a>`.
+        let mut generic_params = lifetimes
+            .iter()
+            .map(|lt| GenericParam {
+                id: lt.id,
+                ident: lt.ident,
+                attrs: ast::AttrVec::new(),
+                bounds: Vec::new(),
+                is_placeholder: false,
+                kind: ast::GenericParamKind::Lifetime,
+                colon_span: None,
+            })
+            .collect::<ThinVec<GenericParam>>();
+        lifetime_defs.append(&mut generic_params);
+
+        let generic_args_span = generic_args.span();
+        let mut err =
+            self.struct_span_err(generic_args_span, "`Fn` traits cannot take lifetime parameters");
+        let snippet = format!(
+            "for<{}> ",
+            lifetimes.iter().map(|lt| lt.ident.as_str()).intersperse(", ").collect::<String>(),
+        );
+        let before_fn_path = fn_path.span.shrink_to_lo();
+        err.multipart_suggestion(
+            "consider using a higher-ranked trait bound instead",
+            vec![(generic_args_span, "".to_owned()), (before_fn_path, snippet)],
+            Applicability::MaybeIncorrect,
+        )
+        .emit();
+        Ok(())
+    }
+
     pub(super) fn check_lifetime(&mut self) -> bool {
         self.expected_tokens.push(TokenType::Lifetime);
         self.token.is_lifetime()
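
Finally, a sketch (not from the patch; `all_match` is a made-up helper) of the higher-ranked form that `recover_fn_trait_with_lifetime_params` steers `Fn<'a>(&'a str)`-style bounds toward:

    // Writing the bound as `Fn<'a>(&'a str) -> bool` now yields
    // "`Fn` traits cannot take lifetime parameters" plus a suggestion to use
    // the `for<'a>` form shown here:
    fn all_match(f: impl for<'a> Fn(&'a str) -> bool) -> bool {
        f("a") && f("bc")
    }

    fn main() {
        assert!(all_match(|s: &str| s.len() < 3));
    }
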
diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs
index 47477898b24..72402a20090 100644
--- a/compiler/rustc_parse/src/validate_attr.rs
+++ b/compiler/rustc_parse/src/validate_attr.rs
@@ -3,15 +3,16 @@
 use crate::parse_in;
 
 use rustc_ast::tokenstream::DelimSpan;
-use rustc_ast::{self as ast, Attribute, MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind};
+use rustc_ast::MetaItemKind;
+use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MacDelimiter, MetaItem};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, FatalError, PResult};
 use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
 use rustc_session::lint::builtin::ILL_FORMED_ATTRIBUTE_INPUT;
 use rustc_session::parse::ParseSess;
-use rustc_span::{sym, Symbol};
+use rustc_span::{sym, Span, Symbol};
 
-pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
+pub fn check_attr(sess: &ParseSess, attr: &Attribute) {
     if attr.is_doc_comment() {
         return;
     }
@@ -24,7 +25,7 @@ pub fn check_meta(sess: &ParseSess, attr: &Attribute) {
         Some(BuiltinAttribute { name, template, .. }) if *name != sym::rustc_dummy => {
             check_builtin_attribute(sess, attr, *name, *template)
         }
-        _ if let MacArgs::Eq(..) = attr.get_normal_item().args => {
+        _ if let AttrArgs::Eq(..) = attr.get_normal_item().args => {
             // All key-value attributes are restricted to meta-item syntax.
             parse_meta(sess, attr)
                 .map_err(|mut err| {
@@ -42,17 +43,19 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
         span: attr.span,
         path: item.path.clone(),
         kind: match &item.args {
-            MacArgs::Empty => MetaItemKind::Word,
-            MacArgs::Delimited(dspan, delim, t) => {
+            AttrArgs::Empty => MetaItemKind::Word,
+            AttrArgs::Delimited(DelimArgs { dspan, delim, tokens }) => {
                 check_meta_bad_delim(sess, *dspan, *delim, "wrong meta list delimiters");
-                let nmis = parse_in(sess, t.clone(), "meta list", |p| p.parse_meta_seq_top())?;
+                let nmis = parse_in(sess, tokens.clone(), "meta list", |p| p.parse_meta_seq_top())?;
                 MetaItemKind::List(nmis)
             }
-            MacArgs::Eq(_, MacArgsEq::Ast(expr)) => {
-                if let ast::ExprKind::Lit(lit) = &expr.kind {
-                    if !lit.kind.is_unsuffixed() {
+            AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
+                if let ast::ExprKind::Lit(token_lit) = expr.kind
+                    && let Ok(lit) = ast::MetaItemLit::from_token_lit(token_lit, expr.span)
+                {
+                    if token_lit.suffix.is_some() {
                         let mut err = sess.span_diagnostic.struct_span_err(
-                            lit.span,
+                            expr.span,
                             "suffixed literals are not allowed in attributes",
                         );
                         err.help(
@@ -61,7 +64,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
                         );
                         return Err(err);
                     } else {
-                        MetaItemKind::NameValue(lit.clone())
+                        MetaItemKind::NameValue(lit)
                     }
                 } else {
                     // The non-error case can happen with e.g. `#[foo = 1+1]`. The error case can
@@ -76,7 +79,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
                     return Err(err);
                 }
             }
-            MacArgs::Eq(_, MacArgsEq::Hir(lit)) => MetaItemKind::NameValue(lit.clone()),
+            AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => MetaItemKind::NameValue(lit.clone()),
         },
     })
 }
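The reworked literal handling goes through `MetaItemLit::from_token_lit` and now points the suffixed-literal error at the whole expression span. Roughly, the attribute inputs involved look like this; the rejected lines are commented out so the sketch compiles on its own, and `#[doc]` is only used as a convenient built-in key-value attribute:

    // Accepted: an unsuffixed literal becomes `MetaItemKind::NameValue`.
    #[doc = "plain, unsuffixed string literal"]
    struct S;

    // Rejected during attribute validation:
    // #[doc = 1usize]   // "suffixed literals are not allowed in attributes"
    // #[doc = 1 + 1]    // not a literal; handled by the non-literal error branch in `parse_meta`

    fn main() {
        let _ = S;
    }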
@@ -112,25 +115,34 @@ pub fn check_builtin_attribute(
     name: Symbol,
     template: AttributeTemplate,
 ) {
-    // Some special attributes like `cfg` must be checked
-    // before the generic check, so we skip them here.
-    let should_skip = |name| name == sym::cfg;
-
     match parse_meta(sess, attr) {
-        Ok(meta) => {
-            if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
-                emit_malformed_attribute(sess, attr, name, template);
-            }
-        }
+        Ok(meta) => check_builtin_meta_item(sess, &meta, attr.style, name, template),
         Err(mut err) => {
             err.emit();
         }
     }
 }
 
+pub fn check_builtin_meta_item(
+    sess: &ParseSess,
+    meta: &MetaItem,
+    style: ast::AttrStyle,
+    name: Symbol,
+    template: AttributeTemplate,
+) {
+    // Some special attributes like `cfg` must be checked
+    // before the generic check, so we skip them here.
+    let should_skip = |name| name == sym::cfg;
+
+    if !should_skip(name) && !is_attr_template_compatible(&template, &meta.kind) {
+        emit_malformed_attribute(sess, style, meta.span, name, template);
+    }
+}
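Factoring the check out of `check_builtin_attribute` means callers that already hold a `MetaItem` (rather than an unparsed `Attribute`) can validate it directly. A hypothetical caller, reusing the `BUILTIN_ATTRIBUTE_MAP` lookup seen elsewhere in this file; the helper name and scenario are made up:

    // Hypothetical helper: validate a meta item synthesized during expansion
    // without re-parsing an `Attribute`.
    fn check_synthesized_meta(sess: &ParseSess, meta: &MetaItem, style: ast::AttrStyle) {
        if let Some(ident) = meta.ident() {
            if let Some(builtin) = BUILTIN_ATTRIBUTE_MAP.get(&ident.name) {
                check_builtin_meta_item(sess, meta, style, ident.name, builtin.template);
            }
        }
    }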
+
 fn emit_malformed_attribute(
     sess: &ParseSess,
-    attr: &Attribute,
+    style: ast::AttrStyle,
+    span: Span,
     name: Symbol,
     template: AttributeTemplate,
 ) {
@@ -144,7 +156,7 @@ fn emit_malformed_attribute(
     let mut msg = "attribute must be of the form ".to_owned();
     let mut suggestions = vec![];
     let mut first = true;
-    let inner = if attr.style == ast::AttrStyle::Inner { "!" } else { "" };
+    let inner = if style == ast::AttrStyle::Inner { "!" } else { "" };
     if template.word {
         first = false;
         let code = format!("#{}[{}]", inner, name);
@@ -169,12 +181,12 @@ fn emit_malformed_attribute(
         suggestions.push(code);
     }
     if should_warn(name) {
-        sess.buffer_lint(&ILL_FORMED_ATTRIBUTE_INPUT, attr.span, ast::CRATE_NODE_ID, &msg);
+        sess.buffer_lint(&ILL_FORMED_ATTRIBUTE_INPUT, span, ast::CRATE_NODE_ID, &msg);
     } else {
         sess.span_diagnostic
-            .struct_span_err(attr.span, &error_msg)
+            .struct_span_err(span, &error_msg)
             .span_suggestions(
-                attr.span,
+                span,
                 if suggestions.len() == 1 {
                     "must be of the form"
                 } else {
@@ -193,7 +205,7 @@ pub fn emit_fatal_malformed_builtin_attribute(
     name: Symbol,
 ) -> ! {
     let template = BUILTIN_ATTRIBUTE_MAP.get(&name).expect("builtin attr defined").template;
-    emit_malformed_attribute(sess, attr, name, template);
+    emit_malformed_attribute(sess, attr.style, attr.span, name, template);
     // This is fatal, otherwise it will likely cause a cascade of other errors
     // (and an error here is expected to be very rare).
     FatalError.raise()
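For context, the suggestion machinery above surfaces to users roughly as follows; the `#[ignore]` example is illustrative, and whether it lands as a hard error or as the `ILL_FORMED_ATTRIBUTE_INPUT` lint depends on `should_warn` for that attribute:

    // #[ignore = 42]
    // fn flaky_test() {}
    //
    // yields a diagnostic along the lines of
    //     attribute must be of the form `#[ignore]` or `#[ignore = "reason"]`
    // against the attribute span, with one suggestion per accepted form
    // ("must be of the form" when there is a single suggestion, a plural
    // variant of the message otherwise).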